This commit is contained in:
Christoph J. Scherr 2023-05-09 15:43:53 +02:00
parent 8b139c30f7
commit 0a8776e84b
Signed by: PlexSheep
GPG Key ID: 25B4ACF7D88186CC
4 changed files with 141 additions and 0 deletions

2
.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
.venv
src/.env

22
requirements.txt Normal file
View File

@@ -0,0 +1,22 @@
aiohttp==3.8.4
aiosignal==1.3.1
async-timeout==4.0.2
attrs==23.1.0
beautifulsoup4==4.12.2
bs4==0.0.1
certifi==2023.5.7
charset-normalizer==3.1.0
discord==2.2.3
discord.py==2.2.3
docutils==0.19
frozenlist==1.3.3
idna==3.4
lockfile==0.12.2
lxml==4.9.2
multidict==6.0.4
python-daemon==3.0.1
python-dotenv==1.0.0
requests==2.30.0
soupsieve==2.4.1
urllib3==2.0.2
yarl==1.9.2

0
src/__init__.py Normal file
View File

117
src/main.py Executable file
View File

@@ -0,0 +1,117 @@
from datetime import datetime
import daemon
import requests
import time
from time import sleep
import os
import re
from bs4 import BeautifulSoup
import dotenv
# Runtime configuration loaded from src/.env; the code below reads the keys
# URL, STORAGE_FILE and SLEEP_SECS from this mapping.
CONFIG = dotenv.dotenv_values(".env")
def read_known() -> list:
    """Load the list of already-seen article titles from the storage file.

    Reads ``CONFIG['STORAGE_FILE']`` one title per line, echoes the loaded
    entries to stdout, and returns them.  If the file cannot be read (e.g. it
    does not exist yet on first run), an empty list is returned instead of
    raising, so the caller treats every article as new.

    Returns:
        list: the known titles (possibly empty).
    """
    known: list = []
    try:
        # 'r' is sufficient — the original opened 'r+' but never wrote;
        # the with-statement closes the file, no explicit close() needed.
        with open(CONFIG['STORAGE_FILE'], 'r') as f:
            known = f.read().splitlines()
    except Exception as e:
        # Best-effort: a missing/unreadable file just means "nothing known yet".
        print("Could not initialize known titles: %s" % e)
    print("="*120 + "\nLoading from file\n" + "="*120)
    for index, item in enumerate(known):
        print(f"{index}:\t{item}")
    print("="*120)
    return known
def _extract_article(item) -> dict:
    """Pull the title/desc/link/duration/pubDate fields out of one RSS <item>.

    Tags are stripped by plain string replacement, matching the original code.
    NOTE(review): if a tag is absent, ``find()`` returns None and ``str()``
    yields the literal text "None" — preserved from the original behaviour.
    """
    return {
        'title': str(item.find('title')).replace("<title>", "").replace("</title>", ""),
        'desc': str(item.find('description')).replace("<description>", "").replace("</description>", ""),
        'link': re.sub(r"<.*>", "", str(item.find('enclosure'))),
        'dur': str(item.find('duration')).replace("<duration>", "").replace("</duration>", ""),
        'pub': str(item.find('pubDate')).replace("<pubDate>", "").replace("</pubDate>", ""),
    }


def _write_known(known: list) -> None:
    """Overwrite the storage file with the full list of known titles, one per line."""
    try:
        # The with-statement closes the file; the original's finally/close()
        # was redundant.
        with open(CONFIG['STORAGE_FILE'], 'w+') as file:
            for item in known:
                file.write(item + '\n')
    except Exception as e:
        # Best-effort persistence, as in the original: log and carry on.
        print("Could not write to file: %s" % e)


def daemon():
    """Poll the RSS feed forever and announce episodes not seen before.

    Each cycle fetches ``CONFIG['URL']``, parses every ``<item>``, prints a
    formatted announcement for each previously-unknown title, persists the
    updated title list to ``CONFIG['STORAGE_FILE']``, then sleeps
    ``CONFIG['SLEEP_SECS']`` seconds and repeats.

    NOTE(review): this function shadows the imported ``daemon`` module, so
    ``daemon.DaemonContext`` is unreachable after this definition — see the
    ``__main__`` guard.
    """
    try:
        while True:
            r = requests.get(CONFIG['URL'])
            soup = BeautifulSoup(r.content, features='xml')
            article_list: list = [_extract_article(a) for a in soup.findAll('item')]
            known: list = read_known()
            for a in article_list:
                if a['title'] in known:
                    print("found known: %s" % a['title'])
                else:
                    print("found unknown: %s" % a['title'])
                    title = a['title']
                    desc = a['desc']
                    link = a['link']
                    dur = a['dur']
                    pub = a['pub']
                    content = f"""
=============== Neue Folge ===============
Titel: {title}
Beschreibung:
{desc}
Link: {link}
Datum: {pub}
Länge: {dur}
==========================================
"""
                    print(content)
                    # TODO: webhook notification disabled — the original held
                    # commented-out code posting ``content`` to a Discord
                    # webhook (requests.post(url=WEBHOOK, json={...})).
                    known.append(a['title'])
                    # NOTE(review): indentation was lost in this copy of the
                    # source; persisting + rate-limit pause are placed per
                    # new article — confirm against the original file.
                    _write_known(known)
                    time.sleep(0.5)
            print("="*120 + "\nentering sleep for %d seconds at %s" % (int(CONFIG['SLEEP_SECS']), datetime.now()))
            print("="*120)
            sleep(int(CONFIG['SLEEP_SECS']))
    except ValueError as e:
        # e.g. SLEEP_SECS is not an integer: report and exit the loop quietly,
        # as the original did.
        print(e)
if __name__ == "__main__":
    # Bug in the original: it called daemon() and THEN tried
    # ``with daemon.DaemonContext(): daemon()``.  That second statement was
    # doubly broken — daemon() loops forever so it was unreachable, and the
    # local ``def daemon`` shadows the imported ``daemon`` module, so
    # ``daemon.DaemonContext`` would raise AttributeError anyway.  Run the
    # poller in the foreground, which is what actually happened before.
    daemon()