# aggreg.py
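"""Fetch RSS event feeds from a list of servers, merge and sort their entries,
and render the result as a single HTML status page."""
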
import feedparser
from urllib.parse import urlparse
from datetime import datetime
import time
from argparse import ArgumentParser
from typing import Any
import yaml


def charge_urls(liste_url: list[str]) -> list[dict[str, Any] | None]:
    # Parse each feed URL; if feedparser sets the 'bozo' flag, the feed could
    # not be processed cleanly, so we keep None in its place.
    return [
        None if (feed := feedparser.parse(url))['bozo'] else feed
        for url in liste_url
    ]


def fusion_flux(liste_url: list[str], liste_flux: list[dict[str, Any] | None],
                tri_chrono: bool) -> list[dict[str, str]]:
    # Flatten every successfully parsed feed into a single list of event dicts.
    feeds = [{
        'titre': entry.title,
        'categorie': entry.category,
        'serveur': urlparse(entry.title_detail.base).netloc,
        'date_publi': entry.published,
        'lien': entry.link,
        'description': entry.description,
        'guid': entry.guid
    } for feed in liste_flux if feed is not None for entry in feed['entries']]
    if tri_chrono:
        # Most recent events first; publication dates are expected in the
        # "Mon, 01 Jan 2024 13:37" format.
        feeds.sort(key=lambda e: datetime.strptime(e['date_publi'],
                                                   "%a, %d %b %Y %H:%M"),
                   reverse=True)
    else:
        # Otherwise order by severity, most critical first.
        feeds.sort(
            key=lambda e: ["CRITICAL", "MAJOR", "MINOR"].index(e['categorie']))
    return feeds


def genere_html(liste_evenements: list[dict[str, str]],
                chemin_html: str) -> None:
    # Build the whole page as a single string, then write it out in one go.
    output = "<!DOCTYPE html><html lang=\"en\"><head><meta charset=\"utf-8\"><meta name=\"viewport\" content=\"width=device-width, initial-scale=1\"><title>Events log</title><link rel=\"stylesheet\" href=\"css/feed.css\" type=\"text/css\"/></head><body><article><header><h1 class=\"maintitle\">Events log</h1></header>"
    output += f"<p class=\"currenttime\">{time.asctime()}</p>"
    output += "<div class=\"container\">"
    for evenement in liste_evenements:
        output += f"<article class=\"event {evenement['categorie'].lower()}\"><header>"
        output += f"<h2>{evenement['titre']}</h2>"
        output += f"<p class=\"server\">from: {evenement['serveur']}</p>"
        output += "</header>"
        output += f"<p>{evenement['date_publi']}</p>"
        output += f"<p>{evenement['categorie']}</p>"
        output += f"<p class=\"guid\">{evenement['guid']}</p>"
        output += f"<p><a class=\"eventlink\" href=\"{evenement['lien']}\">{evenement['lien']}</a></p>"
        output += f"<p>{evenement['description']}</p>"
        output += "</article>"
    output += "</div></article></body></html>"
    with open(chemin_html, "w") as outputfile:
        outputfile.write(output)


def main():
    parser = ArgumentParser(description="obtain event feeds from RSS sources and compile them into a single webpage")
    parser.add_argument("-c", "--config",
                        help="path of the config file to use instead of the default %(default)s",
                        default="/etc/eventswrangler.yml")
    configpath = parser.parse_args().config
    with open(configpath) as file:
        conf = yaml.safe_load(file)
    # Every source serves its feed under the same file name: <source>/<rss-name>.
    urls = [source + '/' + conf['rss-name'] for source in conf['sources']]
    feeds = charge_urls(urls)
    fusioned = fusion_flux(urls, feeds, conf['tri-chrono'])
    genere_html(fusioned, conf['destination'])


if __name__ == "__main__":
    main()
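
# A minimal sketch of the YAML config this script reads (the key names come
# from main() above; the hostnames, feed name, and paths are illustrative
# placeholders, not values from the project):
#
#   rss-name: events.rss
#   sources:
#     - http://server1.example.org
#     - http://server2.example.org
#   tri-chrono: true
#   destination: /var/www/html/events.html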