diff --git a/aggreg.py b/aggreg.py
index af351c1b492de61919885e5dd08807c0a2cfb349..01a9450e7f114cbc835f4e1459984817de09a783 100644
--- a/aggreg.py
+++ b/aggreg.py
@@ -14,13 +14,15 @@ def charge_urls(liste_url: list[str]) -> list[dict[str, any] | None]:
 
 def fusion_flux(liste_url: list[str], liste_flux: list[dict[str, any] | None], tri_chrono: bool) -> list[dict[str, str]]:
     feeds = [{
-        'titre': entry['title'],
-        'categorie': entry['category'],
-        'serveur': urlparse(entry['title_detail']['base']).netloc,
-        'date_publi': entry['published'],
-        'lien': entry['link'],
-        'description': entry['description']
-    } for feed in liste_flux for entry in feed['entries']]
+        # 'category' and 'guid' are optional RSS elements; default when absent
+        'titre': entry.title,
+        'categorie': entry.get('category', ''),
+        'serveur': urlparse(entry.title_detail.base).netloc,
+        'date_publi': entry.published,
+        'lien': entry.link,
+        'description': entry.description,
+        'guid': entry.get('guid', entry.link)
+    } for feed in liste_flux if feed is not None for entry in feed['entries']]
 
     if tri_chrono:
         feeds.sort(key=lambda e: datetime.strptime(e['date_publi'], "%a, %d %b %Y %H:%M"),
@@ -32,3 +34,8 @@
 
 
 def main():
+    raise NotImplementedError
+
+
+if __name__ == "__main__":
+    main()