From 23780584ad4464cb8a0bc5d48875fdab8bb74e94 Mon Sep 17 00:00:00 2001
From: "Schimon Jehudah, Adv."
Date: Sun, 24 Nov 2024 18:17:13 +0200
Subject: [PATCH] Fix HTTP download. Thank you, mirux.

---
 slixfeed/syndication.py | 20 ++++++++++----------
 1 file changed, 10 insertions(+), 10 deletions(-)

diff --git a/slixfeed/syndication.py b/slixfeed/syndication.py
index 89f734e..4e0c0bc 100644
--- a/slixfeed/syndication.py
+++ b/slixfeed/syndication.py
@@ -272,7 +272,7 @@ class Feed:
         feed_id = sqlite.get_feed_id(db_file, url)
         if not feed_id:
             if not sqlite.check_identifier_exist(db_file, identifier):
-                result = await fetch.http(url)
+                result = await fetch.http(self.settings_network, url)
                 message = result['message']
                 status_code = result['status_code']
                 if not result['error']:
@@ -975,7 +975,7 @@ class FeedDiscovery:
             result = None
         except Exception as e:
             logger.error(str(e))
-            logger.warning("Failed to parse URL as feed for {}.".format(url))
+            logger.warning(f"Failed to parse URL as feed for {url}.")
             result = {'link' : None,
                       'index' : None,
                       'name' : None,
@@ -983,18 +983,18 @@
                       'error' : True,
                       'exist' : None}
         if not result:
-            logger.debug("Feed auto-discovery engaged for {}".format(url))
+            logger.debug(f"Feed auto-discovery engaged for {url}")
             result = FeedDiscovery.feed_mode_auto_discovery(url, tree)
         if not result:
-            logger.debug("Feed link scan mode engaged for {}".format(url))
+            logger.debug(f"Feed link scan mode engaged for {url}")
             result = FeedDiscovery.feed_mode_scan(url, tree, pathnames)
         if not result:
-            logger.debug("Feed arbitrary mode engaged for {}".format(url))
+            logger.debug(f"Feed arbitrary mode engaged for {url}")
             result = FeedDiscovery.feed_mode_guess(url, pathnames)
         if not result:
-            logger.debug("No feeds were found for {}".format(url))
+            logger.debug(f"No feeds were found for {url}")
             result = None
-        result = await FeedDiscovery.process_feed_selection(url, result)
+        result = await FeedDiscovery.process_feed_selection(settings_network, url, result)
         return result


@@ -1139,10 +1139,10 @@
     # URLs (string) and Feeds (dict) and function that
     # composes text message (string).
     # Maybe that's not necessary.
-    async def process_feed_selection(url, urls):
+    async def process_feed_selection(settings_network, url, urls):
         feeds = {}
         for i in urls:
-            result = await fetch.http(i)
+            result = await fetch.http(settings_network, i)
             if not result['error']:
                 document = result['content']
                 status_code = result['status_code']
@@ -1287,7 +1287,7 @@
                 #     print('Skipping URL:', url)
                 #     continue

-                result = await fetch.http(url)
+                result = await fetch.http(self.settings_network, url)
                 status_code = result['status_code']
                 feed_id = sqlite.get_feed_id(db_file, url)
                 feed_id = feed_id[0]
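
Every call site touched by this patch now passes the network settings as the
first argument to fetch.http. For orientation, below is a minimal sketch of a
coroutine compatible with those call sites, assuming settings_network is a
mapping with an optional 'http_proxy' entry and that the result dict exposes
the 'error', 'message', 'status_code', and 'content' keys consumed above. The
names and the proxy handling are illustrative assumptions; the actual
slixfeed.fetch implementation may differ.

    import aiohttp

    # Sketch only: slixfeed's real fetch.http may accept different options.
    async def http(settings_network, url):
        # Result dict shaped the way the call sites in this patch expect.
        result = {'error': False, 'message': None,
                  'status_code': None, 'content': None}
        # Assumption: settings_network carries an optional proxy URL.
        proxy = settings_network.get('http_proxy') or None
        try:
            async with aiohttp.ClientSession() as session:
                async with session.get(url, proxy=proxy) as response:
                    result['status_code'] = response.status
                    if response.status == 200:
                        result['content'] = await response.read()
                    else:
                        result['error'] = True
                        result['message'] = f'HTTP error {response.status}'
        except Exception as e:
            result['error'] = True
            result['message'] = str(e)
        return result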