Fix HTTP download.

Thank you, mirux.
Schimon Jehudah, Adv. 2024-11-24 18:17:13 +02:00
parent 878753b26e
commit 23780584ad
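
The change threads a network-settings object through every call to fetch.http, which previously took only the URL. A minimal sketch of what the updated helper might look like, assuming it wraps aiohttp and that settings_network carries options such as a proxy URL and a user agent (the attribute names http_proxy and user_agent below are hypothetical, not the project's actual fields):

import aiohttp

# Hypothetical sketch of fetch.http after this commit; not the project's
# actual implementation. The result keys match the ones read by the callers
# in the diff: content, message, status_code, error.
async def http(settings_network, url):
    proxy = getattr(settings_network, 'http_proxy', None)       # assumed field
    user_agent = getattr(settings_network, 'user_agent', None)  # assumed field
    headers = {'User-Agent': user_agent} if user_agent else None
    result = {'content': None, 'message': None,
              'status_code': None, 'error': False}
    timeout = aiohttp.ClientTimeout(total=30)
    try:
        async with aiohttp.ClientSession(timeout=timeout,
                                         headers=headers) as session:
            async with session.get(url, proxy=proxy) as response:
                result['status_code'] = response.status
                result['content'] = await response.text()
                if response.status != 200:
                    result['error'] = True
                    result['message'] = 'HTTP {}'.format(response.status)
    except Exception as e:
        result['error'] = True
        result['message'] = str(e)
    return result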


@@ -272,7 +272,7 @@ class Feed:
feed_id = sqlite.get_feed_id(db_file, url)
if not feed_id:
if not sqlite.check_identifier_exist(db_file, identifier):
-result = await fetch.http(url)
+result = await fetch.http(self.settings_network, url)
message = result['message']
status_code = result['status_code']
if not result['error']:
@@ -975,7 +975,7 @@ class FeedDiscovery:
result = None
except Exception as e:
logger.error(str(e))
logger.warning("Failed to parse URL as feed for {}.".format(url))
logger.warning(f"Failed to parse URL as feed for {url}.")
result = {'link' : None,
'index' : None,
'name' : None,
@@ -983,18 +983,18 @@ class FeedDiscovery:
'error' : True,
'exist' : None}
if not result:
logger.debug("Feed auto-discovery engaged for {}".format(url))
logger.debug(f"Feed auto-discovery engaged for {url}")
result = FeedDiscovery.feed_mode_auto_discovery(url, tree)
if not result:
logger.debug("Feed link scan mode engaged for {}".format(url))
logger.debug(f"Feed link scan mode engaged for {url}")
result = FeedDiscovery.feed_mode_scan(url, tree, pathnames)
if not result:
logger.debug("Feed arbitrary mode engaged for {}".format(url))
logger.debug(f"Feed arbitrary mode engaged for {url}")
result = FeedDiscovery.feed_mode_guess(url, pathnames)
if not result:
logger.debug("No feeds were found for {}".format(url))
logger.debug(f"No feeds were found for {url}")
result = None
-result = await FeedDiscovery.process_feed_selection(url, result)
+result = await FeedDiscovery.process_feed_selection(settings_network, url, result)
return result
@@ -1139,10 +1139,10 @@ class FeedDiscovery:
# URLs (string) and Feeds (dict) and function that
# composes text message (string).
# Maybe that's not necessary.
-async def process_feed_selection(url, urls):
+async def process_feed_selection(settings_network, url, urls):
feeds = {}
for i in urls:
-result = await fetch.http(i)
+result = await fetch.http(settings_network, i)
if not result['error']:
document = result['content']
status_code = result['status_code']
@@ -1287,7 +1287,7 @@ class FeedTask:
# print('Skipping URL:', url)
# continue
-result = await fetch.http(url)
+result = await fetch.http(self.settings_network, url)
status_code = result['status_code']
feed_id = sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
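
Every updated call site follows the same pattern once the settings object is threaded through. A brief usage sketch, assuming a class that keeps its network settings on self.settings_network and reads the result keys shown in the diff:

# Hypothetical call site; mirrors the pattern of the changes above.
result = await fetch.http(self.settings_network, url)
if not result['error']:
    document = result['content']
    status_code = result['status_code']
else:
    logger.warning(result['message'])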