Add proxy for links.

Fix minor problems.
Schimon Jehudah 2023-12-29 00:45:01 +00:00
parent be008fc6b5
commit 76b2713223
3 changed files with 27 additions and 2 deletions


@@ -686,6 +686,7 @@ async def feed_mode_request(url, tree):
feed_amnt = len(feeds[feed].entries)
except:
continue
feed_mark = 0
if feed_amnt:
# NOTE Because there could be many false positives
# which are revealed in second phase of scan, we
@@ -806,6 +807,7 @@ async def feed_mode_scan(url, tree):
feed_name = urlsplit(feed).netloc
feed_addr = feed
feed_amnt = len(feeds[feed].entries)
feed_mark = 0
if feed_amnt:
# NOTE Because there could be many false positives
# which are revealed in second phase of scan, we
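
Both hunks above make the same one-line fix: feed_mark now starts at 0 before the "if feed_amnt:" branch, presumably so that later code can test it safely even when a candidate URL yielded no entries or failed to parse. A minimal sketch of the pattern follows; the names mirror the diff, but the value assigned inside the branch is a placeholder, since the body of the branch is not shown here.

# Sketch only: illustrates why the default assignment matters in Python.
def mark_candidate(feed_amnt):
    feed_mark = 0              # the added line: always defined
    if feed_amnt:
        feed_mark = feed_amnt  # placeholder for the real marking logic
    return feed_mark           # no UnboundLocalError when the branch is skipped

print(mark_candidate(0))   # 0
print(mark_candidate(12))  # 12
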
@@ -912,9 +914,14 @@ def is_feed(url, feed):
val : boolean
True or False.
"""
msg = None
if not feed.entries:
try:
feed["feed"]["title"]
val = True
msg = (
"Empty feed for {}"
).format(url)
except:
val = False
msg = (

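The is_feed() hunk above now records a msg string alongside the boolean val, apparently so that the caller can be told why a document was or was not accepted as a feed. Below is a hedged usage sketch, assuming the feed argument is a feedparser result (which the feed["feed"]["title"] access suggests); the URL and the prints are placeholders rather than project code.

import feedparser

# Illustrative only: is_feed() is the function from the hunk above.
url = "https://example.org/feed.xml"
parsed = feedparser.parse(url)
if is_feed(url, parsed):
    print("Usable feed:", parsed.feed.get("title", url))
else:
    print("Not a usable feed:", url)
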
slixfeed/format.py Normal file

@@ -0,0 +1,10 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
TODO
Move code from sqlite.get_entry_unread
"""


@@ -21,9 +21,16 @@ from datetime import date
# from slixfeed.config import get_value_default
import slixfeed.config as config
# from slixfeed.data import join_url
from slixfeed.datetime import current_time, rfc2822_to_iso8601
from slixfeed.datetime import (
current_time,
rfc2822_to_iso8601
)
from sqlite3 import connect, Error
from slixfeed.url import join_url, remove_tracking_parameters
from slixfeed.url import (
join_url,
remove_tracking_parameters,
replace_hostname
)
# from eliot import start_action, to_file
# # with start_action(action_type="list_feeds()", db=db_file):
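
The hunk below adds "link = (await replace_hostname(link)) or link" right after the tracking-parameter cleanup in get_entry_unread(); together with the new replace_hostname import above, this is the "proxy for links" part of the commit: an entry's URL can be rewritten to an alternative hostname before it is sent out, and "or link" keeps the original URL whenever the helper returns nothing. replace_hostname itself lives in slixfeed.url and its implementation is not part of this diff, so the following is only a rough sketch of the general idea, with a made-up hostname mapping and a deliberately hypothetical name (replace_hostname_sketch).

from urllib.parse import urlsplit, urlunsplit
import asyncio

# Hypothetical mapping; the real project presumably takes its proxy
# hostnames from configuration, which this sketch does not reproduce.
PROXIES = {
    "www.youtube.com": "invidious.example",
    "twitter.com": "nitter.example",
}

async def replace_hostname_sketch(url):
    """Return the URL with a proxied hostname, or None if unmapped."""
    parts = urlsplit(url)
    proxy = PROXIES.get(parts.netloc)
    if not proxy:
        return None
    return urlunsplit((parts.scheme, proxy, parts.path, parts.query, parts.fragment))

# Mirrors the call site in the hunk below: fall back to the original
# link when no replacement is available.
link = "https://www.youtube.com/watch?v=abc"
link = asyncio.run(replace_hostname_sketch(link)) or link
print(link)  # https://invidious.example/watch?v=abc
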
@@ -485,6 +492,7 @@ async def get_entry_unread(db_file, num=None):
# summary = "\n".join(summary)
link = result[2]
link = await remove_tracking_parameters(link)
link = (await replace_hostname(link)) or link
sql = (
"SELECT name "
"FROM feeds "