Add proxy for links.
Fix minor problems.

commit 76b2713223 (parent be008fc6b5)

3 changed files with 27 additions and 2 deletions
@@ -686,6 +686,7 @@ async def feed_mode_request(url, tree):
                 feed_amnt = len(feeds[feed].entries)
             except:
                 continue
+            feed_mark = 0
             if feed_amnt:
                 # NOTE Because there could be many false positives
                 #      which are revealed in second phase of scan, we

@@ -806,6 +807,7 @@ async def feed_mode_scan(url, tree):
             feed_name = urlsplit(feed).netloc
             feed_addr = feed
             feed_amnt = len(feeds[feed].entries)
+            feed_mark = 0
             if feed_amnt:
                 # NOTE Because there could be many false positives
                 #      which are revealed in second phase of scan, we
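Both hunks above insert feed_mark = 0 just before the if feed_amnt: branch. The commit does not say why, but the shape suggests it guards later reads of feed_mark when the branch never runs (or when the except: continue path fires). A minimal standalone sketch of that initialize-before-branch pattern, using hypothetical names (scan_candidates, candidate.entries) that are not part of Slixfeed:

def scan_candidates(candidates):
    """Count entries per candidate and mark the ones that have any.

    Illustrates why the flag is initialized before the conditional:
    without `mark = 0`, reading `mark` after a falsy `entry_count`
    would raise UnboundLocalError.
    """
    results = []
    for candidate in candidates:
        try:
            entry_count = len(candidate.entries)
        except AttributeError:
            continue
        mark = 0          # initialize before the branch, as in the hunks above
        if entry_count:
            mark = 1      # stand-in for the real marking logic
        results.append((candidate, mark))
    return results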
@@ -912,9 +914,14 @@ def is_feed(url, feed):
     val : boolean
         True or False.
     """
+    msg = None
     if not feed.entries:
         try:
             feed["feed"]["title"]
+            val = True
+            msg = (
+                "Empty feed for {}"
+                ).format(url)
         except:
             val = False
             msg = (
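The is_feed hunk initializes msg explicitly and treats a feed that has a title but no entries as valid (val = True, "Empty feed for ..."). The context window ends inside the except branch, so the rest of the function is not shown; the sketch below is a rough guess at the overall shape under that assumption, not the author's exact code:

def is_feed_sketch(url, feed):
    """Rough sketch of the empty-feed handling shown in the hunk.

    `feed` is assumed to be a feedparser.FeedParserDict; the messages
    and the final return are illustrative, since the diff context
    stops inside the except branch.
    """
    msg = None
    val = False
    if not feed.entries:
        try:
            feed["feed"]["title"]
            # A title without entries still counts as a (currently empty) feed.
            val = True
            msg = "Empty feed for {}".format(url)
        except (KeyError, AttributeError):
            val = False
            msg = "Not a feed: {}".format(url)        # placeholder message
    else:
        val = True
        msg = "Entries found for {}".format(url)      # placeholder message
    if msg:
        print(msg)
    return val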
slixfeed/format.py | 10 (new file)
@@ -0,0 +1,10 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+"""
+
+TODO
+
+Move code from sqlite.get_entry_unread
+
+"""
@@ -21,9 +21,16 @@ from datetime import date
 # from slixfeed.config import get_value_default
 import slixfeed.config as config
 # from slixfeed.data import join_url
-from slixfeed.datetime import current_time, rfc2822_to_iso8601
+from slixfeed.datetime import (
+    current_time,
+    rfc2822_to_iso8601
+    )
 from sqlite3 import connect, Error
-from slixfeed.url import join_url, remove_tracking_parameters
+from slixfeed.url import (
+    join_url,
+    remove_tracking_parameters,
+    replace_hostname
+    )

 # from eliot import start_action, to_file
 # # with start_action(action_type="list_feeds()", db=db_file):

@@ -485,6 +492,7 @@ async def get_entry_unread(db_file, num=None):
             # summary = "\n".join(summary)
             link = result[2]
             link = await remove_tracking_parameters(link)
+            link = (await replace_hostname(link)) or link
             sql = (
                 "SELECT name "
                 "FROM feeds "
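The added line in get_entry_unread is where the commit's "proxy for links" lands: after tracking parameters are stripped, replace_hostname gets a chance to rewrite the link's host (for example to a mirror or privacy front-end), and the `or link` fallback keeps the original URL whenever no replacement applies. A self-contained sketch of that pattern; the proxy table here is hypothetical, as the real mapping belongs to slixfeed.url and its configuration:

import asyncio
from urllib.parse import urlsplit, urlunsplit

# Hypothetical proxy table for illustration only; slixfeed.url.replace_hostname
# reads its mapping from configuration, not from a hard-coded dict.
PROXY_HOSTS = {
    "www.youtube.com": "invidious.example",
    "twitter.com": "nitter.example",
}

async def replace_hostname_sketch(link):
    """Return the link with its host swapped to a proxy, or None if unmapped."""
    parts = urlsplit(link)
    proxy = PROXY_HOSTS.get(parts.netloc)
    if proxy is None:
        return None
    return urlunsplit((parts.scheme, proxy, parts.path, parts.query, parts.fragment))

async def demo():
    link = "https://www.youtube.com/watch?v=abc123"
    # Same shape as the added line: fall back to the original URL when
    # no replacement applies.
    link = (await replace_hostname_sketch(link)) or link
    print(link)  # -> https://invidious.example/watch?v=abc123

asyncio.run(demo())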