forked from sch/Slixfeed
More segregation of code
This commit is contained in:
parent 7135994888
commit b77ef5346f
11 changed files with 937 additions and 778 deletions
@@ -1,3 +1,4 @@
 proxies:
   anonymousoverflow:
     clearnet:
+      - https://ao.phreedom.club
@@ -36,8 +36,13 @@ async def add_feed(db_file, url):
             db_file, url, title, status)
         await organize_items(
             db_file, [url])
-        old = await sqlite.get_settings_value(
+        old = (
+            await sqlite.get_settings_value(
                 db_file, "old")
+            ) or (
+            config.get_value_default(
+                "settings", "Settings", "old")
+            )
         if not old:
             await sqlite.mark_source_as_read(
                 db_file, url)
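
This commit repeatedly replaces single-source settings reads with a two-step lookup: take the per-database value, and fall back to the packaged default when the lookup returns nothing. A minimal standalone sketch of the pattern follows; stored_value and default_value are stand-ins for sqlite.get_settings_value and config.get_value_default, not the project's real API:

    import asyncio

    async def stored_value(db_file, key):
        # Stand-in for sqlite.get_settings_value: pretend no row exists.
        return None

    def default_value(filename, section, key):
        # Stand-in for config.get_value_default: the packaged default.
        return {"old": 0, "archive": 50}.get(key)

    async def main():
        # Prefer the per-database value; fall through to the default
        # whenever the stored value is falsy (None, 0, empty string).
        old = (
            await stored_value("slixfeed.db", "old")
        ) or (
            default_value("settings", "Settings", "old")
        )
        print(old)  # 0

    asyncio.run(main())

One caveat of the "or" chaining: a legitimately stored falsy value (for example 0) is indistinguishable from a missing one and is silently replaced by the default.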
@@ -277,7 +282,7 @@ async def organize_items(db_file, urls):
         entries = feed.entries
         # length = len(entries)
         # await remove_entry(db_file, source, length)
-        await sqlite.remove_nonexistent_entries(
+        await remove_nonexistent_entries(
             db_file, feed, source)
         # new_entry = 0
         for entry in entries:
@@ -331,17 +336,14 @@ async def organize_items(db_file, urls):
                 summary = "> *** No summary ***"
             read_status = 0
             pathname = urlsplit(link).path
-            string = (
-                "{} {} {}"
+            string = ("{} {} {}"
                 ).format(
-                    title,
-                    summary,
-                    pathname
+                    title, summary, pathname
                     )
-            allow_list = await config.is_listed(
+            allow_list = await config.is_include_keyword(
                 db_file, "filter-allow", string)
             if not allow_list:
-                reject_list = await config.is_listed(
+                reject_list = await config.is_include_keyword(
                     db_file, "filter-deny", string)
                 if reject_list:
                     # print(">>> REJECTED", title)
@@ -367,3 +369,103 @@ async def organize_items(db_file, urls):
                 # print(current_time(), exist, title)
+
+
+async def remove_nonexistent_entries(db_file, feed, source):
+    """
+    Remove entries that don't exist in a given parsed feed.
+    Check the entries returned from the feed and delete read
+    entries that no longer exist; move unread ones to table archive.
+
+    Parameters
+    ----------
+    db_file : str
+        Path to database file.
+    feed : list
+        Parsed feed document.
+    source : str
+        Feed URL. URL of associated feed.
+    """
+    items = await sqlite.get_entries_of_source(db_file, feed, source)
+    entries = feed.entries
+    # breakpoint()
+    for item in items:
+        valid = False
+        for entry in entries:
+            title = None
+            link = None
+            time = None
+            # valid = False
+            # TODO better check and don't repeat code
+            if entry.has_key("id") and item[3]:
+                if entry.id == item[3]:
+                    # print("compare1:", entry.id)
+                    # print("compare2:", item[3])
+                    # print("============")
+                    valid = True
+                    break
+            else:
+                if entry.has_key("title"):
+                    title = entry.title
+                else:
+                    title = feed["feed"]["title"]
+                if entry.has_key("link"):
+                    link = join_url(source, entry.link)
+                else:
+                    link = source
+                if entry.has_key("published") and item[4]:
+                    # print("compare11:", title, link, time)
+                    # print("compare22:", item[1], item[2], item[4])
+                    # print("============")
+                    time = rfc2822_to_iso8601(entry.published)
+                    if (item[1] == title and
+                        item[2] == link and
+                        item[4] == time):
+                        valid = True
+                        break
+                else:
+                    if (item[1] == title and
+                        item[2] == link):
+                        # print("compare111:", title, link)
+                        # print("compare222:", item[1], item[2])
+                        # print("============")
+                        valid = True
+                        break
+        # TODO better check and don't repeat code
+        if not valid:
+            # print("id: ", item[0])
+            # if title:
+            #     print("title: ", title)
+            #     print("item[1]: ", item[1])
+            # if link:
+            #     print("link: ", link)
+            #     print("item[2]: ", item[2])
+            # if entry.id:
+            #     print("last_entry:", entry.id)
+            #     print("item[3]: ", item[3])
+            # if time:
+            #     print("time: ", time)
+            #     print("item[4]: ", item[4])
+            # print("read: ", item[5])
+            # breakpoint()
+
+            # TODO Send to table archive
+            # TODO Also make a regular/routine check for sources that
+            #      have been changed (though that can only happen when
+            #      manually editing)
+            ix = item[0]
+            # print(">>> SOURCE: ", source)
+            # print(">>> INVALID:", item[1])
+            # print("title:", item[1])
+            # print("link :", item[2])
+            # print("id :", item[3])
+            if item[5] == 1:
+                await sqlite.delete_entry_by_id(db_file, ix)
+                # print(">>> DELETING:", item[1])
+            else:
+                # print(">>> ARCHIVING:", item[1])
+                await sqlite.archive_entry(db_file, ix)
+    limit = (
+        await sqlite.get_settings_value(db_file, "archive")
+    ) or (
+        config.get_value_default("settings", "Settings", "archive")
+    )
+    await sqlite.maintain_archive(db_file, limit)
@@ -115,14 +115,16 @@ def get_value_default(filename, section, key):
     return result


-def get_list(filename):
+def get_list(filename, key):
     """
     Get settings default value.

     Parameters
     ----------
     filename : str
-        filename of yaml file.
+        Filename of yaml file.
+    key: str
+        Key.

     Returns
     -------
@@ -137,6 +139,7 @@ def get_list(filename):
     # default = yaml.safe_load(defaults)
     # result = default[key]
     result = yaml.safe_load(defaults)
+    result = result[key]
     return result


@@ -305,7 +308,7 @@ async def remove_from_list(newwords, keywords):
     return val


-async def is_listed(db_file, key, string):
+async def is_include_keyword(db_file, key, string):
     """
     Check keyword match.

@@ -325,21 +328,16 @@ async def is_listed(db_file, key, string):
     """
     # async def reject(db_file, string):
     # async def is_blacklisted(db_file, string):
-    list = await sqlite.get_filters_value(
-        db_file,
-        key
-        )
-    if list:
-        list = list.split(",")
-        for i in list:
-            if not i or len(i) < 2:
+    keywords = (await sqlite.get_filters_value(db_file, key)) or ''
+    keywords = keywords.split(",")
+    keywords = keywords + (get_list("lists.yaml", key))
+    for keyword in keywords:
+        if not keyword or len(keyword) < 2:
             continue
-        if i in string.lower():
+        if keyword in string.lower():
             # print(">>> ACTIVATE", i)
             # return 1
-            return i
-        else:
-            return None
+            return keyword

 """
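
The renamed is_include_keyword now merges the per-database filter list with the matching list from lists.yaml and returns the first keyword found in the lowercased input. The core matching loop, reduced to a pure-Python sketch with both lookups replaced by a literal list:

    def first_match(keywords, string):
        for keyword in keywords:
            # Empty or one-character entries (e.g. stray commas in the
            # stored comma-separated value) are skipped so they cannot
            # match every string.
            if not keyword or len(keyword) < 2:
                continue
            if keyword in string.lower():
                return keyword  # first hit wins; None if nothing matches

    print(first_match(["", "x", "linux"], "Weekly Linux news"))  # linux

In organize_items the string being tested is the title, summary and URL path joined together, so a keyword can match any of the three.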
@@ -120,8 +120,7 @@ async def feed_mode_request(url, tree):
     """
     feeds = {}
     parted_url = urlsplit(url)
-    paths = config.get_list("lists.yaml")
-    paths = paths["pathnames"]
+    paths = config.get_list("lists.yaml", "pathnames")
     for path in paths:
         address = urlunsplit([
             parted_url.scheme,
@@ -229,8 +228,7 @@ async def feed_mode_scan(url, tree):
     feeds = {}
     # paths = []
     # TODO Test
-    paths = config.get_list("lists.yaml")
-    paths = paths["pathnames"]
+    paths = config.get_list("lists.yaml", "pathnames")
     for path in paths:
         # xpath_query = "//*[@*[contains(.,'{}')]]".format(path)
         # xpath_query = "//a[contains(@href,'{}')]".format(path)
@@ -66,15 +66,12 @@ async def download_feed(url):
     msg: list or str
         Document or error message.
     """
-    try:
-        user_agent = config.get_value_default("settings", "Network", "user-agent")
-    except:
-        user_agent = "Slixfeed/0.1"
-    if not len(user_agent):
-        user_agent = "Slixfeed/0.1"
+    user_agent = (config.get_value(
+        "settings", "Network", "user-agent")) or 'Slixfeed/0.1'
     headers = {'User-Agent': user_agent}
     url = url[0]
-    proxy = (config.get_value("settings", "Network", "http_proxy")) or ''
+    proxy = (config.get_value(
+        "settings", "Network", "http_proxy")) or ''
     timeout = ClientTimeout(total=10)
     async with ClientSession(headers=headers) as session:
     # async with ClientSession(trust_env=True) as session:
152 slixfeed/filter.py Normal file
@@ -0,0 +1,152 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+"""
+
+TODO
+
+1) Website-specific filter (i.e. audiobookbay).
+
+2) Exclude websites from filtering (e.g. metapedia).
+
+3) Filter phrases:
+   Refer to sqlitehandler.search_entries for implementation.
+   It is expected to be more complex than function search_entries.
+
+"""
+
+import slixfeed.config as config
+import slixfeed.sqlite as sqlite
+
+
+async def add_to_list(newwords, keywords):
+    """
+    Append new keywords to list.
+
+    Parameters
+    ----------
+    newwords : str
+        List of new keywords.
+    keywords : str
+        List of current keywords.
+
+    Returns
+    -------
+    val : str
+        List of current keywords and new keywords.
+    """
+    if isinstance(keywords, str) or keywords is None:
+        try:
+            keywords = keywords.split(",")
+        except:
+            keywords = []
+    newwords = newwords.lower().split(",")
+    for word in newwords:
+        word = word.strip()
+        if len(word) and word not in keywords:
+            keywords.extend([word])
+    keywords.sort()
+    val = ",".join(keywords)
+    return val
+
+
+async def remove_from_list(newwords, keywords):
+    """
+    Remove given keywords from list.
+
+    Parameters
+    ----------
+    newwords : str
+        List of new keywords.
+    keywords : str
+        List of current keywords.
+
+    Returns
+    -------
+    val : str
+        List of new keywords.
+    """
+    if isinstance(keywords, str) or keywords is None:
+        try:
+            keywords = keywords.split(",")
+        except:
+            keywords = []
+    newwords = newwords.lower().split(",")
+    for word in newwords:
+        word = word.strip()
+        if len(word) and word in keywords:
+            keywords.remove(word)
+    keywords.sort()
+    val = ",".join(keywords)
+    return val
+
+
+async def is_include_keyword(db_file, key, string):
+    """
+    Check keyword match.
+
+    Parameters
+    ----------
+    db_file : str
+        Path to database file.
+    type : str
+        "allow" or "deny".
+    string : str
+        String.
+
+    Returns
+    -------
+    Matched keyword or None.
+
+    """
+    # async def reject(db_file, string):
+    # async def is_blacklisted(db_file, string):
+    keywords = (await sqlite.get_filters_value(db_file, key)) or ''
+    keywords = keywords.split(",")
+    keywords = keywords + (config.get_list("lists.yaml", key))
+    for keyword in keywords:
+        if not keyword or len(keyword) < 2:
+            continue
+        if keyword in string.lower():
+            # print(">>> ACTIVATE", i)
+            # return 1
+            return keyword
+
+"""
+
+This code was tested at module datahandler
+
+reject = 0
+blacklist = await get_settings_value(
+    db_file,
+    "filter-deny"
+    )
+# print(">>> blacklist:")
+# print(blacklist)
+# breakpoint()
+if blacklist:
+    blacklist = blacklist.split(",")
+    # print(">>> blacklist.split")
+    # print(blacklist)
+    # breakpoint()
+    for i in blacklist:
+        # print(">>> length", len(i))
+        # breakpoint()
+        # if len(i):
+        if not i or len(i) < 2:
+            print(">>> continue due to length", len(i))
+            # breakpoint()
+            continue
+        # print(title)
+        # print(">>> blacklisted word:", i)
+        # breakpoint()
+        test = (title + " " + summary + " " + link)
+        if i in test.lower():
+            reject = 1
+            break
+
+if reject:
+    print("rejected:",title)
+    entry = (title, '', link, source, date, 1);
+
+"""
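
A short usage sketch for the new module, assuming the package is importable as slixfeed; keyword lists are stored as a single sorted, comma-separated string:

    import asyncio
    from slixfeed.filter import add_to_list, remove_from_list

    async def demo():
        val = await add_to_list("Linux, BSD", "emacs,vim")
        print(val)  # bsd,emacs,linux,vim
        val = await remove_from_list("vim", val)
        print(val)  # bsd,emacs,linux

    asyncio.run(demo())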
@@ -16,9 +16,8 @@ TODO
 """

 from asyncio import Lock
-from bs4 import BeautifulSoup
 from datetime import date
-# from slixfeed.config import get_value_default
 import logging
 import slixfeed.config as config
+# from slixfeed.data import join_url
 from slixfeed.datetime import (
@@ -413,7 +412,7 @@ async def get_unread_entries(db_file, num):
     return results


-def mark_entry_as_read(cur, ix):
+async def mark_entry_as_read(cur, ix):
     """
     Set read status of entry as read.

@@ -454,6 +453,70 @@ async def mark_source_as_read(db_file, source):
         cur.execute(sql, (source,))


+async def delete_entry_by_id(db_file, ix):
+    """
+    Delete entry by Id.
+
+    Parameters
+    ----------
+    db_file : str
+        Path to database file.
+    ix : str
+        Index.
+    """
+    async with DBLOCK:
+        with create_connection(db_file) as conn:
+            cur = conn.cursor()
+            sql = (
+                "DELETE "
+                "FROM entries "
+                "WHERE id = :ix"
+                )
+            cur.execute(sql, (ix,))
+
+
+async def archive_entry(db_file, ix):
+    """
+    Insert entry to archive and delete entry.
+
+    Parameters
+    ----------
+    db_file : str
+        Path to database file.
+    ix : str
+        Index.
+    """
+    async with DBLOCK:
+        with create_connection(db_file) as conn:
+            cur = conn.cursor()
+            sql = (
+                "INSERT "
+                "INTO archive "
+                "SELECT * "
+                "FROM entries "
+                "WHERE entries.id = :ix"
+                )
+            try:
+                cur.execute(sql, (ix,))
+            except:
+                print(
+                    "ERROR DB insert from entries "
+                    "into archive at index", ix
+                    )
+            sql = (
+                "DELETE "
+                "FROM entries "
+                "WHERE id = :ix"
+                )
+            try:
+                cur.execute(sql, (ix,))
+            except:
+                print(
+                    "ERROR DB deleting items from "
+                    "table entries at index", ix
+                    )
+
+
 def get_feed_title(db_file, source):
     with create_connection(db_file) as conn:
         cur = conn.cursor()
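
The new archive_entry copies a row into the archive table and then deletes it from entries on one cursor. The same copy-then-delete pattern in a self-contained sqlite3 session; the two-column schema here is illustrative, not the project's real one:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE entries(id INTEGER PRIMARY KEY, title TEXT)")
    conn.execute("CREATE TABLE archive(id INTEGER PRIMARY KEY, title TEXT)")
    conn.execute("INSERT INTO entries VALUES (1, 'old item')")
    with conn:  # one transaction: both statements commit together
        conn.execute("INSERT INTO archive SELECT * FROM entries WHERE id = ?", (1,))
        conn.execute("DELETE FROM entries WHERE id = ?", (1,))
    print(conn.execute("SELECT * FROM archive").fetchall())  # [(1, 'old item')]

INSERT INTO archive SELECT * FROM entries relies on the two tables having compatible columns in the same order, which is why archive mirrors entries.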
@@ -477,8 +540,9 @@ async def mark_as_read(db_file, ix):
             # NOTE: We can use DBLOCK once for both
             # functions, because, due to exclusive
             # ID, only one can ever occur.
-            mark_entry_as_read(cur, ix)
-            delete_archived_entry(cur, ix)
+            await mark_entry_as_read(cur, ix)
+            await delete_archived_entry(cur, ix)


 async def mark_all_as_read(db_file):
     """
@@ -503,7 +567,7 @@ async def mark_all_as_read(db_file):
         cur.execute(sql)


-def delete_archived_entry(cur, ix):
+async def delete_archived_entry(cur, ix):
     """
     Delete entry from table archive.

@@ -644,7 +708,6 @@ async def set_date(cur, url):
     url : str
         URL.
     """
-    today = date.today()
     sql = (
         "UPDATE feeds "
         "SET updated = :today "
@@ -652,7 +715,7 @@ async def set_date(cur, url):
         )
     # cur = conn.cursor()
     cur.execute(sql, {
-        "today": today,
+        "today": date.today(),
         "url": url
         })

@@ -780,7 +843,7 @@ async def add_entry(cur, entry):
     # breakpoint()


-async def maintain_archive(cur, limit):
+async def maintain_archive(db_file, limit):
     """
     Maintain list of archived entries equal to specified number of items.

@@ -789,6 +852,9 @@ async def maintain_archive(cur, limit):
     db_file : str
         Path to database file.
     """
+    async with DBLOCK:
+        with create_connection(db_file) as conn:
+            cur = conn.cursor()
             sql = (
                 "SELECT count(id) "
                 "FROM archive"
@@ -800,18 +866,18 @@ async def maintain_archive(cur, limit):
             # if isinstance(limit,str):
             #     print("STOP")
             #     breakpoint()
-            reduc = count - int(limit)
-            if reduc > 0:
+            difference = count - int(limit)
+            if difference > 0:
                 sql = (
                     "DELETE FROM archive "
                     "WHERE id "
                     "IN (SELECT id "
                     "FROM archive "
                     "ORDER BY timestamp ASC "
-                    "LIMIT :reduc)"
+                    "LIMIT :difference)"
                     )
                 cur.execute(sql, {
-                    "reduc": reduc
+                    "difference": difference
                     })


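
maintain_archive now opens its own connection under DBLOCK instead of receiving a cursor. Its pruning step, reduced to a sketch against a bare sqlite3 connection:

    import sqlite3

    def prune_archive(conn, limit):
        # Keep at most "limit" rows: delete the oldest (by timestamp)
        # rows beyond that, mirroring the DELETE statement above.
        count = conn.execute("SELECT count(id) FROM archive").fetchone()[0]
        difference = count - int(limit)
        if difference > 0:
            conn.execute(
                "DELETE FROM archive WHERE id IN "
                "(SELECT id FROM archive ORDER BY timestamp ASC "
                "LIMIT :difference)",
                {"difference": difference})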
@@ -819,7 +885,7 @@ async def maintain_archive(cur, limit):
 # NOTE Entries that are read from archive are deleted.
 # NOTE Unlike entries from table entries, entries from
 #      table archive are not marked as read.
-async def remove_nonexistent_entries(db_file, feed, source):
+async def get_entries_of_source(db_file, feed, source):
     """
     Remove entries that don't exist in a given parsed feed.
     Check the entries returned from feed and delete read non
@@ -842,117 +908,7 @@ async def remove_nonexistent_entries(db_file, feed, source):
         "WHERE source = ?"
         )
     items = cur.execute(sql, (source,)).fetchall()
-    entries = feed.entries
-    # breakpoint()
-    for item in items:
-        valid = False
-        for entry in entries:
-            title = None
-            link = None
-            time = None
-            # valid = False
-            # TODO better check and don't repeat code
-            if entry.has_key("id") and item[3]:
-                if entry.id == item[3]:
-                    # print("compare1:", entry.id)
-                    # print("compare2:", item[3])
-                    # print("============")
-                    valid = True
-                    break
-            else:
-                if entry.has_key("title"):
-                    title = entry.title
-                else:
-                    title = feed["feed"]["title"]
-                if entry.has_key("link"):
-                    link = join_url(source, entry.link)
-                else:
-                    link = source
-                if entry.has_key("published") and item[4]:
-                    # print("compare11:", title, link, time)
-                    # print("compare22:", item[1], item[2], item[4])
-                    # print("============")
-                    time = rfc2822_to_iso8601(entry.published)
-                    if (item[1] == title and
-                        item[2] == link and
-                        item[4] == time):
-                        valid = True
-                        break
-                else:
-                    if (item[1] == title and
-                        item[2] == link):
-                        # print("compare111:", title, link)
-                        # print("compare222:", item[1], item[2])
-                        # print("============")
-                        valid = True
-                        break
-        # TODO better check and don't repeat code
-        if not valid:
-            # print("id: ", item[0])
-            # if title:
-            #     print("title: ", title)
-            #     print("item[1]: ", item[1])
-            # if link:
-            #     print("link: ", link)
-            #     print("item[2]: ", item[2])
-            # if entry.id:
-            #     print("last_entry:", entry.id)
-            #     print("item[3]: ", item[3])
-            # if time:
-            #     print("time: ", time)
-            #     print("item[4]: ", item[4])
-            # print("read: ", item[5])
-            # breakpoint()
-            async with DBLOCK:
-                # TODO Send to table archive
-                # TODO Also make a regular/routine check for sources that
-                #      have been changed (though that can only happen when
-                #      manually editing)
-                ix = item[0]
-                # print(">>> SOURCE: ", source)
-                # print(">>> INVALID:", item[1])
-                # print("title:", item[1])
-                # print("link :", item[2])
-                # print("id :", item[3])
-                if item[5] == 1:
-                    # print(">>> DELETING:", item[1])
-                    sql = (
-                        "DELETE "
-                        "FROM entries "
-                        "WHERE id = :ix"
-                        )
-                    cur.execute(sql, (ix,))
-                else:
-                    # print(">>> ARCHIVING:", item[1])
-                    sql = (
-                        "INSERT "
-                        "INTO archive "
-                        "SELECT * "
-                        "FROM entries "
-                        "WHERE entries.id = :ix"
-                        )
-                    try:
-                        cur.execute(sql, (ix,))
-                    except:
-                        print(
-                            "ERROR DB insert from entries "
-                            "into archive at index", ix
-                            )
-                    sql = (
-                        "DELETE "
-                        "FROM entries "
-                        "WHERE id = :ix"
-                        )
-                    try:
-                        cur.execute(sql, (ix,))
-                    except:
-                        print(
-                            "ERROR DB deleting items from "
-                            "table entries at index", ix
-                            )
-    async with DBLOCK:
-        limit = await get_settings_value(db_file, "archive")
-        await maintain_archive(cur, limit)
+    return items


 # TODO What is this function for? 2024-01-02
@@ -1253,7 +1209,7 @@ async def set_settings_value(db_file, key_value):
     async with DBLOCK:
         with create_connection(db_file) as conn:
             cur = conn.cursor()
-            await set_settings_value_default(cur, key)
+            # try:
             sql = (
                 "UPDATE settings "
                 "SET value = :value "
@@ -1263,48 +1219,10 @@ async def set_settings_value(db_file, key_value):
                 "key": key,
                 "value": value
                 })
-
-
-async def set_settings_value_default(cur, key):
-    """
-    Set default settings value, if no value found.
-
-    Parameters
-    ----------
-    cur : object
-        Cursor object.
-    key : str
-        Key: enabled, interval, master, quantum, random.
-
-    Returns
-    -------
-    val : str
-        Numeric value.
-    """
-    # async def set_settings_value_default(cur):
-    #     keys = ["enabled", "interval", "quantum"]
-    #     for i in keys:
-    #         sql = "SELECT id FROM settings WHERE key = ?"
-    #         cur.execute(sql, (i,))
-    #         if not cur.fetchone():
-    #             val = settings.get_value_default(i)
-    #             sql = "INSERT INTO settings(key,value) VALUES(?,?)"
-    #             cur.execute(sql, (i, val))
-    sql = (
-        "SELECT id "
-        "FROM settings "
-        "WHERE key = ?"
-        )
-    cur.execute(sql, (key,))
-    if not cur.fetchone():
-        value = config.get_value_default("settings", "Settings", key)
-        sql = (
-            "INSERT "
-            "INTO settings(key,value) "
-            "VALUES(?,?)"
-            )
-        cur.execute(sql, (key, value))
-        return value
+            # except:
+            #     logging.debug(
+            #         "No specific value set for key {}.".format(key)
+            #         )


 async def get_settings_value(db_file, key):
@@ -1324,31 +1242,20 @@ async def get_settings_value(db_file, key):
     val : str
         Numeric value.
     """
-    # try:
-    #     with create_connection(db_file) as conn:
-    #         cur = conn.cursor()
-    #         sql = "SELECT value FROM settings WHERE key = ?"
-    #         cur.execute(sql, (key,))
-    #         result = cur.fetchone()
-    # except:
-    #     result = settings.get_value_default(key)
-    # if not result:
-    #     result = settings.get_value_default(key)
-    # return result
     with create_connection(db_file) as conn:
+        try:
             cur = conn.cursor()
-        try:
             sql = (
                 "SELECT value "
                 "FROM settings "
                 "WHERE key = ?"
                 )
-            val = cur.execute(sql, (key,)).fetchone()[0]
+            value = cur.execute(sql, (key,)).fetchone()[0]
+            return value
         except:
-            val = await set_settings_value_default(cur, key)
-            if not val:
-                val = await set_settings_value_default(cur, key)
-            return val
+            logging.debug(
+                "No specific value set for key {}.".format(key)
                )


 async def set_filters_value(db_file, key_value):
@@ -1379,7 +1286,6 @@ async def set_filters_value(db_file, key_value):
     async with DBLOCK:
         with create_connection(db_file) as conn:
             cur = conn.cursor()
-            await set_filters_value_default(cur, key)
             sql = (
                 "UPDATE filters "
                 "SET value = :value "
@@ -1391,41 +1297,6 @@ async def set_filters_value(db_file, key_value):
                 })


-async def set_filters_value_default(cur, key):
-    """
-    Set default filters value, if no value found.
-
-    Parameters
-    ----------
-    cur : object
-        Cursor object.
-    key : str
-        Key: filter-allow, filter-deny, filter-replace.
-
-    Returns
-    -------
-    val : str
-        List of strings.
-    """
-    sql = (
-        "SELECT id "
-        "FROM filters "
-        "WHERE key = ?"
-        )
-    cur.execute(sql, (key,))
-    if not cur.fetchone():
-        val = config.get_list("lists.yaml")
-        val = val[key]
-        val = ",".join(val)
-        sql = (
-            "INSERT "
-            "INTO filters(key,value) "
-            "VALUES(?,?)"
-            )
-        cur.execute(sql, (key, val))
-        return val
-
-
 async def get_filters_value(db_file, key):
     """
     Get filters value.

@@ -1443,16 +1314,16 @@ async def get_filters_value(db_file, key):
         List of strings.
     """
     with create_connection(db_file) as conn:
+        try:
             cur = conn.cursor()
-        try:
             sql = (
                 "SELECT value "
                 "FROM filters "
                 "WHERE key = ?"
                 )
-            val = cur.execute(sql, (key,)).fetchone()[0]
+            value = cur.execute(sql, (key,)).fetchone()[0]
+            return value
         except:
-            val = await set_filters_value_default(cur, key)
-            if not val:
-                val = await set_filters_value_default(cur, key)
-            return val
+            logging.debug(
+                "No specific value set for key {}.".format(key)
                )
@@ -116,14 +116,17 @@ async def start_tasks_xmpp(self, jid, tasks):


 async def clean_tasks_xmpp(jid, tasks):
-    logging.debug("Stopping tasks {} for JID {}".format(tasks, jid))
+    logging.debug(
+        "Stopping tasks {} for JID {}".format(tasks, jid)
+        )
     for task in tasks:
         # if task_manager[jid][task]:
         try:
             task_manager[jid][task].cancel()
         except:
             logging.debug(
-                "No task {} for JID {} (clean_tasks)".format(task, jid))
+                "No task {} for JID {} (clean_tasks)".format(task, jid)
+                )


 """
@@ -149,7 +152,13 @@ async def task_jid(self, jid):
         Jabber ID.
     """
     db_file = get_pathname_to_database(jid)
-    enabled = await get_settings_value(db_file, "enabled")
+    enabled = (
+        await get_settings_value(
+            db_file, "enabled")
+        ) or (
+        get_value_default(
+            "settings", "Settings", "enabled")
+        )
     if enabled:
         # NOTE Perhaps we want to utilize super with keyword
         # arguments in order to know what tasks to initiate.
@@ -199,10 +208,22 @@ async def send_update(self, jid, num=None):
     """
     logging.debug("Sending a news update to JID {}".format(jid))
     db_file = get_pathname_to_database(jid)
-    enabled = await get_settings_value(db_file, "enabled")
+    enabled = (
+        await get_settings_value(
+            db_file, "enabled")
+        ) or (
+        get_value_default(
+            "settings", "Settings", "enabled")
+        )
     if enabled:
         if not num:
-            num = await get_settings_value(db_file, "quantum")
+            num = (
+                await get_settings_value(
+                    db_file, "quantum")
+                ) or (
+                get_value_default(
+                    "settings", "Settings", "quantum")
+                )
         else:
             num = int(num)
         news_digest = []
@@ -265,15 +286,23 @@ async def send_status(self, jid):
     jid : str
         Jabber ID.
     """
-    logging.debug("Sending a status message to JID {}".format(jid))
+    logging.debug(
+        "Sending a status message to JID {}".format(jid))
     status_text = "🤖️ Slixfeed RSS News Bot"
     db_file = get_pathname_to_database(jid)
-    enabled = await get_settings_value(db_file, "enabled")
+    enabled = (
+        await get_settings_value(
+            db_file, "enabled")
+        ) or (
+        get_value_default(
+            "settings", "Settings", "enabled")
+        )
     if not enabled:
         status_mode = "xa"
         status_text = "📫️ Send \"Start\" to receive updates"
     else:
-        feeds = await get_number_of_items(db_file, "feeds")
+        feeds = await get_number_of_items(
+            db_file, "feeds")
         # print(await current_time(), jid, "has", feeds, "feeds")
         if not feeds:
             print(">>> not feeds:", feeds, "jid:", jid)
@@ -335,7 +364,13 @@ async def refresh_task(self, jid, callback, key, val=None):
         )
     if not val:
         db_file = get_pathname_to_database(jid)
-        val = await get_settings_value(db_file, key)
+        val = (
+            await get_settings_value(
+                db_file, key)
+            ) or (
+            get_value_default(
+                "settings", "Settings", key)
+            )
     # if task_manager[jid][key]:
     if jid in task_manager:
         try:
@@ -389,7 +424,8 @@ async def check_updates(jid):
     db_file = get_pathname_to_database(jid)
     urls = await get_feeds_url(db_file)
     await organize_items(db_file, urls)
-    val = get_value_default("settings", "Settings", "check")
+    val = get_value_default(
+        "settings", "Settings", "check")
     await asyncio.sleep(60 * float(val))
     # Schedule to call this function again in 90 minutes
     # loop.call_at(
@@ -54,7 +54,7 @@ def replace_hostname(url, url_type):
     pathname = parted_url.path
     queries = parted_url.query
     fragment = parted_url.fragment
-    proxies = config.get_list("proxies.yaml")
+    proxies = config.get_list("proxies.yaml", "proxies")
     for proxy in proxies:
         proxy = proxies[proxy]
         if hostname in proxy["hostname"] and url_type in proxy["type"]:
@@ -92,8 +92,7 @@ def remove_tracking_parameters(url):
     pathname = parted_url.path
     queries = parse_qs(parted_url.query)
     fragment = parted_url.fragment
-    trackers = config.get_list("queries.yaml")
-    trackers = trackers["trackers"]
+    trackers = config.get_list("queries.yaml", "trackers")
     for tracker in trackers:
         if tracker in queries: del queries[tracker]
     queries_new = urlencode(queries, doseq=True)
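
With get_list now taking a key, remove_tracking_parameters reads the trackers list in one call. The whole routine condensed into a standalone sketch, with the list inlined instead of read from queries.yaml:

    from urllib.parse import parse_qs, urlencode, urlsplit, urlunsplit

    def strip_trackers(url, trackers=("utm_source", "utm_medium", "fbclid")):
        parted = urlsplit(url)
        queries = parse_qs(parted.query)
        for tracker in trackers:
            if tracker in queries:
                del queries[tracker]
        return urlunsplit([parted.scheme, parted.netloc, parted.path,
                           urlencode(queries, doseq=True), parted.fragment])

    print(strip_trackers("https://example.org/a?utm_source=x&id=7"))
    # https://example.org/a?id=7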
@@ -16,10 +16,7 @@ TODO
 2) Assure message delivery before calling a new task.
    See https://slixmpp.readthedocs.io/en/latest/event_index.html#term-marker_acknowledged

-3) Do not send updates when busy or away.
-   See https://slixmpp.readthedocs.io/en/latest/event_index.html#term-changed_status
-
-4) XHTTML-IM
+3) XHTTML-IM
     case _ if message_lowercase.startswith("html"):
         message['html']="
 Parse me!
@@ -23,6 +23,7 @@ import slixfeed.action as action
 from slixfeed.config import (
     add_to_list,
     get_default_dbdir,
+    get_value_default,
     get_value,
     get_pathname_to_database,
     remove_from_list)
@@ -224,13 +225,20 @@ async def message(self, message):
                 if not exist:
                     await sqlite.insert_feed(db_file, url, title)
                     await action.organize_items(db_file, [url])
-                    old = await sqlite.get_settings_value(db_file, "old")
+                    old = (
+                        await sqlite.get_settings_value(db_file, "old")
+                        ) or (
+                        get_value_default("settings", "Settings", "old")
+                        )
                     if old:
-                        await task.clean_tasks_xmpp(jid, ["status"])
+                        await task.clean_tasks_xmpp(
+                            jid, ["status"])
                         # await send_status(jid)
-                        await task.start_tasks_xmpp(self, jid, ["status"])
+                        await task.start_tasks_xmpp(
+                            self, jid, ["status"])
                     else:
-                        await sqlite.mark_source_as_read(db_file, url)
+                        await sqlite.mark_source_as_read(
+                            db_file, url)
                     response = (
                         "> {}\nNews source has been "
                         "added to subscription list."