Fix archiving. WARNING of complexity due to UNION of SQL

This commit is contained in:
Schimon Jehudah 2023-11-15 14:00:49 +00:00
parent 9a93d6de0e
commit cb2317b35a
7 changed files with 466 additions and 203 deletions

View file

@ -11,73 +11,7 @@ TODO
""" """
import os import os
import sys import filehandler
def get_default_dbdir():
    """
    Determine the directory path where dbfile will be stored.

    * If $XDG_DATA_HOME is defined, use it;
    * else if $HOME exists, use it;
    * else if the platform is Windows, use %APPDATA%;
    * else use the current directory.

    Returns
    -------
    str
        Path to database directory.

    Note
    ----
    This function was taken from project buku.
    See https://github.com/jarun/buku

    * Arun Prakash Jana (jarun)
    * Dmitry Marakasov (AMDmi3)
    """
    # Honour the XDG base-directory convention first.
    directory = os.environ.get('XDG_DATA_HOME')
    if directory is not None:
        return os.path.join(directory, 'slixfeed')
    home = os.environ.get('HOME')
    if home is not None:
        # Conventional per-user data location on POSIX systems.
        return os.path.join(home, '.local', 'share', 'slixfeed')
    if sys.platform == 'win32':
        directory = os.environ.get('APPDATA')
        if directory is not None:
            return os.path.join(directory, 'slixfeed')
    # No suitable environment variable found; fall back to CWD.
    return os.path.abspath('.')
def get_default_confdir():
    """
    Determine the directory path where configuration will be stored.

    * If $XDG_CONFIG_HOME is defined, use it;
    * else if $HOME exists, use it;
    * else if the platform is Windows, use %APPDATA%;
    * else use the current directory.

    Returns
    -------
    str
        Path to configuration directory.
    """
    # Honour the XDG base-directory convention first.
    directory = os.environ.get('XDG_CONFIG_HOME')
    if directory is not None:
        return os.path.join(directory, 'slixfeed')
    home = os.environ.get('HOME')
    if home is not None:
        # Conventional per-user configuration location on POSIX systems.
        return os.path.join(home, '.config', 'slixfeed')
    if sys.platform == 'win32':
        directory = os.environ.get('APPDATA')
        if directory is not None:
            return os.path.join(directory, 'slixfeed')
    # No suitable environment variable found; fall back to CWD.
    return os.path.abspath('.')
async def get_value_default(key): async def get_value_default(key):
@ -101,11 +35,11 @@ async def get_value_default(key):
case "filter-allow": case "filter-allow":
result = "hitler,sadam,saddam" result = "hitler,sadam,saddam"
case "filter-deny": case "filter-deny":
result = "crim,dead,death,disaster,holocaust,murder,war" result = "crim,dead,death,disaster,murder,war"
case "interval": case "interval":
result = 30 result = 300
case "quantum": case "quantum":
result = 4 result = 3
case "random": case "random":
result = 0 result = 0
return result return result
@ -121,7 +55,7 @@ def get_list():
Dictionary of pathnames. Dictionary of pathnames.
""" """
paths = [] paths = []
cfg_dir = get_default_confdir() cfg_dir = filehandler.get_default_confdir()
if not os.path.isdir(cfg_dir): if not os.path.isdir(cfg_dir):
os.mkdir(cfg_dir) os.mkdir(cfg_dir)
cfg_file = os.path.join(cfg_dir, r"url_paths.txt") cfg_file = os.path.join(cfg_dir, r"url_paths.txt")

View file

@ -4,7 +4,6 @@
import aiohttp import aiohttp
import asyncio import asyncio
import feedparser import feedparser
import os
import sqlitehandler import sqlitehandler
import confighandler import confighandler
@ -22,39 +21,6 @@ from urllib.parse import urlunsplit
from lxml import html from lxml import html
# NOTE Perhaps this needs to be executed
# just once per program execution
async def initdb(jid, callback, message=None):
    """
    Callback function to instantiate action on database.

    Ensures the per-JID database file exists (creating the data
    directory and tables on first use), then awaits *callback* on it.

    Parameters
    ----------
    jid : str
        Jabber ID.
    callback : callable
        Coroutine function to invoke with the database file.
    message : str, optional
        Extra argument forwarded to *callback* when given.
        The default is None.

    Returns
    -------
    object
        Result of awaiting *callback*.
    """
    directory = confighandler.get_default_dbdir()
    if not os.path.isdir(directory):
        os.mkdir(directory)
    db_file = os.path.join(directory, "{}.db".format(jid))
    sqlitehandler.create_tables(db_file)
    # await sqlitehandler.set_default_values(db_file)
    if message:
        return await callback(db_file, message)
    return await callback(db_file)
async def download_updates(db_file, url=None): async def download_updates(db_file, url=None):
""" """
Check feeds for new entries. Check feeds for new entries.
@ -207,7 +173,7 @@ async def download_updates(db_file, url=None):
string string
) )
if reject_list: if reject_list:
print(">>> REJECTED", title) # print(">>> REJECTED", title)
summary = "REJECTED" summary = "REJECTED"
# summary = "" # summary = ""
read_status = 1 read_status = 1
@ -630,21 +596,39 @@ async def feed_mode_request(db_file, url, tree):
res = await download_feed(address) res = await download_feed(address)
if res[1] == 200: if res[1] == 200:
try: try:
title = feedparser.parse(res[0])["feed"]["title"] feeds[address] = feedparser.parse(res[0])
# print(feeds)
except: except:
title = '*** No Title ***' continue
feeds[address] = title
if len(feeds) > 1: if len(feeds) > 1:
positive = 0
msg = ( msg = (
"RSS URL discovery has found {} feeds:\n```\n" "RSS URL discovery has found {} feeds:\n```\n"
).format(len(feeds)) ).format(len(feeds))
for feed in feeds: for feed in feeds:
feed_name = feeds[feed] feed_name = feeds[feed]["feed"]["title"]
feed_addr = feed feed_addr = feed
msg += "{}\n{}\n\n".format(feed_name, feed_addr) feed_amnt = len(feeds[feed].entries)
if feed_amnt:
positive = 1
msg += (
"Title: {}\n"
" Link: {}\n"
"Count: {}\n"
"\n"
).format(
feed_name,
feed_addr,
feed_amnt
)
msg += ( msg += (
"```\nThe above feeds were extracted from\n{}" "```\nThe above feeds were extracted from\n{}"
).format(url) ).format(url)
if not positive:
msg = (
"No feeds were found for {}."
).format(url)
return msg
elif feeds: elif feeds:
feed_addr = list(feeds)[0] feed_addr = list(feeds)[0]
msg = await add_feed(db_file, feed_addr) msg = await add_feed(db_file, feed_addr)
@ -709,11 +693,12 @@ async def feed_mode_scan(db_file, url, tree):
res = await download_feed(address) res = await download_feed(address)
if res[1] == 200: if res[1] == 200:
try: try:
feeds[address] = feedparser.parse(res[0])["feed"]["title"] feeds[address] = feedparser.parse(res[0])
print(feeds) # print(feeds)
except: except:
continue continue
if len(feeds) > 1: if len(feeds) > 1:
positive = 0
msg = ( msg = (
"RSS URL scan has found {} feeds:\n```\n" "RSS URL scan has found {} feeds:\n```\n"
).format(len(feeds)) ).format(len(feeds))
@ -722,12 +707,28 @@ async def feed_mode_scan(db_file, url, tree):
# res = await download_feed(feed) # res = await download_feed(feed)
# except: # except:
# continue # continue
feed_name = feeds[feed] feed_name = feeds[feed]["feed"]["title"]
feed_addr = feed feed_addr = feed
msg += "{}\n{}\n\n".format(feed_name, feed_addr) feed_amnt = len(feeds[feed].entries)
if feed_amnt:
positive = 1
msg += (
"Title: {}\n"
" Link: {}\n"
"Count: {}\n"
"\n"
).format(
feed_name,
feed_addr,
feed_amnt
)
msg += ( msg += (
"```\nThe above feeds were extracted from\n{}" "```\nThe above feeds were extracted from\n{}"
).format(url) ).format(url)
if not positive:
msg = (
"No feeds were found for {}."
).format(url)
return msg return msg
elif feeds: elif feeds:
feed_addr = list(feeds)[0] feed_addr = list(feeds)[0]

105
slixfeed/filehandler.py Normal file
View file

@ -0,0 +1,105 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import sqlitehandler
def get_default_dbdir():
    """
    Determine the directory path where dbfile will be stored.

    * If $XDG_DATA_HOME is defined, use it;
    * else if $HOME exists, use it;
    * else if the platform is Windows, use %APPDATA%;
    * else use the current directory.

    Returns
    -------
    str
        Path to database directory.

    Note
    ----
    This function was taken from project buku.
    See https://github.com/jarun/buku

    * Arun Prakash Jana (jarun)
    * Dmitry Marakasov (AMDmi3)
    """
    # Prefer the XDG base-directory location when available.
    base = os.environ.get('XDG_DATA_HOME')
    if base is not None:
        return os.path.join(base, 'slixfeed')
    home = os.environ.get('HOME')
    if home is not None:
        # Conventional per-user data location on POSIX systems.
        return os.path.join(home, '.local', 'share', 'slixfeed')
    if sys.platform == 'win32':
        base = os.environ.get('APPDATA')
        if base is not None:
            return os.path.join(base, 'slixfeed')
    # Last resort: the current working directory.
    return os.path.abspath('.')
def get_default_confdir():
    """
    Determine the directory path where configuration will be stored.

    * If $XDG_CONFIG_HOME is defined, use it;
    * else if $HOME exists, use it;
    * else if the platform is Windows, use %APPDATA%;
    * else use the current directory.

    Returns
    -------
    str
        Path to configuration directory.
    """
    # Prefer the XDG base-directory location when available.
    base = os.environ.get('XDG_CONFIG_HOME')
    if base is not None:
        return os.path.join(base, 'slixfeed')
    home = os.environ.get('HOME')
    if home is not None:
        # Conventional per-user configuration location on POSIX systems.
        return os.path.join(home, '.config', 'slixfeed')
    if sys.platform == 'win32':
        base = os.environ.get('APPDATA')
        if base is not None:
            return os.path.join(base, 'slixfeed')
    # Last resort: the current working directory.
    return os.path.abspath('.')
# NOTE Perhaps this needs to be executed
# just once per program execution
async def initdb(jid, callback, message=None):
    """
    Callback function to instantiate action on database.

    Ensures the per-JID database file exists (creating the data
    directory and tables on first use), then awaits *callback* on it.

    Parameters
    ----------
    jid : str
        Jabber ID.
    callback : callable
        Coroutine function to invoke with the database file.
    message : str, optional
        Extra argument forwarded to *callback* when given.
        The default is None.

    Returns
    -------
    object
        Result of awaiting *callback*.
    """
    directory = get_default_dbdir()
    if not os.path.isdir(directory):
        os.mkdir(directory)
    db_file = os.path.join(directory, "{}.db".format(jid))
    sqlitehandler.create_tables(db_file)
    # await sqlitehandler.set_default_values(db_file)
    if message:
        return await callback(db_file, message)
    return await callback(db_file)

View file

@ -60,7 +60,7 @@ async def is_listed(db_file, type, string):
if not i or len(i) < 2: if not i or len(i) < 2:
continue continue
if i in string.lower(): if i in string.lower():
print(">>> ACTIVATE", i) # print(">>> ACTIVATE", i)
return 1 return 1
else: else:
return None return None

113
slixfeed/main.py Normal file
View file

@ -0,0 +1,113 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
FIXME
1) Check feed duplication on runtime.
When feed is valid and is not yet in the database it is
possible to send a batch which would result in duplication.
Consequently, it might result in database lock error upon
feed removal attempt
TODO
1) SQL prepared statements
2) Machine Learning for scraping Title, Link, Summary and Timestamp
3) Support MUC
4) Support categories
5) Default prepackaged list of feeds
6) XMPP commands
7) Bot as transport
8) OMEMO
9) Logging
10) Default feeds (e.g. Blacklisted News, TBOT etc.)
11) Download and upload/send article (xHTML, xHTMLZ, Markdown, MHTML, TXT)
Use Readability
12) Fetch summary from URL, instead of storing summary.
13) Support protocol Gopher
https://github.com/michael-lazar/pygopherd
https://github.com/gopherball/gb
"""
# vars and their meanings:
# jid = Jabber ID (XMPP)
# res = response (HTTP)
from argparse import ArgumentParser
from getpass import getpass
import logging
from datetime import date
import time
# from eliot import start_action, to_file
# # to_file(open("slixfeed.log", "w"))
# # with start_action(action_type="set_date()", jid=jid):
# # with start_action(action_type="message()", msg=msg):
#import irchandler
import xmpphandler
#import matrixhandler
if __name__ == '__main__':
    # Command-line interface.
    parser = ArgumentParser(description=xmpphandler.Slixfeed.__doc__)

    # Output verbosity options; the default level is INFO.
    parser.add_argument(
        "-q", "--quiet", help="set logging to ERROR",
        action="store_const", dest="loglevel",
        const=logging.ERROR, default=logging.INFO
    )
    parser.add_argument(
        "-d", "--debug", help="set logging to DEBUG",
        action="store_const", dest="loglevel",
        const=logging.DEBUG, default=logging.INFO
    )

    # Credentials; prompted for interactively when omitted.
    parser.add_argument("-j", "--jid", dest="jid",
                        help="JID to use")
    parser.add_argument("-p", "--password", dest="password",
                        help="password to use")

    args = parser.parse_args()

    logging.basicConfig(level=args.loglevel,
                        format='%(levelname)-8s %(message)s')

    if args.jid is None:
        args.jid = input("Username: ")
    if args.password is None:
        args.password = getpass("Password: ")

    # Set up Slixfeed and register plugins. Note that while plugins
    # may have interdependencies, registration order does not matter.
    xmpp = xmpphandler.Slixfeed(args.jid, args.password)
    for plugin in (
            'xep_0004',   # Data Forms
            'xep_0030',   # Service Discovery
            'xep_0045',   # Multi-User Chat
            'xep_0060',   # PubSub
            'xep_0199',   # XMPP Ping
            ):
        xmpp.register_plugin(plugin)

    # Connect to the XMPP server and start processing XMPP stanzas.
    xmpp.connect()
    xmpp.process()

View file

@ -249,6 +249,12 @@ async def remove_feed(db_file, ix):
"WHERE source = ?" "WHERE source = ?"
) )
cur.execute(sql, (url,)) cur.execute(sql, (url,))
sql = (
"DELETE "
"FROM archive "
"WHERE source = ?"
)
cur.execute(sql, (url,))
sql = ( sql = (
"DELETE FROM feeds " "DELETE FROM feeds "
"WHERE id = ?" "WHERE id = ?"
@ -360,9 +366,18 @@ async def get_number_of_entries_unread(db_file):
with create_connection(db_file) as conn: with create_connection(db_file) as conn:
cur = conn.cursor() cur = conn.cursor()
sql = ( sql = (
"SELECT "
"("
"SELECT count(id) " "SELECT count(id) "
"FROM entries " "FROM entries "
"WHERE read = 0" "WHERE read = 0"
") "
"+ "
"("
"SELECT count(id) "
"FROM archive"
") "
"AS total_count"
) )
count = cur.execute(sql).fetchone()[0] count = cur.execute(sql).fetchone()[0]
return count return count
@ -391,12 +406,32 @@ async def get_entry_unread(db_file, num=None):
num = int(num) num = int(num)
with create_connection(db_file) as conn: with create_connection(db_file) as conn:
cur = conn.cursor() cur = conn.cursor()
# sql = "SELECT id FROM entries WHERE read = 0 LIMIT 1" # sql = (
# sql = "SELECT id FROM entries WHERE read = 0 ORDER BY timestamp DESC LIMIT 1" # "SELECT id "
# "FROM entries "
# "WHERE read = 0 "
# "LIMIT 1"
# )
# sql = ("SELECT id "
# "FROM entries "
# "WHERE read = 0 "
# "ORDER BY timestamp DESC "
# "LIMIT 1"
# )
# sql = (
# "SELECT id, title, summary, link "
# "FROM entries "
# "WHERE read = 0 "
# "ORDER BY timestamp "
# "DESC LIMIT :num"
# )
sql = ( sql = (
"SELECT id, title, summary, link " "SELECT id, title, summary, link, timestamp "
"FROM entries " "FROM entries "
"WHERE read = 0 " "WHERE read = 0 "
"UNION ALL "
"SELECT id, title, summary, link, timestamp "
"FROM archive "
"ORDER BY timestamp " "ORDER BY timestamp "
"DESC LIMIT :num" "DESC LIMIT :num"
) )
@ -444,7 +479,11 @@ async def get_entry_unread(db_file, num=None):
str(link) str(link)
) )
async with DBLOCK: async with DBLOCK:
# NOTE: We can use DBLOCK once for both
# functions, because, due to exclusive
# ID, only one can ever occur.
await mark_as_read(cur, ix) await mark_as_read(cur, ix)
await delete_entry(cur, ix)
return news_list return news_list
@ -467,6 +506,24 @@ async def mark_as_read(cur, ix):
cur.execute(sql, (ix,)) cur.execute(sql, (ix,))
async def delete_entry(cur, ix):
    """
    Delete entry from table archive.

    Parameters
    ----------
    cur : sqlite3.Cursor
        Cursor of an open database connection.
    ix : str
        Index (id) of the archived entry to delete.
    """
    # Parameterized query; ix is bound, never interpolated.
    sql = (
        "DELETE FROM archive "
        "WHERE id = ?"
    )
    cur.execute(sql, (ix,))
async def statistics(db_file): async def statistics(db_file):
""" """
Return table statistics. Return table statistics.
@ -803,8 +860,8 @@ async def remove_entry(db_file, source, length):
async def remove_nonexistent_entries(db_file, feed, source): async def remove_nonexistent_entries(db_file, feed, source):
""" """
Remove entries that don't exist in a given parsed feed. Remove entries that don't exist in a given parsed feed.
Check the entries returned from feed and delete non Check the entries returned from feed and delete read non
existing entries existing entries, otherwise move to table archive, if unread.
Parameters Parameters
---------- ----------
@ -824,12 +881,20 @@ async def remove_nonexistent_entries(db_file, feed, source):
) )
items = cur.execute(sql, (source,)).fetchall() items = cur.execute(sql, (source,)).fetchall()
entries = feed.entries entries = feed.entries
for entry in entries: # breakpoint()
valid = False
for item in items: for item in items:
valid = False
for entry in entries:
title = None
link = None
time = None
# valid = False
# TODO better check and don't repeat code # TODO better check and don't repeat code
if entry.has_key("id") and item[3]: if entry.has_key("id") and item[3]:
if entry.id == item[3]: if entry.id == item[3]:
# print("compare1:", entry.id)
# print("compare2:", item[3])
# print("============")
valid = True valid = True
break break
else: else:
@ -842,6 +907,9 @@ async def remove_nonexistent_entries(db_file, feed, source):
else: else:
link = source link = source
if entry.has_key("published") and item[4]: if entry.has_key("published") and item[4]:
# print("compare11:", title, link, time)
# print("compare22:", item[1], item[2], item[4])
# print("============")
time = await datetimehandler.rfc2822_to_iso8601(entry.published) time = await datetimehandler.rfc2822_to_iso8601(entry.published)
if (item[1] == title and if (item[1] == title and
item[2] == link and item[2] == link and
@ -851,17 +919,41 @@ async def remove_nonexistent_entries(db_file, feed, source):
else: else:
if (item[1] == title and if (item[1] == title and
item[2] == link): item[2] == link):
# print("compare111:", title, link)
# print("compare222:", item[1], item[2])
# print("============")
valid = True valid = True
break break
# TODO better check and don't repeat code # TODO better check and don't repeat code
if not valid: if not valid:
# print("id: ", item[0])
# if title:
# print("title: ", title)
# print("item[1]: ", item[1])
# if link:
# print("link: ", link)
# print("item[2]: ", item[2])
# if entry.id:
# print("last_entry:", entry.id)
# print("item[3]: ", item[3])
# if time:
# print("time: ", time)
# print("item[4]: ", item[4])
# print("read: ", item[5])
# breakpoint()
async with DBLOCK: async with DBLOCK:
# TODO Send to table archive # TODO Send to table archive
# TODO Also make a regular/routine check for sources that # TODO Also make a regular/routine check for sources that
# have been changed (though that can only happen when # have been changed (though that can only happen when
# manually editing) # manually editing)
ix = item[0] ix = item[0]
print(">>> SOURCE: ", source)
print(">>> INVALID:", item[1])
# print("title:", item[1])
# print("link :", item[2])
# print("id :", item[3])
if item[5] == 1: if item[5] == 1:
print(">>> DELETING:", item[1])
sql = ( sql = (
"DELETE " "DELETE "
"FROM entries " "FROM entries "
@ -869,16 +961,11 @@ async def remove_nonexistent_entries(db_file, feed, source):
) )
cur.execute(sql, (ix,)) cur.execute(sql, (ix,))
else: else:
print(">>> ARCHIVING:") print(">>> ARCHIVING:", item[1])
print("title:", item[1])
print("link :", item[2])
print("id :", item[3])
sql = ( sql = (
"INSERT " "INSERT "
"INTO archive " "INTO archive "
"SELECT * " "SELECT * "
# "SELECT title, summary, "
# "link, source, timestamp "
"FROM entries " "FROM entries "
"WHERE entries.id = :ix" "WHERE entries.id = :ix"
) )
@ -1015,13 +1102,18 @@ async def last_entries(db_file, num):
elif num < 1: elif num < 1:
num = 1 num = 1
cur = get_cursor(db_file) cur = get_cursor(db_file)
# sql = "SELECT title, link FROM entries ORDER BY ROWID DESC LIMIT :num" # sql = (
# "SELECT title, link "
# "FROM entries "
# "ORDER BY ROWID DESC "
# "LIMIT :num"
# )
sql = ( sql = (
"SELECT title, link " "SELECT title, link "
"FROM entries " "FROM entries "
"WHERE read = 0 " "WHERE read = 0 "
"ORDER BY timestamp " "ORDER BY timestamp DESC "
"DESC LIMIT :num " "LIMIT :num "
) )
results = cur.execute(sql, (num,)) results = cur.execute(sql, (num,))
titles_list = "Recent {} titles:\n".format(num) titles_list = "Recent {} titles:\n".format(num)
@ -1053,7 +1145,7 @@ async def search_feeds(db_file, query):
""" """
cur = get_cursor(db_file) cur = get_cursor(db_file)
sql = ( sql = (
"SELECT name, id, address " "SELECT name, address, id "
"FROM feeds " "FROM feeds "
"WHERE name LIKE ? " "WHERE name LIKE ? "
"LIMIT 50" "LIMIT 50"
@ -1066,7 +1158,10 @@ async def search_feeds(db_file, query):
for result in results: for result in results:
counter += 1 counter += 1
results_list += ( results_list += (
"\n{} [{}]\n{}\n" "\nName: {}"
"\n URL: {}"
"\n ID: {}"
"\n"
).format( ).format(
str(result[0]), str(result[0]),
str(result[1]), str(result[1]),
@ -1099,9 +1194,16 @@ async def search_entries(db_file, query):
"SELECT title, link " "SELECT title, link "
"FROM entries " "FROM entries "
"WHERE title LIKE ? " "WHERE title LIKE ? "
"UNION ALL "
"SELECT title, link "
"FROM archive "
"WHERE title LIKE ? "
"LIMIT 50" "LIMIT 50"
) )
results = cur.execute(sql, [f'%{query}%']) results = cur.execute(sql, (
f'%{query}%',
f'%{query}%'
))
results_list = ( results_list = (
"Search results for '{}':\n```" "Search results for '{}':\n```"
).format(query) ).format(query)
@ -1168,11 +1270,15 @@ async def check_entry_exist(db_file, source, eid=None,
"link = :link and " "link = :link and "
"timestamp = :date" "timestamp = :date"
) )
try:
result = cur.execute(sql, { result = cur.execute(sql, {
"title": title, "title": title,
"link": link, "link": link,
"timestamp": date "timestamp": date
}).fetchone() }).fetchone()
except:
print("this is source:", source)
print("this is date: ", date)
else: else:
sql = ( sql = (
"SELECT id " "SELECT id "
@ -1183,10 +1289,13 @@ async def check_entry_exist(db_file, source, eid=None,
"title": title, "title": title,
"link": link "link": link
}).fetchone() }).fetchone()
try:
if result: if result:
return True return True
else: else:
None None
except:
print("no result. this is source:", source)
async def set_settings_value(db_file, key_value): async def set_settings_value(db_file, key_value):

View file

@ -19,9 +19,9 @@ import slixmpp
from slixmpp.plugins.xep_0363.http_upload import FileTooBig, HTTPError, UploadServiceNotFound from slixmpp.plugins.xep_0363.http_upload import FileTooBig, HTTPError, UploadServiceNotFound
import confighandler
import datahandler import datahandler
import datetimehandler import datetimehandler
import filehandler
import filterhandler import filterhandler
import sqlitehandler import sqlitehandler
@ -115,7 +115,7 @@ class Slixfeed(slixmpp.ClientXMPP):
action = 0 action = 0
jid = msg["from"].bare jid = msg["from"].bare
db_dir = confighandler.get_default_dbdir() db_dir = filehandler.get_default_dbdir()
os.chdir(db_dir) os.chdir(db_dir)
if jid + ".db" not in os.listdir(): if jid + ".db" not in os.listdir():
await self.task_jid(jid) await self.task_jid(jid)
@ -140,7 +140,7 @@ class Slixfeed(slixmpp.ClientXMPP):
url = message.split(" ")[0] url = message.split(" ")[0]
title = " ".join(message.split(" ")[1:]) title = " ".join(message.split(" ")[1:])
if url.startswith("http"): if url.startswith("http"):
action = await datahandler.initdb( action = await filehandler.initdb(
jid, jid,
datahandler.add_feed_no_check, datahandler.add_feed_no_check,
[url, title] [url, title]
@ -152,7 +152,7 @@ class Slixfeed(slixmpp.ClientXMPP):
key = "filter-" + message[:5] key = "filter-" + message[:5]
val = message[6:] val = message[6:]
if val: if val:
keywords = await datahandler.initdb( keywords = await filehandler.initdb(
jid, jid,
sqlitehandler.get_settings_value, sqlitehandler.get_settings_value,
key key
@ -161,7 +161,7 @@ class Slixfeed(slixmpp.ClientXMPP):
val, val,
keywords keywords
) )
await datahandler.initdb( await filehandler.initdb(
jid, jid,
sqlitehandler.set_settings_value, sqlitehandler.set_settings_value,
[key, val] [key, val]
@ -176,7 +176,7 @@ class Slixfeed(slixmpp.ClientXMPP):
key = "filter-" + message[:4] key = "filter-" + message[:4]
val = message[5:] val = message[5:]
if val: if val:
keywords = await datahandler.initdb( keywords = await filehandler.initdb(
jid, jid,
sqlitehandler.get_settings_value, sqlitehandler.get_settings_value,
key key
@ -185,7 +185,7 @@ class Slixfeed(slixmpp.ClientXMPP):
val, val,
keywords keywords
) )
await datahandler.initdb( await filehandler.initdb(
jid, jid,
sqlitehandler.set_settings_value, sqlitehandler.set_settings_value,
[key, val] [key, val]
@ -198,7 +198,7 @@ class Slixfeed(slixmpp.ClientXMPP):
action = "Missing keywords." action = "Missing keywords."
case _ if message_lowercase.startswith("http"): case _ if message_lowercase.startswith("http"):
url = message url = message
action = await datahandler.initdb( action = await filehandler.initdb(
jid, jid,
datahandler.add_feed, datahandler.add_feed,
url url
@ -209,7 +209,7 @@ class Slixfeed(slixmpp.ClientXMPP):
query = message[6:] query = message[6:]
if query: if query:
if len(query) > 3: if len(query) > 3:
action = await datahandler.initdb( action = await filehandler.initdb(
jid, jid,
sqlitehandler.search_feeds, sqlitehandler.search_feeds,
query query
@ -219,7 +219,7 @@ class Slixfeed(slixmpp.ClientXMPP):
"Enter at least 4 characters to search" "Enter at least 4 characters to search"
) )
else: else:
action = await datahandler.initdb( action = await filehandler.initdb(
jid, jid,
sqlitehandler.list_feeds sqlitehandler.list_feeds
) )
@ -235,7 +235,7 @@ class Slixfeed(slixmpp.ClientXMPP):
# action = ( # action = (
# "Updates will be sent every {} minutes." # "Updates will be sent every {} minutes."
# ).format(action) # ).format(action)
await datahandler.initdb( await filehandler.initdb(
jid, jid,
sqlitehandler.set_settings_value, sqlitehandler.set_settings_value,
[key, val] [key, val]
@ -263,7 +263,7 @@ class Slixfeed(slixmpp.ClientXMPP):
# action = ( # action = (
# "Every update will contain {} news items." # "Every update will contain {} news items."
# ).format(action) # ).format(action)
await datahandler.initdb( await filehandler.initdb(
jid, jid,
sqlitehandler.set_settings_value, sqlitehandler.set_settings_value,
[key, val] [key, val]
@ -278,7 +278,7 @@ class Slixfeed(slixmpp.ClientXMPP):
case _ if message_lowercase.startswith("recent"): case _ if message_lowercase.startswith("recent"):
num = message[7:] num = message[7:]
if num: if num:
action = await datahandler.initdb( action = await filehandler.initdb(
jid, jid,
sqlitehandler.last_entries, sqlitehandler.last_entries,
num num
@ -288,7 +288,7 @@ class Slixfeed(slixmpp.ClientXMPP):
case _ if message_lowercase.startswith("remove"): case _ if message_lowercase.startswith("remove"):
ix = message[7:] ix = message[7:]
if ix: if ix:
action = await datahandler.initdb( action = await filehandler.initdb(
jid, jid,
sqlitehandler.remove_feed, sqlitehandler.remove_feed,
ix ix
@ -300,7 +300,7 @@ class Slixfeed(slixmpp.ClientXMPP):
query = message[7:] query = message[7:]
if query: if query:
if len(query) > 1: if len(query) > 1:
action = await datahandler.initdb( action = await filehandler.initdb(
jid, jid,
sqlitehandler.search_entries, sqlitehandler.search_entries,
query query
@ -315,7 +315,7 @@ class Slixfeed(slixmpp.ClientXMPP):
# action = "Updates are enabled." # action = "Updates are enabled."
key = "enabled" key = "enabled"
val = 1 val = 1
await datahandler.initdb( await filehandler.initdb(
jid, jid,
sqlitehandler.set_settings_value, sqlitehandler.set_settings_value,
[key, val] [key, val]
@ -325,13 +325,13 @@ class Slixfeed(slixmpp.ClientXMPP):
# print(await datetimehandler.current_time(), "task_manager[jid]") # print(await datetimehandler.current_time(), "task_manager[jid]")
# print(task_manager[jid]) # print(task_manager[jid])
case "stats": case "stats":
action = await datahandler.initdb( action = await filehandler.initdb(
jid, jid,
sqlitehandler.statistics sqlitehandler.statistics
) )
case _ if message_lowercase.startswith("status "): case _ if message_lowercase.startswith("status "):
ix = message[7:] ix = message[7:]
action = await datahandler.initdb( action = await filehandler.initdb(
jid, jid,
sqlitehandler.toggle_status, sqlitehandler.toggle_status,
ix ix
@ -349,7 +349,7 @@ class Slixfeed(slixmpp.ClientXMPP):
# task_manager[jid]["interval"].cancel() # task_manager[jid]["interval"].cancel()
# key = "enabled" # key = "enabled"
# val = 0 # val = 0
# action = await datahandler.initdb( # action = await filehandler.initdb(
# jid, # jid,
# sqlitehandler.set_settings_value, # sqlitehandler.set_settings_value,
# [key, val] # [key, val]
@ -360,7 +360,7 @@ class Slixfeed(slixmpp.ClientXMPP):
# # await self.send_status(jid) # # await self.send_status(jid)
key = "enabled" key = "enabled"
val = 0 val = 0
await datahandler.initdb( await filehandler.initdb(
jid, jid,
sqlitehandler.set_settings_value, sqlitehandler.set_settings_value,
[key, val] [key, val]
@ -388,7 +388,7 @@ class Slixfeed(slixmpp.ClientXMPP):
Self. Self.
""" """
while True: while True:
db_dir = confighandler.get_default_dbdir() db_dir = filehandler.get_default_dbdir()
if not os.path.isdir(db_dir): if not os.path.isdir(db_dir):
msg = ( msg = (
"Slixfeed can not work without a database.\n" "Slixfeed can not work without a database.\n"
@ -410,7 +410,8 @@ class Slixfeed(slixmpp.ClientXMPP):
# await jid_tasker[jid] # await jid_tasker[jid]
async with asyncio.TaskGroup() as tg: async with asyncio.TaskGroup() as tg:
for file in files: for file in files:
if file.endswith(".db") and not file.endswith(".db-jour.db"): if (file.endswith(".db") and
not file.endswith(".db-jour.db")):
jid = file[:-3] jid = file[:-3]
main_task.extend([tg.create_task(self.task_jid(jid))]) main_task.extend([tg.create_task(self.task_jid(jid))])
# main_task = [tg.create_task(self.task_jid(jid))] # main_task = [tg.create_task(self.task_jid(jid))]
@ -428,7 +429,7 @@ class Slixfeed(slixmpp.ClientXMPP):
jid : str jid : str
Jabber ID. Jabber ID.
""" """
enabled = await datahandler.initdb( enabled = await filehandler.initdb(
jid, jid,
sqlitehandler.get_settings_value, sqlitehandler.get_settings_value,
"enabled" "enabled"
@ -476,7 +477,7 @@ class Slixfeed(slixmpp.ClientXMPP):
""" """
# print("Starting send_update()") # print("Starting send_update()")
# print(jid) # print(jid)
new = await datahandler.initdb( new = await filehandler.initdb(
jid, jid,
sqlitehandler.get_entry_unread, sqlitehandler.get_entry_unread,
num num
@ -493,7 +494,7 @@ class Slixfeed(slixmpp.ClientXMPP):
self.send_update, self.send_update,
"interval" "interval"
) )
# interval = await datahandler.initdb( # interval = await filehandler.initdb(
# jid, # jid,
# sqlitehandler.get_settings_value, # sqlitehandler.get_settings_value,
# "interval" # "interval"
@ -530,7 +531,7 @@ class Slixfeed(slixmpp.ClientXMPP):
Jabber ID. Jabber ID.
""" """
print(await datetimehandler.current_time(), "> SEND STATUS",jid) print(await datetimehandler.current_time(), "> SEND STATUS",jid)
enabled = await datahandler.initdb( enabled = await filehandler.initdb(
jid, jid,
sqlitehandler.get_settings_value, sqlitehandler.get_settings_value,
"enabled" "enabled"
@ -539,7 +540,7 @@ class Slixfeed(slixmpp.ClientXMPP):
status_mode = "xa" status_mode = "xa"
status_text = "Send \"Start\" to receive news." status_text = "Send \"Start\" to receive news."
else: else:
feeds = await datahandler.initdb( feeds = await filehandler.initdb(
jid, jid,
sqlitehandler.get_number_of_items, sqlitehandler.get_number_of_items,
"feeds" "feeds"
@ -550,7 +551,7 @@ class Slixfeed(slixmpp.ClientXMPP):
"📂️ Send a URL from a blog or a news website." "📂️ Send a URL from a blog or a news website."
) )
else: else:
unread = await datahandler.initdb( unread = await filehandler.initdb(
jid, jid,
sqlitehandler.get_number_of_entries_unread sqlitehandler.get_number_of_entries_unread
) )
@ -606,7 +607,7 @@ class Slixfeed(slixmpp.ClientXMPP):
Value. The default is None. Value. The default is None.
""" """
if not val: if not val:
val = await datahandler.initdb( val = await filehandler.initdb(
jid, jid,
sqlitehandler.get_settings_value, sqlitehandler.get_settings_value,
key key
@ -644,7 +645,7 @@ async def check_updates(jid):
""" """
while True: while True:
print(await datetimehandler.current_time(), "> CHCK UPDATE",jid) print(await datetimehandler.current_time(), "> CHCK UPDATE",jid)
await datahandler.initdb(jid, datahandler.download_updates) await filehandler.initdb(jid, datahandler.download_updates)
await asyncio.sleep(60 * 90) await asyncio.sleep(60 * 90)
# Schedule to call this function again in 90 minutes # Schedule to call this function again in 90 minutes
# loop.call_at( # loop.call_at(