Fix command 'read'; improve code of module sqlite.

Schimon Jehudah, Adv. 2024-06-19 23:28:17 +03:00
parent 1b8254832d
commit fd07ae865a
6 changed files with 96 additions and 379 deletions

View file

@@ -13,6 +13,7 @@ logger.debug('This is a debug message')
"""
from datetime import datetime
import logging
@@ -55,4 +56,6 @@ class Message:
def printer(text):
print(text, end='\r')
now = datetime.now()
current_time = now.strftime("%H:%M:%S")
print('{} {}'.format(current_time, text), end='\r')
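The revised printer prefixes each status message with the current wall-clock time and keeps overwriting the same terminal line via the carriage return. A minimal standalone sketch of the new behaviour, using only the lines shown in this hunk:

from datetime import datetime

def printer(text):
    # Overwrite the current terminal line with a timestamped status message.
    current_time = datetime.now().strftime("%H:%M:%S")
    print('{} {}'.format(current_time, text), end='\r')

printer('Slixfeed attempting to reconnect...')  # e.g. "23:28:17 Slixfeed attempting to reconnect..."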

View file

@@ -498,23 +498,7 @@ async def add_metadata(db_file):
ixs = cur.execute(sql).fetchall()
for ix in ixs:
feed_id = ix[0]
# insert_feed_properties(cur, feed_id)
insert_feed_status(cur, feed_id)
insert_feed_preferences(cur, feed_id)
def insert_feed_status(cur, feed_id):
"""
Set feed status.
Parameters
----------
cur : object
Cursor object.
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}: feed_id: {}'
.format(function_name, feed_id))
# Set feed status
sql = (
"""
INSERT
@@ -531,20 +515,7 @@ def insert_feed_status(cur, feed_id):
logger.warning(
"Skipping feed_id {} for table feeds_state".format(feed_id))
logger.error(e)
def insert_feed_preferences(cur, feed_id):
"""
Set feed preferences.
Parameters
----------
cur : object
Cursor object.
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}: feed_id: {}'
.format(function_name, feed_id))
# Set feed preferences.
sql = (
"""
INSERT
@@ -563,37 +534,6 @@ def insert_feed_preferences(cur, feed_id):
logger.error(e)
# TODO Test
def insert_feed_properties(cur, feed_id):
"""
Set feed properties.
Parameters
----------
cur : object
Cursor object.
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}: feed_id: {}'
.format(function_name, feed_id))
sql = (
"""
INSERT
INTO feeds_properties(
id)
VALUES(
?)
"""
)
par = (feed_id,)
try:
cur.execute(sql, par)
except IntegrityError as e:
logger.warning(
"Skipping feed_id {} for table feeds_properties".format(feed_id))
logger.error(e)
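All three of these helpers follow the same shape: insert a default row for the feed and, if the row already exists, log a warning instead of failing. A condensed sketch of that pattern under an assumed generic table layout (the full column lists for feeds_state and feeds_preferences are not visible in this hunk, and insert_default_row is a hypothetical name):

import logging
from sqlite3 import IntegrityError

logger = logging.getLogger(__name__)

def insert_default_row(cur, table, feed_id):
    # Create the per-feed default row; tolerate feeds that were already initialised.
    sql = "INSERT INTO {} (feed_id) VALUES (?)".format(table)  # table name comes from code, not user input
    try:
        cur.execute(sql, (feed_id,))
    except IntegrityError as e:
        logger.warning("Skipping feed_id {} for table {}".format(feed_id, table))
        logger.error(e)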
async def insert_feed(db_file, url, title, identifier, entries=None, version=None,
encoding=None, language=None, status_code=None,
updated=None):
@@ -673,52 +613,6 @@ async def insert_feed(db_file, url, title, identifier, entries=None, version=Non
cur.execute(sql, par)
async def insert_feed_(db_file, url, title):
"""
Insert a new feed into the feeds table.
Parameters
----------
db_file : str
Path to database file.
url : str
URL.
title : str, optional
Feed title. The default is None.
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}: db_file: {} url: {}'
.format(function_name, db_file, url))
async with DBLOCK:
with create_connection(db_file) as conn:
cur = conn.cursor()
sql = (
"""
INSERT
INTO feeds_properties(
title, url)
VALUES(
?, ?)
"""
)
par = (
title, url
)
cur.execute(sql, par)
sql = (
"""
SELECT id
FROM feeds_properties
WHERE url = :url
"""
)
par = (url,)
feed_id = cur.execute(sql, par).fetchone()[0]
# insert_feed_properties(cur, feed_id)
insert_feed_status(cur, feed_id)
insert_feed_preferences(cur, feed_id)
async def remove_feed_by_url(db_file, url):
"""
Delete a feed by feed URL.
@@ -1531,37 +1425,6 @@ def get_feed_id(db_file, url):
return feed_id
def is_entry_archived(cur, ix):
"""
Check whether a given entry is archived.
Parameters
----------
cur : object
Cursor object.
ix : str
Index of entry.
Returns
-------
result : tuple
Entry ID.
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}: ix: {}'
.format(function_name, ix))
sql = (
"""
SELECT id
FROM entries_state
WHERE archived = 1 AND entry_id = ?
"""
)
par = (ix,)
result = cur.execute(sql, par).fetchone()
return result
def is_entry_read(db_file, ix):
"""
Check whether a given entry is marked as read.
@@ -1977,53 +1840,9 @@ async def mark_feed_as_read(db_file, feed_id):
# cur.execute(sql, par)
async def mark_entry_as_read(cur, ix):
"""
Set read status of entry as read.
Parameters
----------
cur : object
Cursor object.
ix : str
Index of entry.
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}: ix: {}'
.format(function_name, ix))
sql = (
"""
UPDATE entries_state
SET read = 1
WHERE entry_id = ?
"""
)
par = (ix,)
cur.execute(sql, par)
async def mark_as_read(db_file, ix):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: db_file: {} ix: {}'
.format(function_name, db_file, ix))
async with DBLOCK:
with create_connection(db_file) as conn:
cur = conn.cursor()
# TODO While `async with DBLOCK` does work well from
# outside of functions, it would be better practice
# to place it within the functions.
# NOTE: We can use DBLOCK once for both
# functions, because, due to exclusive
# ID, only one can ever occur.
if is_entry_archived(cur, ix):
await delete_entry(cur, ix)
else:
await mark_entry_as_read(cur, ix)
async def delete_entry(cur, ix):
"""
Delete entry.
Set read status of entry as read or delete entry.
Parameters
----------
@@ -2033,8 +1852,23 @@ async def delete_entry(cur, ix):
Index of entry.
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}: ix: {}'
.format(function_name, ix))
logger.debug('{}: db_file: {} ix: {}'
.format(function_name, db_file, ix))
async with DBLOCK:
with create_connection(db_file) as conn:
cur = conn.cursor()
# Check whether a given entry is archived.
sql = (
"""
SELECT id
FROM entries_state
WHERE archived = 1 AND entry_id = ?
"""
)
par = (ix,)
result = cur.execute(sql, par).fetchone()
# is_entry_archived
if result:
sql = (
"""
DELETE
@@ -2042,56 +1876,15 @@ async def delete_entry(cur, ix):
WHERE id = ?
"""
)
par = (ix,)
cur.execute(sql, par)
async def update_statistics(cur):
"""
Update table statistics.
Parameters
----------
cur : object
Cursor object.
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}'.format(function_name))
stat_dict = {}
stat_dict["feeds"] = get_number_of_items(cur, 'feeds_properties')
stat_dict["entries"] = get_number_of_items(cur, 'entries_properties')
stat_dict["unread"] = get_number_of_entries_unread(cur=cur)
for i in stat_dict:
sql = (
"SELECT id "
"FROM statistics "
"WHERE title = ?"
)
par = (i,)
cur.execute(sql, par)
if cur.fetchone():
sql = (
"UPDATE statistics "
"SET number = :num "
"WHERE title = :title"
)
par = {
"title": i,
"num": stat_dict[i]
}
cur.execute(sql, par)
else:
sql = (
"SELECT count(id) "
"FROM statistics"
"""
UPDATE entries_state
SET read = 1
WHERE entry_id = ?
"""
)
count = cur.execute(sql).fetchone()[0]
ix = count + 1
sql = (
"INSERT INTO statistics "
"VALUES(?,?,?)"
)
par = (ix, i, stat_dict[i])
par = (ix,)
cur.execute(sql, par)
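Taken together, these hunks fold is_entry_archived, mark_entry_as_read and delete_entry into a single mark_as_read that performs its own check under one DBLOCK. A condensed sketch of the resulting flow, reusing the module's DBLOCK and create_connection; the table targeted by the DELETE is not visible above, so entries_properties is an assumption:

async def mark_as_read(db_file, ix):
    # Mark an entry as read, or delete it outright if it has been archived.
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = ("SELECT id FROM entries_state "
                   "WHERE archived = 1 AND entry_id = ?")
            if cur.execute(sql, (ix,)).fetchone():
                # Archived entries are removed rather than flagged as read.
                sql = "DELETE FROM entries_properties WHERE id = ?"  # table name assumed
            else:
                sql = "UPDATE entries_state SET read = 1 WHERE entry_id = ?"
            cur.execute(sql, (ix,))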
@@ -2709,89 +2502,6 @@ def get_contents_by_entry_id(db_file, entry_id):
return result
def get_invalid_entries(db_file, url, feed):
"""
List entries that do not exist in a given feed.
Parameters
----------
db_file : str
Path to database file.
url : str
Feed URL.
feed : list
Parsed feed document.
Returns
-------
ixs : dict
List of indexes of invalid items.
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}: db_file: {} url: {}'.format(function_name, db_file, url))
feed_id = get_feed_id(db_file, url)
feed_id = feed_id[0]
items = get_entries_of_feed(db_file, feed_id)
entries = feed.entries
ixs = {}
for item in items:
ix, entry_title, entry_link, entry_id, timestamp = item
read_status = is_entry_read(db_file, ix)
read_status = read_status[0]
for entry in entries:
title = None
link = None
time = None
# TODO better check and don't repeat code
if entry.has_key("id") and entry_id:
if entry.id == entry_id:
# print(url)
# print("compare entry.id == entry_id:", entry.id)
# print("compare entry.id == entry_id:", entry_id)
# print("============")
# items_valid.append(ix)
break
else:
# Prepare a title to compare
if entry.has_key("title"):
title = entry.title
else:
title = feed["feed"]["title"]
# Prepare a link to compare
if entry.has_key("link"):
link = Url.join_url(url, entry.link)
else:
link = url
# Compare date, link and title
if entry.has_key("published") and timestamp:
# print(url)
# print("compare published:", title, link, time)
# print("compare published:", entry_title, entry_link, timestamp)
# print("============")
time = DateAndTime.rfc2822_to_iso8601(entry.published)
if (entry_title == title and
entry_link == link and
timestamp == time):
# items_valid.append(ix)
break
else:
# Compare link and title
if (entry_title == title and
entry_link == link):
# print(url)
# print("compare entry_link == link:", title, link)
# print("compare entry_title == title:", entry_title, entry_link)
# print("============")
# items_valid.append(ix)
break
# print('invalid entry:')
# print(entry)
# TODO better check and don't repeat code
ixs[ix] = read_status
# print(ixs)
return ixs
async def process_invalid_entries(db_file, ixs):
"""
Batch process of invalid items.
@@ -2974,10 +2684,7 @@ def get_feeds_by_enabled_state(db_file, enabled_state):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: db_file: {} enabled_state: {}'
.format(function_name, db_file, enabled_state))
if enabled_state:
enabled_state = 1
else:
enabled_state = 0
enabled_state = 1 if enabled_state else 0
with create_connection(db_file) as conn:
cur = conn.cursor()
sql = (

View file

@@ -1,2 +1,2 @@
__version__ = '0.1.84'
__version_info__ = (0, 1, 84)
__version__ = '0.1.85'
__version_info__ = (0, 1, 85)

View file

@@ -444,7 +444,7 @@ class XmppCommands:
result['identifier'],
result['index']))
elif result['error']:
message = ('> {}\nFailed to find subscriptions. '
message = ('> {}\nNo subscriptions were found. '
'Reason: {} (status code: {})'
.format(url, result['message'],
result['code']))
@@ -508,7 +508,7 @@ class XmppCommands:
result['name'],
result['index']))
elif result['error']:
message = ('> {}\nFailed to find subscriptions. '
message = ('> {}\nNo subscriptions were found. '
'Reason: {} (status code: {})'
.format(url, result['message'],
result['code']))
@@ -740,7 +740,7 @@ class XmppCommands:
while True:
result = await fetch.http(url)
status = result['status_code']
if not result['error']:
if result and not result['error']:
document = result['content']
feed = parse(document)
if Feed.is_feed(url, feed):
@@ -760,6 +760,10 @@ class XmppCommands:
message += ('```\nTotal of {} feeds.'
.format(len(results)))
break
elif not result:
message = ('> {}\nNo subscriptions were found.'
.format(url))
break
else:
url = result['link']
else:
@@ -767,15 +771,13 @@ class XmppCommands:
.format(url, status))
break
else:
message = ('No action has been taken.'
'\n'
'Missing URL.')
message = 'No action has been taken. Missing URL.'
case 2:
num = data[1]
if url.startswith('http'):
while True:
result = await fetch.http(url)
if not result['error']:
if result and not result['error']:
document = result['content']
status = result['status_code']
feed = parse(document)
@@ -796,6 +798,10 @@ class XmppCommands:
message += ('```\nTotal of {} feeds.'
.format(len(results)))
break
elif not result:
message = ('> {}\nNo subscriptions were found.'
.format(url))
break
else:
url = result['link']
else:
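The change in these two hunks is the added guard for fetch.http() returning an empty result: the loop now reports that no subscriptions were found instead of indexing into a missing dictionary. Indentation is not preserved in this view, so the following is only one sensible reading of the control flow, condensed and relying on the module's fetch, parse and Feed helpers (probe_feed is a hypothetical name; the message-building details are omitted):

async def probe_feed(url):
    # Keep following links reported by the fetcher until a feed is found or the fetch gives up.
    while True:
        result = await fetch.http(url)
        if result and not result['error']:
            feed = parse(result['content'])
            if Feed.is_feed(url, feed):
                # ... build the reply that lists the discovered feed(s) ...
                return feed
            # Not a feed document: retry with the alternative link the fetcher reported.
            url = result['link']
        elif not result:
            # New in this commit: an empty result no longer falls through to result['error'].
            return None
        else:
            # The fetch failed; report the HTTP status code instead of retrying.
            return result['status_code']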

View file

@@ -14,8 +14,7 @@ TODO
"""
import asyncio
from slixfeed.utilities import DateAndTime
from slixfeed.log import Logger
from slixfeed.log import Logger, Message
from slixmpp.exceptions import IqTimeout, IqError
from time import sleep
@@ -62,17 +61,17 @@ class XmppConnect:
def recover(self, message):
logger.warning(message)
print(DateAndTime.current_time(), message, 'Attempting to reconnect.')
Message.printer('Slixfeed attempting to reconnect...')
self.connection_attempts += 1
# if self.connection_attempts <= self.max_connection_attempts:
# self.reconnect(wait=5.0) # wait a bit before attempting to reconnect
# else:
# print(current_time(),"Maximum connection attempts exceeded.")
# logging.error("Maximum connection attempts exceeded.")
print(DateAndTime.current_time(), 'Attempt number', self.connection_attempts)
Message.printer('Attempt number {}'.format(self.connection_attempts))
seconds = self.reconnect_timeout or 30
seconds = int(seconds)
print(DateAndTime.current_time(), 'Next attempt within', seconds, 'seconds')
Message.printer('Next attempt will be made within {} seconds'.format(seconds))
# NOTE asyncio.sleep does not pause for the expected interval
# await asyncio.sleep(seconds)
sleep(seconds)
@@ -80,12 +79,14 @@ class XmppConnect:
def inspect(self):
print('Disconnected\nReconnecting...')
Message.printer('Disconnected')
sleep(3)
Message.printer('Reconnecting...')
try:
self.reconnect
except:
self.disconnect()
print('Problem reconnecting')
Message.printer('Problem reconnecting')
class XmppConnectTask:
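One detail worth a second look in inspect(): `self.reconnect` on its own only references the bound method and never invokes it. A minimal sketch of the presumably intended flow, reusing the module's Message and sleep imports; the added call is an assumption, not something this commit changes:

def inspect(self):
    # Report the dropped stream, wait briefly, then try to re-establish it.
    Message.printer('Disconnected')
    sleep(3)
    Message.printer('Reconnecting...')
    try:
        self.reconnect()  # assumption: slixmpp's reconnect() is meant to be called here
    except Exception:
        self.disconnect()
        Message.printer('Problem reconnecting')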

View file

@@ -34,8 +34,7 @@ class XmppGroupchat:
'bookmark {}'.format(bookmark['name']))
alias = bookmark["nick"]
muc_jid = bookmark["jid"]
Message.printer('Joining MUC {} ...'.format(muc_jid))
Message.printer('Joining to MUC {} ...'.format(muc_jid))
result = await XmppMuc.join(self, muc_jid, alias)
if result == 'ban':
await XmppBookmark.remove(self, muc_jid)
@@ -53,3 +52,4 @@ class XmppGroupchat:
elif not bookmark["jid"]:
logger.error('JID is missing for bookmark {}'
.format(bookmark['name']))
print('Done')
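For context, the loop this hunk closes walks the stored bookmarks, joins each conference room, and drops bookmarks of rooms that banned the bot. A condensed sketch of that flow with the names visible above (autojoin is a hypothetical wrapper name, and handling of the other XmppMuc.join outcomes is omitted):

async def autojoin(self, bookmarks):
    for bookmark in bookmarks:
        if not bookmark['jid']:
            logger.error('JID is missing for bookmark {}'.format(bookmark['name']))
            continue
        muc_jid = bookmark['jid']
        alias = bookmark['nick']
        Message.printer('Joining MUC {} ...'.format(muc_jid))
        result = await XmppMuc.join(self, muc_jid, alias)
        if result == 'ban':
            # The room banned the bot; remove the bookmark instead of retrying on every start.
            await XmppBookmark.remove(self, muc_jid)
    print('Done')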