forked from sch/Slixfeed
Fix a couple of errors and split functions
parent 94af40cf40
commit 96f3369539
6 changed files with 114 additions and 148 deletions
@@ -69,16 +69,16 @@ def get_value(filename, section, keys):
             key = keys
             try:
                 result = section_res[key]
-                logging.debug("Found value {} for key {}".format(
-                    value, key))
+                logging.debug(
+                    "Found value {} for key {}".format(result, key))
             except:
                 result = ''
                 # logging.error("Missing key:", key)
             if result == None:
                 logging.error(
-                    "Check configuration file {} for "
-                    "missing key {} under section {}.".format(
-                        filename, section, keys))
+                    "Check configuration file {}.ini for "
+                    "missing key(s) \"{}\" under section [{}].".format(
+                        filename, keys, section))
             else:
                 return result

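The pair of fixes above replaces the stray value name with the variable that was actually read (result) and makes the error message name the .ini file and quote the missing key(s). A standalone sketch of the corrected lookup path, using only the standard library (the section and key names here are illustrative, not taken from the project's settings):

    import configparser
    import logging

    parser = configparser.RawConfigParser()
    parser.read_string("[Network]\nhttp_proxy = \n")   # illustrative section/key
    section_res = parser["Network"]

    for key in ("http_proxy", "no_such_key"):
        try:
            result = section_res[key]
            logging.debug(
                "Found value {} for key {}".format(result, key))
        except KeyError:
            # as in get_value() above: fall back to '' and report the missing key
            result = ''
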
@@ -376,7 +376,7 @@ async def add_feed_no_check(db_file, data):
     url = data[0]
     title = data[1]
     url = trim_url(url)
-    exist = await sqlite.check_feed_exist(db_file, url)
+    exist = await sqlite.is_feed_exist(db_file, url)
     if not exist:
         msg = await sqlite.insert_feed(db_file, url, title)
         await download_updates(db_file, [url])

@@ -409,7 +409,7 @@ async def add_feed(db_file, url):
     """
     msg = None
     url = trim_url(url)
-    exist = await sqlite.check_feed_exist(db_file, url)
+    exist = await sqlite.is_feed_exist(db_file, url)
     if not exist:
         res = await download_feed(url)
         if res[0]:

@@ -417,9 +417,14 @@ async def add_feed(db_file, url):
             title = utility.get_title(url, feed)
             if utility.is_feed(url, feed):
                 status = res[1]
-                msg = await sqlite.insert_feed(
+                await sqlite.insert_feed(
                     db_file, url, title, status)
                 await download_updates(db_file, [url])
+                title = title if title else url
+                msg = (
+                    "> {}\nNews source \"{}\" has been added "
+                    "to subscription list."
+                    ).format(url, title)
             else:
                 msg = await probe_page(
                     add_feed, url, res[0], db_file=db_file)

@@ -497,7 +502,7 @@ async def download_feed(url):
     user_agent = "Slixfeed/0.1"
     if not len(user_agent):
         user_agent = "Slixfeed/0.1"
-    proxy = config.get_value("settings", "Network", "http_proxy")
+    proxy = (config.get_value("settings", "Network", "http_proxy")) or ''
     timeout = ClientTimeout(total=10)
     headers = {'User-Agent': user_agent}
     async with ClientSession(headers=headers) as session:

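The proxy line above switches to the fallback idiom this commit uses elsewhere: because get_value() returns '' when a key is missing (see the first hunk), the caller can chain an `or` default instead of testing the result in a separate if. A minimal sketch, where get_value() is a stand-in for the project's config.get_value rather than the real implementation:

    def get_value(filename, section, key):
        # stand-in: behave as if the key were missing from the .ini file
        return ''

    proxy = (get_value("settings", "Network", "http_proxy")) or ''
    seconds = int(get_value("accounts", "XMPP Connect", "reconnect_timeout") or 30)
    print(repr(proxy), seconds)   # '' 30
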
@@ -177,24 +177,18 @@ async def insert_feed(db_file, url, title=None, status=None):
         Feed Title. The default is None.
     status : str, optional
         HTTP status code. The default is None.
-
-    Returns
-    -------
-    msg : str
-        Message.
     """
     #TODO consider async with DBLOCK
     #conn = create_connection(db_file)
-
     # with create_connection(db_file) as conn:
-    # #exist = await check_feed_exist(conn, url)
-    # exist = await check_feed_exist(db_file, url)
+    # #exist = await is_feed_exist(conn, url)
+    # exist = await is_feed_exist(db_file, url)

     # if not exist:
     # status = await main.download_feed(url)
     # else:
     # return "News source is already listed in the subscription list"

     async with DBLOCK:
         with create_connection(db_file) as conn:
             cur = conn.cursor()

@@ -208,13 +202,6 @@ async def insert_feed(db_file, url, title=None, status=None):
                 )
             cur.execute(sql, feed)
-
-            source = title if title else '<' + url + '>'
-            msg = (
-                "> {}\nNews source \"{}\" has been added "
-                "to subscription list."
-                ).format(url, source)
-            return msg


 async def remove_feed(db_file, ix):
     """

@@ -226,61 +213,40 @@ async def remove_feed(db_file, ix):
         Path to database file.
     ix : str
         Index of feed.
-
-    Returns
-    -------
-    msg : str
-        Message.
     """
     with create_connection(db_file) as conn:
         async with DBLOCK:
             cur = conn.cursor()
-            try:
-                sql = (
-                    "SELECT address "
-                    "FROM feeds "
-                    "WHERE id = ?"
-                    )
-                # cur
-                # for i in url:
-                # url = i[0]
-                url = cur.execute(sql, (ix,)).fetchone()[0]
-                sql = (
-                    "SELECT name "
-                    "FROM feeds "
-                    "WHERE id = ?"
-                    )
-                name = cur.execute(sql, (ix,)).fetchone()[0]
-                # NOTE Should we move DBLOCK to this line? 2022-12-23
-                sql = (
-                    "DELETE "
-                    "FROM entries "
-                    "WHERE source = ?"
-                    )
-                cur.execute(sql, (url,))
-                sql = (
-                    "DELETE "
-                    "FROM archive "
-                    "WHERE source = ?"
-                    )
-                cur.execute(sql, (url,))
-                sql = (
-                    "DELETE FROM feeds "
-                    "WHERE id = ?"
-                    )
-                cur.execute(sql, (ix,))
-                msg = (
-                    "> {}\nNews source \"{}\" has been removed "
-                    "from subscription list."
-                    ).format(url, name)
-            except:
-                msg = (
-                    "No news source with ID {}."
-                    ).format(ix)
-            return msg
+            sql = (
+                "SELECT address "
+                "FROM feeds "
+                "WHERE id = ?"
+                )
+            # cur
+            # for i in url:
+            # url = i[0]
+            url = cur.execute(sql, (ix,)).fetchone()[0]
+            # NOTE Should we move DBLOCK to this line? 2022-12-23
+            sql = (
+                "DELETE "
+                "FROM entries "
+                "WHERE source = ?"
+                )
+            cur.execute(sql, (url,))
+            sql = (
+                "DELETE "
+                "FROM archive "
+                "WHERE source = ?"
+                )
+            cur.execute(sql, (url,))
+            sql = (
+                "DELETE FROM feeds "
+                "WHERE id = ?"
+                )
+            cur.execute(sql, (ix,))


-async def check_feed_exist(db_file, url):
+async def is_feed_exist(db_file, url):
     """
     Check whether a feed exists.
+    Query for feeds by given url.

@@ -304,7 +270,8 @@ async def check_feed_exist(db_file, url):
             "WHERE address = ?"
             )
         result = cur.execute(sql, (url,)).fetchone()
-        return result
+        if result:
+            return True


 async def get_number_of_items(db_file, table):

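After the rename, is_feed_exist() returns True when the address is already stored and None (falsy) otherwise, so the `if not exist` checks in add_feed and add_feed_no_check keep working unchanged. A small caller-side sketch; it assumes the repository's sqlite module is importable and that db_file, url and title come from the surrounding handler:

    async def subscribe(db_file, url, title):
        # is_feed_exist() and insert_feed() are the functions shown in the hunks above
        exist = await sqlite.is_feed_exist(db_file, url)
        if not exist:
            await sqlite.insert_feed(db_file, url, title)
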
@@ -406,8 +373,8 @@ async def get_unread_entries(db_file, num):

     Returns
     -------
-    entry : str
-        News item message.
+    results : ???
+        News items.
     """
     with create_connection(db_file) as conn:
         cur = conn.cursor()

@@ -641,9 +608,9 @@ async def update_statistics(cur):
         cur.execute(sql, (ix, i, stat_dict[i]))


-async def toggle_status(db_file, ix):
+async def set_enabled_status(db_file, ix, status):
     """
-    Toggle status of feed.
+    Set status of feed to enabled or not enabled (i.e. disabled).

     Parameters
     ----------

@@ -651,56 +618,21 @@ async def toggle_status(db_file, ix):
         Path to database file.
     ix : str
         Index of entry.
-
-    Returns
-    -------
-    msg : str
-        Message.
+    status : int
+        0 or 1.
     """
     async with DBLOCK:
         with create_connection(db_file) as conn:
             cur = conn.cursor()
-            try:
-                #cur = get_cursor(db_file)
-                sql = (
-                    "SELECT name "
-                    "FROM feeds "
-                    "WHERE id = :id"
-                    )
-                title = cur.execute(sql, (ix,)).fetchone()[0]
-                sql = (
-                    "SELECT enabled "
-                    "FROM feeds "
-                    "WHERE id = ?"
-                    )
-                # NOTE [0][1][2]
-                status = cur.execute(sql, (ix,)).fetchone()[0]
-                # FIXME always set to 1
-                # NOTE Maybe because is not integer
-                # TODO Reset feed table before further testing
-                if status == 1:
-                    status = 0
-                    state = "disabled"
-                else:
-                    status = 1
-                    state = "enabled"
-                sql = (
-                    "UPDATE feeds "
-                    "SET enabled = :status "
-                    "WHERE id = :id"
-                    )
-                cur.execute(sql, {
-                    "status": status,
-                    "id": ix
-                    })
-                msg = (
-                    "Updates from '{}' are now {}."
-                    ).format(title, state)
-            except:
-                msg = (
-                    "No news source with ID {}."
-                    ).format(ix)
-            return msg
+            sql = (
+                "UPDATE feeds "
+                "SET enabled = :status "
+                "WHERE id = :id"
+                )
+            cur.execute(sql, {
+                "status": status,
+                "id": ix
+                })


 async def set_date(cur, url):

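set_enabled_status() keeps the named-parameter UPDATE from the old toggle_status() but takes the target value as an argument instead of flipping whatever is stored. The statement on its own, runnable against an in-memory database (the two-column table is a reduced stand-in for the real feeds schema):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    cur = conn.cursor()
    cur.execute("CREATE TABLE feeds (id INTEGER PRIMARY KEY, enabled INTEGER)")
    cur.execute("INSERT INTO feeds (id, enabled) VALUES (5, 1)")
    sql = (
        "UPDATE feeds "
        "SET enabled = :status "
        "WHERE id = :id"
        )
    cur.execute(sql, {"status": 0, "id": 5})   # disable feed 5
    print(cur.execute("SELECT enabled FROM feeds WHERE id = 5").fetchone())   # (0,)
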
@@ -836,9 +768,10 @@ async def add_entry(cur, entry):
     try:
         cur.execute(sql, entry)
     except:
-        # None
-        print("Unknown error for sqlite.add_entry")
-        print(entry)
+        None
+        # print("Unknown error for sqlite.add_entry")
+        # print(entry)
+        #
         # print(current_time(), "COROUTINE OBJECT NOW")
         # for i in entry:
         # print(type(i))

@@ -1085,7 +1018,7 @@ async def get_feeds(db_file):

     Returns
     -------
-    msg : ???
+    results : ???
         URLs of feeds.
     """
     cur = get_cursor(db_file)

@@ -340,7 +340,7 @@ async def refresh_task(self, jid, callback, key, val=None):
     except:
         logging.debug(
             "No task of type {} to cancel for "
-            "JID {} (clean_tasks)").format(key, jid)
+            "JID {} (clean_tasks)".format(key, jid))
     # task_manager[jid][key] = loop.call_at(
     # loop.time() + 60 * float(val),
     # loop.create_task,

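The parenthesis move above looks like one of the errors the commit message refers to: in the old form the call to logging.debug() closes before .format() runs, so the message keeps its literal {} placeholders and .format() is then invoked on logging.debug()'s return value (None), which raises AttributeError. A sketch of the corrected call with illustrative values for key and jid:

    import logging

    key, jid = "status", "user@example.org"   # illustrative values
    logging.debug(
        "No task of type {} to cancel for "
        "JID {} (clean_tasks)".format(key, jid))
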
@@ -17,9 +17,7 @@ async def recover_connection(self, event, message):
     # print(current_time(),"Maximum connection attempts exceeded.")
     # logging.error("Maximum connection attempts exceeded.")
     print(current_time(), "Attempt number", self.connection_attempts)
-    seconds = get_value("accounts", "XMPP Connect", "reconnect_timeout")
-    if not seconds:
-        seconds = 30
+    seconds = (get_value("accounts", "XMPP Connect", "reconnect_timeout")) or 30
     seconds = int(seconds)
     print(current_time(), "Next attempt within", seconds, "seconds")
     # NOTE asyncio.sleep doesn't interval as expected

@@ -327,12 +327,19 @@ async def message(self, message):
                 else:
                     response = "Missing keywords."
                 send_reply_message(self, message, response)
+            case _ if message_lowercase.startswith("import "):
+                status_type = "dnd"
+                status_message = (
+                    "📥️ Procesing request to import feeds ..."
+                    )
+                send_status_message(
+                    self, jid, status_type, status_message)
             case _ if message_lowercase.startswith("export "):
                 key = message_text[7:]
                 if key in ("opml", "html", "md", "xbel"):
                     status_type = "dnd"
                     status_message = (
-                        "📂️ Procesing request to export feeds into {} ..."
+                        "📤️ Procesing request to export feeds into {} ..."
                         ).format(key)
                     send_status_message(
                         self, jid, status_type, status_message)

@@ -366,12 +373,12 @@ async def message(self, message):
                 else:
                     response = "Unsupported filetype."
                 send_reply_message(self, message, response)
-            case _ if (message_lowercase.startswith("gemini") or
-                       message_lowercase.startswith("gopher")):
+            case _ if (message_lowercase.startswith("gemini:") or
+                       message_lowercase.startswith("gopher:")):
                 response = "Gemini and Gopher are not supported yet."
                 send_reply_message(self, message, response)
             case _ if (message_lowercase.startswith("http") or
-                       message_lowercase.startswith("feed")):
+                       message_lowercase.startswith("feed:")):
                 url = message_text
                 await task.clean_tasks_xmpp(jid, ["status"])
                 status_type = "dnd"

@@ -624,16 +631,22 @@ async def message(self, message):
                 ix = message_text[7:]
                 if ix:
                     db_file = get_pathname_to_database(jid)
-                    response = await sqlite.remove_feed(db_file, ix)
-                    # await refresh_task(
-                    # self,
-                    # jid,
-                    # send_status,
-                    # "status",
-                    # 20
-                    # )
-                    await task.clean_tasks_xmpp(jid, ["status"])
-                    await task.start_tasks_xmpp(self, jid, ["status"])
+                    try:
+                        await sqlite.remove_feed(db_file, ix)
+                        response = (
+                            "> {}\nNews source has been removed "
+                            "from subscription list.").format(url)
+                        # await refresh_task(
+                        # self,
+                        # jid,
+                        # send_status,
+                        # "status",
+                        # 20
+                        # )
+                        await task.clean_tasks_xmpp(jid, ["status"])
+                        await task.start_tasks_xmpp(self, jid, ["status"])
+                    except:
+                        response = "No news source with ID {}.".format(ix)
                 else:
                     response = "Missing feed ID."
                 send_reply_message(self, message, response)

@@ -687,10 +700,27 @@ async def message(self, message):
                 result = await sqlite.statistics(db_file)
                 response = compose.list_statistics(result)
                 send_reply_message(self, message, response)
-            case _ if message_lowercase.startswith("status "):
+            case _ if message_lowercase.startswith("disable "):
+                ix = message_text[8:]
+                db_file = get_pathname_to_database(jid)
+                try:
+                    await sqlite.set_enabled_status(db_file, ix, 0)
+                    response = (
+                        "Updates are now disabled for news source {}."
+                        ).format(ix)
+                except:
+                    response = "No news source with ID {}.".format(ix)
+                send_reply_message(self, message, response)
+            case _ if message_lowercase.startswith("enable "):
                 ix = message_text[7:]
                 db_file = get_pathname_to_database(jid)
-                response = await sqlite.toggle_status(db_file, ix)
+                try:
+                    await sqlite.set_enabled_status(db_file, ix, 1)
+                    response = (
+                        "Updates are now disabled for news source {}."
+                        ).format(ix)
+                except:
+                    response = "No news source with ID {}.".format(ix)
                 send_reply_message(self, message, response)
             case "stop":
                 # FIXME

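Taken together with the sqlite.py hunks, the handler changes show the new division of labour: remove_feed() and set_enabled_status() only touch the database, while the XMPP handler owns both the success and the failure reply. A condensed sketch of that calling convention (it assumes the repository's sqlite module; the reply strings mirror the ones above):

    async def disable_feed(db_file, ix):
        # set_enabled_status() is the function introduced in this commit
        try:
            await sqlite.set_enabled_status(db_file, ix, 0)
            response = "Updates are now disabled for news source {}.".format(ix)
        except Exception:
            response = "No news source with ID {}.".format(ix)
        return response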