forked from sch/Slixfeed

Add logging

parent f207dc1632
commit f25cb70181

7 changed files with 888 additions and 256 deletions
@@ -229,9 +229,19 @@ def main():
     logging.basicConfig(level=args.loglevel,
                         format='%(levelname)-8s %(message)s')

+    # # Setup logging.
+    # logging.basicConfig(level=args.loglevel,
+    #                     format='%(levelname)-8s %(message)s')
+    # # logging.basicConfig(format='[%(levelname)s] %(message)s')
+    # logger = logging.getLogger()
+    # logdbg = logger.debug
+    # logerr = logger.error
+    # lognfo = logger.info
+    # logwrn = logger.warning
+
     # Try configuration file
-    values = config.get_value('accounts', 'XMPP Client',
-                              ['alias', 'jid', 'password', 'hostname', 'port'])
+    key_list = ['alias', 'jid', 'password', 'hostname', 'port']
+    values = config.get_value('accounts', 'XMPP Client', key_list)
     alias = values[0]
     jid = values[1]
     password = values[2]
@@ -30,12 +30,11 @@ from bs4 import BeautifulSoup
 from feedparser import parse
 from http.client import IncompleteRead
 import json
-import logging
+from slixfeed.log import Logger
 from lxml import html
 import os
 import slixfeed.config as config
 import slixfeed.crawl as crawl
-
 import slixfeed.dt as dt
 import slixfeed.fetch as fetch
 import slixfeed.sqlite as sqlite

@@ -53,41 +52,43 @@ from slixfeed.xmpp.message import XmppMessage
 from slixfeed.xmpp.presence import XmppPresence
 from slixfeed.xmpp.upload import XmppUpload
 from slixfeed.xmpp.utility import get_chat_type
+import sys
 import tomllib
 from urllib import error
 from urllib.parse import parse_qs, urlsplit
 import xml.etree.ElementTree as ET

+logger = Logger(__name__)
+
 try:
     import xml2epub
 except ImportError:
-    logging.info(
-        "Package xml2epub was not found.\n"
-        "ePUB support is disabled.")
+    logger.error('Package xml2epub was not found.\n'
+                 'ePUB support is disabled.')

 try:
     import html2text
 except ImportError:
-    logging.info(
-        "Package html2text was not found.\n"
-        "Markdown support is disabled.")
+    logger.error('Package html2text was not found.\n'
+                 'Markdown support is disabled.')

 try:
     import pdfkit
 except ImportError:
-    logging.info(
-        "Package pdfkit was not found.\n"
-        "PDF support is disabled.")
+    logger.error('Package pdfkit was not found.\n'
+                 'PDF support is disabled.')

 try:
     from readability import Document
 except ImportError:
-    logging.info(
-        "Package readability was not found.\n"
-        "Arc90 Lab algorithm is disabled.")
+    logger.error('Package readability was not found.\n'
+                 'Arc90 Lab algorithm is disabled.')


 async def export_feeds(self, jid, jid_file, ext):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for JID {}.'
+                .format(function_name, jid))
     cache_dir = config.get_default_cache_directory()
     if not os.path.isdir(cache_dir):
         os.mkdir(cache_dir)

@@ -117,7 +118,9 @@ async def xmpp_send_status(self, jid):
     jid : str
         Jabber ID.
     """
-    logging.info('Sending a status message to JID {}'.format(jid))
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for JID {}.'
+                .format(function_name, jid))
     status_text = '📜️ Slixfeed RSS News Bot'
     jid_file = jid.replace('/', '_')
     db_file = config.get_pathname_to_database(jid_file)

@@ -169,6 +172,9 @@ async def xmpp_send_update(self, jid, num=None):
     num : str, optional
         Number. The default is None.
     """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for JID {}.'
+                .format(function_name, jid))
     jid_file = jid.replace('/', '_')
     db_file = config.get_pathname_to_database(jid_file)
     enabled = config.get_setting_value(db_file, 'enabled')

@@ -267,6 +273,9 @@ async def xmpp_send_update(self, jid, num=None):


 def manual(filename, section=None, command=None):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, filename))
     config_dir = config.get_default_config_directory()
     with open(config_dir + '/' + filename, mode="rb") as commands:
         cmds = tomllib.load(commands)

@@ -279,7 +288,7 @@ def manual(filename, section=None, command=None):
         try:
             cmd_list = cmds[section][command]
         except KeyError as e:
-            logging.error(str(e))
+            logger.error(str(e))
             cmd_list = None
     elif section:
         try:

@@ -287,7 +296,7 @@ def manual(filename, section=None, command=None):
             for cmd in cmds[section]:
                 cmd_list.extend([cmd])
         except KeyError as e:
-            logging.error('KeyError:' + str(e))
+            logger.error('KeyError:' + str(e))
             cmd_list = None
     else:
         cmd_list = []

@@ -316,6 +325,9 @@ def log_to_markdown(timestamp, filename, jid, message):
     None.

     """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, filename))
     with open(filename + '.md', 'a') as file:
         # entry = "{} {}:\n{}\n\n".format(timestamp, jid, message)
         entry = (

@@ -342,6 +354,8 @@ def is_feed_json(document):
     val : boolean
         True or False.
     """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated'.format(function_name))
     value = False
     try:
         feed = json.loads(document)

@@ -376,6 +390,8 @@ def is_feed(feed):
     val : boolean
         True or False.
     """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated'.format(function_name))
     value = False
     # message = None
     if not feed.entries:

@@ -410,6 +426,8 @@ def is_feed(feed):


 def list_unread_entries(result, feed_title, jid_file):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated'.format(function_name))
     # TODO Add filtering
     # TODO Do this when entry is added to list and mark it as read
     # DONE!

@@ -469,6 +487,9 @@ def list_unread_entries(result, feed_title, jid_file):


 def list_search_results(query, results):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for query {}.'
+                .format(function_name, query))
     message = ("Search results for '{}':\n\n```"
                .format(query))
     for result in results:

@@ -482,6 +503,9 @@ def list_search_results(query, results):


 def list_feeds_by_query(db_file, query):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for query {}.'
+                .format(function_name, query))
     results = sqlite.search_feeds(db_file, query)
     message = ('Feeds containing "{}":\n\n```'
                .format(query))

@@ -511,6 +535,9 @@ async def list_statistics(db_file):
     msg : str
         Statistics as message.
     """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
     entries_unread = await sqlite.get_number_of_entries_unread(db_file)
     entries = await sqlite.get_number_of_items(db_file, 'entries')
     archive = await sqlite.get_number_of_items(db_file, 'archive')

@@ -554,6 +581,9 @@ async def list_statistics(db_file):

 # FIXME Replace counter by len
 def list_last_entries(results, num):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated.'
+                .format(function_name))
     message = "Recent {} titles:\n\n```".format(num)
     for result in results:
         message += ("\n{}\n{}\n"

@@ -566,6 +596,9 @@ def list_last_entries(results, num):


 def pick_a_feed(lang=None):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated.'
+                .format(function_name))
     config_dir = config.get_default_config_directory()
     with open(config_dir + '/' + 'feeds.toml', mode="rb") as feeds:
         urls = tomllib.load(feeds)

@@ -575,6 +608,9 @@ def pick_a_feed(lang=None):


 def list_feeds(results):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated.'
+                .format(function_name))
     message = "\nList of subscriptions:\n\n```\n"
     for result in results:
         message += ("Name : {}\n"

@@ -597,6 +633,9 @@ def list_feeds(results):


 async def list_bookmarks(self):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated.'
+                .format(function_name))
     conferences = await XmppBookmark.get(self)
     message = '\nList of groupchats:\n\n```\n'
     for conference in conferences:

@@ -610,6 +649,9 @@ async def list_bookmarks(self):


 def export_to_markdown(jid, filename, results):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for JID {}.'
+                .format(function_name, jid))
     with open(filename, 'w') as file:
         file.write('# Subscriptions for {}\n'.format(jid))
         file.write('## Set of feeds exported with Slixfeed\n')

@@ -622,6 +664,9 @@ def export_to_markdown(jid, filename, results):

 # TODO Consider adding element jid as a pointer of import
 def export_to_opml(jid, filename, results):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for JID {}.'
+                .format(function_name, jid))
     root = ET.Element("opml")
     root.set("version", "1.0")
     head = ET.SubElement(root, "head")

@@ -645,6 +690,9 @@ def export_to_opml(jid, filename, results):


 async def import_opml(db_file, url):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
     result = await fetch.http(url)
     if not result['error']:
         document = result['content']

@@ -667,6 +715,9 @@ async def import_opml(db_file, url):


 async def add_feed(db_file, url):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
     while True:
         exist = await sqlite.get_feed_id_and_name(db_file, url)
         if not exist:

@@ -832,6 +883,9 @@ async def scan_json(db_file, url):
     url : str, optional
         URL. The default is None.
     """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
     if isinstance(url, tuple): url = url[0]
     result = await fetch.http(url)
     if not result['error']:

@@ -866,7 +920,7 @@ async def scan_json(db_file, url):
             IncompleteRead,
             error.URLError
             ) as e:
-        logging.error(e)
+        logger.error(e)
         return
     # new_entry = 0
     for entry in entries:

@@ -890,9 +944,10 @@ async def scan_json(db_file, url):
         entry_id = entry["id"] if "id" in entry.keys() else link
         feed_id = await sqlite.get_feed_id(db_file, url)
         feed_id = feed_id[0]
-        exist = await sqlite.check_entry_exist(
-            db_file, feed_id, entry_id=entry_id,
-            title=title, link=link, date=date)
+        exist = sqlite.check_entry_exist(db_file, feed_id,
+                                         entry_id=entry_id,
+                                         title=title, link=link,
+                                         date=date)
         if not exist:
             summary = entry["summary"] if "summary" in entry.keys() else ''
             if not summary:

@@ -916,12 +971,12 @@ async def scan_json(db_file, url):
                     string)
                 if reject_list:
                     read_status = 1
-                    logging.debug('Rejected : {}'
+                    logger.debug('Rejected : {}'
                                   '\n'
                                   'Keyword : {}'
                                   .format(link, reject_list))
             if isinstance(date, int):
-                logging.error('Variable "date" is int: {}'.format(date))
+                logger.error('Variable "date" is int: {}'.format(date))
             media_link = ''
             if "attachments" in entry.keys():
                 for e_link in entry["attachments"]:

@@ -938,10 +993,10 @@ async def scan_json(db_file, url):
                             media_link = trim_url(media_link)
                             break
                     except:
-                        logging.info('KeyError: "url"\n'
+                        logger.info('KeyError: "url"\n'
                                      'Missing "url" attribute for {}'
                                      .format(url))
-                        logging.info('Continue scanning for next '
+                        logger.info('Continue scanning for next '
                                      'potential enclosure of {}'
                                      .format(link))
             entry = {

@@ -965,6 +1020,9 @@ async def scan_json(db_file, url):


 async def view_feed(url):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for URL {}.'
+                .format(function_name, url))
     while True:
         result = await fetch.http(url)
         if not result['error']:

@@ -1027,6 +1085,9 @@ async def view_feed(url):


 async def view_entry(url, num):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for URL {}.'
+                .format(function_name, url))
     while True:
         result = await fetch.http(url)
         if not result['error']:

@@ -1104,6 +1165,9 @@ async def scan(db_file, url):
     url : str, optional
         URL. The default is None.
     """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and URL {}.'
+                .format(function_name, db_file, url))
     if isinstance(url, tuple): url = url[0]
     result = await fetch.http(url)
     if not result['error']:

@@ -1144,7 +1208,7 @@ async def scan(db_file, url):
                 len(feed["entries"]), updated)
             # await update_feed_status
     except (IncompleteReadError, IncompleteRead, error.URLError) as e:
-        logging.error(e)
+        logger.error(e)
         return
     # new_entry = 0
     for entry in entries:

@@ -1168,7 +1232,7 @@ async def scan(db_file, url):
         entry_id = entry.id if entry.has_key("id") else link
         feed_id = await sqlite.get_feed_id(db_file, url)
         feed_id = feed_id[0]
-        exist = await sqlite.check_entry_exist(db_file, feed_id,
+        exist = sqlite.check_entry_exist(db_file, feed_id,
                                          entry_id=entry_id,
                                          title=title, link=link,
                                          date=date)

@@ -1187,12 +1251,12 @@ async def scan(db_file, url):
                     string)
                 if reject_list:
                     read_status = 1
-                    logging.debug('Rejected : {}'
+                    logger.debug('Rejected : {}'
                                   '\n'
                                   'Keyword : {}'.format(link,
                                                         reject_list))
             if isinstance(date, int):
-                logging.error('Variable "date" is int: {}'
+                logger.error('Variable "date" is int: {}'
                               .format(date))
             media_link = ''
             if entry.has_key("links"):

@@ -1212,10 +1276,10 @@ async def scan(db_file, url):
                             media_link = trim_url(media_link)
                             break
                     except:
-                        logging.info('KeyError: "href"\n'
+                        logger.info('KeyError: "href"\n'
                                      'Missing "href" attribute for {}'
                                      .format(url))
-                        logging.info('Continue scanning for next '
+                        logger.info('Continue scanning for next '
                                      'potential enclosure of {}'
                                      .format(link))
             entry = {

@@ -1240,6 +1304,9 @@ async def scan(db_file, url):


 def get_document_title(data):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated.'
+                .format(function_name))
     try:
         document = Document(data)
         title = document.short_title()

@@ -1250,6 +1317,9 @@ def get_document_title(data):


 def get_document_content(data):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated.'
+                .format(function_name))
     try:
         document = Document(data)
         content = document.summary()

@@ -1260,6 +1330,9 @@ def get_document_content(data):


 def get_document_content_as_text(data):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated.'
+                .format(function_name))
     try:
         document = Document(data)
         content = document.summary()

@@ -1271,6 +1344,9 @@ def get_document_content_as_text(data):


 def generate_document(data, url, ext, filename, readability=False):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and URL {}.'
+                .format(function_name, filename, url))
     error = None
     if readability:
         try:

@@ -1278,15 +1354,15 @@ def generate_document(data, url, ext, filename, readability=False):
             content = document.summary()
         except:
             content = data
-            logging.warning('Check that package readability is installed.')
+            logger.warning('Check that package readability is installed.')
     else:
         content = data
     match ext:
         case "epub":
             error = generate_epub(content, filename)
             if error:
-                logging.error(error)
-                # logging.error(
+                logger.error(error)
+                # logger.error(
                 #     "Check that packages xml2epub is installed, "
                 #     "or try again.")
         case "html":

@@ -1295,14 +1371,14 @@ def generate_document(data, url, ext, filename, readability=False):
             try:
                 generate_markdown(content, filename)
             except:
-                logging.warning('Check that package html2text '
-                                'is installed, or try again.')
+                logger.warning('Check that package html2text '
+                               'is installed, or try again.')
                 error = 'Package html2text was not found.'
         case "pdf":
             error = generate_pdf(content, filename)
             if error:
-                logging.error(error)
-                # logging.warning(
+                logger.error(error)
+                # logger.warning(
                 #     "Check that packages pdfkit and wkhtmltopdf "
                 #     "are installed, or try again.")
                 # error = (

@@ -1321,6 +1397,9 @@ def generate_document(data, url, ext, filename, readability=False):


 async def extract_image_from_feed(db_file, feed_id, url):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and URL {}.'
+                .format(function_name, db_file, url))
     feed_url = sqlite.get_feed_url(db_file, feed_id)
     feed_url = feed_url[0]
     result = await fetch.http(feed_url)

@@ -1336,11 +1415,14 @@ async def extract_image_from_feed(db_file, feed_id, url):
                 image_url = link.href
                 return image_url
     except:
-        logging.error(url)
-        logging.error('AttributeError: object has no attribute "link"')
+        logger.error(url)
+        logger.error('AttributeError: object has no attribute "link"')


 async def extract_image_from_html(url):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for URL {}.'
+                .format(function_name, url))
     result = await fetch.http(url)
     if not result['error']:
         data = result['content']

@@ -1349,7 +1431,7 @@ async def extract_image_from_html(url):
             content = document.summary()
         except:
             content = data
-            logging.warning('Check that package readability is installed.')
+            logger.warning('Check that package readability is installed.')
         tree = html.fromstring(content)
         # TODO Exclude banners, class="share" links etc.
         images = tree.xpath(

@@ -1370,6 +1452,9 @@ async def extract_image_from_html(url):


 def generate_epub(text, pathname):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, pathname))
     ## create an empty eBook
     pathname_list = pathname.split("/")
     filename = pathname_list.pop()

@@ -1397,11 +1482,17 @@ def generate_epub(text, pathname):


 def generate_html(text, filename):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, filename))
     with open(filename, 'w') as file:
         file.write(text)


 def generate_markdown(text, filename):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, filename))
     h2m = html2text.HTML2Text()
     # Convert HTML to Markdown
     markdown = h2m.handle(text)

@@ -1410,6 +1501,9 @@ def generate_markdown(text, filename):


 def generate_pdf(text, filename):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, filename))
     try:
         pdfkit.from_string(text, filename)
     except IOError as error:

@@ -1419,17 +1513,26 @@ def generate_pdf(text, filename):


 def generate_txt(text, filename):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, filename))
     text = remove_html_tags(text)
     with open(filename, 'w') as file:
         file.write(text)


 def remove_html_tags(data):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated.'
+                .format(function_name))
     data = BeautifulSoup(data, "lxml").text
     data = data.replace("\n\n", "\n")
     return data


 # TODO Add support for eDonkey, Gnutella, Soulseek
 async def get_magnet(link):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for URL {}.'
+                .format(function_name, link))
     parted_link = urlsplit(link)
     queries = parse_qs(parted_link.query)
     query_xt = queries["xt"][0]

@@ -1437,12 +1540,11 @@ async def get_magnet(link):
         filename = queries["dn"][0]
         checksum = query_xt[len("urn:btih:"):]
         torrent = await fetch.magnet(link)
-        logging.debug('Attempting to retrieve {} ({})'
+        logger.debug('Attempting to retrieve {} ({})'
                       .format(filename, checksum))
         if not torrent:
-            logging.debug(
-                "Attempting to retrieve {} from HTTP caching service".format(
-                    filename))
+            logger.debug('Attempting to retrieve {} from HTTP caching service'
+                         .format(filename))
             urls = [
                 'https://watercache.libertycorp.org/get/{}/{}',
                 'https://itorrents.org/torrent/{}.torrent?title={}',

@@ -1471,6 +1573,9 @@ async def remove_nonexistent_entries(db_file, url, feed):
     feed : list
         Parsed feed document.
     """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and URL {}.'
+                .format(function_name, db_file, url))
     feed_id = await sqlite.get_feed_id(db_file, url)
     feed_id = feed_id[0]
     items = await sqlite.get_entries_of_feed(db_file, feed_id)

@@ -1577,6 +1682,9 @@ async def remove_nonexistent_entries_json(db_file, url, feed):
     feed : list
         Parsed feed document.
     """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and URL {}.'
+                .format(function_name, db_file, url))
     feed_id = await sqlite.get_feed_id(db_file, url)
     feed_id = feed_id[0]
     items = await sqlite.get_entries_of_feed(db_file, feed_id)
slixfeed/log.py (new file, 43 lines)

@@ -0,0 +1,43 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+
+"""
+
+To use this class, first, instantiate Logger with the name of your module
+or class, then call the appropriate logging methods on that instance.
+
+logger = Logger(__name__)
+logger.debug('This is a debug message')
+
+"""
+
+import logging
+
+class Logger:
+
+    def __init__(self, name):
+        self.logger = logging.getLogger(name)
+        self.logger.setLevel(logging.DEBUG)
+
+        ch = logging.StreamHandler()
+        ch.setLevel(logging.DEBUG)
+
+        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+        ch.setFormatter(formatter)
+
+        self.logger.addHandler(ch)
+
+    def critical(self, message):
+        self.logger.critical(message)
+
+    def debug(self, message):
+        self.logger.debug(message)
+
+    def error(self, message):
+        self.logger.error(message)
+
+    def info(self, message):
+        self.logger.info(message)
+
+    def warning(self, message):
+        self.logger.warning(message)
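The class above is the wrapper the rest of this commit wires in. A minimal usage sketch of that pattern, assuming the slixfeed/log.py module from this diff is importable; the module and function names below are illustrative only, not part of the diff:

import sys

from slixfeed.log import Logger

# One wrapper instance per module, as done in the modules touched by this commit.
logger = Logger(__name__)


def example_function(jid):
    # Each patched function logs its own name on entry.
    function_name = sys._getframe().f_code.co_name
    logger.info('Function {} has been initiated for JID {}.'
                .format(function_name, jid))
    logger.debug('Processing data for {}'.format(jid))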
|
@ -18,9 +18,10 @@ TODO
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from asyncio import Lock
|
from asyncio import Lock
|
||||||
import logging
|
|
||||||
# from slixfeed.data import join_url
|
# from slixfeed.data import join_url
|
||||||
|
from slixfeed.log import Logger
|
||||||
from sqlite3 import connect, Error, IntegrityError
|
from sqlite3 import connect, Error, IntegrityError
|
||||||
|
import sys
|
||||||
import time
|
import time
|
||||||
|
|
||||||
# from eliot import start_action, to_file
|
# from eliot import start_action, to_file
|
||||||
|
@ -31,10 +32,12 @@ import time
|
||||||
# # with start_action(action_type="search_entries()", query=query):
|
# # with start_action(action_type="search_entries()", query=query):
|
||||||
# # with start_action(action_type="check_entry()", link=link):
|
# # with start_action(action_type="check_entry()", link=link):
|
||||||
|
|
||||||
|
CURSORS = {}
|
||||||
|
|
||||||
# aiosqlite
|
# aiosqlite
|
||||||
DBLOCK = Lock()
|
DBLOCK = Lock()
|
||||||
|
|
||||||
CURSORS = {}
|
logger = Logger(__name__)
|
||||||
|
|
||||||
def create_connection(db_file):
|
def create_connection(db_file):
|
||||||
"""
|
"""
|
||||||
|
@ -51,6 +54,9 @@ def create_connection(db_file):
|
||||||
conn : object
|
conn : object
|
||||||
Connection object or None.
|
Connection object or None.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {}.'
|
||||||
|
.format(function_name, db_file))
|
||||||
conn = None
|
conn = None
|
||||||
try:
|
try:
|
||||||
conn = connect(db_file)
|
conn = connect(db_file)
|
||||||
|
@ -70,6 +76,9 @@ def create_tables(db_file):
|
||||||
db_file : str
|
db_file : str
|
||||||
Path to database file.
|
Path to database file.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {}.'
|
||||||
|
.format(function_name, db_file))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
archive_table_sql = (
|
archive_table_sql = (
|
||||||
"""
|
"""
|
||||||
|
@ -296,6 +305,9 @@ def get_cursor(db_file):
|
||||||
CURSORS[db_file] : object
|
CURSORS[db_file] : object
|
||||||
Cursor.
|
Cursor.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {}.'
|
||||||
|
.format(function_name, db_file))
|
||||||
if db_file in CURSORS:
|
if db_file in CURSORS:
|
||||||
return CURSORS[db_file]
|
return CURSORS[db_file]
|
||||||
else:
|
else:
|
||||||
|
@ -316,6 +328,9 @@ async def import_feeds(db_file, feeds):
|
||||||
feeds : list
|
feeds : list
|
||||||
Set of feeds (Title and URL).
|
Set of feeds (Title and URL).
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {}.'
|
||||||
|
.format(function_name, db_file))
|
||||||
async with DBLOCK:
|
async with DBLOCK:
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
@ -337,8 +352,8 @@ async def import_feeds(db_file, feeds):
|
||||||
try:
|
try:
|
||||||
cur.execute(sql, par)
|
cur.execute(sql, par)
|
||||||
except IntegrityError as e:
|
except IntegrityError as e:
|
||||||
logging.warning("Skipping: " + str(url))
|
logger.warning("Skipping: " + str(url))
|
||||||
logging.error(e)
|
logger.error(e)
|
||||||
|
|
||||||
|
|
||||||
async def add_metadata(db_file):
|
async def add_metadata(db_file):
|
||||||
|
@ -350,6 +365,9 @@ async def add_metadata(db_file):
|
||||||
db_file : str
|
db_file : str
|
||||||
Path to database file.
|
Path to database file.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {}.'
|
||||||
|
.format(function_name, db_file))
|
||||||
async with DBLOCK:
|
async with DBLOCK:
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
@ -376,6 +394,9 @@ def insert_feed_status(cur, feed_id):
|
||||||
cur : object
|
cur : object
|
||||||
Cursor object.
|
Cursor object.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for feed_id {}.'
|
||||||
|
.format(function_name, feed_id))
|
||||||
sql = (
|
sql = (
|
||||||
"""
|
"""
|
||||||
INSERT
|
INSERT
|
||||||
|
@ -389,9 +410,9 @@ def insert_feed_status(cur, feed_id):
|
||||||
try:
|
try:
|
||||||
cur.execute(sql, par)
|
cur.execute(sql, par)
|
||||||
except IntegrityError as e:
|
except IntegrityError as e:
|
||||||
logging.warning(
|
logger.warning(
|
||||||
"Skipping feed_id {} for table feeds_state".format(feed_id))
|
"Skipping feed_id {} for table feeds_state".format(feed_id))
|
||||||
logging.error(e)
|
logger.error(e)
|
||||||
|
|
||||||
|
|
||||||
def insert_feed_properties(cur, feed_id):
|
def insert_feed_properties(cur, feed_id):
|
||||||
|
@ -403,6 +424,9 @@ def insert_feed_properties(cur, feed_id):
|
||||||
cur : object
|
cur : object
|
||||||
Cursor object.
|
Cursor object.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for feed_id {}.'
|
||||||
|
.format(function_name, feed_id))
|
||||||
sql = (
|
sql = (
|
||||||
"""
|
"""
|
||||||
INSERT
|
INSERT
|
||||||
|
@ -416,14 +440,14 @@ def insert_feed_properties(cur, feed_id):
|
||||||
try:
|
try:
|
||||||
cur.execute(sql, par)
|
cur.execute(sql, par)
|
||||||
except IntegrityError as e:
|
except IntegrityError as e:
|
||||||
logging.warning(
|
logger.warning(
|
||||||
"Skipping feed_id {} for table feeds_properties".format(feed_id))
|
"Skipping feed_id {} for table feeds_properties".format(feed_id))
|
||||||
logging.error(e)
|
logger.error(e)
|
||||||
|
|
||||||
|
|
||||||
async def insert_feed(
|
async def insert_feed(db_file, url, title=None, entries=None, version=None,
|
||||||
db_file, url, title=None, entries=None, version=None,
|
encoding=None, language=None, status_code=None,
|
||||||
encoding=None, language=None, status_code=None, updated=None):
|
updated=None):
|
||||||
"""
|
"""
|
||||||
Insert a new feed into the feeds table.
|
Insert a new feed into the feeds table.
|
||||||
|
|
||||||
|
@ -448,6 +472,9 @@ async def insert_feed(
|
||||||
updated : ???, optional
|
updated : ???, optional
|
||||||
Date feed was last updated. The default is None.
|
Date feed was last updated. The default is None.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and URL {}.'
|
||||||
|
.format(function_name, db_file, url))
|
||||||
async with DBLOCK:
|
async with DBLOCK:
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
@ -501,9 +528,9 @@ async def insert_feed(
|
||||||
cur.execute(sql, par)
|
cur.execute(sql, par)
|
||||||
|
|
||||||
|
|
||||||
async def insert_feed_(
|
async def insert_feed_(db_file, url, title=None, entries=None, version=None,
|
||||||
db_file, url, title=None, entries=None, version=None,
|
encoding=None, language=None, status_code=None,
|
||||||
encoding=None, language=None, status_code=None, updated=None):
|
updated=None):
|
||||||
"""
|
"""
|
||||||
Insert a new feed into the feeds table.
|
Insert a new feed into the feeds table.
|
||||||
|
|
||||||
|
@ -532,6 +559,9 @@ async def insert_feed_(
|
||||||
updated : ???, optional
|
updated : ???, optional
|
||||||
Date feed was last updated. The default is None.
|
Date feed was last updated. The default is None.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and URL {}.'
|
||||||
|
.format(function_name, db_file, url))
|
||||||
async with DBLOCK:
|
async with DBLOCK:
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
@ -575,6 +605,9 @@ async def remove_feed_by_url(db_file, url):
|
||||||
url : str
|
url : str
|
||||||
URL of feed.
|
URL of feed.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and URL {}.'
|
||||||
|
.format(function_name, db_file, url))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
async with DBLOCK:
|
async with DBLOCK:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
@ -600,6 +633,9 @@ async def remove_feed_by_index(db_file, ix):
|
||||||
ix : str
|
ix : str
|
||||||
Index of feed.
|
Index of feed.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and Index {}.'
|
||||||
|
.format(function_name, db_file, ix))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
async with DBLOCK:
|
async with DBLOCK:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
@ -645,6 +681,9 @@ def get_feeds_by_tag_id(db_file, tag_id):
|
||||||
result : tuple
|
result : tuple
|
||||||
List of tags.
|
List of tags.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and Tag ID {}.'
|
||||||
|
.format(function_name, db_file, tag_id))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@ -677,6 +716,9 @@ def get_tags_by_feed_id(db_file, feed_id):
|
||||||
result : tuple
|
result : tuple
|
||||||
List of tags.
|
List of tags.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and Feed ID {}.'
|
||||||
|
.format(function_name, db_file, feed_id))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@ -706,6 +748,9 @@ async def set_feed_id_and_tag_id(db_file, feed_id, tag_id):
|
||||||
tag_id : str
|
tag_id : str
|
||||||
Tag ID
|
Tag ID
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {}, Feed ID {} and Tag ID {}.'
|
||||||
|
.format(function_name, db_file, feed_id, tag_id))
|
||||||
async with DBLOCK:
|
async with DBLOCK:
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
@ -741,6 +786,9 @@ def get_tag_id(db_file, tag_name):
|
||||||
ix : str
|
ix : str
|
||||||
Tag ID.
|
Tag ID.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and Tag {}.'
|
||||||
|
.format(function_name, db_file, tag_name))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@ -771,6 +819,9 @@ def get_tag_name(db_file, ix):
|
||||||
tag_name : str
|
tag_name : str
|
||||||
Tag name.
|
Tag name.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and Index {}.'
|
||||||
|
.format(function_name, db_file, ix))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@ -801,6 +852,9 @@ def is_tag_id_associated(db_file, tag_id):
|
||||||
tag_id : str
|
tag_id : str
|
||||||
Tag ID.
|
Tag ID.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and Tag ID {}.'
|
||||||
|
.format(function_name, db_file, tag_id))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@ -818,6 +872,9 @@ def is_tag_id_associated(db_file, tag_id):
|
||||||
|
|
||||||
|
|
||||||
async def delete_tag_by_index(db_file, ix):
|
async def delete_tag_by_index(db_file, ix):
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and Index {}.'
|
||||||
|
.format(function_name, db_file, ix))
|
||||||
async with DBLOCK:
|
async with DBLOCK:
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
@ -852,6 +909,9 @@ def is_tag_id_of_feed_id(db_file, tag_id, feed_id):
|
||||||
tag_id : str
|
tag_id : str
|
||||||
Tag ID.
|
Tag ID.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {}, Feed ID {} and Tag ID {}.'
|
||||||
|
.format(function_name, db_file, feed_id, tag_id))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@ -870,6 +930,9 @@ def is_tag_id_of_feed_id(db_file, tag_id, feed_id):
|
||||||
|
|
||||||
|
|
||||||
async def delete_feed_id_tag_id(db_file, feed_id, tag_id):
|
async def delete_feed_id_tag_id(db_file, feed_id, tag_id):
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {}, Feed ID {} and Tag ID {}.'
|
||||||
|
.format(function_name, db_file, feed_id, tag_id))
|
||||||
async with DBLOCK:
|
async with DBLOCK:
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
@ -898,6 +961,9 @@ async def set_new_tag(db_file, tag):
|
||||||
tag : str
|
tag : str
|
||||||
Tag
|
Tag
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and Tag {}.'
|
||||||
|
.format(function_name, db_file, tag))
|
||||||
async with DBLOCK:
|
async with DBLOCK:
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
|
@ -934,6 +1000,9 @@ async def get_feed_id_and_name(db_file, url):
|
||||||
result : tuple
|
result : tuple
|
||||||
List of ID and Name of feed.
|
List of ID and Name of feed.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and URL {}.'
|
||||||
|
.format(function_name, db_file, url))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@ -964,6 +1033,9 @@ async def get_number_of_items(db_file, table):
|
||||||
count : ?
|
count : ?
|
||||||
Number of rows.
|
Number of rows.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and Table {}.'
|
||||||
|
.format(function_name, db_file, table))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@ -990,6 +1062,9 @@ async def get_number_of_feeds_active(db_file):
|
||||||
count : str
|
count : str
|
||||||
Number of rows.
|
Number of rows.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {}.'
|
||||||
|
.format(function_name, db_file))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@ -1017,6 +1092,9 @@ async def get_number_of_entries_unread(db_file):
|
||||||
count : ?
|
count : ?
|
||||||
Number of rows.
|
Number of rows.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {}.'
|
||||||
|
.format(function_name, db_file))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@ -1053,6 +1131,9 @@ async def get_unread_entries(db_file, num):
|
||||||
result : tuple
|
result : tuple
|
||||||
News items.
|
News items.
|
||||||
"""
|
"""
|
||||||
|
function_name = sys._getframe().f_code.co_name
|
||||||
|
logger.info('Function {} has been initiated for filename {} and Num {}.'
|
||||||
|
.format(function_name, db_file, num))
|
||||||
with create_connection(db_file) as conn:
|
with create_connection(db_file) as conn:
|
||||||
cur = conn.cursor()
|
cur = conn.cursor()
|
||||||
sql = (
|
sql = (
|
||||||
|
@@ -1088,6 +1169,9 @@ def get_feed_id_by_entry_index(db_file, ix):
    feed_id : str
        Feed index.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Index {}.'
+                .format(function_name, db_file, ix))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -1118,6 +1202,9 @@ async def get_feed_id(db_file, url):
    feed_id : str
        Feed index.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and URL {}.'
+                .format(function_name, db_file, url))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -1143,6 +1230,9 @@ async def mark_entry_as_read(cur, ix):
    ix : str
        Index of entry.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Index {}.'
+                .format(function_name, db_file, ix))
    sql = (
        """
        UPDATE entries
@@ -1165,6 +1255,9 @@ def get_number_of_unread_entries_by_feed(db_file, feed_id):
    feed_id : str
        Feed Id.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Feed ID {}.'
+                .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -1190,6 +1283,9 @@ async def mark_feed_as_read(db_file, feed_id):
    feed_id : str
        Feed Id.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Feed ID {}.'
+                .format(function_name, db_file, feed_id))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1215,6 +1311,9 @@ async def delete_entry_by_id(db_file, ix):
    ix : str
        Index.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Index {}.'
+                .format(function_name, db_file, ix))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1240,6 +1339,9 @@ async def archive_entry(db_file, ix):
    ix : str
        Index.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Index {}.'
+                .format(function_name, db_file, ix))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1255,11 +1357,9 @@ async def archive_entry(db_file, ix):
            par = (ix,)
            try:
                cur.execute(sql, par)
-            except:
-                print(
-                    "ERROR DB insert from entries "
-                    "into archive at index", ix
-                )
+            except Exception as e:
+                print('ERROR DB insert from entries into archive at index {} '
+                      'for {}. Reason: {}'.format(ix, db_file, e))
            sql = (
                """
                DELETE
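The hunk above tightens archive_entry's error handling: the bare except clause becomes except Exception as e and the reason is folded into the message, although the report still goes through print. For comparison, a logger-based variant of the same handler might look like the following sketch (the helper name archive_row is illustrative and not part of the commit):

    import logging

    logger = logging.getLogger(__name__)

    def archive_row(cur, sql, par, ix, db_file):
        # Same shape as the handler above, but the failure is reported via the
        # logger, so it carries a level and can be routed like the other messages.
        try:
            cur.execute(sql, par)
        except Exception as e:
            logger.error('DB insert from entries into archive at index {} '
                         'for {}. Reason: {}'.format(ix, db_file, e))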
@@ -1278,6 +1378,9 @@ async def archive_entry(db_file, ix):


def get_feed_title(db_file, ix):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Index {}.'
+                .format(function_name, db_file, ix))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -1305,6 +1408,9 @@ async def set_feed_title(db_file, feed_id, name):
    name : str
        New name.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}, Feed ID {} and Name {}.'
+                .format(function_name, db_file, feed_id, name))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1323,6 +1429,9 @@ async def set_feed_title(db_file, feed_id, name):


def get_entry_title(db_file, ix):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Index {}.'
+                .format(function_name, db_file, ix))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = ( # TODO Handletable archive too
@@ -1338,6 +1447,9 @@ def get_entry_title(db_file, ix):


def get_entry_url(db_file, ix):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Index {}.'
+                .format(function_name, db_file, ix))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = ( # TODO Handletable archive too
@@ -1353,6 +1465,9 @@ def get_entry_url(db_file, ix):


def get_feed_url(db_file, ix):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Index {}.'
+                .format(function_name, db_file, ix))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -1368,6 +1483,9 @@ def get_feed_url(db_file, ix):


async def mark_as_read(db_file, ix):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Index {}.'
+                .format(function_name, db_file, ix))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1390,6 +1508,9 @@ async def mark_all_as_read(db_file):
    db_file : str
        Path to database file.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1420,6 +1541,9 @@ async def delete_archived_entry(cur, ix):
    ix : str
        Index of entry.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for Index {}.'
+                .format(function_name, ix))
    sql = (
        """
        DELETE
@@ -1440,6 +1564,9 @@ async def update_statistics(cur):
    cur : object
        Cursor object.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated.'
+                .format(function_name))
    stat_dict = {}
    stat_dict["feeds"] = await get_number_of_items(cur, 'feeds')
    stat_dict["entries"] = await get_number_of_items(cur, 'entries')
@@ -1491,6 +1618,9 @@ async def set_enabled_status(db_file, feed_id, status):
    status : int
        0 or 1.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}, Feed ID {} and Status {}.'
+                .format(function_name, db_file, feed_id, status))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1519,8 +1649,8 @@ When time functions of slixfeed.timedate
were async, there were errors of coroutines

"""
-async def add_entry(
-        db_file, title, link, entry_id, feed_id, date, read_status):
+async def add_entry(db_file, title, link, entry_id, feed_id, date,
+                    read_status):
    """
    Add a new entry row into the entries table.

@@ -1541,6 +1671,9 @@ async def add_entry(
    read_status : str
        0 or 1.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Feed ID {}.'
+                .format(function_name, db_file, feed_id))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1590,6 +1723,9 @@ async def add_entries_and_update_timestamp(db_file, feed_id, new_entries):
    new_entries : tuple
        Set of entries as dict.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Feed ID {}.'
+                .format(function_name, db_file, feed_id))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1639,6 +1775,9 @@ async def set_date(db_file, feed_id):
    feed_id : str
        Feed Id.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Feed ID {}.'
+                .format(function_name, db_file, feed_id))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1670,6 +1809,9 @@ async def update_feed_status(db_file, feed_id, status_code):
    status : str
        Status ID or message.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}, Feed ID {} and Status Code {}.'
+                .format(function_name, db_file, feed_id, status_code))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1701,6 +1843,9 @@ async def update_feed_validity(db_file, feed_id, valid):
    valid : boolean
        0 or 1.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}, Feed ID {} and Validity {}.'
+                .format(function_name, db_file, feed_id, valid))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1733,6 +1878,9 @@ async def update_feed_properties(db_file, feed_id, entries, updated):
    updated : ???
        Date feed was last updated.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Feed ID {}.'
+                .format(function_name, db_file, feed_id))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1761,6 +1909,9 @@ async def maintain_archive(db_file, limit):
    limit : str
        Number of maximum entries to store.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Limit {}.'
+                .format(function_name, db_file, limit))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -1815,6 +1966,9 @@ async def get_entries_of_feed(db_file, feed_id):
    feed_id : str
        Feed Id.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Feed ID {}.'
+                .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -1869,6 +2023,9 @@ async def get_feeds_url(db_file):
    result : tuple
        URLs of active feeds.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -1897,6 +2054,9 @@ def get_feeds_by_enabled_state(db_file, enabled_state):
    result : tuple
        List of URLs.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and State {}.'
+                .format(function_name, db_file, enabled_state))
    if enabled_state:
        enabled_state = 1
    else:
@@ -1930,6 +2090,9 @@ async def get_active_feeds_url(db_file):
    result : tuple
        URLs of active feeds.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -1958,6 +2121,9 @@ def get_tags(db_file):
    result : tuple
        List of tags.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -1984,6 +2150,9 @@ async def get_feeds(db_file):
    result : tuple
        URLs of feeds.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    # TODO
    # 1) Select id from table feeds
    # Select name, url (feeds) updated, enabled, feed_id (status)
@@ -2017,6 +2186,9 @@ async def last_entries(db_file, num):
    titles_list : tuple
        List of recent N entries as message.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Num {}.'
+                .format(function_name, db_file, num))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        # sql = (
@@ -2059,6 +2231,9 @@ def search_feeds(db_file, query):
    result : tuple
        Feeds of specified keywords as message.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Query {}.'
+                .format(function_name, db_file, query))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2091,6 +2266,9 @@ async def search_entries(db_file, query):
    titles_list : tuple
        Entries of specified keywords as message.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Query {}.'
+                .format(function_name, db_file, query))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2132,8 +2310,8 @@ ERROR DATE: result = https://blog.heckel.io/feed/
19:32:06 ERROR DATE: result = https://mwl.io/feed

"""
-async def check_entry_exist(
-        db_file, feed_id, entry_id=None, title=None, link=None, date=None):
+def check_entry_exist(db_file, feed_id, entry_id=None, title=None, link=None,
+                      date=None):
    """
    Check whether an entry exists.
    If entry has an ID, check by ID.
@@ -2160,6 +2338,9 @@ async def check_entry_exist(
    bool
        True or None.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Feed ID {}.'
+                .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        exist = False
@@ -2194,8 +2375,8 @@ async def check_entry_exist(
                result = cur.execute(sql, par).fetchone()
                if result: exist = True
            except:
-                logging.error("source =", feed_id)
-                logging.error("date =", date)
+                logger.error("source =", feed_id)
+                logger.error("date =", date)
            else:
                sql = (
                    """
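One caveat on the two calls rewritten above: if the logger object follows the standard-library logging signature (an assumption here, since its wrapper is not shown in this excerpt), extra positional arguments are treated as %-style formatting values, so error("source =", feed_id) has no placeholder to receive feed_id and the value is never rendered (logging reports a formatting error instead). A placeholder form avoids that; the sample values below are purely illustrative:

    import logging

    logger = logging.getLogger(__name__)
    feed_id, date = 7, '2024-01-06'  # sample values for illustration

    # '%s' placeholders let the logging machinery interpolate the arguments lazily.
    logger.error('source = %s', feed_id)
    logger.error('date = %s', date)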
@@ -2237,6 +2418,10 @@ async def set_setting_value(db_file, key_value):
    key = key_value[0]
    value = key_value[1]
+
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}, Key {} and Value {}.'
+                .format(function_name, db_file, key, value))

    if not value:
        match key:
            case 'interval':
@@ -2288,6 +2473,11 @@ async def update_setting_value(db_file, key_value):
    # val = 0
    key = key_value[0]
    value = key_value[1]
+
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}, Key {} and Value {}.'
+                .format(function_name, db_file, key, value))
+
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -2310,6 +2500,9 @@ async def update_setting_value(db_file, key_value):


async def delete_filter(db_file, key):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Key {}.'
+                .format(function_name, db_file, key))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -2325,6 +2518,9 @@ async def delete_filter(db_file, key):


async def delete_setting(db_file, key):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Key {}.'
+                .format(function_name, db_file, key))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -2340,6 +2536,9 @@ async def delete_setting(db_file, key):


async def delete_settings(db_file):
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -2369,6 +2568,9 @@ def get_setting_value(db_file, key):
    val : str
        Numeric value.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Key {}.'
+                .format(function_name, db_file, key))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2399,6 +2601,9 @@ def is_setting_key(db_file, key):
    key : str
        Key.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Key {}.'
+                .format(function_name, db_file, key))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2429,6 +2634,11 @@ async def set_filter_value(db_file, key_value):
    """
    key = key_value[0]
    val = key_value[1]
+
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}, Key {} and Value {}.'
+                .format(function_name, db_file, key, val))
+
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -2473,6 +2683,11 @@ async def update_filter_value(db_file, key_value):
    # val = 0
    key = key_value[0]
    val = key_value[1]
+
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}, Key {} and Value {}.'
+                .format(function_name, db_file, key, val))
+
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
@@ -2506,6 +2721,9 @@ def is_filter_key(db_file, key):
    key : str
        Key.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}, Key {}.'
+                .format(function_name, db_file, key))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2536,6 +2754,9 @@ def get_filter_value(db_file, key):
    value : str
        List of strings.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}, Key {}.'
+                .format(function_name, db_file, key))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2563,6 +2784,9 @@ async def set_last_update_time(db_file):
    -------
    None.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2595,6 +2819,9 @@ async def get_last_update_time(db_file):
    val : str
        Time.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        try:
@@ -2609,7 +2836,7 @@ async def get_last_update_time(db_file):
            value = str(value)
        except:
            value = None
-            logging.debug(
+            logger.debug(
                "No specific value set for key last_update.")
        return value

@@ -2627,6 +2854,9 @@ async def update_last_update_time(db_file):
    -------
    None.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2661,6 +2891,9 @@ def get_categories(db_file):
    categories : str
        List of categories.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2688,6 +2921,9 @@ def get_locales(db_file):
    locales : tuple
        List of locales.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2715,6 +2951,9 @@ def get_nations(db_file):
    nations : tuple
        List of nations.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2769,6 +3008,9 @@ def get_titles_tags_urls(db_file):
    titles_urls : tuple
        List of titles and urls.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {}.'
+                .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -2797,6 +3039,9 @@ def get_titles_tags_urls_by_category(db_file, category):
    titles_urls : tuple
        List of titles and urls.
    """
+    function_name = sys._getframe().f_code.co_name
+    logger.info('Function {} has been initiated for filename {} and Category {}.'
+                .format(function_name, db_file, category))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
@@ -1,2 +1,2 @@
-__version__ = '0.1.25'
-__version_info__ = (0, 1, 25)
+__version__ = '0.1.26'
+__version_info__ = (0, 1, 26)
File diff suppressed because it is too large
@@ -9,7 +9,7 @@ Accept symbols 🉑️ 👍️ ✍

TODO

-Remove subscription from JID that do not (stopped) share presence.
+Remove subscription from JID that do not (i.e. has stopped) share presence.

"""

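The messages added throughout this diff go through a shared, module-level logger object whose implementation lies outside this excerpt. Purely as an assumption of what a minimal stand-in compatible with the single-string info, debug and error calls seen here could look like (an illustrative sketch, not the project's actual class):

    import logging

    class DemoLogger:
        # Thin convenience wrapper around logging.getLogger; illustrative only.
        def __init__(self, name):
            self._logger = logging.getLogger(name)

        def info(self, message):
            self._logger.info(message)

        def debug(self, message):
            self._logger.debug(message)

        def error(self, message):
            self._logger.error(message)

    logger = DemoLogger(__name__)
    logger.info('logger wrapper ready')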