Ad-Hoc: Add more operator options;

Ad-Hoc: Add menus (list-single) for selection from a fixed list of bookmarks and contacts;
Database: Identifier (node name) now uses hyphens instead of dots;
Database: SQLite database now stores more items;
Bookmarks: Improve code;
MUC: Improve code;
SQLite: Major code changes to adapt to the new tables;
URL: Fix redirection (hostname switcher).
Schimon Jehudah 2024-04-05 15:25:04 +00:00
parent e0bc0bddf7
commit 60756dbdd2
12 changed files with 1786 additions and 1188 deletions
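
The identifier scheme noted above derives the PubSub node name from the feed's hostname, with dots replaced by hyphens and a numeric suffix appended to avoid collisions. A minimal sketch of the allocation loop that recurs throughout this commit, with check_identifier_exist standing in for the new sqlite.check_identifier_exist helper:

    from urllib.parse import urlsplit

    def allocate_identifier(db_file, url, check_identifier_exist):
        """Derive a node identifier from a feed URL.

        check_identifier_exist is assumed to behave like
        sqlite.check_identifier_exist in the diff below: it returns a
        truthy value when the identifier is already allocated.
        """
        hostname = urlsplit(url).netloc.replace('.', '-')
        counter = 0
        identifier = '{}:{}'.format(hostname, counter)
        # Increment the counter until an unallocated identifier is found.
        while check_identifier_exist(db_file, identifier):
            counter += 1
            identifier = '{}:{}'.format(hostname, counter)
        return identifier  # e.g. 'www-example-org:0'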


@@ -183,7 +183,7 @@ async def xmpp_send_status_message(self, jid):
             status_mode = 'dnd'
             status_text = jid_task[list(jid_task.keys())[0]]
         else:
-            feeds = sqlite.get_number_of_items(db_file, 'feeds')
+            feeds = sqlite.get_number_of_items(db_file, 'feeds_properties')
             # print(await current_time(), jid, "has", feeds, "feeds")
             if not feeds:
                 status_mode = 'available'
@@ -227,21 +227,22 @@ async def xmpp_send_pubsub(self, jid_bare, num=None):
         subscriptions = sqlite.get_active_feeds_url(db_file)
         for url in subscriptions:
             url = url[0]
+            feed_id = sqlite.get_feed_id(db_file, url)
+            feed_id = feed_id[0]
+            feed_title = None
+            feed_summary = None
             if jid_bare == self.boundjid.bare:
                 node = 'urn:xmpp:microblog:0'
-                feed_title = None
-                feed_subtitle = None
             else:
-                feed_id = sqlite.get_feed_id(db_file, url)
-                feed_id = feed_id[0]
                 feed_title = sqlite.get_feed_title(db_file, feed_id)
                 feed_title = feed_title[0]
-                feed_summary = None
-                node = sqlite.get_node_name(db_file, feed_id)
+                feed_subtitle = sqlite.get_feed_subtitle(db_file, feed_id)
+                feed_subtitle = feed_subtitle[0]
+                node = sqlite.get_feed_identifier(db_file, feed_id)
                 node = node[0]
             xep = None
             iq_create_node = XmppPubsub.create_node(
-                self, jid_bare, node, xep, feed_title, feed_summary)
+                self, jid_bare, node, xep, feed_title, feed_subtitle)
             await XmppIQ.send(self, iq_create_node)
             entries = sqlite.get_unread_entries_of_feed(db_file, feed_id)
             feed_properties = sqlite.get_feed_properties(db_file, feed_id)
@@ -251,17 +252,21 @@ async def xmpp_send_pubsub(self, jid_bare, num=None):
             # if num and counter < num:
             report[url] = len(entries)
             for entry in entries:
-                feed_entry = {'author' : None,
-                              'authors' : None,
-                              'category' : None,
-                              'content' : None,
-                              'description' : entry[3],
-                              'link' : entry[2],
-                              'links' : entry[4],
-                              'tags' : None,
-                              'title' : entry[1],
-                              'type' : None,
-                              'updated' : entry[7]}
+                feed_entry = {'authors' : entry[3],
+                              'content' : entry[6],
+                              'content_type' : entry[7],
+                              'contact' : entry[4],
+                              'contributors' : entry[5],
+                              'summary' : entry[8],
+                              'summary_type' : entry[9],
+                              'enclosures' : entry[13],
+                              'language' : entry[10],
+                              'link' : entry[2],
+                              'links' : entry[11],
+                              'published' : entry[15],
+                              'tags' : entry[12],
+                              'title' : entry[1],
+                              'updated' : entry[16]}
                 iq_create_entry = XmppPubsub.create_entry(
                     self, jid_bare, node, feed_entry, feed_version)
                 await XmppIQ.send(self, iq_create_entry)
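
For reference, the indices used in the new mapping imply a column order for rows returned by get_unread_entries_of_feed. The sketch below records that assumed order; it is inferred from the indices in the hunk above, not from the schema itself (index 0 is presumably the entry id, and index 14 is unused here):

    # Assumed column order of an entries_properties row, inferred
    # from the diff above.
    COLUMNS = {1: 'title', 2: 'link', 3: 'authors', 4: 'contact',
               5: 'contributors', 6: 'content', 7: 'content_type',
               8: 'summary', 9: 'summary_type', 10: 'language',
               11: 'links', 12: 'tags', 13: 'enclosures',
               15: 'published', 16: 'updated'}

    def row_to_feed_entry(entry):
        """Build the feed_entry dict the same way the diff does."""
        return {name: entry[index] for index, name in COLUMNS.items()}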
@@ -303,12 +308,11 @@ async def xmpp_send_message(self, jid, num=None):
             title_e = result[1]
             url = result[2]
             summary = result[3]
-            enclosure = result[4]
-            feed_id = result[5]
-            date = result[6]
+            feed_id = result[4]
+            date = result[5]
             title_f = sqlite.get_feed_title(db_file, feed_id)
             title_f = title_f[0]
-            news_digest += list_unread_entries(self, result, title_f, jid)
+            news_digest += await list_unread_entries(self, result, title_f, jid)
             # print(db_file)
             # print(result[0])
             # breakpoint()
@@ -533,7 +537,7 @@ def is_feed(feed):
     return value


-def list_unread_entries(self, result, feed_title, jid):
+async def list_unread_entries(self, result, feed_title, jid):
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}: feed_title: {} jid: {}'
                  .format(function_name, feed_title, jid))
@@ -581,7 +585,7 @@ def list_unread_entries(self, result, feed_title, jid):
     # summary = "\n".join(summary)
     link = result[2]
     link = remove_tracking_parameters(link)
-    link = (replace_hostname(link, "link")) or link
+    link = await replace_hostname(link, "link") or link
     # news_item = ("\n{}\n{}\n{} [{}]\n").format(str(title), str(link),
     #                                            str(feed_title), str(ix))
     formatting = Config.get_setting_value(self.settings, jid, 'formatting')
@@ -691,11 +695,9 @@ async def list_statistics(db_file):
     logger.debug('{}: db_file: {}'
                  .format(function_name, db_file))
     entries_unread = sqlite.get_number_of_entries_unread(db_file)
-    entries = sqlite.get_number_of_items(db_file, 'entries')
-    archive = sqlite.get_number_of_items(db_file, 'archive')
-    entries_all = entries + archive
+    entries = sqlite.get_number_of_items(db_file, 'entries_properties')
     feeds_active = sqlite.get_number_of_feeds_active(db_file)
-    feeds_all = sqlite.get_number_of_items(db_file, 'feeds')
+    feeds_all = sqlite.get_number_of_items(db_file, 'feeds_properties')

     # msg = """You have {} unread news items out of {} from {} news sources.
     #       """.format(unread_entries, entries, feeds)
@@ -714,7 +716,7 @@ async def list_statistics(db_file):
                "News items : {}/{}\n"
                "News sources : {}/{}\n"
                "```").format(entries_unread,
-                             entries_all,
+                             entries,
                              feeds_active,
                              feeds_all)
     return message
@@ -762,19 +764,16 @@ def list_feeds(results):
                    .format(len(results)))
     else:
         url = pick_a_feed()
-        message = ('List of subscriptions is empty.'
+        message = ('List of subscriptions is empty. To add a feed, send a URL.'
                    '\n'
-                   'To add a feed, send a URL.'
-                   '\n'
-                   'Featured news:\n*{}*\n{}'
+                   'Featured news: *{}*\n{}'
                    .format(url['name'], url['link']))
     return message


-async def list_bookmarks(self):
+def list_bookmarks(self, conferences):
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}'.format(function_name))
-    conferences = await XmppBookmark.get(self)
     message = '\nList of groupchats:\n\n```\n'
     for conference in conferences:
         message += ('Name: {}\n'
@@ -835,36 +834,42 @@ async def import_opml(db_file, result):
     if not result['error']:
         document = result['content']
         root = ET.fromstring(document)
-        before = sqlite.get_number_of_items(db_file, 'feeds')
+        before = sqlite.get_number_of_items(db_file, 'feeds_properties')
         feeds = []
         for child in root.findall(".//outline"):
             url = child.get("xmlUrl")
             title = child.get("text")
             # feed = (url, title)
             # feeds.extend([feed])
-            feeds.extend([(url, title)])
+            feed = {
+                'title' : title,
+                'url' : url,
+                }
+            feeds.extend([feed])
         await sqlite.import_feeds(db_file, feeds)
         await sqlite.add_metadata(db_file)
-        after = sqlite.get_number_of_items(db_file, 'feeds')
+        after = sqlite.get_number_of_items(db_file, 'feeds_properties')
         difference = int(after) - int(before)
         return difference


-async def add_feed(self, jid_bare, db_file, url, node):
+async def add_feed(self, jid_bare, db_file, url, identifier):
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}: db_file: {} url: {}'
                  .format(function_name, db_file, url))
     while True:
-        exist_feed = sqlite.get_feed_id_and_name(db_file, url)
-        if not exist_feed:
-            exist_node = sqlite.check_node_exist(db_file, node)
-            if not exist_node:
+        feed_id = sqlite.get_feed_id(db_file, url)
+        if not feed_id:
+            exist_identifier = sqlite.check_identifier_exist(db_file, identifier)
+            if not exist_identifier:
                 result = await fetch.http(url)
                 message = result['message']
                 status_code = result['status_code']
                 if not result['error']:
+                    await sqlite.update_feed_status(db_file, feed_id, status_code)
                     document = result['content']
                     feed = parse(document)
+                    # if document and status_code == 200:
                     # if is_feed(url, feed):
                     if is_feed(feed):
                         if "title" in feed["feed"].keys():
@@ -887,21 +892,41 @@ async def add_feed(self, jid_bare, db_file, url, identifier):
                                 updated = ''
                         else:
                             updated = ''
-                        version = feed["version"]
-                        entries = len(feed["entries"])
-                        await sqlite.insert_feed(db_file, url, title, node,
-                                                 entries=entries,
+                        version = feed.version
+                        entries_count = len(feed.entries)
+                        await sqlite.insert_feed(db_file,
+                                                 url,
+                                                 title,
+                                                 identifier,
+                                                 entries=entries_count,
                                                  version=version,
                                                  encoding=encoding,
                                                  language=language,
                                                  status_code=status_code,
                                                  updated=updated)
-                        await scan(self, jid_bare, db_file, url)
-                        old = Config.get_setting_value(self.settings, jid_bare, 'old')
+                        feed_valid = 0 if feed.bozo else 1
+                        await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
+                        if feed.has_key('updated_parsed'):
+                            feed_updated = feed.updated_parsed
+                            try:
+                                feed_updated = dt.convert_struct_time_to_iso8601(feed_updated)
+                            except:
+                                feed_updated = None
+                        else:
+                            feed_updated = None
+                        entries_count = len(feed.entries)
+                        await sqlite.update_feed_properties(db_file, feed_id,
+                                                            entries_count,
+                                                            feed_updated)
                         feed_id = sqlite.get_feed_id(db_file, url)
                         feed_id = feed_id[0]
-                        if not old:
-                            await sqlite.mark_feed_as_read(db_file, feed_id)
+                        new_entries = get_properties_of_entries(
+                            self, jid_bare, db_file, url, feed_id, feed)
+                        if new_entries:
+                            await sqlite.add_entries_and_update_feed_state(
+                                db_file, feed_id, new_entries)
+                        old = Config.get_setting_value(self.settings, jid_bare, 'old')
+                        if not old: await sqlite.mark_feed_as_read(db_file, feed_id)
                         result_final = {'link' : url,
                                         'index' : feed_id,
                                         'name' : title,
@@ -909,7 +934,7 @@ async def add_feed(self, jid_bare, db_file, url, identifier):
                                         'error' : False,
                                         'message': message,
                                         'exist' : False,
-                                        'node' : None}
+                                        'identifier' : None}
                         break
                     # NOTE This elif statement be unnecessary
                     # when feedparser be supporting json feed.
@@ -936,9 +961,12 @@ async def add_feed(self, jid_bare, db_file, url, identifier):
                         else:
                             updated = ''
                         version = 'json' + feed["version"].split('/').pop()
-                        entries = len(feed["items"])
-                        await sqlite.insert_feed(db_file, url, title, node,
-                                                 entries=entries,
+                        entries_count = len(feed["items"])
+                        await sqlite.insert_feed(db_file,
+                                                 url,
+                                                 title,
+                                                 identifier,
+                                                 entries=entries_count,
                                                  version=version,
                                                  encoding=encoding,
                                                  language=language,
@@ -957,7 +985,7 @@ async def add_feed(self, jid_bare, db_file, url, identifier):
                                         'error' : False,
                                         'message': message,
                                         'exist' : False,
-                                        'node' : None}
+                                        'identifier' : None}
                         break
                     else:
                         # NOTE Do not be tempted to return a compact dictionary.
@@ -973,7 +1001,7 @@ async def add_feed(self, jid_bare, db_file, url, identifier):
                                     'error' : True,
                                     'message': message,
                                     'exist' : False,
-                                    'node' : None}
+                                    'identifier' : None}
                     break
             elif isinstance(result, list):
                 # Get out of the loop and deliver a list of dicts.
@@ -983,6 +1011,7 @@ async def add_feed(self, jid_bare, db_file, url, identifier):
                 # Go back up to the while loop and try again.
                 url = result['link']
             else:
+                await sqlite.update_feed_status(db_file, feed_id, status_code)
                 result_final = {'link' : url,
                                 'index' : None,
                                 'name' : None,
@@ -990,12 +1019,13 @@ async def add_feed(self, jid_bare, db_file, url, identifier):
                                 'error' : True,
                                 'message': message,
                                 'exist' : False,
-                                'node' : None}
+                                'identifier' : None}
                 break
             else:
-                ix = exist_node[1]
-                node = exist_node[2]
-                message = 'Node is already allocated.'
+                ix = exist_identifier[1]
+                identifier = exist_identifier[2]
+                message = ('Identifier "{}" is already allocated.'
+                           .format(identifier))
                 result_final = {'link' : url,
                                 'index' : ix,
                                 'name' : None,
@@ -1003,20 +1033,21 @@ async def add_feed(self, jid_bare, db_file, url, identifier):
                                 'error' : False,
                                 'message': message,
                                 'exist' : False,
-                                'node' : node}
+                                'identifier' : identifier}
                 break
         else:
-            ix = exist_feed[0]
-            name = exist_feed[1]
+            feed_id = feed_id[0]
+            title = sqlite.get_feed_title(db_file, feed_id)
+            title = title[0]
             message = 'URL already exist.'
             result_final = {'link' : url,
-                            'index' : ix,
-                            'name' : name,
+                            'index' : feed_id,
+                            'name' : title,
                             'code' : None,
                             'error' : False,
                             'message': message,
                             'exist' : True,
-                            'node' : None}
+                            'identifier' : None}
             break
     return result_final
@@ -1168,8 +1199,8 @@ async def scan_json(self, jid_bare, db_file, url):
     if len(new_entries):
         feed_id = sqlite.get_feed_id(db_file, url)
         feed_id = feed_id[0]
-        await sqlite.add_entries_and_update_timestamp(db_file, feed_id,
-                                                      new_entries)
+        await sqlite.add_entries_and_update_feed_state(db_file, feed_id,
+                                                       new_entries)


 def view_feed(url, feed):
@@ -1266,162 +1297,274 @@ def view_entry(url, feed, num):
     return response


-# TODO get all active feeds of active accounts and scan the feed with the earliest scanned time
-# TODO Rename function name (idea: scan_and_populate)
-async def scan(self, jid_bare, db_file, url):
-    """
-    Check feeds for new entries.
-
-    Parameters
-    ----------
-    db_file : str
-        Path to database file.
-    url : str, optional
-        URL. The default is None.
-    """
-    function_name = sys._getframe().f_code.co_name
-    logger.debug('{}: db_file: {} url: {}'
-                 .format(function_name, db_file, url))
-    if isinstance(url, tuple): url = url[0]
-    result = await fetch.http(url)
-    feed_id = sqlite.get_feed_id(db_file, url)
-    feed_id = feed_id[0]
-    status_code = result['status_code']
-    await sqlite.update_feed_status(db_file, feed_id, status_code)
-    if not result['error']:
-        document = result['content']
-        status = result['status_code']
-        new_entries = []
-        if document and status == 200:
-            feed = parse(document)
-            entries = feed.entries
-            # length = len(entries)
-            await remove_nonexistent_entries(self, jid_bare, db_file, url, feed)
-            try:
-                if feed.bozo:
-                    # bozo = (
-                    #     "WARNING: Bozo detected for feed: {}\n"
-                    #     "For more information, visit "
-                    #     "https://pythonhosted.org/feedparser/bozo.html"
-                    #     ).format(url)
-                    # print(bozo)
-                    valid = 0
-                else:
-                    valid = 1
-                feed_id = sqlite.get_feed_id(db_file, url)
-                feed_id = feed_id[0]
-                await sqlite.update_feed_validity(
-                    db_file, feed_id, valid)
-                if "updated_parsed" in feed["feed"].keys():
-                    updated = feed["feed"]["updated_parsed"]
-                    try:
-                        updated = dt.convert_struct_time_to_iso8601(updated)
-                    except:
-                        updated = ''
-                else:
-                    updated = ''
-                feed_id = sqlite.get_feed_id(db_file, url)
-                feed_id = feed_id[0]
-                await sqlite.update_feed_properties(db_file, feed_id,
-                                                    len(feed["entries"]), updated)
-                # await update_feed_status
-            except (IncompleteReadError, IncompleteRead, error.URLError) as e:
-                logger.error(e)
-                return
-            # new_entry = 0
-            for entry in entries:
-                logger.debug('{}: entry: {}'.format(function_name, entry.link))
-                if entry.has_key("published"):
-                    date = entry.published
-                    date = dt.rfc2822_to_iso8601(date)
-                elif entry.has_key("updated"):
-                    date = entry.updated
-                    date = dt.rfc2822_to_iso8601(date)
-                else:
-                    date = dt.now()
-                if entry.has_key("link"):
-                    # link = complete_url(source, entry.link)
-                    link = join_url(url, entry.link)
-                    link = trim_url(link)
-                else:
-                    link = url
-                # title = feed["feed"]["title"]
-                # title = "{}: *{}*".format(feed["feed"]["title"], entry.title)
-                title = entry.title if entry.has_key("title") else date
-                entry_id = entry.id if entry.has_key("id") else link
-                feed_id = sqlite.get_feed_id(db_file, url)
-                feed_id = feed_id[0]
-                exist = sqlite.check_entry_exist(db_file, feed_id,
-                                                 entry_id=entry_id,
-                                                 title=title, link=link,
-                                                 date=date)
-                if not exist:
-                    summary = entry.summary if entry.has_key("summary") else ''
-                    read_status = 0
-                    pathname = urlsplit(link).path
-                    string = (
-                        "{} {} {}"
-                        ).format(
-                            title, summary, pathname)
-                    if self.settings['default']['filter']:
-                        print('Filter is now processing data.')
-                        allow_list = config.is_include_keyword(db_file,
-                                                               "allow", string)
-                        if not allow_list:
-                            reject_list = config.is_include_keyword(db_file,
-                                                                    "deny",
-                                                                    string)
-                            if reject_list:
-                                read_status = 1
-                                logger.debug('Rejected : {}'
-                                             '\n'
-                                             'Keyword : {}'
-                                             .format(link, reject_list))
-                    if isinstance(date, int):
-                        logger.error('Variable "date" is int: {}'.format(date))
-                    media_link = ''
-                    if entry.has_key("links"):
-                        for e_link in entry.links:
-                            try:
-                                # if (link.rel == "enclosure" and
-                                #     (link.type.startswith("audio/") or
-                                #      link.type.startswith("image/") or
-                                #      link.type.startswith("video/"))
-                                #     ):
-                                media_type = e_link.type[:e_link.type.index("/")]
-                                if e_link.has_key("rel"):
-                                    if (e_link.rel == "enclosure" and
-                                        media_type in ("audio", "image", "video")):
-                                        media_link = e_link.href
-                                        media_link = join_url(url, e_link.href)
-                                        media_link = trim_url(media_link)
-                                        break
-                            except:
-                                logger.error('KeyError: "href"\n'
-                                             'Missing "href" attribute for {}'
-                                             .format(url))
-                                logger.error('Continue scanning for next '
-                                             'potential enclosure of {}'
-                                             .format(link))
-                    entry = {
-                        "title": title,
-                        "link": link,
-                        "summary": summary,
-                        "enclosure": media_link,
-                        "entry_id": entry_id,
-                        "date": date,
-                        "read_status": read_status
-                        }
-                    new_entries.extend([entry])
-                    # await sqlite.add_entry(
-                    #     db_file, title, link, entry_id,
-                    #     url, date, read_status)
-                    # await sqlite.set_date(db_file, url)
-        if len(new_entries):
-            feed_id = sqlite.get_feed_id(db_file, url)
-            feed_id = feed_id[0]
-            await sqlite.add_entries_and_update_timestamp(db_file, feed_id,
-                                                          new_entries)
+async def download_feed(self, db_file, feed_url):
+    """
+    Get feed content.
+
+    Parameters
+    ----------
+    db_file : str
+        Path to database file.
+    url : str, optional
+        URL.
+    """
+    function_name = sys._getframe().f_code.co_name
+    logger.debug('{}: db_file: {} url: {}'
+                 .format(function_name, db_file, feed_url))
+    if isinstance(feed_url, tuple): feed_url = feed_url[0]
+    result = await fetch.http(feed_url)
+    feed_id = sqlite.get_feed_id(db_file, feed_url)
+    feed_id = feed_id[0]
+    status_code = result['status_code']
+    await sqlite.update_feed_status(db_file, feed_id, status_code)
+
+
+# TODO get all active feeds of active accounts and scan the feed with the earliest scanned time
+# TODO Rename function name (idea: scan_and_populate)
+def get_properties_of_entries(self, jid_bare, db_file, feed_url, feed_id, feed):
+    """
+    Get new entries.
+
+    Parameters
+    ----------
+    db_file : str
+        Path to database file.
+    url : str, optional
+        URL.
+    """
+    print('GET', feed_url, jid_bare)
+    function_name = sys._getframe().f_code.co_name
+    logger.debug('{}: feed_id: {} url: {}'
+                 .format(function_name, feed_id, feed_url))
+
+    new_entries = []
+    for entry in feed.entries:
+        logger.debug('{}: entry: {}'.format(function_name, entry.link))
+        if entry.has_key("published"):
+            entry_published = entry.published
+            entry_published = dt.rfc2822_to_iso8601(entry_published)
+        else:
+            entry_published = ''
+        if entry.has_key("updated"):
+            entry_updated = entry.updated
+            entry_updated = dt.rfc2822_to_iso8601(entry_updated)
+        else:
+            entry_updated = dt.now()
+        if entry.has_key("link"):
+            # link = complete_url(source, entry.link)
+            entry_link = join_url(feed_url, entry.link)
+            entry_link = trim_url(entry_link)
+        else:
+            entry_link = feed_url
+        # title = feed["feed"]["title"]
+        # title = "{}: *{}*".format(feed["feed"]["title"], entry.title)
+        entry_title = entry.title if entry.has_key("title") else entry_published
+        entry_id = entry.id if entry.has_key("id") else entry_link
+        exist = sqlite.check_entry_exist(db_file, feed_id,
+                                         identifier=entry_id,
+                                         title=entry_title,
+                                         link=entry_link,
+                                         published=entry_published)
+        if not exist:
+            read_status = 0
+            # # Filter
+            # pathname = urlsplit(link).path
+            # string = (
+            #     "{} {} {}"
+            #     ).format(
+            #         title, summary, pathname)
+            # if self.settings['default']['filter']:
+            #     print('Filter is now processing data.')
+            #     allow_list = config.is_include_keyword(db_file,
+            #                                            "allow", string)
+            #     if not allow_list:
+            #         reject_list = config.is_include_keyword(db_file,
+            #                                                 "deny",
+            #                                                 string)
+            #         if reject_list:
+            #             read_status = 1
+            #             logger.debug('Rejected : {}'
+            #                          '\n'
+            #                          'Keyword : {}'
+            #                          .format(link, reject_list))
+            if isinstance(entry_published, int):
+                logger.error('Variable "published" is int: {}'.format(entry_published))
+            if isinstance(entry_updated, int):
+                logger.error('Variable "updated" is int: {}'.format(entry_updated))
+
+            # Authors
+            entry_authors =[]
+            if entry.has_key('authors'):
+                for author in entry.authors:
+                    author_properties = {
+                        'name' : author.name if author.has_key('name') else '',
+                        'url' : author.href if author.has_key('href') else '',
+                        'email' : author.email if author.has_key('email') else '',
+                        }
+                    entry_authors.extend([author_properties])
+            elif entry.has_key('author_detail'):
+                author_properties = {
+                    'name' : entry.author_detail.name if entry.author_detail.has_key('name') else '',
+                    'url' : entry.author_detail.href if entry.author_detail.has_key('href') else '',
+                    'email' : entry.author_detail.email if entry.author_detail.has_key('email') else '',
+                    }
+                entry_authors.extend([author_properties])
+            elif entry.has_key('author'):
+                author_properties = {
+                    'name' : entry.author,
+                    'url' : '',
+                    'email' : '',
+                    }
+                entry_authors.extend([author_properties])
+
+            # Contributors
+            entry_contributors = []
+            if entry.has_key('contributors'):
+                for contributor in entry.contributors:
+                    contributor_properties = {
+                        'name' : contributor.name if contributor.has_key('name') else '',
+                        'url' : contributor.href if contributor.has_key('href') else '',
+                        'email' : contributor.email if contributor.has_key('email') else '',
+                        }
+                    entry_contributors.extend([contributor_properties])
+
+            # Tags
+            entry_tags = []
+            if entry.has_key('tags'):
+                for tag in entry.tags:
+                    tag_properties = {
+                        'term' : tag.term if tag.has_key('term') else '',
+                        'scheme' : tag.scheme if tag.has_key('scheme') else '',
+                        'label' : tag.label if tag.has_key('label') else '',
+                        }
+                    entry_tags.extend([tag_properties])
+
+            # Content
+            entry_contents = []
+            if entry.has_key('content'):
+                for content in entry.content:
+                    text = content.value if content.has_key('value') else ''
+                    type = content.type if content.has_key('type') else ''
+                    lang = content.lang if content.has_key('lang') else ''
+                    base = content.base if content.has_key('base') else ''
+                    entry_content = {
+                        'text' : text,
+                        'lang' : lang,
+                        'type' : type,
+                        'base' : base,
+                        }
+                    entry_contents.extend([entry_content])
+
+            # Links and Enclosures
+            entry_links = []
+            if entry.has_key('links'):
+                for link in entry.links:
+                    link_properties = {
+                        'url' : link.href if link.has_key('href') else '',
+                        'rel' : link.rel if link.has_key('rel') else '',
+                        'type' : link.type if link.has_key('type') else '',
+                        'length' : '',
+                        }
+                    entry_links.extend([link_properties])
+            # Element media:content is utilized by Mastodon
+            if entry.has_key('media_content'):
+                for link in entry.media_content:
+                    link_properties = {
+                        'url' : link['url'] if 'url' in link else '',
+                        'rel' : 'enclosure',
+                        'type' : link['type'] if 'type' in link else '',
+                        # 'medium' : link['medium'] if 'medium' in link else '',
+                        'length' : link['filesize'] if 'filesize' in link else '',
+                        }
+                    entry_links.extend([link_properties])
+            if entry.has_key('media_thumbnail'):
+                for link in entry.media_thumbnail:
+                    link_properties = {
+                        'url' : link['url'] if 'url' in link else '',
+                        'rel' : 'enclosure',
+                        'type' : '',
+                        # 'medium' : 'image',
+                        'length' : '',
+                        }
+                    entry_links.extend([link_properties])
+
+            # Category
+            entry_category = entry.category if entry.has_key('category') else ''
+
+            # Comments
+            entry_comments = entry.comments if entry.has_key('comments') else ''
+
+            # href
+            entry_href = entry.href if entry.has_key('href') else ''
+
+            # Link: Same as entry.links[0].href in most if not all cases
+            entry_link = entry.link if entry.has_key('link') else ''
+
+            # Rating
+            entry_rating = entry.rating if entry.has_key('rating') else ''
+
+            # Summary
+            entry_summary_text = entry.summary if entry.has_key('summary') else ''
+            if entry.has_key('summary_detail'):
+                entry_summary_type = entry.summary_detail.type if entry.summary_detail.has_key('type') else ''
+                entry_summary_lang = entry.summary_detail.lang if entry.summary_detail.has_key('lang') else ''
+                entry_summary_base = entry.summary_detail.base if entry.summary_detail.has_key('base') else ''
+            else:
+                entry_summary_type = ''
+                entry_summary_lang = ''
+                entry_summary_base = ''
+
+            # Title
+            entry_title = entry.title if entry.has_key('title') else ''
+            if entry.has_key('title_detail'):
+                entry_title_type = entry.title_detail.type if entry.title_detail.has_key('type') else ''
+            else:
+                entry_title_type = ''
+
+            ###########################################################
+            # media_type = e_link.type[:e_link.type.index("/")]
+            # if (e_link.rel == "enclosure" and
+            #     media_type in ("audio", "image", "video")):
+            #     media_link = e_link.href
+            #     media_link = join_url(url, e_link.href)
+            #     media_link = trim_url(media_link)
+            ###########################################################
+
+            entry_properties = {
+                "identifier": entry_id,
+                "link": entry_link,
+                "href": entry_href,
+                "title": entry_title,
+                "title_type": entry_title_type,
+                'summary_text' : entry_summary_text,
+                'summary_lang' : entry_summary_lang,
+                'summary_type' : entry_summary_type,
+                'summary_base' : entry_summary_base,
+                'category' : entry_category,
+                "comments": entry_comments,
+                "rating": entry_rating,
+                "published": entry_published,
+                "updated": entry_updated,
+                "read_status": read_status
+                }
+            print('entry_properties')
+            print(entry_properties)
+            new_entries.extend([{
+                "entry_properties" : entry_properties,
+                "entry_authors" : entry_authors,
+                "entry_contributors" : entry_contributors,
+                "entry_contents" : entry_contents,
+                "entry_links" : entry_links,
+                "entry_tags" : entry_tags
+                }])
+            # await sqlite.add_entry(
+            #     db_file, title, link, entry_id,
+            #     url, date, read_status)
+            # await sqlite.set_date(db_file, url)
+    return new_entries


 def get_document_title(data):
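
The old scan() coroutine is split in two here: download_feed() fetches the document and records the HTTP status, while get_properties_of_entries() is an ordinary function that normalizes feedparser entries into dicts. A sketch of how a caller is expected to wire the two steps together, modeled on the check_updates loop elsewhere in this commit (refresh_feed itself is a hypothetical wrapper):

    from feedparser import parse
    import slixfeed.action as action
    import slixfeed.fetch as fetch
    import slixfeed.sqlite as sqlite

    async def refresh_feed(self, jid_bare, db_file, url):
        result = await fetch.http(url)
        feed_id = sqlite.get_feed_id(db_file, url)
        feed_id = feed_id[0]
        await sqlite.update_feed_status(db_file, feed_id, result['status_code'])
        if not result['error']:
            feed = parse(result['content'])
            # Normalization is synchronous; only the database writes await.
            new_entries = action.get_properties_of_entries(
                self, jid_bare, db_file, url, feed_id, feed)
            if new_entries:
                await sqlite.add_entries_and_update_feed_state(
                    db_file, feed_id, new_entries)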

File diff suppressed because it is too large.


@@ -68,12 +68,15 @@ except Exception as exc:
 """

 import asyncio
+from feedparser import parse
 import logging
 import os
 import slixfeed.action as action
 import slixfeed.config as config
 from slixfeed.config import Config
 # from slixfeed.dt import current_time
+import slixfeed.dt as dt
+import slixfeed.fetch as fetch
 import slixfeed.sqlite as sqlite
 # from xmpp import Slixfeed
 from slixfeed.xmpp.presence import XmppPresence
@@ -339,14 +342,67 @@ async def check_updates(self, jid_bare):
     jid : str
         Jabber ID.
     """
+    print('Scanning for updates for JID {}'.format(jid_bare))
     logging.info('Scanning for updates for JID {}'.format(jid_bare))
     while True:
         jid_file = jid_bare.replace('/', '_')
         db_file = config.get_pathname_to_database(jid_file)
         urls = sqlite.get_active_feeds_url(db_file)
         for url in urls:
-            await action.scan(self, jid_bare, db_file, url)
-            await asyncio.sleep(50)
+            url = url[0]
+            print('STA', url)
+            result = await fetch.http(url)
+            status_code = result['status_code']
+            feed_id = sqlite.get_feed_id(db_file, url)
+            feed_id = feed_id[0]
+            if not result['error']:
+                await sqlite.update_feed_status(db_file, feed_id, status_code)
+                document = result['content']
+                feed = parse(document)
+                feed_valid = 0 if feed.bozo else 1
+                await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
+                if feed.has_key('updated_parsed'):
+                    feed_updated = feed.updated_parsed
+                    try:
+                        feed_updated = dt.convert_struct_time_to_iso8601(feed_updated)
+                    except:
+                        feed_updated = ''
+                else:
+                    feed_updated = ''
+                entries_count = len(feed.entries)
+                feed_version = feed.version if feed.has_key('version') else ''
+                feed_encoding = feed.encoding if feed.has_key('encoding') else ''
+                feed_language = feed.feed.language if feed.feed.has_key('language') else ''
+                feed_icon = feed.feed.icon if feed.feed.has_key('icon') else ''
+                feed_image = feed.feed.image if feed.feed.has_key('image') else ''
+                feed_logo = feed.feed.logo if feed.feed.has_key('logo') else ''
+                feed_ttl = feed.feed.ttl if feed.feed.has_key('ttl') else ''
+                feed_properties = {
+                    "version" : feed_version,
+                    "encoding" : feed_encoding,
+                    "language" : feed_language,
+                    "rating" : '',
+                    "entries_count" : entries_count,
+                    "icon" : feed_icon,
+                    "image" : feed_image,
+                    "logo" : feed_logo,
+                    "ttl" : feed_ttl,
+                    "updated" : feed_updated,
+                    }
+                await sqlite.update_feed_properties(db_file, feed_id,
+                                                    feed_properties)
+                new_entries = action.get_properties_of_entries(
+                    self, jid_bare, db_file, url, feed_id, feed)
+                if new_entries: await sqlite.add_entries_and_update_feed_state(
+                    db_file, feed_id, new_entries)
+            print('END', url)
+            await asyncio.sleep(5)
         val = Config.get_setting_value(self.settings, jid_bare, 'check')
         await asyncio.sleep(60 * float(val))
         # Schedule to call this function again in 90 minutes


@@ -50,7 +50,7 @@ def get_hostname(url):
     return hostname


-def replace_hostname(url, url_type):
+async def replace_hostname(url, url_type):
     """
     Replace hostname.
@@ -79,6 +79,8 @@ def replace_hostname(url, url_type):
         proxy = proxies[proxy_name]
         if hostname in proxy['hostname'] and url_type in proxy['type']:
             while not url_new:
+                print('>>>')
+                print(url_new)
                 proxy_type = 'clearnet'
                 proxy_list = proxy[proxy_type]
                 if len(proxy_list):
@@ -89,10 +91,13 @@ def replace_hostname(url, url_type):
                     hostname_new = parted_proxy_url.netloc
                     url_new = urlunsplit([protocol_new, hostname_new,
                                           pathname, queries, fragment])
-                    response = fetch.http_response(url_new)
+                    print(proxy_url)
+                    print(url_new)
+                    print('>>>')
+                    response = await fetch.http(url_new)
                     if (response and
-                        response.status_code == 200 and
-                        response.reason == 'OK' and
+                        response['status_code'] == 200 and
+                        # response.reason == 'OK' and
                         url_new.startswith(proxy_url)):
                         break
                     else:
@@ -104,13 +109,16 @@ def replace_hostname(url, url_type):
                         config.backup_obsolete(proxies_obsolete_file,
                                                proxy_name, proxy_type,
                                                proxy_url)
-                        config.update_proxies(proxies_file, proxy_name,
-                                              proxy_type, proxy_url)
+                        try:
+                            config.update_proxies(proxies_file, proxy_name,
+                                                  proxy_type, proxy_url)
+                        except ValueError as e:
+                            logging.error([str(e), proxy_url])
                         url_new = None
                 else:
                     logging.warning(
-                        "No proxy URLs for {}."
-                        "Update proxies.toml".format(proxy_name))
+                        "No proxy URLs for {}. Please update proxies.toml"
+                        .format(proxy_name))
                     url_new = url
                     break
     return url_new
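
Because replace_hostname now awaits fetch.http to probe each candidate proxy, it is a coroutine and every call site must await it; the chat handlers further down were updated accordingly. A minimal sketch of the new calling convention, assuming the module is imported as uri in the style used elsewhere in this commit (the input URL is a made-up example):

    import asyncio
    import slixfeed.url as uri  # module path assumed

    async def demo():
        url = 'https://www.youtube.com/watch?v=abc'  # hypothetical input
        # Falls back to the original URL when no proxy is configured.
        url = await uri.replace_hostname(url, 'link') or url
        print(url)

    asyncio.run(demo())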


@@ -1,2 +1,2 @@
-__version__ = '0.1.52'
-__version_info__ = (0, 1, 52)
+__version__ = '0.1.53'
+__version_info__ = (0, 1, 53)


@@ -15,7 +15,7 @@ from slixmpp.plugins.xep_0048.stanza import Bookmarks

 class XmppBookmark:

-    async def get(self):
+    async def get_bookmarks(self):
         result = await self.plugin['xep_0048'].get_bookmarks()
         conferences = result['private']['bookmarks']['conferences']
         return conferences
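
With XmppBookmark.get renamed to get_bookmarks and action.list_bookmarks turned into a plain function that receives the conferences, the XMPP round trip now stays with the caller. The updated call-site pattern, taken from the chat handler hunk further down:

    # Caller-side pattern after the rename.
    conferences = await XmppBookmark.get_bookmarks(self)
    response = action.list_bookmarks(self, conferences)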

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@@ -20,20 +20,6 @@ import logging

 class XmppGroupchat:

-    # async def accept_muc_invite(self, message, ctr=None):
-    #     # if isinstance(message, str):
-    #     if not ctr:
-    #         ctr = message["from"].bare
-    #         jid = message['groupchat_invite']['jid']
-    #     else:
-    #         jid = message
-
-    def accept_invitation(self, message):
-        # operator muc_chat
-        inviter = message["from"].bare
-        jid = message['groupchat_invite']['jid']
-        self.join(self, inviter, jid)
-
     async def join(self, jid, alias=None, password=None):
         # token = await initdb(
         #     muc_jid,


@@ -319,22 +319,49 @@ async def message(self, message):
                     message_text = message_text[4:]
                     url = message_text.split(' ')[0]
                     title = ' '.join(message_text.split(' ')[1:])
-                    if not title:
-                        title = uri.get_hostname(url)
-                    counter = 0
-                    hostname = uri.get_hostname(url)
-                    node = hostname + ':' + str(counter)
-                    while True:
-                        if sqlite.check_node_exist(db_file, node):
-                            counter += 1
-                            node = hostname + ':' + str(counter)
-                        else:
-                            break
                     if url.startswith('http'):
+                        if not title:
+                            title = uri.get_hostname(url)
                         db_file = config.get_pathname_to_database(jid_file)
+                        counter = 0
+                        hostname = uri.get_hostname(url)
+                        hostname = hostname.replace('.','-')
+                        identifier = hostname + ':' + str(counter)
+                        while True:
+                            if sqlite.check_identifier_exist(db_file, identifier):
+                                counter += 1
+                                identifier = hostname + ':' + str(counter)
+                            else:
+                                break
                         exist = sqlite.get_feed_id_and_name(db_file, url)
                         if not exist:
-                            await sqlite.insert_feed(db_file, url, title, node)
+                            await sqlite.insert_feed(db_file, url, title,
+                                                     identifier)
+                            feed_id = sqlite.get_feed_id(db_file, url)
+                            feed_id = feed_id[0]
+                            document = result['content']
+                            feed = parse(document)
+                            feed_valid = 0 if feed.bozo else 1
+                            await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
+                            if feed.has_key('updated_parsed'):
+                                feed_updated = feed.updated_parsed
+                                try:
+                                    feed_updated = dt.convert_struct_time_to_iso8601(feed_updated)
+                                except:
+                                    feed_updated = None
+                            else:
+                                feed_updated = None
+                            entries_count = len(feed.entries)
+                            await sqlite.update_feed_properties(db_file, feed_id,
+                                                                entries_count,
+                                                                feed_updated)
+                            feed_id = sqlite.get_feed_id(db_file, url)
+                            feed_id = feed_id[0]
+                            new_entries = action.get_properties_of_entries(
+                                self, jid_bare, db_file, url, feed_id, feed)
+                            if new_entries:
+                                await sqlite.add_entries_and_update_feed_state(
+                                    db_file, feed_id, new_entries)
                             await action.scan(self, jid_bare, db_file, url)
                             if jid_bare not in self.settings:
                                 Config.add_settings_jid(self.settings, jid_bare,
@@ -477,7 +504,8 @@ async def message(self, message):
                     XmppMessage.send_reply(self, message, response)
                 case 'bookmarks':
                     if is_operator(self, jid_bare):
-                        response = await action.list_bookmarks(self)
+                        conferences = await XmppBookmark.get_bookmarks(self)
+                        response = action.list_bookmarks(self, conferences)
                     else:
                         response = ('This action is restricted. '
                                     'Type: viewing bookmarks.')
@@ -608,7 +636,7 @@ async def message(self, message):
                         url = ix_url
                     if url:
                         url = uri.remove_tracking_parameters(url)
-                        url = (uri.replace_hostname(url, 'link')) or url
+                        url = (await uri.replace_hostname(url, 'link')) or url
                         result = await fetch.http(url)
                         if not result['error']:
                             data = result['content']
@@ -696,15 +724,17 @@ async def message(self, message):
                         url = info[1]
                         db_file = config.get_pathname_to_database(jid)
                         if len(info) > 2:
-                            node = info[2]
+                            identifier = info[2]
                         else:
                             counter = 0
                             hostname = uri.get_hostname(url)
-                            node = hostname + ':' + str(counter)
+                            hostname = hostname.replace('.','-')
+                            identifier = hostname + ':' + str(counter)
                             while True:
-                                if sqlite.check_node_exist(db_file, node):
+                                if sqlite.check_identifier_exist(
+                                    db_file, identifier):
                                     counter += 1
-                                    node = hostname + ':' + str(counter)
+                                    identifier = hostname + ':' + str(counter)
                                 else:
                                     break
                         # task.clean_tasks_xmpp_chat(self, jid_bare, ['status'])
@@ -720,8 +750,10 @@ async def message(self, message):
                                                 status_type=status_type)
                         if url.startswith('feed:'):
                             url = uri.feed_to_http(url)
-                        url = (uri.replace_hostname(url, 'feed')) or url
-                        result = await action.add_feed(self, jid_bare, db_file, url, node)
+                        url = (await uri.replace_hostname(url, 'feed')) or url
+                        result = await action.add_feed(self, jid_bare,
+                                                       db_file, url,
+                                                       identifier)
                         if isinstance(result, list):
                             results = result
                             response = ("Web feeds found for {}\n\n```\n"
@@ -740,11 +772,11 @@ async def message(self, message):
                                         .format(result['link'],
                                                 result['name'],
                                                 result['index']))
-                        elif result['node']:
-                            response = ('> {}\nNode "{}" is already '
+                        elif result['identifier']:
+                            response = ('> {}\nIdentifier "{}" is already '
                                         'allocated to index {}'
                                         .format(result['link'],
-                                                result['node'],
+                                                result['identifier'],
                                                 result['index']))
                         elif result['error']:
                             response = ('> {}\nFailed to find subscriptions. '
@@ -776,10 +808,10 @@ async def message(self, message):
                                     '\n'
                                     'Missing argument. '
                                     'Enter PubSub JID and subscription URL '
-                                    '(and optionally: NodeName).')
+                                    '(and optionally: Identifier Name).')
                     else:
                         response = ('This action is restricted. '
-                                    'Type: adding node.')
+                                    'Type: publishing to node.')
                     XmppMessage.send_reply(self, message, response)
                 case _ if (message_lowercase.startswith('http') or
                            message_lowercase.startswith('feed:')):
@@ -797,19 +829,21 @@ async def message(self, message):
                                             status_type=status_type)
                     if url.startswith('feed:'):
                         url = uri.feed_to_http(url)
-                    url = (uri.replace_hostname(url, 'feed')) or url
+                    url = (await uri.replace_hostname(url, 'feed')) or url
                     db_file = config.get_pathname_to_database(jid_file)
                     counter = 0
                     hostname = uri.get_hostname(url)
-                    node = hostname + ':' + str(counter)
+                    hostname = hostname.replace('.','-')
+                    identifier = hostname + ':' + str(counter)
                     while True:
-                        if sqlite.check_node_exist(db_file, node):
+                        if sqlite.check_identifier_exist(db_file, identifier):
                             counter += 1
-                            node = hostname + ':' + str(counter)
+                            identifier = hostname + ':' + str(counter)
                         else:
                             break
                     # try:
-                    result = await action.add_feed(self, jid_bare, db_file, url, node)
+                    result = await action.add_feed(self, jid_bare, db_file, url,
+                                                   identifier)
                     if isinstance(result, list):
                         results = result
                         response = ("Web feeds found for {}\n\n```\n"
@@ -1122,15 +1156,15 @@ async def message(self, message):
                                             status_type=status_type)
                     if url.startswith('feed:'):
                         url = uri.feed_to_http(url)
-                    url = (uri.replace_hostname(url, 'feed')) or url
+                    url = (await uri.replace_hostname(url, 'feed')) or url
                     match len(data):
                         case 1:
                             if url.startswith('http'):
                                 while True:
                                     result = await fetch.http(url)
-                                    status = result['status_code']
                                     if not result['error']:
                                         document = result['content']
+                                        status = result['status_code']
                                         feed = parse(document)
                                         # if is_feed(url, feed):
                                         if action.is_feed(feed):
@@ -1151,7 +1185,7 @@ async def message(self, message):
                                                     .format(len(results)))
                                         break
                                     else:
-                                        url = result[0]
+                                        url = result['link']
                                 else:
                                     response = ('> {}\nFailed to load URL. Reason: {}'
                                                 .format(url, status))
@@ -1188,7 +1222,7 @@ async def message(self, message):
                                                     .format(len(results)))
                                         break
                                     else:
-                                        url = result[0]
+                                        url = result['link']
                                 else:
                                     response = ('> {}\nFailed to load URL. Reason: {}'
                                                 .format(url, status))


@@ -15,7 +15,7 @@ class XmppPubsub:

     async def get_pubsub_services(self):
-        jids = [self.boundjid.bare]
+        results = []
         iq = await self['xep_0030'].get_items(jid=self.boundjid.domain)
         items = iq['disco_items']['items']
         for item in items:
@@ -23,9 +23,13 @@ class XmppPubsub:
             identities = iq['disco_info']['identities']
             for identity in identities:
                 if identity[0] == 'pubsub' and identity[1] == 'service':
-                    jid = item[0]
-                    jids.extend([jid])
-        return jids
+                    result = {}
+                    result['jid'] = item[0]
+                    if item[1]: result['name'] = item[1]
+                    elif item[2]: result['name'] = item[2]
+                    else: result['name'] = item[0]
+                    results.extend([result])
+        return results
def delete_node(self, jid, node): def delete_node(self, jid, node):
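
get_pubsub_services now returns a list of dicts with jid and name keys instead of a bare list of JIDs, which suits the new Ad-Hoc list-single menus mentioned in the commit message. A hedged sketch of a consumer; the (value, label) pairing is an assumption about how the menu is populated, not code from this commit:

    # Hypothetical consumer: build list-single options from the dicts.
    services = await XmppPubsub.get_pubsub_services(self)
    options = [(service['jid'], service['name']) for service in services]
    # e.g. add each (value, label) pair as an option of a
    # 'list-single' field on a data form.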
@@ -44,7 +48,7 @@ class XmppPubsub:

     # TODO Make use of var "xep" with match/case (XEP-0060, XEP-0277, XEP-0472)
-    def create_node(self, jid, node, xep ,title=None, summary=None):
+    def create_node(self, jid, node, xep ,title=None, subtitle=None):
         jid_from = str(self.boundjid) if self.is_component else None
         iq = self.Iq(stype='set',
                      sto=jid,
@@ -57,7 +61,7 @@ class XmppPubsub:
                       value=title)
         form.addField('pubsub#description',
                       ftype='text-single',
-                      value=summary)
+                      value=subtitle)
         form.addField('pubsub#notify_retract',
                       ftype='boolean',
                       value=1)


@@ -17,6 +17,7 @@ def is_operator(self, jid_bare):
             break
     return result

+
 def is_moderator(self, jid_bare, jid_full):
     alias = jid_full[jid_full.index('/')+1:]
     role = self.plugin['xep_0045'].get_jid_property(jid_bare, alias, 'role')
@@ -27,6 +28,16 @@ def is_moderator(self, jid_bare, jid_full):
     return result


+def is_member(self, jid_bare, jid_full):
+    alias = jid_full[jid_full.index('/')+1:]
+    affiliation = self.plugin['xep_0045'].get_jid_property(jid_bare, alias, 'affiliation')
+    if affiliation == 'member':
+        result = True
+    else:
+        result = False
+    return result
+
+
 # TODO Rename to get_jid_type
 async def get_chat_type(self, jid):
     """