Ad-Hoc: Add more operator options.
Ad-Hoc: Add menus (list-single) for selection from a fixed list of bookmarks and contacts.
Database: Identifier (node name) includes hyphens instead of dots.
Database: SQLite database now stores more items.
Bookmarks: Improve code.
MUC: Improve code.
SQLite: Major code changes to adapt to the new table.
URL: Fix redirection (hostname switcher).
This commit is contained in:
parent
e0bc0bddf7
commit
60756dbdd2
12 changed files with 1786 additions and 1188 deletions
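Note on the identifier scheme: database identifiers (PubSub node names) are now derived from the feed's hostname with dots replaced by hyphens, plus a colon and a counter that is bumped until the identifier is free. A minimal sketch of the scheme (make_identifier is a hypothetical helper; the patch inlines this loop at each call site and asks SQLite, via check_identifier_exist, whether a candidate is taken):

    # Hypothetical helper mirroring the loop this patch inlines at call sites.
    def make_identifier(hostname, taken):
        hostname = hostname.replace('.', '-')  # dots become hyphens
        counter = 0
        identifier = hostname + ':' + str(counter)
        while identifier in taken:  # the patch queries SQLite instead of a set
            counter += 1
            identifier = hostname + ':' + str(counter)
        return identifier

    # e.g. make_identifier('planet.debian.org', set()) yields 'planet-debian-org:0'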
@@ -183,7 +183,7 @@ async def xmpp_send_status_message(self, jid):
             status_mode = 'dnd'
             status_text = jid_task[list(jid_task.keys())[0]]
         else:
-            feeds = sqlite.get_number_of_items(db_file, 'feeds')
+            feeds = sqlite.get_number_of_items(db_file, 'feeds_properties')
             # print(await current_time(), jid, "has", feeds, "feeds")
             if not feeds:
                 status_mode = 'available'
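The item counters now read from the renamed feeds_properties table. A hedged sketch of what get_number_of_items plausibly does, assuming a simple COUNT over the named table (the real SQL lives in the suppressed slixfeed/sqlite.py diff):

    import sqlite3

    # Assumed implementation sketch; the actual query is in the suppressed
    # sqlite.py diff. The table name comes from trusted call sites only
    # ('feeds_properties', 'entries_properties', ...), never from user input.
    def get_number_of_items(db_file, table):
        with sqlite3.connect(db_file) as conn:
            cursor = conn.cursor()
            return cursor.execute(
                'SELECT count(id) FROM {}'.format(table)).fetchone()[0]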
@@ -227,21 +227,22 @@ async def xmpp_send_pubsub(self, jid_bare, num=None):
     subscriptions = sqlite.get_active_feeds_url(db_file)
     for url in subscriptions:
         url = url[0]
-        feed_id = sqlite.get_feed_id(db_file, url)
-        feed_id = feed_id[0]
-        feed_title = None
-        feed_summary = None
         if jid_bare == self.boundjid.bare:
             node = 'urn:xmpp:microblog:0'
+            feed_title = None
+            feed_subtitle = None
         else:
+            feed_id = sqlite.get_feed_id(db_file, url)
+            feed_id = feed_id[0]
             feed_title = sqlite.get_feed_title(db_file, feed_id)
             feed_title = feed_title[0]
-            feed_summary = None
-            node = sqlite.get_node_name(db_file, feed_id)
+            feed_subtitle = sqlite.get_feed_subtitle(db_file, feed_id)
+            feed_subtitle = feed_subtitle[0]
+            node = sqlite.get_feed_identifier(db_file, feed_id)
             node = node[0]
         xep = None
         iq_create_node = XmppPubsub.create_node(
-            self, jid_bare, node, xep, feed_title, feed_summary)
+            self, jid_bare, node, xep, feed_title, feed_subtitle)
         await XmppIQ.send(self, iq_create_node)
         entries = sqlite.get_unread_entries_of_feed(db_file, feed_id)
         feed_properties = sqlite.get_feed_properties(db_file, feed_id)
@@ -251,17 +252,21 @@ async def xmpp_send_pubsub(self, jid_bare, num=None):
         # if num and counter < num:
         report[url] = len(entries)
         for entry in entries:
-            feed_entry = {'author' : None,
-                          'authors' : None,
-                          'category' : None,
-                          'content' : None,
-                          'description' : entry[3],
+            feed_entry = {'authors' : entry[3],
+                          'content' : entry[6],
+                          'content_type' : entry[7],
+                          'contact' : entry[4],
+                          'contributors' : entry[5],
+                          'summary' : entry[8],
+                          'summary_type' : entry[9],
+                          'enclosures' : entry[13],
+                          'language' : entry[10],
                           'link' : entry[2],
-                          'links' : entry[4],
-                          'tags' : None,
+                          'links' : entry[11],
+                          'published' : entry[15],
+                          'tags' : entry[12],
                           'title' : entry[1],
-                          'type' : None,
-                          'updated' : entry[7]}
+                          'updated' : entry[16]}
             iq_create_entry = XmppPubsub.create_entry(
                 self, jid_bare, node, feed_entry, feed_version)
             await XmppIQ.send(self, iq_create_entry)
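The feed_entry mapping above reads the entry row by numeric index (entry[3], entry[6], ...), so it silently breaks if the column order of the new table shifts. A hedged alternative sketch using sqlite3.Row, so fields are addressed by name; the column names here are assumptions inferred from the mapping, not confirmed by the suppressed sqlite.py diff:

    import sqlite3

    conn = sqlite3.connect('slixfeed.db')  # hypothetical database path
    conn.row_factory = sqlite3.Row         # rows become name-addressable
    row = conn.execute(
        'SELECT * FROM entries_properties LIMIT 1').fetchone()
    if row is not None:
        feed_entry = {'title': row['title'],      # instead of entry[1]
                      'link': row['link'],        # instead of entry[2]
                      'authors': row['authors']}  # instead of entry[3]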
@@ -303,12 +308,11 @@ async def xmpp_send_message(self, jid, num=None):
             title_e = result[1]
             url = result[2]
             summary = result[3]
-            enclosure = result[4]
-            feed_id = result[5]
-            date = result[6]
+            feed_id = result[4]
+            date = result[5]
             title_f = sqlite.get_feed_title(db_file, feed_id)
             title_f = title_f[0]
-            news_digest += list_unread_entries(self, result, title_f, jid)
+            news_digest += await list_unread_entries(self, result, title_f, jid)
             # print(db_file)
             # print(result[0])
             # breakpoint()
@@ -533,7 +537,7 @@ def is_feed(feed):
     return value


-def list_unread_entries(self, result, feed_title, jid):
+async def list_unread_entries(self, result, feed_title, jid):
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}: feed_title: {} jid: {}'
                  .format(function_name, feed_title, jid))
@@ -581,7 +585,7 @@ def list_unread_entries(self, result, feed_title, jid):
     # summary = "\n".join(summary)
     link = result[2]
     link = remove_tracking_parameters(link)
-    link = (replace_hostname(link, "link")) or link
+    link = await replace_hostname(link, "link") or link
     # news_item = ("\n{}\n{}\n{} [{}]\n").format(str(title), str(link),
     #                                            str(feed_title), str(ix))
     formatting = Config.get_setting_value(self.settings, jid, 'formatting')
@@ -691,11 +695,9 @@ async def list_statistics(db_file):
     logger.debug('{}: db_file: {}'
                  .format(function_name, db_file))
     entries_unread = sqlite.get_number_of_entries_unread(db_file)
-    entries = sqlite.get_number_of_items(db_file, 'entries')
-    archive = sqlite.get_number_of_items(db_file, 'archive')
-    entries_all = entries + archive
+    entries = sqlite.get_number_of_items(db_file, 'entries_properties')
     feeds_active = sqlite.get_number_of_feeds_active(db_file)
-    feeds_all = sqlite.get_number_of_items(db_file, 'feeds')
+    feeds_all = sqlite.get_number_of_items(db_file, 'feeds_properties')

     # msg = """You have {} unread news items out of {} from {} news sources.
     #       """.format(unread_entries, entries, feeds)
@@ -714,7 +716,7 @@ async def list_statistics(db_file):
                "News items : {}/{}\n"
                "News sources : {}/{}\n"
                "```").format(entries_unread,
-                             entries_all,
+                             entries,
                              feeds_active,
                              feeds_all)
     return message
@@ -762,19 +764,16 @@ def list_feeds(results):
                    .format(len(results)))
     else:
         url = pick_a_feed()
-        message = ('List of subscriptions is empty.'
+        message = ('List of subscriptions is empty. To add a feed, send a URL.'
                    '\n'
-                   'To add a feed, send a URL.'
-                   '\n'
-                   'Featured news:\n*{}*\n{}'
+                   'Featured news: *{}*\n{}'
                    .format(url['name'], url['link']))
     return message


-async def list_bookmarks(self):
+def list_bookmarks(self, conferences):
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}'.format(function_name))
-    conferences = await XmppBookmark.get(self)
     message = '\nList of groupchats:\n\n```\n'
     for conference in conferences:
         message += ('Name: {}\n'
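list_bookmarks is now a plain formatter: the caller performs the XMPP round trip and passes the conferences in. The new division of labour, exactly as wired up in the message handler later in this patch:

    conferences = await XmppBookmark.get_bookmarks(self)  # network I/O
    response = action.list_bookmarks(self, conferences)   # pure formatting

This keeps the formatter synchronous and testable without a live XMPP session.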
@@ -835,36 +834,42 @@ async def import_opml(db_file, result):
     if not result['error']:
         document = result['content']
         root = ET.fromstring(document)
-        before = sqlite.get_number_of_items(db_file, 'feeds')
+        before = sqlite.get_number_of_items(db_file, 'feeds_properties')
         feeds = []
         for child in root.findall(".//outline"):
             url = child.get("xmlUrl")
             title = child.get("text")
             # feed = (url, title)
             # feeds.extend([feed])
-            feeds.extend([(url, title)])
+            feed = {
+                'title' : title,
+                'url' : url,
+                }
+            feeds.extend([feed])
         await sqlite.import_feeds(db_file, feeds)
         await sqlite.add_metadata(db_file)
-        after = sqlite.get_number_of_items(db_file, 'feeds')
+        after = sqlite.get_number_of_items(db_file, 'feeds_properties')
         difference = int(after) - int(before)
         return difference


-async def add_feed(self, jid_bare, db_file, url, node):
+async def add_feed(self, jid_bare, db_file, url, identifier):
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}: db_file: {} url: {}'
                  .format(function_name, db_file, url))
     while True:
-        exist_feed = sqlite.get_feed_id_and_name(db_file, url)
-        if not exist_feed:
-            exist_node = sqlite.check_node_exist(db_file, node)
-            if not exist_node:
+        feed_id = sqlite.get_feed_id(db_file, url)
+        if not feed_id:
+            exist_identifier = sqlite.check_identifier_exist(db_file, identifier)
+            if not exist_identifier:
                 result = await fetch.http(url)
                 message = result['message']
                 status_code = result['status_code']
                 if not result['error']:
+                    await sqlite.update_feed_status(db_file, feed_id, status_code)
                     document = result['content']
                     feed = parse(document)
+                    # if document and status_code == 200:
                     # if is_feed(url, feed):
                     if is_feed(feed):
                         if "title" in feed["feed"].keys():
@@ -887,21 +892,41 @@ async def add_feed(self, jid_bare, db_file, url, node):
                                 updated = ''
                         else:
                             updated = ''
-                        version = feed["version"]
-                        entries = len(feed["entries"])
-                        await sqlite.insert_feed(db_file, url, title, node,
-                                                 entries=entries,
+                        version = feed.version
+                        entries_count = len(feed.entries)
+                        await sqlite.insert_feed(db_file,
+                                                 url,
+                                                 title,
+                                                 identifier,
+                                                 entries=entries_count,
                                                  version=version,
                                                  encoding=encoding,
                                                  language=language,
                                                  status_code=status_code,
                                                  updated=updated)
-                        await scan(self, jid_bare, db_file, url)
-                        old = Config.get_setting_value(self.settings, jid_bare, 'old')
+                        feed_valid = 0 if feed.bozo else 1
+                        await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
+                        if feed.has_key('updated_parsed'):
+                            feed_updated = feed.updated_parsed
+                            try:
+                                feed_updated = dt.convert_struct_time_to_iso8601(feed_updated)
+                            except:
+                                feed_updated = None
+                        else:
+                            feed_updated = None
+                        entries_count = len(feed.entries)
+                        await sqlite.update_feed_properties(db_file, feed_id,
+                                                            entries_count,
+                                                            feed_updated)
                         feed_id = sqlite.get_feed_id(db_file, url)
                         feed_id = feed_id[0]
-                        if not old:
-                            await sqlite.mark_feed_as_read(db_file, feed_id)
+                        new_entries = get_properties_of_entries(
+                            self, jid_bare, db_file, url, feed_id, feed)
+                        if new_entries:
+                            await sqlite.add_entries_and_update_feed_state(
+                                db_file, feed_id, new_entries)
+                        old = Config.get_setting_value(self.settings, jid_bare, 'old')
+                        if not old: await sqlite.mark_feed_as_read(db_file, feed_id)
                         result_final = {'link' : url,
                                         'index' : feed_id,
                                         'name' : title,
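feed.bozo is feedparser's flag for a malformed feed; the patch stores its inverse as a validity bit. Minimal self-contained illustration:

    import feedparser

    feed = feedparser.parse('<rss version="2.0"><channel></chan>')  # broken XML
    feed_valid = 0 if feed.bozo else 1  # same expression as in the patch
    print(feed_valid)  # 0, because feedparser set the bozo flag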
@@ -909,7 +934,7 @@ async def add_feed(self, jid_bare, db_file, url, node):
                                         'error' : False,
                                         'message': message,
                                         'exist' : False,
-                                        'node' : None}
+                                        'identifier' : None}
                         break
             # NOTE This elif statement be unnecessary
             # when feedparser be supporting json feed.
@@ -936,9 +961,12 @@ async def add_feed(self, jid_bare, db_file, url, node):
                         else:
                             updated = ''
                         version = 'json' + feed["version"].split('/').pop()
-                        entries = len(feed["items"])
-                        await sqlite.insert_feed(db_file, url, title, node,
-                                                 entries=entries,
+                        entries_count = len(feed["items"])
+                        await sqlite.insert_feed(db_file,
+                                                 url,
+                                                 title,
+                                                 identifier,
+                                                 entries=entries_count,
                                                  version=version,
                                                  encoding=encoding,
                                                  language=language,
@@ -957,7 +985,7 @@ async def add_feed(self, jid_bare, db_file, url, node):
                                         'error' : False,
                                         'message': message,
                                         'exist' : False,
-                                        'node' : None}
+                                        'identifier' : None}
                         break
                 else:
                     # NOTE Do not be tempted to return a compact dictionary.
@@ -973,7 +1001,7 @@ async def add_feed(self, jid_bare, db_file, url, node):
                                     'error' : True,
                                     'message': message,
                                     'exist' : False,
-                                    'node' : None}
+                                    'identifier' : None}
                     break
             elif isinstance(result, list):
                 # Get out of the loop and deliver a list of dicts.
@@ -983,6 +1011,7 @@ async def add_feed(self, jid_bare, db_file, url, node):
                     # Go back up to the while loop and try again.
                     url = result['link']
                 else:
+                    await sqlite.update_feed_status(db_file, feed_id, status_code)
                     result_final = {'link' : url,
                                     'index' : None,
                                     'name' : None,
@@ -990,12 +1019,13 @@ async def add_feed(self, jid_bare, db_file, url, node):
                                     'error' : True,
                                     'message': message,
                                     'exist' : False,
-                                    'node' : None}
+                                    'identifier' : None}
                     break
             else:
-                ix = exist_node[1]
-                node = exist_node[2]
-                message = 'Node is already allocated.'
+                ix = exist_identifier[1]
+                identifier = exist_identifier[2]
+                message = ('Identifier "{}" is already allocated.'
+                           .format(identifier))
                 result_final = {'link' : url,
                                 'index' : ix,
                                 'name' : None,
@@ -1003,20 +1033,21 @@ async def add_feed(self, jid_bare, db_file, url, node):
                                 'error' : False,
                                 'message': message,
                                 'exist' : False,
-                                'node' : node}
+                                'identifier' : identifier}
                 break
         else:
-            ix = exist_feed[0]
-            name = exist_feed[1]
+            feed_id = feed_id[0]
+            title = sqlite.get_feed_title(db_file, feed_id)
+            title = title[0]
             message = 'URL already exist.'
             result_final = {'link' : url,
-                            'index' : ix,
-                            'name' : name,
+                            'index' : feed_id,
+                            'name' : title,
                             'code' : None,
                             'error' : False,
                             'message': message,
                             'exist' : True,
-                            'node' : None}
+                            'identifier' : None}
             break
     return result_final

@@ -1168,7 +1199,7 @@ async def scan_json(self, jid_bare, db_file, url):
     if len(new_entries):
         feed_id = sqlite.get_feed_id(db_file, url)
         feed_id = feed_id[0]
-        await sqlite.add_entries_and_update_timestamp(db_file, feed_id,
+        await sqlite.add_entries_and_update_feed_state(db_file, feed_id,
                                                       new_entries)

@@ -1266,162 +1297,274 @@ def view_entry(url, feed, num):
     return response


-# TODO get all active feeds of active accounts and scan the feed with the earliest scanned time
-# TODO Rename function name (idea: scan_and_populate)
-async def scan(self, jid_bare, db_file, url):
+async def download_feed(self, db_file, feed_url):
     """
-    Check feeds for new entries.
+    Get feed content.

     Parameters
     ----------
     db_file : str
         Path to database file.
     url : str, optional
-        URL. The default is None.
+        URL.
     """
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}: db_file: {} url: {}'
-                 .format(function_name, db_file, url))
-    if isinstance(url, tuple): url = url[0]
-    result = await fetch.http(url)
-    feed_id = sqlite.get_feed_id(db_file, url)
+                 .format(function_name, db_file, feed_url))
+    if isinstance(feed_url, tuple): feed_url = feed_url[0]
+    result = await fetch.http(feed_url)
+    feed_id = sqlite.get_feed_id(db_file, feed_url)
     feed_id = feed_id[0]
     status_code = result['status_code']
     await sqlite.update_feed_status(db_file, feed_id, status_code)
-    if not result['error']:
-        document = result['content']
-        status = result['status_code']
+
+
+# TODO get all active feeds of active accounts and scan the feed with the earliest scanned time
+# TODO Rename function name (idea: scan_and_populate)
+def get_properties_of_entries(self, jid_bare, db_file, feed_url, feed_id, feed):
+    """
+    Get new entries.
+
+    Parameters
+    ----------
+    db_file : str
+        Path to database file.
+    url : str, optional
+        URL.
+    """
+    print('GET', feed_url, jid_bare)
+    function_name = sys._getframe().f_code.co_name
+    logger.debug('{}: feed_id: {} url: {}'
+                 .format(function_name, feed_id, feed_url))
+
     new_entries = []
-    if document and status == 200:
-        feed = parse(document)
-        entries = feed.entries
-        # length = len(entries)
-        await remove_nonexistent_entries(self, jid_bare, db_file, url, feed)
-        try:
-            if feed.bozo:
-                # bozo = (
-                #     "WARNING: Bozo detected for feed: {}\n"
-                #     "For more information, visit "
-                #     "https://pythonhosted.org/feedparser/bozo.html"
-                # ).format(url)
-                # print(bozo)
-                valid = 0
-            else:
-                valid = 1
-            feed_id = sqlite.get_feed_id(db_file, url)
-            feed_id = feed_id[0]
-            await sqlite.update_feed_validity(
-                db_file, feed_id, valid)
-            if "updated_parsed" in feed["feed"].keys():
-                updated = feed["feed"]["updated_parsed"]
-                try:
-                    updated = dt.convert_struct_time_to_iso8601(updated)
-                except:
-                    updated = ''
-            else:
-                updated = ''
-            feed_id = sqlite.get_feed_id(db_file, url)
-            feed_id = feed_id[0]
-            await sqlite.update_feed_properties(db_file, feed_id,
-                                                len(feed["entries"]), updated)
-            # await update_feed_status
-        except (IncompleteReadError, IncompleteRead, error.URLError) as e:
-            logger.error(e)
-            return
-        # new_entry = 0
-        for entry in entries:
+    for entry in feed.entries:
         logger.debug('{}: entry: {}'.format(function_name, entry.link))
         if entry.has_key("published"):
-            date = entry.published
-            date = dt.rfc2822_to_iso8601(date)
-        elif entry.has_key("updated"):
-            date = entry.updated
-            date = dt.rfc2822_to_iso8601(date)
+            entry_published = entry.published
+            entry_published = dt.rfc2822_to_iso8601(entry_published)
         else:
-            date = dt.now()
+            entry_published = ''
+        if entry.has_key("updated"):
+            entry_updated = entry.updated
+            entry_updated = dt.rfc2822_to_iso8601(entry_updated)
+        else:
+            entry_updated = dt.now()
         if entry.has_key("link"):
             # link = complete_url(source, entry.link)
-            link = join_url(url, entry.link)
-            link = trim_url(link)
+            entry_link = join_url(feed_url, entry.link)
+            entry_link = trim_url(entry_link)
         else:
-            link = url
+            entry_link = feed_url
         # title = feed["feed"]["title"]
         # title = "{}: *{}*".format(feed["feed"]["title"], entry.title)
-        title = entry.title if entry.has_key("title") else date
-        entry_id = entry.id if entry.has_key("id") else link
-        feed_id = sqlite.get_feed_id(db_file, url)
-        feed_id = feed_id[0]
+        entry_title = entry.title if entry.has_key("title") else entry_published
+        entry_id = entry.id if entry.has_key("id") else entry_link
         exist = sqlite.check_entry_exist(db_file, feed_id,
-                                         entry_id=entry_id,
-                                         title=title, link=link,
-                                         date=date)
+                                         identifier=entry_id,
+                                         title=entry_title,
+                                         link=entry_link,
+                                         published=entry_published)
         if not exist:
-            summary = entry.summary if entry.has_key("summary") else ''
             read_status = 0
-            pathname = urlsplit(link).path
-            string = (
-                "{} {} {}"
-                ).format(
-                    title, summary, pathname)
-            if self.settings['default']['filter']:
-                print('Filter is now processing data.')
-                allow_list = config.is_include_keyword(db_file,
-                                                       "allow", string)
-                if not allow_list:
-                    reject_list = config.is_include_keyword(db_file,
-                                                            "deny",
-                                                            string)
-                    if reject_list:
-                        read_status = 1
-                        logger.debug('Rejected : {}'
-                                     '\n'
-                                     'Keyword : {}'
-                                     .format(link, reject_list))
-            if isinstance(date, int):
-                logger.error('Variable "date" is int: {}'.format(date))
-            media_link = ''
-            if entry.has_key("links"):
-                for e_link in entry.links:
-                    try:
-                        # if (link.rel == "enclosure" and
-                        #     (link.type.startswith("audio/") or
-                        #      link.type.startswith("image/") or
-                        #      link.type.startswith("video/"))
-                        #     ):
-                        media_type = e_link.type[:e_link.type.index("/")]
-                        if e_link.has_key("rel"):
-                            if (e_link.rel == "enclosure" and
-                                media_type in ("audio", "image", "video")):
-                                media_link = e_link.href
-                                media_link = join_url(url, e_link.href)
-                                media_link = trim_url(media_link)
-                                break
-                    except:
-                        logger.error('KeyError: "href"\n'
-                                     'Missing "href" attribute for {}'
-                                     .format(url))
-                        logger.error('Continue scanning for next '
-                                     'potential enclosure of {}'
-                                     .format(link))
-            entry = {
-                "title": title,
-                "link": link,
-                "summary": summary,
-                "enclosure": media_link,
-                "entry_id": entry_id,
-                "date": date,
+            # # Filter
+            # pathname = urlsplit(link).path
+            # string = (
+            #     "{} {} {}"
+            #     ).format(
+            #         title, summary, pathname)
+            # if self.settings['default']['filter']:
+            #     print('Filter is now processing data.')
+            #     allow_list = config.is_include_keyword(db_file,
+            #                                            "allow", string)
+            #     if not allow_list:
+            #         reject_list = config.is_include_keyword(db_file,
+            #                                                 "deny",
+            #                                                 string)
+            #         if reject_list:
+            #             read_status = 1
+            #             logger.debug('Rejected : {}'
+            #                          '\n'
+            #                          'Keyword : {}'
+            #                          .format(link, reject_list))
+            if isinstance(entry_published, int):
+                logger.error('Variable "published" is int: {}'.format(entry_published))
+            if isinstance(entry_updated, int):
+                logger.error('Variable "updated" is int: {}'.format(entry_updated))
+
+            # Authors
+            entry_authors =[]
+            if entry.has_key('authors'):
+                for author in entry.authors:
+                    author_properties = {
+                        'name' : author.name if author.has_key('name') else '',
+                        'url' : author.href if author.has_key('href') else '',
+                        'email' : author.email if author.has_key('email') else '',
+                        }
+                    entry_authors.extend([author_properties])
+            elif entry.has_key('author_detail'):
+                author_properties = {
+                    'name' : entry.author_detail.name if entry.author_detail.has_key('name') else '',
+                    'url' : entry.author_detail.href if entry.author_detail.has_key('href') else '',
+                    'email' : entry.author_detail.email if entry.author_detail.has_key('email') else '',
+                    }
+                entry_authors.extend([author_properties])
+            elif entry.has_key('author'):
+                author_properties = {
+                    'name' : entry.author,
+                    'url' : '',
+                    'email' : '',
+                    }
+                entry_authors.extend([author_properties])
+
+            # Contributors
+            entry_contributors = []
+            if entry.has_key('contributors'):
+                for contributor in entry.contributors:
+                    contributor_properties = {
+                        'name' : contributor.name if contributor.has_key('name') else '',
+                        'url' : contributor.href if contributor.has_key('href') else '',
+                        'email' : contributor.email if contributor.has_key('email') else '',
+                        }
+                    entry_contributors.extend([contributor_properties])
+
+            # Tags
+            entry_tags = []
+            if entry.has_key('tags'):
+                for tag in entry.tags:
+                    tag_properties = {
+                        'term' : tag.term if tag.has_key('term') else '',
+                        'scheme' : tag.scheme if tag.has_key('scheme') else '',
+                        'label' : tag.label if tag.has_key('label') else '',
+                        }
+                    entry_tags.extend([tag_properties])
+
+            # Content
+            entry_contents = []
+            if entry.has_key('content'):
+                for content in entry.content:
+                    text = content.value if content.has_key('value') else ''
+                    type = content.type if content.has_key('type') else ''
+                    lang = content.lang if content.has_key('lang') else ''
+                    base = content.base if content.has_key('base') else ''
+                    entry_content = {
+                        'text' : text,
+                        'lang' : lang,
+                        'type' : type,
+                        'base' : base,
+                        }
+                    entry_contents.extend([entry_content])
+
+            # Links and Enclosures
+            entry_links = []
+            if entry.has_key('links'):
+                for link in entry.links:
+                    link_properties = {
+                        'url' : link.href if link.has_key('href') else '',
+                        'rel' : link.rel if link.has_key('rel') else '',
+                        'type' : link.type if link.has_key('type') else '',
+                        'length' : '',
+                        }
+                    entry_links.extend([link_properties])
+            # Element media:content is utilized by Mastodon
+            if entry.has_key('media_content'):
+                for link in entry.media_content:
+                    link_properties = {
+                        'url' : link['url'] if 'url' in link else '',
+                        'rel' : 'enclosure',
+                        'type' : link['type'] if 'type' in link else '',
+                        # 'medium' : link['medium'] if 'medium' in link else '',
+                        'length' : link['filesize'] if 'filesize' in link else '',
+                        }
+                    entry_links.extend([link_properties])
+            if entry.has_key('media_thumbnail'):
+                for link in entry.media_thumbnail:
+                    link_properties = {
+                        'url' : link['url'] if 'url' in link else '',
+                        'rel' : 'enclosure',
+                        'type' : '',
+                        # 'medium' : 'image',
+                        'length' : '',
+                        }
+                    entry_links.extend([link_properties])
+
+            # Category
+            entry_category = entry.category if entry.has_key('category') else ''
+
+            # Comments
+            entry_comments = entry.comments if entry.has_key('comments') else ''
+
+            # href
+            entry_href = entry.href if entry.has_key('href') else ''
+
+            # Link: Same as entry.links[0].href in most if not all cases
+            entry_link = entry.link if entry.has_key('link') else ''
+
+            # Rating
+            entry_rating = entry.rating if entry.has_key('rating') else ''
+
+            # Summary
+            entry_summary_text = entry.summary if entry.has_key('summary') else ''
+            if entry.has_key('summary_detail'):
+                entry_summary_type = entry.summary_detail.type if entry.summary_detail.has_key('type') else ''
+                entry_summary_lang = entry.summary_detail.lang if entry.summary_detail.has_key('lang') else ''
+                entry_summary_base = entry.summary_detail.base if entry.summary_detail.has_key('base') else ''
+            else:
+                entry_summary_type = ''
+                entry_summary_lang = ''
+                entry_summary_base = ''
+
+            # Title
+            entry_title = entry.title if entry.has_key('title') else ''
+            if entry.has_key('title_detail'):
+                entry_title_type = entry.title_detail.type if entry.title_detail.has_key('type') else ''
+            else:
+                entry_title_type = ''
+
+            ###########################################################
+
+            # media_type = e_link.type[:e_link.type.index("/")]
+            # if (e_link.rel == "enclosure" and
+            #     media_type in ("audio", "image", "video")):
+            #     media_link = e_link.href
+            #     media_link = join_url(url, e_link.href)
+            #     media_link = trim_url(media_link)
+
+            ###########################################################
+
+            entry_properties = {
+                "identifier": entry_id,
+                "link": entry_link,
+                "href": entry_href,
+                "title": entry_title,
+                "title_type": entry_title_type,
+                'summary_text' : entry_summary_text,
+                'summary_lang' : entry_summary_lang,
+                'summary_type' : entry_summary_type,
+                'summary_base' : entry_summary_base,
+                'category' : entry_category,
+                "comments": entry_comments,
+                "rating": entry_rating,
+                "published": entry_published,
+                "updated": entry_updated,
                 "read_status": read_status
                 }
-            new_entries.extend([entry])
+            print('entry_properties')
+            print(entry_properties)
+
+            new_entries.extend([{
+                "entry_properties" : entry_properties,
+                "entry_authors" : entry_authors,
+                "entry_contributors" : entry_contributors,
+                "entry_contents" : entry_contents,
+                "entry_links" : entry_links,
+                "entry_tags" : entry_tags
+                }])
             # await sqlite.add_entry(
             #     db_file, title, link, entry_id,
             #     url, date, read_status)
             # await sqlite.set_date(db_file, url)
-        if len(new_entries):
-            feed_id = sqlite.get_feed_id(db_file, url)
-            feed_id = feed_id[0]
-            await sqlite.add_entries_and_update_timestamp(db_file, feed_id,
-                                                          new_entries)
+    return new_entries


 def get_document_title(data):
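The old scan() coroutine fetched, validated and stored in one pass. It is now split: download_feed() only fetches and records the HTTP status, and get_properties_of_entries() (a regular function, no longer a coroutine) turns an already-parsed feed into a list of entry dicts (entry_properties plus authors, contributors, contents, links and tags). Callers compose the steps themselves; this composition appears verbatim in the check_updates task below:

    result = await fetch.http(url)
    if not result['error']:
        feed = parse(result['content'])
        new_entries = action.get_properties_of_entries(
            self, jid_bare, db_file, url, feed_id, feed)
        if new_entries:
            await sqlite.add_entries_and_update_feed_state(
                db_file, feed_id, new_entries)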
slixfeed/sqlite.py (1049 changes): file diff suppressed because it is too large.
@@ -68,12 +68,15 @@ except Exception as exc:
 """

 import asyncio
+from feedparser import parse
 import logging
 import os
 import slixfeed.action as action
 import slixfeed.config as config
 from slixfeed.config import Config
 # from slixfeed.dt import current_time
+import slixfeed.dt as dt
+import slixfeed.fetch as fetch
 import slixfeed.sqlite as sqlite
 # from xmpp import Slixfeed
 from slixfeed.xmpp.presence import XmppPresence
@@ -339,14 +342,67 @@ async def check_updates(self, jid_bare):
     jid : str
         Jabber ID.
     """
+    print('Scanning for updates for JID {}'.format(jid_bare))
     logging.info('Scanning for updates for JID {}'.format(jid_bare))
     while True:
         jid_file = jid_bare.replace('/', '_')
         db_file = config.get_pathname_to_database(jid_file)
         urls = sqlite.get_active_feeds_url(db_file)
         for url in urls:
-            await action.scan(self, jid_bare, db_file, url)
-        await asyncio.sleep(50)
+            url = url[0]
+            print('STA',url)
+
+            result = await fetch.http(url)
+            status_code = result['status_code']
+            feed_id = sqlite.get_feed_id(db_file, url)
+            feed_id = feed_id[0]
+            if not result['error']:
+                await sqlite.update_feed_status(db_file, feed_id, status_code)
+                document = result['content']
+                feed = parse(document)
+
+                feed_valid = 0 if feed.bozo else 1
+                await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
+
+                if feed.has_key('updated_parsed'):
+                    feed_updated = feed.updated_parsed
+                    try:
+                        feed_updated = dt.convert_struct_time_to_iso8601(feed_updated)
+                    except:
+                        feed_updated = ''
+                else:
+                    feed_updated = ''
+
+                entries_count = len(feed.entries)
+
+                feed_version = feed.version if feed.has_key('version') else ''
+                feed_encoding = feed.encoding if feed.has_key('encoding') else ''
+                feed_language = feed.feed.language if feed.feed.has_key('language') else ''
+                feed_icon = feed.feed.icon if feed.feed.has_key('icon') else ''
+                feed_image = feed.feed.image if feed.feed.has_key('image') else ''
+                feed_logo = feed.feed.logo if feed.feed.has_key('logo') else ''
+                feed_ttl = feed.feed.ttl if feed.feed.has_key('ttl') else ''
+
+                feed_properties = {
+                    "version" : feed_version,
+                    "encoding" : feed_encoding,
+                    "language" : feed_language,
+                    "rating" : '',
+                    "entries_count" : entries_count,
+                    "icon" : feed_icon,
+                    "image" : feed_image,
+                    "logo" : feed_logo,
+                    "ttl" : feed_ttl,
+                    "updated" : feed_updated,
+                    }
+                await sqlite.update_feed_properties(db_file, feed_id,
+                                                    feed_properties)
+                new_entries = action.get_properties_of_entries(
+                    self, jid_bare, db_file, url, feed_id, feed)
+                if new_entries: await sqlite.add_entries_and_update_feed_state(
+                    db_file, feed_id, new_entries)
+            print('END', url)
+            await asyncio.sleep(5)
         val = Config.get_setting_value(self.settings, jid_bare, 'check')
         await asyncio.sleep(60 * float(val))
         # Schedule to call this function again in 90 minutes
@@ -50,7 +50,7 @@ def get_hostname(url):
     return hostname


-def replace_hostname(url, url_type):
+async def replace_hostname(url, url_type):
     """
     Replace hostname.

@@ -79,6 +79,8 @@ def replace_hostname(url, url_type):
         proxy = proxies[proxy_name]
         if hostname in proxy['hostname'] and url_type in proxy['type']:
             while not url_new:
+                print('>>>')
+                print(url_new)
                 proxy_type = 'clearnet'
                 proxy_list = proxy[proxy_type]
                 if len(proxy_list):
@@ -89,10 +91,13 @@ def replace_hostname(url, url_type):
                     hostname_new = parted_proxy_url.netloc
                     url_new = urlunsplit([protocol_new, hostname_new,
                                           pathname, queries, fragment])
-                    response = fetch.http_response(url_new)
+                    print(proxy_url)
+                    print(url_new)
+                    print('>>>')
+                    response = await fetch.http(url_new)
                     if (response and
-                        response.status_code == 200 and
-                        response.reason == 'OK' and
+                        response['status_code'] == 200 and
+                        # response.reason == 'OK' and
                         url_new.startswith(proxy_url)):
                         break
                     else:
@@ -104,13 +109,16 @@ def replace_hostname(url, url_type):
                         config.backup_obsolete(proxies_obsolete_file,
                                                proxy_name, proxy_type,
                                                proxy_url)
-                        config.update_proxies(proxies_file, proxy_name,
-                                              proxy_type, proxy_url)
+                        try:
+                            config.update_proxies(proxies_file, proxy_name,
+                                                  proxy_type, proxy_url)
+                        except ValueError as e:
+                            logging.error([str(e), proxy_url])
                         url_new = None
         else:
             logging.warning(
-                "No proxy URLs for {}."
-                "Update proxies.toml".format(proxy_name))
+                "No proxy URLs for {}. Please update proxies.toml"
+                .format(proxy_name))
             url_new = url
             break
     return url_new
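replace_hostname is now a coroutine because it probes candidate proxy mirrors with fetch.http, which must be awaited and returns a dict (hence response['status_code'] instead of response.status_code). Every call site therefore gains an await; the pattern used throughout the rest of this patch:

    url = await replace_hostname(url, 'feed') or url  # fall back to the original URL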
@@ -1,2 +1,2 @@
-__version__ = '0.1.52'
-__version_info__ = (0, 1, 52)
+__version__ = '0.1.53'
+__version_info__ = (0, 1, 53)
@@ -15,7 +15,7 @@ from slixmpp.plugins.xep_0048.stanza import Bookmarks
 class XmppBookmark:


-    async def get(self):
+    async def get_bookmarks(self):
         result = await self.plugin['xep_0048'].get_bookmarks()
         conferences = result['private']['bookmarks']['conferences']
         return conferences
(Two further file diffs suppressed because they are too large.)
@@ -20,20 +20,6 @@ import logging
 class XmppGroupchat:

-    # async def accept_muc_invite(self, message, ctr=None):
-    #     # if isinstance(message, str):
-    #     if not ctr:
-    #         ctr = message["from"].bare
-    #         jid = message['groupchat_invite']['jid']
-    #     else:
-    #         jid = message
-    def accept_invitation(self, message):
-        # operator muc_chat
-        inviter = message["from"].bare
-        jid = message['groupchat_invite']['jid']
-        self.join(self, inviter, jid)
-
-
     async def join(self, jid, alias=None, password=None):
         # token = await initdb(
         #     muc_jid,
@@ -319,22 +319,49 @@ async def message(self, message):
                     message_text = message_text[4:]
                     url = message_text.split(' ')[0]
                     title = ' '.join(message_text.split(' ')[1:])
+                    if url.startswith('http'):
                         if not title:
                             title = uri.get_hostname(url)
+                        db_file = config.get_pathname_to_database(jid_file)
                         counter = 0
                         hostname = uri.get_hostname(url)
-                        node = hostname + ':' + str(counter)
+                        hostname = hostname.replace('.','-')
+                        identifier = hostname + ':' + str(counter)
                         while True:
-                            if sqlite.check_node_exist(db_file, node):
+                            if sqlite.check_identifier_exist(db_file, identifier):
                                 counter += 1
-                                node = hostname + ':' + str(counter)
+                                identifier = hostname + ':' + str(counter)
                             else:
                                 break
-                    if url.startswith('http'):
-                        db_file = config.get_pathname_to_database(jid_file)
                         exist = sqlite.get_feed_id_and_name(db_file, url)
                         if not exist:
-                            await sqlite.insert_feed(db_file, url, title, node)
+                            await sqlite.insert_feed(db_file, url, title,
+                                                     identifier)
+                            feed_id = sqlite.get_feed_id(db_file, url)
+                            feed_id = feed_id[0]
+                            document = result['content']
+                            feed = parse(document)
+                            feed_valid = 0 if feed.bozo else 1
+                            await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
+                            if feed.has_key('updated_parsed'):
+                                feed_updated = feed.updated_parsed
+                                try:
+                                    feed_updated = dt.convert_struct_time_to_iso8601(feed_updated)
+                                except:
+                                    feed_updated = None
+                            else:
+                                feed_updated = None
+                            entries_count = len(feed.entries)
+                            await sqlite.update_feed_properties(db_file, feed_id,
+                                                                entries_count,
+                                                                feed_updated)
+                            feed_id = sqlite.get_feed_id(db_file, url)
+                            feed_id = feed_id[0]
+                            new_entries = action.get_properties_of_entries(
+                                self, jid_bare, db_file, url, feed_id, feed)
+                            if new_entries:
+                                await sqlite.add_entries_and_update_feed_state(
+                                    db_file, feed_id, new_entries)
                             await action.scan(self, jid_bare, db_file, url)
                     if jid_bare not in self.settings:
                         Config.add_settings_jid(self.settings, jid_bare,
@@ -477,7 +504,8 @@ async def message(self, message):
                     XmppMessage.send_reply(self, message, response)
                 case 'bookmarks':
                     if is_operator(self, jid_bare):
-                        response = await action.list_bookmarks(self)
+                        conferences = await XmppBookmark.get_bookmarks(self)
+                        response = action.list_bookmarks(self, conferences)
                     else:
                         response = ('This action is restricted. '
                                     'Type: viewing bookmarks.')
@@ -608,7 +636,7 @@ async def message(self, message):
                             url = ix_url
                     if url:
                         url = uri.remove_tracking_parameters(url)
-                        url = (uri.replace_hostname(url, 'link')) or url
+                        url = (await uri.replace_hostname(url, 'link')) or url
                         result = await fetch.http(url)
                         if not result['error']:
                             data = result['content']
@@ -696,15 +724,17 @@ async def message(self, message):
                         url = info[1]
                         db_file = config.get_pathname_to_database(jid)
                         if len(info) > 2:
-                            node = info[2]
+                            identifier = info[2]
                         else:
                             counter = 0
                             hostname = uri.get_hostname(url)
-                            node = hostname + ':' + str(counter)
+                            hostname = hostname.replace('.','-')
+                            identifier = hostname + ':' + str(counter)
                             while True:
-                                if sqlite.check_node_exist(db_file, node):
+                                if sqlite.check_identifier_exist(
+                                    db_file, identifier):
                                     counter += 1
-                                    node = hostname + ':' + str(counter)
+                                    identifier = hostname + ':' + str(counter)
                                 else:
                                     break
                         # task.clean_tasks_xmpp_chat(self, jid_bare, ['status'])
@@ -720,8 +750,10 @@ async def message(self, message):
                                 status_type=status_type)
                         if url.startswith('feed:'):
                             url = uri.feed_to_http(url)
-                        url = (uri.replace_hostname(url, 'feed')) or url
-                        result = await action.add_feed(self, jid_bare, db_file, url, node)
+                        url = (await uri.replace_hostname(url, 'feed')) or url
+                        result = await action.add_feed(self, jid_bare,
+                                                       db_file, url,
+                                                       identifier)
                         if isinstance(result, list):
                             results = result
                             response = ("Web feeds found for {}\n\n```\n"
@@ -740,11 +772,11 @@ async def message(self, message):
                                         .format(result['link'],
                                                 result['name'],
                                                 result['index']))
-                        elif result['node']:
-                            response = ('> {}\nNode "{}" is already '
+                        elif result['identifier']:
+                            response = ('> {}\nIdentifier "{}" is already '
                                         'allocated to index {}'
                                         .format(result['link'],
-                                                result['node'],
+                                                result['identifier'],
                                                 result['index']))
                         elif result['error']:
                             response = ('> {}\nFailed to find subscriptions. '
|
||||||
'\n'
|
'\n'
|
||||||
'Missing argument. '
|
'Missing argument. '
|
||||||
'Enter PubSub JID and subscription URL '
|
'Enter PubSub JID and subscription URL '
|
||||||
'(and optionally: NodeName).')
|
'(and optionally: Identifier Name).')
|
||||||
else:
|
else:
|
||||||
response = ('This action is restricted. '
|
response = ('This action is restricted. '
|
||||||
'Type: adding node.')
|
'Type: publishing to node.')
|
||||||
XmppMessage.send_reply(self, message, response)
|
XmppMessage.send_reply(self, message, response)
|
||||||
case _ if (message_lowercase.startswith('http') or
|
case _ if (message_lowercase.startswith('http') or
|
||||||
message_lowercase.startswith('feed:')):
|
message_lowercase.startswith('feed:')):
|
||||||
|
@@ -797,19 +829,21 @@ async def message(self, message):
                             status_type=status_type)
                     if url.startswith('feed:'):
                         url = uri.feed_to_http(url)
-                    url = (uri.replace_hostname(url, 'feed')) or url
+                    url = (await uri.replace_hostname(url, 'feed')) or url
                     db_file = config.get_pathname_to_database(jid_file)
                     counter = 0
                     hostname = uri.get_hostname(url)
-                    node = hostname + ':' + str(counter)
+                    hostname = hostname.replace('.','-')
+                    identifier = hostname + ':' + str(counter)
                     while True:
-                        if sqlite.check_node_exist(db_file, node):
+                        if sqlite.check_identifier_exist(db_file, identifier):
                             counter += 1
-                            node = hostname + ':' + str(counter)
+                            identifier = hostname + ':' + str(counter)
                         else:
                             break
                     # try:
-                    result = await action.add_feed(self, jid_bare, db_file, url, node)
+                    result = await action.add_feed(self, jid_bare, db_file, url,
+                                                   identifier)
                     if isinstance(result, list):
                         results = result
                         response = ("Web feeds found for {}\n\n```\n"
@@ -1122,15 +1156,15 @@ async def message(self, message):
                             status_type=status_type)
                     if url.startswith('feed:'):
                         url = uri.feed_to_http(url)
-                    url = (uri.replace_hostname(url, 'feed')) or url
+                    url = (await uri.replace_hostname(url, 'feed')) or url
                     match len(data):
                         case 1:
                             if url.startswith('http'):
                                 while True:
                                     result = await fetch.http(url)
+                                    status = result['status_code']
                                     if not result['error']:
                                         document = result['content']
-                                        status = result['status_code']
                                         feed = parse(document)
                                         # if is_feed(url, feed):
                                         if action.is_feed(feed):
@@ -1151,7 +1185,7 @@ async def message(self, message):
                                                     .format(len(results)))
                                         break
                                     else:
-                                        url = result[0]
+                                        url = result['link']
                                 else:
                                     response = ('> {}\nFailed to load URL. Reason: {}'
                                                 .format(url, status))
@@ -1188,7 +1222,7 @@ async def message(self, message):
                                                     .format(len(results)))
                                         break
                                     else:
-                                        url = result[0]
+                                        url = result['link']
                                 else:
                                     response = ('> {}\nFailed to load URL. Reason: {}'
                                                 .format(url, status))
@@ -15,7 +15,7 @@ class XmppPubsub:


     async def get_pubsub_services(self):
-        jids = [self.boundjid.bare]
+        results = []
         iq = await self['xep_0030'].get_items(jid=self.boundjid.domain)
         items = iq['disco_items']['items']
         for item in items:
@@ -23,9 +23,13 @@ class XmppPubsub:
             identities = iq['disco_info']['identities']
             for identity in identities:
                 if identity[0] == 'pubsub' and identity[1] == 'service':
-                    jid = item[0]
-                    jids.extend([jid])
-        return jids
+                    result = {}
+                    result['jid'] = item[0]
+                    if item[1]: result['name'] = item[1]
+                    elif item[2]: result['name'] = item[2]
+                    else: result['name'] = item[0]
+                    results.extend([result])
+        return results


     def delete_node(self, jid, node):
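get_pubsub_services now returns a list of {'jid': ..., 'name': ...} dicts instead of bare JIDs, giving the new list-single Ad-Hoc menus both a submittable value and a human-readable label. A small consumption sketch (pure Python; how the menu itself is built is outside this diff):

    services = await XmppPubsub.get_pubsub_services(self)
    # Map the submittable value (JID) to the label shown in the menu.
    menu = {service['jid']: service['name'] for service in services}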
@@ -44,7 +48,7 @@ class XmppPubsub:


     # TODO Make use of var "xep" with match/case (XEP-0060, XEP-0277, XEP-0472)
-    def create_node(self, jid, node, xep ,title=None, summary=None):
+    def create_node(self, jid, node, xep ,title=None, subtitle=None):
         jid_from = str(self.boundjid) if self.is_component else None
         iq = self.Iq(stype='set',
                      sto=jid,
@@ -57,7 +61,7 @@ class XmppPubsub:
                       value=title)
         form.addField('pubsub#description',
                       ftype='text-single',
-                      value=summary)
+                      value=subtitle)
         form.addField('pubsub#notify_retract',
                       ftype='boolean',
                       value=1)
@@ -17,6 +17,7 @@ def is_operator(self, jid_bare):
             break
     return result

+
 def is_moderator(self, jid_bare, jid_full):
     alias = jid_full[jid_full.index('/')+1:]
     role = self.plugin['xep_0045'].get_jid_property(jid_bare, alias, 'role')
@@ -27,6 +28,16 @@ def is_moderator(self, jid_bare, jid_full):
     return result


+def is_member(self, jid_bare, jid_full):
+    alias = jid_full[jid_full.index('/')+1:]
+    affiliation = self.plugin['xep_0045'].get_jid_property(jid_bare, alias, 'affiliation')
+    if affiliation == 'member':
+        result = True
+    else:
+        result = False
+    return result
+
+
 # TODO Rename to get_jid_type
 async def get_chat_type(self, jid):
     """
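is_member complements is_operator and is_moderator for gating member-only actions. A hypothetical call site in a groupchat handler (message is a slixmpp Message stanza):

    jid_bare = message['from'].bare  # room JID
    jid_full = message['from'].full  # room JID with the occupant's resource
    if is_member(self, jid_bare, jid_full):
        pass  # allow the member-only action here

The body could be shortened to `return affiliation == 'member'`, but the explicit if/else matches its siblings.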