forked from sch/Slixfeed
Ad-Hoc: Add more operator options.
Ad-Hoc: Add menus (list-single) for selection from a fixed list of bookmarks and contacts.
Database: Identifier (node name) includes hyphens instead of dots.
Database: SQLite database now stores more items.
Bookmarks: Improve code.
MUC: Improve code.
SQLite: Major code changes to adapt to the new tables.
URL: Fix redirection (hostname switcher).
This commit is contained in:
parent
e0bc0bddf7
commit
60756dbdd2
12 changed files with 1786 additions and 1188 deletions
@@ -183,7 +183,7 @@ async def xmpp_send_status_message(self, jid):
         status_mode = 'dnd'
         status_text = jid_task[list(jid_task.keys())[0]]
     else:
-        feeds = sqlite.get_number_of_items(db_file, 'feeds')
+        feeds = sqlite.get_number_of_items(db_file, 'feeds_properties')
         # print(await current_time(), jid, "has", feeds, "feeds")
         if not feeds:
             status_mode = 'available'
@@ -227,21 +227,22 @@ async def xmpp_send_pubsub(self, jid_bare, num=None):
     subscriptions = sqlite.get_active_feeds_url(db_file)
     for url in subscriptions:
         url = url[0]
-        feed_id = sqlite.get_feed_id(db_file, url)
-        feed_id = feed_id[0]
-        feed_title = None
-        feed_summary = None
         if jid_bare == self.boundjid.bare:
             node = 'urn:xmpp:microblog:0'
+            feed_title = None
+            feed_subtitle = None
         else:
+            feed_id = sqlite.get_feed_id(db_file, url)
+            feed_id = feed_id[0]
             feed_title = sqlite.get_feed_title(db_file, feed_id)
             feed_title = feed_title[0]
-            feed_summary = None
-            node = sqlite.get_node_name(db_file, feed_id)
+            feed_subtitle = sqlite.get_feed_subtitle(db_file, feed_id)
+            feed_subtitle = feed_subtitle[0]
+            node = sqlite.get_feed_identifier(db_file, feed_id)
+            node = node[0]
         xep = None
         iq_create_node = XmppPubsub.create_node(
-            self, jid_bare, node, xep, feed_title, feed_summary)
+            self, jid_bare, node, xep, feed_title, feed_subtitle)
         await XmppIQ.send(self, iq_create_node)
-        entries = sqlite.get_unread_entries_of_feed(db_file, feed_id)
+        feed_properties = sqlite.get_feed_properties(db_file, feed_id)
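The hunk above resolves the publish target in two ways: the bot's own bare JID posts to its personal microblog node (XEP-0277), every other feed posts to its stored per-feed identifier. A minimal illustrative sketch of that selection; the helper names are hypothetical, not the project's API:

def choose_node(jid_bare, own_jid, get_feed_identifier):
    # own account publishes to the personal microblog node (XEP-0277)
    if jid_bare == own_jid:
        return 'urn:xmpp:microblog:0'
    # other feeds publish to their stored identifier, e.g. 'example-com:0'
    return get_feed_identifier()[0]

print(choose_node('bot@example.org', 'bot@example.org', lambda: ('x',)))
print(choose_node('user@example.org', 'bot@example.org',
                  lambda: ('example-com:0',)))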
@@ -251,17 +252,21 @@ async def xmpp_send_pubsub(self, jid_bare, num=None):
         # if num and counter < num:
         report[url] = len(entries)
         for entry in entries:
-            feed_entry = {'author' : None,
-                          'authors' : None,
-                          'category' : None,
-                          'content' : None,
-                          'description' : entry[3],
-                          'link' : entry[2],
-                          'links' : entry[4],
-                          'tags' : None,
-                          'title' : entry[1],
-                          'type' : None,
-                          'updated' : entry[7]}
+            feed_entry = {'authors' : entry[3],
+                          'content' : entry[6],
+                          'content_type' : entry[7],
+                          'contact' : entry[4],
+                          'contributors' : entry[5],
+                          'summary' : entry[8],
+                          'summary_type' : entry[9],
+                          'enclosures' : entry[13],
+                          'language' : entry[10],
+                          'link' : entry[2],
+                          'links' : entry[11],
+                          'published' : entry[15],
+                          'tags' : entry[12],
+                          'title' : entry[1],
+                          'updated' : entry[16]}
             iq_create_entry = XmppPubsub.create_entry(
                 self, jid_bare, node, feed_entry, feed_version)
             await XmppIQ.send(self, iq_create_entry)
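The remap above trades the old short entry row for the richer entries_properties row. A hedged illustration with a fabricated sample row; the column order is assumed from the indices in this hunk, not taken from the schema:

row = (42, 'Title', 'https://example.com/post', ['Alice'], 'mailto:a@b',
       [], '<p>Body</p>', 'text/html', 'Summary', 'text/plain', 'en',
       [], [], [], None, '2024-01-01T00:00:00Z', '2024-01-02T00:00:00Z')

feed_entry = {'authors': row[3], 'content': row[6], 'content_type': row[7],
              'contact': row[4], 'contributors': row[5], 'summary': row[8],
              'summary_type': row[9], 'enclosures': row[13],
              'language': row[10], 'link': row[2], 'links': row[11],
              'published': row[15], 'tags': row[12], 'title': row[1],
              'updated': row[16]}
print(feed_entry)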
@@ -303,12 +308,11 @@ async def xmpp_send_message(self, jid, num=None):
             title_e = result[1]
             url = result[2]
             summary = result[3]
-            enclosure = result[4]
-            feed_id = result[5]
-            date = result[6]
+            feed_id = result[4]
+            date = result[5]
             title_f = sqlite.get_feed_title(db_file, feed_id)
             title_f = title_f[0]
-            news_digest += list_unread_entries(self, result, title_f, jid)
+            news_digest += await list_unread_entries(self, result, title_f, jid)
             # print(db_file)
             # print(result[0])
             # breakpoint()
@@ -533,7 +537,7 @@ def is_feed(feed):
     return value


-def list_unread_entries(self, result, feed_title, jid):
+async def list_unread_entries(self, result, feed_title, jid):
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}: feed_title: {} jid: {}'
                  .format(function_name, feed_title, jid))
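list_unread_entries becomes a coroutine because it now awaits replace_hostname, which probes the candidate mirror over HTTP before returning it. A self-contained sketch of the awaited call-site pattern; the hostname mapping here is invented for illustration:

import asyncio

async def replace_hostname_demo(link, _type):
    # stands in for slixfeed.url.replace_hostname, which awaits an HTTP
    # probe of the proxy URL before handing it back
    await asyncio.sleep(0)  # placeholder for the network round-trip
    return link.replace('twitter.com', 'nitter.example')  # assumed mapping

async def main():
    link = 'https://twitter.com/user/status/1'
    link = await replace_hostname_demo(link, 'link') or link
    print(link)

asyncio.run(main())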
@@ -581,7 +585,7 @@ def list_unread_entries(self, result, feed_title, jid):
     # summary = "\n".join(summary)
     link = result[2]
     link = remove_tracking_parameters(link)
-    link = (replace_hostname(link, "link")) or link
+    link = await replace_hostname(link, "link") or link
     # news_item = ("\n{}\n{}\n{} [{}]\n").format(str(title), str(link),
     #                                            str(feed_title), str(ix))
     formatting = Config.get_setting_value(self.settings, jid, 'formatting')
@@ -691,11 +695,9 @@ async def list_statistics(db_file):
     logger.debug('{}: db_file: {}'
                  .format(function_name, db_file))
     entries_unread = sqlite.get_number_of_entries_unread(db_file)
-    entries = sqlite.get_number_of_items(db_file, 'entries')
-    archive = sqlite.get_number_of_items(db_file, 'archive')
-    entries_all = entries + archive
+    entries = sqlite.get_number_of_items(db_file, 'entries_properties')
     feeds_active = sqlite.get_number_of_feeds_active(db_file)
-    feeds_all = sqlite.get_number_of_items(db_file, 'feeds')
+    feeds_all = sqlite.get_number_of_items(db_file, 'feeds_properties')

     # msg = """You have {} unread news items out of {} from {} news sources.
     # """.format(unread_entries, entries, feeds)
@@ -714,7 +716,7 @@ async def list_statistics(db_file):
                "News items : {}/{}\n"
                "News sources : {}/{}\n"
                "```").format(entries_unread,
-                             entries_all,
+                             entries,
                              feeds_active,
                              feeds_all)
    return message
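The statistics now count the *_properties tables directly. A runnable sketch of the underlying counting, with table names taken from this hunk and the rest of the schema assumed:

import sqlite3

con = sqlite3.connect(':memory:')
con.executescript("""
CREATE TABLE feeds_properties (id INTEGER PRIMARY KEY, url TEXT);
CREATE TABLE entries_properties (id INTEGER PRIMARY KEY, read_status INTEGER);
INSERT INTO feeds_properties (url) VALUES ('https://example.com/feed.xml');
INSERT INTO entries_properties (read_status) VALUES (0), (0), (1);
""")

def get_number_of_items(con, table):
    # same shape as sqlite.get_number_of_items: a bare COUNT over one table
    return con.execute('SELECT count(id) FROM {}'.format(table)).fetchone()[0]

entries_unread = con.execute(
    'SELECT count(id) FROM entries_properties WHERE read_status = 0'
    ).fetchone()[0]
print(entries_unread, get_number_of_items(con, 'entries_properties'),
      get_number_of_items(con, 'feeds_properties'))  # 2 3 1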
@@ -762,19 +764,16 @@ def list_feeds(results):
                    .format(len(results)))
     else:
         url = pick_a_feed()
-        message = ('List of subscriptions is empty.'
+        message = ('List of subscriptions is empty. To add a feed, send a URL.'
                    '\n'
-                   'To add a feed, send a URL.'
-                   '\n'
-                   'Featured news:\n*{}*\n{}'
+                   'Featured news: *{}*\n{}'
                    .format(url['name'], url['link']))
     return message


-async def list_bookmarks(self):
+def list_bookmarks(self, conferences):
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}'.format(function_name))
-    conferences = await XmppBookmark.get(self)
     message = '\nList of groupchats:\n\n```\n'
     for conference in conferences:
         message += ('Name: {}\n'
@@ -835,36 +834,42 @@ async def import_opml(db_file, result):
     if not result['error']:
         document = result['content']
         root = ET.fromstring(document)
-        before = sqlite.get_number_of_items(db_file, 'feeds')
+        before = sqlite.get_number_of_items(db_file, 'feeds_properties')
         feeds = []
         for child in root.findall(".//outline"):
             url = child.get("xmlUrl")
             title = child.get("text")
             # feed = (url, title)
             # feeds.extend([feed])
-            feeds.extend([(url, title)])
+            feed = {
+                'title' : title,
+                'url' : url,
+                }
+            feeds.extend([feed])
         await sqlite.import_feeds(db_file, feeds)
         await sqlite.add_metadata(db_file)
-        after = sqlite.get_number_of_items(db_file, 'feeds')
+        after = sqlite.get_number_of_items(db_file, 'feeds_properties')
         difference = int(after) - int(before)
         return difference


-async def add_feed(self, jid_bare, db_file, url, node):
+async def add_feed(self, jid_bare, db_file, url, identifier):
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}: db_file: {} url: {}'
                  .format(function_name, db_file, url))
     while True:
-        exist_feed = sqlite.get_feed_id_and_name(db_file, url)
-        if not exist_feed:
-            exist_node = sqlite.check_node_exist(db_file, node)
-            if not exist_node:
+        feed_id = sqlite.get_feed_id(db_file, url)
+        if not feed_id:
+            exist_identifier = sqlite.check_identifier_exist(db_file, identifier)
+            if not exist_identifier:
                 result = await fetch.http(url)
                 message = result['message']
                 status_code = result['status_code']
                 if not result['error']:
+                    await sqlite.update_feed_status(db_file, feed_id, status_code)
                     document = result['content']
                     feed = parse(document)
+                    # if document and status_code == 200:
                     # if is_feed(url, feed):
                     if is_feed(feed):
                         if "title" in feed["feed"].keys():
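import_opml now collects dicts instead of (url, title) tuples. A self-contained sketch of the same ElementTree walk, plus a small guard against category outlines that carry no xmlUrl; the guard is an addition for the sketch, not part of the commit:

import xml.etree.ElementTree as ET

document = """<opml version="2.0"><body>
  <outline text="Example" xmlUrl="https://example.com/feed.xml"/>
  <outline text="News">
    <outline text="Nested" xmlUrl="https://example.org/atom.xml"/>
  </outline>
</body></opml>"""

root = ET.fromstring(document)
feeds = []
for child in root.findall('.//outline'):
    url = child.get('xmlUrl')
    if not url:  # category outlines have no xmlUrl; skip them
        continue
    feeds.append({'title': child.get('text'), 'url': url})
print(feeds)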
@@ -887,21 +892,41 @@ async def add_feed(self, jid_bare, db_file, url, node):
                             updated = ''
                     else:
                         updated = ''
-                    version = feed["version"]
-                    entries = len(feed["entries"])
-                    await sqlite.insert_feed(db_file, url, title, node,
-                                             entries=entries,
+                    version = feed.version
+                    entries_count = len(feed.entries)
+                    await sqlite.insert_feed(db_file,
+                                             url,
+                                             title,
+                                             identifier,
+                                             entries=entries_count,
                                              version=version,
                                              encoding=encoding,
                                              language=language,
                                              status_code=status_code,
                                              updated=updated)
-                    await scan(self, jid_bare, db_file, url)
-                    old = Config.get_setting_value(self.settings, jid_bare, 'old')
+                    feed_valid = 0 if feed.bozo else 1
+                    await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
+                    if feed.has_key('updated_parsed'):
+                        feed_updated = feed.updated_parsed
+                        try:
+                            feed_updated = dt.convert_struct_time_to_iso8601(feed_updated)
+                        except:
+                            feed_updated = None
+                    else:
+                        feed_updated = None
+                    entries_count = len(feed.entries)
+                    await sqlite.update_feed_properties(db_file, feed_id,
+                                                        entries_count,
+                                                        feed_updated)
                     feed_id = sqlite.get_feed_id(db_file, url)
                     feed_id = feed_id[0]
-                    if not old:
-                        await sqlite.mark_feed_as_read(db_file, feed_id)
+                    new_entries = get_properties_of_entries(
+                        self, jid_bare, db_file, url, feed_id, feed)
+                    if new_entries:
+                        await sqlite.add_entries_and_update_feed_state(
+                            db_file, feed_id, new_entries)
+                    old = Config.get_setting_value(self.settings, jid_bare, 'old')
+                    if not old: await sqlite.mark_feed_as_read(db_file, feed_id)
                     result_final = {'link' : url,
                                     'index' : feed_id,
                                     'name' : title,
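add_feed now records feed validity and the parsed update time itself instead of delegating to scan. A hedged sketch using feedparser's real bozo and updated_parsed fields; time.strftime stands in for slixfeed's dt.convert_struct_time_to_iso8601:

import time
import feedparser

document = """<?xml version="1.0"?><rss version="2.0"><channel>
<title>Demo</title>
<lastBuildDate>Mon, 01 Jan 2024 00:00:00 GMT</lastBuildDate>
</channel></rss>"""

feed = feedparser.parse(document)
feed_valid = 0 if feed.bozo else 1
updated_parsed = feed.feed.get('updated_parsed')  # struct_time or None
if updated_parsed:
    try:
        # stand-in for slixfeed.dt.convert_struct_time_to_iso8601
        feed_updated = time.strftime('%Y-%m-%dT%H:%M:%S', updated_parsed)
    except (TypeError, ValueError):
        feed_updated = None
else:
    feed_updated = None
print(feed_valid, feed_updated)  # 1 2024-01-01T00:00:00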
@@ -909,7 +934,7 @@ async def add_feed(self, jid_bare, db_file, url, node):
                                     'error' : False,
                                     'message': message,
                                     'exist' : False,
-                                    'node' : None}
+                                    'identifier' : None}
                     break
             # NOTE This elif statement be unnecessary
             # when feedparser be supporting json feed.
@@ -936,9 +961,12 @@ async def add_feed(self, jid_bare, db_file, url, node):
                     else:
                         updated = ''
                     version = 'json' + feed["version"].split('/').pop()
-                    entries = len(feed["items"])
-                    await sqlite.insert_feed(db_file, url, title, node,
-                                             entries=entries,
+                    entries_count = len(feed["items"])
+                    await sqlite.insert_feed(db_file,
+                                             url,
+                                             title,
+                                             identifier,
+                                             entries=entries_count,
                                              version=version,
                                              encoding=encoding,
                                              language=language,
@@ -957,7 +985,7 @@ async def add_feed(self, jid_bare, db_file, url, node):
                                     'error' : False,
                                     'message': message,
                                     'exist' : False,
-                                    'node' : None}
+                                    'identifier' : None}
                     break
             else:
                 # NOTE Do not be tempted to return a compact dictionary.
@@ -973,7 +1001,7 @@ async def add_feed(self, jid_bare, db_file, url, node):
                                     'error' : True,
                                     'message': message,
                                     'exist' : False,
-                                    'node' : None}
+                                    'identifier' : None}
                     break
         elif isinstance(result, list):
             # Get out of the loop and deliver a list of dicts.
@@ -983,6 +1011,7 @@ async def add_feed(self, jid_bare, db_file, url, node):
             # Go back up to the while loop and try again.
             url = result['link']
         else:
+            await sqlite.update_feed_status(db_file, feed_id, status_code)
             result_final = {'link' : url,
                             'index' : None,
                             'name' : None,
@@ -990,12 +1019,13 @@ async def add_feed(self, jid_bare, db_file, url, node):
                             'error' : True,
                             'message': message,
                             'exist' : False,
-                            'node' : None}
+                            'identifier' : None}
             break
         else:
-            ix = exist_node[1]
-            node = exist_node[2]
-            message = 'Node is already allocated.'
+            ix = exist_identifier[1]
+            identifier = exist_identifier[2]
+            message = ('Identifier "{}" is already allocated.'
+                       .format(identifier))
             result_final = {'link' : url,
                             'index' : ix,
                             'name' : None,
@@ -1003,20 +1033,21 @@ async def add_feed(self, jid_bare, db_file, url, node):
                             'error' : False,
                             'message': message,
                             'exist' : False,
-                            'node' : node}
+                            'identifier' : identifier}
             break
     else:
-        ix = exist_feed[0]
-        name = exist_feed[1]
+        feed_id = feed_id[0]
+        title = sqlite.get_feed_title(db_file, feed_id)
+        title = title[0]
         message = 'URL already exist.'
         result_final = {'link' : url,
-                        'index' : ix,
-                        'name' : name,
+                        'index' : feed_id,
+                        'name' : title,
                         'code' : None,
                         'error' : False,
                         'message': message,
                         'exist' : True,
-                        'node' : None}
+                        'identifier' : None}
         break
    return result_final

@@ -1168,8 +1199,8 @@ async def scan_json(self, jid_bare, db_file, url):
     if len(new_entries):
         feed_id = sqlite.get_feed_id(db_file, url)
         feed_id = feed_id[0]
-        await sqlite.add_entries_and_update_timestamp(db_file, feed_id,
-                                                      new_entries)
+        await sqlite.add_entries_and_update_feed_state(db_file, feed_id,
+                                                       new_entries)


 def view_feed(url, feed):
@@ -1266,162 +1297,274 @@ def view_entry(url, feed, num):
     return response


-# TODO get all active feeds of active accounts and scan the feed with the earliest scanned time
-# TODO Rename function name (idea: scan_and_populate)
-async def scan(self, jid_bare, db_file, url):
+async def download_feed(self, db_file, feed_url):
     """
-    Check feeds for new entries.
+    Get feed content.

     Parameters
     ----------
     db_file : str
         Path to database file.
     url : str, optional
-        URL. The default is None.
+        URL.
     """
     function_name = sys._getframe().f_code.co_name
     logger.debug('{}: db_file: {} url: {}'
-                 .format(function_name, db_file, url))
-    if isinstance(url, tuple): url = url[0]
-    result = await fetch.http(url)
-    feed_id = sqlite.get_feed_id(db_file, url)
+                 .format(function_name, db_file, feed_url))
+    if isinstance(feed_url, tuple): feed_url = feed_url[0]
+    result = await fetch.http(feed_url)
+    feed_id = sqlite.get_feed_id(db_file, feed_url)
     feed_id = feed_id[0]
+    status_code = result['status_code']
+    await sqlite.update_feed_status(db_file, feed_id, status_code)
     if not result['error']:
         document = result['content']
-        status = result['status_code']
-        new_entries = []
-        if document and status == 200:
-            feed = parse(document)
-            entries = feed.entries
-            # length = len(entries)
-            await remove_nonexistent_entries(self, jid_bare, db_file, url, feed)
-            try:
-                if feed.bozo:
-                    # bozo = (
-                    #     "WARNING: Bozo detected for feed: {}\n"
-                    #     "For more information, visit "
-                    #     "https://pythonhosted.org/feedparser/bozo.html"
-                    #     ).format(url)
-                    # print(bozo)
-                    valid = 0
-                else:
-                    valid = 1
-                feed_id = sqlite.get_feed_id(db_file, url)
-                feed_id = feed_id[0]
-                await sqlite.update_feed_validity(
-                    db_file, feed_id, valid)
-                if "updated_parsed" in feed["feed"].keys():
-                    updated = feed["feed"]["updated_parsed"]
-                    try:
-                        updated = dt.convert_struct_time_to_iso8601(updated)
-                    except:
-                        updated = ''
-                else:
-                    updated = ''
-                feed_id = sqlite.get_feed_id(db_file, url)
-                feed_id = feed_id[0]
-                await sqlite.update_feed_properties(db_file, feed_id,
-                                                    len(feed["entries"]), updated)
-                # await update_feed_status
-            except (IncompleteReadError, IncompleteRead, error.URLError) as e:
-                logger.error(e)
-                return
-            # new_entry = 0
-            for entry in entries:
-                logger.debug('{}: entry: {}'.format(function_name, entry.link))
-                if entry.has_key("published"):
-                    date = entry.published
-                    date = dt.rfc2822_to_iso8601(date)
-                elif entry.has_key("updated"):
-                    date = entry.updated
-                    date = dt.rfc2822_to_iso8601(date)
-                else:
-                    date = dt.now()
-                if entry.has_key("link"):
-                    # link = complete_url(source, entry.link)
-                    link = join_url(url, entry.link)
-                    link = trim_url(link)
-                else:
-                    link = url
-                # title = feed["feed"]["title"]
-                # title = "{}: *{}*".format(feed["feed"]["title"], entry.title)
-                title = entry.title if entry.has_key("title") else date
-                entry_id = entry.id if entry.has_key("id") else link
-                feed_id = sqlite.get_feed_id(db_file, url)
-                feed_id = feed_id[0]
-                exist = sqlite.check_entry_exist(db_file, feed_id,
-                                                 entry_id=entry_id,
-                                                 title=title, link=link,
-                                                 date=date)
-                if not exist:
-                    summary = entry.summary if entry.has_key("summary") else ''
-                    read_status = 0
-                    pathname = urlsplit(link).path
-                    string = (
-                        "{} {} {}"
-                        ).format(
-                            title, summary, pathname)
-                    if self.settings['default']['filter']:
-                        print('Filter is now processing data.')
-                        allow_list = config.is_include_keyword(db_file,
-                                                               "allow", string)
-                        if not allow_list:
-                            reject_list = config.is_include_keyword(db_file,
-                                                                    "deny",
-                                                                    string)
-                            if reject_list:
-                                read_status = 1
-                                logger.debug('Rejected : {}'
-                                             '\n'
-                                             'Keyword : {}'
-                                             .format(link, reject_list))
-                    if isinstance(date, int):
-                        logger.error('Variable "date" is int: {}'.format(date))
-                    media_link = ''
-                    if entry.has_key("links"):
-                        for e_link in entry.links:
-                            try:
-                                # if (link.rel == "enclosure" and
-                                #     (link.type.startswith("audio/") or
-                                #      link.type.startswith("image/") or
-                                #      link.type.startswith("video/"))
-                                #     ):
-                                media_type = e_link.type[:e_link.type.index("/")]
-                                if e_link.has_key("rel"):
-                                    if (e_link.rel == "enclosure" and
-                                        media_type in ("audio", "image", "video")):
-                                        media_link = e_link.href
-                                        media_link = join_url(url, e_link.href)
-                                        media_link = trim_url(media_link)
-                                        break
-                            except:
-                                logger.error('KeyError: "href"\n'
-                                             'Missing "href" attribute for {}'
-                                             .format(url))
-                                logger.error('Continue scanning for next '
-                                             'potential enclosure of {}'
-                                             .format(link))
-                    entry = {
-                        "title": title,
-                        "link": link,
-                        "summary": summary,
-                        "enclosure": media_link,
-                        "entry_id": entry_id,
-                        "date": date,
-                        "read_status": read_status
-                        }
-                    new_entries.extend([entry])
-                    # await sqlite.add_entry(
-                    #     db_file, title, link, entry_id,
-                    #     url, date, read_status)
-                    # await sqlite.set_date(db_file, url)
-        if len(new_entries):
-            feed_id = sqlite.get_feed_id(db_file, url)
-            feed_id = feed_id[0]
-            await sqlite.add_entries_and_update_timestamp(db_file, feed_id,
-                                                          new_entries)


+# TODO get all active feeds of active accounts and scan the feed with the earliest scanned time
+# TODO Rename function name (idea: scan_and_populate)
+def get_properties_of_entries(self, jid_bare, db_file, feed_url, feed_id, feed):
+    """
+    Get new entries.
+
+    Parameters
+    ----------
+    db_file : str
+        Path to database file.
+    url : str, optional
+        URL.
+    """
+    print('GET', feed_url, jid_bare)
+    function_name = sys._getframe().f_code.co_name
+    logger.debug('{}: feed_id: {} url: {}'
+                 .format(function_name, feed_id, feed_url))
+
+    new_entries = []
+    for entry in feed.entries:
+        logger.debug('{}: entry: {}'.format(function_name, entry.link))
+        if entry.has_key("published"):
+            entry_published = entry.published
+            entry_published = dt.rfc2822_to_iso8601(entry_published)
+        else:
+            entry_published = ''
+        if entry.has_key("updated"):
+            entry_updated = entry.updated
+            entry_updated = dt.rfc2822_to_iso8601(entry_updated)
+        else:
+            entry_updated = dt.now()
+        if entry.has_key("link"):
+            # link = complete_url(source, entry.link)
+            entry_link = join_url(feed_url, entry.link)
+            entry_link = trim_url(entry_link)
+        else:
+            entry_link = feed_url
+        # title = feed["feed"]["title"]
+        # title = "{}: *{}*".format(feed["feed"]["title"], entry.title)
+        entry_title = entry.title if entry.has_key("title") else entry_published
+        entry_id = entry.id if entry.has_key("id") else entry_link
+        exist = sqlite.check_entry_exist(db_file, feed_id,
+                                         identifier=entry_id,
+                                         title=entry_title,
+                                         link=entry_link,
+                                         published=entry_published)
+        if not exist:
+            read_status = 0
+            # # Filter
+            # pathname = urlsplit(link).path
+            # string = (
+            #     "{} {} {}"
+            #     ).format(
+            #         title, summary, pathname)
+            # if self.settings['default']['filter']:
+            #     print('Filter is now processing data.')
+            #     allow_list = config.is_include_keyword(db_file,
+            #                                            "allow", string)
+            #     if not allow_list:
+            #         reject_list = config.is_include_keyword(db_file,
+            #                                                 "deny",
+            #                                                 string)
+            #         if reject_list:
+            #             read_status = 1
+            #             logger.debug('Rejected : {}'
+            #                          '\n'
+            #                          'Keyword : {}'
+            #                          .format(link, reject_list))
+            if isinstance(entry_published, int):
+                logger.error('Variable "published" is int: {}'.format(entry_published))
+            if isinstance(entry_updated, int):
+                logger.error('Variable "updated" is int: {}'.format(entry_updated))
+
+            # Authors
+            entry_authors =[]
+            if entry.has_key('authors'):
+                for author in entry.authors:
+                    author_properties = {
+                        'name' : author.name if author.has_key('name') else '',
+                        'url' : author.href if author.has_key('href') else '',
+                        'email' : author.email if author.has_key('email') else '',
+                        }
+                    entry_authors.extend([author_properties])
+            elif entry.has_key('author_detail'):
+                author_properties = {
+                    'name' : entry.author_detail.name if entry.author_detail.has_key('name') else '',
+                    'url' : entry.author_detail.href if entry.author_detail.has_key('href') else '',
+                    'email' : entry.author_detail.email if entry.author_detail.has_key('email') else '',
+                    }
+                entry_authors.extend([author_properties])
+            elif entry.has_key('author'):
+                author_properties = {
+                    'name' : entry.author,
+                    'url' : '',
+                    'email' : '',
+                    }
+                entry_authors.extend([author_properties])
+
+            # Contributors
+            entry_contributors = []
+            if entry.has_key('contributors'):
+                for contributor in entry.contributors:
+                    contributor_properties = {
+                        'name' : contributor.name if contributor.has_key('name') else '',
+                        'url' : contributor.href if contributor.has_key('href') else '',
+                        'email' : contributor.email if contributor.has_key('email') else '',
+                        }
+                    entry_contributors.extend([contributor_properties])
+
+            # Tags
+            entry_tags = []
+            if entry.has_key('tags'):
+                for tag in entry.tags:
+                    tag_properties = {
+                        'term' : tag.term if tag.has_key('term') else '',
+                        'scheme' : tag.scheme if tag.has_key('scheme') else '',
+                        'label' : tag.label if tag.has_key('label') else '',
+                        }
+                    entry_tags.extend([tag_properties])
+
+            # Content
+            entry_contents = []
+            if entry.has_key('content'):
+                for content in entry.content:
+                    text = content.value if content.has_key('value') else ''
+                    type = content.type if content.has_key('type') else ''
+                    lang = content.lang if content.has_key('lang') else ''
+                    base = content.base if content.has_key('base') else ''
+                    entry_content = {
+                        'text' : text,
+                        'lang' : lang,
+                        'type' : type,
+                        'base' : base,
+                        }
+                    entry_contents.extend([entry_content])
+
+            # Links and Enclosures
+            entry_links = []
+            if entry.has_key('links'):
+                for link in entry.links:
+                    link_properties = {
+                        'url' : link.href if link.has_key('href') else '',
+                        'rel' : link.rel if link.has_key('rel') else '',
+                        'type' : link.type if link.has_key('type') else '',
+                        'length' : '',
+                        }
+                    entry_links.extend([link_properties])
+            # Element media:content is utilized by Mastodon
+            if entry.has_key('media_content'):
+                for link in entry.media_content:
+                    link_properties = {
+                        'url' : link['url'] if 'url' in link else '',
+                        'rel' : 'enclosure',
+                        'type' : link['type'] if 'type' in link else '',
+                        # 'medium' : link['medium'] if 'medium' in link else '',
+                        'length' : link['filesize'] if 'filesize' in link else '',
+                        }
+                    entry_links.extend([link_properties])
+            if entry.has_key('media_thumbnail'):
+                for link in entry.media_thumbnail:
+                    link_properties = {
+                        'url' : link['url'] if 'url' in link else '',
+                        'rel' : 'enclosure',
+                        'type' : '',
+                        # 'medium' : 'image',
+                        'length' : '',
+                        }
+                    entry_links.extend([link_properties])
+
+            # Category
+            entry_category = entry.category if entry.has_key('category') else ''
+
+            # Comments
+            entry_comments = entry.comments if entry.has_key('comments') else ''
+
+            # href
+            entry_href = entry.href if entry.has_key('href') else ''
+
+            # Link: Same as entry.links[0].href in most if not all cases
+            entry_link = entry.link if entry.has_key('link') else ''
+
+            # Rating
+            entry_rating = entry.rating if entry.has_key('rating') else ''
+
+            # Summary
+            entry_summary_text = entry.summary if entry.has_key('summary') else ''
+            if entry.has_key('summary_detail'):
+                entry_summary_type = entry.summary_detail.type if entry.summary_detail.has_key('type') else ''
+                entry_summary_lang = entry.summary_detail.lang if entry.summary_detail.has_key('lang') else ''
+                entry_summary_base = entry.summary_detail.base if entry.summary_detail.has_key('base') else ''
+            else:
+                entry_summary_type = ''
+                entry_summary_lang = ''
+                entry_summary_base = ''
+
+            # Title
+            entry_title = entry.title if entry.has_key('title') else ''
+            if entry.has_key('title_detail'):
+                entry_title_type = entry.title_detail.type if entry.title_detail.has_key('type') else ''
+            else:
+                entry_title_type = ''
+
+            ###########################################################
+
+            # media_type = e_link.type[:e_link.type.index("/")]
+            # if (e_link.rel == "enclosure" and
+            #     media_type in ("audio", "image", "video")):
+            #     media_link = e_link.href
+            #     media_link = join_url(url, e_link.href)
+            #     media_link = trim_url(media_link)
+
+            ###########################################################
+
+            entry_properties = {
+                "identifier": entry_id,
+                "link": entry_link,
+                "href": entry_href,
+                "title": entry_title,
+                "title_type": entry_title_type,
+                'summary_text' : entry_summary_text,
+                'summary_lang' : entry_summary_lang,
+                'summary_type' : entry_summary_type,
+                'summary_base' : entry_summary_base,
+                'category' : entry_category,
+                "comments": entry_comments,
+                "rating": entry_rating,
+                "published": entry_published,
+                "updated": entry_updated,
+                "read_status": read_status
+                }
+            print('entry_properties')
+            print(entry_properties)
+
+            new_entries.extend([{
+                "entry_properties" : entry_properties,
+                "entry_authors" : entry_authors,
+                "entry_contributors" : entry_contributors,
+                "entry_contents" : entry_contents,
+                "entry_links" : entry_links,
+                "entry_tags" : entry_tags
+                }])
+            # await sqlite.add_entry(
+            #     db_file, title, link, entry_id,
+            #     url, date, read_status)
+            # await sqlite.set_date(db_file, url)
+    return new_entries


 def get_document_title(data):
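get_properties_of_entries reduces each feedparser entry to plain dictionaries before they are stored. A runnable sketch of the same extraction for authors, tags, and links against an inline Atom document:

import feedparser

document = """<?xml version="1.0"?>
<feed xmlns="http://www.w3.org/2005/Atom">
  <title>Demo</title>
  <entry>
    <title>Hello</title>
    <id>urn:uuid:1</id>
    <author><name>Alice</name></author>
    <category term="news"/>
    <link rel="alternate" href="https://example.com/1"/>
    <link rel="enclosure" type="audio/ogg" href="https://example.com/1.ogg"/>
    <updated>2024-01-01T00:00:00Z</updated>
  </entry>
</feed>"""

feed = feedparser.parse(document)
for entry in feed.entries:
    # each accessor mirrors one block of get_properties_of_entries
    entry_authors = [{'name': a.get('name', ''), 'email': a.get('email', '')}
                     for a in entry.get('authors', [])]
    entry_tags = [{'term': t.get('term', ''), 'label': t.get('label', '')}
                  for t in entry.get('tags', [])]
    entry_links = [{'url': ln.get('href', ''), 'rel': ln.get('rel', ''),
                    'type': ln.get('type', '')}
                   for ln in entry.get('links', [])]
    print(entry_authors, entry_tags, entry_links)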
slixfeed/sqlite.py (1063 changed lines)
File diff suppressed because it is too large
@@ -68,12 +68,15 @@ except Exception as exc:
 """

 import asyncio
+from feedparser import parse
 import logging
 import os
 import slixfeed.action as action
 import slixfeed.config as config
 from slixfeed.config import Config
 # from slixfeed.dt import current_time
+import slixfeed.dt as dt
+import slixfeed.fetch as fetch
 import slixfeed.sqlite as sqlite
 # from xmpp import Slixfeed
 from slixfeed.xmpp.presence import XmppPresence
@@ -339,14 +342,67 @@ async def check_updates(self, jid_bare):
     jid : str
         Jabber ID.
     """
+    print('Scanning for updates for JID {}'.format(jid_bare))
     logging.info('Scanning for updates for JID {}'.format(jid_bare))
     while True:
         jid_file = jid_bare.replace('/', '_')
         db_file = config.get_pathname_to_database(jid_file)
         urls = sqlite.get_active_feeds_url(db_file)
         for url in urls:
-            await action.scan(self, jid_bare, db_file, url)
-            await asyncio.sleep(50)
+            url = url[0]
+            print('STA',url)
+
+            result = await fetch.http(url)
+            status_code = result['status_code']
+            feed_id = sqlite.get_feed_id(db_file, url)
+            feed_id = feed_id[0]
+            if not result['error']:
+                await sqlite.update_feed_status(db_file, feed_id, status_code)
+                document = result['content']
+                feed = parse(document)
+
+                feed_valid = 0 if feed.bozo else 1
+                await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
+
+                if feed.has_key('updated_parsed'):
+                    feed_updated = feed.updated_parsed
+                    try:
+                        feed_updated = dt.convert_struct_time_to_iso8601(feed_updated)
+                    except:
+                        feed_updated = ''
+                else:
+                    feed_updated = ''
+
+                entries_count = len(feed.entries)
+
+                feed_version = feed.version if feed.has_key('version') else ''
+                feed_encoding = feed.encoding if feed.has_key('encoding') else ''
+                feed_language = feed.feed.language if feed.feed.has_key('language') else ''
+                feed_icon = feed.feed.icon if feed.feed.has_key('icon') else ''
+                feed_image = feed.feed.image if feed.feed.has_key('image') else ''
+                feed_logo = feed.feed.logo if feed.feed.has_key('logo') else ''
+                feed_ttl = feed.feed.ttl if feed.feed.has_key('ttl') else ''
+
+                feed_properties = {
+                    "version" : feed_version,
+                    "encoding" : feed_encoding,
+                    "language" : feed_language,
+                    "rating" : '',
+                    "entries_count" : entries_count,
+                    "icon" : feed_icon,
+                    "image" : feed_image,
+                    "logo" : feed_logo,
+                    "ttl" : feed_ttl,
+                    "updated" : feed_updated,
+                    }
+                await sqlite.update_feed_properties(db_file, feed_id,
+                                                    feed_properties)
+                new_entries = action.get_properties_of_entries(
+                    self, jid_bare, db_file, url, feed_id, feed)
+                if new_entries: await sqlite.add_entries_and_update_feed_state(
+                    db_file, feed_id, new_entries)
+            print('END', url)
+            await asyncio.sleep(5)
         val = Config.get_setting_value(self.settings, jid_bare, 'check')
         await asyncio.sleep(60 * float(val))
         # Schedule to call this function again in 90 minutes
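check_updates now fetches and parses inline instead of calling action.scan, pausing briefly between feeds and then sleeping for the configured interval. The polling skeleton, reduced to its timing structure (names and intervals illustrative):

import asyncio

async def check_updates(urls, check_interval_minutes=90):
    while True:
        for url in urls:
            print('scanning', url)  # fetch + parse + store would go here
            await asyncio.sleep(5)  # be polite between feeds
        await asyncio.sleep(60 * float(check_interval_minutes))

# asyncio.run(check_updates(['https://example.com/feed.xml']))  # runs forever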
@@ -50,7 +50,7 @@ def get_hostname(url):
     return hostname


-def replace_hostname(url, url_type):
+async def replace_hostname(url, url_type):
     """
     Replace hostname.

@@ -79,6 +79,8 @@ def replace_hostname(url, url_type):
         proxy = proxies[proxy_name]
         if hostname in proxy['hostname'] and url_type in proxy['type']:
             while not url_new:
+                print('>>>')
+                print(url_new)
                 proxy_type = 'clearnet'
                 proxy_list = proxy[proxy_type]
                 if len(proxy_list):
@@ -89,10 +91,13 @@ def replace_hostname(url, url_type):
                     hostname_new = parted_proxy_url.netloc
                     url_new = urlunsplit([protocol_new, hostname_new,
                                           pathname, queries, fragment])
-                    response = fetch.http_response(url_new)
+                    print(proxy_url)
+                    print(url_new)
+                    print('>>>')
+                    response = await fetch.http(url_new)
                     if (response and
-                        response.status_code == 200 and
-                        response.reason == 'OK' and
+                        response['status_code'] == 200 and
+                        # response.reason == 'OK' and
                         url_new.startswith(proxy_url)):
                         break
                     else:
@@ -104,13 +109,16 @@ def replace_hostname(url, url_type):
                         config.backup_obsolete(proxies_obsolete_file,
                                                proxy_name, proxy_type,
                                                proxy_url)
-                        config.update_proxies(proxies_file, proxy_name,
-                                              proxy_type, proxy_url)
+                        try:
+                            config.update_proxies(proxies_file, proxy_name,
+                                                  proxy_type, proxy_url)
+                        except ValueError as e:
+                            logging.error([str(e), proxy_url])
                         url_new = None
                 else:
                     logging.warning(
-                        "No proxy URLs for {}."
-                        "Update proxies.toml".format(proxy_name))
+                        "No proxy URLs for {}. Please update proxies.toml"
+                        .format(proxy_name))
                     url_new = url
                     break
     return url_new
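The hostname switcher grafts a proxy's scheme and authority onto the original path, query, and fragment. A self-contained sketch with urllib.parse; the proxy URL is illustrative:

from urllib.parse import urlsplit, urlunsplit

def swap_host(url, proxy_url):
    # keep path/query/fragment, replace scheme and netloc with the proxy's
    scheme, netloc, path, query, fragment = urlsplit(url)
    proxy = urlsplit(proxy_url)
    return urlunsplit([proxy.scheme, proxy.netloc, path, query, fragment])

print(swap_host('https://www.youtube.com/watch?v=abc#t=1',
                'https://invidious.example'))
# https://invidious.example/watch?v=abc#t=1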
@@ -1,2 +1,2 @@
-__version__ = '0.1.52'
-__version_info__ = (0, 1, 52)
+__version__ = '0.1.53'
+__version_info__ = (0, 1, 53)
@@ -15,7 +15,7 @@ from slixmpp.plugins.xep_0048.stanza import Bookmarks
 class XmppBookmark:


-    async def get(self):
+    async def get_bookmarks(self):
         result = await self.plugin['xep_0048'].get_bookmarks()
         conferences = result['private']['bookmarks']['conferences']
         return conferences
File diff suppressed because it is too large
File diff suppressed because it is too large
@@ -20,20 +20,6 @@ import logging

 class XmppGroupchat:

-    # async def accept_muc_invite(self, message, ctr=None):
-    #     # if isinstance(message, str):
-    #     if not ctr:
-    #         ctr = message["from"].bare
-    #         jid = message['groupchat_invite']['jid']
-    #     else:
-    #         jid = message
-    def accept_invitation(self, message):
-        # operator muc_chat
-        inviter = message["from"].bare
-        jid = message['groupchat_invite']['jid']
-        self.join(self, inviter, jid)
-
-
     async def join(self, jid, alias=None, password=None):
         # token = await initdb(
         #     muc_jid,
@@ -319,22 +319,49 @@ async def message(self, message):
     message_text = message_text[4:]
     url = message_text.split(' ')[0]
     title = ' '.join(message_text.split(' ')[1:])
-    if not title:
-        title = uri.get_hostname(url)
-    counter = 0
-    hostname = uri.get_hostname(url)
-    node = hostname + ':' + str(counter)
-    while True:
-        if sqlite.check_node_exist(db_file, node):
-            counter += 1
-            node = hostname + ':' + str(counter)
-        else:
-            break
+    if url.startswith('http'):
+        if not title:
+            title = uri.get_hostname(url)
+        db_file = config.get_pathname_to_database(jid_file)
+        counter = 0
+        hostname = uri.get_hostname(url)
+        hostname = hostname.replace('.','-')
+        identifier = hostname + ':' + str(counter)
+        while True:
+            if sqlite.check_identifier_exist(db_file, identifier):
+                counter += 1
+                identifier = hostname + ':' + str(counter)
+            else:
+                break
         exist = sqlite.get_feed_id_and_name(db_file, url)
         if not exist:
-            await sqlite.insert_feed(db_file, url, title, node)
-            await action.scan(self, jid_bare, db_file, url)
+            await sqlite.insert_feed(db_file, url, title,
+                                     identifier)
+            feed_id = sqlite.get_feed_id(db_file, url)
+            feed_id = feed_id[0]
+            document = result['content']
+            feed = parse(document)
+            feed_valid = 0 if feed.bozo else 1
+            await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
+            if feed.has_key('updated_parsed'):
+                feed_updated = feed.updated_parsed
+                try:
+                    feed_updated = dt.convert_struct_time_to_iso8601(feed_updated)
+                except:
+                    feed_updated = None
+            else:
+                feed_updated = None
+            entries_count = len(feed.entries)
+            await sqlite.update_feed_properties(db_file, feed_id,
+                                                entries_count,
+                                                feed_updated)
+            feed_id = sqlite.get_feed_id(db_file, url)
+            feed_id = feed_id[0]
+            new_entries = action.get_properties_of_entries(
+                self, jid_bare, db_file, url, feed_id, feed)
+            if new_entries:
+                await sqlite.add_entries_and_update_feed_state(
+                    db_file, feed_id, new_entries)
         if jid_bare not in self.settings:
             Config.add_settings_jid(self.settings, jid_bare,
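The identifier scheme this commit introduces is the feed's hostname with dots replaced by hyphens, suffixed with a counter that is bumped until the identifier is free. A sketch with a set standing in for sqlite.check_identifier_exist:

taken = {'example-com:0', 'example-com:1'}

def make_identifier(hostname, exists):
    hostname = hostname.replace('.', '-')
    counter = 0
    while exists(hostname + ':' + str(counter)):
        counter += 1
    return hostname + ':' + str(counter)

print(make_identifier('example.com', lambda i: i in taken))  # example-com:2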
@@ -477,7 +504,8 @@ async def message(self, message):
                 XmppMessage.send_reply(self, message, response)
             case 'bookmarks':
                 if is_operator(self, jid_bare):
-                    response = await action.list_bookmarks(self)
+                    conferences = await XmppBookmark.get_bookmarks(self)
+                    response = action.list_bookmarks(self, conferences)
                 else:
                     response = ('This action is restricted. '
                                 'Type: viewing bookmarks.')
@@ -608,7 +636,7 @@ async def message(self, message):
                     url = ix_url
                 if url:
                     url = uri.remove_tracking_parameters(url)
-                    url = (uri.replace_hostname(url, 'link')) or url
+                    url = (await uri.replace_hostname(url, 'link')) or url
                     result = await fetch.http(url)
                     if not result['error']:
                         data = result['content']
@@ -696,15 +724,17 @@ async def message(self, message):
                     url = info[1]
                     db_file = config.get_pathname_to_database(jid)
                     if len(info) > 2:
-                        node = info[2]
+                        identifier = info[2]
                     else:
                         counter = 0
                         hostname = uri.get_hostname(url)
-                        node = hostname + ':' + str(counter)
+                        hostname = hostname.replace('.','-')
+                        identifier = hostname + ':' + str(counter)
                         while True:
-                            if sqlite.check_node_exist(db_file, node):
+                            if sqlite.check_identifier_exist(
+                                db_file, identifier):
                                 counter += 1
-                                node = hostname + ':' + str(counter)
+                                identifier = hostname + ':' + str(counter)
                             else:
                                 break
                     # task.clean_tasks_xmpp_chat(self, jid_bare, ['status'])
@@ -720,8 +750,10 @@ async def message(self, message):
                                              status_type=status_type)
                     if url.startswith('feed:'):
                         url = uri.feed_to_http(url)
-                    url = (uri.replace_hostname(url, 'feed')) or url
-                    result = await action.add_feed(self, jid_bare, db_file, url, node)
+                    url = (await uri.replace_hostname(url, 'feed')) or url
+                    result = await action.add_feed(self, jid_bare,
+                                                   db_file, url,
+                                                   identifier)
                     if isinstance(result, list):
                         results = result
                         response = ("Web feeds found for {}\n\n```\n"
@@ -740,11 +772,11 @@ async def message(self, message):
                                     .format(result['link'],
                                             result['name'],
                                             result['index']))
-                    elif result['node']:
-                        response = ('> {}\nNode "{}" is already '
+                    elif result['identifier']:
+                        response = ('> {}\nIdentifier "{}" is already '
                                     'allocated to index {}'
                                     .format(result['link'],
-                                            result['node'],
+                                            result['identifier'],
                                             result['index']))
                     elif result['error']:
                         response = ('> {}\nFailed to find subscriptions. '
@@ -776,10 +808,10 @@ async def message(self, message):
                                 '\n'
                                 'Missing argument. '
                                 'Enter PubSub JID and subscription URL '
-                                '(and optionally: NodeName).')
+                                '(and optionally: Identifier Name).')
                 else:
                     response = ('This action is restricted. '
-                                'Type: adding node.')
+                                'Type: publishing to node.')
                 XmppMessage.send_reply(self, message, response)
             case _ if (message_lowercase.startswith('http') or
                        message_lowercase.startswith('feed:')):
@@ -797,19 +829,21 @@ async def message(self, message):
                                          status_type=status_type)
                 if url.startswith('feed:'):
                     url = uri.feed_to_http(url)
-                url = (uri.replace_hostname(url, 'feed')) or url
+                url = (await uri.replace_hostname(url, 'feed')) or url
                 db_file = config.get_pathname_to_database(jid_file)
                 counter = 0
                 hostname = uri.get_hostname(url)
-                node = hostname + ':' + str(counter)
+                hostname = hostname.replace('.','-')
+                identifier = hostname + ':' + str(counter)
                 while True:
-                    if sqlite.check_node_exist(db_file, node):
+                    if sqlite.check_identifier_exist(db_file, identifier):
                         counter += 1
-                        node = hostname + ':' + str(counter)
+                        identifier = hostname + ':' + str(counter)
                     else:
                         break
                 # try:
-                result = await action.add_feed(self, jid_bare, db_file, url, node)
+                result = await action.add_feed(self, jid_bare, db_file, url,
+                                               identifier)
                 if isinstance(result, list):
                     results = result
                     response = ("Web feeds found for {}\n\n```\n"
@@ -1122,15 +1156,15 @@ async def message(self, message):
                                          status_type=status_type)
                 if url.startswith('feed:'):
                     url = uri.feed_to_http(url)
-                url = (uri.replace_hostname(url, 'feed')) or url
+                url = (await uri.replace_hostname(url, 'feed')) or url
                 match len(data):
                     case 1:
                         if url.startswith('http'):
                             while True:
                                 result = await fetch.http(url)
+                                status = result['status_code']
                                 if not result['error']:
                                     document = result['content']
-                                    status = result['status_code']
                                     feed = parse(document)
                                     # if is_feed(url, feed):
                                     if action.is_feed(feed):
@@ -1151,7 +1185,7 @@ async def message(self, message):
                                                 .format(len(results)))
                                     break
                                 else:
-                                    url = result[0]
+                                    url = result['link']
                             else:
                                 response = ('> {}\nFailed to load URL. Reason: {}'
                                             .format(url, status))
@@ -1188,7 +1222,7 @@ async def message(self, message):
                                                 .format(len(results)))
                                     break
                                 else:
-                                    url = result[0]
+                                    url = result['link']
                             else:
                                 response = ('> {}\nFailed to load URL. Reason: {}'
                                             .format(url, status))
@@ -15,7 +15,7 @@ class XmppPubsub:


     async def get_pubsub_services(self):
-        jids = [self.boundjid.bare]
+        results = []
         iq = await self['xep_0030'].get_items(jid=self.boundjid.domain)
         items = iq['disco_items']['items']
         for item in items:
@@ -23,9 +23,13 @@ class XmppPubsub:
             identities = iq['disco_info']['identities']
             for identity in identities:
                 if identity[0] == 'pubsub' and identity[1] == 'service':
-                    jid = item[0]
-                    jids.extend([jid])
-        return jids
+                    result = {}
+                    result['jid'] = item[0]
+                    if item[1]: result['name'] = item[1]
+                    elif item[2]: result['name'] = item[2]
+                    else: result['name'] = item[0]
+                    results.extend([result])
+        return results


     def delete_node(self, jid, node):
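get_pubsub_services now returns dictionaries instead of bare JIDs. In slixmpp, disco items are (jid, node, name) tuples, so the hunk falls back from item[1] to item[2] to the JID itself when picking a display name. A plain-Python sketch of that fallback with sample tuples:

items = [('pubsub.example.org', None, 'Publish-Subscribe'),
         ('mix.example.org', 'mix', None)]

results = []
for item in items:
    result = {'jid': item[0]}
    if item[1]: result['name'] = item[1]
    elif item[2]: result['name'] = item[2]
    else: result['name'] = item[0]
    results.append(result)
print(results)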
@@ -44,7 +48,7 @@ class XmppPubsub:


     # TODO Make use of var "xep" with match/case (XEP-0060, XEP-0277, XEP-0472)
-    def create_node(self, jid, node, xep ,title=None, summary=None):
+    def create_node(self, jid, node, xep ,title=None, subtitle=None):
         jid_from = str(self.boundjid) if self.is_component else None
         iq = self.Iq(stype='set',
                      sto=jid,
@@ -57,7 +61,7 @@ class XmppPubsub:
                       value=title)
         form.addField('pubsub#description',
                       ftype='text-single',
-                      value=summary)
+                      value=subtitle)
         form.addField('pubsub#notify_retract',
                       ftype='boolean',
                       value=1)
@@ -17,6 +17,7 @@ def is_operator(self, jid_bare):
             break
     return result

+
 def is_moderator(self, jid_bare, jid_full):
     alias = jid_full[jid_full.index('/')+1:]
     role = self.plugin['xep_0045'].get_jid_property(jid_bare, alias, 'role')
@@ -27,6 +28,16 @@ def is_moderator(self, jid_bare, jid_full):
     return result


+def is_member(self, jid_bare, jid_full):
+    alias = jid_full[jid_full.index('/')+1:]
+    affiliation = self.plugin['xep_0045'].get_jid_property(jid_bare, alias, 'affiliation')
+    if affiliation == 'member':
+        result = True
+    else:
+        result = False
+    return result
+
+
 # TODO Rename to get_jid_type
 async def get_chat_type(self, jid):
     """