Add functionality for handling PubSub nodes;

Add functionality to submit items from database to PubSub nodes;
Change the manner in which items are sent;
Fix minor issues.
This commit is contained in:
Schimon Jehudah 2024-04-14 12:56:45 +00:00
parent b69953eb7f
commit bcbbf1ab04
12 changed files with 2853 additions and 1964 deletions


@ -28,6 +28,7 @@ TODO
from asyncio.exceptions import IncompleteReadError
from bs4 import BeautifulSoup
from feedparser import parse
import hashlib
from http.client import IncompleteRead
import json
from slixfeed.log import Logger
@ -39,7 +40,6 @@ import slixfeed.crawl as crawl
import slixfeed.dt as dt
import slixfeed.fetch as fetch
import slixfeed.sqlite as sqlite
import slixfeed.url as uri
from slixfeed.url import (
complete_url,
join_url,
@ -56,10 +56,11 @@ from slixfeed.xmpp.presence import XmppPresence
from slixfeed.xmpp.publish import XmppPubsub
from slixfeed.xmpp.upload import XmppUpload
from slixfeed.xmpp.utility import get_chat_type
from slixmpp.xmlstream import ET
import sys
from urllib import error
from urllib.parse import parse_qs, urlsplit
import xml.etree.ElementTree as ET
import xml.etree.ElementTree as ETR
try:
import tomllib
@ -174,10 +175,7 @@ async def xmpp_send_status_message(self, jid):
jid_file = jid.replace('/', '_')
db_file = config.get_pathname_to_database(jid_file)
enabled = Config.get_setting_value(self.settings, jid, 'enabled')
if not enabled:
status_mode = 'xa'
status_text = '📪️ Send "Start" to receive updates'
else:
if enabled:
jid_task = self.pending_tasks[jid]
if len(jid_task):
status_mode = 'dnd'
@ -202,7 +200,9 @@ async def xmpp_send_status_message(self, jid):
else:
status_mode = 'available'
status_text = '📭️ No news'
else:
status_mode = 'xa'
status_text = '📪️ Send "Start" to receive updates'
# breakpoint()
# print(await current_time(), status_text, "for", jid)
XmppPresence.send(self, jid, status_text, status_type=status_mode)
@ -215,69 +215,267 @@ async def xmpp_send_status_message(self, jid):
# )
async def xmpp_send_pubsub(self, jid_bare, num=None):
async def xmpp_pubsub_send_selected_entry(self, jid_bare, jid_file, node_id, entry_id):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: jid_bare: {} jid_file: {}'.format(function_name, jid_bare, jid_file))
# jid_file = jid_bare.replace('/', '_')
db_file = config.get_pathname_to_database(jid_file)
report = {}
if jid_bare == self.boundjid.bare:
node_id = 'urn:xmpp:microblog:0'
node_subtitle = None
node_title = None
else:
feed_id = sqlite.get_feed_id_by_entry_index(db_file, entry_id)
feed_id = feed_id[0]
feed_properties = sqlite.get_feed_properties(db_file, feed_id)
node_id = feed_properties[2]
node_title = feed_properties[3]
node_subtitle = feed_properties[5]
xep = None
iq_create_node = XmppPubsub.create_node(
self, jid_bare, node_id, xep, node_title, node_subtitle)
await XmppIQ.send(self, iq_create_node)
entry = sqlite.get_entry_properties(db_file, entry_id)
print('xmpp_pubsub_send_selected_entry',jid_bare)
print(node_id)
entry_dict = pack_entry_into_dict(db_file, entry)
node_item = create_rfc4287_entry(entry_dict)
entry_url = entry_dict['link']
item_id = hash_url_to_md5(entry_url)
iq_create_entry = XmppPubsub.create_entry(
self, jid_bare, node_id, item_id, node_item)
await XmppIQ.send(self, iq_create_entry)
await sqlite.mark_as_read(db_file, entry_id)
report = entry_url
return report
async def xmpp_pubsub_send_unread_items(self, jid_bare):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: jid_bare: {}'.format(function_name, jid_bare))
jid_file = jid_bare.replace('/', '_')
db_file = config.get_pathname_to_database(jid_file)
enabled = Config.get_setting_value(self.settings, jid_bare, 'enabled')
if enabled:
if num: counter = 0
report = {}
subscriptions = sqlite.get_active_feeds_url(db_file)
for url in subscriptions:
url = url[0]
if jid_bare == self.boundjid.bare:
node = 'urn:xmpp:microblog:0'
feed_title = None
feed_subtitle = None
else:
feed_id = sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
feed_title = sqlite.get_feed_title(db_file, feed_id)
feed_title = feed_title[0]
feed_subtitle = sqlite.get_feed_subtitle(db_file, feed_id)
feed_subtitle = feed_subtitle[0]
node = sqlite.get_feed_identifier(db_file, feed_id)
node = node[0]
xep = None
iq_create_node = XmppPubsub.create_node(
self, jid_bare, node, xep, feed_title, feed_subtitle)
await XmppIQ.send(self, iq_create_node)
entries = sqlite.get_unread_entries_of_feed(db_file, feed_id)
feed_properties = sqlite.get_feed_properties(db_file, feed_id)
feed_version = feed_properties[2]
print('xmpp_send_pubsub',jid_bare)
print(node)
# if num and counter < num:
report[url] = len(entries)
for entry in entries:
feed_entry = {'authors' : entry[3],
'content' : entry[6],
'content_type' : entry[7],
'contact' : entry[4],
'contributors' : entry[5],
'summary' : entry[8],
'summary_type' : entry[9],
'enclosures' : entry[13],
'language' : entry[10],
'link' : entry[2],
'links' : entry[11],
'published' : entry[15],
'tags' : entry[12],
'title' : entry[1],
'updated' : entry[16]}
iq_create_entry = XmppPubsub.create_entry(
self, jid_bare, node, feed_entry, feed_version)
await XmppIQ.send(self, iq_create_entry)
ix = entry[0]
await sqlite.mark_as_read(db_file, ix)
# counter += 1
# if num and counter > num: break
return report
report = {}
subscriptions = sqlite.get_active_feeds_url(db_file)
for url in subscriptions:
url = url[0]
if jid_bare == self.boundjid.bare:
node_id = 'urn:xmpp:microblog:0'
node_subtitle = None
node_title = None
else:
# feed_id = sqlite.get_feed_id(db_file, url)
# feed_id = feed_id[0]
# feed_properties = sqlite.get_feed_properties(db_file, feed_id)
# node_id = feed_properties[2]
# node_title = feed_properties[3]
# node_subtitle = feed_properties[5]
feed_id = sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
node_id = sqlite.get_feed_identifier(db_file, feed_id)
node_id = node_id[0]
node_title = sqlite.get_feed_title(db_file, feed_id)
node_title = node_title[0]
node_subtitle = sqlite.get_feed_subtitle(db_file, feed_id)
node_subtitle = node_subtitle[0]
xep = None
iq_create_node = XmppPubsub.create_node(
self, jid_bare, node_id, xep, node_title, node_subtitle)
await XmppIQ.send(self, iq_create_node)
entries = sqlite.get_unread_entries_of_feed(db_file, feed_id)
print('xmpp_pubsub_send_unread_items',jid_bare)
print(node_id)
report[url] = len(entries)
for entry in entries:
feed_entry = pack_entry_into_dict(db_file, entry)
node_entry = create_rfc4287_entry(feed_entry)
entry_url = feed_entry['link']
item_id = hash_url_to_md5(entry_url)
iq_create_entry = XmppPubsub.create_entry(
self, jid_bare, node_id, item_id, node_entry)
await XmppIQ.send(self, iq_create_entry)
ix = entry[0]
await sqlite.mark_as_read(db_file, ix)
return report
async def xmpp_send_message(self, jid, num=None):
def pack_entry_into_dict(db_file, entry):
entry_id = entry[0]
authors = sqlite.get_authors_by_entry_id(db_file, entry_id)
entry_authors = []
for author in authors:
entry_author = {
'name': author[2],
'email': author[3],
'url': author[4]}
entry_authors.extend([entry_author])
contributors = sqlite.get_contributors_by_entry_id(db_file, entry_id)
entry_contributors = []
for contributor in contributors:
entry_contributor = {
'name': contributor[2],
'email': contributor[3],
'url': contributor[4]}
entry_contributors.extend([entry_contributor])
links = sqlite.get_links_by_entry_id(db_file, entry_id)
entry_links = []
for link in links:
entry_link = {
'url': link[2],
'type': link[3],
'rel': link[4],
'size': link[5]}
entry_links.extend([entry_link])
tags = sqlite.get_tags_by_entry_id(db_file, entry_id)
entry_tags = []
for tag in tags:
entry_tag = {
'term': tag[2],
'scheme': tag[3],
'label': tag[4]}
entry_tags.extend([entry_tag])
contents = sqlite.get_contents_by_entry_id(db_file, entry_id)
entry_contents = []
for content in contents:
entry_content = {
'text': content[2],
'type': content[3],
'base': content[4],
'lang': content[5]}
entry_contents.extend([entry_content])
feed_entry = {
'authors' : entry_authors,
'category' : entry[10],
'comments' : entry[12],
'contents' : entry_contents,
'contributors' : entry_contributors,
'summary_base' : entry[9],
'summary_lang' : entry[7],
'summary_text' : entry[6],
'summary_type' : entry[8],
'enclosures' : entry[13],
'href' : entry[11],
'link' : entry[3],
'links' : entry_links,
'published' : entry[14],
'rating' : entry[13],
'tags' : entry_tags,
'title' : entry[4],
'title_type' : entry[3],
'updated' : entry[15]}
return feed_entry
# NOTE Warning: Entry might not have a link
# TODO Handle this situation gracefully
def hash_url_to_md5(url):
url_encoded = url.encode()
url_hashed = hashlib.md5(url_encoded)
url_digest = url_hashed.hexdigest()
return url_digest
def create_rfc4287_entry(feed_entry):
node_entry = ET.Element('entry')
node_entry.set('xmlns', 'http://www.w3.org/2005/Atom')
# Title
title = ET.SubElement(node_entry, 'title')
if feed_entry['title']:
if feed_entry['title_type']: title.set('type', feed_entry['title_type'])
title.text = feed_entry['title']
elif feed_entry['summary_text']:
if feed_entry['summary_type']: title.set('type', feed_entry['summary_type'])
title.text = feed_entry['summary_text']
# if feed_entry['summary_base']: title.set('base', feed_entry['summary_base'])
# if feed_entry['summary_lang']: title.set('lang', feed_entry['summary_lang'])
else:
title.text = feed_entry['published']
# Some feeds have identical content for contents and summary
# So if content is present, do not add summary
if feed_entry['contents']:
# Content
for feed_entry_content in feed_entry['contents']:
content = ET.SubElement(node_entry, 'content')
# if feed_entry_content['base']: content.set('base', feed_entry_content['base'])
if feed_entry_content['lang']: content.set('lang', feed_entry_content['lang'])
if feed_entry_content['type']: content.set('type', feed_entry_content['type'])
content.text = feed_entry_content['text']
else:
# Summary
summary = ET.SubElement(node_entry, 'summary') # TODO Try 'content'
# if feed_entry['summary_base']: summary.set('base', feed_entry['summary_base'])
# TODO Check realization of "lang"
if feed_entry['summary_type']: summary.set('type', feed_entry['summary_type'])
if feed_entry['summary_lang']: summary.set('lang', feed_entry['summary_lang'])
summary.text = feed_entry['summary_text']
# Authors
for feed_entry_author in feed_entry['authors']:
author = ET.SubElement(node_entry, 'author')
name = ET.SubElement(author, 'name')
name.text = feed_entry_author['name']
if feed_entry_author['url']:
uri = ET.SubElement(author, 'uri')
uri.text = feed_entry_author['url']
if feed_entry_author['email']:
email = ET.SubElement(author, 'email')
email.text = feed_entry_author['email']
# Contributors
for feed_entry_contributor in feed_entry['contributors']:
contributor = ET.SubElement(node_entry, 'author')
name = ET.SubElement(contributor, 'name')
name.text = feed_entry_contributor['name']
if feed_entry_contributor['url']:
uri = ET.SubElement(contributor, 'uri')
uri.text = feed_entry_contributor['url']
if feed_entry_contributor['email']:
email = ET.SubElement(contributor, 'email')
email.text = feed_entry_contributor['email']
# Category
category = ET.SubElement(node_entry, "category")
category.set('category', feed_entry['category'])
# Tags
for feed_entry_tag in feed_entry['tags']:
tag = ET.SubElement(node_entry, 'category')
tag.set('term', feed_entry_tag['term'])
# Link
link = ET.SubElement(node_entry, "link")
link.set('href', feed_entry['link'])
# Links
for feed_entry_link in feed_entry['links']:
link = ET.SubElement(node_entry, "link")
link.set('href', feed_entry_link['url'])
link.set('type', feed_entry_link['type'])
link.set('rel', feed_entry_link['rel'])
# Date updated
if feed_entry['updated']:
updated = ET.SubElement(node_entry, 'updated')
updated.text = feed_entry['updated']
# Date published
if feed_entry['published']:
published = ET.SubElement(node_entry, 'published')
published.text = feed_entry['published']
return node_entry
async def xmpp_chat_send_unread_items(self, jid, num=None):
"""
Send news items as messages.
@ -292,56 +490,54 @@ async def xmpp_send_message(self, jid, num=None):
logger.debug('{}: jid: {} num: {}'.format(function_name, jid, num))
jid_file = jid.replace('/', '_')
db_file = config.get_pathname_to_database(jid_file)
enabled = Config.get_setting_value(self.settings, jid, 'enabled')
if enabled:
show_media = Config.get_setting_value(self.settings, jid, 'media')
if not num:
num = Config.get_setting_value(self.settings, jid, 'quantum')
else:
num = int(num)
results = sqlite.get_unread_entries(db_file, num)
news_digest = ''
media = None
chat_type = await get_chat_type(self, jid)
for result in results:
ix = result[0]
title_e = result[1]
url = result[2]
summary = result[3]
feed_id = result[4]
date = result[5]
enclosure = sqlite.get_enclosure_by_entry_id(db_file, ix)
if enclosure: enclosure = enclosure[0]
title_f = sqlite.get_feed_title(db_file, feed_id)
title_f = title_f[0]
news_digest += await list_unread_entries(self, result, title_f, jid)
# print(db_file)
# print(result[0])
# breakpoint()
await sqlite.mark_as_read(db_file, ix)
show_media = Config.get_setting_value(self.settings, jid, 'media')
if not num:
num = Config.get_setting_value(self.settings, jid, 'quantum')
else:
num = int(num)
results = sqlite.get_unread_entries(db_file, num)
news_digest = ''
media = None
chat_type = await get_chat_type(self, jid)
for result in results:
ix = result[0]
title_e = result[1]
url = result[2]
summary = result[3]
feed_id = result[4]
date = result[5]
enclosure = sqlite.get_enclosure_by_entry_id(db_file, ix)
if enclosure: enclosure = enclosure[0]
title_f = sqlite.get_feed_title(db_file, feed_id)
title_f = title_f[0]
news_digest += await list_unread_entries(self, result, title_f, jid)
# print(db_file)
# print(result[0])
# breakpoint()
await sqlite.mark_as_read(db_file, ix)
# Find media
# if url.startswith("magnet:"):
# media = action.get_magnet(url)
# elif enclosure.startswith("magnet:"):
# media = action.get_magnet(enclosure)
# elif enclosure:
if show_media:
if enclosure:
media = enclosure
else:
media = await extract_image_from_html(url)
if media and news_digest:
# Send textual message
XmppMessage.send(self, jid, news_digest, chat_type)
news_digest = ''
# Send media
XmppMessage.send_oob(self, jid, media, chat_type)
media = None
if news_digest:
# Find media
# if url.startswith("magnet:"):
# media = action.get_magnet(url)
# elif enclosure.startswith("magnet:"):
# media = action.get_magnet(enclosure)
# elif enclosure:
if show_media:
if enclosure:
media = enclosure
else:
media = await extract_image_from_html(url)
if media and news_digest:
# Send textual message
XmppMessage.send(self, jid, news_digest, chat_type)
news_digest = ''
# Send media
XmppMessage.send_oob(self, jid, media, chat_type)
media = None
if news_digest:
XmppMessage.send(self, jid, news_digest, chat_type)
# TODO Add while loop to assure delivery.
# print(await current_time(), ">>> ACT send_message",jid)
# NOTE Do we need "if statement"? See NOTE at is_muc.
@ -807,25 +1003,25 @@ def export_to_opml(jid, filename, results):
function_name = sys._getframe().f_code.co_name
logger.debug('{} jid: {} filename: {}'
.format(function_name, jid, filename))
root = ET.Element("opml")
root = ETR.Element("opml")
root.set("version", "1.0")
head = ET.SubElement(root, "head")
ET.SubElement(head, "title").text = "{}".format(jid)
ET.SubElement(head, "description").text = (
head = ETR.SubElement(root, "head")
ETR.SubElement(head, "title").text = "{}".format(jid)
ETR.SubElement(head, "description").text = (
"Set of subscriptions exported by Slixfeed")
ET.SubElement(head, "generator").text = "Slixfeed"
ET.SubElement(head, "urlPublic").text = (
ETR.SubElement(head, "generator").text = "Slixfeed"
ETR.SubElement(head, "urlPublic").text = (
"https://gitgud.io/sjehuda/slixfeed")
time_stamp = dt.current_time()
ET.SubElement(head, "dateCreated").text = time_stamp
ET.SubElement(head, "dateModified").text = time_stamp
body = ET.SubElement(root, "body")
ETR.SubElement(head, "dateCreated").text = time_stamp
ETR.SubElement(head, "dateModified").text = time_stamp
body = ETR.SubElement(root, "body")
for result in results:
outline = ET.SubElement(body, "outline")
outline = ETR.SubElement(body, "outline")
outline.set("text", result[1])
outline.set("xmlUrl", result[2])
# outline.set("type", result[2])
tree = ET.ElementTree(root)
tree = ETR.ElementTree(root)
tree.write(filename)
@ -835,7 +1031,7 @@ async def import_opml(db_file, result):
.format(function_name, db_file))
if not result['error']:
document = result['content']
root = ET.fromstring(document)
root = ETR.fromstring(document)
before = sqlite.get_number_of_items(db_file, 'feeds_properties')
feeds = []
for child in root.findall(".//outline"):
@ -1789,6 +1985,9 @@ def generate_txt(text, filename):
with open(filename, 'w') as file:
file.write(text)
# This works too
# ''.join(xml.etree.ElementTree.fromstring(text).itertext())
def remove_html_tags(data):
function_name = sys._getframe().f_code.co_name
logger.debug('{}'.format(function_name))


@ -4,7 +4,7 @@ subtitle = "Slixfeed, slixmpp and more"
[[about]]
name = "Slixfeed"
about = "XMPP news bot"
desc = "XMPP news bot"
info = ["""
Slixfeed is a news broker bot for syndicated news which aims to be \
an easy to use and fully-featured news aggregating bot.
@ -34,7 +34,7 @@ url = "https://gitgud.io/sjehuda/slixfeed"
[[about]]
name = "slixmpp"
about = "Slixmpp XMPP Library"
desc = "XMPP library"
info = ["""
Slixmpp is an MIT licensed XMPP library for Python 3.7+. It is a fork of \
SleekXMPP.
@ -47,7 +47,7 @@ url = "https://codeberg.org/poezio/slixmpp"
[[about]]
name = "SleekXMPP"
about = "SleekXMPP XMPP Library"
desc = "XMPP library"
info = ["""
SleekXMPP is an MIT licensed XMPP library for Python 2.6/3.1+, and is \
featured in examples in the book XMPP: The Definitive Guide by Kevin Smith, \
@ -57,18 +57,22 @@ url = "https://codeberg.org/fritzy/SleekXMPP"
[[about]]
name = "XMPP"
about = "Previously known as Jabber"
desc = "Messaging protocol (also known as Jabber)"
info = ["""
XMPP is the Extensible Messaging and Presence Protocol, a set of open \
technologies for instant messaging, presence, multi-party chat, voice and \
video calls, collaboration, lightweight middleware, content syndication, and \
generalized routing of XML data.
video calls, collaboration, lightweight middleware, content syndication, \
and generalized routing of XML data.
XMPP was originally developed in the Jabber open-source community to \
provide an open, decentralized alternative to the closed instant messaging \
services at that time.
"""]
url = "https://xmpp.org/about"
[[about]]
name = "RSS Task Force"
about = "Swiss Organization"
desc = "Swiss organization"
info = ["""
The RSS Task Force (previously known as The Syndication Society) is an \
international organization headquartered in Switzerland.
@ -117,17 +121,17 @@ title = "Contributors"
subtitle = "People who have contributed to Slixfeed"
[[contributors]]
name = "grym from #python"
name = "grym from IRC channel #python"
role = "Contributor"
info = ["""
Correcting code structure to be better prepared for packaging 18c93083.
Correcting code structure to be better prepared for packaging (18c93083).
"""]
[[contributors]]
name = "Guus der Kinderen"
role = "XMPP server administrator"
info = ["""
Providing OpenFire server for testing various of features.
Providing an Openfire server for testing various features.
XEP-0060: Publish-Subscribe
XEP-0114: Jabber Component Protocol
@ -139,7 +143,7 @@ url = "http://goodbytes.im"
name = "Simone (roughnecks) Canaletti"
role = "XMPP server administrator"
info = ["""
Providing Prosody server and Movim instance for testing PubSub.
Providing a Prosody server and a Movim instance for testing PubSub.
XEP-0472: Pubsub Social Feed
"""]
@ -616,7 +620,7 @@ All your data belongs to us.
"""]
[[clients]]
title = "Recommended Clients"
title = "Clients"
subtitle = """
As a chat bot, Slixfeed works with any XMPP messenger, yet we have deemed it \
appropriate to list the software that works best with Slixfeed, namely those
@ -625,8 +629,14 @@ that provide support for XEP-0050: Ad-Hoc Commands.
[[clients]]
name = "Cheogram"
info = "XMPP client for mobile"
desc = "XMPP client for mobile"
info = ["""
The Cheogram Android app allows you to join a worldwide communication network. \
It especially focuses on features useful to users who want to contact those on \
other networks as well, such as SMS-enabled phone numbers.
"""]
url = "https://cheogram.com"
platform = "Android"
# [[clients]]
# name = "Conversations"
@ -635,8 +645,13 @@ url = "https://cheogram.com"
[[clients]]
name = "Converse"
info = "XMPP client for desktop and mobile"
desc = "XMPP client for desktop and mobile"
info = ["""
Converse is a free and open-source XMPP chat client that runs in a web browser \
or on your desktop.
"""]
url = "https://conversejs.org"
platform = "HTML (Web)"
# [[clients]]
# name = "Gajim"
@ -650,13 +665,31 @@ url = "https://conversejs.org"
[[clients]]
name = "monocles chat"
info = "XMPP client for mobile"
desc = "XMPP client for mobile"
info = """
monocles chat is a modern and secure Android XMPP chat client. Based on \
blabber.im and Conversations with a lot of changes and additional features \
to improve usability and security.
"""
url = "https://monocles.chat"
platform = "Android"
[[clients]]
name = "Movim"
info = "XMPP client for desktop and mobile"
desc = "XMPP client for desktop and mobile"
info = ["""
Movim is a social and chat platform that acts as a frontend for the XMPP network.
Once deployed Movim offers a complete social and chat experience for the \
decentralized XMPP network users. It can easily connect to several XMPP \
servers at the same time.
With a simple configuration it can also be restricted to one XMPP server \
and will then act as a powerful frontend for it. Movim is fully compatible \
with the most used XMPP servers such as ejabberd or Prosody.
"""]
url = "https://mov.im"
platform = "HTML (Web)"
# [[clients]]
# name = "Moxxy"
@ -665,18 +698,50 @@ url = "https://mov.im"
[[clients]]
name = "Poezio"
info = "XMPP client for console"
desc = "XMPP client for console"
info = ["""
Poezio is a free console XMPP client (the protocol on which the Jabber IM \
network is built).
Its goal is to let you connect very easily (no account creation needed) to \
the network and join various chatrooms, immediately. It tries to look like \
the most famous IRC clients (weechat, irssi, etc). Many commands are identical \
and you won't be lost if you already know these clients. Configuration can be \
made in a configuration file or directly from the client.
"""]
url = "https://poez.io"
platform = "FreeBSD and Linux"
[[clients]]
name = "Psi"
info = "XMPP client for desktop"
desc = "XMPP client for desktop"
info = ["""
Instant messaging as free and open as it should be.
Psi is a free instant messaging application designed for the XMPP network. \
Fast and lightweight, Psi is fully open-source and compatible with Windows, \
Linux, and macOS.
With Psi's full Unicode support and localizations, easy file transfers, \
customizable iconsets, and many other great features, you'll learn why users \
around the world are making the switch to free, open instant messaging.
"""]
url = "https://psi-im.org"
platform = "Any"
[[clients]]
name = "Psi+"
info = "XMPP client for desktop"
desc = "XMPP client for desktop"
info = ["""
In 2009, a Psi fork named Psi+ was started. The project's purpose is the \
implementation of new features and the writing of patches and plugins for \
transfer to upstream.
As of 2017, most of the active Psi+ developers have become official Psi \
developers, but Psi+ still has a number of unique features. From the \
developers' point of view, Psi+ is just a development branch of the Psi IM \
client, hosted in separate git repositories and developed with a \
rolling-release model.
"""]
url = "https://psi-plus.com"
platform = "Any"
# [[clients]]
# name = "Swift"
@ -689,7 +754,7 @@ url = "https://psi-plus.com"
# url = "https://yaxim.org"
[[services]]
title = "Recommended News Services"
title = "Online Services"
subtitle = ["""
Below are online services that extend the syndication experience by means \
of bookmarking and multimedia, and also enhance it by restoring access to \
@ -706,7 +771,7 @@ link = "https://www.fivefilters.org/feed-creator/"
[[services]]
name = "Kill the Newsletter"
info = "Kill the Newsletter converts email newsletters into Web feeds."
info = ["Kill the Newsletter converts email newsletters into Web feeds."]
link = "https://kill-the-newsletter.com"
[[services]]
@ -737,7 +802,7 @@ It's capable of generating RSS feeds from pretty much everything.
link = "https://docs.rsshub.app"
[[software]]
title = "Recommended News Software"
title = "News Software"
subtitle = ["""
Take back control of your news. With free, quality, software for your \
desktop, home and mobile devices.
@ -749,7 +814,7 @@ info = ["""
A self-hosted RSS reader, based on Dropwizard and React/TypeScript.
"""]
link = "https://commafeed.com"
os = "Any (HTML)"
platform = "HTML (Web)"
[[software]]
name = "FreshRSS"
@ -758,7 +823,7 @@ FreshRSS is a self-hosted RSS and Atom feed aggregator.
It is lightweight, easy to work with, powerful, and customizable.
"""]
link = "https://freshrss.org"
os = "Any (HTML)"
platform = "HTML (Web)"
[[software]]
name = "Liferea"
@ -769,7 +834,7 @@ it easy to organize and browse feeds. Its GUI is similar to a desktop \
mail/news client, with an embedded web browser.
"""]
link = "https://lzone.de/liferea/"
os = "FreeBSD and Linux"
platform = "FreeBSD and Linux"
[[software]]
name = "NetNewsWire"
@ -787,7 +852,7 @@ can switch to NetNewsWire to get news directly and more reliably from the \
sites you trust.
"""]
link = "https://netnewswire.com"
os = "MacOS"
platform = "MacOS"
[[software]]
name = "Newsboat"
Newsboat is an RSS/Atom feed reader for the text console. It is an actively \
maintained fork of Newsbeuter.
"""]
link = "https://newsboat.org"
os = "Any"
platform = "HTML (Web)"
[[software]]
name = "Spot-On"
@ -806,7 +871,7 @@ search and other forms of communications into a single communications \
orchestra.
"""]
link = "https://textbrowser.github.io/spot-on/"
os = "Any"
platform = "Any"
[[software]]
name = "Vienna RSS"
@ -816,7 +881,7 @@ help you make sense of the flood of information that is distributed via \
these formats today.
"""]
link = "https://vienna-rss.com"
os = "MacOS"
platform = "MacOS"
[[resources]]
title = "Useful Resources"
@ -824,15 +889,34 @@ subtitle = "Technologies which Slixfeed is based upon"
[[resources]]
name = "feedparser"
info = "Syndication Library"
info = "Syndication library"
desc = "Parse Atom and RSS feeds in Python."
url = "https://pythonhosted.org/feedparser"
[[resources]]
name = "Slixmpp"
info = "XMPP Library"
info = "XMPP library"
desc = """
Slixmpp is an MIT licensed XMPP library for Python 3.7+. It is a fork of \
SleekXMPP.
Slixmpp's goal is to rewrite only the core of the SleekXMPP library \
(the low-level socket handling, the timers, the event dispatching) \
in order to remove all threads.
"""
url = "https://slixmpp.readthedocs.io"
[[resources]]
name = "XMPP"
info = "Messaging Protocol"
info = "Messaging protocol (also known as Jabber)"
desc = """
XMPP is the Extensible Messaging and Presence Protocol, a set of open \
technologies for instant messaging, presence, multi-party chat, voice and \
video calls, collaboration, lightweight middleware, content syndication, \
and generalized routing of XML data.
XMPP was originally developed in the Jabber open-source community to \
provide an open, decentralized alternative to the closed instant messaging \
services at that time.
"""
url = "https://xmpp.org/about"


@ -160,6 +160,30 @@ name = "Κόμμα Πειρατών Ελλάδας Pirate party of Greece"
link = "https://www.pirateparty.gr/feed/"
tags = ["greece", "party", "pirate"]
[[feeds]]
lang = "en"
name = "Cycling Together with Fiona and Marc"
link = "https://pixelfed.social/users/cyclingtogether.atom"
tags = ["sports", "cycling", "adventure", "life"]
[[feeds]]
lang = "en"
name = "Lagrange Gemini Client"
link = "https://skyjake.fi/@lagrange.rss"
tags = ["gemini", "gopher", "browser", "telecommunication", "internet"]
[[feeds]]
lang = "en"
name = "[ngn.tf] | blog"
link = "https://api.ngn.tf/blog/feed.atom"
tags = ["computer", "service", "technology", "telecommunication", "xmpp"]
[[feeds]]
lang = "en"
name = "The SWORD Project"
link = "http://www.crosswire.org/sword/sword.rss.jsp"
tags = ["bible", "religion", "christianity", "history", "education", "life"]
[[feeds]]
lang = "en-au"
name = "Pirate Party Australia"
@ -268,11 +292,23 @@ name = "The Brexit Party"
link = "https://www.thebrexitparty.org/feed/"
tags = ["europe", "politics", "uk"]
[[feeds]]
lang = "en-us"
name = "4chan /diy/ - Do It Yourself"
link = "https://boards.4chan.org/diy/index.rss"
tags = ["design", "diy", "household"]
[[feeds]]
lang = "en-us"
name = "12bytes.org"
link = "https://12bytes.org/feed.xml"
tags = ["conspiracy", "health", "government", "war", "world"]
[[feeds]]
lang = "en-us"
name = "153 News - Videos Being Watched"
link = "https://153news.net/rss.php?mode=watching"
tags = ["news", "politics", "usa", "video"]
tags = ["europe", "news", "politics", "usa", "video", "world"]
[[feeds]]
lang = "en-us"
@ -290,7 +326,7 @@ tags = ["lifestyle", "men"]
lang = "en-us"
name = "BlackListed News"
link = "https://www.blacklistednews.com/rss.php"
tags = ["news", "politics", "usa", "world"]
tags = ["conspiracy", "health", "government", "news", "politics", "usa", "world"]
[[feeds]]
lang = "en-us"
@ -518,7 +554,7 @@ tags = ["gemini", "internet"]
lang = "en-us"
name = "Public Intelligence Blog"
link = "https://phibetaiota.net/feed/"
tags = ["cia", "conspiracy", "health", "government", "war"]
tags = ["cia", "conspiracy", "health", "government", "war", "world"]
[[feeds]]
lang = "en-us"
@ -694,6 +730,12 @@ name = "Disroot Blog"
link = "https://disroot.org/es/blog.atom"
tags = ["decentralization", "privacy"]
[[feeds]]
lang = "ch-fr"
name = "Demoniak Network"
link = "https://demoniak.ch/index.xml"
tags = ["computer", "technology"]
[[feeds]]
lang = "fr-fr"
name = "Agate Blue"


@ -920,7 +920,7 @@ def get_feed_properties(db_file, feed_id):
"""
SELECT *
FROM feeds_properties
WHERE feed_id = ?
WHERE id = :feed_id
"""
)
par = (feed_id,)
@ -1406,20 +1406,20 @@ def get_entries_rejected(db_file, num):
return result
def get_enclosure_by_entry_id(db_file, ix):
def get_enclosure_by_entry_id(db_file, entry_id):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: db_file: {} ix: {}'
.format(function_name, db_file, ix))
logger.debug('{}: db_file: {} entry_id: {}'
.format(function_name, db_file, entry_id))
with create_connection(db_file) as conn:
cur = conn.cursor()
sql = (
"""
SELECT url
FROM entries_properties_links
WHERE entry_id = :ix AND rel = "enclosure"
WHERE entry_id = :entry_id AND rel = "enclosure"
"""
)
par = (ix,)
par = (entry_id,)
result = cur.execute(sql, par).fetchone()
return result
@ -1831,6 +1831,24 @@ async def set_feed_title(db_file, feed_id, title):
cur.execute(sql, par)
def get_entry_properties(db_file, ix):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: db_file: {} ix: {}'
.format(function_name, db_file, ix))
with create_connection(db_file) as conn:
cur = conn.cursor()
sql = (
"""
SELECT *
FROM entries_properties
WHERE id = :ix
"""
)
par = (ix,)
title = cur.execute(sql, par).fetchone()
return title
def get_entry_title(db_file, ix):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: db_file: {} ix: {}'
@ -2516,6 +2534,98 @@ async def maintain_archive(db_file, limit):
cur.execute(sql, par)
def get_authors_by_entry_id(db_file, entry_id):
function_name = sys._getframe().f_code.co_name
logger.debug('{} db_file: {} entry_id: {}'
.format(function_name, db_file, entry_id))
with create_connection(db_file) as conn:
cur = conn.cursor()
sql = (
"""
SELECT *
FROM entries_properties_authors
WHERE entry_id = :entry_id
ORDER BY name DESC
"""
)
par = (entry_id,)
result = cur.execute(sql, par).fetchall()
return result
def get_contributors_by_entry_id(db_file, entry_id):
function_name = sys._getframe().f_code.co_name
logger.debug('{} db_file: {} entry_id: {}'
.format(function_name, db_file, entry_id))
with create_connection(db_file) as conn:
cur = conn.cursor()
sql = (
"""
SELECT *
FROM entries_properties_contributors
WHERE entry_id = :entry_id
ORDER BY name DESC
"""
)
par = (entry_id,)
result = cur.execute(sql, par).fetchall()
return result
def get_links_by_entry_id(db_file, entry_id):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: db_file: {} entry_id: {}'
.format(function_name, db_file, entry_id))
with create_connection(db_file) as conn:
cur = conn.cursor()
sql = (
"""
SELECT *
FROM entries_properties_links
WHERE entry_id = :entry_id
"""
)
par = (entry_id,)
result = cur.execute(sql, par).fetchall()
return result
def get_tags_by_entry_id(db_file, entry_id):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: db_file: {} entry_id: {}'
.format(function_name, db_file, entry_id))
with create_connection(db_file) as conn:
cur = conn.cursor()
sql = (
"""
SELECT *
FROM entries_properties_tags
WHERE entry_id = :entry_id
"""
)
par = (entry_id,)
result = cur.execute(sql, par).fetchall()
return result
def get_contents_by_entry_id(db_file, entry_id):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: db_file: {} entry_id: {}'
.format(function_name, db_file, entry_id))
with create_connection(db_file) as conn:
cur = conn.cursor()
sql = (
"""
SELECT *
FROM entries_properties_contents
WHERE entry_id = :entry_id
"""
)
par = (entry_id,)
result = cur.execute(sql, par).fetchall()
return result
# TODO Move entries that don't exist into table archive.
# NOTE Entries that are read from archive are deleted.
# NOTE Unlike entries from table entries, entries from
@ -2538,7 +2648,7 @@ def get_entries_of_feed(db_file, feed_id):
cur = conn.cursor()
sql = (
"""
SELECT id, title, link, identifier, published, read
SELECT id, title, link, identifier, published
FROM entries_properties
WHERE feed_id = ?
ORDER BY published DESC


@ -170,7 +170,7 @@ async def task_publish(self, jid_bare):
if jid_bare not in self.settings:
Config.add_settings_jid(self.settings, jid_bare, db_file)
while True:
await action.xmpp_send_pubsub(self, jid_bare)
await action.xmpp_pubsub_send_unread_items(self, jid_bare)
await asyncio.sleep(60 * 180)
@ -260,7 +260,7 @@ async def task_message(self, jid_bare):
await sqlite.update_last_update_time(db_file)
else:
await sqlite.set_last_update_time(db_file)
await action.xmpp_send_message(self, jid_bare)
await action.xmpp_chat_send_unread_items(self, jid_bare)
refresh_task(self, jid_bare, task_message, 'interval')
await start_tasks_xmpp_chat(self, jid_bare, ['status'])


@ -1,2 +1,2 @@
__version__ = '0.1.57'
__version_info__ = (0, 1, 57)
__version__ = '0.1.58'
__version_info__ = (0, 1, 58)


@ -21,7 +21,7 @@ class XmppBookmark:
return conferences
async def properties(self, jid):
async def get_bookmark_properties(self, jid):
result = await self.plugin['xep_0048'].get_bookmarks()
groupchats = result['private']['bookmarks']['conferences']
for groupchat in groupchats:

slixfeed/xmpp/chat.py (new file, 1569 lines added)

File diff suppressed because it is too large.

File diff suppressed because it is too large.


@ -42,7 +42,7 @@ from slixfeed.xmpp.connect import XmppConnect
from slixfeed.xmpp.muc import XmppGroupchat
from slixfeed.xmpp.iq import XmppIQ
from slixfeed.xmpp.message import XmppMessage
import slixfeed.xmpp.process as process
from slixfeed.xmpp.chat import Chat
import slixfeed.xmpp.profile as profile
from slixfeed.xmpp.publish import XmppPubsub
# from slixfeed.xmpp.roster import XmppRoster
@ -330,7 +330,7 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
self.pending_tasks[jid_bare] = {}
# if jid_full not in self.pending_tasks:
# self.pending_tasks[jid_full] = {}
await process.message(self, message)
await Chat.process_message(self, message)
# chat_type = message["type"]
# message_body = message["body"]
# message_reply = message.reply

File diff suppressed because it is too large.


@ -7,7 +7,6 @@ Functions create_node and create_entry are derived from project atomtopubsub.
"""
import hashlib
import slixmpp.plugins.xep_0060.stanza.pubsub as pubsub
from slixmpp.xmlstream import ET
@ -32,6 +31,33 @@ class XmppPubsub:
return results
async def get_node_properties(self, jid, node):
config = await self.plugin['xep_0060'].get_node_config(jid, node)
subscriptions = await self.plugin['xep_0060'].get_node_subscriptions(jid, node)
affiliations = await self.plugin['xep_0060'].get_node_affiliations(jid, node)
properties = {'config': config,
'subscriptions': subscriptions,
'affiliations': affiliations}
breakpoint()
return properties
async def get_nodes(self, jid):
nodes = await self.plugin['xep_0060'].get_nodes(jid)
# 'self' would lead to slixmpp.jid.InvalidJID: idna validation failed:
return nodes
async def get_item(self, jid, node, item_id):
item = await self.plugin['xep_0060'].get_item(jid, node, item_id)
return item
async def get_items(self, jid, node):
items = await self.plugin['xep_0060'].get_items(jid, node)
return items
def delete_node(self, jid, node):
jid_from = str(self.boundjid) if self.is_component else None
self.plugin['xep_0060'].delete_node(jid, node, ifrom=jid_from)
@ -87,9 +113,11 @@ class XmppPubsub:
return iq
def create_entry(self, jid, node, entry, version):
# TODO Consider creating a separate function called "create_atom_entry"
# or "create_rfc4287_entry" for anything related to variable "node_entry".
def create_entry(self, jid, node_id, item_id, node_item):
iq = self.Iq(stype="set", sto=jid)
iq['pubsub']['publish']['node'] = node
iq['pubsub']['publish']['node'] = node_id
item = pubsub.Item()
@ -102,33 +130,8 @@ class XmppPubsub:
# cross reference, and namely - in another project to utilize PubSub as
# links sharing system (see del.icio.us) - to share node entries.
# NOTE Warning: Entry might not have a link
# TODO Handle situation error
url_encoded = entry['link'].encode()
url_hashed = hashlib.md5(url_encoded)
url_digest = url_hashed.hexdigest()
item['id'] = url_digest
node_entry = ET.Element("entry")
node_entry.set('xmlns', 'http://www.w3.org/2005/Atom')
title = ET.SubElement(node_entry, "title")
title.text = entry['title']
updated = ET.SubElement(node_entry, "updated")
updated.text = entry['updated']
# Content
content = ET.SubElement(node_entry, "content")
content.set('type', 'text/html')
content.text = entry['description']
# Links
link = ET.SubElement(node_entry, "link")
link.set('href', entry['link'])
item['payload'] = node_entry
item['id'] = item_id
item['payload'] = node_item
iq['pubsub']['publish'].append(item)
return iq