Fix many issues amidst change of table structure

This commit is contained in:
Schimon Jehudah 2024-02-04 17:08:12 +00:00
parent f683e11c4a
commit c7fa2496a8
21 changed files with 1419 additions and 1629 deletions

View file

@ -100,13 +100,12 @@ import os
#import slixfeed.irc
#import slixfeed.matrix
from slixfeed.config import get_default_config_directory, get_value
import slixfeed.config as config
import socks
import socket
xmpp_type = get_value(
"accounts", "XMPP", "type")
xmpp_type = config.get_value("accounts", "XMPP", "type")
match xmpp_type:
case "client":
@ -122,18 +121,21 @@ class JabberComponent:
xmpp.register_plugin('xep_0030') # Service Discovery
xmpp.register_plugin('xep_0045') # Multi-User Chat
# xmpp.register_plugin('xep_0048') # Bookmarks
xmpp.register_plugin('xep_0050') # Ad-Hoc Commands
xmpp.register_plugin('xep_0054') # vcard-temp
xmpp.register_plugin('xep_0060') # Publish-Subscribe
# xmpp.register_plugin('xep_0065') # SOCKS5 Bytestreams
xmpp.register_plugin('xep_0066') # Out of Band Data
xmpp.register_plugin('xep_0071') # XHTML-IM
xmpp.register_plugin('xep_0084') # User Avatar
# xmpp.register_plugin('xep_0085') # Chat State Notifications
xmpp.register_plugin('xep_0085') # Chat State Notifications
xmpp.register_plugin('xep_0115') # Entity Capabilities
xmpp.register_plugin('xep_0153') # vCard-Based Avatars
xmpp.register_plugin('xep_0199', {'keepalive': True}) # XMPP Ping
xmpp.register_plugin('xep_0249') # Multi-User Chat
xmpp.register_plugin('xep_0249') # Direct MUC Invitations
xmpp.register_plugin('xep_0363') # HTTP File Upload
xmpp.register_plugin('xep_0402') # PEP Native Bookmarks
xmpp.register_plugin('xep_0444') # Message Reactions
xmpp.connect()
xmpp.process()
@ -145,22 +147,25 @@ class JabberClient:
xmpp.register_plugin('xep_0030') # Service Discovery
xmpp.register_plugin('xep_0045') # Multi-User Chat
xmpp.register_plugin('xep_0048') # Bookmarks
xmpp.register_plugin('xep_0050') # Ad-Hoc Commands
xmpp.register_plugin('xep_0054') # vcard-temp
xmpp.register_plugin('xep_0060') # Publish-Subscribe
# xmpp.register_plugin('xep_0065') # SOCKS5 Bytestreams
xmpp.register_plugin('xep_0066') # Out of Band Data
xmpp.register_plugin('xep_0071') # XHTML-IM
xmpp.register_plugin('xep_0084') # User Avatar
# xmpp.register_plugin('xep_0085') # Chat State Notifications
xmpp.register_plugin('xep_0085') # Chat State Notifications
xmpp.register_plugin('xep_0115') # Entity Capabilities
xmpp.register_plugin('xep_0153') # vCard-Based Avatars
xmpp.register_plugin('xep_0199', {'keepalive': True}) # XMPP Ping
xmpp.register_plugin('xep_0249') # Multi-User Chat
xmpp.register_plugin('xep_0249') # Direct MUC Invitations
xmpp.register_plugin('xep_0363') # HTTP File Upload
xmpp.register_plugin('xep_0402') # PEP Native Bookmarks
xmpp.register_plugin('xep_0444') # Message Reactions
# proxy_enabled = get_value("accounts", "XMPP", "proxy_enabled")
# proxy_enabled = config.get_value("accounts", "XMPP", "proxy_enabled")
# if proxy_enabled == '1':
# values = get_value("accounts", "XMPP", [
# values = config.get_value("accounts", "XMPP", [
# "proxy_host",
# "proxy_port",
# "proxy_username",
@ -179,7 +184,7 @@ class JabberClient:
# Connect to the XMPP server and start processing XMPP stanzas.
address = get_value(
address = config.get_value(
"accounts", "XMPP Client", ["hostname", "port"])
if address[0] and address[1]:
xmpp.connect(tuple(address))
@ -190,11 +195,11 @@ class JabberClient:
def main():
config_dir = get_default_config_directory()
config_dir = config.get_default_config_directory()
logging.info("Reading configuration from {}".format(config_dir))
print("Reading configuration from {}".format(config_dir))
values = get_value(
values = config.get_value(
"accounts", "XMPP Proxy", ["socks5_host", "socks5_port"])
if values[0] and values[1]:
host = values[0]
@ -208,37 +213,30 @@ def main():
parser = ArgumentParser(description=Slixfeed.__doc__)
# Output verbosity options.
parser.add_argument(
"-q", "--quiet", help="set logging to ERROR",
action="store_const", dest="loglevel",
const=logging.ERROR, default=logging.INFO)
parser.add_argument(
"-d", "--debug", help="set logging to DEBUG",
action="store_const", dest="loglevel",
const=logging.DEBUG, default=logging.INFO)
parser.add_argument("-q", "--quiet", help="set logging to ERROR",
action="store_const", dest="loglevel",
const=logging.ERROR, default=logging.INFO)
parser.add_argument("-d", "--debug", help="set logging to DEBUG",
action="store_const", dest="loglevel",
const=logging.DEBUG, default=logging.INFO)
# JID and password options.
parser.add_argument(
"-j", "--jid", dest="jid", help="Jabber ID")
parser.add_argument(
"-p", "--password", dest="password", help="Password of JID")
parser.add_argument(
"-a", "--alias", dest="alias", help="Display name")
parser.add_argument(
"-n", "--hostname", dest="hostname", help="Hostname")
parser.add_argument(
"-o", "--port", dest="port", help="Port number")
parser.add_argument("-j", "--jid", help="Jabber ID", dest="jid")
parser.add_argument("-p", "--password", help="Password of JID",
dest="password")
parser.add_argument("-a", "--alias", help="Display name", dest="alias")
parser.add_argument("-n", "--hostname", help="Hostname", dest="hostname")
parser.add_argument("-o", "--port", help="Port number", dest="port")
args = parser.parse_args()
# Setup logging.
logging.basicConfig(
level=args.loglevel, format='%(levelname)-8s %(message)s')
logging.basicConfig(level=args.loglevel,
format='%(levelname)-8s %(message)s')
# Try configuration file
values = get_value(
"accounts", "XMPP Client", [
"alias", "jid", "password", "hostname", "port"])
values = config.get_value("accounts", "XMPP Client",
["alias", "jid", "password", "hostname", "port"])
alias = values[0]
jid = values[1]
password = values[2]

View file

@ -89,14 +89,16 @@ def manual(filename, section=None, command=None):
if command and section:
try:
cmd_list = cmds[section][command]
except KeyError:
except KeyError as e:
logging.error(str(e))
cmd_list = None
elif section:
try:
cmd_list = []
for cmd in cmds[section]:
cmd_list.extend([cmd])
except KeyError:
except KeyError as e:
logging.error('KeyError:' + str(e))
cmd_list = None
else:
cmd_list = []
@ -305,6 +307,7 @@ async def get_setting_value(db_file, key):
await sqlite.get_settings_value(db_file, key) or
config.get_value("settings", "Settings", key)
)
value = int(value)
return value
@ -529,16 +532,15 @@ async def add_feed(db_file, url):
status_code=status_code,
updated=updated
)
await scan(
db_file, url)
await scan(db_file, url)
old = await get_setting_value(db_file, "old")
if not old:
await sqlite.mark_feed_as_read(
db_file, url)
response = (
"> {}\nNews source \"{}\" has been "
"added to subscription list."
).format(url, title)
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
await sqlite.mark_feed_as_read(db_file, feed_id)
response = ('> {}\nNews source "{}" has been '
'added to subscription list.'
.format(url, title))
break
# NOTE This elif statement be unnecessary
# when feedparser be supporting json feed.
@ -580,12 +582,12 @@ async def add_feed(db_file, url):
db_file, url)
old = await get_setting_value(db_file, "old")
if not old:
await sqlite.mark_feed_as_read(
db_file, url)
response = (
"> {}\nNews source \"{}\" has been "
"added to subscription list."
).format(url, title)
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
await sqlite.mark_feed_as_read(db_file, feed_id)
response = ('> {}\nNews source "{}" has been '
'added to subscription list.'
.format(url, title))
break
else:
result = await crawl.probe_page(
@ -596,18 +598,15 @@ async def add_feed(db_file, url):
else:
url = result[0]
else:
response = (
"> {}\nFailed to load URL. Reason: {}"
).format(url, status_code)
response = ('> {}\nFailed to load URL. Reason: {}'
.format(url, status_code))
break
else:
ix = exist[0]
name = exist[1]
response = (
"> {}\nNews source \"{}\" is already "
"listed in the subscription list at "
"index {}".format(url, name, ix)
)
response = ('> {}\nNews source "{}" is already '
'listed in the subscription list at '
'index {}'.format(url, name, ix))
break
return response
@ -638,6 +637,7 @@ async def scan_json(db_file, url):
db_file, url, feed)
try:
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
# await sqlite.update_feed_validity(
# db_file, feed_id, valid)
if "date_published" in feed.keys():
@ -649,6 +649,7 @@ async def scan_json(db_file, url):
else:
updated = ''
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
await sqlite.update_feed_properties(
db_file, feed_id, len(feed["items"]), updated)
# await update_feed_status
@ -680,15 +681,20 @@ async def scan_json(db_file, url):
title = entry["title"] if "title" in entry.keys() else date
entry_id = entry["id"] if "id" in entry.keys() else link
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
exist = await sqlite.check_entry_exist(
db_file, feed_id, entry_id=entry_id,
title=title, link=link, date=date)
if not exist:
summary = entry["summary"] if "summary" in entry.keys() else ''
if not summary:
summary = entry["content_html"] if "content_html" in entry.keys() else ''
summary = (entry["content_html"]
if "content_html" in entry.keys()
else '')
if not summary:
summary = entry["content_text"] if "content_text" in entry.keys() else ''
summary = (entry["content_text"]
if "content_text" in entry.keys()
else '')
read_status = 0
pathname = urlsplit(link).path
string = (
@ -725,12 +731,12 @@ async def scan_json(db_file, url):
media_link = trim_url(media_link)
break
except:
logging.error(
"KeyError: 'url'\n"
"Missing 'url' attribute for {}".format(url))
logging.info(
"Continue scanning for next potential "
"enclosure of {}".format(link))
logging.error('KeyError: "url"\n'
'Missing "url" attribute for {}'
.format(url))
logging.info('Continue scanning for next '
'potential enclosure of {}'
.format(link))
entry = {
"title": title,
"link": link,
@ -746,6 +752,7 @@ async def scan_json(db_file, url):
# await sqlite.set_date(db_file, url)
if len(new_entries):
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
await sqlite.add_entries_and_update_timestamp(
db_file, feed_id, new_entries)
@ -808,9 +815,8 @@ async def view_feed(url):
else:
url = result[0]
else:
response = (
"> {}\nFailed to load URL. Reason: {}"
).format(url, status)
response = ('> {}\nFailed to load URL. Reason: {}'
.format(url, status))
break
return response
@ -877,9 +883,8 @@ async def view_entry(url, num):
else:
url = result[0]
else:
response = (
"> {}\nFailed to load URL. Reason: {}"
).format(url, status)
response = ('> {}\nFailed to load URL. Reason: {}'
.format(url, status))
break
return response
@ -921,6 +926,7 @@ async def scan(db_file, url):
else:
valid = 1
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
await sqlite.update_feed_validity(
db_file, feed_id, valid)
if "updated_parsed" in feed["feed"].keys():
@ -932,6 +938,7 @@ async def scan(db_file, url):
else:
updated = ''
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
await sqlite.update_feed_properties(
db_file, feed_id, len(feed["entries"]), updated)
# await update_feed_status
@ -963,6 +970,7 @@ async def scan(db_file, url):
title = entry.title if entry.has_key("title") else date
entry_id = entry.id if entry.has_key("id") else link
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
exist = await sqlite.check_entry_exist(
db_file, feed_id, entry_id=entry_id,
title=title, link=link, date=date)
@ -986,8 +994,8 @@ async def scan(db_file, url):
"Keyword : {}".format(
link, reject_list))
if isinstance(date, int):
logging.error(
"Variable 'date' is int: {}".format(date))
logging.error('Variable "date" is int: {}'
.format(date))
media_link = ''
if entry.has_key("links"):
for e_link in entry.links:
@ -1006,12 +1014,12 @@ async def scan(db_file, url):
media_link = trim_url(media_link)
break
except:
logging.error(
"KeyError: 'href'\n"
"Missing 'href' attribute for {}".format(url))
logging.info(
"Continue scanning for next potential "
"enclosure of {}".format(link))
logging.error('KeyError: "href"\n'
'Missing "href" attribute for {}'
.format(url))
logging.info('Continue scanning for next '
'potential enclosure of {}'
.format(link))
entry = {
"title": title,
"link": link,
@ -1027,6 +1035,7 @@ async def scan(db_file, url):
# await sqlite.set_date(db_file, url)
if len(new_entries):
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
await sqlite.add_entries_and_update_timestamp(
db_file, feed_id, new_entries)
@ -1048,8 +1057,7 @@ def generate_document(data, url, ext, filename):
content = document.summary()
except:
content = data
logging.warning(
"Check that package readability is installed.")
logging.warning('Check that package readability is installed.')
match ext:
case "epub":
error = generate_epub(content, filename)
@ -1064,11 +1072,9 @@ def generate_document(data, url, ext, filename):
try:
generate_markdown(content, filename)
except:
logging.warning(
"Check that package html2text is installed, "
"or try again.")
error = (
"Package html2text was not found.")
logging.warning('Check that package html2text '
'is installed, or try again.')
error = 'Package html2text was not found.'
case "pdf":
error = generate_pdf(content, filename)
if error:
@ -1093,6 +1099,7 @@ def generate_document(data, url, ext, filename):
async def extract_image_from_feed(db_file, feed_id, url):
feed_url = sqlite.get_feed_url(db_file, feed_id)
feed_url = feed_url[0]
result = await fetch.http(feed_url)
document = result[0]
if document:
@ -1107,8 +1114,7 @@ async def extract_image_from_feed(db_file, feed_id, url):
return image_url
except:
logging.error(url)
logging.error(
"AttributeError: object has no attribute 'link'")
logging.error('AttributeError: object has no attribute "link"')
async def extract_image_from_html(url):
@ -1120,8 +1126,7 @@ async def extract_image_from_html(url):
content = document.summary()
except:
content = data
logging.warning(
"Check that package readability is installed.")
logging.warning('Check that package readability is installed.')
tree = html.fromstring(content)
# TODO Exclude banners, class="share" links etc.
images = tree.xpath(
@ -1209,9 +1214,8 @@ async def get_magnet(link):
filename = queries["dn"][0]
checksum = query_xt[len("urn:btih:"):]
torrent = await fetch.magnet(link)
logging.debug(
"Attempting to retrieve {} ({})".format(
filename, checksum))
logging.debug('Attempting to retrieve {} ({})'
.format(filename, checksum))
if not torrent:
logging.debug(
"Attempting to retrieve {} from HTTP caching service".format(
@ -1245,6 +1249,7 @@ async def remove_nonexistent_entries(db_file, url, feed):
Parsed feed document.
"""
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
items = await sqlite.get_entries_of_feed(db_file, feed_id)
entries = feed.entries
for item in items:
@ -1350,6 +1355,7 @@ async def remove_nonexistent_entries_json(db_file, url, feed):
Parsed feed document.
"""
feed_id = await sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
items = await sqlite.get_entries_of_feed(db_file, feed_id)
entries = feed["items"]
for item in items:

View file

@ -1,7 +1,12 @@
about = """
Slixfeed
A Syndication bot for the XMPP communication network.
Slixfeed aims to be an easy-to-use and fully-featured news \
aggregator bot for XMPP. It provides a convenient access to Blogs, \
Fediverse and News websites along with filtering functionality.
aggregator bot for XMPP. It provides convenient access to Blogs, \
News websites and even Fediverse instances, along with filtering \
functionality.
Slixfeed is primarily designed for XMPP (aka Jabber). \
Visit https://xmpp.org/software/ for more information.
@ -19,17 +24,30 @@ Supported filetypes: Atom, JSON, RDF, RSS and XML.
"""
license = """
Slixfeed is free software; you can redistribute it and/or \
modify it under the terms of the MIT License.
Copyright 2022 - 2024 Schimon Zackary Jehudah
Slixfeed is distributed in the hope that it will be useful, \
but WITHOUT ANY WARRANTY; without even the implied warranty of \
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the \
MIT License for more details.
Permission is hereby granted, free of charge, to any person obtaining \
a copy of this software and associated documentation files (the \
Software), to deal in the Software without restriction, including \
without limitation the rights to use, copy, modify, merge, publish, \
distribute, sublicense, and/or sell copies of the Software, and to \
permit persons to whom the Software is furnished to do so, subject to \
the following conditions:
The above copyright notice and this permission notice shall be included \
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS \
OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, \
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL \
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR \
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, \
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER \
DEALINGS IN THE SOFTWARE.
"""
note = """
You can run Slixfeed as a client on your own computer, server, \
You can run Slixfeed as a client, from your own computer, server, \
and even on a Linux phone (i.e. Droidian, Kupfer, Mobian, NixOS, \
postmarketOS). You can even use Termux.
@ -44,11 +62,11 @@ No operator was specified for this instance.
platforms = """
Supported platforms: XMPP
Platforms to be added in future: Briar, Email, IRC, Matrix, MQTT, Tox.
For the best experience, we recommend to use XMPP.
For an ideal experience, we recommend using XMPP.
"""
privacy = """
All your data belongs to us!
All your data belongs to us.
"""
protocols = """
@ -67,11 +85,19 @@ https://pythonhosted.org/feedparser
"""
terms = """
You may not abuse this service.
Slixfeed is free software; you can redistribute it and/or \
modify it under the terms of the MIT License.
Slixfeed is distributed in the hope that it will be useful, \
but WITHOUT ANY WARRANTY; without even the implied warranty of \
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the \
MIT License for more details.
https://gitgud.io/sjehuda/slixfeed
"""
thanks = """
Alixander Court (Utah), \
Alixander Court (alixandercourt.com, Utah), \
Christian Dersch (SalixOS), \
Cyrille Pontvieux (SalixOS, France), \
Denis Fomin (Gajim, Russia), \

View file

@ -1,523 +1,492 @@
[proxies.anonymousoverflow]
clearnet = [
"https://ao.phreedom.club",
"https://overflow.hostux.net",
"https://ao.foss.wtf",
"https://overflow.adminforge.de",
"https://overflow.lunar.icu",
"https://anonymousoverflow.esmailelbob.xyz",
"https://overflow.smnz.de",
"https://ao.vern.cc",
"https://overflow.777.tf",
"https://code.whatever.social",
"https://stackoverflow.vern.cc",
"https://anonymousoverflow.vern.cc",
"https://ao.bloatcat.tk",
"https://se.chaotic.ninja",
"https://anonymousoverflow.privacyfucking.rocks",
"https://overflow.projectsegfau.lt",
"https://anonoverflow.frontendfriendly.xyz",
"https://overflow.fascinated.cc",
"https://ao.phreedom.club",
"https://overflow.hostux.net",
"https://ao.foss.wtf",
"https://overflow.adminforge.de",
"https://overflow.lunar.icu",
"https://anonymousoverflow.esmailelbob.xyz",
"https://overflow.smnz.de",
"https://ao.vern.cc",
"https://overflow.777.tf",
"https://code.whatever.social",
"https://stackoverflow.vern.cc",
"https://anonymousoverflow.vern.cc",
"https://ao.bloatcat.tk",
"https://se.chaotic.ninja",
"https://anonymousoverflow.privacyfucking.rocks",
"https://overflow.projectsegfau.lt",
"https://anonoverflow.frontendfriendly.xyz",
"https://overflow.fascinated.cc",
]
i2p = [
"http://vernmzgraj6aaoafmehupvtkkynpaa67rxcdj2kinwiy6konn6rq.b32.i2p",
"http://vernmzgraj6aaoafmehupvtkkynpaa67rxcdj2kinwiy6konn6rq.b32.i2p",
]
loki = []
tor = [
"http://anonymousoverflow.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
"http://ao.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion",
"http://anonymousoverflow.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
"http://ao.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion",
]
yggdrasil = [
"http://[301:f69c:2017:b6b8::8]",
"http://[301:f69c:2017:b6b8::8]",
]
hostname = [
"stackoverflow.com",
"stackoverflow.com",
]
type = [
"link",
"link",
]
[proxies.dumb]
clearnet = [
"https://dumb.privacydev.net",
"https://dm.vern.cc",
"https://dumb.lunar.icu",
"https://dumb.esmailelbob.xyz",
"https://dumb.privacydev.net",
"https://dm.vern.cc",
"https://dumb.lunar.icu",
"https://dumb.esmailelbob.xyz",
]
hostname = [
"genius.com",
"genius.com",
]
type = [
"link",
"link",
]
[proxies.invidious]
clearnet = [
"https://incogtube.com",
"https://vid.puffyan.us",
"https://yt.artemislena.eu",
"https://invidious.snopyta.org",
"https://youtube.076.ne.jp",
"https://invidious.osi.kr",
"https://invidious-us.kavin.rocks",
"https://inv.cthd.icu",
"https://invidious.namazso.eu",
"https://yewtu.be",
"https://invidio.xamh.de",
"https://invidious.kavin.rocks",
"https://monocles.live",
"https://inv.riverside.rocks",
"https://invidious.lunar.icu",
"https://y.com.sb",
"https://inv.bp.projectsegfau.lt",
"https://invidious.flokinet.to",
"https://invidious.sethforprivacy.com",
"https://invidious.esmailelbob.xyz",
"https://ytb.trom.tf",
"https://invidious.domain.glass",
"https://tube.cthd.icu",
"https://inv.vern.cc",
"https://invidious.garudalinux.org",
"https://youtube.owacon.moe",
"https://invidious.tinfoil-hat.net",
"https://iv.melmac.space",
"https://invidious.tiekoetter.com",
"https://invidious.baczek.me",
"https://invidious.no-logs.com",
"https://invidious.0011.lt",
"https://yt.funami.tech",
"https://inv.tux.pizza",
"https://vid.priv.au",
"https://not-ytb.blocus.ch",
"https://inv.creller.net",
"https://inv.zzls.xyz",
"https://yt.floss.media",
"https://invidious.slipfox.xyz",
"https://par1.iv.ggtyler.dev",
"https://inv.citw.lgbt",
"https://invidious.io.lol",
"https://yt.oelrichsgarcia.de",
"https://iv.nboeck.de",
"https://invidious.protokolla.fi",
"https://invidious.fi",
"https://onion.tube",
"https://inv.in.projectsegfau.lt",
"https://invidious.privacydev.net",
"https://invidious.takebackourtech.org",
"https://qc1.iv.ggtyler.dev",
"https://anontube.lvkaszus.pl",
"https://invidious.asir.dev",
"https://invidious.fdn.fr",
"https://iv.datura.network",
"https://invidious.private.coffee",
"https://inv.pistasjis.net",
"https://invidious.pavot.ca",
"https://yt.cdaut.de",
"https://yt.drgnz.club",
"https://invidious.perennialte.ch",
"https://yt.chaotic.ninja",
"https://yt.omada.cafe",
"https://super8.absturztau.be",
"https://i.redsnake.io",
"https://watch.supernets.org",
"https://invidious.qwik.space",
"https://farside.link/invidious",
"https://inv.odyssey346.dev",
"https://invidious.mutahar.rocks",
"https://invidious.nerdvpn.de",
"https://invidious.projectsegfau.lt",
"https://invidious.weblibre.org",
"https://iv.ggtyler.dev",
"https://watch.thekitty.zone",
"https://inv.us.projectsegfau.lt",
"https://invidious.drgns.space",
"https://incogtube.com",
"https://vid.puffyan.us",
"https://yt.artemislena.eu",
"https://invidious.snopyta.org",
"https://youtube.076.ne.jp",
"https://invidious.osi.kr",
"https://invidious-us.kavin.rocks",
"https://inv.cthd.icu",
"https://invidious.namazso.eu",
"https://yewtu.be",
"https://invidio.xamh.de",
"https://invidious.kavin.rocks",
"https://monocles.live",
"https://inv.riverside.rocks",
"https://invidious.lunar.icu",
"https://y.com.sb",
"https://inv.bp.projectsegfau.lt",
"https://invidious.flokinet.to",
"https://invidious.sethforprivacy.com",
"https://invidious.esmailelbob.xyz",
"https://ytb.trom.tf",
"https://invidious.domain.glass",
"https://tube.cthd.icu",
"https://inv.vern.cc",
"https://invidious.garudalinux.org",
"https://youtube.owacon.moe",
"https://invidious.tinfoil-hat.net",
"https://iv.melmac.space",
"https://invidious.tiekoetter.com",
"https://invidious.baczek.me",
"https://invidious.no-logs.com",
"https://invidious.0011.lt",
"https://yt.funami.tech",
"https://inv.tux.pizza",
"https://vid.priv.au",
"https://not-ytb.blocus.ch",
"https://inv.creller.net",
"https://inv.zzls.xyz",
"https://yt.floss.media",
"https://invidious.slipfox.xyz",
"https://par1.iv.ggtyler.dev",
"https://inv.citw.lgbt",
"https://invidious.io.lol",
"https://yt.oelrichsgarcia.de",
"https://iv.nboeck.de",
"https://invidious.protokolla.fi",
"https://invidious.fi",
"https://onion.tube",
"https://inv.in.projectsegfau.lt",
"https://invidious.privacydev.net",
"https://invidious.takebackourtech.org",
"https://qc1.iv.ggtyler.dev",
"https://anontube.lvkaszus.pl",
"https://invidious.asir.dev",
"https://invidious.fdn.fr",
"https://iv.datura.network",
"https://invidious.private.coffee",
"https://inv.pistasjis.net",
"https://invidious.pavot.ca",
"https://yt.cdaut.de",
"https://yt.drgnz.club",
"https://invidious.perennialte.ch",
"https://yt.chaotic.ninja",
"https://yt.omada.cafe",
"https://super8.absturztau.be",
"https://i.redsnake.io",
"https://watch.supernets.org",
"https://invidious.qwik.space",
"https://farside.link/invidious",
"https://inv.odyssey346.dev",
"https://invidious.mutahar.rocks",
"https://invidious.nerdvpn.de",
"https://invidious.projectsegfau.lt",
"https://invidious.weblibre.org",
"https://iv.ggtyler.dev",
"https://watch.thekitty.zone",
"https://inv.us.projectsegfau.lt",
"https://invidious.drgns.space",
]
i2p = [
"http://tube.i2p",
"http://inv.cn.i2p",
"http://jewtube.i2p",
"http://ytmous.i2p",
"http://pa7eextqat4wg35onzs4cnlhqa3gvzen243bcbrng67zyla4fqya.b32.i2p",
"http://inv.vern.i2p",
"http://inv.zzls.i2p",
"http://verni6dr4qxjgjumnvesxerh5rvhv6oy5ddeibaqy5d7tgbiiyfa.b32.i2p",
"http://tube.i2p",
"http://inv.cn.i2p",
"http://jewtube.i2p",
"http://ytmous.i2p",
"http://pa7eextqat4wg35onzs4cnlhqa3gvzen243bcbrng67zyla4fqya.b32.i2p",
"http://inv.vern.i2p",
"http://inv.zzls.i2p",
"http://verni6dr4qxjgjumnvesxerh5rvhv6oy5ddeibaqy5d7tgbiiyfa.b32.i2p",
]
loki = []
tor = [
"http://tuberyps2pn6dor6h47brof3w2asmauahhk4ei42krugybzzzo55klad.onion",
"http://qwikxxeiw4kgmml6vjw2bsxtviuwjce735dunai2djhu6q7qbacq73id.onion",
"http://qwikxxt6jvggxzxe2v2fuzro5j7ibgphxmblmri6wkj5vpicdbo2kwad.onion",
"http://c7hqkpkpemu6e7emz5b4vyz7idjgdvgaaa3dyimmeojqbgpea3xqjoid.onion",
"http://grwp24hodrefzvjjuccrkw3mjq4tzhaaq32amf33dzpmuxe7ilepcmad.onion",
"http://invidious.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
"http://euxxcnhsynwmfidvhjf6uzptsmh4dipkmgdmcmxxuo7tunp3ad2jrwyd.onion",
"http://invidious.g4c3eya4clenolymqbpgwz3q3tawoxw56yhzk4vugqrl6dtu3ejvhjid.onion",
"http://iv.odysfvr23q5wgt7i456o5t3trw2cw5dgn56vbjfbq2m7xsc5vqbqpcyd.onion",
"http://kbjggqkzv65ivcqj6bumvp337z6264huv5kpkwuv6gu5yjiskvan7fad.onion",
"http://ng27owmagn5amdm7l5s3rsqxwscl5ynppnis5dqcasogkyxcfqn7psid.onion",
"http://osbivz6guyeahrwp2lnwyjk2xos342h4ocsxyqrlaopqjuhwn2djiiyd.onion",
"http://u2cvlit75owumwpy4dj2hsmvkq7nvrclkpht7xgyye2pyoxhpmclkrad.onion",
"http://w6ijuptxiku4xpnnaetxvnkc5vqcdu7mgns2u77qefoixi63vbvnpnqd.onion",
"http://tuberyps2pn6dor6h47brof3w2asmauahhk4ei42krugybzzzo55klad.onion",
"http://qwikxxeiw4kgmml6vjw2bsxtviuwjce735dunai2djhu6q7qbacq73id.onion",
"http://qwikxxt6jvggxzxe2v2fuzro5j7ibgphxmblmri6wkj5vpicdbo2kwad.onion",
"http://c7hqkpkpemu6e7emz5b4vyz7idjgdvgaaa3dyimmeojqbgpea3xqjoid.onion",
"http://grwp24hodrefzvjjuccrkw3mjq4tzhaaq32amf33dzpmuxe7ilepcmad.onion",
"http://invidious.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
"http://euxxcnhsynwmfidvhjf6uzptsmh4dipkmgdmcmxxuo7tunp3ad2jrwyd.onion",
"http://invidious.g4c3eya4clenolymqbpgwz3q3tawoxw56yhzk4vugqrl6dtu3ejvhjid.onion",
"http://iv.odysfvr23q5wgt7i456o5t3trw2cw5dgn56vbjfbq2m7xsc5vqbqpcyd.onion",
"http://kbjggqkzv65ivcqj6bumvp337z6264huv5kpkwuv6gu5yjiskvan7fad.onion",
"http://ng27owmagn5amdm7l5s3rsqxwscl5ynppnis5dqcasogkyxcfqn7psid.onion",
"http://osbivz6guyeahrwp2lnwyjk2xos342h4ocsxyqrlaopqjuhwn2djiiyd.onion",
"http://u2cvlit75owumwpy4dj2hsmvkq7nvrclkpht7xgyye2pyoxhpmclkrad.onion",
"http://w6ijuptxiku4xpnnaetxvnkc5vqcdu7mgns2u77qefoixi63vbvnpnqd.onion",
]
yggdrasil = [
"http://[200:168a:c80a:b258:1dfe:f920:4414:6897]",
"http://[200:168a:c80a:b258:1dfe:f920:4414:6897]",
]
hostname = [
"youtu.be",
"youtube.com",
"youtu.be",
"youtube.com",
]
type = [
"feed",
"link",
"feed",
"link",
]
[proxies.librarian]
clearnet = [
"https://librarian.pussthecat.org",
"https://odysee.076.ne.jp",
"https://lbry.projectsegfau.lt",
"https://librarian.esmailelbob.xyz",
"https://lbry.mywire.org",
"https://lbry.slipfox.xyz",
"https://lbry.vern.cc",
"https://lbry.ooguy.com",
"https://lbn.frail.duckdns.org",
"https://odysee.owacon.moe",
"https://farside.link/librarian",
"https://librarian.pussthecat.org",
"https://odysee.076.ne.jp",
"https://lbry.projectsegfau.lt",
"https://librarian.esmailelbob.xyz",
"https://lbry.mywire.org",
"https://lbry.slipfox.xyz",
"https://lbry.vern.cc",
"https://lbry.ooguy.com",
"https://lbn.frail.duckdns.org",
"https://odysee.owacon.moe",
"https://farside.link/librarian",
]
i2p = []
loki = []
tor = [
"http://librarian.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
"http://lbry.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion",
"http://5znbzx2xcymhddzekfjib3isgqq4ilcyxa2bsq6vqmnvbtgu4f776lqd.onion",
"http://bxewpsswttslepw27w2hhxhlizwm7l7y54x3jw5cfrb64hb6lgc557ad.onion",
"http://librarian.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
"http://lbry.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion",
"http://5znbzx2xcymhddzekfjib3isgqq4ilcyxa2bsq6vqmnvbtgu4f776lqd.onion",
"http://bxewpsswttslepw27w2hhxhlizwm7l7y54x3jw5cfrb64hb6lgc557ad.onion",
]
yggdrasil = []
hostname = [
"odysee.com",
"odysee.com",
]
type = [
"feed",
"link",
"feed",
"link",
]
[proxies.libreddit]
clearnet = [
"https://libreddit.spike.codes",
"https://libreddit.hu",
"https://libreddit.nl",
"https://libreddit.bus-hit.me",
"https://libreddit.strongthany.cc",
"https://libreddit.esmailelbob.xyz",
"https://lr.riverside.rocks",
"https://libreddit.40two.app",
"https://libreddit.albony.xyz",
"https://libreddit.domain.glass",
"https://discuss.whatever.social",
"https://libreddit.kavin.rocks",
"https://libreddit.privacy.com.de",
"https://libreddit.eu.org",
"https://libreddit.bloatcat.tk",
"https://libreddit.pabloferreiro.es",
"https://lr.foss.wtf",
"https://libreddit.no-logs.com",
"https://lr.slipfox.xyz",
"https://lr.creller.net",
"https://libreddit.dcs0.hu",
"https://l.opnxng.com",
"https://libreddit.tux.pizza",
"https://reddit.leptons.xyz",
"https://reddit.baby",
"https://snoo.habedieeh.re",
"https://lr.4201337.xyz",
"https://libreddit.private.coffee",
"https://lr.artemislena.eu",
"https://libreddit.privacyfucking.rocks",
"https://libreddit.qwik.space",
"https://farside.link/libreddit",
"https://de.leddit.xyz",
"https://leddit.xyz",
"https://libreddit.alefvanoon.xyz",
"https://libreddit.autarkic.org",
"https://libreddit.awesomehub.io",
"https://libreddit.crewz.me",
"https://libreddit.database.red",
"https://libreddit.datatunnel.xyz",
"https://libreddit.de",
"https://libreddit.dothq.co",
"https://libreddit.drivet.xyz",
"https://libreddit.flux.industries",
"https://libreddit.igna.rocks",
"https://libredd.it",
"https://libreddit.jamiethalacker.dev",
"https://libreddit.kylrth.com",
"https://libreddit.lunar.icu",
"https://libreddit.mutahar.rocks",
"https://libreddit.northboot.xyz",
"https://libreddit.pussthecat.org",
"https://libreddit.silkky.cloud",
"https://libreddit.some-things.org",
"https://libreddit.sugoma.tk",
"https://libreddit.tiekoetter.com",
"https://libreddit.totaldarkness.net",
"https://libreddit.winscloud.net",
"https://libreddit.yonalee.eu",
"https://lr.cowfee.moe",
"https://lr.mint.lgbt",
"https://lr.oversold.host",
"https://lr.stilic.ml",
"https://r.nf",
"https://r.walkx.org",
"https://reddi.tk",
"https://reddit.artemislena.eu",
"https://reddit.invak.id",
"https://reddit.phii.me",
"https://reddit.rtrace.io",
"https://reddit.stuehieyr.com",
"https://safereddit.com",
"https://libreddit.nohost.network",
"https://libreddit.projectsegfau.lt",
"https://reddit.simo.sh",
"https://libreddit.strongthany.cc",
"https://libreddit.40two.app",
"https://discuss.whatever.social",
"https://libreddit.kavin.rocks",
"https://libreddit.privacy.com.de",
"https://libreddit.no-logs.com",
"https://lr.slipfox.xyz",
"https://libreddit.tux.pizza",
"https://snoo.habedieeh.re",
"https://lr.artemislena.eu",
"https://libreddit.privacyfucking.rocks",
"https://libreddit.qwik.space",
"https://de.leddit.xyz",
"https://leddit.xyz",
"https://libreddit.crewz.me",
"https://libreddit.de",
"https://libreddit.igna.rocks",
"https://libredd.it",
"https://libreddit.kylrth.com",
"https://libreddit.lunar.icu",
"https://libreddit.pussthecat.org",
"https://lr.cowfee.moe",
"https://lr.mint.lgbt",
"https://r.nf",
"https://reddit.invak.id",
"https://safereddit.com",
"https://libreddit.nohost.network",
"https://libreddit.projectsegfau.lt",
"https://reddit.simo.sh",
]
i2p = [
"http://woo5ugmoomzbtaq6z46q4wgei5mqmc6jkafqfi5c37zni7xc4ymq.b32.i2p",
"http://woo5ugmoomzbtaq6z46q4wgei5mqmc6jkafqfi5c37zni7xc4ymq.b32.i2p",
]
loki = []
tor = [
"http://spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion",
"http://qwikxxeiw4kgmml6vjw2bsxtviuwjce735dunai2djhu6q7qbacq73id.onion",
"http://qwikxx4xqvhdyyazkrw7pwdpdppfnmn7j2n6cvq5zecm4atbppaslzad.onion",
"http://ecue64ybzvn6vjzl37kcsnwt4ycmbsyf74nbttyg7rkc3t3qwnj7mcyd.onion",
"http://fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion",
"http://inytumdgnri7xsqtvpntjevaelxtgbjqkuqhtf6txxhwbll2fwqtakqd.onion",
"http://kphht2jcflojtqte4b4kyx7p2ahagv4debjj32nre67dxz7y57seqwyd.onion",
"http://kzhfp3nvb4qp575vy23ccbrgfocezjtl5dx66uthgrhu7nscu6rcwjyd.onion",
"http://lbrdtjaj7567ptdd4rv74lv27qhxfkraabnyphgcvptl64ijx2tijwid.onion",
"http://libreddit.2syis2nnyytz6jnusnjurva4swlaizlnleiks5mjp46phuwjbdjqwgqd.onion",
"http://ledditqo2mxfvlgobxnlhrkq4dh34jss6evfkdkb2thlvy6dn4f4gpyd.onion",
"http://libreddit.lqs5fjmajyp7rvp4qvyubwofzi6d4imua7vs237rkc4m5qogitqwrgyd.onion",
"http://libredoxhxwnmsb6dvzzd35hmgzmawsq5i764es7witwhddvpc2razid.onion",
"http://ol5begilptoou34emq2sshf3may3hlblvipdjtybbovpb7c7zodxmtqd.onion",
"http://liredejj74h5xjqr2dylnl5howb2bpikfowqoveub55ru27x43357iid.onion",
"http://spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion",
"http://qwikxxeiw4kgmml6vjw2bsxtviuwjce735dunai2djhu6q7qbacq73id.onion",
"http://qwikxx4xqvhdyyazkrw7pwdpdppfnmn7j2n6cvq5zecm4atbppaslzad.onion",
"http://ecue64ybzvn6vjzl37kcsnwt4ycmbsyf74nbttyg7rkc3t3qwnj7mcyd.onion",
"http://fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion",
"http://inytumdgnri7xsqtvpntjevaelxtgbjqkuqhtf6txxhwbll2fwqtakqd.onion",
"http://kphht2jcflojtqte4b4kyx7p2ahagv4debjj32nre67dxz7y57seqwyd.onion",
"http://kzhfp3nvb4qp575vy23ccbrgfocezjtl5dx66uthgrhu7nscu6rcwjyd.onion",
"http://lbrdtjaj7567ptdd4rv74lv27qhxfkraabnyphgcvptl64ijx2tijwid.onion",
"http://libreddit.2syis2nnyytz6jnusnjurva4swlaizlnleiks5mjp46phuwjbdjqwgqd.onion",
"http://ledditqo2mxfvlgobxnlhrkq4dh34jss6evfkdkb2thlvy6dn4f4gpyd.onion",
"http://libreddit.lqs5fjmajyp7rvp4qvyubwofzi6d4imua7vs237rkc4m5qogitqwrgyd.onion",
"http://libredoxhxwnmsb6dvzzd35hmgzmawsq5i764es7witwhddvpc2razid.onion",
"http://ol5begilptoou34emq2sshf3may3hlblvipdjtybbovpb7c7zodxmtqd.onion",
"http://liredejj74h5xjqr2dylnl5howb2bpikfowqoveub55ru27x43357iid.onion",
]
yggdrasil = []
hostname = [
"reddit.com",
"reddit.com",
]
type = [
"link",
"link",
]
[proxies.neuters]
clearnet = [
"https://neuters.de",
"https://neuters.privacyfucking.rocks",
]
hostname = [
"reuters.com",
]
type = [
"link",
]
[proxies.nitter]
clearnet = [
"https://nitter.hu",
"https://nitter.actionsack.com",
"https://nitter.net",
"https://nitter.1d4.us",
"https://nitter.nixnet.services",
"https://nitter.unixfox.eu",
"https://nitter.sethforprivacy.com",
"https://nitter.pussthecat.org",
"https://nitter.it",
"https://nitter.moomoo.me",
"https://tw.artemislena.eu",
"https://nitter.snopyta.org",
"https://birdsite.xanny.family",
"https://nitter.domain.glass",
"https://read.whatever.social",
"https://nitter.lacontrevoie.fr",
"https://bird.trom.tf",
"https://nitter.hostux.net",
"https://nitter.sneed.network",
"https://twitter.owacon.moe",
"https://nitter.ggc-project.de",
"https://unofficialbird.com",
"https://nitter.fdn.fr",
"https://nitter.no-logs.com",
"https://nitter.slipfox.xyz",
"https://nitter.one",
"https://nitter.ungovernable.men",
"https://nitter.private.coffee",
"https://nitter.soopy.moe",
"https://nitter.oksocial.net",
"https://n.sneed.network",
"https://nitter.qwik.space",
"https://nitter.nohost.network",
"https://de.nttr.stream",
"https://farside.link/nitter",
"https://nitter.42l.fr",
"https://nitter.bus-hit.me",
"https://nitter.ca",
"https://nitter.eu",
"https://nitter.grimneko.de",
"https://nitter.kavin.rocks",
"https://nitter.koyu.space",
"https://nitter.namazso.eu",
"https://nttr.stream",
"https://twitter.076.ne.jp",
"https://twitter.censors.us",
"https://n.hyperborea.cloud",
"https://n.biendeo.com",
"https://n.opnxng.com",
"https://nitter.adminforge.de",
"https://nitter.catsarch.com",
"https://nitter.cz",
"https://nitter.esmailelbob.xyz",
"https://nitter.in.projectsegfau.lt",
"https://nitter.io.lol",
"https://nitter.ktachibana.party",
"https://nitter.kylrth.com",
"https://nitter.poast.org",
"https://nitter.privacydev.net",
"https://nitter.salastil.com",
"https://nitter.woodland.cafe",
"https://nitter.hu",
"https://nitter.actionsack.com",
"https://nitter.net",
"https://nitter.1d4.us",
"https://nitter.nixnet.services",
"https://nitter.unixfox.eu",
"https://nitter.sethforprivacy.com",
"https://nitter.pussthecat.org",
"https://nitter.it",
"https://nitter.moomoo.me",
"https://tw.artemislena.eu",
"https://nitter.snopyta.org",
"https://birdsite.xanny.family",
"https://nitter.domain.glass",
"https://read.whatever.social",
"https://nitter.lacontrevoie.fr",
"https://bird.trom.tf",
"https://nitter.hostux.net",
"https://nitter.sneed.network",
"https://twitter.owacon.moe",
"https://nitter.ggc-project.de",
"https://unofficialbird.com",
"https://nitter.fdn.fr",
"https://nitter.no-logs.com",
"https://nitter.slipfox.xyz",
"https://nitter.one",
"https://nitter.ungovernable.men",
"https://nitter.private.coffee",
"https://nitter.soopy.moe",
"https://nitter.oksocial.net",
"https://n.sneed.network",
"https://nitter.qwik.space",
"https://nitter.nohost.network",
"https://de.nttr.stream",
"https://farside.link/nitter",
"https://nitter.42l.fr",
"https://nitter.bus-hit.me",
"https://nitter.ca",
"https://nitter.eu",
"https://nitter.grimneko.de",
"https://nitter.kavin.rocks",
"https://nitter.koyu.space",
"https://nitter.namazso.eu",
"https://nttr.stream",
"https://twitter.076.ne.jp",
"https://twitter.censors.us",
"https://n.hyperborea.cloud",
"https://n.biendeo.com",
"https://n.opnxng.com",
"https://nitter.adminforge.de",
"https://nitter.catsarch.com",
"https://nitter.cz",
"https://nitter.esmailelbob.xyz",
"https://nitter.in.projectsegfau.lt",
"https://nitter.io.lol",
"https://nitter.ktachibana.party",
"https://nitter.kylrth.com",
"https://nitter.poast.org",
"https://nitter.privacydev.net",
"https://nitter.salastil.com",
"https://nitter.woodland.cafe",
]
i2p = [
"http://tm4rwkeysv3zz3q5yacyr4rlmca2c4etkdobfvuqzt6vsfsu4weq.b32.i2p",
"http://tm4rwkeysv3zz3q5yacyr4rlmca2c4etkdobfvuqzt6vsfsu4weq.b32.i2p",
]
loki = []
tor = [
"http://qwikxxeiw4kgmml6vjw2bsxtviuwjce735dunai2djhu6q7qbacq73id.onion",
"http://qwikxx2erhx6qrymued6ox2qkf2yeogjwypqvzoif4fqkljixasr6oid.onion",
"http://n.sneed4fmhevap3ci4xhf4wgkf72lwk275lcgomnfgwniwmqvaxyluuid.onion",
"http://qwikxxeiw4kgmml6vjw2bsxtviuwjce735dunai2djhu6q7qbacq73id.onion",
"http://qwikxx2erhx6qrymued6ox2qkf2yeogjwypqvzoif4fqkljixasr6oid.onion",
"http://n.sneed4fmhevap3ci4xhf4wgkf72lwk275lcgomnfgwniwmqvaxyluuid.onion",
]
yggdrasil = []
hostname = [
"twitter.com",
"x.com",
"twitter.com",
"x.com",
]
type = [
"feed",
"link",
"feed",
"link",
]
[proxies.proxitok]
clearnet = [
"https://proxitok.lunar.icu",
"https://tik.hostux.net",
"https://proxitok.pabloferreiro.es",
"https://proxitok.privacy.com.de",
"https://tok.adminforge.de",
"https://tok.habedieeh.re",
"https://proxitok.pussthecat.org",
"https://proxitok.privacyfucking.rocks",
"https://cringe.whatever.social",
"https://proxitok.esmailelbob.xyz",
"https://proxitok.privacydev.net",
"https://proxitok.pufe.org",
"https://tok.artemislena.eu",
"https://tok.thekitty.zone",
"https://tiktok.chauvet.pro",
"https://tt.vern.cc",
"https://farside.link/proxitok",
"https://proxitok.lunar.icu",
"https://tik.hostux.net",
"https://proxitok.pabloferreiro.es",
"https://proxitok.privacy.com.de",
"https://tok.adminforge.de",
"https://tok.habedieeh.re",
"https://proxitok.pussthecat.org",
"https://proxitok.privacyfucking.rocks",
"https://cringe.whatever.social",
"https://proxitok.esmailelbob.xyz",
"https://proxitok.privacydev.net",
"https://proxitok.pufe.org",
"https://tok.artemislena.eu",
"https://tok.thekitty.zone",
"https://tiktok.chauvet.pro",
"https://tt.vern.cc",
"https://farside.link/proxitok",
]
i2p = [
"http://qr.vern.i2p",
"http://qr.vern.i2p",
]
loki = []
tor = []
yggdrasil = []
hostname = [
"tiktok.com",
"tiktok.com",
]
type = [
"link",
"link",
]
[proxies.quetre]
clearnet = [
"https://quetre.privacydev.net",
"https://quetre.pufe.org",
"https://que.wilbvr.me",
"https://quetre.iket.me",
"https://quetre.pussthecat.org",
"https://quetre.tokhmi.xyz",
"https://quetre.projectsegfau.lt",
"https://quetre.esmailelbob.xyz",
"https://quetre.odyssey346.dev",
"https://ask.habedieeh.re",
"https://quetre.marcopisco.com",
"https://quetre.blackdrgn.nl",
"https://quetre.lunar.icu",
"https://quora.femboy.hu",
"https://quora.vern.cc",
"https://farside.link/quetre",
"https://quetre.fascinated.cc",
"https://quetre.privacydev.net",
"https://quetre.pufe.org",
"https://que.wilbvr.me",
"https://quetre.iket.me",
"https://quetre.pussthecat.org",
"https://quetre.tokhmi.xyz",
"https://quetre.projectsegfau.lt",
"https://quetre.esmailelbob.xyz",
"https://quetre.odyssey346.dev",
"https://ask.habedieeh.re",
"https://quetre.marcopisco.com",
"https://quetre.blackdrgn.nl",
"https://quetre.lunar.icu",
"https://quora.femboy.hu",
"https://quora.vern.cc",
"https://farside.link/quetre",
"https://quetre.fascinated.cc",
]
i2p = []
loki = []
tor = [
"http://ask.habeehrhadazsw3izbrbilqajalfyqqln54mrja3iwpqxgcuxnus7eid.onion",
"http://qr.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion",
"http://quetre.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
"http://quetre.g4c3eya4clenolymqbpgwz3q3tawoxw56yhzk4vugqrl6dtu3ejvhjid.onion",
"http://quora.cepyxplublbyw2f4axy4pyztfbxmf63lrt2c7uwv6wl4iixz53czload.onion",
"http://ask.habeehrhadazsw3izbrbilqajalfyqqln54mrja3iwpqxgcuxnus7eid.onion",
"http://qr.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion",
"http://quetre.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
"http://quetre.g4c3eya4clenolymqbpgwz3q3tawoxw56yhzk4vugqrl6dtu3ejvhjid.onion",
"http://quora.cepyxplublbyw2f4axy4pyztfbxmf63lrt2c7uwv6wl4iixz53czload.onion",
]
yggdrasil = []
hostname = [
"quora.com",
"quora.com",
]
type = [
"link",
"link",
]
[proxies.redlib]
clearnet = [
"https://redlib.private.coffee",
]
i2p = []
loki = []
tor = []
yggdrasil = []
hostname = [
"reddit.com",
]
type = [
"link",
]
[proxies.teddit]
clearnet = [
"https://teddit.pussthecat.org",
"https://teddit.zaggy.nl",
"https://teddit.bus-hit.me",
"https://teddit.adminforge.de",
"https://incogsnoo.com",
"https://teddit.hostux.net",
"https://teddit.ggc-project.de",
"https://teddit.httpjames.space",
"https://snoo.ioens.is",
"https://teddit.no-logs.com",
"https://teddit.net",
"https://i.opnxng.com",
"https://tedd.it",
"https://teddit.projectsegfau.lt",
"https://reddit.lol",
"https://rdt.trom.tf",
"https://t.sneed.network",
"https://farside.link/teddit",
"https://teddit.alefvanoon.xyz",
"https://teddit.domain.glass",
"https://teddit.froth.zone",
"https://teddit.namazso.eu",
"https://teddit.sethforprivacy.com",
"https://teddit.tinfoil-hat.net",
"https://teddit.totaldarkness.net",
"https://td.vern.cc",
"https://teddit.pussthecat.org",
"https://teddit.zaggy.nl",
"https://teddit.bus-hit.me",
"https://teddit.adminforge.de",
"https://incogsnoo.com",
"https://teddit.hostux.net",
"https://teddit.ggc-project.de",
"https://teddit.httpjames.space",
"https://snoo.ioens.is",
"https://teddit.no-logs.com",
"https://teddit.net",
"https://i.opnxng.com",
"https://tedd.it",
"https://teddit.projectsegfau.lt",
"https://reddit.lol",
"https://rdt.trom.tf",
"https://t.sneed.network",
"https://farside.link/teddit",
"https://teddit.alefvanoon.xyz",
"https://teddit.domain.glass",
"https://teddit.froth.zone",
"https://teddit.namazso.eu",
"https://teddit.sethforprivacy.com",
"https://teddit.tinfoil-hat.net",
"https://teddit.totaldarkness.net",
"https://td.vern.cc",
]
i2p = [
"http://k62ptris7p72aborr4zoanee7xai6wguucveptwgxs5vbgt7qzpq.b32.i2p",
"http://teddit.i2p",
]
loki = []
tor = [
"http://t.sneed4fmhevap3ci4xhf4wgkf72lwk275lcgomnfgwniwmqvaxyluuid.onion",
"http://tedditfyn6idalzso5wam5qd3kdtxoljjhbrbbx34q2xkcisvshuytad.onion",
"http://t.sneed4fmhevap3ci4xhf4wgkf72lwk275lcgomnfgwniwmqvaxyluuid.onion",
"http://tedditfyn6idalzso5wam5qd3kdtxoljjhbrbbx34q2xkcisvshuytad.onion",
]
yggdrasil = [
"http://[200:5e4b:515c:e42b:3e73:6fbf:2f11:779d]",
"http://[200:5e4b:515c:e42b:3e73:6fbf:2f11:779d]",
]
hostname = [
"reddit.com",
"reddit.com",
]
type = [
"link",
"link",
]

View file

@ -27,8 +27,87 @@ import os
# from random import randrange
import slixfeed.sqlite as sqlite
import sys
import tomli_w
import tomllib
# TODO Merge with backup_obsolete
def update_proxies(file, proxy_name, proxy_type, proxy_url, action='remove'):
    """
    Add or remove given URL in the given proxy list and save to file.

    Parameters
    ----------
    file : str
        Filename to write the updated data to.
    proxy_name : str
        Proxy name (e.g. 'nitter').
    proxy_type : str
        Proxy type (e.g. 'clearnet', 'tor').
    proxy_url : str
        Proxy URL.
    action : str
        'add' or 'remove' (default 'remove').

    Returns
    -------
    None.
    """
    data = open_config_file('proxies.toml')
    proxy_list = data['proxies'][proxy_name][proxy_type]
    # Honor the requested action; previously `action` was accepted but
    # ignored and the URL was always removed.
    if action == 'add':
        if proxy_url not in proxy_list:
            proxy_list.append(proxy_url)
    else:
        # list.remove raises ValueError when absent, matching the
        # former index()/pop() behavior.
        proxy_list.remove(proxy_url)
    with open(file, 'w') as new_file:
        new_file.write(tomli_w.dumps(data))
# TODO Merge with update_proxies
def backup_obsolete(file, proxy_name, proxy_type, proxy_url, action='add'):
"""
Add given URL to given list.
Parameters
----------
file : str
Filename.
proxy_name : str
Proxy name.
proxy_type : str
Proxy title.
proxy_url : str
Proxy URL.
action : str
add or remove
Returns
-------
None.
"""
data = open_config_file('proxies_obsolete.toml')
proxy_list = data['proxies'][proxy_name][proxy_type]
proxy_list.extend([proxy_url])
with open(file, 'w') as new_file:
content = tomli_w.dumps(data)
new_file.write(content)
def create_skeleton(file, file_out='proxies_obsolete.toml'):
    """
    Create an empty-valued copy (skeleton) of given TOML file.

    Parameters
    ----------
    file : str
        Path of the TOML file to copy the structure from.
    file_out : str
        Path of the skeleton file to write. Defaults to
        'proxies_obsolete.toml' in the current working directory,
        preserving the previous hard-coded behavior; callers that
        expect the file in the configuration directory should pass
        an explicit path.

    Returns
    -------
    None.
    """
    with open(file, 'rb') as original_file:
        data = tomllib.load(original_file)
    data = clear_values(data)
    with open(file_out, 'w') as new_file:
        new_file.write(tomli_w.dumps(data))
def clear_values(data):
    """
    Recursively reset all values of a TOML data structure.

    Dictionaries keep their keys, lists collapse to a single empty
    string, and scalars become an empty string.

    Parameters
    ----------
    data : dict or list or object
        Data structure to clear.
        (Parameter renamed from `input`, which shadowed the builtin;
        callers use it positionally.)

    Returns
    -------
    dict or list or str
        Cleared copy of the given structure.
    """
    if isinstance(data, dict):
        return {key: clear_values(value) for key, value in data.items()}
    if isinstance(data, list):
        # Collapse any list to a single placeholder entry.
        return ['']
    return ''
def get_value(filename, section, keys):
"""
@ -120,7 +199,9 @@ def get_value_default(filename, section, key):
return result
def get_list(filename, key):
# TODO DELETE THIS FUNCTION OR KEEP ONLY THE CODE BELOW NOTE
# IF CODE BELOW NOTE IS KEPT, RENAME FUNCTION TO open_toml
def open_config_file(filename):
"""
Get settings default value.
@ -128,8 +209,6 @@ def get_list(filename, key):
----------
filename : str
Filename of toml file.
key: str
Key.
Returns
-------
@ -142,11 +221,11 @@ def get_list(filename, key):
if not os.path.isdir(config_dir):
config_dir = os.path.dirname(__file__) + "/assets"
config_file = os.path.join(config_dir, filename)
# NOTE THIS IS THE IMPORTANT CODE
with open(config_file, mode="rb") as defaults:
# default = yaml.safe_load(defaults)
# result = default[key]
result = tomllib.load(defaults)
result = result[key]
return result
@ -221,6 +300,8 @@ def get_default_cache_directory():
return os.path.join(data_home, 'slixfeed')
# TODO Write a similar function for file.
# NOTE this is a function of directory, not file.
def get_default_config_directory():
"""
Determine the directory path where configuration will be stored.
@ -370,7 +451,7 @@ async def is_include_keyword(db_file, key, string):
# async def is_blacklisted(db_file, string):
keywords = (await sqlite.get_filters_value(db_file, key)) or ''
keywords = keywords.split(",")
keywords = keywords + (get_list("lists.toml", key))
keywords = keywords + (open_config_file("lists.toml")[key])
for keyword in keywords:
if not keyword or len(keyword) < 2:
continue

View file

@ -5,16 +5,17 @@
TODO
1.1) Do not compose messages.
Only return results.
See: # TODO return feeds
1.1) Attempt to scan more paths: /blog/, /news/ etc., including root /
Attempt to scan sub domains
https://esmailelbob.xyz/en/
https://blog.esmailelbob.xyz/feed/
1.2) Return URLs, nothing else other (e.g. processed messages).
1.3) NOTE: Correction of URLs is acceptable.
1.2) Consider utilizing fetch.http_response
2) Consider merging with module fetch.py
FEEDS CRAWLER PROJECT
3) Mark redirects for manual check
Title : JSON Feed
@ -163,7 +164,7 @@ async def feed_mode_guess(url, tree):
"""
urls = []
parted_url = urlsplit(url)
paths = config.get_list("lists.toml", "pathnames")
paths = config.open_config_file("lists.toml")["pathnames"]
# Check whether URL has path (i.e. not root)
# Check parted_url.path to avoid error in case root wasn't given
# TODO Make more tests
@ -202,7 +203,7 @@ async def feed_mode_scan(url, tree):
Message with URLs.
"""
urls = []
paths = config.get_list("lists.toml", "pathnames")
paths = config.open_config_file("lists.toml")["pathnames"]
for path in paths:
# xpath_query = "//*[@*[contains(.,'{}')]]".format(path)
# xpath_query = "//a[contains(@href,'{}')]".format(path)

View file

@ -10,6 +10,8 @@ FIXME
TODO
0) Improve function http to return sensible value (the list is not good enough)
1) Support Gemini and Gopher.
2) Check also for HTML, not only feed.bozo.
@ -29,6 +31,7 @@ from asyncio import TimeoutError
import logging
# from lxml import html
# from xml.etree.ElementTree import ElementTree, ParseError
import requests
import slixfeed.config as config
try:
from magnet2torrent import Magnet2Torrent, FailedToFetchException
@ -50,6 +53,44 @@ except:
# async def ipfs():
def http_response(url):
    """
    Fetch given URL and return the HTTP response object.

    Parameters
    ----------
    url : str
        URL.

    Returns
    -------
    response : requests.models.Response or None
        HTTP response, or None when the request fails.
        Useful attributes include:
            response.encoding
            response.headers
            response.history
            response.reason
            response.status_code
            response.url
    """
    agent = config.get_value(
        "settings", "Network", "user-agent")
    request_headers = {
        "User-Agent": agent or 'Slixfeed/0.1'
        }
    try:
        # A GET request is issued rather than HEAD, because quite a
        # few websites deny HEAD requests.
        return requests.get(
            url, headers=request_headers, allow_redirects=True)
    except Exception as e:
        logging.error(str(e))
        return None
async def http(url):
"""
Download content of given URL.

View file

@ -76,6 +76,21 @@ def create_tables(db_file):
);
"""
)
feeds_statistics_table_sql = (
"""
CREATE TABLE IF NOT EXISTS statistics (
id INTEGER NOT NULL,
feed_id INTEGER NOT NULL UNIQUE,
offline INTEGER,
entries INTEGER,
entries INTEGER,
FOREIGN KEY ("feed_id") REFERENCES "feeds" ("id")
ON UPDATE CASCADE
ON DELETE CASCADE,
PRIMARY KEY ("id")
);
"""
)
feeds_properties_table_sql = (
"""
CREATE TABLE IF NOT EXISTS feeds_properties (
@ -153,16 +168,6 @@ def create_tables(db_file):
);
"""
)
# statistics_table_sql = (
# """
# CREATE TABLE IF NOT EXISTS statistics (
# id INTEGER NOT NULL,
# title TEXT NOT NULL,
# number INTEGER,
# PRIMARY KEY ("id")
# );
# """
# )
status_table_sql = (
"""
CREATE TABLE IF NOT EXISTS status (
@ -527,15 +532,6 @@ async def remove_feed_by_index(db_file, ix):
with create_connection(db_file) as conn:
async with DBLOCK:
cur = conn.cursor()
sql = (
"""
SELECT url
FROM feeds
WHERE id = ?
"""
)
par = (ix,)
url = cur.execute(sql, par).fetchone()[0]
# # NOTE Should we move DBLOCK to this line? 2022-12-23
# sql = (
# "DELETE "
@ -559,7 +555,6 @@ async def remove_feed_by_index(db_file, ix):
)
par = (ix,)
cur.execute(sql, par)
return url
async def get_feed_id_and_name(db_file, url):
@ -744,7 +739,7 @@ async def get_feed_id(db_file, url):
"""
)
par = (url,)
feed_id = cur.execute(sql, par).fetchone()[0]
feed_id = cur.execute(sql, par).fetchone()
return feed_id
@ -770,7 +765,7 @@ async def mark_entry_as_read(cur, ix):
cur.execute(sql, par)
async def mark_feed_as_read(db_file, url):
async def mark_feed_as_read(db_file, feed_id):
"""
Set read status of entries of given feed as read.
@ -778,8 +773,8 @@ async def mark_feed_as_read(db_file, url):
----------
db_file : str
Path to database file.
url : str
URL.
feed_id : str
Feed Id.
"""
async with DBLOCK:
with create_connection(db_file) as conn:
@ -791,7 +786,7 @@ async def mark_feed_as_read(db_file, url):
WHERE feed_id = ?
"""
)
par = (url,)
par = (feed_id,)
cur.execute(sql, par)
@ -879,7 +874,7 @@ def get_feed_title(db_file, ix):
"""
)
par = (ix,)
title = cur.execute(sql, par).fetchone()[0]
title = cur.execute(sql, par).fetchone()
return title
@ -909,7 +904,7 @@ def get_feed_url(db_file, feed_id):
"""
)
par = (feed_id,)
url = cur.execute(sql, par).fetchone()[0]
url = cur.execute(sql, par).fetchone()
return url

View file

@ -64,6 +64,7 @@ from slixfeed.sqlite import (
)
# from xmpp import Slixfeed
import slixfeed.xmpp.client as xmpp
import slixfeed.xmpp.connect as connect
import slixfeed.xmpp.utility as utility
import time
@ -73,6 +74,26 @@ task_manager = {}
loop = asyncio.get_event_loop()
# def init_tasks(self):
# global task_ping
# # if task_ping is None or task_ping.done():
# # task_ping = asyncio.create_task(ping(self, jid=None))
# try:
# task_ping.cancel()
# except:
# logging.info('No ping task to cancel')
# task_ping = asyncio.create_task(ping(self, jid=None))
def ping_task(self):
    """
    (Re)start the XMPP keepalive ping task.

    Cancels any previously scheduled ping task before creating a
    new one via connect.ping.
    """
    # NOTE The previous code declared `global ping_task`, which rebound
    # this function's own name to an asyncio.Task after the first call,
    # so the function could never be called again. The task is now kept
    # under a distinct module-level name.
    global task_ping_instance
    try:
        task_ping_instance.cancel()
    except (NameError, AttributeError):
        # No task has been created yet.
        logging.info('No ping task to cancel.')
    task_ping_instance = asyncio.create_task(connect.ping(self))
"""
FIXME
@ -87,22 +108,40 @@ await taskhandler.start_tasks(
)
"""
async def start_tasks_xmpp(self, jid, tasks):
logging.debug("Starting tasks {} for JID {}".format(tasks, jid))
task_manager[jid] = {}
async def start_tasks_xmpp(self, jid, tasks=None):
if jid == self.boundjid.bare:
return
try:
task_manager[jid]
print('Old details for tasks of {}:\n'.format(jid), task_manager[jid].keys())
except KeyError as e:
task_manager[jid] = {}
logging.info('KeyError:', str(e))
logging.debug('Creating new task manager for JID {}'.format(jid))
if not tasks:
tasks = ['interval', 'status', 'check']
logging.info('Stopping tasks {} for JID {}'.format(tasks, jid))
for task in tasks:
# if task_manager[jid][task]:
try:
task_manager[jid][task].cancel()
except:
logging.debug('No task {} for JID {} (start_tasks_xmpp)'
.format(task, jid))
logging.info('Starting tasks {} for JID {}'.format(tasks, jid))
for task in tasks:
# print("task:", task)
# print("tasks:")
# print(tasks)
# breakpoint()
match task:
case "check":
task_manager[jid]["check"] = asyncio.create_task(
case 'check':
task_manager[jid]['check'] = asyncio.create_task(
check_updates(jid))
case "status":
task_manager[jid]["status"] = asyncio.create_task(
task_manager[jid]['status'] = asyncio.create_task(
send_status(self, jid))
case "interval":
case 'interval':
jid_file = jid.replace('/', '_')
db_file = get_pathname_to_database(jid_file)
update_interval = (
@ -116,13 +155,16 @@ async def start_tasks_xmpp(self, jid, tasks):
diff = time.time() - last_update_time
if diff < update_interval:
next_update_time = update_interval - diff
print("jid :", jid, "\n"
"time :", time.time(), "\n"
"last_update_time :", last_update_time, "\n"
"difference :", diff, "\n"
"update interval :", update_interval, "\n"
"next_update_time :", next_update_time, "\n")
await asyncio.sleep(next_update_time)
# print("jid :", jid, "\n"
# "time :", time.time(), "\n"
# "last_update_time :", last_update_time, "\n"
# "difference :", diff, "\n"
# "update interval :", update_interval, "\n"
# "next_update_time :", next_update_time, "\n"
# )
# elif diff > val:
# next_update_time = val
await update_last_update_time(db_file)
@ -139,84 +181,20 @@ async def start_tasks_xmpp(self, jid, tasks):
# print(jid)
# breakpoint()
# await task
print('New details for tasks of {}:\n'.format(jid), task_manager[jid])
async def clean_tasks_xmpp(jid, tasks):
logging.debug(
"Stopping tasks {} for JID {}".format(tasks, jid)
)
async def clean_tasks_xmpp(jid, tasks=None):
if not tasks:
tasks = ['interval', 'status', 'check']
logging.info('Stopping tasks {} for JID {}'.format(tasks, jid))
for task in tasks:
# if task_manager[jid][task]:
try:
task_manager[jid][task].cancel()
except:
logging.debug(
"No task {} for JID {} (clean_tasks)".format(task, jid)
)
"""
TODO
Rename to "start_tasks"
Pass a list (or dict) of tasks to start
NOTE
Consider callback e.g. Slixfeed.send_status.
Or taskhandler for each protocol or specific taskhandler function.
"""
async def task_jid(self, jid):
"""
JID (Jabber ID) task manager.
Parameters
----------
jid : str
Jabber ID.
"""
jid_file = jid.replace('/', '_')
db_file = get_pathname_to_database(jid_file)
enabled = (
await get_settings_value(db_file, "enabled") or
get_value("settings", "Settings", "enabled")
)
if enabled:
# NOTE Perhaps we want to utilize super with keyword
# arguments in order to know what tasks to initiate.
task_manager[jid] = {}
task_manager[jid]["check"] = asyncio.create_task(
check_updates(jid))
task_manager[jid]["status"] = asyncio.create_task(
send_status(self, jid))
task_manager[jid]["interval"] = asyncio.create_task(
send_update(self, jid))
await task_manager[jid]["check"]
await task_manager[jid]["status"]
await task_manager[jid]["interval"]
# tasks_dict = {
# "check": check_updates,
# "status": send_status,
# "interval": send_update
# }
# for task, function in tasks_dict.items():
# task_manager[jid][task] = asyncio.create_task(
# function(jid)
# )
# await function
else:
# FIXME
# The following error occurs only upon first attempt to stop.
# /usr/lib/python3.11/asyncio/events.py:73: RuntimeWarning: coroutine 'Slixfeed.send_update' was never awaited
# self._args = None
# RuntimeWarning: Enable tracemalloc to get the object allocation traceback
try:
task_manager[jid]["interval"].cancel()
except:
None
await send_status(self, jid)
logging.debug('No task {} for JID {} (clean_tasks_xmpp)'
.format(task, jid))
async def send_update(self, jid, num=None):
@ -230,7 +208,7 @@ async def send_update(self, jid, num=None):
num : str, optional
Number. The default is None.
"""
logging.debug("Sending a news update to JID {}".format(jid))
logging.info('Sending a news update to JID {}'.format(jid))
jid_file = jid.replace('/', '_')
db_file = get_pathname_to_database(jid_file)
enabled = (
@ -258,6 +236,7 @@ async def send_update(self, jid, num=None):
feed_id = result[4]
date = result[5]
title_f = get_feed_title(db_file, feed_id)
title_f = title_f[0]
news_digest += action.list_unread_entries(result, title_f)
# print(db_file)
# print(result[0])
@ -356,9 +335,8 @@ async def send_status(self, jid):
jid : str
Jabber ID.
"""
logging.debug(
"Sending a status message to JID {}".format(jid))
status_text = "📜️ Slixfeed RSS News Bot"
logging.info('Sending a status message to JID {}'.format(jid))
status_text = '📜️ Slixfeed RSS News Bot'
jid_file = jid.replace('/', '_')
db_file = get_pathname_to_database(jid_file)
enabled = (
@ -366,24 +344,19 @@ async def send_status(self, jid):
get_value("settings", "Settings", "enabled")
)
if not enabled:
status_mode = "xa"
status_text = "📫️ Send \"Start\" to receive updates"
status_mode = 'xa'
status_text = '📫️ Send "Start" to receive updates'
else:
feeds = await get_number_of_items(
db_file, "feeds")
feeds = await get_number_of_items(db_file, 'feeds')
# print(await current_time(), jid, "has", feeds, "feeds")
if not feeds:
status_mode = "available"
status_text = (
"📪️ Send a URL from a blog or a news website"
)
status_mode = 'available'
status_text = '📪️ Send a URL from a blog or a news website'
else:
unread = await get_number_of_entries_unread(db_file)
if unread:
status_mode = "chat"
status_text = (
"📬️ There are {} news items"
).format(str(unread))
status_mode = 'chat'
status_text = '📬️ There are {} news items'.format(str(unread))
# status_text = (
# "📰 News items: {}"
# ).format(str(unread))
@ -391,8 +364,8 @@ async def send_status(self, jid):
# "📰 You have {} news items"
# ).format(str(unread))
else:
status_mode = "available"
status_text = "📭️ No news"
status_mode = 'available'
status_text = '📭️ No news'
# breakpoint()
# print(await current_time(), status_text, "for", jid)
@ -404,8 +377,7 @@ async def send_status(self, jid):
pstatus=status_text
)
# await asyncio.sleep(60 * 20)
await refresh_task(
self, jid, send_status, "status", "20")
await refresh_task(self, jid, send_status, 'status', '90')
# loop.call_at(
# loop.time() + 60 * 20,
# loop.create_task,
@ -426,9 +398,7 @@ async def refresh_task(self, jid, callback, key, val=None):
val : str, optional
Value. The default is None.
"""
logging.debug(
"Refreshing task {} for JID {}".format(callback, jid)
)
logging.info('Refreshing task {} for JID {}'.format(callback, jid))
if not val:
jid_file = jid.replace('/', '_')
db_file = get_pathname_to_database(jid_file)
@ -441,9 +411,8 @@ async def refresh_task(self, jid, callback, key, val=None):
try:
task_manager[jid][key].cancel()
except:
logging.debug(
"No task of type {} to cancel for "
"JID {} (clean_tasks)".format(key, jid)
logging.info('No task of type {} to cancel for '
'JID {} (refresh_task)'.format(key, jid)
)
# task_manager[jid][key] = loop.call_at(
# loop.time() + 60 * float(val),
@ -482,9 +451,7 @@ async def check_updates(jid):
jid : str
Jabber ID.
"""
logging.debug(
"Scanning for updates for JID {}".format(jid)
)
logging.info('Scanning for updates for JID {}'.format(jid))
while True:
jid_file = jid.replace('/', '_')
db_file = get_pathname_to_database(jid_file)
@ -502,64 +469,6 @@ async def check_updates(jid):
# )
async def start_tasks(self, presence):
jid = presence["from"].bare
logging.debug(
"Beginning tasks for JID {}".format(jid)
)
if jid not in self.boundjid.bare:
await clean_tasks_xmpp(
jid, ["interval", "status", "check"]
)
await start_tasks_xmpp(
self, jid, ["interval", "status", "check"]
)
# await task_jid(self, jid)
# main_task.extend([asyncio.create_task(task_jid(jid))])
# print(main_task)
async def stop_tasks(self, presence):
if not self.boundjid.bare:
jid = presence["from"].bare
logging.debug(
"Stopping tasks for JID {}".format(jid)
)
await clean_tasks_xmpp(
jid, ["interval", "status", "check"]
)
async def check_readiness(self, presence):
"""
Begin tasks if available, otherwise eliminate tasks.
Parameters
----------
presence : str
XML stanza .
Returns
-------
None.
"""
# print("def check_readiness", presence["from"].bare, presence["type"])
# # available unavailable away (chat) dnd xa
# print(">>> type", presence["type"], presence["from"].bare)
# # away chat dnd xa
# print(">>> show", presence["show"], presence["from"].bare)
jid = presence["from"].bare
if presence["show"] in ("away", "dnd", "xa"):
logging.debug(
"Stopping updates for JID {}".format(jid)
)
await clean_tasks_xmpp(
jid, ["interval"])
await start_tasks_xmpp(
self, jid, ["status", "check"])
"""
NOTE
This is an older system, utilizing local storage instead of XMPP presence.
@ -573,13 +482,11 @@ async def select_file(self):
while True:
db_dir = get_default_data_directory()
if not os.path.isdir(db_dir):
msg = (
"Slixfeed can not work without a database.\n"
"To create a database, follow these steps:\n"
"Add Slixfeed contact to your roster.\n"
"Send a feed to the bot by URL:\n"
"https://reclaimthenet.org/feed/"
)
msg = ('Slixfeed does not work without a database.\n'
'To create a database, follow these steps:\n'
'Add Slixfeed contact to your roster.\n'
'Send a feed to the bot by URL:\n'
'https://reclaimthenet.org/feed/')
# print(await current_time(), msg)
print(msg)
else:

View file

@ -7,11 +7,16 @@ TODO
1) ActivityPub URL revealer activitypub_to_http.
2) SQLite preference "instance" for preferred instances.
"""
from email.utils import parseaddr
import logging
import os
import random
import slixfeed.config as config
import slixfeed.fetch as fetch
from urllib.parse import (
parse_qs,
urlencode,
@ -31,6 +36,10 @@ from urllib.parse import (
# coordinated with the dataset of project LibRedirect, even
# though rule-sets might be adopted (see )Privacy Redirect).
def get_hostname(url):
    """Return the network location (hostname, possibly with port) of given URL."""
    return urlsplit(url).netloc
def replace_hostname(url, url_type):
"""
Replace hostname.
@ -47,29 +56,56 @@ def replace_hostname(url, url_type):
url : str
URL.
"""
url_new = None
parted_url = urlsplit(url)
# protocol = parted_url.scheme
hostname = parted_url.netloc
hostname = hostname.replace("www.","")
hostname = hostname.replace('www.','')
pathname = parted_url.path
queries = parted_url.query
fragment = parted_url.fragment
proxies = config.get_list("proxies.toml", "proxies")
for proxy in proxies:
proxy = proxies[proxy]
if hostname in proxy["hostname"] and url_type in proxy["type"]:
select_proxy = random.choice(proxy["clearnet"])
parted_proxy = urlsplit(select_proxy)
protocol_new = parted_proxy.scheme
hostname_new = parted_proxy.netloc
url = urlunsplit([
protocol_new,
hostname_new,
pathname,
queries,
fragment
])
return url
proxies = config.open_config_file('proxies.toml')['proxies']
for proxy_name in proxies:
proxy = proxies[proxy_name]
if hostname in proxy['hostname'] and url_type in proxy['type']:
while not url_new:
proxy_type = 'clearnet'
proxy_list = proxy[proxy_type]
if len(proxy_list):
# proxy_list = proxies[proxy_name][proxy_type]
proxy_url = random.choice(proxy_list)
parted_proxy_url = urlsplit(proxy_url)
protocol_new = parted_proxy_url.scheme
hostname_new = parted_proxy_url.netloc
url_new = urlunsplit([
protocol_new,
hostname_new,
pathname,
queries,
fragment
])
response = fetch.http_response(url_new)
if (response and
response.status_code == 200 and
response.reason == 'OK' and
url_new.startswith(proxy_url)):
break
else:
config_dir = config.get_default_config_directory()
proxies_obsolete_file = config_dir + '/proxies_obsolete.toml'
proxies_file = config_dir + '/proxies.toml'
if not os.path.isfile(proxies_obsolete_file):
config.create_skeleton(proxies_file)
config.backup_obsolete(proxies_obsolete_file, proxy_name, proxy_type, proxy_url)
config.update_proxies(proxies_file, proxy_name, proxy_type, proxy_url)
url_new = None
else:
logging.warning(
"No proxy URLs for {}."
"Update proxies.toml".format(proxy_name))
url_new = url
break
return url_new
def remove_tracking_parameters(url):
@ -92,7 +128,7 @@ def remove_tracking_parameters(url):
pathname = parted_url.path
queries = parse_qs(parted_url.query)
fragment = parted_url.fragment
trackers = config.get_list("queries.toml", "trackers")
trackers = config.open_config_file('queries.toml')['trackers']
for tracker in trackers:
if tracker in queries: del queries[tracker]
queries_new = urlencode(queries, doseq=True)
@ -122,7 +158,7 @@ def feed_to_http(url):
"""
par_url = urlsplit(url)
new_url = urlunsplit([
"http",
'http',
par_url.netloc,
par_url.path,
par_url.query,
@ -169,15 +205,15 @@ def complete_url(source, link):
str
URL.
"""
if link.startswith("www."):
return "http://" + link
if link.startswith('www.'):
return 'http://' + link
parted_link = urlsplit(link)
parted_feed = urlsplit(source)
if parted_link.scheme == "magnet" and parted_link.query:
if parted_link.scheme == 'magnet' and parted_link.query:
return link
if parted_link.scheme and parted_link.netloc:
return link
if link.startswith("//"):
if link.startswith('//'):
if parted_link.netloc and parted_link.path:
new_link = urlunsplit([
parted_feed.scheme,
@ -186,7 +222,7 @@ def complete_url(source, link):
parted_link.query,
parted_link.fragment
])
elif link.startswith("/"):
elif link.startswith('/'):
new_link = urlunsplit([
parted_feed.scheme,
parted_feed.netloc,
@ -194,57 +230,59 @@ def complete_url(source, link):
parted_link.query,
parted_link.fragment
])
elif link.startswith("../"):
pathlink = parted_link.path.split("/")
pathfeed = parted_feed.path.split("/")
elif link.startswith('../'):
pathlink = parted_link.path.split('/')
pathfeed = parted_feed.path.split('/')
for i in pathlink:
if i == "..":
if pathlink.index("..") == 0:
if i == '..':
if pathlink.index('..') == 0:
pathfeed.pop()
else:
break
while pathlink.count(".."):
if pathlink.index("..") == 0:
pathlink.remove("..")
while pathlink.count('..'):
if pathlink.index('..') == 0:
pathlink.remove('..')
else:
break
pathlink = "/".join(pathlink)
pathlink = '/'.join(pathlink)
pathfeed.extend([pathlink])
new_link = urlunsplit([
parted_feed.scheme,
parted_feed.netloc,
"/".join(pathfeed),
'/'.join(pathfeed),
parted_link.query,
parted_link.fragment
])
else:
pathlink = parted_link.path.split("/")
pathfeed = parted_feed.path.split("/")
if link.startswith("./"):
pathlink.remove(".")
if not source.endswith("/"):
pathlink = parted_link.path.split('/')
pathfeed = parted_feed.path.split('/')
if link.startswith('./'):
pathlink.remove('.')
if not source.endswith('/'):
pathfeed.pop()
pathlink = "/".join(pathlink)
pathlink = '/'.join(pathlink)
pathfeed.extend([pathlink])
new_link = urlunsplit([
parted_feed.scheme,
parted_feed.netloc,
"/".join(pathfeed),
'/'.join(pathfeed),
parted_link.query,
parted_link.fragment
])
return new_link
"""
TODO
Feed https://www.ocaml.org/feed.xml
Link %20https://frama-c.com/fc-versions/cobalt.html%20
FIXME
Feed https://cyber.dabamos.de/blog/feed.rss
Link https://cyber.dabamos.de/blog/#article-2022-07-15
"""
# TODO
# Feed https://www.ocaml.org/feed.xml
# Link %20https://frama-c.com/fc-versions/cobalt.html%20
# FIXME
# Feed https://cyber.dabamos.de/blog/feed.rss
# Link https://cyber.dabamos.de/blog/#article-2022-07-15
def join_url(source, link):
"""
Join base URL with given pathname.
@ -261,13 +299,13 @@ def join_url(source, link):
str
URL.
"""
if link.startswith("www."):
new_link = "http://" + link
elif link.startswith("%20") and link.endswith("%20"):
old_link = link.split("%20")
if link.startswith('www.'):
new_link = 'http://' + link
elif link.startswith('%20') and link.endswith('%20'):
old_link = link.split('%20')
del old_link[0]
old_link.pop()
new_link = "".join(old_link)
new_link = ''.join(old_link)
else:
new_link = urljoin(source, link)
return new_link
@ -293,8 +331,8 @@ def trim_url(url):
pathname = parted_url.path
queries = parted_url.query
fragment = parted_url.fragment
while "//" in pathname:
pathname = pathname.replace("//", "/")
while '//' in pathname:
pathname = pathname.replace('//', '/')
url = urlunsplit([
protocol,
hostname,

View file

@ -1,10 +1,24 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
TODO
1) Save groupchat name instead of jid in field name.
"""
from slixmpp.plugins.xep_0048.stanza import Bookmarks
async def get(self):
    """
    Fetch the private XEP-0048 bookmarks and return the conference entries.

    Returns
    -------
    list
        Bookmarked conferences (groupchats).
    """
    stored = await self.plugin['xep_0048'].get_bookmarks()
    return stored['private']['bookmarks']['conferences']
async def add(self, muc_jid):
result = await self.plugin['xep_0048'].get_bookmarks()
bookmarks = result['private']['bookmarks']
@ -32,13 +46,6 @@ async def add(self, muc_jid):
# await self['xep_0402'].publish(bm)
async def get(self):
    """
    Retrieve bookmarked conferences (XEP-0048 private storage).

    NOTE(review): this module contains two identical definitions of
    ``get`` — the later one wins at import time; consider removing one.

    Returns
    -------
    conferences
        Conference bookmark entries as returned by the xep_0048 plugin.
    """
    result = await self.plugin['xep_0048'].get_bookmarks()
    bookmarks = result['private']['bookmarks']
    conferences = bookmarks['conferences']
    return conferences
async def remove(self, muc_jid):
result = await self.plugin['xep_0048'].get_bookmarks()
bookmarks = result['private']['bookmarks']

View file

@ -16,14 +16,7 @@ TODO
2) Assure message delivery before calling a new task.
See https://slixmpp.readthedocs.io/en/latest/event_index.html#term-marker_acknowledged
3) Check the last message sent by the bot.
This is essential in case bot restarts within an update interval.
Example:
Bot is set to send an update every 5 hours.
Bot was disconnected and reconnected after an hour.
Bot will send an update when it is connected, which is lesser than 5 hours as it should.
4) XHTTML-IM
3) XHTTML-IM
case _ if message_lowercase.startswith("html"):
message['html']="
Parse me!
@ -67,12 +60,13 @@ from slixmpp.plugins.xep_0048.stanza import Bookmarks
# import xml.etree.ElementTree as ET
# from lxml import etree
import slixfeed.xmpp.bookmark as bookmark
import slixfeed.xmpp.connect as connect
import slixfeed.xmpp.muc as muc
import slixfeed.xmpp.process as process
import slixfeed.xmpp.profile as profile
import slixfeed.xmpp.roster as roster
import slixfeed.xmpp.service as service
# import slixfeed.xmpp.service as service
import slixfeed.xmpp.state as state
import slixfeed.xmpp.status as status
import slixfeed.xmpp.utility as utility
@ -110,39 +104,54 @@ class Slixfeed(slixmpp.ClientXMPP):
# and the XML streams are ready for use. We want to
# listen for this event so that we we can initialize
# our roster.
self.add_event_handler("session_start", self.on_session_start)
self.add_event_handler("session_resumed", self.on_session_resumed)
self.add_event_handler("session_start",
self.on_session_start)
self.add_event_handler("session_resumed",
self.on_session_resumed)
self.add_event_handler("got_offline", print("got_offline"))
# self.add_event_handler("got_online", self.check_readiness)
self.add_event_handler("changed_status", self.on_changed_status)
self.add_event_handler("presence_available", self.on_presence_available)
self.add_event_handler("presence_unavailable", self.on_presence_unavailable)
self.add_event_handler("changed_subscription", self.on_changed_subscription)
self.add_event_handler("chatstate_active", self.on_chatstate_active)
self.add_event_handler("chatstate_gone", self.on_chatstate_gone)
self.add_event_handler("chatstate_composing", self.check_chatstate_composing)
self.add_event_handler("chatstate_paused", self.check_chatstate_paused)
self.add_event_handler("changed_status",
self.on_changed_status)
self.add_event_handler("presence_available",
self.on_presence_available)
self.add_event_handler("presence_unavailable",
self.on_presence_unavailable)
self.add_event_handler("chatstate_active",
self.on_chatstate_active)
self.add_event_handler("chatstate_composing",
self.on_chatstate_composing)
self.add_event_handler("chatstate_gone",
self.on_chatstate_gone)
self.add_event_handler("chatstate_inactive",
self.on_chatstate_inactive)
self.add_event_handler("chatstate_paused",
self.on_chatstate_paused)
# The message event is triggered whenever a message
# stanza is received. Be aware that that includes
# MUC messages and error messages.
self.add_event_handler("message", self.on_message)
self.add_event_handler("message",
self.on_message)
self.add_event_handler("groupchat_invite", self.on_groupchat_invite) # XEP_0045
self.add_event_handler("groupchat_direct_invite", self.on_groupchat_direct_invite) # XEP_0249
self.add_event_handler("groupchat_invite",
self.on_groupchat_invite) # XEP_0045
self.add_event_handler("groupchat_direct_invite",
self.on_groupchat_direct_invite) # XEP_0249
# self.add_event_handler("groupchat_message", self.message)
# self.add_event_handler("disconnected", self.reconnect)
# self.add_event_handler("disconnected", self.inspect_connection)
self.add_event_handler("reactions", self.on_reactions)
self.add_event_handler("presence_error", self.on_presence_error)
self.add_event_handler("presence_subscribe", self.on_presence_subscribe)
self.add_event_handler("presence_subscribed", self.on_presence_subscribed)
self.add_event_handler("presence_unsubscribe", self.on_presence_unsubscribe)
self.add_event_handler("presence_unsubscribed", self.on_presence_unsubscribed)
self.add_event_handler("reactions",
self.on_reactions)
self.add_event_handler("presence_error",
self.on_presence_error)
self.add_event_handler("presence_subscribe",
self.on_presence_subscribe)
self.add_event_handler("presence_subscribed",
self.on_presence_subscribed)
self.add_event_handler("presence_unsubscribed",
self.on_presence_unsubscribed)
# Initialize event loop
# self.loop = asyncio.get_event_loop()
@ -154,39 +163,61 @@ class Slixfeed(slixmpp.ClientXMPP):
self.add_event_handler("session_end", self.on_session_end)
# TODO Test
async def on_groupchat_invite(self, message):
print("on_groupchat_invite")
await muc.accept_invitation(self, message)
logging.warning("on_groupchat_invite")
inviter = message["from"].bare
muc_jid = message['groupchat_invite']['jid']
await muc.join(self, inviter, muc_jid)
await bookmark.add(self, muc_jid)
# NOTE Tested with Gajim and Psi
async def on_groupchat_direct_invite(self, message):
print("on_groupchat_direct_invite")
await muc.accept_invitation(self, message)
inviter = message["from"].bare
muc_jid = message['groupchat_invite']['jid']
await muc.join(self, inviter, muc_jid)
await bookmark.add(self, muc_jid)
async def on_session_end(self, event):
if event:
message = "Session has ended. Reason: {}".format(event)
else:
message = "Session has ended."
await connect.recover_connection(self, event, message)
message = "Session has ended."
await connect.recover_connection(self, message)
async def on_connection_failed(self, event):
message = "Connection has failed. Reason: {}".format(event)
await connect.recover_connection(self, event, message)
message = "Connection has failed. Reason: {}".format(event)
await connect.recover_connection(self, message)
async def on_session_start(self, event):
await process.event(self, event)
await process.event(self)
await muc.autojoin(self)
profile.set_identity(self, "client")
await profile.update(self)
service.identity(self, "client")
task.ping_task(self)
# await Service.capabilities(self)
# Service.commands(self)
# Service.reactions(self)
await self.service_capabilities()
self.service_commands()
self.service_reactions()
async def on_session_resumed(self, event):
await process.event(self, event)
await process.event(self)
await muc.autojoin(self)
profile.set_identity(self, "client")
# await Service.capabilities(self)
# Service.commands(self)
# Service.reactions(self)
await self.service_capabilities()
self.service_commands()
self.service_reactions()
# TODO Request for subscription
@ -195,20 +226,21 @@ class Slixfeed(slixmpp.ClientXMPP):
if "chat" == await utility.get_chat_type(self, jid):
await roster.add(self, jid)
await state.request(self, jid)
await process.message(self, message)
# chat_type = message["type"]
# message_body = message["body"]
# message_reply = message.reply
await process.message(self, message)
async def on_changed_status(self, presence):
await task.check_readiness(self, presence)
# await task.check_readiness(self, presence)
jid = presence['from'].bare
if presence['show'] in ('away', 'dnd', 'xa'):
await task.clean_tasks_xmpp(jid, ['interval'])
await task.start_tasks_xmpp(self, jid, ['status', 'check'])
# TODO Request for subscription
async def on_presence_subscribe(self, presence):
print("on_presence_subscribe")
print(presence)
jid = presence["from"].bare
await state.request(self, jid)
@ -220,7 +252,10 @@ class Slixfeed(slixmpp.ClientXMPP):
async def on_presence_available(self, presence):
# TODO Add function to check whether task is already running or not
await task.start_tasks(self, presence)
# await task.start_tasks(self, presence)
# NOTE Already done inside the start-task function
jid = presence["from"].bare
await task.start_tasks_xmpp(self, jid)
async def on_presence_unsubscribed(self, presence):
@ -230,64 +265,59 @@ class Slixfeed(slixmpp.ClientXMPP):
async def on_presence_unavailable(self, presence):
await task.stop_tasks(self, presence)
async def on_changed_subscription(self, presence):
print("on_changed_subscription")
print(presence)
jid = presence["from"].bare
# breakpoint()
async def on_presence_unsubscribe(self, presence):
print("on_presence_unsubscribe")
print(presence)
# await task.stop_tasks(self, jid)
await task.clean_tasks_xmpp(jid)
# TODO
# Send message that database will be deleted within 30 days
# Check whether JID is in bookmarks or roster
# If roster, remove contact JID into file
# If bookmarks, remove groupchat JID into file
async def on_presence_error(self, presence):
print("on_presence_error")
print(presence)
jid = presence["from"].bare
await task.clean_tasks_xmpp(jid)
async def on_reactions(self, message):
print("on_reactions")
print(message)
print(message['from'])
print(message['reactions']['values'])
async def on_chatstate_active(self, message):
print("on_chatstate_active")
print(message)
if message['type'] in ('chat', 'normal'):
jid = message['from'].bare
# await task.clean_tasks_xmpp(jid, ['status'])
await task.start_tasks_xmpp(self, jid, ['status'])
async def on_chatstate_composing(self, message):
if message['type'] in ('chat', 'normal'):
jid = message['from'].bare
# await task.clean_tasks_xmpp(jid, ['status'])
status_text='Press "help" for manual, or "info" for information.'
status.send(self, jid, status_text)
async def on_chatstate_gone(self, message):
print("on_chatstate_gone")
print(message)
if message['type'] in ('chat', 'normal'):
jid = message['from'].bare
# await task.clean_tasks_xmpp(jid, ['status'])
await task.start_tasks_xmpp(self, jid, ['status'])
async def check_chatstate_composing(self, message):
print("def check_chatstate_composing")
print(message)
if message["type"] in ("chat", "normal"):
jid = message["from"].bare
status_text="Press \"help\" for manual."
self.send_presence(
# pshow=status_mode,
pstatus=status_text,
pto=jid,
)
async def on_chatstate_inactive(self, message):
if message['type'] in ('chat', 'normal'):
jid = message['from'].bare
# await task.clean_tasks_xmpp(jid, ['status'])
await task.start_tasks_xmpp(self, jid, ['status'])
async def check_chatstate_paused(self, message):
print("def check_chatstate_paused")
print(message)
if message["type"] in ("chat", "normal"):
jid = message["from"].bare
await task.refresh_task(
self,
jid,
task.send_status,
"status",
20
)
async def on_chatstate_paused(self, message):
if message['type'] in ('chat', 'normal'):
jid = message['from'].bare
# await task.clean_tasks_xmpp(jid, ['status'])
await task.start_tasks_xmpp(self, jid, ['status'])

View file

@ -60,12 +60,14 @@ from slixmpp.plugins.xep_0048.stanza import Bookmarks
# import xml.etree.ElementTree as ET
# from lxml import etree
# import slixfeed.xmpp.bookmark as bookmark
import slixfeed.xmpp.connect as connect
import slixfeed.xmpp.muc as muc
# NOTE MUC is possible for component
# import slixfeed.xmpp.muc as muc
import slixfeed.xmpp.process as process
import slixfeed.xmpp.profile as profile
import slixfeed.xmpp.roster as roster
import slixfeed.xmpp.service as service
# import slixfeed.xmpp.roster as roster
# import slixfeed.xmpp.service as service
import slixfeed.xmpp.state as state
import slixfeed.xmpp.status as status
import slixfeed.xmpp.utility as utility
@ -102,9 +104,6 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
self.add_event_handler("changed_status", self.on_changed_status)
self.add_event_handler("presence_available", self.on_presence_available)
self.add_event_handler("presence_unavailable", self.on_presence_unavailable)
self.add_event_handler("changed_subscription", self.on_changed_subscription)
self.add_event_handler("chatstate_active", self.on_chatstate_active)
self.add_event_handler("chatstate_gone", self.on_chatstate_gone)
self.add_event_handler("chatstate_composing", self.check_chatstate_composing)
@ -126,7 +125,6 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
self.add_event_handler("presence_error", self.on_presence_error)
self.add_event_handler("presence_subscribe", self.on_presence_subscribe)
self.add_event_handler("presence_subscribed", self.on_presence_subscribed)
self.add_event_handler("presence_unsubscribe", self.on_presence_unsubscribe)
self.add_event_handler("presence_unsubscribed", self.on_presence_unsubscribed)
# Initialize event loop
@ -139,39 +137,61 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
self.add_event_handler("session_end", self.on_session_end)
async def on_groupchat_invite(self, message):
print("on_groupchat_invite")
await muc.accept_invitation(self, message)
# async def on_groupchat_invite(self, message):
# logging.warning("on_groupchat_invite")
# inviter = message["from"].bare
# muc_jid = message['groupchat_invite']['jid']
# await muc.join(self, inviter, muc_jid)
# await bookmark.add(self, muc_jid)
async def on_groupchat_direct_invite(self, message):
print("on_groupchat_direct_invite")
await muc.accept_invitation(self, message)
# NOTE Tested with Gajim and Psi
# async def on_groupchat_direct_invite(self, message):
# inviter = message["from"].bare
# muc_jid = message['groupchat_invite']['jid']
# await muc.join(self, inviter, muc_jid)
# await bookmark.add(self, muc_jid)
async def on_session_end(self, event):
if event:
message = "Session has ended. Reason: {}".format(event)
else:
message = "Session has ended."
await connect.recover_connection(self, event, message)
message = "Session has ended."
await connect.recover_connection(self, message)
async def on_connection_failed(self, event):
message = "Connection has failed. Reason: {}".format(event)
await connect.recover_connection(self, event, message)
await connect.recover_connection(self, message)
async def on_session_start(self, event):
await process.event_component(self, event)
self.send_presence()
await process.event_component(self)
# await muc.autojoin(self)
profile.set_identity(self, "service")
await profile.update(self)
service.identity(self, "service")
connect.ping_task(self)
# await Service.capabilities(self)
# Service.commands(self)
# Service.reactions(self)
await self.service_capabilities()
self.service_commands()
self.service_reactions()
async def on_session_resumed(self, event):
await process.event_component(self, event)
await process.event_component(self)
# await muc.autojoin(self)
profile.set_identity(self, "service")
# await Service.capabilities(self)
# Service.commands(self)
# Service.reactions(self)
await self.service_capabilities()
self.service_commands()
self.service_reactions()
# TODO Request for subscription
@ -180,20 +200,17 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
# if "chat" == await utility.get_chat_type(self, jid):
# await roster.add(self, jid)
# await state.request(self, jid)
await process.message(self, message)
# chat_type = message["type"]
# message_body = message["body"]
# message_reply = message.reply
await process.message(self, message)
async def on_changed_status(self, presence):
await task.check_readiness(self, presence)
# TODO Request for subscription
async def on_presence_subscribe(self, presence):
print("on_presence_subscribe")
print(presence)
jid = presence["from"].bare
# await state.request(self, jid)
self.send_presence_subscription(
@ -219,19 +236,8 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
async def on_presence_unavailable(self, presence):
await task.stop_tasks(self, presence)
async def on_changed_subscription(self, presence):
print("on_changed_subscription")
print(presence)
jid = presence["from"].bare
# breakpoint()
async def on_presence_unsubscribe(self, presence):
print("on_presence_unsubscribe")
print(presence)
await task.stop_tasks(self, jid)
async def on_presence_error(self, presence):
@ -240,43 +246,35 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
async def on_reactions(self, message):
print("on_reactions")
print(message)
print(message['from'])
print(message['reactions']['values'])
async def on_chatstate_active(self, message):
print("on_chatstate_active")
print(message)
if message['type'] in ('chat', 'normal'):
jid = message['from'].bare
await task.clean_tasks_xmpp(jid, ['status'])
await task.start_tasks_xmpp(self, jid, ['status'])
async def on_chatstate_gone(self, message):
print("on_chatstate_gone")
print(message)
if message['type'] in ('chat', 'normal'):
jid = message['from'].bare
await task.clean_tasks_xmpp(jid, ['status'])
await task.start_tasks_xmpp(self, jid, ['status'])
async def check_chatstate_composing(self, message):
print("def check_chatstate_composing")
print(message)
if message["type"] in ("chat", "normal"):
jid = message["from"].bare
status_text="Press \"help\" for manual."
self.send_presence(
# pshow=status_mode,
pstatus=status_text,
pto=jid,
)
if message['type'] in ('chat', 'normal'):
jid = message['from'].bare
await task.clean_tasks_xmpp(jid, ['status'])
status_text='Press "help" for manual, or "info" for information.'
status.send(self, jid, status_text)
async def check_chatstate_paused(self, message):
print("def check_chatstate_paused")
print(message)
if message["type"] in ("chat", "normal"):
jid = message["from"].bare
await task.refresh_task(
self,
jid,
task.send_status,
"status",
20
)
if message['type'] in ('chat', 'normal'):
jid = message['from'].bare
await task.clean_tasks_xmpp(jid, ['status'])
await task.start_tasks_xmpp(self, jid, ['status'])

View file

@ -13,13 +13,47 @@ TODO
"""
import asyncio
from slixfeed.config import get_value
from slixfeed.dt import current_time
from slixmpp.exceptions import IqTimeout, IqError
from time import sleep
import logging
async def recover_connection(self, event, message):
async def ping(self, jid=None):
    """
    Periodically ping a JID and disconnect when no ping succeeds.

    Sends an XEP-0199 ping once a minute (10 s timeout per attempt).
    If no round-trip time is obtained — i.e. the ping raised an error
    or timed out — ``self.disconnect()`` is called; reconnection is
    presumably handled elsewhere (e.g. a "disconnected" handler) —
    TODO confirm.

    Parameters
    ----------
    jid : str, optional
        Jabber ID to ping. The default is None, in which case the
        bot's own bare JID is pinged.

    Returns
    -------
    None.
    """
    if not jid:
        jid = self.boundjid.bare
    while True:
        rtt = None
        try:
            rtt = await self['xep_0199'].ping(jid, timeout=10)
            logging.info("Success! RTT: %s", rtt)
        except IqError as e:
            # The remote entity answered the ping with an error stanza.
            logging.info("Error pinging %s: %s",
                         jid,
                         e.iq['error']['condition'])
        except IqTimeout:
            logging.info("No response from %s", jid)
        if not rtt:
            # No RTT means the ping failed; drop the connection.
            # NOTE(review): the loop keeps running after disconnect() —
            # verify this task is cancelled when the session ends.
            self.disconnect()
        await asyncio.sleep(60 * 1)
async def recover_connection(self, message):
logging.warning(message)
print(current_time(), message, "Attempting to reconnect.")
self.connection_attempts += 1

View file

@ -1,283 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def print_info():
    """
    Print information.

    Returns
    -------
    msg : str
        Message.
    """
    msg = (
        "```"
        "\n"
        "ABOUT\n"
        " Slixfeed aims to be an easy to use and fully-featured news\n"
        " aggregator bot for XMPP. It provides a convenient access to Blogs,\n"
        " Fediverse and News websites along with filtering functionality."
        "\n"
        " Slixfeed is primarily designed for XMPP (aka Jabber).\n"
        " Visit https://xmpp.org/software/ for more information.\n"
        "\n"
        " XMPP is the Extensible Messaging and Presence Protocol, a set\n"
        " of open technologies for instant messaging, presence, multi-party\n"
        " chat, voice and video calls, collaboration, lightweight\n"
        " middleware, content syndication, and generalized routing of XML\n"
        " data.\n"  # FIX: terminate the sentence (was fused with the next line)
        " Visit https://xmpp.org/about/ for more information on the XMPP\n"
        " protocol.\n"  # FIX: terminate the sentence before the next section
        "\n"  # FIX: was a stray " " that glued FILETYPES onto the sentence above
        # "PLATFORMS\n"
        # " Supported protocols are IRC, Matrix, Tox and XMPP.\n"
        # " For the best experience, we recommend you to use XMPP.\n"
        # "\n"
        "FILETYPES\n"
        " Supported filetypes: Atom, RDF, RSS and XML.\n"
        "\n"
        "PROTOCOLS\n"
        " Supported protocols: Dat, FTP, Gemini, Gopher, HTTP and IPFS.\n"
        "\n"
        "AUTHORS\n"
        " Laura Lapina, Schimon Zackary.\n"
        "\n"
        "THANKS\n"
        " Christian Dersch (SalixOS),"
        " Cyrille Pontvieux (SalixOS, France),"
        "\n"
        " Denis Fomin (Gajim, Russia),"
        " Dimitris Tzemos (SalixOS, Greece),"
        "\n"
        " Emmanuel Gil Peyrot (poezio, France),"
        " Florent Le Coz (poezio, France),"
        "\n"
        " George Vlahavas (SalixOS, Greece),"
        " Guus der Kinderen (IgniteRealtime.org Openfire, Netherlands),"
        "\n"
        " Maxime Buquet (slixmpp, France),"
        " Mathieu Pasquet (slixmpp, France),"
        "\n"
        " Pierrick Le Brun (SalixOS, France),"
        " Remko Tronçon (Swift, Germany),"
        "\n"
        " Raphael Groner (Fedora, Germany),"
        " Thorsten Mühlfelder (SalixOS, Germany),"
        "\n"
        " Yann Leboulanger (Gajim, France)."
        "\n"
        "\n"
        "COPYRIGHT\n"
        " Slixfeed is free software; you can redistribute it and/or\n"
        " modify it under the terms of the MIT License.\n"
        "\n"
        " Slixfeed is distributed in the hope that it will be useful,\n"
        " but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
        " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
        " MIT License for more details.\n"
        "\n"
        "NOTE\n"
        " You can run Slixfeed on your own computer, server, and\n"
        " even on a Linux phone (i.e. Droidian, Kupfer, Mobian, NixOS,\n"
        " postmarketOS). You can also use Termux.\n"
        "\n"
        " All you need is one of the above and an XMPP account to\n"
        " connect Slixfeed to.\n"
        "\n"
        "DOCUMENTATION\n"
        " Slixfeed\n"
        " https://gitgud.io/sjehuda/slixfeed\n"
        " Slixmpp\n"
        " https://slixmpp.readthedocs.io/\n"
        " feedparser\n"
        " https://pythonhosted.org/feedparser\n"
        "```"
        )
    return msg
def print_help():
    """
    Print help manual.

    Returns
    -------
    msg : str
        Message.
    """
    msg = (
        "```"
        "\n"
        "NAME\n"
        "Slixfeed - News syndication bot for Jabber/XMPP\n"
        "\n"
        "DESCRIPTION\n"
        " Slixfeed is a news aggregator bot for online news feeds.\n"
        " This program is primarily designed for XMPP.\n"
        " For more information, visit https://xmpp.org/software/\n"
        "\n"
        "BASIC USAGE\n"
        " <url>\n"
        " Add <url> to subscription list.\n"
        " add <url> TITLE\n"
        " Add <url> to subscription list (without validity check).\n"
        " get <id> <type>\n"
        " Send an article as file. Specify <id> and <type>.\n"  # FIX: was fused with the next line
        " Supported types are HTML, MD and PDF (default).\n"
        " join <muc>\n"
        " Join specified groupchat.\n"
        " read <url>\n"
        " Display most recent 20 titles of given <url>.\n"
        " read <url> <n>\n"
        " Display specified entry number from given <url>.\n"
        "\n"
        "CUSTOM ACTIONS\n"
        " new\n"
        " Send only new items of newly added feeds.\n"
        " old\n"
        " Send all items of newly added feeds.\n"
        " next N\n"
        " Send N next updates.\n"
        " reset\n"
        " Mark all entries as read and remove all archived entries\n"
        " reset <url>\n"
        " Mark entries of <url> as read and remove all archived entries of <url>.\n"
        " start\n"
        " Enable bot and send updates.\n"
        " stop\n"
        " Disable bot and stop updates.\n"
        "\n"
        "MESSAGE OPTIONS\n"
        " interval <num>\n"
        " Set interval update to every <num> minutes.\n"
        " length\n"
        " Set maximum length of news item description. (0 for no limit)\n"
        " quantum <num>\n"
        " Set <num> amount of updates per interval.\n"
        "\n"
        "GROUPCHAT OPTIONS\n"
        " ! (command initiation)\n"
        " Use exclamation mark to initiate an actionable command.\n"
        # " activate CODE\n"
        # " Activate and command bot.\n"
        # " demaster NICKNAME\n"
        # " Remove master privilege.\n"
        # " mastership NICKNAME\n"
        # " Add master privilege.\n"
        # " ownership NICKNAME\n"
        # " Set new owner.\n"
        "\n"
        "FILTER OPTIONS\n"
        " allow +\n"
        " Add keywords to allow (comma separates).\n"
        " allow -\n"
        " Delete keywords from allow list (comma separates).\n"
        " deny +\n"
        " Keywords to block (comma separates).\n"
        " deny -\n"
        " Delete keywords from deny list (comma separates).\n"
        # " filter clear allow\n"
        # " Reset allow list.\n"
        # " filter clear deny\n"
        # " Reset deny list.\n"
        "\n"
        "EDIT OPTIONS\n"
        " remove <id>\n"
        " Remove feed of <id> from subscription list.\n"
        " disable <id>\n"
        " Disable updates for feed of <id>.\n"
        " enable <id>\n"
        " Enable updates for feed of <id>.\n"
        "\n"
        "SEARCH OPTIONS\n"
        " feeds\n"
        " List all subscriptions.\n"
        " feeds <text>\n"
        " Search subscriptions by given <text>.\n"
        " search <text>\n"
        " Search news items by given <text>.\n"
        " recent <num>\n"
        " List recent <num> news items (up to 50 items).\n"
        "\n"
        # "STATISTICS OPTIONS\n"
        # " analyses\n"
        # " Show report and statistics of feeds.\n"
        # " obsolete\n"
        # " List feeds that are not available.\n"
        # " unread\n"
        # " Print number of unread news items.\n"
        # "\n"
        "BACKUP OPTIONS\n"
        " export opml\n"
        " Send an OPML file with feeds.\n"
        # " backup news html\n"
        # " Send an HTML formatted file of your news items.\n"
        # " backup news md\n"
        # " Send a Markdown file of your news items.\n"
        # " backup news text\n"
        # " Send a Plain Text file of your news items.\n"
        "\n"
        "SUPPORT\n"
        " commands\n"
        " Print list of commands.\n"
        " help\n"
        " Print this help manual.\n"
        " info\n"
        " Print information page.\n"
        " support\n"
        " Join xmpp:slixfeed@chat.woodpeckersnest.space?join\n"
        # "\n"
        # "PROTOCOLS\n"
        # " Supported protocols are IRC, Matrix and XMPP.\n"
        # " For the best experience, we recommend you to use XMPP.\n"
        # "\n"
        "```"
        )
    return msg
def print_cmd():
    """
    Print list of commands.

    Returns
    -------
    msg : str
        Message.
    """
    # One entry per command, rendered as a preformatted code block.
    entries = [
        "! : Use exclamation mark to initiate an actionable command (groupchats only).",
        "<muc> : Join specified groupchat.",
        "<url> : Add <url> to subscription list.",
        "add <url> <title> : Add <url> to subscription list (without validity check).",
        "allow + : Add keywords to allow (comma separates).",
        "allow - : Delete keywords from allow list (comma separates).",
        "deny + : Keywords to block (comma separates).",
        "deny - : Delete keywords from deny list (comma separates).",
        "disable <id> : Disable updates for feed of <id>.",
        "enable <id> : Enable updates for feed of <id>.",
        "export opml : Send an OPML file with feeds.",
        "feeds : List all subscriptions.",
        "feeds <text> : Search subscriptions by given <text>.",
        "get <id> <type> : Send an article as file. Specify <id> and <type>. Supported types are HTML, MD and PDF (default).",
        "interval <n> : Set interval update to every <n> minutes.",
        "join <muc> : Join specified groupchat.",
        "length : Set maximum length of news item description. (0 for no limit)",
        "new : Send only new items of newly added feeds.",
        "next <n> : Send <n> next updates.",
        "old : Send all items of newly added feeds.",
        "quantum <n> : Set <n> amount of updates per interval.",
        "read <url> : Display most recent 20 titles of given <url>.",
        "read <url> <n> : Display specified entry number from given <url>.",
        "recent <n> : List recent <n> news items (up to 50 items).",
        "reset : Mark all entries as read.",
        "reset <url> : Mark entries of <url> as read.",
        "remove <id> : Remove feed from subscription list.",
        "search <text> : Search news items by given <text>.",
        "start : Enable bot and send updates.",
        "stop : Disable bot and stop updates.",
        ]
    return "```\n" + "".join(entry + "\n" for entry in entries) + "```"

View file

@ -11,7 +11,9 @@ TODO
3) If groupchat error is received, send that error message to inviter.
4) Save name of groupchat instead of jid as name
FIXME
1) Save name of groupchat instead of jid as name
"""
import logging
@ -40,10 +42,16 @@ async def autojoin(self):
for conference in conferences:
if conference["autojoin"]:
muc_jid = conference["jid"]
logging.debug(
"Autojoin {} ({})".format(conference["name"], muc_jid))
print(
"Autojoin {} ({})".format(conference["name"], muc_jid))
logging.info(
'Autojoin groupchat\n'
'Name : {}\n'
'JID : {}\n'
'Alias : {}\n'
.format(
conference["name"],
muc_jid,
conference["nick"]
))
self.plugin['xep_0045'].join_muc(
muc_jid,
conference["nick"],
@ -71,15 +79,20 @@ async def join(self, inviter, muc_jid):
# "Send activation token {} to groupchat xmpp:{}?join."
# ).format(token, muc_jid)
# )
print("muc_jid")
print(muc_jid)
logging.info(
'Joining groupchat\n'
'JID : {}\n'
'Inviter : {}\n'
.format(
muc_jid,
inviter
))
self.plugin['xep_0045'].join_muc(
muc_jid,
self.alias,
# If a room password is needed, use:
# password=the_room_password,
)
await bookmark.add(self, muc_jid)
process.greet(self, muc_jid, chat_type="groupchat")
@ -97,7 +110,6 @@ async def leave(self, muc_jid):
mbody=message,
mtype="groupchat"
)
await bookmark.remove(self, muc_jid)
self.plugin['xep_0045'].leave_muc(
muc_jid,
self.alias,

File diff suppressed because it is too large Load diff

View file

@ -62,6 +62,29 @@ async def set_avatar(self):
await self.plugin["xep_0153"].set_avatar(avatar=avatar)
def set_identity(self, category):
    """
    Identify for Service Discovery (XEP-0030).

    Registers a disco#info identity so peers can see whether this
    bot runs as a client or as a server component.

    Parameters
    ----------
    category : str
        "client" or "service".

    Returns
    -------
    None.
    """
    # itype 'news' advertises the bot as a news-oriented entity.
    self['xep_0030'].add_identity(
        category=category,
        itype='news',
        name='slixfeed',
        node=None,
        jid=self.boundjid.full,
        )
async def set_vcard(self):
vcard = self.plugin["xep_0054"].make_vcard()
fields = {

View file

@ -1,24 +0,0 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def identity(self, category):
    """
    Register a Service Discovery (XEP-0030) identity for this entity.

    Parameters
    ----------
    category : str
        "client" or "service".

    Returns
    -------
    None.
    """
    disco = self["xep_0030"]
    disco.add_identity(
        category=category,
        itype="news",
        name="slixfeed",
        node=None,
        jid=self.boundjid.full,
        )

View file

@ -1,10 +1,10 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def process_task_message(self, jid, status_message):
def send(self, jid, status_message, status_type=None):
self.send_presence(
pshow="dnd",
pshow=status_type,
pstatus=status_message,
pto=jid,
pfrom=self.boundjid.bare,
pto=jid
)

View file

@ -39,7 +39,8 @@ async def get_chat_type(self, jid):
# NOTE Is it needed? We do not interact with gateways or services
else:
chat_type = "chat"
print('JID {} chat type is {}'.format(jid, chat_type))
logging.info('Jabber ID: {}\n'
'Chat Type: {}'.format(jid, chat_type))
return chat_type
# TODO Test whether this exception is realized
except IqTimeout as e: