forked from sch/Slixfeed
Fix many issues amidst change of table structure
This commit is contained in:
parent f683e11c4a
commit c7fa2496a8
21 changed files with 1419 additions and 1629 deletions
@@ -100,13 +100,12 @@ import os
 #import slixfeed.irc
 #import slixfeed.matrix

-from slixfeed.config import get_default_config_directory, get_value
+import slixfeed.config as config

 import socks
 import socket

-xmpp_type = get_value(
-"accounts", "XMPP", "type")
+xmpp_type = config.get_value("accounts", "XMPP", "type")

 match xmpp_type:
 case "client":
@@ -122,18 +121,21 @@ class JabberComponent:
 xmpp.register_plugin('xep_0030') # Service Discovery
 xmpp.register_plugin('xep_0045') # Multi-User Chat
 # xmpp.register_plugin('xep_0048') # Bookmarks
+xmpp.register_plugin('xep_0050') # Ad-Hoc Commands
 xmpp.register_plugin('xep_0054') # vcard-temp
 xmpp.register_plugin('xep_0060') # Publish-Subscribe
 # xmpp.register_plugin('xep_0065') # SOCKS5 Bytestreams
 xmpp.register_plugin('xep_0066') # Out of Band Data
 xmpp.register_plugin('xep_0071') # XHTML-IM
 xmpp.register_plugin('xep_0084') # User Avatar
-# xmpp.register_plugin('xep_0085') # Chat State Notifications
+xmpp.register_plugin('xep_0085') # Chat State Notifications
+xmpp.register_plugin('xep_0115') # Entity Capabilities
 xmpp.register_plugin('xep_0153') # vCard-Based Avatars
 xmpp.register_plugin('xep_0199', {'keepalive': True}) # XMPP Ping
-xmpp.register_plugin('xep_0249') # Multi-User Chat
+xmpp.register_plugin('xep_0249') # Direct MUC Invitations
 xmpp.register_plugin('xep_0363') # HTTP File Upload
 xmpp.register_plugin('xep_0402') # PEP Native Bookmarks
+xmpp.register_plugin('xep_0444') # Message Reactions
 xmpp.connect()
 xmpp.process()

@@ -145,22 +147,25 @@ class JabberClient:
 xmpp.register_plugin('xep_0030') # Service Discovery
 xmpp.register_plugin('xep_0045') # Multi-User Chat
 xmpp.register_plugin('xep_0048') # Bookmarks
+xmpp.register_plugin('xep_0050') # Ad-Hoc Commands
 xmpp.register_plugin('xep_0054') # vcard-temp
 xmpp.register_plugin('xep_0060') # Publish-Subscribe
 # xmpp.register_plugin('xep_0065') # SOCKS5 Bytestreams
 xmpp.register_plugin('xep_0066') # Out of Band Data
 xmpp.register_plugin('xep_0071') # XHTML-IM
 xmpp.register_plugin('xep_0084') # User Avatar
-# xmpp.register_plugin('xep_0085') # Chat State Notifications
+xmpp.register_plugin('xep_0085') # Chat State Notifications
+xmpp.register_plugin('xep_0115') # Entity Capabilities
 xmpp.register_plugin('xep_0153') # vCard-Based Avatars
 xmpp.register_plugin('xep_0199', {'keepalive': True}) # XMPP Ping
-xmpp.register_plugin('xep_0249') # Multi-User Chat
+xmpp.register_plugin('xep_0249') # Direct MUC Invitations
 xmpp.register_plugin('xep_0363') # HTTP File Upload
 xmpp.register_plugin('xep_0402') # PEP Native Bookmarks
+xmpp.register_plugin('xep_0444') # Message Reactions

-# proxy_enabled = get_value("accounts", "XMPP", "proxy_enabled")
+# proxy_enabled = config.get_value("accounts", "XMPP", "proxy_enabled")
 # if proxy_enabled == '1':
-# values = get_value("accounts", "XMPP", [
+# values = config.get_value("accounts", "XMPP", [
 # "proxy_host",
 # "proxy_port",
 # "proxy_username",
@@ -179,7 +184,7 @@ class JabberClient:

 # Connect to the XMPP server and start processing XMPP stanzas.

-address = get_value(
+address = config.get_value(
 "accounts", "XMPP Client", ["hostname", "port"])
 if address[0] and address[1]:
 xmpp.connect(tuple(address))
@@ -190,11 +195,11 @@ class JabberClient:

 def main():

-config_dir = get_default_config_directory()
+config_dir = config.get_default_config_directory()
 logging.info("Reading configuration from {}".format(config_dir))
 print("Reading configuration from {}".format(config_dir))

-values = get_value(
+values = config.get_value(
 "accounts", "XMPP Proxy", ["socks5_host", "socks5_port"])
 if values[0] and values[1]:
 host = values[0]
@@ -208,37 +213,30 @@ def main():
 parser = ArgumentParser(description=Slixfeed.__doc__)

 # Output verbosity options.
-parser.add_argument(
-"-q", "--quiet", help="set logging to ERROR",
-action="store_const", dest="loglevel",
-const=logging.ERROR, default=logging.INFO)
-parser.add_argument(
-"-d", "--debug", help="set logging to DEBUG",
-action="store_const", dest="loglevel",
-const=logging.DEBUG, default=logging.INFO)
+parser.add_argument("-q", "--quiet", help="set logging to ERROR",
+action="store_const", dest="loglevel",
+const=logging.ERROR, default=logging.INFO)
+parser.add_argument("-d", "--debug", help="set logging to DEBUG",
+action="store_const", dest="loglevel",
+const=logging.DEBUG, default=logging.INFO)

 # JID and password options.
-parser.add_argument(
-"-j", "--jid", dest="jid", help="Jabber ID")
-parser.add_argument(
-"-p", "--password", dest="password", help="Password of JID")
-parser.add_argument(
-"-a", "--alias", dest="alias", help="Display name")
-parser.add_argument(
-"-n", "--hostname", dest="hostname", help="Hostname")
-parser.add_argument(
-"-o", "--port", dest="port", help="Port number")
+parser.add_argument("-j", "--jid", help="Jabber ID", dest="jid")
+parser.add_argument("-p", "--password", help="Password of JID",
+dest="password")
+parser.add_argument("-a", "--alias", help="Display name", dest="alias")
+parser.add_argument("-n", "--hostname", help="Hostname", dest="hostname")
+parser.add_argument("-o", "--port", help="Port number", dest="port")

 args = parser.parse_args()

 # Setup logging.
-logging.basicConfig(
-level=args.loglevel, format='%(levelname)-8s %(message)s')
+logging.basicConfig(level=args.loglevel,
+format='%(levelname)-8s %(message)s')

 # Try configuration file
-values = get_value(
-"accounts", "XMPP Client", [
-"alias", "jid", "password", "hostname", "port"])
+values = config.get_value("accounts", "XMPP Client",
+["alias", "jid", "password", "hostname", "port"])
 alias = values[0]
 jid = values[1]
 password = values[2]

@@ -89,14 +89,16 @@ def manual(filename, section=None, command=None):
 if command and section:
 try:
 cmd_list = cmds[section][command]
-except KeyError:
+except KeyError as e:
+logging.error(str(e))
 cmd_list = None
 elif section:
 try:
 cmd_list = []
 for cmd in cmds[section]:
 cmd_list.extend([cmd])
-except KeyError:
+except KeyError as e:
+logging.error('KeyError:' + str(e))
 cmd_list = None
 else:
 cmd_list = []
@@ -305,6 +307,7 @@ async def get_setting_value(db_file, key):
 await sqlite.get_settings_value(db_file, key) or
 config.get_value("settings", "Settings", key)
 )
+value = int(value)
 return value

@@ -529,16 +532,15 @@ async def add_feed(db_file, url):
 status_code=status_code,
 updated=updated
 )
-await scan(
-db_file, url)
+await scan(db_file, url)
 old = await get_setting_value(db_file, "old")
 if not old:
-await sqlite.mark_feed_as_read(
-db_file, url)
-response = (
-"> {}\nNews source \"{}\" has been "
-"added to subscription list."
-).format(url, title)
+feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
+await sqlite.mark_feed_as_read(db_file, feed_id)
+response = ('> {}\nNews source "{}" has been '
+'added to subscription list.'
+.format(url, title))
 break
 # NOTE This elif statement be unnecessary
 # when feedparser be supporting json feed.
@@ -580,12 +582,12 @@ async def add_feed(db_file, url):
 db_file, url)
 old = await get_setting_value(db_file, "old")
 if not old:
-await sqlite.mark_feed_as_read(
-db_file, url)
-response = (
-"> {}\nNews source \"{}\" has been "
-"added to subscription list."
-).format(url, title)
+feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
+await sqlite.mark_feed_as_read(db_file, feed_id)
+response = ('> {}\nNews source "{}" has been '
+'added to subscription list.'
+.format(url, title))
 break
 else:
 result = await crawl.probe_page(
@@ -596,18 +598,15 @@ async def add_feed(db_file, url):
 else:
 url = result[0]
 else:
-response = (
-"> {}\nFailed to load URL. Reason: {}"
-).format(url, status_code)
+response = ('> {}\nFailed to load URL. Reason: {}'
+.format(url, status_code))
 break
 else:
 ix = exist[0]
 name = exist[1]
-response = (
-"> {}\nNews source \"{}\" is already "
-"listed in the subscription list at "
-"index {}".format(url, name, ix)
-)
+response = ('> {}\nNews source "{}" is already '
+'listed in the subscription list at '
+'index {}'.format(url, name, ix))
 break
 return response

@@ -638,6 +637,7 @@ async def scan_json(db_file, url):
 db_file, url, feed)
 try:
 feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
 # await sqlite.update_feed_validity(
 # db_file, feed_id, valid)
 if "date_published" in feed.keys():
@@ -649,6 +649,7 @@ async def scan_json(db_file, url):
 else:
 updated = ''
 feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
 await sqlite.update_feed_properties(
 db_file, feed_id, len(feed["items"]), updated)
 # await update_feed_status
@@ -680,15 +681,20 @@ async def scan_json(db_file, url):
 title = entry["title"] if "title" in entry.keys() else date
 entry_id = entry["id"] if "id" in entry.keys() else link
 feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
 exist = await sqlite.check_entry_exist(
 db_file, feed_id, entry_id=entry_id,
 title=title, link=link, date=date)
 if not exist:
 summary = entry["summary"] if "summary" in entry.keys() else ''
 if not summary:
-summary = entry["content_html"] if "content_html" in entry.keys() else ''
+summary = (entry["content_html"]
+if "content_html" in entry.keys()
+else '')
 if not summary:
-summary = entry["content_text"] if "content_text" in entry.keys() else ''
+summary = (entry["content_text"]
+if "content_text" in entry.keys()
+else '')
 read_status = 0
 pathname = urlsplit(link).path
 string = (
@@ -725,12 +731,12 @@ async def scan_json(db_file, url):
 media_link = trim_url(media_link)
 break
 except:
-logging.error(
-"KeyError: 'url'\n"
-"Missing 'url' attribute for {}".format(url))
-logging.info(
-"Continue scanning for next potential "
-"enclosure of {}".format(link))
+logging.error('KeyError: "url"\n'
+'Missing "url" attribute for {}'
+.format(url))
+logging.info('Continue scanning for next '
+'potential enclosure of {}'
+.format(link))
 entry = {
 "title": title,
 "link": link,
@@ -746,6 +752,7 @@ async def scan_json(db_file, url):
 # await sqlite.set_date(db_file, url)
 if len(new_entries):
 feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
 await sqlite.add_entries_and_update_timestamp(
 db_file, feed_id, new_entries)

@@ -808,9 +815,8 @@ async def view_feed(url):
 else:
 url = result[0]
 else:
-response = (
-"> {}\nFailed to load URL. Reason: {}"
-).format(url, status)
+response = ('> {}\nFailed to load URL. Reason: {}'
+.format(url, status))
 break
 return response

@@ -877,9 +883,8 @@ async def view_entry(url, num):
 else:
 url = result[0]
 else:
-response = (
-"> {}\nFailed to load URL. Reason: {}"
-).format(url, status)
+response = ('> {}\nFailed to load URL. Reason: {}'
+.format(url, status))
 break
 return response

@@ -921,6 +926,7 @@ async def scan(db_file, url):
 else:
 valid = 1
 feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
 await sqlite.update_feed_validity(
 db_file, feed_id, valid)
 if "updated_parsed" in feed["feed"].keys():
@@ -932,6 +938,7 @@ async def scan(db_file, url):
 else:
 updated = ''
 feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
 await sqlite.update_feed_properties(
 db_file, feed_id, len(feed["entries"]), updated)
 # await update_feed_status
@@ -963,6 +970,7 @@ async def scan(db_file, url):
 title = entry.title if entry.has_key("title") else date
 entry_id = entry.id if entry.has_key("id") else link
 feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
 exist = await sqlite.check_entry_exist(
 db_file, feed_id, entry_id=entry_id,
 title=title, link=link, date=date)
@@ -986,8 +994,8 @@ async def scan(db_file, url):
 "Keyword : {}".format(
 link, reject_list))
 if isinstance(date, int):
-logging.error(
-"Variable 'date' is int: {}".format(date))
+logging.error('Variable "date" is int: {}'
+.format(date))
 media_link = ''
 if entry.has_key("links"):
 for e_link in entry.links:
@@ -1006,12 +1014,12 @@ async def scan(db_file, url):
 media_link = trim_url(media_link)
 break
 except:
-logging.error(
-"KeyError: 'href'\n"
-"Missing 'href' attribute for {}".format(url))
-logging.info(
-"Continue scanning for next potential "
-"enclosure of {}".format(link))
+logging.error('KeyError: "href"\n'
+'Missing "href" attribute for {}'
+.format(url))
+logging.info('Continue scanning for next '
+'potential enclosure of {}'
+.format(link))
 entry = {
 "title": title,
 "link": link,
@@ -1027,6 +1035,7 @@ async def scan(db_file, url):
 # await sqlite.set_date(db_file, url)
 if len(new_entries):
 feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
 await sqlite.add_entries_and_update_timestamp(
 db_file, feed_id, new_entries)

@@ -1048,8 +1057,7 @@ def generate_document(data, url, ext, filename):
 content = document.summary()
 except:
 content = data
-logging.warning(
-"Check that package readability is installed.")
+logging.warning('Check that package readability is installed.')
 match ext:
 case "epub":
 error = generate_epub(content, filename)
@@ -1064,11 +1072,9 @@ def generate_document(data, url, ext, filename):
 try:
 generate_markdown(content, filename)
 except:
-logging.warning(
-"Check that package html2text is installed, "
-"or try again.")
-error = (
-"Package html2text was not found.")
+logging.warning('Check that package html2text '
+'is installed, or try again.')
+error = 'Package html2text was not found.'
 case "pdf":
 error = generate_pdf(content, filename)
 if error:
@@ -1093,6 +1099,7 @@ def generate_document(data, url, ext, filename):

 async def extract_image_from_feed(db_file, feed_id, url):
 feed_url = sqlite.get_feed_url(db_file, feed_id)
+feed_url = feed_url[0]
 result = await fetch.http(feed_url)
 document = result[0]
 if document:
@@ -1107,8 +1114,7 @@ async def extract_image_from_feed(db_file, feed_id, url):
 return image_url
 except:
 logging.error(url)
-logging.error(
-"AttributeError: object has no attribute 'link'")
+logging.error('AttributeError: object has no attribute "link"')


 async def extract_image_from_html(url):
@@ -1120,8 +1126,7 @@ async def extract_image_from_html(url):
 content = document.summary()
 except:
 content = data
-logging.warning(
-"Check that package readability is installed.")
+logging.warning('Check that package readability is installed.')
 tree = html.fromstring(content)
 # TODO Exclude banners, class="share" links etc.
 images = tree.xpath(
|
@ -1209,9 +1214,8 @@ async def get_magnet(link):
|
||||||
filename = queries["dn"][0]
|
filename = queries["dn"][0]
|
||||||
checksum = query_xt[len("urn:btih:"):]
|
checksum = query_xt[len("urn:btih:"):]
|
||||||
torrent = await fetch.magnet(link)
|
torrent = await fetch.magnet(link)
|
||||||
logging.debug(
|
logging.debug('Attempting to retrieve {} ({})'
|
||||||
"Attempting to retrieve {} ({})".format(
|
.format(filename, checksum))
|
||||||
filename, checksum))
|
|
||||||
if not torrent:
|
if not torrent:
|
||||||
logging.debug(
|
logging.debug(
|
||||||
"Attempting to retrieve {} from HTTP caching service".format(
|
"Attempting to retrieve {} from HTTP caching service".format(
|
||||||
|
@@ -1245,6 +1249,7 @@ async def remove_nonexistent_entries(db_file, url, feed):
 Parsed feed document.
 """
 feed_id = await sqlite.get_feed_id(db_file, url)
+feed_id = feed_id[0]
 items = await sqlite.get_entries_of_feed(db_file, feed_id)
 entries = feed.entries
 for item in items:
|
||||||
Parsed feed document.
|
Parsed feed document.
|
||||||
"""
|
"""
|
||||||
feed_id = await sqlite.get_feed_id(db_file, url)
|
feed_id = await sqlite.get_feed_id(db_file, url)
|
||||||
|
feed_id = feed_id[0]
|
||||||
items = await sqlite.get_entries_of_feed(db_file, feed_id)
|
items = await sqlite.get_entries_of_feed(db_file, feed_id)
|
||||||
entries = feed["items"]
|
entries = feed["items"]
|
||||||
for item in items:
|
for item in items:
|
||||||
|
|
|

@@ -1,7 +1,12 @@
 about = """
+Slixfeed
+
+A Syndication bot for the XMPP communication network.
+
 Slixfeed aims to be an easy to use and fully-featured news \
 aggregator bot for XMPP. It provides a convenient access to Blogs, \
-Fediverse and News websites along with filtering functionality.
+News websites and even Fediverse instances, along with filtering \
+functionality.

 Slixfeed is primarily designed for XMPP (aka Jabber). \
 Visit https://xmpp.org/software/ for more information.
@@ -19,17 +24,30 @@ Supported filetypes: Atom, JSON, RDF, RSS and XML.
 """

 license = """
-Slixfeed is free software; you can redistribute it and/or \
-modify it under the terms of the MIT License.
+Copyright 2022 - 2024 Schimon Zackary Jehudah

-Slixfeed is distributed in the hope that it will be useful, \
-but WITHOUT ANY WARRANTY; without even the implied warranty of \
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the \
-MIT License for more details.
+Permission is hereby granted, free of charge, to any person obtaining \
+a copy of this software and associated documentation files (the \
+“Software”), to deal in the Software without restriction, including \
+without limitation the rights to use, copy, modify, merge, publish, \
+distribute, sublicense, and/or sell copies of the Software, and to \
+permit persons to whom the Software is furnished to do so, subject to \
+the following conditions:
+
+The above copyright notice and this permission notice shall be included \
+in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS \
+OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, \
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL \
+THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR \
+OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, \
+ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER \
+DEALINGS IN THE SOFTWARE.
 """

 note = """
-You can run Slixfeed as a client on your own computer, server, \
+You can run Slixfeed as a client, from your own computer, server, \
 and even on a Linux phone (i.e. Droidian, Kupfer, Mobian, NixOS, \
 postmarketOS). You can even use Termux.
@@ -44,11 +62,11 @@ No operator was specified for this instance.
 platforms = """
 Supported platforms: XMPP
 Platforms to be added in future: Briar, Email, IRC, Matrix, MQTT, Tox.
-For the best experience, we recommend to use XMPP.
+For ideal experience, we recommend using XMPP.
 """

 privacy = """
-All your data belongs to us!
+All your data belongs to us.
 """

 protocols = """
@@ -67,11 +85,19 @@ https://pythonhosted.org/feedparser
 """

 terms = """
-You may not abuse this service.
+Slixfeed is free software; you can redistribute it and/or \
+modify it under the terms of the MIT License.
+
+Slixfeed is distributed in the hope that it will be useful, \
+but WITHOUT ANY WARRANTY; without even the implied warranty of \
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the \
+MIT License for more details.
+
+https://gitgud.io/sjehuda/slixfeed
 """

 thanks = """
-Alixander Court (Utah), \
+Alixander Court (alixandercourt.com, Utah), \
 Christian Dersch (SalixOS), \
 Cyrille Pontvieux (SalixOS, France), \
 Denis Fomin (Gajim, Russia), \

@@ -1,523 +1,492 @@
 [proxies.anonymousoverflow]
 clearnet = [
 "https://ao.phreedom.club",
 "https://overflow.hostux.net",
 "https://ao.foss.wtf",
 "https://overflow.adminforge.de",
 "https://overflow.lunar.icu",
 "https://anonymousoverflow.esmailelbob.xyz",
 "https://overflow.smnz.de",
 "https://ao.vern.cc",
 "https://overflow.777.tf",
 "https://code.whatever.social",
 "https://stackoverflow.vern.cc",
 "https://anonymousoverflow.vern.cc",
 "https://ao.bloatcat.tk",
 "https://se.chaotic.ninja",
 "https://anonymousoverflow.privacyfucking.rocks",
 "https://overflow.projectsegfau.lt",
 "https://anonoverflow.frontendfriendly.xyz",
 "https://overflow.fascinated.cc",
 ]
 i2p = [
 "http://vernmzgraj6aaoafmehupvtkkynpaa67rxcdj2kinwiy6konn6rq.b32.i2p",
 ]
 loki = []
 tor = [
 "http://anonymousoverflow.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
 "http://ao.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion",
 ]
 yggdrasil = [
 "http://[301:f69c:2017:b6b8::8]",
 ]
 hostname = [
 "stackoverflow.com",
 ]
 type = [
 "link",
 ]

 [proxies.dumb]
 clearnet = [
 "https://dumb.privacydev.net",
 "https://dm.vern.cc",
 "https://dumb.lunar.icu",
 "https://dumb.esmailelbob.xyz",
 ]
 hostname = [
 "genius.com",
 ]
 type = [
 "link",
 ]

 [proxies.invidious]
 clearnet = [
 "https://incogtube.com",
 "https://vid.puffyan.us",
 "https://yt.artemislena.eu",
 "https://invidious.snopyta.org",
 "https://youtube.076.ne.jp",
 "https://invidious.osi.kr",
 "https://invidious-us.kavin.rocks",
 "https://inv.cthd.icu",
 "https://invidious.namazso.eu",
 "https://yewtu.be",
 "https://invidio.xamh.de",
 "https://invidious.kavin.rocks",
 "https://monocles.live",
 "https://inv.riverside.rocks",
 "https://invidious.lunar.icu",
 "https://y.com.sb",
 "https://inv.bp.projectsegfau.lt",
 "https://invidious.flokinet.to",
 "https://invidious.sethforprivacy.com",
 "https://invidious.esmailelbob.xyz",
 "https://ytb.trom.tf",
 "https://invidious.domain.glass",
 "https://tube.cthd.icu",
 "https://inv.vern.cc",
 "https://invidious.garudalinux.org",
 "https://youtube.owacon.moe",
 "https://invidious.tinfoil-hat.net",
 "https://iv.melmac.space",
 "https://invidious.tiekoetter.com",
 "https://invidious.baczek.me",
 "https://invidious.no-logs.com",
 "https://invidious.0011.lt",
 "https://yt.funami.tech",
 "https://inv.tux.pizza",
 "https://vid.priv.au",
 "https://not-ytb.blocus.ch",
 "https://inv.creller.net",
 "https://inv.zzls.xyz",
 "https://yt.floss.media",
 "https://invidious.slipfox.xyz",
 "https://par1.iv.ggtyler.dev",
 "https://inv.citw.lgbt",
 "https://invidious.io.lol",
 "https://yt.oelrichsgarcia.de",
 "https://iv.nboeck.de",
 "https://invidious.protokolla.fi",
 "https://invidious.fi",
 "https://onion.tube",
 "https://inv.in.projectsegfau.lt",
 "https://invidious.privacydev.net",
 "https://invidious.takebackourtech.org",
 "https://qc1.iv.ggtyler.dev",
 "https://anontube.lvkaszus.pl",
 "https://invidious.asir.dev",
 "https://invidious.fdn.fr",
 "https://iv.datura.network",
 "https://invidious.private.coffee",
 "https://inv.pistasjis.net",
 "https://invidious.pavot.ca",
 "https://yt.cdaut.de",
 "https://yt.drgnz.club",
 "https://invidious.perennialte.ch",
 "https://yt.chaotic.ninja",
 "https://yt.omada.cafe",
 "https://super8.absturztau.be",
 "https://i.redsnake.io",
 "https://watch.supernets.org",
 "https://invidious.qwik.space",
 "https://farside.link/invidious",
 "https://inv.odyssey346.dev",
 "https://invidious.mutahar.rocks",
 "https://invidious.nerdvpn.de",
 "https://invidious.projectsegfau.lt",
 "https://invidious.weblibre.org",
 "https://iv.ggtyler.dev",
 "https://watch.thekitty.zone",
 "https://inv.us.projectsegfau.lt",
 "https://invidious.drgns.space",
 ]
 i2p = [
 "http://tube.i2p",
 "http://inv.cn.i2p",
 "http://jewtube.i2p",
 "http://ytmous.i2p",
 "http://pa7eextqat4wg35onzs4cnlhqa3gvzen243bcbrng67zyla4fqya.b32.i2p",
 "http://inv.vern.i2p",
 "http://inv.zzls.i2p",
 "http://verni6dr4qxjgjumnvesxerh5rvhv6oy5ddeibaqy5d7tgbiiyfa.b32.i2p",
 ]
 loki = []
 tor = [
 "http://tuberyps2pn6dor6h47brof3w2asmauahhk4ei42krugybzzzo55klad.onion",
 "http://qwikxxeiw4kgmml6vjw2bsxtviuwjce735dunai2djhu6q7qbacq73id.onion",
 "http://qwikxxt6jvggxzxe2v2fuzro5j7ibgphxmblmri6wkj5vpicdbo2kwad.onion",
 "http://c7hqkpkpemu6e7emz5b4vyz7idjgdvgaaa3dyimmeojqbgpea3xqjoid.onion",
 "http://grwp24hodrefzvjjuccrkw3mjq4tzhaaq32amf33dzpmuxe7ilepcmad.onion",
 "http://invidious.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
 "http://euxxcnhsynwmfidvhjf6uzptsmh4dipkmgdmcmxxuo7tunp3ad2jrwyd.onion",
 "http://invidious.g4c3eya4clenolymqbpgwz3q3tawoxw56yhzk4vugqrl6dtu3ejvhjid.onion",
 "http://iv.odysfvr23q5wgt7i456o5t3trw2cw5dgn56vbjfbq2m7xsc5vqbqpcyd.onion",
 "http://kbjggqkzv65ivcqj6bumvp337z6264huv5kpkwuv6gu5yjiskvan7fad.onion",
 "http://ng27owmagn5amdm7l5s3rsqxwscl5ynppnis5dqcasogkyxcfqn7psid.onion",
 "http://osbivz6guyeahrwp2lnwyjk2xos342h4ocsxyqrlaopqjuhwn2djiiyd.onion",
 "http://u2cvlit75owumwpy4dj2hsmvkq7nvrclkpht7xgyye2pyoxhpmclkrad.onion",
 "http://w6ijuptxiku4xpnnaetxvnkc5vqcdu7mgns2u77qefoixi63vbvnpnqd.onion",
 ]
 yggdrasil = [
 "http://[200:168a:c80a:b258:1dfe:f920:4414:6897]",
 ]
 hostname = [
 "youtu.be",
 "youtube.com",
 ]
 type = [
 "feed",
 "link",
 ]

 [proxies.librarian]
 clearnet = [
 "https://librarian.pussthecat.org",
 "https://odysee.076.ne.jp",
 "https://lbry.projectsegfau.lt",
 "https://librarian.esmailelbob.xyz",
 "https://lbry.mywire.org",
 "https://lbry.slipfox.xyz",
 "https://lbry.vern.cc",
 "https://lbry.ooguy.com",
 "https://lbn.frail.duckdns.org",
 "https://odysee.owacon.moe",
 "https://farside.link/librarian",
 ]
 i2p = []
 loki = []
 tor = [
 "http://librarian.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
 "http://lbry.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion",
 "http://5znbzx2xcymhddzekfjib3isgqq4ilcyxa2bsq6vqmnvbtgu4f776lqd.onion",
 "http://bxewpsswttslepw27w2hhxhlizwm7l7y54x3jw5cfrb64hb6lgc557ad.onion",
 ]
 yggdrasil = []
 hostname = [
 "odysee.com",
 ]
 type = [
 "feed",
 "link",
 ]

 [proxies.libreddit]
 clearnet = [
-"https://libreddit.spike.codes",
-"https://libreddit.hu",
-"https://libreddit.nl",
-"https://libreddit.bus-hit.me",
-"https://libreddit.strongthany.cc",
-"https://libreddit.esmailelbob.xyz",
-"https://lr.riverside.rocks",
-"https://libreddit.40two.app",
-"https://libreddit.albony.xyz",
-"https://libreddit.domain.glass",
-"https://discuss.whatever.social",
-"https://libreddit.kavin.rocks",
-"https://libreddit.privacy.com.de",
-"https://libreddit.eu.org",
-"https://libreddit.bloatcat.tk",
-"https://libreddit.pabloferreiro.es",
-"https://lr.foss.wtf",
-"https://libreddit.no-logs.com",
-"https://lr.slipfox.xyz",
-"https://lr.creller.net",
-"https://libreddit.dcs0.hu",
-"https://l.opnxng.com",
-"https://libreddit.tux.pizza",
-"https://reddit.leptons.xyz",
-"https://reddit.baby",
-"https://snoo.habedieeh.re",
-"https://lr.4201337.xyz",
-"https://libreddit.private.coffee",
-"https://lr.artemislena.eu",
-"https://libreddit.privacyfucking.rocks",
-"https://libreddit.qwik.space",
-"https://farside.link/libreddit",
-"https://de.leddit.xyz",
-"https://leddit.xyz",
-"https://libreddit.alefvanoon.xyz",
-"https://libreddit.autarkic.org",
-"https://libreddit.awesomehub.io",
-"https://libreddit.crewz.me",
-"https://libreddit.database.red",
-"https://libreddit.datatunnel.xyz",
-"https://libreddit.de",
-"https://libreddit.dothq.co",
-"https://libreddit.drivet.xyz",
-"https://libreddit.flux.industries",
-"https://libreddit.igna.rocks",
-"https://libredd.it",
-"https://libreddit.jamiethalacker.dev",
-"https://libreddit.kylrth.com",
-"https://libreddit.lunar.icu",
-"https://libreddit.mutahar.rocks",
-"https://libreddit.northboot.xyz",
-"https://libreddit.pussthecat.org",
-"https://libreddit.silkky.cloud",
-"https://libreddit.some-things.org",
-"https://libreddit.sugoma.tk",
-"https://libreddit.tiekoetter.com",
-"https://libreddit.totaldarkness.net",
-"https://libreddit.winscloud.net",
-"https://libreddit.yonalee.eu",
-"https://lr.cowfee.moe",
-"https://lr.mint.lgbt",
-"https://lr.oversold.host",
-"https://lr.stilic.ml",
-"https://r.nf",
-"https://r.walkx.org",
-"https://reddi.tk",
-"https://reddit.artemislena.eu",
-"https://reddit.invak.id",
-"https://reddit.phii.me",
-"https://reddit.rtrace.io",
-"https://reddit.stuehieyr.com",
-"https://safereddit.com",
-"https://libreddit.nohost.network",
-"https://libreddit.projectsegfau.lt",
-"https://reddit.simo.sh",
+"https://libreddit.strongthany.cc",
+"https://libreddit.40two.app",
+"https://discuss.whatever.social",
+"https://libreddit.kavin.rocks",
+"https://libreddit.privacy.com.de",
+"https://libreddit.no-logs.com",
+"https://lr.slipfox.xyz",
+"https://libreddit.tux.pizza",
+"https://snoo.habedieeh.re",
+"https://lr.artemislena.eu",
+"https://libreddit.privacyfucking.rocks",
+"https://libreddit.qwik.space",
+"https://de.leddit.xyz",
+"https://leddit.xyz",
+"https://libreddit.crewz.me",
+"https://libreddit.de",
+"https://libreddit.igna.rocks",
+"https://libredd.it",
+"https://libreddit.kylrth.com",
+"https://libreddit.lunar.icu",
+"https://libreddit.pussthecat.org",
+"https://lr.cowfee.moe",
+"https://lr.mint.lgbt",
+"https://r.nf",
+"https://reddit.invak.id",
+"https://safereddit.com",
+"https://libreddit.nohost.network",
+"https://libreddit.projectsegfau.lt",
+"https://reddit.simo.sh",
 ]
 i2p = [
 "http://woo5ugmoomzbtaq6z46q4wgei5mqmc6jkafqfi5c37zni7xc4ymq.b32.i2p",
 ]
 loki = []
 tor = [
 "http://spjmllawtheisznfs7uryhxumin26ssv2draj7oope3ok3wuhy43eoyd.onion",
 "http://qwikxxeiw4kgmml6vjw2bsxtviuwjce735dunai2djhu6q7qbacq73id.onion",
 "http://qwikxx4xqvhdyyazkrw7pwdpdppfnmn7j2n6cvq5zecm4atbppaslzad.onion",
 "http://ecue64ybzvn6vjzl37kcsnwt4ycmbsyf74nbttyg7rkc3t3qwnj7mcyd.onion",
 "http://fwhhsbrbltmrct5hshrnqlqygqvcgmnek3cnka55zj4y7nuus5muwyyd.onion",
 "http://inytumdgnri7xsqtvpntjevaelxtgbjqkuqhtf6txxhwbll2fwqtakqd.onion",
 "http://kphht2jcflojtqte4b4kyx7p2ahagv4debjj32nre67dxz7y57seqwyd.onion",
 "http://kzhfp3nvb4qp575vy23ccbrgfocezjtl5dx66uthgrhu7nscu6rcwjyd.onion",
 "http://lbrdtjaj7567ptdd4rv74lv27qhxfkraabnyphgcvptl64ijx2tijwid.onion",
 "http://libreddit.2syis2nnyytz6jnusnjurva4swlaizlnleiks5mjp46phuwjbdjqwgqd.onion",
 "http://ledditqo2mxfvlgobxnlhrkq4dh34jss6evfkdkb2thlvy6dn4f4gpyd.onion",
 "http://libreddit.lqs5fjmajyp7rvp4qvyubwofzi6d4imua7vs237rkc4m5qogitqwrgyd.onion",
 "http://libredoxhxwnmsb6dvzzd35hmgzmawsq5i764es7witwhddvpc2razid.onion",
 "http://ol5begilptoou34emq2sshf3may3hlblvipdjtybbovpb7c7zodxmtqd.onion",
 "http://liredejj74h5xjqr2dylnl5howb2bpikfowqoveub55ru27x43357iid.onion",
 ]
 yggdrasil = []
 hostname = [
 "reddit.com",
 ]
 type = [
 "link",
 ]

 [proxies.neuters]
 clearnet = [
 "https://neuters.de",
 "https://neuters.privacyfucking.rocks",
 ]
 hostname = [
 "reuters.com",
 ]
 type = [
 "link",
 ]

 [proxies.nitter]
 clearnet = [
 "https://nitter.hu",
 "https://nitter.actionsack.com",
 "https://nitter.net",
 "https://nitter.1d4.us",
 "https://nitter.nixnet.services",
 "https://nitter.unixfox.eu",
 "https://nitter.sethforprivacy.com",
 "https://nitter.pussthecat.org",
 "https://nitter.it",
 "https://nitter.moomoo.me",
 "https://tw.artemislena.eu",
 "https://nitter.snopyta.org",
 "https://birdsite.xanny.family",
 "https://nitter.domain.glass",
 "https://read.whatever.social",
 "https://nitter.lacontrevoie.fr",
 "https://bird.trom.tf",
 "https://nitter.hostux.net",
 "https://nitter.sneed.network",
 "https://twitter.owacon.moe",
 "https://nitter.ggc-project.de",
 "https://unofficialbird.com",
 "https://nitter.fdn.fr",
 "https://nitter.no-logs.com",
 "https://nitter.slipfox.xyz",
 "https://nitter.one",
 "https://nitter.ungovernable.men",
 "https://nitter.private.coffee",
 "https://nitter.soopy.moe",
 "https://nitter.oksocial.net",
 "https://n.sneed.network",
 "https://nitter.qwik.space",
 "https://nitter.nohost.network",
 "https://de.nttr.stream",
 "https://farside.link/nitter",
 "https://nitter.42l.fr",
 "https://nitter.bus-hit.me",
 "https://nitter.ca",
 "https://nitter.eu",
 "https://nitter.grimneko.de",
 "https://nitter.kavin.rocks",
 "https://nitter.koyu.space",
 "https://nitter.namazso.eu",
 "https://nttr.stream",
 "https://twitter.076.ne.jp",
 "https://twitter.censors.us",
 "https://n.hyperborea.cloud",
 "https://n.biendeo.com",
 "https://n.opnxng.com",
 "https://nitter.adminforge.de",
 "https://nitter.catsarch.com",
 "https://nitter.cz",
 "https://nitter.esmailelbob.xyz",
 "https://nitter.in.projectsegfau.lt",
 "https://nitter.io.lol",
 "https://nitter.ktachibana.party",
 "https://nitter.kylrth.com",
 "https://nitter.poast.org",
 "https://nitter.privacydev.net",
 "https://nitter.salastil.com",
 "https://nitter.woodland.cafe",
 ]
 i2p = [
 "http://tm4rwkeysv3zz3q5yacyr4rlmca2c4etkdobfvuqzt6vsfsu4weq.b32.i2p",
 ]
 loki = []
 tor = [
 "http://qwikxxeiw4kgmml6vjw2bsxtviuwjce735dunai2djhu6q7qbacq73id.onion",
 "http://qwikxx2erhx6qrymued6ox2qkf2yeogjwypqvzoif4fqkljixasr6oid.onion",
 "http://n.sneed4fmhevap3ci4xhf4wgkf72lwk275lcgomnfgwniwmqvaxyluuid.onion",
 ]
 yggdrasil = []
 hostname = [
 "twitter.com",
 "x.com",
 ]
 type = [
 "feed",
 "link",
 ]

 [proxies.proxitok]
 clearnet = [
 "https://proxitok.lunar.icu",
 "https://tik.hostux.net",
 "https://proxitok.pabloferreiro.es",
 "https://proxitok.privacy.com.de",
 "https://tok.adminforge.de",
 "https://tok.habedieeh.re",
 "https://proxitok.pussthecat.org",
 "https://proxitok.privacyfucking.rocks",
 "https://cringe.whatever.social",
 "https://proxitok.esmailelbob.xyz",
 "https://proxitok.privacydev.net",
 "https://proxitok.pufe.org",
 "https://tok.artemislena.eu",
 "https://tok.thekitty.zone",
 "https://tiktok.chauvet.pro",
 "https://tt.vern.cc",
 "https://farside.link/proxitok",
 ]
 i2p = [
 "http://qr.vern.i2p",
 ]
 loki = []
 tor = []
 yggdrasil = []
 hostname = [
 "tiktok.com",
 ]
 type = [
 "link",
 ]

 [proxies.quetre]
 clearnet = [
 "https://quetre.privacydev.net",
 "https://quetre.pufe.org",
 "https://que.wilbvr.me",
 "https://quetre.iket.me",
 "https://quetre.pussthecat.org",
 "https://quetre.tokhmi.xyz",
 "https://quetre.projectsegfau.lt",
 "https://quetre.esmailelbob.xyz",
 "https://quetre.odyssey346.dev",
 "https://ask.habedieeh.re",
 "https://quetre.marcopisco.com",
 "https://quetre.blackdrgn.nl",
 "https://quetre.lunar.icu",
 "https://quora.femboy.hu",
 "https://quora.vern.cc",
 "https://farside.link/quetre",
 "https://quetre.fascinated.cc",
 ]
 i2p = []
 loki = []
 tor = [
 "http://ask.habeehrhadazsw3izbrbilqajalfyqqln54mrja3iwpqxgcuxnus7eid.onion",
|
||||||
"http://qr.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion",
|
"http://qr.vernccvbvyi5qhfzyqengccj7lkove6bjot2xhh5kajhwvidqafczrad.onion",
|
||||||
"http://quetre.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
|
"http://quetre.esmail5pdn24shtvieloeedh7ehz3nrwcdivnfhfcedl7gf4kwddhkqd.onion",
|
||||||
"http://quetre.g4c3eya4clenolymqbpgwz3q3tawoxw56yhzk4vugqrl6dtu3ejvhjid.onion",
|
"http://quetre.g4c3eya4clenolymqbpgwz3q3tawoxw56yhzk4vugqrl6dtu3ejvhjid.onion",
|
||||||
"http://quora.cepyxplublbyw2f4axy4pyztfbxmf63lrt2c7uwv6wl4iixz53czload.onion",
|
"http://quora.cepyxplublbyw2f4axy4pyztfbxmf63lrt2c7uwv6wl4iixz53czload.onion",
|
||||||
]
|
]
|
||||||
yggdrasil = []
|
yggdrasil = []
|
||||||
hostname = [
|
hostname = [
|
||||||
"quora.com",
|
"quora.com",
|
||||||
]
|
]
|
||||||
type = [
|
type = [
|
||||||
"link",
|
"link",
|
||||||
|
]
|
||||||
|
|
||||||
|
[proxies.redlib]
|
||||||
|
clearnet = [
|
||||||
|
"https://redlib.private.coffee",
|
||||||
|
]
|
||||||
|
i2p = []
|
||||||
|
loki = []
|
||||||
|
tor = []
|
||||||
|
yggdrasil = []
|
||||||
|
hostname = [
|
||||||
|
"reddit.com",
|
||||||
|
]
|
||||||
|
type = [
|
||||||
|
"link",
|
||||||
]
|
]
|
||||||
|
|
||||||
[proxies.teddit]
|
[proxies.teddit]
|
||||||
clearnet = [
|
clearnet = [
|
||||||
"https://teddit.pussthecat.org",
|
"https://teddit.pussthecat.org",
|
||||||
"https://teddit.zaggy.nl",
|
"https://teddit.zaggy.nl",
|
||||||
"https://teddit.bus-hit.me",
|
"https://teddit.bus-hit.me",
|
||||||
"https://teddit.adminforge.de",
|
"https://teddit.adminforge.de",
|
||||||
"https://incogsnoo.com",
|
"https://incogsnoo.com",
|
||||||
"https://teddit.hostux.net",
|
"https://teddit.hostux.net",
|
||||||
"https://teddit.ggc-project.de",
|
"https://teddit.ggc-project.de",
|
||||||
"https://teddit.httpjames.space",
|
"https://teddit.httpjames.space",
|
||||||
"https://snoo.ioens.is",
|
"https://snoo.ioens.is",
|
||||||
"https://teddit.no-logs.com",
|
"https://teddit.no-logs.com",
|
||||||
"https://teddit.net",
|
"https://teddit.net",
|
||||||
"https://i.opnxng.com",
|
"https://i.opnxng.com",
|
||||||
"https://tedd.it",
|
"https://tedd.it",
|
||||||
"https://teddit.projectsegfau.lt",
|
"https://teddit.projectsegfau.lt",
|
||||||
"https://reddit.lol",
|
"https://reddit.lol",
|
||||||
"https://rdt.trom.tf",
|
"https://rdt.trom.tf",
|
||||||
"https://t.sneed.network",
|
"https://t.sneed.network",
|
||||||
"https://farside.link/teddit",
|
"https://farside.link/teddit",
|
||||||
"https://teddit.alefvanoon.xyz",
|
"https://teddit.alefvanoon.xyz",
|
||||||
"https://teddit.domain.glass",
|
"https://teddit.domain.glass",
|
||||||
"https://teddit.froth.zone",
|
"https://teddit.froth.zone",
|
||||||
"https://teddit.namazso.eu",
|
"https://teddit.namazso.eu",
|
||||||
"https://teddit.sethforprivacy.com",
|
"https://teddit.sethforprivacy.com",
|
||||||
"https://teddit.tinfoil-hat.net",
|
"https://teddit.tinfoil-hat.net",
|
||||||
"https://teddit.totaldarkness.net",
|
"https://teddit.totaldarkness.net",
|
||||||
"https://td.vern.cc",
|
"https://td.vern.cc",
|
||||||
]
|
]
|
||||||
i2p = [
|
i2p = [
|
||||||
"http://k62ptris7p72aborr4zoanee7xai6wguucveptwgxs5vbgt7qzpq.b32.i2p",
|
"http://k62ptris7p72aborr4zoanee7xai6wguucveptwgxs5vbgt7qzpq.b32.i2p",
|
||||||
"http://teddit.i2p",
|
"http://teddit.i2p",
|
||||||
]
|
]
|
||||||
loki = []
|
loki = []
|
||||||
tor = [
|
tor = [
|
||||||
"http://t.sneed4fmhevap3ci4xhf4wgkf72lwk275lcgomnfgwniwmqvaxyluuid.onion",
|
"http://t.sneed4fmhevap3ci4xhf4wgkf72lwk275lcgomnfgwniwmqvaxyluuid.onion",
|
||||||
"http://tedditfyn6idalzso5wam5qd3kdtxoljjhbrbbx34q2xkcisvshuytad.onion",
|
"http://tedditfyn6idalzso5wam5qd3kdtxoljjhbrbbx34q2xkcisvshuytad.onion",
|
||||||
]
|
]
|
||||||
yggdrasil = [
|
yggdrasil = [
|
||||||
"http://[200:5e4b:515c:e42b:3e73:6fbf:2f11:779d]",
|
"http://[200:5e4b:515c:e42b:3e73:6fbf:2f11:779d]",
|
||||||
]
|
]
|
||||||
hostname = [
|
hostname = [
|
||||||
"reddit.com",
|
"reddit.com",
|
||||||
]
|
]
|
||||||
type = [
|
type = [
|
||||||
"link",
|
"link",
|
||||||
]
|
]
|
||||||
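Each [proxies.*] table above shares one schema: mirror lists per network (clearnet, i2p, loki, tor, yggdrasil), the hostname values it substitutes, and the type values (feed, link) it applies to. A minimal sketch of consuming one table with the standard library; the relative file path is an assumption for illustration, and tomllib requires Python 3.11 or later:

    import random
    import tomllib

    # Assumed location of the configuration shown above.
    with open('proxies.toml', 'rb') as f:
        data = tomllib.load(f)

    teddit = data['proxies']['teddit']
    if 'reddit.com' in teddit['hostname'] and 'link' in teddit['type']:
        # Pick any clearnet mirror; the bot itself chooses at random.
        mirror = random.choice(teddit['clearnet'])
        print(mirror)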

@@ -27,8 +27,87 @@ import os

# from random import randrange
import slixfeed.sqlite as sqlite
import sys
+import tomli_w
import tomllib


+# TODO Merge with backup_obsolete
+def update_proxies(file, proxy_name, proxy_type, proxy_url, action='remove'):
+    """
+    Remove given URL from given list.
+
+    Parameters
+    ----------
+    file : str
+        Filename.
+    proxy_name : str
+        Proxy name.
+    proxy_type : str
+        Proxy type.
+    proxy_url : str
+        Proxy URL.
+    action : str
+        add or remove
+
+    Returns
+    -------
+    None.
+    """
+    data = open_config_file('proxies.toml')
+    proxy_list = data['proxies'][proxy_name][proxy_type]
+    proxy_index = proxy_list.index(proxy_url)
+    proxy_list.pop(proxy_index)
+    with open(file, 'w') as new_file:
+        content = tomli_w.dumps(data)
+        new_file.write(content)
+
+
+# TODO Merge with update_proxies
+def backup_obsolete(file, proxy_name, proxy_type, proxy_url, action='add'):
+    """
+    Add given URL to given list.
+
+    Parameters
+    ----------
+    file : str
+        Filename.
+    proxy_name : str
+        Proxy name.
+    proxy_type : str
+        Proxy type.
+    proxy_url : str
+        Proxy URL.
+    action : str
+        add or remove
+
+    Returns
+    -------
+    None.
+    """
+    data = open_config_file('proxies_obsolete.toml')
+    proxy_list = data['proxies'][proxy_name][proxy_type]
+    proxy_list.extend([proxy_url])
+    with open(file, 'w') as new_file:
+        content = tomli_w.dumps(data)
+        new_file.write(content)
+
+
+def create_skeleton(file):
+    with open(file, 'rb') as original_file:
+        data = tomllib.load(original_file)
+    data = clear_values(data)
+    with open('proxies_obsolete.toml', 'w') as new_file:
+        content = tomli_w.dumps(data)
+        new_file.write(content)
+
+
+def clear_values(input):
+    if isinstance(input, dict):
+        return {k: clear_values(v) for k, v in input.items()}
+    elif isinstance(input, list):
+        return ['']
+    else:
+        return ''
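Taken together, create_skeleton and clear_values derive an empty proxies_obsolete.toml whose tables mirror proxies.toml but whose lists and scalars are blanked. A self-contained sketch of the transformation; the sample data is hypothetical:

    data = {'proxies': {'teddit': {'clearnet': ['https://teddit.net'],
                                   'hostname': ['reddit.com']}}}

    def clear_values(value):
        # Mirrors the helper above: keep the dict skeleton,
        # collapse every list to [''] and every scalar to ''.
        if isinstance(value, dict):
            return {k: clear_values(v) for k, v in value.items()}
        elif isinstance(value, list):
            return ['']
        return ''

    print(clear_values(data))
    # {'proxies': {'teddit': {'clearnet': [''], 'hostname': ['']}}}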
def get_value(filename, section, keys):
    """

@@ -120,7 +199,9 @@ def get_value_default(filename, section, key):
    return result


-def get_list(filename, key):
+# TODO DELETE THIS FUNCTION OR KEEP ONLY THE CODE BELOW NOTE
+# IF CODE BELOW NOTE IS KEPT, RENAME FUNCTION TO open_toml
+def open_config_file(filename):
    """
    Get settings default value.

@@ -128,8 +209,6 @@ def get_list(filename, key):
    ----------
    filename : str
        Filename of toml file.
-    key: str
-        Key.

    Returns
    -------

@@ -142,11 +221,11 @@ def get_list(filename, key):
    if not os.path.isdir(config_dir):
        config_dir = os.path.dirname(__file__) + "/assets"
    config_file = os.path.join(config_dir, filename)
+    # NOTE THIS IS THE IMPORTANT CODE
    with open(config_file, mode="rb") as defaults:
        # default = yaml.safe_load(defaults)
        # result = default[key]
        result = tomllib.load(defaults)
-        result = result[key]
    return result
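The practical effect of the rename is that key selection moves to the caller: where code previously asked get_list for one key, it now loads the whole TOML document and indexes it. A sketch of both call shapes, using the module alias seen elsewhere in this commit:

    import slixfeed.config as config

    # Old (removed in this commit):
    # paths = config.get_list("lists.toml", "pathnames")

    # New: load the full TOML document once, then index by key.
    data = config.open_config_file("lists.toml")
    paths = data["pathnames"]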

@@ -221,6 +300,8 @@ def get_default_cache_directory():
    return os.path.join(data_home, 'slixfeed')


+# TODO Write a similar function for file.
+# NOTE This is a function of directory, not file.
def get_default_config_directory():
    """
    Determine the directory path where configuration will be stored.

@@ -370,7 +451,7 @@ async def is_include_keyword(db_file, key, string):
    # async def is_blacklisted(db_file, string):
    keywords = (await sqlite.get_filters_value(db_file, key)) or ''
    keywords = keywords.split(",")
-    keywords = keywords + (get_list("lists.toml", key))
+    keywords = keywords + (open_config_file("lists.toml")[key])
    for keyword in keywords:
        if not keyword or len(keyword) < 2:
            continue
@@ -5,16 +5,17 @@

TODO

-1.1) Do not compose messages.
-     Only return results.
-     See: # TODO return feeds
+1.1) Attempt to scan more paths: /blog/, /news/ etc., including root /
+     Attempt to scan sub domains
+     https://esmailelbob.xyz/en/
+     https://blog.esmailelbob.xyz/feed/

-1.2) Return URLs, nothing else other (e.g. processed messages).
+1.2) Consider utilizing fetch.http_response

-1.3) NOTE: Correction of URLs is aceptable.

2) Consider merging with module fetch.py

+FEEDS CRAWLER PROJECT
+
3) Mark redirects for manual check

Title : JSON Feed

@@ -163,7 +164,7 @@ async def feed_mode_guess(url, tree):
    """
    urls = []
    parted_url = urlsplit(url)
-    paths = config.get_list("lists.toml", "pathnames")
+    paths = config.open_config_file("lists.toml")["pathnames"]
    # Check whether URL has path (i.e. not root)
    # Check parted_url.path to avoid error in case root wasn't given
    # TODO Make more tests

@@ -202,7 +203,7 @@ async def feed_mode_scan(url, tree):
        Message with URLs.
    """
    urls = []
-    paths = config.get_list("lists.toml", "pathnames")
+    paths = config.open_config_file("lists.toml")["pathnames"]
    for path in paths:
        # xpath_query = "//*[@*[contains(.,'{}')]]".format(path)
        # xpath_query = "//a[contains(@href,'{}')]".format(path)
@@ -10,6 +10,8 @@ FIXME

TODO

+0) Improve function http to return sensible value (the list is not good enough)
+
1) Support Gemini and Gopher.

2) Check also for HTML, not only feed.bozo.

@@ -29,6 +31,7 @@ from asyncio import TimeoutError
import logging
# from lxml import html
# from xml.etree.ElementTree import ElementTree, ParseError
+import requests
import slixfeed.config as config
try:
    from magnet2torrent import Magnet2Torrent, FailedToFetchException

@@ -50,6 +53,44 @@ except:

# async def ipfs():

+def http_response(url):
+    """
+    Download response headers.
+
+    Parameters
+    ----------
+    url : str
+        URL.
+
+    Returns
+    -------
+    response: requests.models.Response
+        HTTP Header Response.
+
+    Result would contain these:
+        response.encoding
+        response.headers
+        response.history
+        response.reason
+        response.status_code
+        response.url
+    """
+    user_agent = (
+        config.get_value(
+            "settings", "Network", "user-agent")
+        ) or 'Slixfeed/0.1'
+    headers = {
+        "User-Agent": user_agent
+    }
+    try:
+        # Don't use HEAD request because quite a few websites may deny it
+        # response = requests.head(url, headers=headers, allow_redirects=True)
+        response = requests.get(url, headers=headers, allow_redirects=True)
+    except Exception as e:
+        logging.error(str(e))
+        response = None
+    return response
+
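A usage sketch for the new helper; the feed URL is only an example taken from elsewhere in this changeset. Since it returns either a requests.Response or None, callers have to guard both cases:

    response = http_response('https://reclaimthenet.org/feed/')
    if response and response.status_code == 200:
        # Follows redirects, so response.url may differ from the input.
        print(response.url, response.headers.get('Content-Type'))
    else:
        print('Request failed or returned a non-200 status.')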
async def http(url):
    """
    Download content of given URL.
@@ -76,6 +76,21 @@ def create_tables(db_file):
        );
        """
    )
+    feeds_statistics_table_sql = (
+        """
+        CREATE TABLE IF NOT EXISTS statistics (
+            id INTEGER NOT NULL,
+            feed_id INTEGER NOT NULL UNIQUE,
+            offline INTEGER,
+            entries INTEGER,
+            FOREIGN KEY ("feed_id") REFERENCES "feeds" ("id")
+              ON UPDATE CASCADE
+              ON DELETE CASCADE,
+            PRIMARY KEY ("id")
+        );
+        """
+    )
    feeds_properties_table_sql = (
        """
        CREATE TABLE IF NOT EXISTS feeds_properties (

@@ -153,16 +168,6 @@ def create_tables(db_file):
        );
        """
    )
-    # statistics_table_sql = (
-    #     """
-    #     CREATE TABLE IF NOT EXISTS statistics (
-    #         id INTEGER NOT NULL,
-    #         title TEXT NOT NULL,
-    #         number INTEGER,
-    #         PRIMARY KEY ("id")
-    #     );
-    #     """
-    # )
    status_table_sql = (
        """
        CREATE TABLE IF NOT EXISTS status (
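The ON DELETE CASCADE clause above ties each statistics row to the lifetime of its feed. A standalone illustration with the standard sqlite3 module; the miniature feeds table here is a stand-in for Slixfeed's real schema, and note that SQLite only enforces foreign keys after PRAGMA foreign_keys = ON:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('PRAGMA foreign_keys = ON')  # off by default in SQLite
    conn.execute('CREATE TABLE feeds (id INTEGER PRIMARY KEY, url TEXT)')
    conn.execute("""
        CREATE TABLE statistics (
            id INTEGER NOT NULL,
            feed_id INTEGER NOT NULL UNIQUE,
            offline INTEGER,
            entries INTEGER,
            FOREIGN KEY ("feed_id") REFERENCES "feeds" ("id")
              ON UPDATE CASCADE
              ON DELETE CASCADE,
            PRIMARY KEY ("id")
        );
    """)
    conn.execute("INSERT INTO feeds VALUES (1, 'https://example.com/feed')")
    conn.execute('INSERT INTO statistics VALUES (1, 1, 0, 42)')
    conn.execute('DELETE FROM feeds WHERE id = 1')
    # The cascade removed the dependent statistics row as well.
    print(conn.execute('SELECT COUNT(*) FROM statistics').fetchone())  # (0,)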

@@ -527,15 +532,6 @@ async def remove_feed_by_index(db_file, ix):
    with create_connection(db_file) as conn:
        async with DBLOCK:
            cur = conn.cursor()
-            sql = (
-                """
-                SELECT url
-                FROM feeds
-                WHERE id = ?
-                """
-            )
-            par = (ix,)
-            url = cur.execute(sql, par).fetchone()[0]
            # # NOTE Should we move DBLOCK to this line? 2022-12-23
            # sql = (
            #     "DELETE "

@@ -559,7 +555,6 @@ async def remove_feed_by_index(db_file, ix):
            )
            par = (ix,)
            cur.execute(sql, par)
-    return url


async def get_feed_id_and_name(db_file, url):

@@ -744,7 +739,7 @@ async def get_feed_id(db_file, url):
        """
    )
    par = (url,)
-    feed_id = cur.execute(sql, par).fetchone()[0]
+    feed_id = cur.execute(sql, par).fetchone()
    return feed_id


@@ -770,7 +765,7 @@ async def mark_entry_as_read(cur, ix):
    cur.execute(sql, par)


-async def mark_feed_as_read(db_file, url):
+async def mark_feed_as_read(db_file, feed_id):
    """
    Set read status of entries of given feed as read.

@@ -778,8 +773,8 @@ async def mark_feed_as_read(db_file, url):
    ----------
    db_file : str
        Path to database file.
-    url : str
-        URL.
+    feed_id : str
+        Feed Id.
    """
    async with DBLOCK:
        with create_connection(db_file) as conn:

@@ -791,7 +786,7 @@ async def mark_feed_as_read(db_file, url):
                WHERE feed_id = ?
                """
            )
-            par = (url,)
+            par = (feed_id,)
            cur.execute(sql, par)


@@ -879,7 +874,7 @@ def get_feed_title(db_file, ix):
        """
    )
    par = (ix,)
-    title = cur.execute(sql, par).fetchone()[0]
+    title = cur.execute(sql, par).fetchone()
    return title


@@ -909,7 +904,7 @@ def get_feed_url(db_file, feed_id):
        """
    )
    par = (feed_id,)
-    url = cur.execute(sql, par).fetchone()[0]
+    url = cur.execute(sql, par).fetchone()
    return url
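One consequence of dropping the [0] subscripts above: get_feed_id, get_feed_title and get_feed_url now hand back the raw sqlite3 row tuple, or None when nothing matches, and unpacking shifts to the caller (as send_update does further down with title_f = title_f[0]). A small standalone sketch of the defensive pattern this implies:

    import sqlite3

    conn = sqlite3.connect(':memory:')
    conn.execute('CREATE TABLE feeds (id INTEGER PRIMARY KEY, url TEXT)')
    conn.execute("INSERT INTO feeds VALUES (1, 'https://example.com/feed')")
    cur = conn.cursor()

    row = cur.execute('SELECT url FROM feeds WHERE id = ?', (1,)).fetchone()
    url = row[0] if row else None  # ('https://example.com/feed',) -> str
    missing = cur.execute('SELECT url FROM feeds WHERE id = ?', (99,)).fetchone()
    # missing is None, which .fetchone()[0] would have turned into a TypeError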
263 slixfeed/task.py
@@ -64,6 +64,7 @@ from slixfeed.sqlite import (
)
# from xmpp import Slixfeed
import slixfeed.xmpp.client as xmpp
+import slixfeed.xmpp.connect as connect
import slixfeed.xmpp.utility as utility
import time

@@ -73,6 +74,26 @@ task_manager = {}
loop = asyncio.get_event_loop()


+# def init_tasks(self):
+#     global task_ping
+#     # if task_ping is None or task_ping.done():
+#     #     task_ping = asyncio.create_task(ping(self, jid=None))
+#     try:
+#         task_ping.cancel()
+#     except:
+#         logging.info('No ping task to cancel')
+#     task_ping = asyncio.create_task(ping(self, jid=None))
+
+
+def ping_task(self):
+    global ping_task
+    try:
+        ping_task.cancel()
+    except:
+        logging.info('No ping task to cancel.')
+    ping_task = asyncio.create_task(connect.ping(self))
+
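The new ping_task applies a cancel-then-recreate idiom so that at most one keepalive task exists at a time. A generic, self-contained asyncio sketch of that idiom; names here are illustrative, not Slixfeed's:

    import asyncio

    background_task = None

    def restart_task(coro_factory):
        # Cancel the previous instance, if any, then schedule a fresh one.
        global background_task
        if background_task is not None:
            background_task.cancel()
        background_task = asyncio.ensure_future(coro_factory())

    async def tick():
        while True:
            await asyncio.sleep(1)

    async def main():
        restart_task(tick)  # first start
        restart_task(tick)  # restart: the old task is cancelled first
        await asyncio.sleep(3)
        background_task.cancel()  # shut down cleanly
        await asyncio.sleep(0)    # let the cancellation propagate

    asyncio.run(main())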
"""
|
"""
|
||||||
FIXME
|
FIXME
|
||||||
|
|
||||||
|
@ -87,22 +108,40 @@ await taskhandler.start_tasks(
|
||||||
)
|
)
|
||||||
|
|
||||||
"""
|
"""
|
||||||
async def start_tasks_xmpp(self, jid, tasks):
|
async def start_tasks_xmpp(self, jid, tasks=None):
|
||||||
logging.debug("Starting tasks {} for JID {}".format(tasks, jid))
|
if jid == self.boundjid.bare:
|
||||||
task_manager[jid] = {}
|
return
|
||||||
|
try:
|
||||||
|
task_manager[jid]
|
||||||
|
print('Old details for tasks of {}:\n'.format(jid), task_manager[jid].keys())
|
||||||
|
except KeyError as e:
|
||||||
|
task_manager[jid] = {}
|
||||||
|
logging.info('KeyError:', str(e))
|
||||||
|
logging.debug('Creating new task manager for JID {}'.format(jid))
|
||||||
|
if not tasks:
|
||||||
|
tasks = ['interval', 'status', 'check']
|
||||||
|
logging.info('Stopping tasks {} for JID {}'.format(tasks, jid))
|
||||||
|
for task in tasks:
|
||||||
|
# if task_manager[jid][task]:
|
||||||
|
try:
|
||||||
|
task_manager[jid][task].cancel()
|
||||||
|
except:
|
||||||
|
logging.debug('No task {} for JID {} (start_tasks_xmpp)'
|
||||||
|
.format(task, jid))
|
||||||
|
logging.info('Starting tasks {} for JID {}'.format(tasks, jid))
|
||||||
for task in tasks:
|
for task in tasks:
|
||||||
# print("task:", task)
|
# print("task:", task)
|
||||||
# print("tasks:")
|
# print("tasks:")
|
||||||
# print(tasks)
|
# print(tasks)
|
||||||
# breakpoint()
|
# breakpoint()
|
||||||
match task:
|
match task:
|
||||||
case "check":
|
case 'check':
|
||||||
task_manager[jid]["check"] = asyncio.create_task(
|
task_manager[jid]['check'] = asyncio.create_task(
|
||||||
check_updates(jid))
|
check_updates(jid))
|
||||||
case "status":
|
case "status":
|
||||||
task_manager[jid]["status"] = asyncio.create_task(
|
task_manager[jid]['status'] = asyncio.create_task(
|
||||||
send_status(self, jid))
|
send_status(self, jid))
|
||||||
case "interval":
|
case 'interval':
|
||||||
jid_file = jid.replace('/', '_')
|
jid_file = jid.replace('/', '_')
|
||||||
db_file = get_pathname_to_database(jid_file)
|
db_file = get_pathname_to_database(jid_file)
|
||||||
update_interval = (
|
update_interval = (
|
||||||
|
@ -116,13 +155,16 @@ async def start_tasks_xmpp(self, jid, tasks):
|
||||||
diff = time.time() - last_update_time
|
diff = time.time() - last_update_time
|
||||||
if diff < update_interval:
|
if diff < update_interval:
|
||||||
next_update_time = update_interval - diff
|
next_update_time = update_interval - diff
|
||||||
print("jid :", jid, "\n"
|
|
||||||
"time :", time.time(), "\n"
|
|
||||||
"last_update_time :", last_update_time, "\n"
|
|
||||||
"difference :", diff, "\n"
|
|
||||||
"update interval :", update_interval, "\n"
|
|
||||||
"next_update_time :", next_update_time, "\n")
|
|
||||||
await asyncio.sleep(next_update_time)
|
await asyncio.sleep(next_update_time)
|
||||||
|
|
||||||
|
# print("jid :", jid, "\n"
|
||||||
|
# "time :", time.time(), "\n"
|
||||||
|
# "last_update_time :", last_update_time, "\n"
|
||||||
|
# "difference :", diff, "\n"
|
||||||
|
# "update interval :", update_interval, "\n"
|
||||||
|
# "next_update_time :", next_update_time, "\n"
|
||||||
|
# )
|
||||||
|
|
||||||
# elif diff > val:
|
# elif diff > val:
|
||||||
# next_update_time = val
|
# next_update_time = val
|
||||||
await update_last_update_time(db_file)
|
await update_last_update_time(db_file)
|
||||||
|
@ -139,84 +181,20 @@ async def start_tasks_xmpp(self, jid, tasks):
|
||||||
# print(jid)
|
# print(jid)
|
||||||
# breakpoint()
|
# breakpoint()
|
||||||
# await task
|
# await task
|
||||||
|
print('New details for tasks of {}:\n'.format(jid), task_manager[jid])
|
||||||
|
|
||||||
|
|
||||||
async def clean_tasks_xmpp(jid, tasks):
|
async def clean_tasks_xmpp(jid, tasks=None):
|
||||||
logging.debug(
|
if not tasks:
|
||||||
"Stopping tasks {} for JID {}".format(tasks, jid)
|
tasks = ['interval', 'status', 'check']
|
||||||
)
|
logging.info('Stopping tasks {} for JID {}'.format(tasks, jid))
|
||||||
for task in tasks:
|
for task in tasks:
|
||||||
# if task_manager[jid][task]:
|
# if task_manager[jid][task]:
|
||||||
try:
|
try:
|
||||||
task_manager[jid][task].cancel()
|
task_manager[jid][task].cancel()
|
||||||
except:
|
except:
|
||||||
logging.debug(
|
logging.debug('No task {} for JID {} (clean_tasks_xmpp)'
|
||||||
"No task {} for JID {} (clean_tasks)".format(task, jid)
|
.format(task, jid))
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
"""
|
|
||||||
TODO
|
|
||||||
|
|
||||||
Rename to "start_tasks"
|
|
||||||
|
|
||||||
Pass a list (or dict) of tasks to start
|
|
||||||
|
|
||||||
NOTE
|
|
||||||
|
|
||||||
Consider callback e.g. Slixfeed.send_status.
|
|
||||||
|
|
||||||
Or taskhandler for each protocol or specific taskhandler function.
|
|
||||||
"""
|
|
||||||
async def task_jid(self, jid):
|
|
||||||
"""
|
|
||||||
JID (Jabber ID) task manager.
|
|
||||||
|
|
||||||
Parameters
|
|
||||||
----------
|
|
||||||
jid : str
|
|
||||||
Jabber ID.
|
|
||||||
"""
|
|
||||||
jid_file = jid.replace('/', '_')
|
|
||||||
db_file = get_pathname_to_database(jid_file)
|
|
||||||
enabled = (
|
|
||||||
await get_settings_value(db_file, "enabled") or
|
|
||||||
get_value("settings", "Settings", "enabled")
|
|
||||||
)
|
|
||||||
if enabled:
|
|
||||||
# NOTE Perhaps we want to utilize super with keyword
|
|
||||||
# arguments in order to know what tasks to initiate.
|
|
||||||
task_manager[jid] = {}
|
|
||||||
task_manager[jid]["check"] = asyncio.create_task(
|
|
||||||
check_updates(jid))
|
|
||||||
task_manager[jid]["status"] = asyncio.create_task(
|
|
||||||
send_status(self, jid))
|
|
||||||
task_manager[jid]["interval"] = asyncio.create_task(
|
|
||||||
send_update(self, jid))
|
|
||||||
await task_manager[jid]["check"]
|
|
||||||
await task_manager[jid]["status"]
|
|
||||||
await task_manager[jid]["interval"]
|
|
||||||
# tasks_dict = {
|
|
||||||
# "check": check_updates,
|
|
||||||
# "status": send_status,
|
|
||||||
# "interval": send_update
|
|
||||||
# }
|
|
||||||
# for task, function in tasks_dict.items():
|
|
||||||
# task_manager[jid][task] = asyncio.create_task(
|
|
||||||
# function(jid)
|
|
||||||
# )
|
|
||||||
# await function
|
|
||||||
else:
|
|
||||||
# FIXME
|
|
||||||
# The following error occurs only upon first attempt to stop.
|
|
||||||
# /usr/lib/python3.11/asyncio/events.py:73: RuntimeWarning: coroutine 'Slixfeed.send_update' was never awaited
|
|
||||||
# self._args = None
|
|
||||||
# RuntimeWarning: Enable tracemalloc to get the object allocation traceback
|
|
||||||
try:
|
|
||||||
task_manager[jid]["interval"].cancel()
|
|
||||||
except:
|
|
||||||
None
|
|
||||||
await send_status(self, jid)
|
|
||||||
|
|
||||||
|
|
||||||
async def send_update(self, jid, num=None):

@@ -230,7 +208,7 @@ async def send_update(self, jid, num=None):
    num : str, optional
        Number. The default is None.
    """
-    logging.debug("Sending a news update to JID {}".format(jid))
+    logging.info('Sending a news update to JID {}'.format(jid))
    jid_file = jid.replace('/', '_')
    db_file = get_pathname_to_database(jid_file)
    enabled = (

@@ -258,6 +236,7 @@ async def send_update(self, jid, num=None):
            feed_id = result[4]
            date = result[5]
            title_f = get_feed_title(db_file, feed_id)
+            title_f = title_f[0]
            news_digest += action.list_unread_entries(result, title_f)
            # print(db_file)
            # print(result[0])

@@ -356,9 +335,8 @@ async def send_status(self, jid):
    jid : str
        Jabber ID.
    """
-    logging.debug(
-        "Sending a status message to JID {}".format(jid))
-    status_text = "📜️ Slixfeed RSS News Bot"
+    logging.info('Sending a status message to JID {}'.format(jid))
+    status_text = '📜️ Slixfeed RSS News Bot'
    jid_file = jid.replace('/', '_')
    db_file = get_pathname_to_database(jid_file)
    enabled = (

@@ -366,24 +344,19 @@ async def send_status(self, jid):
        get_value("settings", "Settings", "enabled")
    )
    if not enabled:
-        status_mode = "xa"
-        status_text = "📫️ Send \"Start\" to receive updates"
+        status_mode = 'xa'
+        status_text = '📫️ Send "Start" to receive updates'
    else:
-        feeds = await get_number_of_items(
-            db_file, "feeds")
+        feeds = await get_number_of_items(db_file, 'feeds')
        # print(await current_time(), jid, "has", feeds, "feeds")
        if not feeds:
-            status_mode = "available"
-            status_text = (
-                "📪️ Send a URL from a blog or a news website"
-            )
+            status_mode = 'available'
+            status_text = '📪️ Send a URL from a blog or a news website'
        else:
            unread = await get_number_of_entries_unread(db_file)
            if unread:
-                status_mode = "chat"
-                status_text = (
-                    "📬️ There are {} news items"
-                ).format(str(unread))
+                status_mode = 'chat'
+                status_text = '📬️ There are {} news items'.format(str(unread))
                # status_text = (
                #     "📰 News items: {}"
                # ).format(str(unread))

@@ -391,8 +364,8 @@ async def send_status(self, jid):
                #     "📰 You have {} news items"
                # ).format(str(unread))
            else:
-                status_mode = "available"
-                status_text = "📭️ No news"
+                status_mode = 'available'
+                status_text = '📭️ No news'

    # breakpoint()
    # print(await current_time(), status_text, "for", jid)

@@ -404,8 +377,7 @@ async def send_status(self, jid):
        pstatus=status_text
    )
    # await asyncio.sleep(60 * 20)
-    await refresh_task(
-        self, jid, send_status, "status", "20")
+    await refresh_task(self, jid, send_status, 'status', '90')
    # loop.call_at(
    #     loop.time() + 60 * 20,
    #     loop.create_task,

@@ -426,9 +398,7 @@ async def refresh_task(self, jid, callback, key, val=None):
    val : str, optional
        Value. The default is None.
    """
-    logging.debug(
-        "Refreshing task {} for JID {}".format(callback, jid)
-    )
+    logging.info('Refreshing task {} for JID {}'.format(callback, jid))
    if not val:
        jid_file = jid.replace('/', '_')
        db_file = get_pathname_to_database(jid_file)

@@ -441,9 +411,8 @@ async def refresh_task(self, jid, callback, key, val=None):
        try:
            task_manager[jid][key].cancel()
        except:
-            logging.debug(
-                "No task of type {} to cancel for "
-                "JID {} (clean_tasks)".format(key, jid)
-            )
+            logging.info('No task of type {} to cancel for '
+                         'JID {} (refresh_task)'.format(key, jid)
+                         )
    # task_manager[jid][key] = loop.call_at(
    #     loop.time() + 60 * float(val),

@@ -482,9 +451,7 @@ async def check_updates(jid):
    jid : str
        Jabber ID.
    """
-    logging.debug(
-        "Scanning for updates for JID {}".format(jid)
-    )
+    logging.info('Scanning for updates for JID {}'.format(jid))
    while True:
        jid_file = jid.replace('/', '_')
        db_file = get_pathname_to_database(jid_file)

@@ -502,64 +469,6 @@ async def check_updates(jid):
        # )


-async def start_tasks(self, presence):
-    jid = presence["from"].bare
-    logging.debug(
-        "Beginning tasks for JID {}".format(jid)
-    )
-    if jid not in self.boundjid.bare:
-        await clean_tasks_xmpp(
-            jid, ["interval", "status", "check"]
-        )
-        await start_tasks_xmpp(
-            self, jid, ["interval", "status", "check"]
-        )
-        # await task_jid(self, jid)
-        # main_task.extend([asyncio.create_task(task_jid(jid))])
-        # print(main_task)
-
-
-async def stop_tasks(self, presence):
-    if not self.boundjid.bare:
-        jid = presence["from"].bare
-        logging.debug(
-            "Stopping tasks for JID {}".format(jid)
-        )
-        await clean_tasks_xmpp(
-            jid, ["interval", "status", "check"]
-        )
-
-
-async def check_readiness(self, presence):
-    """
-    Begin tasks if available, otherwise eliminate tasks.
-
-    Parameters
-    ----------
-    presence : str
-        XML stanza .
-
-    Returns
-    -------
-    None.
-    """
-    # print("def check_readiness", presence["from"].bare, presence["type"])
-    # # available unavailable away (chat) dnd xa
-    # print(">>> type", presence["type"], presence["from"].bare)
-    # # away chat dnd xa
-    # print(">>> show", presence["show"], presence["from"].bare)
-
-    jid = presence["from"].bare
-    if presence["show"] in ("away", "dnd", "xa"):
-        logging.debug(
-            "Stopping updates for JID {}".format(jid)
-        )
-        await clean_tasks_xmpp(
-            jid, ["interval"])
-        await start_tasks_xmpp(
-            self, jid, ["status", "check"])


"""
NOTE
This is an older system, utilizing local storage instead of XMPP presence.

@@ -573,13 +482,11 @@ async def select_file(self):
    while True:
        db_dir = get_default_data_directory()
        if not os.path.isdir(db_dir):
-            msg = (
-                "Slixfeed can not work without a database.\n"
-                "To create a database, follow these steps:\n"
-                "Add Slixfeed contact to your roster.\n"
-                "Send a feed to the bot by URL:\n"
-                "https://reclaimthenet.org/feed/"
-            )
+            msg = ('Slixfeed does not work without a database.\n'
+                   'To create a database, follow these steps:\n'
+                   'Add Slixfeed contact to your roster.\n'
+                   'Send a feed to the bot by URL:\n'
+                   'https://reclaimthenet.org/feed/')
            # print(await current_time(), msg)
            print(msg)
        else:
150 slixfeed/url.py
@@ -7,11 +7,16 @@ TODO

1) ActivityPub URL revealer activitypub_to_http.

+2) SQLite preference "instance" for preferred instances.
+
"""

from email.utils import parseaddr
+import logging
+import os
import random
import slixfeed.config as config
+import slixfeed.fetch as fetch
from urllib.parse import (
    parse_qs,
    urlencode,

@@ -31,6 +36,10 @@ from urllib.parse import (
# coordinated with the dataset of project LibRedirect, even
# though rule-sets might be adopted (see Privacy Redirect).

+def get_hostname(url):
+    parted_url = urlsplit(url)
+    return parted_url.netloc
+

def replace_hostname(url, url_type):
    """
    Replace hostname.

@@ -47,29 +56,56 @@ def replace_hostname(url, url_type):
    url : str
        URL.
    """
+    url_new = None
    parted_url = urlsplit(url)
    # protocol = parted_url.scheme
    hostname = parted_url.netloc
-    hostname = hostname.replace("www.","")
+    hostname = hostname.replace('www.','')
    pathname = parted_url.path
    queries = parted_url.query
    fragment = parted_url.fragment
-    proxies = config.get_list("proxies.toml", "proxies")
-    for proxy in proxies:
-        proxy = proxies[proxy]
-        if hostname in proxy["hostname"] and url_type in proxy["type"]:
-            select_proxy = random.choice(proxy["clearnet"])
-            parted_proxy = urlsplit(select_proxy)
-            protocol_new = parted_proxy.scheme
-            hostname_new = parted_proxy.netloc
-            url = urlunsplit([
-                protocol_new,
-                hostname_new,
-                pathname,
-                queries,
-                fragment
-            ])
-    return url
+    proxies = config.open_config_file('proxies.toml')['proxies']
+    for proxy_name in proxies:
+        proxy = proxies[proxy_name]
+        if hostname in proxy['hostname'] and url_type in proxy['type']:
+            while not url_new:
+                proxy_type = 'clearnet'
+                proxy_list = proxy[proxy_type]
+                if len(proxy_list):
+                    # proxy_list = proxies[proxy_name][proxy_type]
+                    proxy_url = random.choice(proxy_list)
+                    parted_proxy_url = urlsplit(proxy_url)
+                    protocol_new = parted_proxy_url.scheme
+                    hostname_new = parted_proxy_url.netloc
+                    url_new = urlunsplit([
+                        protocol_new,
+                        hostname_new,
+                        pathname,
+                        queries,
+                        fragment
+                    ])
+                    response = fetch.http_response(url_new)
+                    if (response and
+                            response.status_code == 200 and
+                            response.reason == 'OK' and
+                            url_new.startswith(proxy_url)):
+                        break
+                    else:
+                        config_dir = config.get_default_config_directory()
+                        proxies_obsolete_file = config_dir + '/proxies_obsolete.toml'
+                        proxies_file = config_dir + '/proxies.toml'
+                        if not os.path.isfile(proxies_obsolete_file):
+                            config.create_skeleton(proxies_file)
+                        config.backup_obsolete(proxies_obsolete_file,
+                                               proxy_name, proxy_type,
+                                               proxy_url)
+                        config.update_proxies(proxies_file, proxy_name,
+                                              proxy_type, proxy_url)
+                        url_new = None
+                else:
+                    logging.warning(
+                        "No proxy URLs for {}. "
+                        "Update proxies.toml".format(proxy_name))
+                    url_new = url
+                    break
+    return url_new
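The reworked replace_hostname now keeps drawing mirrors until one answers with HTTP 200, retiring unreachable ones into proxies_obsolete.toml, and falls back to the original URL once a proxy's clearnet list is exhausted. A usage sketch; the tweet URL is invented, and the output depends on mirror availability:

    import slixfeed.url as url_handler

    # 'link' matches the "type" list of the nitter table in proxies.toml.
    original = 'https://twitter.com/someuser/status/1234567890'
    mirrored = url_handler.replace_hostname(original, 'link')
    # e.g. 'https://nitter.private.coffee/someuser/status/1234567890',
    # or the original URL if every known mirror is down.
    print(mirrored)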
def remove_tracking_parameters(url):

@@ -92,7 +128,7 @@ def remove_tracking_parameters(url):
    pathname = parted_url.path
    queries = parse_qs(parted_url.query)
    fragment = parted_url.fragment
-    trackers = config.get_list("queries.toml", "trackers")
+    trackers = config.open_config_file('queries.toml')['trackers']
    for tracker in trackers:
        if tracker in queries: del queries[tracker]
    queries_new = urlencode(queries, doseq=True)

@@ -122,7 +158,7 @@ def feed_to_http(url):
    """
    par_url = urlsplit(url)
    new_url = urlunsplit([
-        "http",
+        'http',
        par_url.netloc,
        par_url.path,
        par_url.query,

@@ -169,15 +205,15 @@ def complete_url(source, link):
    str
        URL.
    """
-    if link.startswith("www."):
-        return "http://" + link
+    if link.startswith('www.'):
+        return 'http://' + link
    parted_link = urlsplit(link)
    parted_feed = urlsplit(source)
-    if parted_link.scheme == "magnet" and parted_link.query:
+    if parted_link.scheme == 'magnet' and parted_link.query:
        return link
    if parted_link.scheme and parted_link.netloc:
        return link
-    if link.startswith("//"):
+    if link.startswith('//'):
        if parted_link.netloc and parted_link.path:
            new_link = urlunsplit([
                parted_feed.scheme,

@@ -186,7 +222,7 @@ def complete_url(source, link):
                parted_link.query,
                parted_link.fragment
            ])
-    elif link.startswith("/"):
+    elif link.startswith('/'):
        new_link = urlunsplit([
            parted_feed.scheme,
            parted_feed.netloc,

@@ -194,57 +230,59 @@ def complete_url(source, link):
            parted_link.query,
            parted_link.fragment
        ])
-    elif link.startswith("../"):
-        pathlink = parted_link.path.split("/")
-        pathfeed = parted_feed.path.split("/")
+    elif link.startswith('../'):
+        pathlink = parted_link.path.split('/')
+        pathfeed = parted_feed.path.split('/')
        for i in pathlink:
-            if i == "..":
-                if pathlink.index("..") == 0:
+            if i == '..':
+                if pathlink.index('..') == 0:
                    pathfeed.pop()
                else:
                    break
-        while pathlink.count(".."):
-            if pathlink.index("..") == 0:
-                pathlink.remove("..")
+        while pathlink.count('..'):
+            if pathlink.index('..') == 0:
+                pathlink.remove('..')
            else:
                break
-        pathlink = "/".join(pathlink)
+        pathlink = '/'.join(pathlink)
        pathfeed.extend([pathlink])
        new_link = urlunsplit([
            parted_feed.scheme,
            parted_feed.netloc,
-            "/".join(pathfeed),
+            '/'.join(pathfeed),
            parted_link.query,
            parted_link.fragment
        ])
    else:
-        pathlink = parted_link.path.split("/")
-        pathfeed = parted_feed.path.split("/")
-        if link.startswith("./"):
-            pathlink.remove(".")
-        if not source.endswith("/"):
+        pathlink = parted_link.path.split('/')
+        pathfeed = parted_feed.path.split('/')
+        if link.startswith('./'):
+            pathlink.remove('.')
+        if not source.endswith('/'):
            pathfeed.pop()
-        pathlink = "/".join(pathlink)
+        pathlink = '/'.join(pathlink)
        pathfeed.extend([pathlink])
        new_link = urlunsplit([
            parted_feed.scheme,
            parted_feed.netloc,
-            "/".join(pathfeed),
+            '/'.join(pathfeed),
            parted_link.query,
            parted_link.fragment
        ])
    return new_link


-"""
-TODO
-Feed https://www.ocaml.org/feed.xml
-Link %20https://frama-c.com/fc-versions/cobalt.html%20
-
-FIXME
-Feed https://cyber.dabamos.de/blog/feed.rss
-Link https://cyber.dabamos.de/blog/#article-2022-07-15
-"""
+# TODO
+# Feed https://www.ocaml.org/feed.xml
+# Link %20https://frama-c.com/fc-versions/cobalt.html%20

+# FIXME

+# Feed https://cyber.dabamos.de/blog/feed.rss
+# Link https://cyber.dabamos.de/blog/#article-2022-07-15

def join_url(source, link):
    """
    Join base URL with given pathname.

@@ -261,13 +299,13 @@ def join_url(source, link):
    str
        URL.
    """
-    if link.startswith("www."):
-        new_link = "http://" + link
-    elif link.startswith("%20") and link.endswith("%20"):
-        old_link = link.split("%20")
+    if link.startswith('www.'):
+        new_link = 'http://' + link
+    elif link.startswith('%20') and link.endswith('%20'):
+        old_link = link.split('%20')
        del old_link[0]
        old_link.pop()
-        new_link = "".join(old_link)
+        new_link = ''.join(old_link)
    else:
        new_link = urljoin(source, link)
    return new_link
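Apart from the www. and %20 special cases, join_url defers to urllib.parse.urljoin, whose resolution rules do the actual work. A brief illustration with the standard library:

    from urllib.parse import urljoin

    base = 'https://cyber.dabamos.de/blog/feed.rss'
    print(urljoin(base, '/news/'))         # https://cyber.dabamos.de/news/
    print(urljoin(base, 'item.html'))      # https://cyber.dabamos.de/blog/item.html
    print(urljoin(base, '//cdn.example'))  # https://cdn.example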
@@ -293,8 +331,8 @@ def trim_url(url):
    pathname = parted_url.path
    queries = parted_url.query
    fragment = parted_url.fragment
-    while "//" in pathname:
-        pathname = pathname.replace("//", "/")
+    while '//' in pathname:
+        pathname = pathname.replace('//', '/')
    url = urlunsplit([
        protocol,
        hostname,
@@ -1,10 +1,24 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

+"""
+
+TODO
+
+1) Save groupchat name instead of jid in field name.
+
+"""
+
from slixmpp.plugins.xep_0048.stanza import Bookmarks


+async def get(self):
+    result = await self.plugin['xep_0048'].get_bookmarks()
+    bookmarks = result['private']['bookmarks']
+    conferences = bookmarks['conferences']
+    return conferences
+
+
async def add(self, muc_jid):
    result = await self.plugin['xep_0048'].get_bookmarks()
    bookmarks = result['private']['bookmarks']

@@ -32,13 +46,6 @@ async def add(self, muc_jid):
    # await self['xep_0402'].publish(bm)


-async def get(self):
-    result = await self.plugin['xep_0048'].get_bookmarks()
-    bookmarks = result['private']['bookmarks']
-    conferences = bookmarks['conferences']
-    return conferences
-
-
async def remove(self, muc_jid):
    result = await self.plugin['xep_0048'].get_bookmarks()
    bookmarks = result['private']['bookmarks']
@ -16,14 +16,7 @@ TODO
|
||||||
2) Assure message delivery before calling a new task.
|
2) Assure message delivery before calling a new task.
|
||||||
See https://slixmpp.readthedocs.io/en/latest/event_index.html#term-marker_acknowledged
|
See https://slixmpp.readthedocs.io/en/latest/event_index.html#term-marker_acknowledged
|
||||||
|
|
||||||
3) Check the lesyt message sent by the bot.
|
3) XHTTML-IM
|
||||||
This is essential in case bot restarts within an update interval.
|
|
||||||
Example:
|
|
||||||
Bot is set to send an update every 5 hours.
|
|
||||||
Bot was disconnected and reconnected after an hour.
|
|
||||||
Bot will send an update when it is connected, which is lesser than 5 hours as it should.
|
|
||||||
|
|
||||||
4) XHTTML-IM
|
|
||||||
case _ if message_lowercase.startswith("html"):
|
case _ if message_lowercase.startswith("html"):
|
||||||
message['html']="
|
message['html']="
|
||||||
Parse me!
|
Parse me!
|
||||||
|
@ -67,12 +60,13 @@ from slixmpp.plugins.xep_0048.stanza import Bookmarks
|
||||||
# import xml.etree.ElementTree as ET
|
# import xml.etree.ElementTree as ET
|
||||||
# from lxml import etree
|
# from lxml import etree
|
||||||
|
|
||||||
|
import slixfeed.xmpp.bookmark as bookmark
|
||||||
import slixfeed.xmpp.connect as connect
|
import slixfeed.xmpp.connect as connect
|
||||||
import slixfeed.xmpp.muc as muc
|
import slixfeed.xmpp.muc as muc
|
||||||
import slixfeed.xmpp.process as process
|
import slixfeed.xmpp.process as process
|
||||||
import slixfeed.xmpp.profile as profile
|
import slixfeed.xmpp.profile as profile
|
||||||
import slixfeed.xmpp.roster as roster
|
import slixfeed.xmpp.roster as roster
|
||||||
import slixfeed.xmpp.service as service
|
# import slixfeed.xmpp.service as service
|
||||||
import slixfeed.xmpp.state as state
|
import slixfeed.xmpp.state as state
|
||||||
import slixfeed.xmpp.status as status
|
import slixfeed.xmpp.status as status
|
||||||
import slixfeed.xmpp.utility as utility
|
import slixfeed.xmpp.utility as utility
|
||||||
|
@ -110,39 +104,54 @@ class Slixfeed(slixmpp.ClientXMPP):
|
||||||
# and the XML streams are ready for use. We want to
|
# and the XML streams are ready for use. We want to
|
||||||
# listen for this event so that we we can initialize
|
# listen for this event so that we we can initialize
|
||||||
# our roster.
|
# our roster.
|
||||||
self.add_event_handler("session_start", self.on_session_start)
|
self.add_event_handler("session_start",
|
||||||
self.add_event_handler("session_resumed", self.on_session_resumed)
|
self.on_session_start)
|
||||||
|
self.add_event_handler("session_resumed",
|
||||||
|
self.on_session_resumed)
|
||||||
self.add_event_handler("got_offline", print("got_offline"))
|
self.add_event_handler("got_offline", print("got_offline"))
|
||||||
# self.add_event_handler("got_online", self.check_readiness)
|
# self.add_event_handler("got_online", self.check_readiness)
|
||||||
self.add_event_handler("changed_status", self.on_changed_status)
|
self.add_event_handler("changed_status",
|
||||||
self.add_event_handler("presence_available", self.on_presence_available)
|
self.on_changed_status)
|
||||||
self.add_event_handler("presence_unavailable", self.on_presence_unavailable)
|
self.add_event_handler("presence_available",
|
||||||
|
self.on_presence_available)
|
||||||
self.add_event_handler("changed_subscription", self.on_changed_subscription)
|
self.add_event_handler("presence_unavailable",
|
||||||
|
self.on_presence_unavailable)
|
||||||
self.add_event_handler("chatstate_active", self.on_chatstate_active)
|
self.add_event_handler("chatstate_active",
|
||||||
self.add_event_handler("chatstate_gone", self.on_chatstate_gone)
|
self.on_chatstate_active)
|
||||||
self.add_event_handler("chatstate_composing", self.check_chatstate_composing)
|
self.add_event_handler("chatstate_composing",
|
||||||
self.add_event_handler("chatstate_paused", self.check_chatstate_paused)
|
self.on_chatstate_composing)
|
||||||
|
self.add_event_handler("chatstate_gone",
|
||||||
|
self.on_chatstate_gone)
|
||||||
|
self.add_event_handler("chatstate_inactive",
|
||||||
|
self.on_chatstate_inactive)
|
||||||
|
self.add_event_handler("chatstate_paused",
|
||||||
|
self.on_chatstate_paused)
|
||||||
|
|
||||||
# The message event is triggered whenever a message
|
# The message event is triggered whenever a message
|
||||||
# stanza is received. Be aware that that includes
|
# stanza is received. Be aware that that includes
|
||||||
# MUC messages and error messages.
|
# MUC messages and error messages.
|
||||||
self.add_event_handler("message", self.on_message)
|
self.add_event_handler("message",
|
||||||
|
self.on_message)
|
||||||
|
|
||||||
self.add_event_handler("groupchat_invite", self.on_groupchat_invite) # XEP_0045
|
self.add_event_handler("groupchat_invite",
|
||||||
self.add_event_handler("groupchat_direct_invite", self.on_groupchat_direct_invite) # XEP_0249
|
self.on_groupchat_invite) # XEP_0045
|
||||||
|
self.add_event_handler("groupchat_direct_invite",
|
||||||
|
self.on_groupchat_direct_invite) # XEP_0249
|
||||||
# self.add_event_handler("groupchat_message", self.message)
|
# self.add_event_handler("groupchat_message", self.message)
|
||||||
|
|
||||||
# self.add_event_handler("disconnected", self.reconnect)
|
# self.add_event_handler("disconnected", self.reconnect)
|
||||||
# self.add_event_handler("disconnected", self.inspect_connection)
|
# self.add_event_handler("disconnected", self.inspect_connection)
|
||||||
|
|
||||||
self.add_event_handler("reactions", self.on_reactions)
|
self.add_event_handler("reactions",
|
||||||
self.add_event_handler("presence_error", self.on_presence_error)
|
self.on_reactions)
|
||||||
self.add_event_handler("presence_subscribe", self.on_presence_subscribe)
|
self.add_event_handler("presence_error",
|
||||||
self.add_event_handler("presence_subscribed", self.on_presence_subscribed)
|
self.on_presence_error)
|
||||||
self.add_event_handler("presence_unsubscribe", self.on_presence_unsubscribe)
|
self.add_event_handler("presence_subscribe",
|
||||||
self.add_event_handler("presence_unsubscribed", self.on_presence_unsubscribed)
|
self.on_presence_subscribe)
|
||||||
|
self.add_event_handler("presence_subscribed",
|
||||||
|
self.on_presence_subscribed)
|
||||||
|
self.add_event_handler("presence_unsubscribed",
|
||||||
|
self.on_presence_unsubscribed)
|
||||||
|
|
||||||
# Initialize event loop
|
# Initialize event loop
|
||||||
# self.loop = asyncio.get_event_loop()
|
# self.loop = asyncio.get_event_loop()
|
||||||
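
For reference, the registration pattern rewrapped in this hunk follows slixmpp's standard event API: one event name, one callback, with coroutines and plain functions both accepted. A minimal sketch (the JID, password, and echo behavior below are illustrative, not Slixfeed's):

```python
import slixmpp

class MinimalBot(slixmpp.ClientXMPP):
    def __init__(self, jid, password):
        super().__init__(jid, password)
        # slixmpp dispatches each named event to its registered callback.
        self.add_event_handler("session_start", self.on_session_start)
        self.add_event_handler("message", self.on_message)

    async def on_session_start(self, event):
        self.send_presence()
        await self.get_roster()

    async def on_message(self, message):
        # The "message" event fires for MUC and error stanzas too,
        # hence the type filter.
        if message["type"] in ("chat", "normal"):
            message.reply("Received: {}".format(message["body"])).send()
```

Note that the unchanged context line `self.add_event_handler("got_offline", print("got_offline"))` passes the result of calling print (that is, None) rather than a callback, so nothing runs when the event fires; a lambda or a named function would be needed there.
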
@@ -154,39 +163,61 @@ class Slixfeed(slixmpp.ClientXMPP):
         self.add_event_handler("session_end", self.on_session_end)


+    # TODO Test
     async def on_groupchat_invite(self, message):
-        print("on_groupchat_invite")
-        await muc.accept_invitation(self, message)
+        logging.warning("on_groupchat_invite")
+        inviter = message["from"].bare
+        muc_jid = message['groupchat_invite']['jid']
+        await muc.join(self, inviter, muc_jid)
+        await bookmark.add(self, muc_jid)


+    # NOTE Tested with Gajim and Psi
     async def on_groupchat_direct_invite(self, message):
-        print("on_groupchat_direct_invite")
-        await muc.accept_invitation(self, message)
+        inviter = message["from"].bare
+        muc_jid = message['groupchat_invite']['jid']
+        await muc.join(self, inviter, muc_jid)
+        await bookmark.add(self, muc_jid)


     async def on_session_end(self, event):
-        if event:
-            message = "Session has ended. Reason: {}".format(event)
-        else:
-            message = "Session has ended."
-        await connect.recover_connection(self, event, message)
+        message = "Session has ended."
+        await connect.recover_connection(self, message)


     async def on_connection_failed(self, event):
         message = "Connection has failed. Reason: {}".format(event)
-        await connect.recover_connection(self, event, message)
+        await connect.recover_connection(self, message)


     async def on_session_start(self, event):
-        await process.event(self, event)
+        await process.event(self)
         await muc.autojoin(self)
+        profile.set_identity(self, "client")
         await profile.update(self)
-        service.identity(self, "client")
+        task.ping_task(self)

+        # await Service.capabilities(self)
+        # Service.commands(self)
+        # Service.reactions(self)

+        await self.service_capabilities()
+        self.service_commands()
+        self.service_reactions()


     async def on_session_resumed(self, event):
-        await process.event(self, event)
+        await process.event(self)
         await muc.autojoin(self)
+        profile.set_identity(self, "client")

+        # await Service.capabilities(self)
+        # Service.commands(self)
+        # Service.reactions(self)

+        await self.service_capabilities()
+        self.service_commands()
+        self.service_reactions()


     # TODO Request for subscription
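
The rewritten invite handlers delegate to muc.join and bookmark.add; muc.join appears in a later hunk of this commit, while the bookmark module's diff is not visible here. Inlined, the flow they implement is roughly the following sketch, using only the XEP-0045 plugin call shown elsewhere in this diff (bookmark persistence is left to the separate helper):

```python
async def on_groupchat_invite(self, message):
    # Sketch of the delegated flow above; message['groupchat_invite']['jid']
    # is exactly how this commit reads the invited room's address.
    inviter = message["from"].bare
    muc_jid = message["groupchat_invite"]["jid"]
    self.plugin["xep_0045"].join_muc(muc_jid, self.alias)
    # Persisting the room (XEP-0048) happens in bookmark.add().
```
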
@@ -195,20 +226,21 @@ class Slixfeed(slixmpp.ClientXMPP):
         if "chat" == await utility.get_chat_type(self, jid):
             await roster.add(self, jid)
             await state.request(self, jid)
+        await process.message(self, message)
         # chat_type = message["type"]
         # message_body = message["body"]
         # message_reply = message.reply
-        await process.message(self, message)


     async def on_changed_status(self, presence):
-        await task.check_readiness(self, presence)
+        # await task.check_readiness(self, presence)
+        jid = presence['from'].bare
+        if presence['show'] in ('away', 'dnd', 'xa'):
+            await task.clean_tasks_xmpp(jid, ['interval'])
+            await task.start_tasks_xmpp(self, jid, ['status', 'check'])


-    # TODO Request for subscription
     async def on_presence_subscribe(self, presence):
-        print("on_presence_subscribe")
-        print(presence)
         jid = presence["from"].bare
         await state.request(self, jid)
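
In the client, an incoming subscription request is now handled through state.request, while the component variant later in this diff replies with send_presence_subscription directly. For orientation, a conventional mutual handshake in slixmpp looks like the sketch below; the auto-approve policy shown is illustrative, not necessarily Slixfeed's:

```python
async def on_presence_subscribe(self, presence):
    jid = presence["from"].bare
    # Approve the contact's subscription request...
    self.send_presence(pto=jid, ptype="subscribed")
    # ...and ask for the contact's presence in return.
    self.send_presence_subscription(pto=jid)
```
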
@@ -220,7 +252,10 @@ class Slixfeed(slixmpp.ClientXMPP):

     async def on_presence_available(self, presence):
         # TODO Add function to check whether task is already running or not
-        await task.start_tasks(self, presence)
+        # await task.start_tasks(self, presence)
+        # NOTE Already done inside the start-task function
+        jid = presence["from"].bare
+        await task.start_tasks_xmpp(self, jid)


     async def on_presence_unsubscribed(self, presence):
@@ -230,64 +265,59 @@ class Slixfeed(slixmpp.ClientXMPP):


     async def on_presence_unavailable(self, presence):
-        await task.stop_tasks(self, presence)
-
-
-    async def on_changed_subscription(self, presence):
-        print("on_changed_subscription")
-        print(presence)
         jid = presence["from"].bare
-        # breakpoint()
+        # await task.stop_tasks(self, jid)
+        await task.clean_tasks_xmpp(jid)

-    async def on_presence_unsubscribe(self, presence):
-        print("on_presence_unsubscribe")
-        print(presence)


+    # TODO
+    # Send message that database will be deleted within 30 days
+    # Check whether JID is in bookmarks or roster
+    # If roster, remove contact JID into file
+    # If bookmarks, remove groupchat JID into file
     async def on_presence_error(self, presence):
         print("on_presence_error")
         print(presence)
+        jid = presence["from"].bare
+        await task.clean_tasks_xmpp(jid)


     async def on_reactions(self, message):
-        print("on_reactions")
-        print(message)
+        print(message['from'])
+        print(message['reactions']['values'])


     async def on_chatstate_active(self, message):
-        print("on_chatstate_active")
-        print(message)
+        if message['type'] in ('chat', 'normal'):
+            jid = message['from'].bare
+            # await task.clean_tasks_xmpp(jid, ['status'])
+            await task.start_tasks_xmpp(self, jid, ['status'])


+    async def on_chatstate_composing(self, message):
+        if message['type'] in ('chat', 'normal'):
+            jid = message['from'].bare
+            # await task.clean_tasks_xmpp(jid, ['status'])
+            status_text = 'Press "help" for manual, or "info" for information.'
+            status.send(self, jid, status_text)


     async def on_chatstate_gone(self, message):
-        print("on_chatstate_gone")
-        print(message)
+        if message['type'] in ('chat', 'normal'):
+            jid = message['from'].bare
+            # await task.clean_tasks_xmpp(jid, ['status'])
+            await task.start_tasks_xmpp(self, jid, ['status'])


-    async def check_chatstate_composing(self, message):
-        print("def check_chatstate_composing")
-        print(message)
-        if message["type"] in ("chat", "normal"):
-            jid = message["from"].bare
-            status_text = "Press \"help\" for manual."
-            self.send_presence(
-                # pshow=status_mode,
-                pstatus=status_text,
-                pto=jid,
-            )
+    async def on_chatstate_inactive(self, message):
+        if message['type'] in ('chat', 'normal'):
+            jid = message['from'].bare
+            # await task.clean_tasks_xmpp(jid, ['status'])
+            await task.start_tasks_xmpp(self, jid, ['status'])


-    async def check_chatstate_paused(self, message):
-        print("def check_chatstate_paused")
-        print(message)
-        if message["type"] in ("chat", "normal"):
-            jid = message["from"].bare
-            await task.refresh_task(
-                self,
-                jid,
-                task.send_status,
-                "status",
-                20
-            )
+    async def on_chatstate_paused(self, message):
+        if message['type'] in ('chat', 'normal'):
+            jid = message['from'].bare
+            # await task.clean_tasks_xmpp(jid, ['status'])
+            await task.start_tasks_xmpp(self, jid, ['status'])
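
All of the renamed on_chatstate_* handlers share one shape: filter to chat/normal messages, take the bare JID, and refresh that JID's status task. Chat states (XEP-0085) arrive as ordinary message stanzas, so the state is also readable directly from the stanza; a self-contained sketch:

```python
import logging

async def on_chatstate(self, message):
    # XEP-0085 chat states ride on message stanzas; the event name
    # already encodes the state, but the stanza carries it as well.
    if message["type"] in ("chat", "normal"):
        jid = message["from"].bare
        logging.info("%s is now %s", jid, message["chat_state"])
```
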
@@ -60,12 +60,14 @@ from slixmpp.plugins.xep_0048.stanza import Bookmarks
 # import xml.etree.ElementTree as ET
 # from lxml import etree

+# import slixfeed.xmpp.bookmark as bookmark
 import slixfeed.xmpp.connect as connect
-import slixfeed.xmpp.muc as muc
+# NOTE MUC is possible for component
+# import slixfeed.xmpp.muc as muc
 import slixfeed.xmpp.process as process
 import slixfeed.xmpp.profile as profile
-import slixfeed.xmpp.roster as roster
-import slixfeed.xmpp.service as service
+# import slixfeed.xmpp.roster as roster
+# import slixfeed.xmpp.service as service
 import slixfeed.xmpp.state as state
 import slixfeed.xmpp.status as status
 import slixfeed.xmpp.utility as utility
@@ -102,9 +104,6 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
         self.add_event_handler("changed_status", self.on_changed_status)
         self.add_event_handler("presence_available", self.on_presence_available)
         self.add_event_handler("presence_unavailable", self.on_presence_unavailable)

-        self.add_event_handler("changed_subscription", self.on_changed_subscription)
-
         self.add_event_handler("chatstate_active", self.on_chatstate_active)
         self.add_event_handler("chatstate_gone", self.on_chatstate_gone)
         self.add_event_handler("chatstate_composing", self.check_chatstate_composing)
@@ -126,7 +125,6 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
         self.add_event_handler("presence_error", self.on_presence_error)
         self.add_event_handler("presence_subscribe", self.on_presence_subscribe)
         self.add_event_handler("presence_subscribed", self.on_presence_subscribed)
-        self.add_event_handler("presence_unsubscribe", self.on_presence_unsubscribe)
         self.add_event_handler("presence_unsubscribed", self.on_presence_unsubscribed)

         # Initialize event loop
@@ -139,39 +137,61 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
         self.add_event_handler("session_end", self.on_session_end)


-    async def on_groupchat_invite(self, message):
-        print("on_groupchat_invite")
-        await muc.accept_invitation(self, message)
+    # async def on_groupchat_invite(self, message):
+    #     logging.warning("on_groupchat_invite")
+    #     inviter = message["from"].bare
+    #     muc_jid = message['groupchat_invite']['jid']
+    #     await muc.join(self, inviter, muc_jid)
+    #     await bookmark.add(self, muc_jid)


-    async def on_groupchat_direct_invite(self, message):
-        print("on_groupchat_direct_invite")
-        await muc.accept_invitation(self, message)
+    # NOTE Tested with Gajim and Psi
+    # async def on_groupchat_direct_invite(self, message):
+    #     inviter = message["from"].bare
+    #     muc_jid = message['groupchat_invite']['jid']
+    #     await muc.join(self, inviter, muc_jid)
+    #     await bookmark.add(self, muc_jid)


     async def on_session_end(self, event):
-        if event:
-            message = "Session has ended. Reason: {}".format(event)
-        else:
-            message = "Session has ended."
-        await connect.recover_connection(self, event, message)
+        message = "Session has ended."
+        await connect.recover_connection(self, message)


     async def on_connection_failed(self, event):
         message = "Connection has failed. Reason: {}".format(event)
-        await connect.recover_connection(self, event, message)
+        await connect.recover_connection(self, message)


     async def on_session_start(self, event):
-        await process.event_component(self, event)
+        self.send_presence()
+        await process.event_component(self)
         # await muc.autojoin(self)
+        profile.set_identity(self, "service")
         await profile.update(self)
-        service.identity(self, "service")
+        connect.ping_task(self)

+        # await Service.capabilities(self)
+        # Service.commands(self)
+        # Service.reactions(self)

+        await self.service_capabilities()
+        self.service_commands()
+        self.service_reactions()


     async def on_session_resumed(self, event):
-        await process.event_component(self, event)
+        await process.event_component(self)
         # await muc.autojoin(self)
+        profile.set_identity(self, "service")

+        # await Service.capabilities(self)
+        # Service.commands(self)
+        # Service.reactions(self)

+        await self.service_capabilities()
+        self.service_commands()
+        self.service_reactions()


     # TODO Request for subscription
@@ -180,20 +200,17 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):
         # if "chat" == await utility.get_chat_type(self, jid):
         #     await roster.add(self, jid)
         #     await state.request(self, jid)
+        await process.message(self, message)
         # chat_type = message["type"]
         # message_body = message["body"]
         # message_reply = message.reply
-        await process.message(self, message)


     async def on_changed_status(self, presence):
         await task.check_readiness(self, presence)


-    # TODO Request for subscription
     async def on_presence_subscribe(self, presence):
-        print("on_presence_subscribe")
-        print(presence)
         jid = presence["from"].bare
         # await state.request(self, jid)
         self.send_presence_subscription(
@@ -219,19 +236,8 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):


     async def on_presence_unavailable(self, presence):
-        await task.stop_tasks(self, presence)
-
-
-    async def on_changed_subscription(self, presence):
-        print("on_changed_subscription")
-        print(presence)
         jid = presence["from"].bare
-        # breakpoint()
+        await task.stop_tasks(self, jid)


-    async def on_presence_unsubscribe(self, presence):
-        print("on_presence_unsubscribe")
-        print(presence)


     async def on_presence_error(self, presence):
@@ -240,43 +246,35 @@ class SlixfeedComponent(slixmpp.ComponentXMPP):


     async def on_reactions(self, message):
-        print("on_reactions")
-        print(message)
+        print(message['from'])
+        print(message['reactions']['values'])


     async def on_chatstate_active(self, message):
-        print("on_chatstate_active")
-        print(message)
+        if message['type'] in ('chat', 'normal'):
+            jid = message['from'].bare
+            await task.clean_tasks_xmpp(jid, ['status'])
+            await task.start_tasks_xmpp(self, jid, ['status'])


     async def on_chatstate_gone(self, message):
-        print("on_chatstate_gone")
-        print(message)
+        if message['type'] in ('chat', 'normal'):
+            jid = message['from'].bare
+            await task.clean_tasks_xmpp(jid, ['status'])
+            await task.start_tasks_xmpp(self, jid, ['status'])


     async def check_chatstate_composing(self, message):
-        print("def check_chatstate_composing")
-        print(message)
-        if message["type"] in ("chat", "normal"):
-            jid = message["from"].bare
-            status_text = "Press \"help\" for manual."
-            self.send_presence(
-                # pshow=status_mode,
-                pstatus=status_text,
-                pto=jid,
-            )
+        if message['type'] in ('chat', 'normal'):
+            jid = message['from'].bare
+            await task.clean_tasks_xmpp(jid, ['status'])
+            status_text = 'Press "help" for manual, or "info" for information.'
+            status.send(self, jid, status_text)


     async def check_chatstate_paused(self, message):
-        print("def check_chatstate_paused")
-        print(message)
-        if message["type"] in ("chat", "normal"):
-            jid = message["from"].bare
-            await task.refresh_task(
-                self,
-                jid,
-                task.send_status,
-                "status",
-                20
-            )
+        if message['type'] in ('chat', 'normal'):
+            jid = message['from'].bare
+            await task.clean_tasks_xmpp(jid, ['status'])
+            await task.start_tasks_xmpp(self, jid, ['status'])
@@ -13,13 +13,47 @@ TODO

 """

+import asyncio
 from slixfeed.config import get_value
 from slixfeed.dt import current_time
+from slixmpp.exceptions import IqTimeout, IqError
 from time import sleep
 import logging


-async def recover_connection(self, event, message):
+async def ping(self, jid=None):
+    """
+    Check for ping and disconnect if no ping has been received.
+
+    Parameters
+    ----------
+    jid : str, optional
+        Jabber ID. The default is None.
+
+    Returns
+    -------
+    None.
+
+    """
+    if not jid:
+        jid = self.boundjid.bare
+    while True:
+        rtt = None
+        try:
+            rtt = await self['xep_0199'].ping(jid, timeout=10)
+            logging.info("Success! RTT: %s", rtt)
+        except IqError as e:
+            logging.info("Error pinging %s: %s",
+                         jid,
+                         e.iq['error']['condition'])
+        except IqTimeout:
+            logging.info("No response from %s", jid)
+        if not rtt:
+            self.disconnect()
+        await asyncio.sleep(60 * 1)
+
+
+async def recover_connection(self, message):
     logging.warning(message)
     print(current_time(), message, "Attempting to reconnect.")
     self.connection_attempts += 1
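
The session-start code above calls task.ping_task(self) (client) and connect.ping_task(self) (component), but neither helper is shown in this section. A plausible sketch consistent with how the new ping() loop is used, with the helper body and the task attribute name treated as assumptions:

```python
import asyncio

def ping_task(self):
    # Hypothetical helper matching the calls above: (re)start the
    # keepalive loop, cancelling any loop left over from a prior session.
    task = getattr(self, "task_ping", None)
    if task is not None and not task.done():
        task.cancel()
    self.task_ping = asyncio.create_task(ping(self))
```

Since ping() disconnects after a missed reply and recover_connection counts reconnection attempts, the two functions together form a keepalive-plus-reconnect cycle.
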
@@ -1,283 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
-
-def print_info():
-    """
-    Print information.
-
-    Returns
-    -------
-    msg : str
-        Message.
-    """
-    msg = (
-        "```"
-        "\n"
-        "ABOUT\n"
-        " Slixfeed aims to be an easy to use and fully-featured news\n"
-        " aggregator bot for XMPP. It provides a convenient access to Blogs,\n"
-        " Fediverse and News websites along with filtering functionality."
-        "\n"
-        " Slixfeed is primarily designed for XMPP (aka Jabber).\n"
-        " Visit https://xmpp.org/software/ for more information.\n"
-        "\n"
-        " XMPP is the Extensible Messaging and Presence Protocol, a set\n"
-        " of open technologies for instant messaging, presence, multi-party\n"
-        " chat, voice and video calls, collaboration, lightweight\n"
-        " middleware, content syndication, and generalized routing of XML\n"
-        " data."
-        " Visit https://xmpp.org/about/ for more information on the XMPP\n"
-        " protocol."
-        " "
-        # "PLATFORMS\n"
-        # " Supported prootcols are IRC, Matrix, Tox and XMPP.\n"
-        # " For the best experience, we recommend you to use XMPP.\n"
-        # "\n"
-        "FILETYPES\n"
-        " Supported filetypes: Atom, RDF, RSS and XML.\n"
-        "\n"
-        "PROTOCOLS\n"
-        " Supported protocols: Dat, FTP, Gemini, Gopher, HTTP and IPFS.\n"
-        "\n"
-        "AUTHORS\n"
-        " Laura Lapina, Schimon Zackary.\n"
-        "\n"
-        "THANKS\n"
-        " Christian Dersch (SalixOS),"
-        " Cyrille Pontvieux (SalixOS, France),"
-        "\n"
-        " Denis Fomin (Gajim, Russia),"
-        " Dimitris Tzemos (SalixOS, Greece),"
-        "\n"
-        " Emmanuel Gil Peyrot (poezio, France),"
-        " Florent Le Coz (poezio, France),"
-        "\n"
-        " George Vlahavas (SalixOS, Greece),"
-        " Guus der Kinderen (IgniteRealtime.org Openfire, Netherlands),"
-        "\n"
-        " Maxime Buquet (slixmpp, France),"
-        " Mathieu Pasquet (slixmpp, France),"
-        "\n"
-        " Pierrick Le Brun (SalixOS, France),"
-        " Remko Tronçon (Swift, Germany),"
-        "\n"
-        " Raphael Groner (Fedora, Germany),"
-        " Thorsten Mühlfelder (SalixOS, Germany),"
-        "\n"
-        " Yann Leboulanger (Gajim, France)."
-        "\n"
-        "\n"
-        "COPYRIGHT\n"
-        " Slixfeed is free software; you can redistribute it and/or\n"
-        " modify it under the terms of the MIT License.\n"
-        "\n"
-        " Slixfeed is distributed in the hope that it will be useful,\n"
-        " but WITHOUT ANY WARRANTY; without even the implied warranty of\n"
-        " MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n"
-        " MIT License for more details.\n"
-        "\n"
-        "NOTE\n"
-        " You can run Slixfeed on your own computer, server, and\n"
-        " even on a Linux phone (i.e. Droidian, Kupfer, Mobian, NixOS,\n"
-        " postmarketOS). You can also use Termux.\n"
-        "\n"
-        " All you need is one of the above and an XMPP account to\n"
-        " connect Slixfeed to.\n"
-        "\n"
-        "DOCUMENTATION\n"
-        " Slixfeed\n"
-        " https://gitgud.io/sjehuda/slixfeed\n"
-        " Slixmpp\n"
-        " https://slixmpp.readthedocs.io/\n"
-        " feedparser\n"
-        " https://pythonhosted.org/feedparser\n"
-        "```"
-    )
-    return msg
-
-
-def print_help():
-    """
-    Print help manual.
-
-    Returns
-    -------
-    msg : str
-        Message.
-    """
-    msg = (
-        "```"
-        "\n"
-        "NAME\n"
-        "Slixfeed - News syndication bot for Jabber/XMPP\n"
-        "\n"
-        "DESCRIPTION\n"
-        " Slixfeed is a news aggregator bot for online news feeds.\n"
-        " This program is primarily designed for XMPP.\n"
-        " For more information, visit https://xmpp.org/software/\n"
-        "\n"
-        "BASIC USAGE\n"
-        " <url>\n"
-        " Add <url> to subscription list.\n"
-        " add <url> TITLE\n"
-        " Add <url> to subscription list (without validity check).\n"
-        " get <id> <type>\n"
-        " Send an article as file. Specify <id> and <type>."
-        " Supported types are HTML, MD and PDF (default).\n"
-        " join <muc>\n"
-        " Join specified groupchat.\n"
-        " read <url>\n"
-        " Display most recent 20 titles of given <url>.\n"
-        " read <url> <n>\n"
-        " Display specified entry number from given <url>.\n"
-        "\n"
-        "CUSTOM ACTIONS\n"
-        " new\n"
-        " Send only new items of newly added feeds.\n"
-        " old\n"
-        " Send all items of newly added feeds.\n"
-        " next N\n"
-        " Send N next updates.\n"
-        " reset\n"
-        " Mark all entries as read and remove all archived entries\n"
-        " reset <url>\n"
-        " Mark entries of <url> as read and remove all archived entries of <url>.\n"
-        " start\n"
-        " Enable bot and send updates.\n"
-        " stop\n"
-        " Disable bot and stop updates.\n"
-        "\n"
-        "MESSAGE OPTIONS\n"
-        " interval <num>\n"
-        " Set interval update to every <num> minutes.\n"
-        " length\n"
-        " Set maximum length of news item description. (0 for no limit)\n"
-        " quantum <num>\n"
-        " Set <num> amount of updates per interval.\n"
-        "\n"
-        "GROUPCHAT OPTIONS\n"
-        " ! (command initiation)\n"
-        " Use exclamation mark to initiate an actionable command.\n"
-        # " activate CODE\n"
-        # " Activate and command bot.\n"
-        # " demaster NICKNAME\n"
-        # " Remove master privilege.\n"
-        # " mastership NICKNAME\n"
-        # " Add master privilege.\n"
-        # " ownership NICKNAME\n"
-        # " Set new owner.\n"
-        "\n"
-        "FILTER OPTIONS\n"
-        " allow +\n"
-        " Add keywords to allow (comma separates).\n"
-        " allow -\n"
-        " Delete keywords from allow list (comma separates).\n"
-        " deny +\n"
-        " Keywords to block (comma separates).\n"
-        " deny -\n"
-        " Delete keywords from deny list (comma separates).\n"
-        # " filter clear allow\n"
-        # " Reset allow list.\n"
-        # " filter clear deny\n"
-        # " Reset deny list.\n"
-        "\n"
-        "EDIT OPTIONS\n"
-        " remove <id>\n"
-        " Remove feed of <id> from subscription list.\n"
-        " disable <id>\n"
-        " Disable updates for feed of <id>.\n"
-        " enable <id>\n"
-        " Enable updates for feed of <id>.\n"
-        "\n"
-        "SEARCH OPTIONS\n"
-        " feeds\n"
-        " List all subscriptions.\n"
-        " feeds <text>\n"
-        " Search subscriptions by given <text>.\n"
-        " search <text>\n"
-        " Search news items by given <text>.\n"
-        " recent <num>\n"
-        " List recent <num> news items (up to 50 items).\n"
-        "\n"
-        # "STATISTICS OPTIONS\n"
-        # " analyses\n"
-        # " Show report and statistics of feeds.\n"
-        # " obsolete\n"
-        # " List feeds that are not available.\n"
-        # " unread\n"
-        # " Print number of unread news items.\n"
-        # "\n"
-        "BACKUP OPTIONS\n"
-        " export opml\n"
-        " Send an OPML file with feeds.\n"
-        # " backup news html\n"
-        # " Send an HTML formatted file of your news items.\n"
-        # " backup news md\n"
-        # " Send a Markdown file of your news items.\n"
-        # " backup news text\n"
-        # " Send a Plain Text file of your news items.\n"
-        "\n"
-        "SUPPORT\n"
-        " commands\n"
-        " Print list of commands.\n"
-        " help\n"
-        " Print this help manual.\n"
-        " info\n"
-        " Print information page.\n"
-        " support\n"
-        " Join xmpp:slixfeed@chat.woodpeckersnest.space?join\n"
-        # "\n"
-        # "PROTOCOLS\n"
-        # " Supported prootcols are IRC, Matrix and XMPP.\n"
-        # " For the best experience, we recommend you to use XMPP.\n"
-        # "\n"
-        "```"
-    )
-    return msg
-
-
-def print_cmd():
-    """
-    Print list of commands.
-
-    Returns
-    -------
-    msg : str
-        Message.
-    """
-    msg = (
-        "```"
-        "\n"
-        "! : Use exclamation mark to initiate an actionable command (groupchats only).\n"
-        "<muc> : Join specified groupchat.\n"
-        "<url> : Add <url> to subscription list.\n"
-        "add <url> <title> : Add <url> to subscription list (without validity check).\n"
-        "allow + : Add keywords to allow (comma separates).\n"
-        "allow - : Delete keywords from allow list (comma separates).\n"
-        "deny + : Keywords to block (comma separates).\n"
-        "deny - : Delete keywords from deny list (comma separates).\n"
-        "disable <id> : Disable updates for feed of <id>.\n"
-        "enable <id> : Enable updates for feed of <id>.\n"
-        "export opml : Send an OPML file with feeds.\n"
-        "feeds : List all subscriptions.\n"
-        "feeds <text> : Search subscriptions by given <text>.\n"
-        "get <id> <type> : Send an article as file. Specify <id> and <type>. Supported types are HTML, MD and PDF (default).\n"
-        "interval <n> : Set interval update to every <n> minutes.\n"
-        "join <muc> : Join specified groupchat.\n"
-        "length : Set maximum length of news item description. (0 for no limit)\n"
-        "new : Send only new items of newly added feeds.\n"
-        "next <n> : Send <n> next updates.\n"
-        "old : Send all items of newly added feeds.\n"
-        "quantum <n> : Set <n> amount of updates per interval.\n"
-        "read <url> : Display most recent 20 titles of given <url>.\n"
-        "read <url> <n> : Display specified entry number from given <url>.\n"
-        "recent <n> : List recent <n> news items (up to 50 items).\n"
-        "reset : Mark all entries as read.\n"
-        "reset <url> : Mark entries of <url> as read.\n"
-        "remove <id> : Remove feed from subscription list.\n"
-        "search <text> : Search news items by given <text>.\n"
-        "start : Enable bot and send updates.\n"
-        "stop : Disable bot and stop updates.\n"
-        "```"
-    )
-    return msg
@@ -11,7 +11,9 @@ TODO

 3) If groupchat error is received, send that error message to inviter.

-4) Save name of groupchat instead of jid as name
+FIXME
+
+1) Save name of groupchat instead of jid as name

 """
 import logging
@@ -40,10 +42,16 @@ async def autojoin(self):
     for conference in conferences:
         if conference["autojoin"]:
             muc_jid = conference["jid"]
-            logging.debug(
-                "Autojoin {} ({})".format(conference["name"], muc_jid))
-            print(
-                "Autojoin {} ({})".format(conference["name"], muc_jid))
+            logging.info(
+                'Autojoin groupchat\n'
+                'Name : {}\n'
+                'JID : {}\n'
+                'Alias : {}\n'
+                .format(
+                    conference["name"],
+                    muc_jid,
+                    conference["nick"]
+                    ))
             self.plugin['xep_0045'].join_muc(
                 muc_jid,
                 conference["nick"],
@@ -71,15 +79,20 @@ async def join(self, inviter, muc_jid):
    #             "Send activation token {} to groupchat xmpp:{}?join."
    #             ).format(token, muc_jid)
    #         )
-    print("muc_jid")
-    print(muc_jid)
+    logging.info(
+        'Joining groupchat\n'
+        'JID : {}\n'
+        'Inviter : {}\n'
+        .format(
+            muc_jid,
+            inviter
+            ))
     self.plugin['xep_0045'].join_muc(
         muc_jid,
         self.alias,
         # If a room password is needed, use:
         # password=the_room_password,
         )
-    await bookmark.add(self, muc_jid)
     process.greet(self, muc_jid, chat_type="groupchat")


@@ -97,7 +110,6 @@ async def leave(self, muc_jid):
         mbody=message,
         mtype="groupchat"
         )
-    await bookmark.remove(self, muc_jid)
     self.plugin['xep_0045'].leave_muc(
         muc_jid,
         self.alias,
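
With bookmark.add and bookmark.remove lifted out of join() and leave(), the MUC helpers now touch only XEP-0045, and callers decide when a room is worth persisting (the client's on_groupchat_invite, for instance, calls bookmark.add itself). Calling the helpers from elsewhere is straightforward; the addresses below are placeholders:

```python
# Join, greet, and later leave a room (addresses are illustrative).
await muc.join(self, "operator@example.org", "news@conference.example.org")
await muc.leave(self, "news@conference.example.org")
```

Presumably this avoids writing a bookmark for rooms that are joined only transiently and left again shortly after.
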
(File diff suppressed because it is too large.)
@@ -62,6 +62,29 @@ async def set_avatar(self):
         await self.plugin["xep_0153"].set_avatar(avatar=avatar)


+def set_identity(self, category):
+    """
+    Identify for Service Discovery.
+
+    Parameters
+    ----------
+    category : str
+        "client" or "service".
+
+    Returns
+    -------
+    None.
+
+    """
+    self['xep_0030'].add_identity(
+        category=category,
+        itype='news',
+        name='slixfeed',
+        node=None,
+        jid=self.boundjid.full,
+        )
+
+
 async def set_vcard(self):
     vcard = self.plugin["xep_0054"].make_vcard()
     fields = {
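
set_identity registers the bot under disco#info with category "client" or "service" and type "news". Any peer can verify what is advertised by querying the same plugin; a sketch (the JID is a placeholder):

```python
async def check_identity(self, jid="slixfeed@example.org"):
    # Fetch disco#info and list (category, type, lang, name) tuples,
    # e.g. ("client", "news", None, "slixfeed") after set_identity().
    info = await self["xep_0030"].get_info(jid=jid)
    for identity in info["disco_info"]["identities"]:
        print(identity)
```
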
@@ -1,24 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
-
-def identity(self, category):
-    """
-    Identify for Service Duscovery
-
-    Parameters
-    ----------
-    category : str
-        "client" or "service".
-
-    Returns
-    -------
-    None.
-
-    """
-    self["xep_0030"].add_identity(
-        category=category,
-        itype="news",
-        name="slixfeed",
-        node=None,
-        jid=self.boundjid.full,
-        )
@@ -1,10 +1,10 @@
 #!/usr/bin/env python3
 # -*- coding: utf-8 -*-

-def process_task_message(self, jid, status_message):
+def send(self, jid, status_message, status_type=None):
     self.send_presence(
-        pshow="dnd",
+        pshow=status_type,
         pstatus=status_message,
-        pto=jid,
+        pfrom=self.boundjid.bare,
+        pto=jid
         )
@@ -39,7 +39,8 @@ async def get_chat_type(self, jid):
         # NOTE Is it needed? We do not interact with gateways or services
         else:
             chat_type = "chat"
-        print('JID {} chat type is {}'.format(jid, chat_type))
+        logging.info('Jabber ID: {}\n'
+                     'Chat Type: {}'.format(jid, chat_type))
         return chat_type
     # TODO Test whether this exception is realized
     except IqTimeout as e:
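
Only the tail of get_chat_type is visible here; the part above the hunk decides between "chat" and "groupchat". A hedged reconstruction of the usual disco#info check such a function performs (the exact condition in Slixfeed's source may differ):

```python
from slixmpp.exceptions import IqError, IqTimeout

async def get_chat_type(self, jid):
    # Assumption: a disco#info identity of category "conference"
    # marks the JID as a groupchat; anything else is a 1:1 chat.
    try:
        info = await self["xep_0030"].get_info(jid=jid)
    except (IqError, IqTimeout):
        return "chat"
    for category, itype, lang, name in info["disco_info"]["identities"]:
        if category == "conference":
            return "groupchat"
    return "chat"
```
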