Increase usage of "self" handlers;

Remove CSV database.
Schimon Jehudah, Adv. 2024-11-24 17:25:49 +02:00
parent 6a78afd67f
commit 878753b26e
27 changed files with 380 additions and 2768 deletions
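The theme of this commit is to load each TOML file once, keep the parsed data on "self" (or pass it as an explicit argument), and stop re-reading configuration files inside helper functions. A minimal sketch of that pattern, assuming a settings.toml with a [network] table as shown in the diffs below; the class name here is illustrative, not part of the codebase:

import os
import tomllib  # Python 3.11+


class SettingsHandlers:
    """Read settings.toml once and expose its tables as attributes."""

    def __init__(self, dir_config: str) -> None:
        filename_settings = os.path.join(dir_config, 'settings.toml')
        with open(filename_settings, mode='rb') as fn:
            data_settings = tomllib.load(fn)
        # Reused by every later call instead of re-opening the file.
        self.settings_network = data_settings['network']

    def user_agent(self) -> str:
        return self.settings_network['user_agent'] or 'Slixfeed/0.1'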


@@ -58,6 +58,8 @@ TODO
# res = response (HTTP)
from argparse import ArgumentParser
import logging
import os
import shutil
import sys
# from eliot import start_action, to_file
@@ -65,8 +67,9 @@ import sys
# # with start_action(action_type='set_date()', jid=jid):
# # with start_action(action_type='message()', msg=msg):
import slixfeed.config as config
from slixfeed.config import Settings, Share, Cache
from slixfeed.log import Logger
from slixfeed.utilities import Toml
from slixfeed.version import __version__
logger = Logger(__name__)
@@ -78,10 +81,44 @@ logger = Logger(__name__)
def main():
config_dir = config.get_default_config_directory()
logger.info('Reading configuration from {}'.format(config_dir))
print('Reading configuration from {}'.format(config_dir))
network_settings = config.get_values('settings.toml', 'network')
directory = os.path.dirname(__file__)
# Copy data files
directory_data = Share.get_directory()
if not os.path.exists(directory_data):
directory_assets = os.path.join(directory, 'assets')
directory_assets_new = shutil.copytree(directory_assets, directory_data)
print(f'Data directory {directory_assets_new} has been created and populated.')
# Copy settings files
directory_settings = Settings.get_directory()
if not os.path.exists(directory_settings):
directory_configs = os.path.join(directory, 'configs')
directory_settings_new = shutil.copytree(directory_configs, directory_settings)
print(f'Settings directory {directory_settings_new} has been created and populated.')
# Create cache directories
directory_cache = Cache.get_directory()
if not os.path.exists(directory_cache):
print(f'Creating a cache directory at {directory_cache}.')
os.mkdir(directory_cache)
for subdirectory in ('md', 'enclosure', 'markdown', 'opml', 'readability'):
subdirectory_cache = os.path.join(directory_cache, subdirectory)
if not os.path.exists(subdirectory_cache):
print(f'Creating a cache subdirectory at {subdirectory_cache}.')
os.mkdir(subdirectory_cache)
filename_settings = os.path.join(directory_settings, 'settings.toml')
settings = Toml.open_file(filename_settings)
network_settings = settings['network']
# Configure account
print('User agent:', network_settings['user_agent'] or 'Slixfeed/0.1')
if network_settings['http_proxy']: print('HTTP Proxy:', network_settings['http_proxy'])
@@ -160,28 +197,33 @@ def main():
# if not alias:
# alias = (input('Alias: ')) or 'Slixfeed'
account_xmpp = config.get_values('accounts.toml', 'xmpp')
filename_accounts = os.path.join(directory_settings, 'accounts.toml')
accounts = Toml.open_file(filename_accounts)
accounts_xmpp = accounts['xmpp']
# Try configuration file
if 'client' in account_xmpp:
if 'client' in accounts_xmpp:
from slixfeed.xmpp.client import XmppClient
jid = account_xmpp['client']['jid']
password = account_xmpp['client']['password']
alias = account_xmpp['client']['alias'] if 'alias' in account_xmpp['client'] else None
hostname = account_xmpp['client']['hostname'] if 'hostname' in account_xmpp['client'] else None
port = account_xmpp['client']['port'] if 'port' in account_xmpp['client'] else None
accounts_xmpp_client = accounts_xmpp['client']
jid = accounts_xmpp_client['jid']
password = accounts_xmpp_client['password']
alias = accounts_xmpp_client['alias'] if 'alias' in accounts_xmpp_client else None
hostname = accounts_xmpp_client['hostname'] if 'hostname' in accounts_xmpp_client else None
port = accounts_xmpp_client['port'] if 'port' in accounts_xmpp_client else None
XmppClient(jid, password, hostname, port, alias)
# xmpp_client = Slixfeed(jid, password, hostname, port, alias)
# xmpp_client.connect((hostname, port)) if hostname and port else xmpp_client.connect()
# xmpp_client.process()
if 'component' in account_xmpp:
if 'component' in accounts_xmpp:
from slixfeed.xmpp.component import XmppComponent
jid = account_xmpp['component']['jid']
secret = account_xmpp['component']['password']
alias = account_xmpp['component']['alias'] if 'alias' in account_xmpp['component'] else None
hostname = account_xmpp['component']['hostname'] if 'hostname' in account_xmpp['component'] else None
port = account_xmpp['component']['port'] if 'port' in account_xmpp['component'] else None
accounts_xmpp_component = accounts_xmpp['component']
jid = accounts_xmpp_component['jid']
secret = accounts_xmpp_component['password']
alias = accounts_xmpp_component['alias'] if 'alias' in accounts_xmpp_component else None
hostname = accounts_xmpp_component['hostname'] if 'hostname' in accounts_xmpp_component else None
port = accounts_xmpp_component['port'] if 'port' in accounts_xmpp_component else None
XmppComponent(jid, secret, hostname, port, alias)
# xmpp_component = SlixfeedComponent(jid, secret, hostname, port, alias)
# xmpp_component.connect()
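The new main() bootstrap above copies packaged defaults into the user's data and settings directories on first run. A condensed sketch of that guard-and-copy step (the function name and arguments are illustrative):

import os
import shutil


def provision(directory_source: str, directory_target: str) -> None:
    # Populate the target only if it does not exist yet; copytree()
    # refuses to overwrite an existing directory by default.
    if not os.path.exists(directory_target):
        directory_new = shutil.copytree(directory_source, directory_target)
        print(f'Directory {directory_new} has been created and populated.')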

File diff suppressed because it is too large.


@@ -3,12 +3,6 @@
"""
FIXME
1) Use dict for ConfigDefault
2) Store ConfigJabberID in dicts
TODO
1) Site-specific filter (i.e. audiobookbay).
@@ -21,14 +15,6 @@ TODO
4) Copy file from /etc/slixfeed/ or /usr/share/slixfeed/
5) Merge get_value_default into get_value.
6) Use TOML https://ruudvanasseldonk.com/2023/01/11/the-yaml-document-from-hell
7) Make the program portable (directly use the directory assets) -- Thorsten
7.1) Read missing files from base directories or either set error message.
""" """
import configparser import configparser
@ -45,49 +31,11 @@ except:
logger = Logger(__name__) logger = Logger(__name__)
class Cache: class Settings:
def get_default_cache_directory(): def get_directory():
""" """
Determine the directory path where dbfile will be stored. Determine the directory path where setting files be stored.
* If $XDG_DATA_HOME is defined, use it;
* else if $HOME exists, use it;
* else if the platform is Windows, use %APPDATA%;
* else use the current directory.
Returns
-------
str
Path to cache directory.
"""
# data_home = xdg.BaseDirectory.xdg_data_home
data_home = os.environ.get('XDG_CACHE_HOME')
if data_home is None:
if os.environ.get('HOME') is None:
if sys.platform == 'win32':
data_home = os.environ.get('APPDATA')
if data_home is None:
return os.path.abspath('.slixfeed/cache')
else:
return os.path.abspath('.slixfeed/cache')
else:
data_home = os.path.join(
os.environ.get('HOME'), '.cache'
)
return os.path.join(data_home, 'slixfeed')
# TODO Consider a class ConfigDefault for default values to be initiate at most
# basic level possible and a class ConfigJID for each JID (i.e. db_file) to be
# also initiated at same level or at least at event call, then check whether
# setting_jid.setting_key has value, otherwise resort to setting_default.setting_key.
class Config:
# TODO Write a similar function for file.
# NOTE the is a function of directory, noot file.
def get_default_config_directory():
"""
Determine the directory path where configuration will be stored.
* If $XDG_CONFIG_HOME is defined, use it;
* else if $HOME exists, use it;
@@ -99,7 +47,105 @@ class Config:
str
Path to configuration directory.
"""
# config_home = xdg.BaseDirectory.xdg_config_home
config_home = os.environ.get('XDG_CONFIG_HOME')
if config_home is None:
if os.environ.get('HOME') is None:
if sys.platform == 'win32':
config_home = os.environ.get('APPDATA')
if config_home is None:
return os.path.abspath('.')
else:
return os.path.abspath('.')
else:
config_home = os.path.join(
os.environ.get('HOME'), '.config'
)
return os.path.join(config_home, 'slixfeed')
class Share:
def get_directory():
"""
Determine the directory path where data files be stored.
* If $XDG_DATA_HOME is defined, use it;
* else if $HOME exists, use it;
* else if the platform is Windows, use %APPDATA%;
* else use the current directory.
Returns
-------
str
Path to database file.
"""
# data_home = xdg.BaseDirectory.xdg_data_home
data_home = os.environ.get('XDG_DATA_HOME')
if data_home is None:
if os.environ.get('HOME') is None:
if sys.platform == 'win32':
data_home = os.environ.get('APPDATA')
if data_home is None:
return os.path.abspath('.slixfeed/data')
else:
return os.path.abspath('.slixfeed/data')
else:
data_home = os.path.join(
os.environ.get('HOME'), '.local', 'share'
)
return os.path.join(data_home, 'slixfeed')
class Cache:
def get_directory():
"""
Determine the directory path where cache files be stored.
* If $XDG_CACHE_HOME is defined, use it;
* else if $HOME exists, use it;
* else if the platform is Windows, use %APPDATA%;
* else use the current directory.
Returns
-------
str
Path to cache directory.
"""
# cache_home = xdg.BaseDirectory.xdg_cache_home
cache_home = os.environ.get('XDG_CACHE_HOME')
if cache_home is None:
if os.environ.get('HOME') is None:
if sys.platform == 'win32':
cache_home = os.environ.get('APPDATA')
if cache_home is None:
return os.path.abspath('.slixfeed/cache')
else:
return os.path.abspath('.slixfeed/cache')
else:
cache_home = os.path.join(
os.environ.get('HOME'), '.cache'
)
return os.path.join(cache_home, 'slixfeed')
class Config:
def get_directory():
"""
Determine the directory path where setting files be stored.
* If $XDG_CONFIG_HOME is defined, use it;
* else if $HOME exists, use it;
* else if the platform is Windows, use %APPDATA%;
* else use the current directory.
Returns
-------
str
Path to configuration directory.
"""
# config_home = xdg.BaseDirectory.xdg_config_home
config_home = os.environ.get('XDG_CONFIG_HOME')
if config_home is None:
if os.environ.get('HOME') is None:
@@ -120,10 +166,6 @@ class Config:
content = tomli_w.dumps(data)
new_file.write(content)
def add_settings_default(self):
settings_default = get_values('settings.toml', 'settings')
self.defaults = settings_default
# TODO Open SQLite file once
def add_settings_jid(self, jid_bare, db_file):
self.settings[jid_bare] = {}
@@ -136,11 +178,6 @@ class Config:
# self.defaults of get_setting_value
self.settings[jid_bare][key] = self.defaults['default'][key]
def get_settings_xmpp(key=None):
result = get_values('accounts.toml', 'xmpp')
result = result[key] if key else result
return result
async def set_setting_value(self, jid_bare, db_file, key, val):
key = key.lower()
key_val = [key, val]
@@ -159,29 +196,11 @@ class Config:
value = self.defaults['default'][key]
return value
class ConfigNetwork:
def __init__(self, settings):
settings['network'] = {}
for key in ('http_proxy', 'user_agent'):
value = get_value('settings', 'Network', key)
settings['network'][key] = value
class ConfigJabberID:
def __init__(self, settings, jid_bare, db_file):
settings[jid_bare] = {}
for key in ('archive', 'enabled', 'filter', 'formatting', 'interval',
'length', 'media', 'old', 'quantum'):
value = sqlite.get_setting_value(db_file, key)
if value: value = value[0]
print(value)
settings[jid_bare][key] = value
class Data:
def get_default_data_directory():
def get_directory():
"""
Determine the directory path where dbfile will be stored.
@@ -221,28 +240,6 @@ class Data:
return os.path.join(data_home, 'slixfeed')
def get_pathname_to_omemo_directory():
"""
Get OMEMO directory.
Parameters
----------
None
Returns
-------
object
Coroutine object.
"""
db_dir = get_default_data_directory()
if not os.path.isdir(db_dir):
os.mkdir(db_dir)
if not os.path.isdir(db_dir + "/omemo"):
os.mkdir(db_dir + "/omemo")
omemo_dir = os.path.join(db_dir, "omemo")
return omemo_dir
def get_values(filename, key=None):
config_dir = get_default_config_directory()
if not os.path.isdir(config_dir):
@@ -274,78 +271,6 @@ def get_setting_value(db_file, key):
return value
# TODO Merge with backup_obsolete
def update_proxies(file, proxy_name, proxy_type, proxy_url, action='remove'):
"""
Add given URL to given list.
Parameters
----------
file : str
Filename.
proxy_name : str
Proxy name.
proxy_type : str
Proxy title.
proxy_url : str
Proxy URL.
action : str
add or remove
Returns
-------
None.
"""
data = open_config_file('proxies.toml')
proxy_list = data['proxies'][proxy_name][proxy_type]
# breakpoint()
print('####################### PROXY ######################')
proxy_index = proxy_list.index(proxy_url)
proxy_list.pop(proxy_index)
with open(file, 'w') as new_file:
content = tomli_w.dumps(data)
new_file.write(content)
# TODO Merge with update_proxies
def backup_obsolete(file, proxy_name, proxy_type, proxy_url, action='add'):
"""
Add given URL to given list.
Parameters
----------
file : str
Filename.
proxy_name : str
Proxy name.
proxy_type : str
Proxy title.
proxy_url : str
Proxy URL.
action : str
add or remove
Returns
-------
None.
"""
data = open_config_file('proxies_obsolete.toml')
proxy_list = data['proxies'][proxy_name][proxy_type]
proxy_list.extend([proxy_url])
with open(file, 'w') as new_file:
content = tomli_w.dumps(data)
new_file.write(content)
def create_skeleton(file):
with open(file, 'rb') as original_file:
data = tomllib.load(original_file)
data = clear_values(data)
with open('proxies_obsolete.toml', 'w') as new_file:
content = tomli_w.dumps(data)
new_file.write(content)
def clear_values(input):
if isinstance(input, dict):
return {k: clear_values(v) for k, v in input.items()}
@@ -355,262 +280,7 @@ def clear_values(input):
return ''
# TODO Return dict instead of list
def add_to_list(newwords, keywords):
def get_value(filename, section, keys):
"""
Get setting value.
Parameters
----------
filename : str
INI filename.
keys : list or str
A single key as string or multiple keys as list.
section : str
INI Section.
Returns
-------
result : list or str
A single value as string or multiple values as list.
"""
result = None
config_res = configparser.RawConfigParser()
config_dir = get_default_config_directory()
if not os.path.isdir(config_dir):
config_dir = '/usr/share/slixfeed/'
if not os.path.isdir(config_dir):
config_dir = os.path.dirname(__file__) + "/assets"
config_file = os.path.join(config_dir, filename + ".ini")
config_res.read(config_file)
if config_res.has_section(section):
section_res = config_res[section]
if isinstance(keys, list):
result = []
for key in keys:
if key in section_res:
value = section_res[key]
logger.debug("Found value {} for key {}".format(value, key))
else:
value = ''
logger.debug("Missing key:", key)
result.extend([value])
elif isinstance(keys, str):
key = keys
if key in section_res:
result = section_res[key]
logger.debug("Found value {} for key {}".format(result, key))
else:
result = ''
# logger.error("Missing key:", key)
if result == None:
logger.error(
"Check configuration file {}.ini for "
"missing key(s) \"{}\" under section [{}].".format(
filename, keys, section)
)
else:
return result
# TODO Store config file as an object in runtime, otherwise
# the file will be opened time and time again.
# TODO Copy file from /etc/slixfeed/ or /usr/share/slixfeed/
def get_value_default(filename, section, key):
"""
Get settings default value.
Parameters
----------
key : str
Key: archive, enabled, interval,
length, old, quantum, random.
Returns
-------
result : str
Value.
"""
config_res = configparser.RawConfigParser()
config_dir = get_default_config_directory()
if not os.path.isdir(config_dir):
config_dir = '/usr/share/slixfeed/'
config_file = os.path.join(config_dir, filename + ".ini")
config_res.read(config_file)
if config_res.has_section(section):
result = config_res[section][key]
return result
# TODO DELETE THIS FUNCTION OR KEEP ONLY THE CODE BELOW NOTE
# IF CODE BELOW NOTE IS KEPT, RENAME FUNCTION TO open_toml
def open_config_file(filename):
"""
Get settings default value.
Parameters
----------
filename : str
Filename of toml file.
Returns
-------
result : list
List of pathnames or keywords.
"""
config_dir = get_default_config_directory()
if not os.path.isdir(config_dir):
config_dir = '/usr/share/slixfeed/'
if not os.path.isdir(config_dir):
config_dir = os.path.dirname(__file__) + "/assets"
config_file = os.path.join(config_dir, filename)
# NOTE THIS IS THE IMPORTANT CODE
with open(config_file, mode="rb") as defaults:
# default = yaml.safe_load(defaults)
# result = default[key]
result = tomllib.load(defaults)
return result
def get_default_data_directory():
"""
Determine the directory path where dbfile will be stored.
* If $XDG_DATA_HOME is defined, use it;
* else if $HOME exists, use it;
* else if the platform is Windows, use %APPDATA%;
* else use the current directory.
Returns
-------
str
Path to database file.
Note
----
This function was taken from project buku.
See https://github.com/jarun/buku
* Arun Prakash Jana (jarun)
* Dmitry Marakasov (AMDmi3)
"""
# data_home = xdg.BaseDirectory.xdg_data_home
data_home = os.environ.get('XDG_DATA_HOME')
if data_home is None:
if os.environ.get('HOME') is None:
if sys.platform == 'win32':
data_home = os.environ.get('APPDATA')
if data_home is None:
return os.path.abspath('.slixfeed/data')
else:
return os.path.abspath('.slixfeed/data')
else:
data_home = os.path.join(
os.environ.get('HOME'), '.local', 'share'
)
return os.path.join(data_home, 'slixfeed')
def get_default_cache_directory():
"""
Determine the directory path where dbfile will be stored.
* If $XDG_DATA_HOME is defined, use it;
* else if $HOME exists, use it;
* else if the platform is Windows, use %APPDATA%;
* else use the current directory.
Returns
-------
str
Path to cache directory.
"""
# data_home = xdg.BaseDirectory.xdg_data_home
data_home = os.environ.get('XDG_CACHE_HOME')
if data_home is None:
if os.environ.get('HOME') is None:
if sys.platform == 'win32':
data_home = os.environ.get('APPDATA')
if data_home is None:
return os.path.abspath('.slixfeed/cache')
else:
return os.path.abspath('.slixfeed/cache')
else:
data_home = os.path.join(
os.environ.get('HOME'), '.cache'
)
return os.path.join(data_home, 'slixfeed')
# TODO Write a similar function for file.
# NOTE the is a function of directory, noot file.
def get_default_config_directory():
"""
Determine the directory path where configuration will be stored.
* If $XDG_CONFIG_HOME is defined, use it;
* else if $HOME exists, use it;
* else if the platform is Windows, use %APPDATA%;
* else use the current directory.
Returns
-------
str
Path to configuration directory.
"""
# config_home = xdg.BaseDirectory.xdg_config_home
config_home = os.environ.get('XDG_CONFIG_HOME')
if config_home is None:
if os.environ.get('HOME') is None:
if sys.platform == 'win32':
config_home = os.environ.get('APPDATA')
if config_home is None:
return os.path.abspath('.')
else:
return os.path.abspath('.')
else:
config_home = os.path.join(
os.environ.get('HOME'), '.config'
)
return os.path.join(config_home, 'slixfeed')
def get_pathname_to_database(jid_file):
"""
Callback function to instantiate action on database.
Parameters
----------
jid_file : str
Filename.
callback : ?
Function name.
message : str, optional
Optional kwarg when a message is a part or
required argument. The default is None.
Returns
-------
object
Coroutine object.
"""
db_dir = get_default_data_directory()
if not os.path.isdir(db_dir):
os.mkdir(db_dir)
if not os.path.isdir(db_dir + "/sqlite"):
os.mkdir(db_dir + "/sqlite")
db_file = os.path.join(db_dir, "sqlite", r"{}.db".format(jid_file))
sqlite.create_tables(db_file)
return db_file
# await set_default_values(db_file)
# if message:
# return await callback(db_file, message)
# else:
# return await callback(db_file)
async def add_to_list(newwords, keywords):
""" """
Append new keywords to list. Append new keywords to list.
@ -641,7 +311,7 @@ async def add_to_list(newwords, keywords):
return val return val
async def remove_from_list(newwords, keywords): def remove_from_list(newwords, keywords):
""" """
Remove given keywords from list. Remove given keywords from list.
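The four get_directory() helpers added above share one resolution order: the relevant $XDG_*_HOME variable, then a subdirectory of $HOME, then %APPDATA% on Windows, then a relative fallback. A compact sketch of that logic (the helper name and the example calls are illustrative):

import os
import sys


def resolve_directory(env_var: str, home_subdir: str, fallback: str) -> str:
    # $XDG_*_HOME first, then $HOME/<subdir>, then %APPDATA% on Windows,
    # then a relative fallback in the current directory.
    base = os.environ.get(env_var)
    if base is None:
        home = os.environ.get('HOME')
        if home is not None:
            base = os.path.join(home, home_subdir)
        elif sys.platform == 'win32':
            base = os.environ.get('APPDATA')
    if base is None:
        return os.path.abspath(fallback)
    return os.path.join(base, 'slixfeed')


# resolve_directory('XDG_CONFIG_HOME', '.config', '.')
# resolve_directory('XDG_DATA_HOME', os.path.join('.local', 'share'), '.slixfeed/data')
# resolve_directory('XDG_CACHE_HOME', '.cache', '.slixfeed/cache')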


@@ -0,0 +1 @@
proxies = {}


@@ -44,7 +44,6 @@ from asyncio import TimeoutError
# from lxml import html
# from xml.etree.ElementTree import ElementTree, ParseError
#import requests
import slixfeed.config as config
from slixfeed.log import Logger
# import urllib.request
# from urllib.error import HTTPError
@@ -87,11 +86,10 @@ class Http:
# return status
async def fetch_headers(url):
network_settings = config.get_values('settings.toml', 'network')
user_agent = (network_settings['user_agent'] or 'Slixfeed/0.1')
async def fetch_headers(settings_network, url):
user_agent = (settings_network['user_agent'] or 'Slixfeed/0.1')
headers = {'User-Agent': user_agent}
proxy = (network_settings['http_proxy'] or None)
proxy = (settings_network['http_proxy'] or None)
timeout = ClientTimeout(total=10)
async with ClientSession(headers=headers) as session:
async with session.get(url, proxy=proxy,
@@ -106,7 +104,7 @@ class Http:
# TODO Write file to disk. Consider aiofiles
async def fetch_media(url, pathname):
async def fetch_media(settings_network, url, pathname):
"""
Download media content of given URL.
@@ -122,10 +120,9 @@ class Http:
msg: list or str
Document or error message.
"""
network_settings = config.get_values('settings.toml', 'network')
user_agent = (network_settings['user_agent'] or 'Slixfeed/0.1')
user_agent = (settings_network['user_agent'] or 'Slixfeed/0.1')
headers = {'User-Agent': user_agent}
proxy = (network_settings['http_proxy'] or None)
proxy = (settings_network['http_proxy'] or None)
timeout = ClientTimeout(total=10)
async with ClientSession(headers=headers) as session:
# async with ClientSession(trust_env=True) as session:
@@ -179,7 +176,7 @@ class Http:
return result
def http_response(url):
def http_response(settings_network, url):
"""
Download response headers.
@@ -201,10 +198,7 @@ class Http:
response.status_code
response.url
"""
user_agent = (
config.get_value(
"settings", "Network", "user_agent")
) or 'Slixfeed/0.1'
user_agent = settings_network['user_agent'] or 'Slixfeed/0.1'
headers = {
"User-Agent": user_agent
}
@@ -220,7 +214,7 @@ class Http:
return response
async def http(url):
async def http(settings_network, url):
"""
Download content of given URL.
@@ -234,10 +228,9 @@ async def http(url):
msg: list or str
Document or error message.
"""
network_settings = config.get_values('settings.toml', 'network')
user_agent = (network_settings['user_agent'] or 'Slixfeed/0.1')
user_agent = (settings_network['user_agent'] or 'Slixfeed/0.1')
headers = {'User-Agent': user_agent}
proxy = (network_settings['http_proxy'] or None)
proxy = (settings_network['http_proxy'] or None)
timeout = ClientTimeout(total=10)
async with ClientSession(headers=headers) as session:
# async with ClientSession(trust_env=True) as session:
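With these signature changes, callers pass the already-parsed [network] table instead of having fetch re-read settings.toml. A hypothetical call site, where the dictionary literal stands in for self.settings_network and fetch_headers() is assumed to return the response headers mapping:

import asyncio

from slixfeed.fetch import Http

# Shaped like the [network] table of settings.toml: user_agent, http_proxy.
settings_network = {'user_agent': 'Slixfeed/0.1', 'http_proxy': ''}


async def print_content_type(url: str) -> None:
    headers = await Http.fetch_headers(settings_network, url)
    print(headers.get('Content-Type'))


# asyncio.run(print_content_type('https://example.org/feed.atom'))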


@@ -27,7 +27,6 @@ TODO
import asyncio
from feedparser import parse
import os
import slixfeed.config as config
from slixfeed.config import Config
import slixfeed.fetch as fetch
from slixfeed.log import Logger,Message
@@ -53,7 +52,7 @@ class Feed:
os.mkdir(dir_cache + '/' + ext)
filename = os.path.join(
dir_cache, ext, 'slixfeed_' + DateAndTime.timestamp() + '.' + ext)
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
results = sqlite.get_feeds(db_file)
match ext:
# case 'html':
@@ -347,8 +346,10 @@ class Feed:
if new_entries:
await sqlite.add_entries_and_update_feed_state(
db_file, feed_id, new_entries)
old = Config.get_setting_value(self, jid_bare, 'old')
if not old: await sqlite.mark_feed_as_read(db_file, feed_id)
breakpoint()
old = self.settings[jid_bare]['old'] or self.defaults['default']['old']
if not old: await sqlite.mark_feed_as_read(db_file,
feed_id)
result_final = {'link' : url,
'index' : feed_id,
'name' : title,
@@ -362,7 +363,8 @@ class Feed:
# NOTE Do not be tempted to return a compact dictionary.
# That is, dictionary within dictionary
# Return multiple dictionaries in a list or tuple.
result = await FeedDiscovery.probe_page(url, document)
result = await FeedDiscovery.probe_page(
self.settings_network, self.pathnames, url, document)
if not result:
# Get out of the loop with dict indicating error.
result_final = {'link' : url,
@@ -520,7 +522,7 @@ class Feed:
# NOTE This function is not being utilized
async def download_feed(self, db_file, feed_url):
async def download_feed(settings_network, db_file, feed_url):
"""
Process feed content.
@@ -535,7 +537,7 @@ class Feed:
logger.debug('{}: db_file: {} url: {}'
.format(function_name, db_file, feed_url))
if isinstance(feed_url, tuple): feed_url = feed_url[0]
result = await fetch.http(feed_url)
result = await fetch.http(settings_network, feed_url)
feed_id = sqlite.get_feed_id(db_file, feed_url)
feed_id = feed_id[0]
status_code = result['status_code']
@@ -932,7 +934,7 @@ class FeedDiscovery:
# else:
# return await callback(url)
async def probe_page(url, document=None):
async def probe_page(settings_network, pathnames, url, document=None):
"""
Parameters
----------
@@ -947,7 +949,7 @@ class FeedDiscovery:
Single URL as list or selection of URLs as str.
"""
if not document:
response = await fetch.http(url)
response = await fetch.http(settings_network, url)
if not response['error']:
document = response['content']
try:
@@ -985,10 +987,10 @@ class FeedDiscovery:
result = FeedDiscovery.feed_mode_auto_discovery(url, tree)
if not result:
logger.debug("Feed link scan mode engaged for {}".format(url))
result = FeedDiscovery.feed_mode_scan(url, tree)
result = FeedDiscovery.feed_mode_scan(url, tree, pathnames)
if not result:
logger.debug("Feed arbitrary mode engaged for {}".format(url))
result = FeedDiscovery.feed_mode_guess(url, tree)
result = FeedDiscovery.feed_mode_guess(url, pathnames)
if not result:
logger.debug("No feeds were found for {}".format(url))
result = None
@@ -997,7 +999,7 @@ class FeedDiscovery:
# TODO Improve scan by gradual decreasing of path
def feed_mode_guess(url, tree):
def feed_mode_guess(url, pathnames):
"""
Lookup for feeds by pathname using HTTP Requests.
@@ -1007,8 +1009,8 @@
Path to database file.
url : str
URL.
tree : TYPE
DESCRIPTION.
pathnames : list
pathnames.
Returns
-------
@@ -1017,18 +1019,17 @@ class FeedDiscovery:
"""
urls = []
parted_url = urlsplit(url)
paths = config.open_config_file("lists.toml")["pathnames"]
# Check whether URL has path (i.e. not root)
# Check parted_url.path to avoid error in case root wasn't given
# TODO Make more tests
if parted_url.path and parted_url.path.split('/')[1]:
paths.extend(
pathnames.extend(
[".atom", ".feed", ".rdf", ".rss"]
) if '.rss' not in paths else -1
) if '.rss' not in pathnames else -1
# if paths.index('.rss'):
# paths.extend([".atom", ".feed", ".rdf", ".rss"])
parted_url_path = parted_url.path if parted_url.path else '/'
for path in paths:
for path in pathnames:
address = Url.join_url(url, parted_url_path.split('/')[1] + path)
if address not in urls:
urls.extend([address])
@@ -1037,7 +1038,7 @@ class FeedDiscovery:
return urls
def feed_mode_scan(url, tree):
def feed_mode_scan(url, tree, pathnames):
"""
Scan page for potential feeds by pathname.
@@ -1056,8 +1057,7 @@ class FeedDiscovery:
Message with URLs.
"""
urls = []
paths = config.open_config_file("lists.toml")["pathnames"]
for path in paths:
for path in pathnames:
# xpath_query = "//*[@*[contains(.,'{}')]]".format(path)
# xpath_query = "//a[contains(@href,'{}')]".format(path)
num = 5
@@ -1274,7 +1274,7 @@ class FeedTask:
# print('Scanning for updates for JID {}'.format(jid_bare))
logger.info('Scanning for updates for JID {}'.format(jid_bare))
while True:
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
urls = sqlite.get_active_feeds_url_sorted_by_last_scanned(db_file)
for url in urls:
#Message.printer('Scanning updates for URL {} ...'.format(url))
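feed_mode_guess() and feed_mode_scan() now receive the pathnames list (loaded once from lists.toml by the caller) instead of opening the file themselves. A simplified sketch of the guessing idea, not the exact join logic used above:

from urllib.parse import urlsplit, urlunsplit


def guess_feed_urls(url: str, pathnames: list) -> list:
    # Append each configured pathname (e.g. '/feed', '/rss.xml') to the
    # site root and return the candidate addresses to probe.
    parts = urlsplit(url)
    root = urlunsplit((parts.scheme, parts.netloc, '', '', ''))
    return [root + path for path in pathnames]


# guess_feed_urls('https://example.org/blog/post', ['/feed', '/rss.xml', '/atom.xml'])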


@@ -46,7 +46,6 @@ import hashlib
from lxml import etree, html
import os
import random
import slixfeed.config as config
import slixfeed.fetch as fetch
from slixfeed.log import Logger
import sys
@@ -286,11 +285,11 @@ class DateAndTime:
class Documentation:
def manual(filename, section=None, command=None):
def manual(config_dir, section=None, command=None):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: filename: {}'.format(function_name, filename))
config_dir = config.get_default_config_directory()
with open(config_dir + '/' + filename, mode="rb") as commands:
logger.debug('{}: filename: {}'.format(function_name, config_dir))
filename = os.path.join(config_dir, 'commands.toml')
with open(filename, mode="rb") as commands:
cmds = tomllib.load(commands)
if section == 'all':
cmd_list = ''
@@ -450,6 +449,19 @@ class Task:
.format(task, jid_bare))
class Toml:
def open_file(filename: str) -> dict:
with open(filename, mode="rb") as fn:
data = tomllib.load(fn)
return data
def save_file(filename: str, data: dict) -> None:
with open(filename, 'w') as fn:
data_as_string = tomli_w.dumps(data)
fn.write(data_as_string)
""" """
FIXME FIXME
@ -486,21 +498,23 @@ class Url:
return hostname return hostname
async def replace_hostname(url, url_type): async def replace_hostname(configuration_directory, proxies, settings_network, url, url_type):
""" """
Replace hostname. Replace hostname.
Parameters Parameters
---------- ----------
proxies : list
A list of hostnames.
url : str url : str
URL. A URL.
url_type : str url_type : str
"feed" or "link". A "feed" or a "link".
Returns Returns
------- -------
url : str url : str
URL. A processed URL.
""" """
url_new = None url_new = None
parted_url = urlsplit(url) parted_url = urlsplit(url)
@@ -510,7 +524,6 @@ class Url:
pathname = parted_url.path
queries = parted_url.query
fragment = parted_url.fragment
proxies = config.open_config_file('proxies.toml')['proxies']
for proxy_name in proxies:
proxy = proxies[proxy_name]
if hostname in proxy['hostname'] and url_type in proxy['type']:
@@ -530,26 +543,22 @@ class Url:
print(proxy_url)
print(url_new)
print('>>>')
response = await fetch.http(url_new)
response = await fetch.http(settings_network, url_new)
if (response and
response['status_code'] == 200 and
# response.reason == 'OK' and
url_new.startswith(proxy_url)):
break
else:
config_dir = config.get_default_config_directory()
proxies_obsolete_file = config_dir + '/proxies_obsolete.toml'
proxies_file = config_dir + '/proxies.toml'
if not os.path.isfile(proxies_obsolete_file):
config.create_skeleton(proxies_file)
config.backup_obsolete(proxies_obsolete_file,
proxy_name, proxy_type,
proxy_url)
try:
config.update_proxies(proxies_file, proxy_name,
proxy_type, proxy_url)
except ValueError as e:
logger.error([str(e), proxy_url])
proxies_obsolete_file = os.path.join(configuration_directory, 'proxies_obsolete.toml')
proxies_file = os.path.join(configuration_directory, 'proxies.toml')
breakpoint()
proxies_obsolete = Toml.open_file(proxies_obsolete_file)
proxies_obsolete['proxies'][proxy_name][proxy_type].append(proxy_url)
Toml.save_file(proxies_obsolete_file, proxies_obsolete)
# TODO self.proxies might need to be changed, so self probably should be passed.
proxies['proxies'][proxy_name][proxy_type].remove(proxy_url)
Toml.save_file(proxies_file, proxies)
url_new = None
else:
logger.warning('No proxy URLs for {}. '
@@ -560,19 +569,21 @@ class Url:
return url_new
def remove_tracking_parameters(url):
def remove_tracking_parameters(trackers, url):
"""
Remove queries with tracking parameters.
Parameters
----------
trackers : list
A list of queries.
url : str
URL.
A URL.
Returns
-------
url : str
URL.
A processed URL.
"""
if url.startswith('data:') and ';base64,' in url:
return url
@@ -582,7 +593,6 @@ class Url:
pathname = parted_url.path
queries = parse_qs(parted_url.query)
fragment = parted_url.fragment
trackers = config.open_config_file('queries.toml')['trackers']
for tracker in trackers:
if tracker in queries: del queries[tracker]
queries_new = urlencode(queries, doseq=True)
@@ -821,12 +831,12 @@ class Utilities:
return url_digest
def pick_a_feed(lang=None):
def pick_a_feed(dir_config, lang=None):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: lang: {}'
.format(function_name, lang))
config_dir = config.get_default_config_directory()
with open(config_dir + '/' + 'feeds.toml', mode="rb") as feeds:
filename_feeds = os.path.join(dir_config, 'feeds.toml')
with open(filename_feeds, mode="rb") as feeds:
urls = tomllib.load(feeds)
import random
url = random.choice(urls['feeds'])
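The new Toml helper in utilities wraps tomllib for reading and tomli_w for writing, which is also how replace_hostname() now retires dead proxy URLs. A small round-trip sketch under the proxies.toml structure shown above; the function name is illustrative:

import tomllib  # Python 3.11+

import tomli_w


def move_proxy_to_obsolete(proxies_file: str, obsolete_file: str,
                           name: str, kind: str, url: str) -> None:
    # Read both documents, move the URL, and write the files back,
    # mirroring the Toml.open_file()/Toml.save_file() calls above.
    with open(proxies_file, mode='rb') as fn:
        proxies = tomllib.load(fn)
    with open(obsolete_file, mode='rb') as fn:
        obsolete = tomllib.load(fn)
    proxies['proxies'][name][kind].remove(url)
    obsolete['proxies'][name][kind].append(url)
    with open(proxies_file, 'w') as fn:
        fn.write(tomli_w.dumps(proxies))
    with open(obsolete_file, 'w') as fn:
        fn.write(tomli_w.dumps(obsolete))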


@@ -1,2 +1,2 @@
__version__ = '0.1.101'
__version_info__ = (0, 1, 101)
__version__ = '0.1.102'
__version_info__ = (0, 1, 102)


@@ -27,7 +27,6 @@ import asyncio
import os
from pathlib import Path
from random import randrange # pending_tasks: Use a list and read the first index (i.e. index 0).
import slixfeed.config as config
from slixfeed.config import Config
import slixfeed.fetch as fetch
from slixfeed.fetch import Http
@@ -131,20 +130,20 @@ class XmppChat:
return
response = None
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
match command_lowercase:
case 'help':
command_list = XmppCommands.print_help()
command_list = XmppCommands.print_help(self.dir_config)
response = ('Available command keys:\n'
'```\n{}\n```\n'
'Usage: `help <key>`'
.format(command_list))
f'```\n{command_list}\n```\n'
'Usage: `help <key>`')
case 'help all':
command_list = Documentation.manual(
'commands.toml', section='all')
self.dir_config, section='all')
response = ('Complete list of commands:\n'
'```\n{}\n```'
.format(command_list))
f'```\n{command_list}\n```'
.format())
case _ if command_lowercase.startswith('help'):
command = command[5:].lower()
command = command.split(' ')
@@ -152,7 +151,7 @@ class XmppChat:
command_root = command[0]
command_name = command[1]
command_list = Documentation.manual(
'commands.toml', section=command_root,
self.dir_config, section=command_root,
command=command_name)
if command_list:
command_list = ''.join(command_list)
@@ -162,7 +161,7 @@ class XmppChat:
elif len(command) == 1:
command = command[0]
command_list = Documentation.manual(
'commands.toml', command)
self.dir_config, command)
if command_list:
command_list = ' '.join(command_list)
response = (f'Available command `{command}` keys:\n'
@@ -619,25 +618,6 @@ class XmppChat:
response_finished = f'Finished. Total time: {command_time_total}s'
XmppMessage.send_reply(self, message, response_finished)
# if not response: response = 'EMPTY MESSAGE - ACTION ONLY'
# data_dir = config.get_default_data_directory()
# if not os.path.isdir(data_dir):
# os.mkdir(data_dir)
# if not os.path.isdir(data_dir + '/logs/'):
# os.mkdir(data_dir + '/logs/')
# MD.log_to_markdown(
# dt.current_time(), os.path.join(data_dir, 'logs', jid_bare),
# jid_bare, command)
# MD.log_to_markdown(
# dt.current_time(), os.path.join(data_dir, 'logs', jid_bare),
# jid_bare, response)
# print(
# f'Message : {command}\n'
# f'JID : {jid_bare}\n'
# f'{response}\n'
# )
class XmppChatAction:
@@ -655,7 +635,7 @@ class XmppChatAction:
"""
function_name = sys._getframe().f_code.co_name
logger.debug(f'{function_name}: jid: {jid_bare} num: {num}')
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
encrypt_omemo = Config.get_setting_value(self, jid_bare, 'omemo')
encrypted = True if encrypt_omemo else False
jid = JID(jid_bare)
@@ -698,7 +678,7 @@ class XmppChatAction:
else:
media_url = await Html.extract_image_from_html(url)
try:
http_headers = await Http.fetch_headers(media_url)
http_headers = await Http.fetch_headers(self.settings_network, media_url)
if ('Content-Length' in http_headers):
if int(http_headers['Content-Length']) < 100000:
media_url = None
@@ -727,10 +707,10 @@ class XmppChatAction:
if not filename: breakpoint()
pathname = os.path.join(self.dir_cache, filename)
# http_response = await Http.response(media_url)
http_headers = await Http.fetch_headers(media_url)
http_headers = await Http.fetch_headers(self.settings_network, media_url)
if ('Content-Length' in http_headers and
int(http_headers['Content-Length']) < 3000000):
status = await Http.fetch_media(media_url, pathname)
status = await Http.fetch_media(self.settings_network, media_url, pathname)
if status:
filesize = os.path.getsize(pathname)
media_url_new = await XmppUpload.start(
@@ -766,7 +746,7 @@ class XmppChatAction:
# NOTE Tested against Gajim.
# FIXME Jandle data: URIs.
if not media_url.startswith('data:'):
http_headers = await Http.fetch_headers(media_url)
http_headers = await Http.fetch_headers(self.settings_network, media_url)
if ('Content-Length' in http_headers and
int(http_headers['Content-Length']) > 100000):
print(http_headers['Content-Length'])
@@ -876,8 +856,8 @@ class XmppChatAction:
else:
summary = '*** No summary ***'
link = result[2]
link = Url.remove_tracking_parameters(link)
link = await Url.replace_hostname(link, "link") or link
link = Url.remove_tracking_parameters(self.trackers, link)
link = await Url.replace_hostname(self.dir_config, self.proxies, self.settings_network, link, "link") or link
feed_id = result[4]
# news_item = (f'\n{str(title)}\n{str(link)}\n{str(feed_title)} [{str(ix)}]\n')
formatting = Config.get_setting_value(self, jid, 'formatting')
@@ -895,7 +875,7 @@ class XmppChatTask:
async def task_message(self, jid_bare):
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
while True:
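Throughout the chat and client handlers, config.get_pathname_to_database(jid_bare) is replaced by an inline path join under the data directory. A tiny helper equivalent to that repeated expression, assuming the sqlite subdirectory already exists:

import os


def database_pathname(dir_data: str, jid_bare: str) -> str:
    # Same expression as used inline above:
    # os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
    return os.path.join(dir_data, 'sqlite', f'{jid_bare}.db')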


@@ -8,7 +8,7 @@ TODO
1) Assure message delivery before calling a new task.
See https://slixmpp.readthedocs.io/en/latest/event_index.html#term-marker_acknowledged
2) XHTTML-IM
2) XHTML-IM
case _ if message_lowercase.startswith("html"):
message['html']="
Parse me!
@@ -46,13 +46,12 @@ import slixmpp
# import xml.etree.ElementTree as ET
# from lxml import etree
import slixfeed.config as config
from slixfeed.config import Cache, Config, Data
import slixfeed.fetch as fetch
from slixfeed.log import Logger
import slixfeed.sqlite as sqlite
from slixfeed.syndication import Feed, FeedDiscovery, FeedTask, Opml
from slixfeed.utilities import DateAndTime, Html, String, Task, Url, Utilities
from slixfeed.utilities import DateAndTime, Html, String, Task, Toml, Url, Utilities
from slixfeed.version import __version__
from slixfeed.xmpp.bookmark import XmppBookmark
from slixfeed.xmpp.chat import XmppChat, XmppChatTask
@@ -118,19 +117,46 @@ class XmppClient(slixmpp.ClientXMPP):
self.task_ping_instance = {}
# Handlers for directories
self.dir_config = Config.get_default_config_directory()
self.dir_cache = Cache.get_default_cache_directory()
self.dir_data = Data.get_default_data_directory()
self.dir_config = Config.get_directory()
self.dir_cache = Cache.get_directory()
self.dir_data = Data.get_directory()
# Handler for default configuration
self.defaults = config.get_values('settings.toml')
filename_settings = os.path.join(self.dir_config, 'settings.toml')
data_settings = Toml.open_file(filename_settings)
# TODO self.defaults = data_settings['default']
self.defaults = data_settings
# Handler for network configurations
self.settings_network = data_settings['network']
# Handler for proxies
filename_proxies = os.path.join(self.dir_config, 'proxies.toml')
self.data_proxies = Toml.open_file(filename_proxies)
self.proxies = self.data_proxies['proxies']
# Handler for queries
filename_queries = os.path.join(self.dir_config, 'queries.toml')
self.data_queries = Toml.open_file(filename_queries)
self.trackers = self.data_queries['trackers']
# Handler for lists
filename_lists = os.path.join(self.dir_config, 'lists.toml')
self.data_lists = Toml.open_file(filename_lists)
self.pathnames = self.data_lists['pathnames']
# Handler for configuration
self.settings = {}
# Handler for operators
self.operators = config.get_values('accounts.toml', 'xmpp')['operators']
filename_accounts = os.path.join(self.dir_config, 'accounts.toml')
self.data_accounts = Toml.open_file(filename_accounts)
self.data_accounts_xmpp = self.data_accounts['xmpp']
self.operators = self.data_accounts_xmpp['operators']
# Handlers for whitelist and blacklist
self.selector = config.get_values('selector.toml')
filename_selector = os.path.join(self.dir_config, 'selector.toml')
self.selector = Toml.open_file(filename_selector)
paywall_enabled = self.selector['enabled']
self.whitelist = self.selector['whitelist']
self.blacklist = self.selector['blacklist']
@@ -142,7 +168,7 @@ class XmppClient(slixmpp.ClientXMPP):
# Handlers for connection events
self.connection_attempts = 0
self.max_connection_attempts = 10
self.reconnect_timeout = config.get_values('accounts.toml', 'xmpp')['settings']['reconnect_timeout']
self.reconnect_timeout = self.data_accounts_xmpp['settings']['reconnect_timeout']
self.register_plugin('xep_0004') # Data Forms
self.register_plugin('xep_0030') # Service Discovery
@@ -403,7 +429,7 @@ class XmppClient(slixmpp.ClientXMPP):
for result in await XmppPubsub.get_pubsub_services(self):
jid_bare = result['jid']
if jid_bare not in self.settings:
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
Config.add_settings_jid(self, jid_bare, db_file)
#await XmppPubsubTask.task_publish(self, jid_bare)
#await FeedTask.check_updates(self, jid_bare)
@@ -465,7 +491,7 @@ class XmppClient(slixmpp.ClientXMPP):
message_log = '{}: jid_full: {}'
logger.debug(message_log.format(function_name, jid_full))
jid_bare = message['from'].bare
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
if jid_bare == self.boundjid.bare:
@@ -1113,7 +1139,7 @@ class XmppClient(slixmpp.ClientXMPP):
ftype='hidden',
value=jid_bare)
num = 100
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
results = sqlite.get_entries(db_file, num)
subtitle = 'Recent {} updates'.format(num)
if results:
@@ -1207,7 +1233,7 @@ class XmppClient(slixmpp.ClientXMPP):
if not node: node = Url.get_hostname(url)
form = self['xep_0004'].make_form('form', 'Publish')
while True:
result = await fetch.http(url)
result = await fetch.http(self.settings_network, url)
status = result['status_code']
if not result['error']:
document = result['content']
@@ -1249,7 +1275,7 @@ class XmppClient(slixmpp.ClientXMPP):
session['payload'] = form
break
else:
result = await FeedDiscovery.probe_page(url, document)
result = await FeedDiscovery.probe_page(self.settings_network, self.pathnames, url, document)
if isinstance(result, list):
results = result
form['instructions'] = ('Discovered {} subscriptions '
@@ -1323,7 +1349,7 @@ class XmppClient(slixmpp.ClientXMPP):
url = values['url'][0]
# xep = values['xep'][0]
xep = None
result = await fetch.http(url)
result = await fetch.http(self.settings_network, url)
if 'content' in result:
document = result['content']
feed = parse(document)
@@ -1375,7 +1401,7 @@ class XmppClient(slixmpp.ClientXMPP):
logger.debug('{}: jid_full: {}'
.format(function_name, jid_full))
jid_bare = session['from'].bare
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
form = self['xep_0004'].make_form('form', 'Profile')
@@ -1472,7 +1498,7 @@ class XmppClient(slixmpp.ClientXMPP):
chat_type = await XmppUtilities.get_chat_type(self, jid_bare)
if XmppUtilities.is_access(self, jid, chat_type):
jid = session['from'].bare
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
form = self['xep_0004'].make_form('form', 'Filters')
form['instructions'] = ('Filters allow you to skip news items '
'that you may not be interested at. Use '
@@ -1547,7 +1573,7 @@ class XmppClient(slixmpp.ClientXMPP):
jid_bare = session['from'].bare
# form = self['xep_0004'].make_form('result', 'Done')
# form['instructions'] = ('✅️ Filters have been updated')
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
# In this case (as is typical), the payload is a form
values = payload['values']
for key in values:
@@ -1556,7 +1582,7 @@ class XmppClient(slixmpp.ClientXMPP):
# an empty form instead of editing a form.
# keywords = sqlite.get_filter_value(db_file, key)
keywords = ''
val = await config.add_to_list(val, keywords) if val else ''
val = config.add_to_list(val, keywords) if val else ''
if sqlite.is_filter_key(db_file, key):
await sqlite.update_filter_value(db_file, [key, val])
elif val:
@@ -1700,7 +1726,7 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(var='jid', form.add_field(var='jid',
ftype='hidden', ftype='hidden',
value=jid_bare) value=jid_bare)
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
num = 100 num = 100
match values['action']: match values['action']:
case 'all': case 'all':
@ -1760,18 +1786,18 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(var='jid', form.add_field(var='jid',
ftype='hidden', ftype='hidden',
value=jid) value=jid)
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
title = sqlite.get_entry_title(db_file, ix) title = sqlite.get_entry_title(db_file, ix)
title = title[0] if title else 'Untitled' title = title[0] if title else 'Untitled'
form['instructions'] = title form['instructions'] = title
url = sqlite.get_entry_url(db_file, ix) url = sqlite.get_entry_url(db_file, ix)
url = url[0] # TODO Handle the case where the index no longer exists url = url[0] # TODO Handle the case where the index no longer exists
logger.debug('Original URL: {}'.format(url)) logger.debug('Original URL: {}'.format(url))
url = Url.remove_tracking_parameters(url) url = Url.remove_tracking_parameters(self.trackers, url)
logger.debug('Processed URL (tracker removal): {}'.format(url)) logger.debug('Processed URL (tracker removal): {}'.format(url))
url = (await Url.replace_hostname(url, 'link')) or url url = (await Url.replace_hostname(self.dir_config, self.proxies, self.settings_network, url, 'link')) or url
logger.debug('Processed URL (replace hostname): {}'.format(url)) logger.debug('Processed URL (replace hostname): {}'.format(url))
# result = await fetch.http(url) # result = await fetch.http(self.settings_network, url)
# if 'content' in result: # if 'content' in result:
# data = result['content'] # data = result['content']
# summary = action.get_document_content_as_text(data) # summary = action.get_document_content_as_text(data)
@ -1836,7 +1862,7 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(var='jid', form.add_field(var='jid',
ftype='hidden', ftype='hidden',
value=jid_bare) value=jid_bare)
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if identifier and sqlite.check_identifier_exist(db_file, identifier): if identifier and sqlite.check_identifier_exist(db_file, identifier):
form['title'] = 'Conflict' form['title'] = 'Conflict'
form['instructions'] = ('Name "{}" already exists. Choose a ' form['instructions'] = ('Name "{}" already exists. Choose a '
@ -2039,7 +2065,7 @@ class XmppClient(slixmpp.ClientXMPP):
if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values: if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values:
jid_bare = values['jid'][0] jid_bare = values['jid'][0]
del values['jid'] del values['jid']
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
for key in values: for key in values:
value = 1 if values[key] else 0 value = 1 if values[key] else 0
await sqlite.set_enabled_status(db_file, key, value) await sqlite.set_enabled_status(db_file, key, value)
@ -2064,7 +2090,7 @@ class XmppClient(slixmpp.ClientXMPP):
if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values: if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values:
jid_bare = values['jid'][0] jid_bare = values['jid'][0]
del values['jid'] del values['jid']
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
subscriptions = '' subscriptions = ''
ixs = values['subscriptions'] ixs = values['subscriptions']
for ix in ixs: for ix in ixs:
@ -2297,7 +2323,7 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(ftype='hidden', form.add_field(ftype='hidden',
value=jid_bare, value=jid_bare,
var='jid') var='jid')
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
match values['action']: match values['action']:
case 'browse': case 'browse':
form['instructions'] = 'Editing subscriptions' form['instructions'] = 'Editing subscriptions'
@ -2391,7 +2417,7 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(ftype='hidden', form.add_field(ftype='hidden',
value=jid_bare, value=jid_bare,
var='jid') var='jid')
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
tag_id = values['tag'] tag_id = values['tag']
tag_name = sqlite.get_tag_name(db_file, tag_id)[0] tag_name = sqlite.get_tag_name(db_file, tag_id)[0]
form['instructions'] = 'Subscriptions tagged with "{}"'.format(tag_name) form['instructions'] = 'Subscriptions tagged with "{}"'.format(tag_name)
@ -2429,7 +2455,7 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(ftype='hidden', form.add_field(ftype='hidden',
value=jid_bare, value=jid_bare,
var='jid') var='jid')
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if 'subscription' in values: urls = values['subscription'] if 'subscription' in values: urls = values['subscription']
elif 'subscriptions' in values: urls = values['subscriptions'] elif 'subscriptions' in values: urls = values['subscriptions']
url_count = len(urls) url_count = len(urls)
@ -2522,7 +2548,7 @@ class XmppClient(slixmpp.ClientXMPP):
values = payload['values'] values = payload['values']
if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values: if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values:
jid_bare = values['jid'][0] jid_bare = values['jid'][0]
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
# url = values['url'] # url = values['url']
# feed_id = sqlite.get_feed_id(db_file, url) # feed_id = sqlite.get_feed_id(db_file, url)
# feed_id = feed_id[0] # feed_id = feed_id[0]
@ -2923,8 +2949,8 @@ class XmppClient(slixmpp.ClientXMPP):
if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values: if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values:
jid = values['jid'] jid = values['jid']
jid_bare = jid[0] if isinstance(jid, list) else jid jid_bare = jid[0] if isinstance(jid, list) else jid
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
result = await fetch.http(url) result = await fetch.http(self.settings_network, url)
count = await Opml.import_from_file(db_file, result) count = await Opml.import_from_file(db_file, result)
try: try:
int(count) int(count)
@ -3011,7 +3037,7 @@ class XmppClient(slixmpp.ClientXMPP):
form = self['xep_0004'].make_form('form', 'Subscribe') form = self['xep_0004'].make_form('form', 'Subscribe')
# NOTE A Refresh button would be useful # NOTE A Refresh button would be useful
form['instructions'] = 'Featured subscriptions' form['instructions'] = 'Featured subscriptions'
url = Utilities.pick_a_feed() url = Utilities.pick_a_feed(self.dir_config)
# options = form.add_field(desc='Click to subscribe.', # options = form.add_field(desc='Click to subscribe.',
# ftype="boolean", # ftype="boolean",
# label='Subscribe to {}?'.format(url['name']), # label='Subscribe to {}?'.format(url['name']),
@ -3024,13 +3050,13 @@ class XmppClient(slixmpp.ClientXMPP):
label='Subscribe', label='Subscribe',
var='subscription') var='subscription')
for i in range(10): for i in range(10):
url = Utilities.pick_a_feed() url = Utilities.pick_a_feed(self.dir_config)
options.addOption(url['name'], url['link']) options.addOption(url['name'], url['link'])
# jid_bare = session['from'].bare # jid_bare = session['from'].bare
if '@' in jid_bare: if '@' in jid_bare:
hostname = jid_bare.split('@')[1] hostname = jid_bare.split('@')[1]
url = 'http://' + hostname url = 'http://' + hostname
result = await FeedDiscovery.probe_page(url) result = await FeedDiscovery.probe_page(self.settings_network, self.pathnames, url)
if not result: if not result:
url = {'url' : url, url = {'url' : url,
'index' : None, 'index' : None,
@ -3448,7 +3474,7 @@ class XmppClient(slixmpp.ClientXMPP):
if key: if key:
jid_bare = key jid_bare = key
value = values[key] value = values[key]
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings: if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file) Config.add_settings_jid(self, jid_bare, db_file)
await Config.set_setting_value( await Config.set_setting_value(
@ -3711,7 +3737,7 @@ class XmppClient(slixmpp.ClientXMPP):
jid_bare = session['from'].bare jid_bare = session['from'].bare
chat_type = await XmppUtilities.get_chat_type(self, jid_bare) chat_type = await XmppUtilities.get_chat_type(self, jid_bare)
if XmppUtilities.is_access(self, jid, chat_type): if XmppUtilities.is_access(self, jid, chat_type):
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings: if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file) Config.add_settings_jid(self, jid_bare, db_file)
form = self['xep_0004'].make_form('form', 'Settings') form = self['xep_0004'].make_form('form', 'Settings')
@ -3828,7 +3854,7 @@ class XmppClient(slixmpp.ClientXMPP):
logger.debug('{}: jid_full: {}' logger.debug('{}: jid_full: {}'
.format(function_name, jid_full)) .format(function_name, jid_full))
jid_bare = session['from'].bare jid_bare = session['from'].bare
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings: if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file) Config.add_settings_jid(self, jid_bare, db_file)
# In this case (as is typical), the payload is a form # In this case (as is typical), the payload is a form
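
The handlers above now build the per-JID database path inline as os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db'), usually followed by Config.add_settings_jid(self, jid_bare, db_file). A minimal sketch, assuming self.dir_data points at the data directory and Config.add_settings_jid keeps the signature used above (the helper name is hypothetical, not part of the codebase), of one place to keep that pattern:

    import os

    from slixfeed.config import Config

    def database_pathname(self, jid_bare):
        """Return the per-JID SQLite path, registering its settings on first use.

        Hypothetical helper mirroring the pattern repeated in the handlers above.
        """
        db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
        if jid_bare not in self.settings:
            Config.add_settings_jid(self, jid_bare, db_file)
        return db_file

Each handler could then call db_file = database_pathname(self, jid_bare) instead of repeating the join and the settings check.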

View file

@ -4,7 +4,6 @@
from feedparser import parse from feedparser import parse
import os import os
from random import randrange from random import randrange
import slixfeed.config as config
from slixfeed.config import Config from slixfeed.config import Config
import slixfeed.fetch as fetch import slixfeed.fetch as fetch
from slixfeed.log import Logger from slixfeed.log import Logger
@ -39,21 +38,21 @@ logger = Logger(__name__)
class XmppCommands: class XmppCommands:
def print_help(): def print_help(dir_config):
result = Documentation.manual('commands.toml') result = Documentation.manual(dir_config)
message = '\n'.join(result) message = '\n'.join(result)
return message return message
def print_help_list(): def print_help_list(dir_config):
command_list = Documentation.manual('commands.toml', section='all') command_list = Documentation.manual(dir_config, section='all')
message = ('Complete list of commands:\n' message = ('Complete list of commands:\n'
f'```\n{command_list}\n```') f'```\n{command_list}\n```')
return message return message
def print_help_specific(command_root, command_name): def print_help_specific(dir_config, command_root, command_name):
command_list = Documentation.manual('commands.toml', command_list = Documentation.manual(dir_config,
section=command_root, section=command_root,
command=command_name) command=command_name)
if command_list: if command_list:
@ -64,8 +63,8 @@ class XmppCommands:
return message return message
def print_help_key(command): def print_help_key(dir_config, command):
command_list = Documentation.manual('commands.toml', command) command_list = Documentation.manual(dir_config, command)
if command_list: if command_list:
command_list = ' '.join(command_list) command_list = ' '.join(command_list)
message = (f'Available command `{command}` keys:\n' message = (f'Available command `{command}` keys:\n'
@ -133,7 +132,7 @@ class XmppCommands:
identifier) identifier)
feed_id = sqlite.get_feed_id(db_file, url) feed_id = sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0] feed_id = feed_id[0]
result = await fetch.http(url) result = await fetch.http(self.settings_network, url)
if not result['error']: if not result['error']:
document = result['content'] document = result['content']
feed = parse(document) feed = parse(document)
@ -221,9 +220,9 @@ class XmppCommands:
keywords = sqlite.get_filter_value(db_file, 'allow') keywords = sqlite.get_filter_value(db_file, 'allow')
if keywords: keywords = str(keywords[0]) if keywords: keywords = str(keywords[0])
if axis: if axis:
val = await config.add_to_list(val, keywords) val = config.add_to_list(val, keywords)
else: else:
val = await config.remove_from_list(val, keywords) val = config.remove_from_list(val, keywords)
if sqlite.is_filter_key(db_file, 'allow'): if sqlite.is_filter_key(db_file, 'allow'):
await sqlite.update_filter_value(db_file, ['allow', val]) await sqlite.update_filter_value(db_file, ['allow', val])
else: else:
@ -267,12 +266,12 @@ class XmppCommands:
async def restore_default(self, jid_bare, key=None): async def restore_default(self, jid_bare, key=None):
if key: if key:
self.settings[jid_bare][key] = None self.settings[jid_bare][key] = None
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
await sqlite.delete_setting(db_file, key) await sqlite.delete_setting(db_file, key)
message = f'Setting {key} has been restored to default value.' message = f'Setting {key} has been restored to default value.'
else: else:
del self.settings[jid_bare] del self.settings[jid_bare]
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
await sqlite.delete_settings(db_file) await sqlite.delete_settings(db_file)
message = 'Default settings have been restored.' message = 'Default settings have been restored.'
return message return message
@ -316,9 +315,9 @@ class XmppCommands:
keywords = sqlite.get_filter_value(db_file, 'deny') keywords = sqlite.get_filter_value(db_file, 'deny')
if keywords: keywords = str(keywords[0]) if keywords: keywords = str(keywords[0])
if axis: if axis:
val = await config.add_to_list(val, keywords) val = config.add_to_list(val, keywords)
else: else:
val = await config.remove_from_list(val, keywords) val = config.remove_from_list(val, keywords)
if sqlite.is_filter_key(db_file, 'deny'): if sqlite.is_filter_key(db_file, 'deny'):
await sqlite.update_filter_value(db_file, ['deny', val]) await sqlite.update_filter_value(db_file, ['deny', val])
else: else:
@ -338,7 +337,7 @@ class XmppCommands:
async def import_opml(self, db_file, jid_bare, command): async def import_opml(self, db_file, jid_bare, command):
url = command url = command
result = await fetch.http(url) result = await fetch.http(self.settings_network, url)
count = await Opml.import_from_file(db_file, result) count = await Opml.import_from_file(db_file, result)
if count: if count:
message = f'Successfully imported {count} feeds.' message = f'Successfully imported {count} feeds.'
@ -382,7 +381,7 @@ class XmppCommands:
jid = info[0] jid = info[0]
if '/' not in jid: if '/' not in jid:
url = info[1] url = info[1]
db_file = config.get_pathname_to_database(jid) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid}.db')
if len(info) > 2: if len(info) > 2:
identifier = info[2] identifier = info[2]
else: else:
@ -408,7 +407,7 @@ class XmppCommands:
url.startswith('itpc:/') or url.startswith('itpc:/') or
url.startswith('rss:/')): url.startswith('rss:/')):
url = Url.feed_to_http(url) url = Url.feed_to_http(url)
url = (await Url.replace_hostname(url, 'feed')) or url url = (await Url.replace_hostname(self.dir_config, self.proxies, self.settings_network, url, 'feed')) or url
result = await Feed.add_feed(self, jid_bare, db_file, url, result = await Feed.add_feed(self, jid_bare, db_file, url,
identifier) identifier)
if isinstance(result, list): if isinstance(result, list):
@ -471,7 +470,7 @@ class XmppCommands:
async def fetch_http(self, url, db_file, jid_bare): async def fetch_http(self, url, db_file, jid_bare):
if url.startswith('feed:/') or url.startswith('rss:/'): if url.startswith('feed:/') or url.startswith('rss:/'):
url = Url.feed_to_http(url) url = Url.feed_to_http(url)
url = (await Url.replace_hostname(url, 'feed')) or url url = (await Url.replace_hostname(self.dir_config, self.proxies, self.settings_network, url, 'feed')) or url
counter = 0 counter = 0
while True: while True:
identifier = String.generate_identifier(url, counter) identifier = String.generate_identifier(url, counter)
@ -704,7 +703,7 @@ class XmppCommands:
# response = ( # response = (
# f'Every update will contain {response} news items.' # f'Every update will contain {response} news items.'
# ) # )
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
await Config.set_setting_value( await Config.set_setting_value(
self, jid_bare, db_file, 'quantum', val_new) self, jid_bare, db_file, 'quantum', val_new)
message = f'Next update will contain {val_new} news items (was: {val_old}).' message = f'Next update will contain {val_new} news items (was: {val_old}).'
@ -724,12 +723,12 @@ class XmppCommands:
async def feed_read(self, jid_bare, data, url): async def feed_read(self, jid_bare, data, url):
if url.startswith('feed:/') or url.startswith('rss:/'): if url.startswith('feed:/') or url.startswith('rss:/'):
url = Url.feed_to_http(url) url = Url.feed_to_http(url)
url = (await Url.replace_hostname(url, 'feed')) or url url = (await Url.replace_hostname(self.dir_config, self.proxies, self.settings_network, url, 'feed')) or url
match len(data): match len(data):
case 1: case 1:
if url.startswith('http'): if url.startswith('http'):
while True: while True:
result = await fetch.http(url) result = await fetch.http(self.settings_network, url)
status = result['status_code'] status = result['status_code']
if result and not result['error']: if result and not result['error']:
document = result['content'] document = result['content']
@ -738,7 +737,7 @@ class XmppCommands:
message = Feed.view_feed(url, feed) message = Feed.view_feed(url, feed)
break break
else: else:
result = await FeedDiscovery.probe_page(url, document) result = await FeedDiscovery.probe_page(self.settings_network, self.pathnames, url, document)
if isinstance(result, list): if isinstance(result, list):
results = result results = result
message = f"Syndication feeds found for {url}\n\n```\n" message = f"Syndication feeds found for {url}\n\n```\n"
@ -763,7 +762,7 @@ class XmppCommands:
num = data[1] num = data[1]
if url.startswith('http'): if url.startswith('http'):
while True: while True:
result = await fetch.http(url) result = await fetch.http(self.settings_network, url)
if result and not result['error']: if result and not result['error']:
document = result['content'] document = result['content']
status = result['status_code'] status = result['status_code']
@ -772,7 +771,7 @@ class XmppCommands:
message = Feed.view_entry(url, feed, num) message = Feed.view_entry(url, feed, num)
break break
else: else:
result = await FeedDiscovery.probe_page(url, document) result = await FeedDiscovery.probe_page(self.settings_network, self.pathnames, url, document)
if isinstance(result, list): if isinstance(result, list):
results = result results = result
message = f"Syndication feeds found for {url}\n\n```\n" message = f"Syndication feeds found for {url}\n\n```\n"

View file

@ -254,9 +254,13 @@ class StorageImpl(Storage):
Example storage implementation that stores all data in a single JSON file. Example storage implementation that stores all data in a single JSON file.
""" """
omemo_dir = Data.get_pathname_to_omemo_directory() dir_data = Data.get_directory()
omemo_dir = os.path.join(dir_data, 'omemo')
JSON_FILE = os.path.join(omemo_dir, 'omemo.json') JSON_FILE = os.path.join(omemo_dir, 'omemo.json')
# TODO Pass JID
#JSON_FILE = os.path.join(omemo_dir, f'{jid_bare}.json')
def __init__(self) -> None: def __init__(self) -> None:
super().__init__() super().__init__()
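
The "# TODO Pass JID" comment above points at per-account OMEMO storage. A minimal sketch, assuming callers can supply the data directory and the bare JID (the helper name is illustrative and not part of the codebase):

    import os

    # Hypothetical sketch of the "# TODO Pass JID" idea above: one OMEMO state
    # file per bare JID (e.g. omemo/user@example.org.json) instead of a single
    # shared omemo.json.
    def omemo_json_for_jid(dir_data: str, jid_bare: str) -> str:
        omemo_dir = os.path.join(dir_data, 'omemo')
        os.makedirs(omemo_dir, exist_ok=True)
        return os.path.join(omemo_dir, f'{jid_bare}.json')

StorageImpl.__init__ could then accept the JID and set self.JSON_FILE from this helper rather than from a single shared omemo.json.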

View file

@ -11,7 +11,6 @@ socket (i.e. clients[fd]) from the respective client.
import asyncio import asyncio
import os import os
import slixfeed.config as config
from slixfeed.syndication import FeedTask from slixfeed.syndication import FeedTask
from slixfeed.xmpp.chat import XmppChatTask from slixfeed.xmpp.chat import XmppChatTask
from slixfeed.xmpp.commands import XmppCommands from slixfeed.xmpp.commands import XmppCommands
@ -85,7 +84,7 @@ class XmppIpcServer:
if '~' in data: if '~' in data:
data_list = data.split('~') data_list = data.split('~')
jid_bare = data_list[0] jid_bare = data_list[0]
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
command = data_list[1] command = data_list[1]
else: else:
command = data command = data
@ -231,7 +230,7 @@ class XmppIpcServer:
command.startswith('itpc:/') or command.startswith('itpc:/') or
command.startswith('rss:/')): command.startswith('rss:/')):
response = await XmppCommands.fetch_http( response = await XmppCommands.fetch_http(
self, command, db_file, jid_bare) self, command, db_file, jid_bare)
case _ if command.startswith('interval'): case _ if command.startswith('interval'):
val = command[9:] val = command[9:]
if val: if val:

View file

@ -27,7 +27,6 @@ TODO
import glob import glob
from slixfeed.config import Config from slixfeed.config import Config
import slixfeed.config as config
from slixfeed.log import Logger from slixfeed.log import Logger
from slixmpp.exceptions import IqTimeout, IqError from slixmpp.exceptions import IqTimeout, IqError
import os import os
@ -109,8 +108,7 @@ def set_identity(self, category):
async def set_vcard(self): async def set_vcard(self):
vcard = self.plugin['xep_0054'].make_vcard() vcard = self.plugin['xep_0054'].make_vcard()
profile = config.get_values('accounts.toml', 'xmpp')['profile'] profile = self.data_accounts_xmpp['profile']
for key in profile: for key in profile: vcard[key] = profile[key]
vcard[key] = profile[key]
await self.plugin['xep_0054'].publish_vcard(vcard) await self.plugin['xep_0054'].publish_vcard(vcard)

View file

@ -9,9 +9,9 @@ Functions create_node and create_entry are derived from project atomtopubsub.
import asyncio import asyncio
import hashlib import hashlib
import os
import slixmpp.plugins.xep_0060.stanza.pubsub as pubsub import slixmpp.plugins.xep_0060.stanza.pubsub as pubsub
from slixmpp.xmlstream import ET from slixmpp.xmlstream import ET
import slixfeed.config as config
from slixfeed.config import Config from slixfeed.config import Config
from slixfeed.log import Logger from slixfeed.log import Logger
import slixfeed.sqlite as sqlite import slixfeed.sqlite as sqlite
@ -259,7 +259,7 @@ class XmppPubsubAction:
async def send_selected_entry(self, jid_bare, node_id, entry_id): async def send_selected_entry(self, jid_bare, node_id, entry_id):
function_name = sys._getframe().f_code.co_name function_name = sys._getframe().f_code.co_name
logger.debug('{}: jid_bare: {}'.format(function_name, jid_bare)) logger.debug('{}: jid_bare: {}'.format(function_name, jid_bare))
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
feed_id = sqlite.get_feed_id_by_entry_index(db_file, entry_id) feed_id = sqlite.get_feed_id_by_entry_index(db_file, entry_id)
feed_id = feed_id[0] feed_id = feed_id[0]
node_id, node_title, node_subtitle = sqlite.get_feed_properties(db_file, feed_id) node_id, node_title, node_subtitle = sqlite.get_feed_properties(db_file, feed_id)
@ -298,7 +298,7 @@ class XmppPubsubAction:
""" """
function_name = sys._getframe().f_code.co_name function_name = sys._getframe().f_code.co_name
logger.debug('{}: jid_bare: {}'.format(function_name, jid_bare)) logger.debug('{}: jid_bare: {}'.format(function_name, jid_bare))
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
report = {} report = {}
subscriptions = sqlite.get_active_feeds_url(db_file) subscriptions = sqlite.get_active_feeds_url(db_file)
for url in subscriptions: for url in subscriptions:
@ -372,7 +372,7 @@ class XmppPubsubTask:
async def loop_task(self, jid_bare): async def loop_task(self, jid_bare):
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings: if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file) Config.add_settings_jid(self, jid_bare, db_file)
while True: while True:
@ -393,7 +393,7 @@ class XmppPubsubTask:
def restart_task(self, jid_bare): def restart_task(self, jid_bare):
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings: if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file) Config.add_settings_jid(self, jid_bare, db_file)
if jid_bare not in self.task_manager: if jid_bare not in self.task_manager:
@ -411,7 +411,7 @@ class XmppPubsubTask:
async def task_publish(self, jid_bare): async def task_publish(self, jid_bare):
db_file = config.get_pathname_to_database(jid_bare) db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings: if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file) Config.add_settings_jid(self, jid_bare, db_file)
while True: while True:

View file

@ -2,8 +2,8 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
import asyncio import asyncio
import os
from slixfeed.config import Config from slixfeed.config import Config
import slixfeed.config as config
import slixfeed.sqlite as sqlite import slixfeed.sqlite as sqlite
from slixfeed.log import Logger from slixfeed.log import Logger
from slixfeed.xmpp.presence import XmppPresence from slixfeed.xmpp.presence import XmppPresence
@ -25,11 +25,11 @@ class XmppStatus:
Jabber ID. Jabber ID.
""" """
function_name = sys._getframe().f_code.co_name function_name = sys._getframe().f_code.co_name
logger.debug('{}: jid: {}'.format(function_name, jid_bare)) logger.debug(f'{function_name}: jid: {jid_bare}')
status_text = '📜️ Slixfeed RSS News Bot' status_text = '📜️ Slixfeed RSS News Bot'
db_file = config.get_pathname_to_database(jid_bare)
enabled = Config.get_setting_value(self, jid_bare, 'enabled') enabled = Config.get_setting_value(self, jid_bare, 'enabled')
if enabled: if enabled:
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
jid_task = self.pending_tasks[jid_bare] if jid_bare in self.pending_tasks else None jid_task = self.pending_tasks[jid_bare] if jid_bare in self.pending_tasks else None
if jid_task and len(jid_task): if jid_task and len(jid_task):
# print('status dnd for ' + jid_bare) # print('status dnd for ' + jid_bare)
@ -47,7 +47,7 @@ class XmppStatus:
if unread: if unread:
# print('status unread for ' + jid_bare) # print('status unread for ' + jid_bare)
status_mode = 'chat' status_mode = 'chat'
status_text = '📬️ There are {} news items'.format(str(unread)) status_text = f'📬️ There are {unread} news items'
else: else:
# print('status no news for ' + jid_bare) # print('status no news for ' + jid_bare)
status_mode = 'away' status_mode = 'away'
@ -73,14 +73,13 @@ class XmppStatusTask:
return return
if jid_bare not in self.task_manager: if jid_bare not in self.task_manager:
self.task_manager[jid_bare] = {} self.task_manager[jid_bare] = {}
logger.info('Creating new task manager for JID {}'.format(jid_bare)) logger.info(f'Creating new task manager for JID {jid_bare}')
logger.info('Stopping task "status" for JID {}'.format(jid_bare)) logger.info(f'Stopping task "status" for JID {jid_bare}')
try: try:
self.task_manager[jid_bare]['status'].cancel() self.task_manager[jid_bare]['status'].cancel()
except: except:
logger.info('No task "status" for JID {} (XmppStatusTask.start_task)' logger.info(f'No task "status" for JID {jid_bare} (XmppStatusTask.start_task)')
.format(jid_bare)) logger.info(f'Starting task "status" for JID {jid_bare}')
logger.info('Starting tasks "status" for JID {}'.format(jid_bare))
self.task_manager[jid_bare]['status'] = asyncio.create_task( self.task_manager[jid_bare]['status'] = asyncio.create_task(
XmppStatusTask.task_status(self, jid_bare)) XmppStatusTask.task_status(self, jid_bare))
@ -90,5 +89,4 @@ class XmppStatusTask:
'status' in self.task_manager[jid_bare]): 'status' in self.task_manager[jid_bare]):
self.task_manager[jid_bare]['status'].cancel() self.task_manager[jid_bare]['status'].cancel()
else: else:
logger.debug('No task "status" for JID {}' logger.debug(f'No task "status" for JID {jid_bare}')
.format(jid_bare))