Increase usage of "self" handlers;

Remove CSV database.
This commit is contained in:
Schimon Jehudah, Adv. 2024-11-24 17:25:49 +02:00
parent 6a78afd67f
commit 878753b26e
27 changed files with 380 additions and 2768 deletions

View file

@ -58,6 +58,8 @@ TODO
# res = response (HTTP)
from argparse import ArgumentParser
import logging
import os
import shutil
import sys
# from eliot import start_action, to_file
@ -65,8 +67,9 @@ import sys
# # with start_action(action_type='set_date()', jid=jid):
# # with start_action(action_type='message()', msg=msg):
import slixfeed.config as config
from slixfeed.config import Settings, Share, Cache
from slixfeed.log import Logger
from slixfeed.utilities import Toml
from slixfeed.version import __version__
logger = Logger(__name__)
@ -78,10 +81,44 @@ logger = Logger(__name__)
def main():
config_dir = config.get_default_config_directory()
logger.info('Reading configuration from {}'.format(config_dir))
print('Reading configuration from {}'.format(config_dir))
network_settings = config.get_values('settings.toml', 'network')
directory = os.path.dirname(__file__)
# Copy data files
directory_data = Share.get_directory()
if not os.path.exists(directory_data):
directory_assets = os.path.join(directory, 'assets')
directory_assets_new = shutil.copytree(directory_assets, directory_data)
print(f'Data directory {directory_assets_new} has been created and populated.')
# Copy settings files
directory_settings = Settings.get_directory()
if not os.path.exists(directory_settings):
directory_configs = os.path.join(directory, 'configs')
directory_settings_new = shutil.copytree(directory_configs, directory_settings)
print(f'Settings directory {directory_settings_new} has been created and populated.')
# Create cache directories
directory_cache = Cache.get_directory()
if not os.path.exists(directory_cache):
print(f'Creating a cache directory at {directory_cache}.')
os.mkdir(directory_cache)
for subdirectory in ('md', 'enclosure', 'markdown', 'opml', 'readability'):
subdirectory_cache = os.path.join(directory_cache, subdirectory)
if not os.path.exists(subdirectory_cache):
print(f'Creating a cache subdirectory at {subdirectory_cache}.')
os.mkdir(subdirectory_cache)
filename_settings = os.path.join(directory_settings, 'settings.toml')
settings = Toml.open_file(filename_settings)
network_settings = settings['network']
# Configure account
print('User agent:', network_settings['user_agent'] or 'Slixfeed/0.1')
if network_settings['http_proxy']: print('HTTP Proxy:', network_settings['http_proxy'])
@ -160,28 +197,33 @@ def main():
# if not alias:
# alias = (input('Alias: ')) or 'Slixfeed'
account_xmpp = config.get_values('accounts.toml', 'xmpp')
filename_accounts = os.path.join(directory_settings, 'accounts.toml')
accounts = Toml.open_file(filename_accounts)
accounts_xmpp = accounts['xmpp']
# Try configuration file
if 'client' in account_xmpp:
if 'client' in accounts_xmpp:
from slixfeed.xmpp.client import XmppClient
jid = account_xmpp['client']['jid']
password = account_xmpp['client']['password']
alias = account_xmpp['client']['alias'] if 'alias' in account_xmpp['client'] else None
hostname = account_xmpp['client']['hostname'] if 'hostname' in account_xmpp['client'] else None
port = account_xmpp['client']['port'] if 'port' in account_xmpp['client'] else None
accounts_xmpp_client = accounts_xmpp['client']
jid = accounts_xmpp_client['jid']
password = accounts_xmpp_client['password']
alias = accounts_xmpp_client['alias'] if 'alias' in accounts_xmpp_client else None
hostname = accounts_xmpp_client['hostname'] if 'hostname' in accounts_xmpp_client else None
port = accounts_xmpp_client['port'] if 'port' in accounts_xmpp_client else None
XmppClient(jid, password, hostname, port, alias)
# xmpp_client = Slixfeed(jid, password, hostname, port, alias)
# xmpp_client.connect((hostname, port)) if hostname and port else xmpp_client.connect()
# xmpp_client.process()
if 'component' in account_xmpp:
if 'component' in accounts_xmpp:
from slixfeed.xmpp.component import XmppComponent
jid = account_xmpp['component']['jid']
secret = account_xmpp['component']['password']
alias = account_xmpp['component']['alias'] if 'alias' in account_xmpp['component'] else None
hostname = account_xmpp['component']['hostname'] if 'hostname' in account_xmpp['component'] else None
port = account_xmpp['component']['port'] if 'port' in account_xmpp['component'] else None
accounts_xmpp_component = accounts_xmpp['component']
jid = accounts_xmpp_component['jid']
secret = accounts_xmpp_component['password']
alias = accounts_xmpp_component['alias'] if 'alias' in accounts_xmpp_component else None
hostname = accounts_xmpp_component['hostname'] if 'hostname' in accounts_xmpp_component else None
port = accounts_xmpp_component['port'] if 'port' in accounts_xmpp_component else None
XmppComponent(jid, secret, hostname, port, alias)
# xmpp_component = SlixfeedComponent(jid, secret, hostname, port, alias)
# xmpp_component.connect()

File diff suppressed because it is too large Load diff

View file

@ -3,12 +3,6 @@
"""
FIXME
1) Use dict for ConfigDefault
2) Store ConfigJabberID in dicts
TODO
1) Site-specific filter (i.e. audiobookbay).
@ -21,14 +15,6 @@ TODO
4) Copy file from /etc/slixfeed/ or /usr/share/slixfeed/
5) Merge get_value_default into get_value.
6) Use TOML https://ruudvanasseldonk.com/2023/01/11/the-yaml-document-from-hell
7) Make the program portable (directly use the directory assets) -- Thorsten
7.1) Read missing files from base directories or either set error message.
"""
import configparser
@ -45,49 +31,11 @@ except:
logger = Logger(__name__)
class Cache:
class Settings:
def get_default_cache_directory():
def get_directory():
"""
Determine the directory path where dbfile will be stored.
* If $XDG_DATA_HOME is defined, use it;
* else if $HOME exists, use it;
* else if the platform is Windows, use %APPDATA%;
* else use the current directory.
Returns
-------
str
Path to cache directory.
"""
# data_home = xdg.BaseDirectory.xdg_data_home
data_home = os.environ.get('XDG_CACHE_HOME')
if data_home is None:
if os.environ.get('HOME') is None:
if sys.platform == 'win32':
data_home = os.environ.get('APPDATA')
if data_home is None:
return os.path.abspath('.slixfeed/cache')
else:
return os.path.abspath('.slixfeed/cache')
else:
data_home = os.path.join(
os.environ.get('HOME'), '.cache'
)
return os.path.join(data_home, 'slixfeed')
# TODO Consider a class ConfigDefault for default values to be initiate at most
# basic level possible and a class ConfigJID for each JID (i.e. db_file) to be
# also initiated at same level or at least at event call, then check whether
# setting_jid.setting_key has value, otherwise resort to setting_default.setting_key.
class Config:
# TODO Write a similar function for file.
# NOTE this is a function of a directory, not of a file.
def get_default_config_directory():
"""
Determine the directory path where configuration will be stored.
Determine the directory path where setting files be stored.
* If $XDG_CONFIG_HOME is defined, use it;
* else if $HOME exists, use it;
@ -99,7 +47,105 @@ class Config:
str
Path to configuration directory.
"""
# config_home = xdg.BaseDirectory.xdg_config_home
# config_home = xdg.BaseDirectory.xdg_config_home
config_home = os.environ.get('XDG_CONFIG_HOME')
if config_home is None:
if os.environ.get('HOME') is None:
if sys.platform == 'win32':
config_home = os.environ.get('APPDATA')
if config_home is None:
return os.path.abspath('.')
else:
return os.path.abspath('.')
else:
config_home = os.path.join(
os.environ.get('HOME'), '.config'
)
return os.path.join(config_home, 'slixfeed')
class Share:
    """Locate the application data (share) directory."""

    @staticmethod
    def get_directory():
        """
        Determine the directory path where data files will be stored.

        * If $XDG_DATA_HOME is defined, use it;
        * else if $HOME exists, use it;
        * else if the platform is Windows, use %APPDATA%;
        * else use the current directory.

        Returns
        -------
        str
            Path to data directory.
        """
        # data_home = xdg.BaseDirectory.xdg_data_home
        data_home = os.environ.get('XDG_DATA_HOME')
        if data_home is None:
            if os.environ.get('HOME') is None:
                if sys.platform == 'win32':
                    data_home = os.environ.get('APPDATA')
                    if data_home is None:
                        return os.path.abspath('.slixfeed/data')
                else:
                    return os.path.abspath('.slixfeed/data')
            else:
                data_home = os.path.join(
                    os.environ.get('HOME'), '.local', 'share')
        return os.path.join(data_home, 'slixfeed')
class Cache:
    """Locate the application cache directory."""

    @staticmethod
    def get_directory():
        """
        Determine the directory path where cache files will be stored.

        * If $XDG_CACHE_HOME is defined, use it;
        * else if $HOME exists, use it;
        * else if the platform is Windows, use %APPDATA%;
        * else use the current directory.

        Returns
        -------
        str
            Path to cache directory.
        """
        # cache_home = xdg.BaseDirectory.xdg_cache_home
        cache_home = os.environ.get('XDG_CACHE_HOME')
        if cache_home is None:
            if os.environ.get('HOME') is None:
                if sys.platform == 'win32':
                    cache_home = os.environ.get('APPDATA')
                    if cache_home is None:
                        return os.path.abspath('.slixfeed/cache')
                else:
                    return os.path.abspath('.slixfeed/cache')
            else:
                cache_home = os.path.join(
                    os.environ.get('HOME'), '.cache')
        return os.path.join(cache_home, 'slixfeed')
class Config:
def get_directory():
"""
Determine the directory path where setting files be stored.
* If $XDG_CONFIG_HOME is defined, use it;
* else if $HOME exists, use it;
* else if the platform is Windows, use %APPDATA%;
* else use the current directory.
Returns
-------
str
Path to configuration directory.
"""
# config_home = xdg.BaseDirectory.xdg_config_home
config_home = os.environ.get('XDG_CONFIG_HOME')
if config_home is None:
if os.environ.get('HOME') is None:
@ -120,10 +166,6 @@ class Config:
content = tomli_w.dumps(data)
new_file.write(content)
def add_settings_default(self):
settings_default = get_values('settings.toml', 'settings')
self.defaults = settings_default
# TODO Open SQLite file once
def add_settings_jid(self, jid_bare, db_file):
self.settings[jid_bare] = {}
@ -136,11 +178,6 @@ class Config:
# self.defaults of get_setting_value
self.settings[jid_bare][key] = self.defaults['default'][key]
def get_settings_xmpp(key=None):
result = get_values('accounts.toml', 'xmpp')
result = result[key] if key else result
return result
async def set_setting_value(self, jid_bare, db_file, key, val):
key = key.lower()
key_val = [key, val]
@ -159,29 +196,11 @@ class Config:
value = self.defaults['default'][key]
return value
class ConfigNetwork:
    """Load network settings into the given settings mapping."""

    def __init__(self, settings):
        # Populate settings['network'] with values read from the
        # [Network] section of the 'settings' INI file via get_value().
        settings['network'] = {}
        for key in ('http_proxy', 'user_agent'):
            value = get_value('settings', 'Network', key)
            settings['network'][key] = value
class ConfigJabberID:
    """Load per-JID settings from the JID's SQLite database into the
    given settings mapping."""

    def __init__(self, settings, jid_bare, db_file):
        settings[jid_bare] = {}
        for key in ('archive', 'enabled', 'filter', 'formatting', 'interval',
                    'length', 'media', 'old', 'quantum'):
            value = sqlite.get_setting_value(db_file, key)
            # get_setting_value returns a row; unwrap the first column.
            if value: value = value[0]
            # BUG FIX: removed leftover debug print(value).
            settings[jid_bare][key] = value
class Data:
def get_default_data_directory():
def get_directory():
"""
Determine the directory path where dbfile will be stored.
@ -221,28 +240,6 @@ class Data:
return os.path.join(data_home, 'slixfeed')
def get_pathname_to_omemo_directory():
"""
Get OMEMO directory.
Parameters
----------
None
Returns
-------
object
Coroutine object.
"""
db_dir = get_default_data_directory()
if not os.path.isdir(db_dir):
os.mkdir(db_dir)
if not os.path.isdir(db_dir + "/omemo"):
os.mkdir(db_dir + "/omemo")
omemo_dir = os.path.join(db_dir, "omemo")
return omemo_dir
def get_values(filename, key=None):
config_dir = get_default_config_directory()
if not os.path.isdir(config_dir):
@ -274,78 +271,6 @@ def get_setting_value(db_file, key):
return value
# TODO Merge with backup_obsolete
def update_proxies(file, proxy_name, proxy_type, proxy_url, action='remove'):
    """
    Remove the given proxy URL from a proxy list of 'proxies.toml'.

    Parameters
    ----------
    file : str
        Filename to write the updated data to.
    proxy_name : str
        Proxy name.
    proxy_type : str
        Proxy type.
    proxy_url : str
        Proxy URL to remove.
    action : str
        Unused; kept for backward compatibility. The default is 'remove'.

    Returns
    -------
    None.

    Raises
    ------
    ValueError
        If proxy_url is not present in the list (callers handle this).
    """
    data = open_config_file('proxies.toml')
    proxy_list = data['proxies'][proxy_name][proxy_type]
    # BUG FIX: docstring said "Add given URL" although this removes it;
    # index()+pop() collapsed to the equivalent remove(); debug print dropped.
    proxy_list.remove(proxy_url)
    with open(file, 'w') as new_file:
        content = tomli_w.dumps(data)
        new_file.write(content)
# TODO Merge with update_proxies
def backup_obsolete(file, proxy_name, proxy_type, proxy_url, action='add'):
    """
    Append the given proxy URL to a proxy list of 'proxies_obsolete.toml'.

    Parameters
    ----------
    file : str
        Filename to write the updated data to.
    proxy_name : str
        Proxy name.
    proxy_type : str
        Proxy type.
    proxy_url : str
        Proxy URL to append.
    action : str
        Unused; kept for backward compatibility. The default is 'add'.

    Returns
    -------
    None.
    """
    data = open_config_file('proxies_obsolete.toml')
    proxy_list = data['proxies'][proxy_name][proxy_type]
    # append() is the idiomatic form of extend([single_item]).
    proxy_list.append(proxy_url)
    with open(file, 'w') as new_file:
        content = tomli_w.dumps(data)
        new_file.write(content)
def create_skeleton(file):
    """
    Write 'proxies_obsolete.toml': a copy of the given TOML file
    with all of its values cleared.

    Parameters
    ----------
    file : str
        Pathname of the TOML file to use as a template.

    Returns
    -------
    None.
    """
    with open(file, 'rb') as original_file:
        data = tomllib.load(original_file)
    data = clear_values(data)
    # NOTE(review): the output filename is hard-coded and written to the
    # current working directory, regardless of where `file` resides —
    # confirm this is intentional.
    with open('proxies_obsolete.toml', 'w') as new_file:
        content = tomli_w.dumps(data)
        new_file.write(content)
def clear_values(input):
if isinstance(input, dict):
return {k: clear_values(v) for k, v in input.items()}
@ -355,262 +280,7 @@ def clear_values(input):
return ''
# TODO Return dict instead of list
def get_value(filename, section, keys):
    """
    Get setting value.

    Parameters
    ----------
    filename : str
        INI filename (without the ".ini" extension).
    section : str
        INI section.
    keys : list or str
        A single key as string or multiple keys as list.

    Returns
    -------
    result : list or str or None
        A single value as string, multiple values as list,
        or None when the section is missing.
    """
    result = None
    config_res = configparser.RawConfigParser()
    config_dir = get_default_config_directory()
    # Fall back to the system-wide directory, then to bundled assets.
    if not os.path.isdir(config_dir):
        config_dir = '/usr/share/slixfeed/'
    if not os.path.isdir(config_dir):
        config_dir = os.path.dirname(__file__) + "/assets"
    config_file = os.path.join(config_dir, filename + ".ini")
    config_res.read(config_file)
    if config_res.has_section(section):
        section_res = config_res[section]
        if isinstance(keys, list):
            result = []
            for key in keys:
                if key in section_res:
                    value = section_res[key]
                    logger.debug("Found value {} for key {}".format(value, key))
                else:
                    value = ''
                    # BUG FIX: was logger.debug("Missing key:", key), which
                    # passes `key` as a %-format argument with no placeholder.
                    logger.debug("Missing key: {}".format(key))
                result.append(value)
        elif isinstance(keys, str):
            key = keys
            if key in section_res:
                result = section_res[key]
                logger.debug("Found value {} for key {}".format(result, key))
            else:
                result = ''
                # logger.error("Missing key:", key)
    # BUG FIX: identity comparison for None (was `result == None`).
    if result is None:
        logger.error(
            "Check configuration file {}.ini for "
            "missing key(s) \"{}\" under section [{}].".format(
                filename, keys, section)
        )
    return result
# TODO Store config file as an object in runtime, otherwise
# the file will be opened time and time again.
# TODO Copy file from /etc/slixfeed/ or /usr/share/slixfeed/
def get_value_default(filename, section, key):
    """
    Get settings default value.

    Parameters
    ----------
    filename : str
        INI filename (without the ".ini" extension).
    section : str
        INI section.
    key : str
        Key: archive, enabled, interval,
        length, old, quantum, random.

    Returns
    -------
    result : str or None
        Value, or None when the section is missing.
    """
    # BUG FIX: result was unbound (UnboundLocalError on return) when the
    # requested section does not exist in the configuration file.
    result = None
    config_res = configparser.RawConfigParser()
    config_dir = get_default_config_directory()
    if not os.path.isdir(config_dir):
        config_dir = '/usr/share/slixfeed/'
    config_file = os.path.join(config_dir, filename + ".ini")
    config_res.read(config_file)
    if config_res.has_section(section):
        result = config_res[section][key]
    return result
# TODO DELETE THIS FUNCTION OR KEEP ONLY THE CODE BELOW NOTE
# IF CODE BELOW NOTE IS KEPT, RENAME FUNCTION TO open_toml
def open_config_file(filename):
    """
    Load a TOML configuration file and return its contents.

    Parameters
    ----------
    filename : str
        Filename of toml file.

    Returns
    -------
    result : dict
        Parsed TOML data.
    """
    # Candidate locations, in order of preference: user configuration,
    # system-wide directory, bundled assets (used even if missing).
    config_dir = get_default_config_directory()
    if not os.path.isdir(config_dir):
        config_dir = '/usr/share/slixfeed/'
    if not os.path.isdir(config_dir):
        config_dir = os.path.dirname(__file__) + "/assets"
    pathname = os.path.join(config_dir, filename)
    with open(pathname, mode="rb") as handle:
        result = tomllib.load(handle)
    return result
def get_default_data_directory():
    """
    Determine the directory path where dbfile will be stored.

    * If $XDG_DATA_HOME is defined, use it;
    * else if $HOME exists, use it;
    * else if the platform is Windows, use %APPDATA%;
    * else use the current directory.

    Returns
    -------
    str
        Path to database file.

    Note
    ----
    This function was taken from project buku.

    See https://github.com/jarun/buku

    * Arun Prakash Jana (jarun)
    * Dmitry Marakasov (AMDmi3)
    """
    # Guard-clause form of the XDG base-directory lookup chain.
    xdg_data = os.environ.get('XDG_DATA_HOME')
    if xdg_data is not None:
        return os.path.join(xdg_data, 'slixfeed')
    home = os.environ.get('HOME')
    if home is not None:
        return os.path.join(home, '.local', 'share', 'slixfeed')
    if sys.platform == 'win32':
        appdata = os.environ.get('APPDATA')
        if appdata is not None:
            return os.path.join(appdata, 'slixfeed')
    return os.path.abspath('.slixfeed/data')
def get_default_cache_directory():
    """
    Determine the directory path where dbfile will be stored.

    * If $XDG_DATA_HOME is defined, use it;
    * else if $HOME exists, use it;
    * else if the platform is Windows, use %APPDATA%;
    * else use the current directory.

    Returns
    -------
    str
        Path to cache directory.
    """
    # Guard-clause form of the XDG base-directory lookup chain.
    xdg_cache = os.environ.get('XDG_CACHE_HOME')
    if xdg_cache is not None:
        return os.path.join(xdg_cache, 'slixfeed')
    home = os.environ.get('HOME')
    if home is not None:
        return os.path.join(home, '.cache', 'slixfeed')
    if sys.platform == 'win32':
        appdata = os.environ.get('APPDATA')
        if appdata is not None:
            return os.path.join(appdata, 'slixfeed')
    return os.path.abspath('.slixfeed/cache')
# TODO Write a similar function for file.
# NOTE this is a function of a directory, not of a file.
def get_default_config_directory():
    """
    Determine the directory path where configuration will be stored.

    * If $XDG_CONFIG_HOME is defined, use it;
    * else if $HOME exists, use it;
    * else if the platform is Windows, use %APPDATA%;
    * else use the current directory.

    Returns
    -------
    str
        Path to configuration directory.
    """
    # Guard-clause form of the XDG base-directory lookup chain.
    xdg_config = os.environ.get('XDG_CONFIG_HOME')
    if xdg_config is not None:
        return os.path.join(xdg_config, 'slixfeed')
    home = os.environ.get('HOME')
    if home is not None:
        return os.path.join(home, '.config', 'slixfeed')
    if sys.platform == 'win32':
        appdata = os.environ.get('APPDATA')
        if appdata is not None:
            return os.path.join(appdata, 'slixfeed')
    return os.path.abspath('.')
def get_pathname_to_database(jid_file):
    """
    Return the pathname of the SQLite database for the given JID,
    creating the database directories and tables if necessary.

    Parameters
    ----------
    jid_file : str
        Filename stem (bare JID) for the database file.

    Returns
    -------
    db_file : str
        Pathname of the SQLite database file.
    """
    # BUG FIX: docstring documented nonexistent callback/message parameters;
    # path building now uses os.path.join consistently instead of "+".
    db_dir = get_default_data_directory()
    if not os.path.isdir(db_dir):
        os.mkdir(db_dir)
    sqlite_dir = os.path.join(db_dir, "sqlite")
    if not os.path.isdir(sqlite_dir):
        os.mkdir(sqlite_dir)
    db_file = os.path.join(sqlite_dir, "{}.db".format(jid_file))
    sqlite.create_tables(db_file)
    return db_file
# await set_default_values(db_file)
# if message:
# return await callback(db_file, message)
# else:
# return await callback(db_file)
async def add_to_list(newwords, keywords):
def add_to_list(newwords, keywords):
"""
Append new keywords to list.
@ -641,7 +311,7 @@ async def add_to_list(newwords, keywords):
return val
async def remove_from_list(newwords, keywords):
def remove_from_list(newwords, keywords):
"""
Remove given keywords from list.

View file

@ -0,0 +1 @@
proxies = {}

View file

@ -44,7 +44,6 @@ from asyncio import TimeoutError
# from lxml import html
# from xml.etree.ElementTree import ElementTree, ParseError
#import requests
import slixfeed.config as config
from slixfeed.log import Logger
# import urllib.request
# from urllib.error import HTTPError
@ -87,11 +86,10 @@ class Http:
# return status
async def fetch_headers(url):
network_settings = config.get_values('settings.toml', 'network')
user_agent = (network_settings['user_agent'] or 'Slixfeed/0.1')
async def fetch_headers(settings_network, url):
user_agent = (settings_network['user_agent'] or 'Slixfeed/0.1')
headers = {'User-Agent': user_agent}
proxy = (network_settings['http_proxy'] or None)
proxy = (settings_network['http_proxy'] or None)
timeout = ClientTimeout(total=10)
async with ClientSession(headers=headers) as session:
async with session.get(url, proxy=proxy,
@ -106,7 +104,7 @@ class Http:
# TODO Write file to disk. Consider aiofiles
async def fetch_media(url, pathname):
async def fetch_media(settings_network, url, pathname):
"""
Download media content of given URL.
@ -122,10 +120,9 @@ class Http:
msg: list or str
Document or error message.
"""
network_settings = config.get_values('settings.toml', 'network')
user_agent = (network_settings['user_agent'] or 'Slixfeed/0.1')
user_agent = (settings_network['user_agent'] or 'Slixfeed/0.1')
headers = {'User-Agent': user_agent}
proxy = (network_settings['http_proxy'] or None)
proxy = (settings_network['http_proxy'] or None)
timeout = ClientTimeout(total=10)
async with ClientSession(headers=headers) as session:
# async with ClientSession(trust_env=True) as session:
@ -179,7 +176,7 @@ class Http:
return result
def http_response(url):
def http_response(settings_network, url):
"""
Download response headers.
@ -201,10 +198,7 @@ class Http:
response.status_code
response.url
"""
user_agent = (
config.get_value(
"settings", "Network", "user_agent")
) or 'Slixfeed/0.1'
user_agent = settings_network['user_agent'] or 'Slixfeed/0.1'
headers = {
"User-Agent": user_agent
}
@ -220,7 +214,7 @@ class Http:
return response
async def http(url):
async def http(settings_network, url):
"""
Download content of given URL.
@ -234,10 +228,9 @@ async def http(url):
msg: list or str
Document or error message.
"""
network_settings = config.get_values('settings.toml', 'network')
user_agent = (network_settings['user_agent'] or 'Slixfeed/0.1')
user_agent = (settings_network['user_agent'] or 'Slixfeed/0.1')
headers = {'User-Agent': user_agent}
proxy = (network_settings['http_proxy'] or None)
proxy = (settings_network['http_proxy'] or None)
timeout = ClientTimeout(total=10)
async with ClientSession(headers=headers) as session:
# async with ClientSession(trust_env=True) as session:

View file

@ -27,7 +27,6 @@ TODO
import asyncio
from feedparser import parse
import os
import slixfeed.config as config
from slixfeed.config import Config
import slixfeed.fetch as fetch
from slixfeed.log import Logger,Message
@ -53,7 +52,7 @@ class Feed:
os.mkdir(dir_cache + '/' + ext)
filename = os.path.join(
dir_cache, ext, 'slixfeed_' + DateAndTime.timestamp() + '.' + ext)
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
results = sqlite.get_feeds(db_file)
match ext:
# case 'html':
@ -347,8 +346,10 @@ class Feed:
if new_entries:
await sqlite.add_entries_and_update_feed_state(
db_file, feed_id, new_entries)
old = Config.get_setting_value(self, jid_bare, 'old')
if not old: await sqlite.mark_feed_as_read(db_file, feed_id)
breakpoint()
old = self.settings[jid_bare]['old'] or self.defaults['default']['old']
if not old: await sqlite.mark_feed_as_read(db_file,
feed_id)
result_final = {'link' : url,
'index' : feed_id,
'name' : title,
@ -362,7 +363,8 @@ class Feed:
# NOTE Do not be tempted to return a compact dictionary.
# That is, dictionary within dictionary
# Return multiple dictionaries in a list or tuple.
result = await FeedDiscovery.probe_page(url, document)
result = await FeedDiscovery.probe_page(
self.settings_network, self.pathnames, url, document)
if not result:
# Get out of the loop with dict indicating error.
result_final = {'link' : url,
@ -520,7 +522,7 @@ class Feed:
# NOTE This function is not being utilized
async def download_feed(self, db_file, feed_url):
async def download_feed(settings_network, db_file, feed_url):
"""
Process feed content.
@ -535,7 +537,7 @@ class Feed:
logger.debug('{}: db_file: {} url: {}'
.format(function_name, db_file, feed_url))
if isinstance(feed_url, tuple): feed_url = feed_url[0]
result = await fetch.http(feed_url)
result = await fetch.http(settings_network, feed_url)
feed_id = sqlite.get_feed_id(db_file, feed_url)
feed_id = feed_id[0]
status_code = result['status_code']
@ -932,7 +934,7 @@ class FeedDiscovery:
# else:
# return await callback(url)
async def probe_page(url, document=None):
async def probe_page(settings_network, pathnames, url, document=None):
"""
Parameters
----------
@ -947,7 +949,7 @@ class FeedDiscovery:
Single URL as list or selection of URLs as str.
"""
if not document:
response = await fetch.http(url)
response = await fetch.http(settings_network, url)
if not response['error']:
document = response['content']
try:
@ -985,10 +987,10 @@ class FeedDiscovery:
result = FeedDiscovery.feed_mode_auto_discovery(url, tree)
if not result:
logger.debug("Feed link scan mode engaged for {}".format(url))
result = FeedDiscovery.feed_mode_scan(url, tree)
result = FeedDiscovery.feed_mode_scan(url, tree, pathnames)
if not result:
logger.debug("Feed arbitrary mode engaged for {}".format(url))
result = FeedDiscovery.feed_mode_guess(url, tree)
result = FeedDiscovery.feed_mode_guess(url, pathnames)
if not result:
logger.debug("No feeds were found for {}".format(url))
result = None
@ -997,7 +999,7 @@ class FeedDiscovery:
# TODO Improve scan by gradual decreasing of path
def feed_mode_guess(url, tree):
def feed_mode_guess(url, pathnames):
"""
Lookup for feeds by pathname using HTTP Requests.
@ -1007,8 +1009,8 @@ class FeedDiscovery:
Path to database file.
url : str
URL.
tree : TYPE
DESCRIPTION.
pathnames : list
pathnames.
Returns
-------
@ -1017,18 +1019,17 @@ class FeedDiscovery:
"""
urls = []
parted_url = urlsplit(url)
paths = config.open_config_file("lists.toml")["pathnames"]
# Check whether URL has path (i.e. not root)
# Check parted_url.path to avoid error in case root wasn't given
# TODO Make more tests
if parted_url.path and parted_url.path.split('/')[1]:
paths.extend(
pathnames.extend(
[".atom", ".feed", ".rdf", ".rss"]
) if '.rss' not in paths else -1
) if '.rss' not in pathnames else -1
# if paths.index('.rss'):
# paths.extend([".atom", ".feed", ".rdf", ".rss"])
parted_url_path = parted_url.path if parted_url.path else '/'
for path in paths:
for path in pathnames:
address = Url.join_url(url, parted_url_path.split('/')[1] + path)
if address not in urls:
urls.extend([address])
@ -1037,7 +1038,7 @@ class FeedDiscovery:
return urls
def feed_mode_scan(url, tree):
def feed_mode_scan(url, tree, pathnames):
"""
Scan page for potential feeds by pathname.
@ -1056,8 +1057,7 @@ class FeedDiscovery:
Message with URLs.
"""
urls = []
paths = config.open_config_file("lists.toml")["pathnames"]
for path in paths:
for path in pathnames:
# xpath_query = "//*[@*[contains(.,'{}')]]".format(path)
# xpath_query = "//a[contains(@href,'{}')]".format(path)
num = 5
@ -1274,7 +1274,7 @@ class FeedTask:
# print('Scanning for updates for JID {}'.format(jid_bare))
logger.info('Scanning for updates for JID {}'.format(jid_bare))
while True:
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
urls = sqlite.get_active_feeds_url_sorted_by_last_scanned(db_file)
for url in urls:
#Message.printer('Scanning updates for URL {} ...'.format(url))

View file

@ -46,7 +46,6 @@ import hashlib
from lxml import etree, html
import os
import random
import slixfeed.config as config
import slixfeed.fetch as fetch
from slixfeed.log import Logger
import sys
@ -286,11 +285,11 @@ class DateAndTime:
class Documentation:
def manual(filename, section=None, command=None):
def manual(config_dir, section=None, command=None):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: filename: {}'.format(function_name, filename))
config_dir = config.get_default_config_directory()
with open(config_dir + '/' + filename, mode="rb") as commands:
logger.debug('{}: filename: {}'.format(function_name, config_dir))
filename = os.path.join(config_dir, 'commands.toml')
with open(filename, mode="rb") as commands:
cmds = tomllib.load(commands)
if section == 'all':
cmd_list = ''
@ -450,6 +449,19 @@ class Task:
.format(task, jid_bare))
class Toml:
def open_file(filename: str) -> dict:
with open(filename, mode="rb") as fn:
data = tomllib.load(fn)
return data
def save_file(filename: str, data: dict) -> None:
with open(filename, 'w') as fn:
data_as_string = tomli_w.dumps(data)
fn.write(data_as_string)
"""
FIXME
@ -486,21 +498,23 @@ class Url:
return hostname
async def replace_hostname(url, url_type):
async def replace_hostname(configuration_directory, proxies, settings_network, url, url_type):
"""
Replace hostname.
Parameters
----------
proxies : list
A list of hostnames.
url : str
URL.
A URL.
url_type : str
"feed" or "link".
A "feed" or a "link".
Returns
-------
url : str
URL.
A processed URL.
"""
url_new = None
parted_url = urlsplit(url)
@ -510,7 +524,6 @@ class Url:
pathname = parted_url.path
queries = parted_url.query
fragment = parted_url.fragment
proxies = config.open_config_file('proxies.toml')['proxies']
for proxy_name in proxies:
proxy = proxies[proxy_name]
if hostname in proxy['hostname'] and url_type in proxy['type']:
@ -530,26 +543,22 @@ class Url:
print(proxy_url)
print(url_new)
print('>>>')
response = await fetch.http(url_new)
response = await fetch.http(settings_network, url_new)
if (response and
response['status_code'] == 200 and
# response.reason == 'OK' and
url_new.startswith(proxy_url)):
break
else:
config_dir = config.get_default_config_directory()
proxies_obsolete_file = config_dir + '/proxies_obsolete.toml'
proxies_file = config_dir + '/proxies.toml'
if not os.path.isfile(proxies_obsolete_file):
config.create_skeleton(proxies_file)
config.backup_obsolete(proxies_obsolete_file,
proxy_name, proxy_type,
proxy_url)
try:
config.update_proxies(proxies_file, proxy_name,
proxy_type, proxy_url)
except ValueError as e:
logger.error([str(e), proxy_url])
proxies_obsolete_file = os.path.join(configuration_directory, 'proxies_obsolete.toml')
proxies_file = os.path.join(configuration_directory, 'proxies.toml')
breakpoint()
proxies_obsolete = Toml.open_file(proxies_obsolete_file)
proxies_obsolete['proxies'][proxy_name][proxy_type].append(proxy_url)
Toml.save_file(proxies_obsolete_file, proxies_obsolete)
# TODO self.proxies might need to be changed, so self probably should be passed.
proxies['proxies'][proxy_name][proxy_type].remove(proxy_url)
Toml.save_file(proxies_file, proxies)
url_new = None
else:
logger.warning('No proxy URLs for {}. '
@ -560,19 +569,21 @@ class Url:
return url_new
def remove_tracking_parameters(url):
def remove_tracking_parameters(trackers, url):
"""
Remove queries with tracking parameters.
Parameters
----------
trackers : list
A list of queries.
url : str
URL.
A URL.
Returns
-------
url : str
URL.
A processed URL.
"""
if url.startswith('data:') and ';base64,' in url:
return url
@ -582,7 +593,6 @@ class Url:
pathname = parted_url.path
queries = parse_qs(parted_url.query)
fragment = parted_url.fragment
trackers = config.open_config_file('queries.toml')['trackers']
for tracker in trackers:
if tracker in queries: del queries[tracker]
queries_new = urlencode(queries, doseq=True)
@ -821,12 +831,12 @@ class Utilities:
return url_digest
def pick_a_feed(lang=None):
def pick_a_feed(dir_config, lang=None):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: lang: {}'
.format(function_name, lang))
config_dir = config.get_default_config_directory()
with open(config_dir + '/' + 'feeds.toml', mode="rb") as feeds:
filename_feeds = os.path.join(dir_config, 'feeds.toml')
with open(filename_feeds, mode="rb") as feeds:
urls = tomllib.load(feeds)
import random
url = random.choice(urls['feeds'])

View file

@ -1,2 +1,2 @@
__version__ = '0.1.101'
__version_info__ = (0, 1, 101)
__version__ = '0.1.102'
__version_info__ = (0, 1, 102)

View file

@ -27,7 +27,6 @@ import asyncio
import os
from pathlib import Path
from random import randrange # pending_tasks: Use a list and read the first index (i.e. index 0).
import slixfeed.config as config
from slixfeed.config import Config
import slixfeed.fetch as fetch
from slixfeed.fetch import Http
@ -131,20 +130,20 @@ class XmppChat:
return
response = None
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
match command_lowercase:
case 'help':
command_list = XmppCommands.print_help()
command_list = XmppCommands.print_help(self.dir_config)
response = ('Available command keys:\n'
'```\n{}\n```\n'
'Usage: `help <key>`'
.format(command_list))
f'```\n{command_list}\n```\n'
'Usage: `help <key>`')
case 'help all':
command_list = Documentation.manual(
'commands.toml', section='all')
self.dir_config, section='all')
response = ('Complete list of commands:\n'
'```\n{}\n```'
.format(command_list))
f'```\n{command_list}\n```'
.format())
case _ if command_lowercase.startswith('help'):
command = command[5:].lower()
command = command.split(' ')
@ -152,7 +151,7 @@ class XmppChat:
command_root = command[0]
command_name = command[1]
command_list = Documentation.manual(
'commands.toml', section=command_root,
self.dir_config, section=command_root,
command=command_name)
if command_list:
command_list = ''.join(command_list)
@ -162,7 +161,7 @@ class XmppChat:
elif len(command) == 1:
command = command[0]
command_list = Documentation.manual(
'commands.toml', command)
self.dir_config, command)
if command_list:
command_list = ' '.join(command_list)
response = (f'Available command `{command}` keys:\n'
@ -619,25 +618,6 @@ class XmppChat:
response_finished = f'Finished. Total time: {command_time_total}s'
XmppMessage.send_reply(self, message, response_finished)
# if not response: response = 'EMPTY MESSAGE - ACTION ONLY'
# data_dir = config.get_default_data_directory()
# if not os.path.isdir(data_dir):
# os.mkdir(data_dir)
# if not os.path.isdir(data_dir + '/logs/'):
# os.mkdir(data_dir + '/logs/')
# MD.log_to_markdown(
# dt.current_time(), os.path.join(data_dir, 'logs', jid_bare),
# jid_bare, command)
# MD.log_to_markdown(
# dt.current_time(), os.path.join(data_dir, 'logs', jid_bare),
# jid_bare, response)
# print(
# f'Message : {command}\n'
# f'JID : {jid_bare}\n'
# f'{response}\n'
# )
class XmppChatAction:
@ -655,7 +635,7 @@ class XmppChatAction:
"""
function_name = sys._getframe().f_code.co_name
logger.debug(f'{function_name}: jid: {jid_bare} num: {num}')
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
encrypt_omemo = Config.get_setting_value(self, jid_bare, 'omemo')
encrypted = True if encrypt_omemo else False
jid = JID(jid_bare)
@ -698,7 +678,7 @@ class XmppChatAction:
else:
media_url = await Html.extract_image_from_html(url)
try:
http_headers = await Http.fetch_headers(media_url)
http_headers = await Http.fetch_headers(self.settings_network, media_url)
if ('Content-Length' in http_headers):
if int(http_headers['Content-Length']) < 100000:
media_url = None
@ -727,10 +707,10 @@ class XmppChatAction:
if not filename: breakpoint()
pathname = os.path.join(self.dir_cache, filename)
# http_response = await Http.response(media_url)
http_headers = await Http.fetch_headers(media_url)
http_headers = await Http.fetch_headers(self.settings_network, media_url)
if ('Content-Length' in http_headers and
int(http_headers['Content-Length']) < 3000000):
status = await Http.fetch_media(media_url, pathname)
status = await Http.fetch_media(self.settings_network, media_url, pathname)
if status:
filesize = os.path.getsize(pathname)
media_url_new = await XmppUpload.start(
@ -766,7 +746,7 @@ class XmppChatAction:
# NOTE Tested against Gajim.
# FIXME Jandle data: URIs.
if not media_url.startswith('data:'):
http_headers = await Http.fetch_headers(media_url)
http_headers = await Http.fetch_headers(self.settings_network, media_url)
if ('Content-Length' in http_headers and
int(http_headers['Content-Length']) > 100000):
print(http_headers['Content-Length'])
@ -876,8 +856,8 @@ class XmppChatAction:
else:
summary = '*** No summary ***'
link = result[2]
link = Url.remove_tracking_parameters(link)
link = await Url.replace_hostname(link, "link") or link
link = Url.remove_tracking_parameters(self.trackers, link)
link = await Url.replace_hostname(self.dir_config, self.proxies, self.settings_network, link, "link") or link
feed_id = result[4]
# news_item = (f'\n{str(title)}\n{str(link)}\n{str(feed_title)} [{str(ix)}]\n')
formatting = Config.get_setting_value(self, jid, 'formatting')
@ -895,7 +875,7 @@ class XmppChatTask:
async def task_message(self, jid_bare):
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
while True:

View file

@ -8,7 +8,7 @@ TODO
1) Assure message delivery before calling a new task.
See https://slixmpp.readthedocs.io/en/latest/event_index.html#term-marker_acknowledged
2) XHTTML-IM
2) XHTML-IM
case _ if message_lowercase.startswith("html"):
message['html']="
Parse me!
@ -46,13 +46,12 @@ import slixmpp
# import xml.etree.ElementTree as ET
# from lxml import etree
import slixfeed.config as config
from slixfeed.config import Cache, Config, Data
import slixfeed.fetch as fetch
from slixfeed.log import Logger
import slixfeed.sqlite as sqlite
from slixfeed.syndication import Feed, FeedDiscovery, FeedTask, Opml
from slixfeed.utilities import DateAndTime, Html, String, Task, Url, Utilities
from slixfeed.utilities import DateAndTime, Html, String, Task, Toml, Url, Utilities
from slixfeed.version import __version__
from slixfeed.xmpp.bookmark import XmppBookmark
from slixfeed.xmpp.chat import XmppChat, XmppChatTask
@ -118,19 +117,46 @@ class XmppClient(slixmpp.ClientXMPP):
self.task_ping_instance = {}
# Handlers for directories
self.dir_config = Config.get_default_config_directory()
self.dir_cache = Cache.get_default_cache_directory()
self.dir_data = Data.get_default_data_directory()
self.dir_config = Config.get_directory()
self.dir_cache = Cache.get_directory()
self.dir_data = Data.get_directory()
# Handler for default configuration
self.defaults = config.get_values('settings.toml')
filename_settings = os.path.join(self.dir_config, 'settings.toml')
data_settings = Toml.open_file(filename_settings)
# TODO self.defaults = data_settings['default']
self.defaults = data_settings
# Handler for network configurations
self.settings_network = data_settings['network']
# Handler for proxies
filename_proxies = os.path.join(self.dir_config, 'proxies.toml')
self.data_proxies = Toml.open_file(filename_proxies)
self.proxies = self.data_proxies['proxies']
# Handler for queries
filename_queries = os.path.join(self.dir_config, 'queries.toml')
self.data_queries = Toml.open_file(filename_queries)
self.trackers = self.data_queries['trackers']
# Handler for lists
filename_lists = os.path.join(self.dir_config, 'lists.toml')
self.data_lists = Toml.open_file(filename_lists)
self.pathnames = self.data_lists['pathnames']
# Handler for configuration
self.settings = {}
# Handler for operators
self.operators = config.get_values('accounts.toml', 'xmpp')['operators']
filename_accounts = os.path.join(self.dir_config, 'accounts.toml')
self.data_accounts = Toml.open_file(filename_accounts)
self.data_accounts_xmpp = self.data_accounts['xmpp']
self.operators = self.data_accounts_xmpp['operators']
# Handlers for whitelist and blacklist
self.selector = config.get_values('selector.toml')
filename_selector = os.path.join(self.dir_config, 'selector.toml')
self.selector = Toml.open_file(filename_selector)
paywall_enabled = self.selector['enabled']
self.whitelist = self.selector['whitelist']
self.blacklist = self.selector['blacklist']
@ -142,7 +168,7 @@ class XmppClient(slixmpp.ClientXMPP):
# Handlers for connection events
self.connection_attempts = 0
self.max_connection_attempts = 10
self.reconnect_timeout = config.get_values('accounts.toml', 'xmpp')['settings']['reconnect_timeout']
self.reconnect_timeout = self.data_accounts_xmpp['settings']['reconnect_timeout']
self.register_plugin('xep_0004') # Data Forms
self.register_plugin('xep_0030') # Service Discovery
@ -403,7 +429,7 @@ class XmppClient(slixmpp.ClientXMPP):
for result in await XmppPubsub.get_pubsub_services(self):
jid_bare = result['jid']
if jid_bare not in self.settings:
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
Config.add_settings_jid(self, jid_bare, db_file)
#await XmppPubsubTask.task_publish(self, jid_bare)
#await FeedTask.check_updates(self, jid_bare)
@ -465,7 +491,7 @@ class XmppClient(slixmpp.ClientXMPP):
message_log = '{}: jid_full: {}'
logger.debug(message_log.format(function_name, jid_full))
jid_bare = message['from'].bare
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
if jid_bare == self.boundjid.bare:
@ -1113,7 +1139,7 @@ class XmppClient(slixmpp.ClientXMPP):
ftype='hidden',
value=jid_bare)
num = 100
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
results = sqlite.get_entries(db_file, num)
subtitle = 'Recent {} updates'.format(num)
if results:
@ -1207,7 +1233,7 @@ class XmppClient(slixmpp.ClientXMPP):
if not node: node = Url.get_hostname(url)
form = self['xep_0004'].make_form('form', 'Publish')
while True:
result = await fetch.http(url)
result = await fetch.http(self.settings_network, url)
status = result['status_code']
if not result['error']:
document = result['content']
@ -1249,7 +1275,7 @@ class XmppClient(slixmpp.ClientXMPP):
session['payload'] = form
break
else:
result = await FeedDiscovery.probe_page(url, document)
result = await FeedDiscovery.probe_page(self.settings_network, self.pathnames, url, document)
if isinstance(result, list):
results = result
form['instructions'] = ('Discovered {} subscriptions '
@ -1323,7 +1349,7 @@ class XmppClient(slixmpp.ClientXMPP):
url = values['url'][0]
# xep = values['xep'][0]
xep = None
result = await fetch.http(url)
result = await fetch.http(self.settings_network, url)
if 'content' in result:
document = result['content']
feed = parse(document)
@ -1375,7 +1401,7 @@ class XmppClient(slixmpp.ClientXMPP):
logger.debug('{}: jid_full: {}'
.format(function_name, jid_full))
jid_bare = session['from'].bare
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
form = self['xep_0004'].make_form('form', 'Profile')
@ -1472,7 +1498,7 @@ class XmppClient(slixmpp.ClientXMPP):
chat_type = await XmppUtilities.get_chat_type(self, jid_bare)
if XmppUtilities.is_access(self, jid, chat_type):
jid = session['from'].bare
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
form = self['xep_0004'].make_form('form', 'Filters')
form['instructions'] = ('Filters allow you to skip news items '
'that you may not be interested at. Use '
@ -1547,7 +1573,7 @@ class XmppClient(slixmpp.ClientXMPP):
jid_bare = session['from'].bare
# form = self['xep_0004'].make_form('result', 'Done')
# form['instructions'] = ('✅️ Filters have been updated')
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
# In this case (as is typical), the payload is a form
values = payload['values']
for key in values:
@ -1556,7 +1582,7 @@ class XmppClient(slixmpp.ClientXMPP):
# an empty form instead of editing a form.
# keywords = sqlite.get_filter_value(db_file, key)
keywords = ''
val = await config.add_to_list(val, keywords) if val else ''
val = config.add_to_list(val, keywords) if val else ''
if sqlite.is_filter_key(db_file, key):
await sqlite.update_filter_value(db_file, [key, val])
elif val:
@ -1700,7 +1726,7 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(var='jid',
ftype='hidden',
value=jid_bare)
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
num = 100
match values['action']:
case 'all':
@ -1760,18 +1786,18 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(var='jid',
ftype='hidden',
value=jid)
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
title = sqlite.get_entry_title(db_file, ix)
title = title[0] if title else 'Untitled'
form['instructions'] = title
url = sqlite.get_entry_url(db_file, ix)
url = url[0] # TODO Handle a situation when index is no longer exist
logger.debug('Original URL: {}'.format(url))
url = Url.remove_tracking_parameters(url)
url = Url.remove_tracking_parameters(self.trackers, url)
logger.debug('Processed URL (tracker removal): {}'.format(url))
url = (await Url.replace_hostname(url, 'link')) or url
url = (await Url.replace_hostname(self.dir_config, self.proxies, self.settings_network, url, 'link')) or url
logger.debug('Processed URL (replace hostname): {}'.format(url))
# result = await fetch.http(url)
# result = await fetch.http(self.settings_network, url)
# if 'content' in result:
# data = result['content']
# summary = action.get_document_content_as_text(data)
@ -1836,7 +1862,7 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(var='jid',
ftype='hidden',
value=jid_bare)
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if identifier and sqlite.check_identifier_exist(db_file, identifier):
form['title'] = 'Conflict'
form['instructions'] = ('Name "{}" already exists. Choose a '
@ -2039,7 +2065,7 @@ class XmppClient(slixmpp.ClientXMPP):
if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values:
jid_bare = values['jid'][0]
del values['jid']
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
for key in values:
value = 1 if values[key] else 0
await sqlite.set_enabled_status(db_file, key, value)
@ -2064,7 +2090,7 @@ class XmppClient(slixmpp.ClientXMPP):
if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values:
jid_bare = values['jid'][0]
del values['jid']
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
subscriptions =''
ixs = values['subscriptions']
for ix in ixs:
@ -2297,7 +2323,7 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(ftype='hidden',
value=jid_bare,
var='jid')
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
match values['action']:
case 'browse':
form['instructions'] = 'Editing subscriptions'
@ -2391,7 +2417,7 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(ftype='hidden',
value=jid_bare,
var='jid')
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
tag_id = values['tag']
tag_name = sqlite.get_tag_name(db_file, tag_id)[0]
form['instructions'] = 'Subscriptions tagged with "{}"'.format(tag_name)
@ -2429,7 +2455,7 @@ class XmppClient(slixmpp.ClientXMPP):
form.add_field(ftype='hidden',
value=jid_bare,
var='jid')
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if 'subscription' in values: urls = values['subscription']
elif 'subscriptions' in values: urls = values['subscriptions']
url_count = len(urls)
@ -2522,7 +2548,7 @@ class XmppClient(slixmpp.ClientXMPP):
values = payload['values']
if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values:
jid_bare = values['jid'][0]
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
# url = values['url']
# feed_id = sqlite.get_feed_id(db_file, url)
# feed_id = feed_id[0]
@ -2923,8 +2949,8 @@ class XmppClient(slixmpp.ClientXMPP):
if XmppUtilities.is_operator(self, jid_bare) and 'jid' in values:
jid = values['jid']
jid_bare = jid[0] if isinstance(jid, list) else jid
db_file = config.get_pathname_to_database(jid_bare)
result = await fetch.http(url)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
result = await fetch.http(self.settings_network, url)
count = await Opml.import_from_file(db_file, result)
try:
int(count)
@ -3011,7 +3037,7 @@ class XmppClient(slixmpp.ClientXMPP):
form = self['xep_0004'].make_form('form', 'Subscribe')
# NOTE Refresh button would be of use
form['instructions'] = 'Featured subscriptions'
url = Utilities.pick_a_feed()
url = Utilities.pick_a_feed(self.dir_config)
# options = form.add_field(desc='Click to subscribe.',
# ftype="boolean",
# label='Subscribe to {}?'.format(url['name']),
@ -3024,13 +3050,13 @@ class XmppClient(slixmpp.ClientXMPP):
label='Subscribe',
var='subscription')
for i in range(10):
url = Utilities.pick_a_feed()
url = Utilities.pick_a_feed(self.dir_config)
options.addOption(url['name'], url['link'])
# jid_bare = session['from'].bare
if '@' in jid_bare:
hostname = jid_bare.split('@')[1]
url = 'http://' + hostname
result = await FeedDiscovery.probe_page(url)
result = await FeedDiscovery.probe_page(self.settings_network, self.pathnames, url)
if not result:
url = {'url' : url,
'index' : None,
@ -3448,7 +3474,7 @@ class XmppClient(slixmpp.ClientXMPP):
if key:
jid_bare = key
value = values[key]
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
await Config.set_setting_value(
@ -3711,7 +3737,7 @@ class XmppClient(slixmpp.ClientXMPP):
jid_bare = session['from'].bare
chat_type = await XmppUtilities.get_chat_type(self, jid_bare)
if XmppUtilities.is_access(self, jid, chat_type):
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
form = self['xep_0004'].make_form('form', 'Settings')
@ -3828,7 +3854,7 @@ class XmppClient(slixmpp.ClientXMPP):
logger.debug('{}: jid_full: {}'
.format(function_name, jid_full))
jid_bare = session['from'].bare
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
# In this case (as is typical), the payload is a form

View file

@ -4,7 +4,6 @@
from feedparser import parse
import os
from random import randrange
import slixfeed.config as config
from slixfeed.config import Config
import slixfeed.fetch as fetch
from slixfeed.log import Logger
@ -39,21 +38,21 @@ logger = Logger(__name__)
class XmppCommands:
def print_help():
result = Documentation.manual('commands.toml')
def print_help(dir_config):
result = Documentation.manual(dir_config)
message = '\n'.join(result)
return message
def print_help_list():
command_list = Documentation.manual('commands.toml', section='all')
def print_help_list(dir_config):
command_list = Documentation.manual(dir_config, section='all')
message = ('Complete list of commands:\n'
f'```\n{command_list}\n```')
return message
def print_help_specific(command_root, command_name):
command_list = Documentation.manual('commands.toml',
def print_help_specific(dir_config, command_root, command_name):
command_list = Documentation.manual(dir_config,
section=command_root,
command=command_name)
if command_list:
@ -64,8 +63,8 @@ class XmppCommands:
return message
def print_help_key(command):
command_list = Documentation.manual('commands.toml', command)
def print_help_key(dir_config, command):
command_list = Documentation.manual(dir_config, command)
if command_list:
command_list = ' '.join(command_list)
message = (f'Available command `{command}` keys:\n'
@ -133,7 +132,7 @@ class XmppCommands:
identifier)
feed_id = sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
result = await fetch.http(url)
result = await fetch.http(self.settings_network, url)
if not result['error']:
document = result['content']
feed = parse(document)
@ -221,9 +220,9 @@ class XmppCommands:
keywords = sqlite.get_filter_value(db_file, 'allow')
if keywords: keywords = str(keywords[0])
if axis:
val = await config.add_to_list(val, keywords)
val = config.add_to_list(val, keywords)
else:
val = await config.remove_from_list(val, keywords)
val = config.remove_from_list(val, keywords)
if sqlite.is_filter_key(db_file, 'allow'):
await sqlite.update_filter_value(db_file, ['allow', val])
else:
@ -267,12 +266,12 @@ class XmppCommands:
async def restore_default(self, jid_bare, key=None):
if key:
self.settings[jid_bare][key] = None
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
await sqlite.delete_setting(db_file, key)
message = f'Setting {key} has been restored to default value.'
else:
del self.settings[jid_bare]
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
await sqlite.delete_settings(db_file)
message = 'Default settings have been restored.'
return message
@ -316,9 +315,9 @@ class XmppCommands:
keywords = sqlite.get_filter_value(db_file, 'deny')
if keywords: keywords = str(keywords[0])
if axis:
val = await config.add_to_list(val, keywords)
val = config.add_to_list(val, keywords)
else:
val = await config.remove_from_list(val, keywords)
val = config.remove_from_list(val, keywords)
if sqlite.is_filter_key(db_file, 'deny'):
await sqlite.update_filter_value(db_file, ['deny', val])
else:
@ -338,7 +337,7 @@ class XmppCommands:
async def import_opml(self, db_file, jid_bare, command):
url = command
result = await fetch.http(url)
result = await fetch.http(self.settings_network, url)
count = await Opml.import_from_file(db_file, result)
if count:
message = f'Successfully imported {count} feeds.'
@ -382,7 +381,7 @@ class XmppCommands:
jid = info[0]
if '/' not in jid:
url = info[1]
db_file = config.get_pathname_to_database(jid)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid}.db')
if len(info) > 2:
identifier = info[2]
else:
@ -408,7 +407,7 @@ class XmppCommands:
url.startswith('itpc:/') or
url.startswith('rss:/')):
url = Url.feed_to_http(url)
url = (await Url.replace_hostname(url, 'feed')) or url
url = (await Url.replace_hostname(self.dir_config, self.proxies, self.settings_network, url, 'feed')) or url
result = await Feed.add_feed(self, jid_bare, db_file, url,
identifier)
if isinstance(result, list):
@ -471,7 +470,7 @@ class XmppCommands:
async def fetch_http(self, url, db_file, jid_bare):
if url.startswith('feed:/') or url.startswith('rss:/'):
url = Url.feed_to_http(url)
url = (await Url.replace_hostname(url, 'feed')) or url
url = (await Url.replace_hostname(self.dir_config, self.proxies, self.settings_network, url, 'feed')) or url
counter = 0
while True:
identifier = String.generate_identifier(url, counter)
@ -704,7 +703,7 @@ class XmppCommands:
# response = (
# f'Every update will contain {response} news items.'
# )
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
await Config.set_setting_value(
self, jid_bare, db_file, 'quantum', val_new)
message = f'Next update will contain {val_new} news items (was: {val_old}).'
@ -724,12 +723,12 @@ class XmppCommands:
async def feed_read(self, jid_bare, data, url):
if url.startswith('feed:/') or url.startswith('rss:/'):
url = Url.feed_to_http(url)
url = (await Url.replace_hostname(url, 'feed')) or url
url = (await Url.replace_hostname(self.dir_config, self.proxies, self.settings_network, url, 'feed')) or url
match len(data):
case 1:
if url.startswith('http'):
while True:
result = await fetch.http(url)
result = await fetch.http(self.settings_network, url)
status = result['status_code']
if result and not result['error']:
document = result['content']
@ -738,7 +737,7 @@ class XmppCommands:
message = Feed.view_feed(url, feed)
break
else:
result = await FeedDiscovery.probe_page(url, document)
result = await FeedDiscovery.probe_page(self.settings_network, self.pathnames, url, document)
if isinstance(result, list):
results = result
message = f"Syndication feeds found for {url}\n\n```\n"
@ -763,7 +762,7 @@ class XmppCommands:
num = data[1]
if url.startswith('http'):
while True:
result = await fetch.http(url)
result = await fetch.http(self.settings_network, url)
if result and not result['error']:
document = result['content']
status = result['status_code']
@ -772,7 +771,7 @@ class XmppCommands:
message = Feed.view_entry(url, feed, num)
break
else:
result = await FeedDiscovery.probe_page(url, document)
result = await FeedDiscovery.probe_page(self.settings_network, self.pathnames, url, document)
if isinstance(result, list):
results = result
message = f"Syndication feeds found for {url}\n\n```\n"

View file

@ -254,9 +254,13 @@ class StorageImpl(Storage):
Example storage implementation that stores all data in a single JSON file.
"""
omemo_dir = Data.get_pathname_to_omemo_directory()
dir_data = Data.get_directory()
omemo_dir = os.path.join(dir_data, 'omemo')
JSON_FILE = os.path.join(omemo_dir, 'omemo.json')
# TODO Pass JID
#JSON_FILE = os.path.join(omemo_dir, f'{jid_bare}.json')
def __init__(self) -> None:
super().__init__()

View file

@ -11,7 +11,6 @@ socket (i.e. clients[fd]) from the respective client.
import asyncio
import os
import slixfeed.config as config
from slixfeed.syndication import FeedTask
from slixfeed.xmpp.chat import XmppChatTask
from slixfeed.xmpp.commands import XmppCommands
@ -85,7 +84,7 @@ class XmppIpcServer:
if '~' in data:
data_list = data.split('~')
jid_bare = data_list[0]
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
command = data_list[1]
else:
command = data
@ -231,7 +230,7 @@ class XmppIpcServer:
command.startswith('itpc:/') or
command.startswith('rss:/')):
response = await XmppCommands.fetch_http(
self, command, db_file, jid_bare)
self.settings_network, command, db_file, jid_bare)
case _ if command.startswith('interval'):
val = command[9:]
if val:

View file

@ -27,7 +27,6 @@ TODO
import glob
from slixfeed.config import Config
import slixfeed.config as config
from slixfeed.log import Logger
from slixmpp.exceptions import IqTimeout, IqError
import os
@ -109,8 +108,7 @@ def set_identity(self, category):
async def set_vcard(self):
vcard = self.plugin['xep_0054'].make_vcard()
profile = config.get_values('accounts.toml', 'xmpp')['profile']
for key in profile:
vcard[key] = profile[key]
profile = self.data_accounts_xmpp['profile']
for key in profile: vcard[key] = profile[key]
await self.plugin['xep_0054'].publish_vcard(vcard)

View file

@ -9,9 +9,9 @@ Functions create_node and create_entry are derived from project atomtopubsub.
import asyncio
import hashlib
import os
import slixmpp.plugins.xep_0060.stanza.pubsub as pubsub
from slixmpp.xmlstream import ET
import slixfeed.config as config
from slixfeed.config import Config
from slixfeed.log import Logger
import slixfeed.sqlite as sqlite
@ -259,7 +259,7 @@ class XmppPubsubAction:
async def send_selected_entry(self, jid_bare, node_id, entry_id):
function_name = sys._getframe().f_code.co_name
logger.debug('{}: jid_bare: {}'.format(function_name, jid_bare))
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
feed_id = sqlite.get_feed_id_by_entry_index(db_file, entry_id)
feed_id = feed_id[0]
node_id, node_title, node_subtitle = sqlite.get_feed_properties(db_file, feed_id)
@ -298,7 +298,7 @@ class XmppPubsubAction:
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}: jid_bare: {}'.format(function_name, jid_bare))
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
report = {}
subscriptions = sqlite.get_active_feeds_url(db_file)
for url in subscriptions:
@ -372,7 +372,7 @@ class XmppPubsubTask:
async def loop_task(self, jid_bare):
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
while True:
@ -393,7 +393,7 @@ class XmppPubsubTask:
def restart_task(self, jid_bare):
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
if jid_bare not in self.task_manager:
@ -411,7 +411,7 @@ class XmppPubsubTask:
async def task_publish(self, jid_bare):
db_file = config.get_pathname_to_database(jid_bare)
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
if jid_bare not in self.settings:
Config.add_settings_jid(self, jid_bare, db_file)
while True:

View file

@ -2,8 +2,8 @@
# -*- coding: utf-8 -*-
import asyncio
import os
from slixfeed.config import Config
import slixfeed.config as config
import slixfeed.sqlite as sqlite
from slixfeed.log import Logger
from slixfeed.xmpp.presence import XmppPresence
@ -25,11 +25,11 @@ class XmppStatus:
Jabber ID.
"""
function_name = sys._getframe().f_code.co_name
logger.debug('{}: jid: {}'.format(function_name, jid_bare))
logger.debug(f'{function_name}: jid: {jid_bare}')
status_text = '📜️ Slixfeed RSS News Bot'
db_file = config.get_pathname_to_database(jid_bare)
enabled = Config.get_setting_value(self, jid_bare, 'enabled')
if enabled:
db_file = os.path.join(self.dir_data, 'sqlite', f'{jid_bare}.db')
jid_task = self.pending_tasks[jid_bare] if jid_bare in self.pending_tasks else None
if jid_task and len(jid_task):
# print('status dnd for ' + jid_bare)
@ -47,7 +47,7 @@ class XmppStatus:
if unread:
# print('status unread for ' + jid_bare)
status_mode = 'chat'
status_text = '📬️ There are {} news items'.format(str(unread))
status_text = f'📬️ There are {str(unread)} news items'
else:
# print('status no news for ' + jid_bare)
status_mode = 'away'
@ -73,14 +73,13 @@ class XmppStatusTask:
return
if jid_bare not in self.task_manager:
self.task_manager[jid_bare] = {}
logger.info('Creating new task manager for JID {}'.format(jid_bare))
logger.info('Stopping task "status" for JID {}'.format(jid_bare))
logger.info('Creating new task manager for JID {jid_bare}')
logger.info('Stopping task "status" for JID {jid_bare}')
try:
self.task_manager[jid_bare]['status'].cancel()
except:
logger.info('No task "status" for JID {} (XmppStatusTask.start_task)'
.format(jid_bare))
logger.info('Starting tasks "status" for JID {}'.format(jid_bare))
logger.info(f'No task "status" for JID {jid_bare} (XmppStatusTask.start_task)')
logger.info(f'Starting tasks "status" for JID {jid_bare}')
self.task_manager[jid_bare]['status'] = asyncio.create_task(
XmppStatusTask.task_status(self, jid_bare))
@ -90,5 +89,4 @@ class XmppStatusTask:
'status' in self.task_manager[jid_bare]):
self.task_manager[jid_bare]['status'].cancel()
else:
logger.debug('No task "status" for JID {}'
.format(jid_bare))
logger.debug(f'No task "status" for JID {jid_bare}')