#!/usr/bin/env python3
# -*- coding: utf-8 -*-

import csv
from email.utils import parseaddr
import hashlib
from kaikout.database import DatabaseToml
from kaikout.log import Logger
#import kaikout.sqlite as sqlite
import os
import sys
import tomli_w
from urllib.parse import urlsplit

# Use the standard-library tomllib (Python 3.11+) and fall back to the
# compatible third-party tomli package on older interpreters.
try:
    import tomllib
except ImportError:
    import tomli as tomllib

logger = Logger(__name__)


class Config:


    def get_default_data_directory():
        """
        Determine the directory path where data will be stored.

        * If $XDG_DATA_HOME is defined, use it;
        * else if $HOME exists, use it;
        * else if the platform is Windows, use %APPDATA%;
        * else use the current directory.

        Returns
        -------
        str
            Path to data directory.
        """
        directory_data_home = os.environ.get('XDG_DATA_HOME')
        if directory_data_home:
            return os.path.join(directory_data_home, 'kaikout')
        directory_home = os.environ.get('HOME')
        if directory_home:
            data_home = os.path.join(directory_home, '.local', 'share')
            return os.path.join(data_home, 'kaikout')
        elif sys.platform == 'win32':
            data_home = os.environ.get('APPDATA')
            if data_home is None:
                return 'kaikout_data'
            return os.path.join(data_home, 'kaikout')
        else:
            return 'kaikout_data'
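
    # Example (illustrative sketch; the home directory is hypothetical and
    # $XDG_DATA_HOME is assumed to be unset):
    #
    #   >>> os.environ['HOME'] = '/home/kaiko'
    #   >>> Config.get_default_data_directory()
    #   '/home/kaiko/.local/share/kaikout'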

    def get_default_config_directory():
        """
        Determine the directory path where configuration will be stored.

        * If $XDG_CONFIG_HOME is defined, use it;
        * else if $HOME exists, use it;
        * else if the platform is Windows, use %APPDATA%;
        * else use the current directory.

        Returns
        -------
        str
            Path to configuration directory.
        """
        # directory_config_home = xdg.BaseDirectory.xdg_config_home
        directory_config_home = os.environ.get('XDG_CONFIG_HOME')
        if directory_config_home is None:
            directory_home = os.environ.get('HOME')
            if directory_home is None:
                if sys.platform == 'win32':
                    directory_config_home = os.environ.get('APPDATA')
                    if directory_config_home is None:
                        return 'kaikout_config'
                else:
                    return 'kaikout_config'
            else:
                directory_config_home = os.path.join(directory_home, '.config')
        return os.path.join(directory_config_home, 'kaikout')
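
    # Example (illustrative sketch; the home directory is hypothetical and
    # $XDG_CONFIG_HOME is assumed to be unset):
    #
    #   >>> os.environ['HOME'] = '/home/kaiko'
    #   >>> Config.get_default_config_directory()
    #   '/home/kaiko/.config/kaikout'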


class Documentation:


    def manual(filename, section=None, command=None):
        """
        Read the command manual from a TOML file and return the whole manual,
        a section, or a single command entry.

        Parameters
        ----------
        filename : str
            Filename of the TOML manual, relative to the configuration directory.
        section : str, optional
            Section name, or 'all' for the full manual.
        command : str, optional
            Command name within the given section.

        Returns
        -------
        cmd_list : str, list or None
            Matching documentation, or None if the lookup fails.
        """
        function_name = sys._getframe().f_code.co_name
        logger.debug('{}: filename: {}'.format(function_name, filename))
        config_dir = Config.get_default_config_directory()
        with open(os.path.join(config_dir, filename), mode="rb") as f:
            cmds = tomllib.load(f)
        if section == 'all':
            cmd_list = ''
            for cmd in cmds:
                for i in cmds[cmd]:
                    cmd_list += cmds[cmd][i] + '\n'
        elif command and section:
            try:
                cmd_list = cmds[section][command]
            except KeyError as e:
                logger.error(e)
                cmd_list = None
        elif section:
            try:
                cmd_list = []
                for cmd in cmds[section]:
                    cmd_list.append(cmd)
            except KeyError as e:
                logger.error('KeyError: ' + str(e))
                cmd_list = None
        else:
            cmd_list = []
            for cmd in cmds:
                cmd_list.append(cmd)
        return cmd_list
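
    # Example (illustrative sketch; the file name and its contents are
    # hypothetical, only the lookup behaviour is shown):
    #
    #   commands.toml:
    #       [info]
    #       help = "help : Print the command manual."
    #
    #   >>> Documentation.manual('commands.toml', section='info', command='help')
    #   'help : Print the command manual.'
    #   >>> Documentation.manual('commands.toml', section='info')
    #   ['help']
    #   >>> Documentation.manual('commands.toml')
    #   ['info']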


class Log:


    def jid_exist(filename, fields):
        """
        Check whether a Jabber ID already exists in the log.

        Parameters
        ----------
        filename : str
            Filename.
        fields : list
            jid, alias, timestamp.

        Returns
        -------
        bool
            True if the Jabber ID was found, otherwise False.
        """
        data_dir = Config.get_default_data_directory()
        if not os.path.isdir(data_dir): return False
        data_dir_logs = os.path.join(data_dir, 'logs')
        if not os.path.isdir(data_dir_logs): return False
        csv_file = os.path.join(data_dir_logs, f'{filename}.csv')
        if not os.path.exists(csv_file): return False
        with open(csv_file, 'r') as f:
            reader = csv.reader(f)
            for line in reader:
                if line[0] == fields[0]:
                    return True
        return False

    def alias_jid_exist(filename, fields):
        """
        Check whether the given alias and Jabber ID already exist in the log.

        Parameters
        ----------
        filename : str
            Filename.
        fields : list
            jid, alias, timestamp.

        Returns
        -------
        bool
            True if both the Jabber ID and the alias were found, otherwise False.
        """
        data_dir = Config.get_default_data_directory()
        if not os.path.isdir(data_dir): return False
        data_dir_logs = os.path.join(data_dir, 'logs')
        if not os.path.isdir(data_dir_logs): return False
        csv_file = os.path.join(data_dir_logs, f'{filename}.csv')
        if not os.path.exists(csv_file): return False
        with open(csv_file, 'r') as f:
            reader = csv.reader(f)
            for line in reader:
                if line[0] == fields[0] and line[1] == fields[1]:
                    return True
        return False

    def csv_jid(filename, fields):
        """
        Log Jabber ID to CSV file.

        Parameters
        ----------
        filename : str
            Filename.
        fields : list
            jid, alias, timestamp.

        Returns
        -------
        None.
        """
        data_dir = Config.get_default_data_directory()
        if not os.path.isdir(data_dir): os.mkdir(data_dir)
        data_dir_logs = os.path.join(data_dir, 'logs')
        if not os.path.isdir(data_dir_logs): os.mkdir(data_dir_logs)
        csv_file = os.path.join(data_dir_logs, f'{filename}.csv')
        if not os.path.exists(csv_file):
            columns = ['jid', 'alias', 'timestamp']
            with open(csv_file, 'w') as f:
                writer = csv.writer(f)
                writer.writerow(columns)
        with open(csv_file, 'a') as f:
            writer = csv.writer(f)
            writer.writerow(fields)
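
    # Example (illustrative sketch; the room name, JID and timestamp are
    # hypothetical):
    #
    #   >>> fields = ['juliet@capulet.example', 'Juliet', '1716200000.0']
    #   >>> Log.csv_jid('room@conference.example', fields)
    #   >>> Log.jid_exist('room@conference.example', fields)
    #   True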

    def csv(filename, fields):
        """
        Log message or presence to CSV file.

        Parameters
        ----------
        filename : str
            Filename.
        fields : list
            type, timestamp, alias, body, lang, and identifier.

        Returns
        -------
        None.
        """
        data_dir = Config.get_default_data_directory()
        if not os.path.isdir(data_dir): os.mkdir(data_dir)
        data_dir_logs = os.path.join(data_dir, 'logs')
        if not os.path.isdir(data_dir_logs): os.mkdir(data_dir_logs)
        csv_file = os.path.join(data_dir_logs, f'{filename}.csv')
        if not os.path.exists(csv_file):
            columns = ['type', 'timestamp', 'alias', 'body', 'lang', 'identifier']
            with open(csv_file, 'w') as f:
                writer = csv.writer(f)
                writer.writerow(columns)
        with open(csv_file, 'a') as f:
            writer = csv.writer(f)
            writer.writerow(fields)
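
    # Example (illustrative sketch; the values are hypothetical). Each call
    # appends one row to <data>/logs/<filename>.csv:
    #
    #   >>> Log.csv('room@conference.example',
    #   ...         ['message', '1716200000.0', 'Juliet', 'Hello', 'en', 'msg-1'])
    #
    #   resulting row:  message,1716200000.0,Juliet,Hello,en,msg-1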

    def toml(self, room, fields, stanza_type):
        """
        Log message to TOML file.

        Parameters
        ----------
        room : str
            Group chat Jabber ID.
        fields : list
            alias, body, identifier, timestamp.
        stanza_type : str
            message or presence.

        Returns
        -------
        None.
        """
        alias, content, identifier, timestamp = fields
        data_dir = DatabaseToml.get_default_data_directory()
        filename = DatabaseToml.get_data_file(data_dir, room)
        # filename = room + '.toml'
        entry = {}
        entry['alias'] = alias
        entry['body'] = content
        entry['id'] = identifier
        entry['timestamp'] = timestamp
        activity_type = 'activity_' + stanza_type
        message_activity_list = self.settings[room][activity_type] if activity_type in self.settings[room] else []
        # Keep only the most recent entries.
        while len(message_activity_list) > 20: message_activity_list.pop(0)
        message_activity_list.append(entry)
        self.settings[room][activity_type] = message_activity_list  # NOTE This directive might not be needed
        data = self.settings[room]
        content = tomli_w.dumps(data)
        with open(filename, 'w') as f:
            f.write(content)
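
    # Example (illustrative sketch; the room, settings structure and values are
    # hypothetical). Logging one message stanza appends an entry such as:
    #
    #   [[activity_message]]
    #   alias = "Juliet"
    #   body = "Hello"
    #   id = "msg-1"
    #   timestamp = "1716200000.0"
    #
    # to the room's TOML data file, with older entries trimmed from the front
    # of the list.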


class BlockList:


    def get_filename():
        """
        Get pathname of filename.
        If the file does not exist, create it.

        Parameters
        ----------
        None.

        Returns
        -------
        filename : str
            Pathname.
        """
        data_dir = Config.get_default_data_directory()
        if not os.path.isdir(data_dir): os.mkdir(data_dir)
        filename = os.path.join(data_dir, 'blocklist.toml')
        if not os.path.exists(filename):
            data = {'entries': {}}
            content = tomli_w.dumps(data)
            with open(filename, 'w') as f:
                f.write(content)
        return filename

    def add_entry_to_blocklist(self, jabber_id, node_id, item_id):
        """
        Update blocklist file.

        Parameters
        ----------
        jabber_id : str
            Jabber ID.
        node_id : str
            Node name.
        item_id : str
            Item ID.

        Returns
        -------
        None.
        """
        if jabber_id not in self.blocklist['entries']:
            self.blocklist['entries'][jabber_id] = {}
        if node_id not in self.blocklist['entries'][jabber_id]:
            self.blocklist['entries'][jabber_id][node_id] = []
        self.blocklist['entries'][jabber_id][node_id].append(item_id)
        data = self.blocklist
        content = tomli_w.dumps(data)
        filename = BlockList.get_filename()
        with open(filename, 'w') as f:
            f.write(content)
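
    # Example (illustrative sketch; the JID, node and item values are
    # hypothetical). After one call, blocklist.toml would contain roughly:
    #
    #   [entries."pubsub.example"]
    #   urn-example-node = ["item-1"]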


class String:


    def md5_hash(url):
        """
        Hash URL string to MD5 checksum.

        Parameters
        ----------
        url : str
            URL.

        Returns
        -------
        url_digest : str
            Hashed URL as an MD5 checksum.
        """
        url_encoded = url.encode()
        url_hashed = hashlib.md5(url_encoded)
        url_digest = url_hashed.hexdigest()
        return url_digest
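
    # Example (the URL is arbitrary; the digest is a 32-character hexadecimal
    # string):
    #
    #   >>> digest = String.md5_hash('https://example.org/')
    #   >>> len(digest)
    #   32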


class Toml:


    def open_file(filename: str) -> dict:
        """Read a TOML file and return its contents as a dictionary."""
        with open(filename, mode="rb") as fn:
            data = tomllib.load(fn)
            return data

    def save_file(filename: str, data: dict) -> None:
        """Serialize a dictionary to TOML and write it to a file."""
        with open(filename, 'w') as fn:
            data_as_string = tomli_w.dumps(data)
            fn.write(data_as_string)
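
    # Example (illustrative sketch; the path is hypothetical):
    #
    #   >>> Toml.save_file('/tmp/kaikout_example.toml', {'enabled': True})
    #   >>> Toml.open_file('/tmp/kaikout_example.toml')
    #   {'enabled': True}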


class Url:


    def check_xmpp_uri(uri):
        """
        Check validity of XMPP URI.

        Parameters
        ----------
        uri : str
            URI.

        Returns
        -------
        jid : str or bool
            JID if the URI carries a valid address, otherwise False.
        """
        jid = urlsplit(uri).path
        if parseaddr(jid)[1] != jid:
            jid = False
        return jid
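
    # Example (illustrative sketch; the address is hypothetical):
    #
    #   >>> Url.check_xmpp_uri('xmpp:juliet@capulet.example?join')
    #   'juliet@capulet.example'
    #
    # A URI whose path does not parse as an address yields False.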