Fix various errors resulting from the restructuring of the code;

Add missing dependencies and files;
Thank you to LeXofLeviafan and roughnecks.
Schimon Jehudah, Adv. 2024-11-17 14:00:31 +02:00
parent 77ac4c0ed9
commit 24dbadf7dc
12 changed files with 307 additions and 284 deletions


@ -12,10 +12,12 @@ TODO
"""
import argparse
from blasta.config import Share
from blasta.http.instance import HttpInstance
from blasta.sqlite import SQLite
from blasta.database.sqlite import DatabaseSQLite
import json
import logging
import os
from os.path import getsize, exists
import sys
import time
@ -32,8 +34,10 @@ except:
def main():
if not exists('main.sqlite') or not getsize('main.sqlite'):
SQLite.instantiate_database('main.sqlite')
directory_data = Share.get_directory()
db_file = os.path.join(directory_data, 'main.sqlite')
if not exists(db_file) or not getsize(db_file):
DatabaseSQLite.instantiate_database(db_file)
accounts = {}
sessions = {}
http_instance = HttpInstance(accounts, sessions)
@ -41,7 +45,6 @@ def main():
app = main()
# FIXME
if __name__ == 'blasta.__main__':
parser = argparse.ArgumentParser(
prog='blasta',
@ -52,7 +55,7 @@ if __name__ == 'blasta.__main__':
parser.add_argument('-p', '--port', help='port number', dest='port')
parser.add_argument('-o', '--open', help='open an html browser', action='store_const', const=True, dest='open')
args = parser.parse_args()
port = args.port if args.port else 8000
port = args.port or 8000
uvicorn.run(app, host='localhost', port=port)
if args.open:
# TODO Check first time
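For orientation, a minimal sketch of the revised startup flow, using only the helpers imported above; it assumes Share.get_directory() returns a writable data directory, since that helper's body is not part of this diff.

import os
from os.path import exists, getsize

from blasta.config import Share
from blasta.database.sqlite import DatabaseSQLite

def resolve_database() -> str:
    # Build the database path inside the shared data directory and create
    # the schema only when the file is absent or empty, as main() now does.
    directory_data = Share.get_directory()
    db_file = os.path.join(directory_data, 'main.sqlite')
    if not exists(db_file) or not getsize(db_file):
        DatabaseSQLite.instantiate_database(db_file)
    return db_file

A side note: argparse delivers --port as a string unless type=int is passed to add_argument, so coercing the value before handing it to uvicorn.run() may be worth a follow-up.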

Binary file not shown (image, 318 B).


@ -8,7 +8,7 @@ import time
DBLOCK = Lock()
class SQLite:
class DatabaseSQLite:
def instantiate_database(db_file):
# db_dir = get_default_data_directory()
@ -17,8 +17,8 @@ class SQLite:
# if not os.path.isdir(db_dir + "/sqlite"):
# os.mkdir(db_dir + "/sqlite")
# db_file = os.path.join(db_dir, "sqlite", r"{}.db".format(jid_file))
SQLite.create_tables(db_file)
SQLite.add_statistics(db_file)
DatabaseSQLite.create_tables(db_file)
DatabaseSQLite.add_statistics(db_file)
return db_file
#from slixfeed.log import Logger
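The blocks below repeatedly use DatabaseSQLite.create_connection() as a context manager. Its implementation is outside the hunks shown here, so the following is only an assumed sketch of the pattern being relied upon: sqlite3.connect() returns a connection whose with-block commits on success and rolls back on error (it does not close the connection).

import sqlite3

def create_connection(db_file):
    # Assumed shape of the helper: a plain sqlite3 connection, used below as
    # `with DatabaseSQLite.create_connection(db_file) as conn:`.
    return sqlite3.connect(db_file)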
@ -75,7 +75,7 @@ class SQLite:
function_name = sys._getframe().f_code.co_name
# logger.debug('{}: db_file: {}'
# .format(function_name, db_file))
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
sql_table_main_entries = (
"""
CREATE TABLE IF NOT EXISTS main_entries (
@ -510,7 +510,7 @@ class SQLite:
('tags');
"""
)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
try:
cur.execute(sql)
@ -519,16 +519,16 @@ class SQLite:
async def associate_entries_tags_jids(db_file, entry):
async with DBLOCK:
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
jid = entry['jid']
url_hash = entry['url_hash']
entry_id = SQLite.get_entry_id_by_url_hash(db_file, url_hash)
jid_id = SQLite.get_jid_id_by_jid(db_file, jid)
entry_id = DatabaseSQLite.get_entry_id_by_url_hash(db_file, url_hash)
jid_id = DatabaseSQLite.get_jid_id_by_jid(db_file, jid)
if entry_id:
for tag in entry['tags']:
tag_id = SQLite.get_tag_id_by_tag(db_file, tag)
cet_id = SQLite.get_combination_id_by_entry_id_tag_id_jid_id(db_file, entry_id, tag_id, jid_id)
tag_id = DatabaseSQLite.get_tag_id_by_tag(db_file, tag)
cet_id = DatabaseSQLite.get_combination_id_by_entry_id_tag_id_jid_id(db_file, entry_id, tag_id, jid_id)
if not cet_id:
sql = (
"""
@ -565,7 +565,7 @@ class SQLite:
# logger.debug('{}: db_file: {}'
# .format(function_name, db_file))
async with DBLOCK:
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
for entry in entries:
tags = entry['tags']
@ -575,7 +575,7 @@ class SQLite:
# INSERT OR IGNORE INTO main_tags(tag) VALUES (?);
# """
# )
if not SQLite.get_tag_id_by_tag(db_file, tag):
if not DatabaseSQLite.get_tag_id_by_tag(db_file, tag):
sql = (
"""
INSERT INTO main_tags(tag) VALUES(?);
@ -607,7 +607,7 @@ class SQLite:
# .format(function_name, db_file))
async with DBLOCK:
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
for entry in entries:
@ -621,7 +621,7 @@ class SQLite:
# instances = entry['instances']
# Import entries
jid_id = SQLite.get_jid_id_by_jid(db_file, jid)
jid_id = DatabaseSQLite.get_jid_id_by_jid(db_file, jid)
sql = (
"""
INSERT
@ -672,7 +672,7 @@ class SQLite:
)
par = (jid, )
async with DBLOCK:
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
try:
cur.execute(sql, par)
@ -705,7 +705,7 @@ class SQLite:
WHERE type = "entries";
"""
)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql).fetchone()
return result[0] if result and len(result) == 1 else result
@ -745,7 +745,7 @@ class SQLite:
"tag_id": tag_id,
"jid_id": jid_id
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -783,7 +783,7 @@ class SQLite:
"""
)
async with DBLOCK:
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
for tag in tags:
par = {
"url_hash": url_hash,
@ -820,7 +820,7 @@ class SQLite:
"""
)
par = (tag,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
# return result[0] if result else None, None
@ -857,7 +857,7 @@ class SQLite:
"""
)
par = (url_hash,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -891,7 +891,7 @@ class SQLite:
"""
)
par = (entry_id,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -926,7 +926,7 @@ class SQLite:
"""
)
par = (entry_id,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -962,7 +962,7 @@ class SQLite:
"""
)
par = (url_hash,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -994,7 +994,7 @@ class SQLite:
"""
)
par = (tag,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -1026,7 +1026,7 @@ class SQLite:
"""
)
par = (url_hash,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -1058,7 +1058,7 @@ class SQLite:
"""
)
par = (url_hash,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -1090,7 +1090,7 @@ class SQLite:
"""
)
par = (url_hash,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -1125,7 +1125,7 @@ class SQLite:
"""
)
par = (index_first,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1160,7 +1160,7 @@ class SQLite:
"""
)
par = (index_first,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1195,7 +1195,7 @@ class SQLite:
"""
)
par = (index_first,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1236,7 +1236,7 @@ class SQLite:
"query": f'%{query}%',
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1272,7 +1272,7 @@ class SQLite:
par = {
"query": f'%{query}%',
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -1319,7 +1319,7 @@ class SQLite:
"tag": tag,
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1360,7 +1360,7 @@ class SQLite:
"jid": jid,
"tag": tag
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -1406,7 +1406,7 @@ class SQLite:
"query": f'%{query}%',
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1446,7 +1446,7 @@ class SQLite:
"jid": jid,
"query": f'%{query}%'
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -1489,7 +1489,7 @@ class SQLite:
"jid": jid,
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1526,7 +1526,7 @@ class SQLite:
par = {
"jid": jid
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -1560,7 +1560,7 @@ class SQLite:
"""
)
par = (tag,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -1602,7 +1602,7 @@ class SQLite:
"tag": tag,
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1644,7 +1644,7 @@ class SQLite:
"tag": tag,
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1686,7 +1686,7 @@ class SQLite:
"tag": tag,
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1716,7 +1716,7 @@ class SQLite:
LIMIT 30;
"""
)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql).fetchall()
return result
@ -1757,7 +1757,7 @@ class SQLite:
"""
)
par = (index_first,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1804,7 +1804,7 @@ class SQLite:
"tag": tag,
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1851,7 +1851,7 @@ class SQLite:
"tag": tag,
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1898,7 +1898,7 @@ class SQLite:
"tag": tag,
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1939,7 +1939,7 @@ class SQLite:
"""
)
par = (index_first,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -1980,7 +1980,7 @@ class SQLite:
"""
)
par = (index_first,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -2027,7 +2027,7 @@ class SQLite:
"query": f'%{query}%',
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -2079,7 +2079,7 @@ class SQLite:
"tag": tag,
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -2131,7 +2131,7 @@ class SQLite:
"query": f'%{query}%',
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -2178,7 +2178,7 @@ class SQLite:
"jid": jid,
"index_first": index_first
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -2213,7 +2213,7 @@ class SQLite:
ORDER BY tag ASC;
"""
)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql).fetchall()
return result
@ -2251,7 +2251,7 @@ class SQLite:
par = {
"jid": jid
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -2290,7 +2290,7 @@ class SQLite:
par = {
"jid": jid
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -2331,7 +2331,7 @@ class SQLite:
"jid": jid,
"url_hash": url_hash
}
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -2374,7 +2374,7 @@ class SQLite:
"url_hash": url_hash
}
async with DBLOCK:
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
cur.execute(sql, par)
@ -2412,7 +2412,7 @@ class SQLite:
"""
)
par = (entry_id,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchall()
return result
@ -2444,7 +2444,7 @@ class SQLite:
"""
)
par = (jid,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
@ -2476,7 +2476,7 @@ class SQLite:
"""
)
par = (jid_id,)
with SQLite.create_connection(db_file) as conn:
with DatabaseSQLite.create_connection(db_file) as conn:
cur = conn.cursor()
result = cur.execute(sql, par).fetchone()
return result[0] if result and len(result) == 1 else result
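The query helpers above that call fetchone() share one return convention: fetchone() yields a tuple, single-column results are unwrapped, and multi-column rows are returned whole. A small self-contained illustration of that convention (the table is hypothetical):

import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE demo (id INTEGER, name TEXT)')
conn.execute("INSERT INTO demo VALUES (1, 'blasta')")

result = conn.execute('SELECT id FROM demo').fetchone()
print(result[0] if result and len(result) == 1 else result)  # 1 (unwrapped)

result = conn.execute('SELECT id, name FROM demo').fetchone()
print(result[0] if result and len(result) == 1 else result)  # (1, 'blasta')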


@ -1,27 +0,0 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from datetime import datetime
import hashlib
class Utilities:
def convert_iso8601_to_readable(timestamp):
old_date_format = datetime.fromisoformat(timestamp.replace("Z", "+00:00"))
new_date_format = old_date_format.strftime("%B %d, %Y")
return new_date_format
def hash_url_to_md5(url):
url_encoded = url.encode()
url_hashed = hashlib.md5(url_encoded)
url_digest = url_hashed.hexdigest()
return url_digest
def is_jid_matches_to_session(accounts, sessions, request):
jabber_id = request.cookies.get('jabber_id')
session_key = request.cookies.get('session_key')
if (jabber_id and
jabber_id in accounts and
jabber_id in sessions and
session_key == sessions[jabber_id]):
return jabber_id
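The removal above does not drop functionality: each method of the old Utilities class reappears later in this commit in a purpose-specific module. A rough mapping for callers, based on the new files added below:

# Utilities.convert_iso8601_to_readable  ->  UtilitiesDate (blasta/utilities/date.py)
# Utilities.hash_url_to_md5              ->  UtilitiesCryptography (blasta/utilities/cryptography.py)
# Utilities.is_jid_matches_to_session    ->  UtilitiesHttp (blasta/utilities/http.py)
from blasta.utilities.cryptography import UtilitiesCryptography
from blasta.utilities.date import UtilitiesDate
from blasta.utilities.http import UtilitiesHttp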

File diff suppressed because it is too large.


@ -0,0 +1,12 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
import hashlib
class UtilitiesCryptography:
def hash_url_to_md5(url):
url_encoded = url.encode()
url_hashed = hashlib.md5(url_encoded)
url_digest = url_hashed.hexdigest()
return url_digest
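A brief usage sketch of the extracted helper; the URL is illustrative, and the resulting digest is what the data layer later in this diff compares against PubSub item ids:

from blasta.utilities.cryptography import UtilitiesCryptography

url_hash = UtilitiesCryptography.hash_url_to_md5('https://example.org/article')
print(url_hash)  # 32-character hexadecimal MD5 digest of the URL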


@ -1,9 +1,9 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from blasta.helpers.utilities import Utilities
from blasta.sqlite import SQLite
from blasta.xml.syndication import Syndication
from blasta.database.sqlite import DatabaseSQLite
from blasta.utilities.cryptography import UtilitiesCryptography
from blasta.utilities.syndication import UtilitiesSyndication
from blasta.xmpp.pubsub import XmppPubsub
import os
from slixmpp.stanza.iq import Iq
@ -14,7 +14,7 @@ try:
except:
import tomli as tomllib
class Data:
class UtilitiesData:
def cache_items_and_tags_search(directory_cache, entries, jid, query):
"""Create a cache file of node items and tags."""
@ -60,7 +60,7 @@ class Data:
directory = os.path.join(directory_cache, 'data', jid)
if not os.path.exists(directory):
os.mkdir(directory)
filename = os.path.join(directory, tag)
filename = os.path.join(directory, tag + '.toml')
# Add support for search query
#filename = 'data/{}/query:{}.toml'.format(jid, query)
#filename = 'data/{}/tag:{}.toml'.format(jid, tag)
@ -103,19 +103,18 @@ class Data:
entries.reverse()
return entries
def extract_iq_items_extra(iq, jabber_id, limit=None):
def extract_iq_items_extra(db_file, iq, jabber_id, limit=None):
iq_items = iq['pubsub']['items']
entries = []
name = jabber_id.split('@')[0]
for iq_item in iq_items:
item_payload = iq_item['payload']
entry = Syndication.extract_items(item_payload, limit)
url_hash = Utilities.hash_url_to_md5(entry['link'])
url_hash = UtilitiesCryptography.hash_url_to_md5(entry['link'])
iq_item_id = iq_item['id']
if iq_item_id != url_hash:
logging.error('Item ID does not match MD5. id: {} hash: {}'.format(iq_item_id, url_hash))
logging.warn('Item ID does not match MD5. id: {} hash: {}'.format(iq_item_id, url_hash))
db_file = 'main.sqlite'
instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
if entry:
entry['instances'] = instances or 0
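Since the hard-coded db_file = 'main.sqlite' is gone, callers of extract_iq_items_extra() now supply the database path themselves. A hedged call-site sketch: the UtilitiesData import path is an assumption (the file name is not visible in this view), and iq stands for a previously fetched pubsub items result.

from blasta.utilities.data import UtilitiesData  # module path assumed

def collect_entries(db_file, iq, jabber_id):
    # db_file is the path resolved at startup; limit is forwarded to the
    # syndication extractor, as in the body above.
    return UtilitiesData.extract_iq_items_extra(db_file, iq, jabber_id, limit=10)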
@ -166,7 +165,8 @@ class Data:
data_as_string = tomli_w.dumps(data)
fn.write(data_as_string)
async def update_cache_and_database(directory_cache, xmpp_instance, jabber_id: str, node_type: str, node_id: str):
async def update_cache_and_database(
db_file, directory_cache, xmpp_instance, jabber_id: str, node_type: str, node_id: str):
# Download identifiers of node items.
iq = await XmppPubsub.get_node_item_ids(xmpp_instance, jabber_id, node_id)
if isinstance(iq, Iq):
@ -196,7 +196,6 @@ class Data:
entries_cache = Data.open_file_toml(filename_items)
if not node_type in entries_cache: return ['error', 'Directory "{}" is empty'. format(node_type)]
entries_cache_node = entries_cache[node_type]
db_file = 'main.sqlite'
# Check whether items still exist on node
for entry in entries_cache_node:
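update_cache_and_database() likewise takes db_file as its first argument now, rather than assuming main.sqlite in the working directory. A hedged caller sketch; the import path, node_type and node_id values are placeholders for illustration only:

from blasta.utilities.data import UtilitiesData  # module path assumed

async def refresh_node(db_file, directory_cache, xmpp_instance, jabber_id):
    return await UtilitiesData.update_cache_and_database(
        db_file, directory_cache, xmpp_instance, jabber_id,
        node_type='public', node_id='example-node')  # placeholder values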

blasta/utilities/date.py (new file, 11 lines added)

@ -0,0 +1,11 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from datetime import datetime
class UtilitiesDate:
def convert_iso8601_to_readable(timestamp):
old_date_format = datetime.fromisoformat(timestamp.replace("Z", "+00:00"))
new_date_format = old_date_format.strftime("%B %d, %Y")
return new_date_format
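A quick usage sketch of the extracted date helper, using this commit's timestamp as sample input:

from blasta.utilities.date import UtilitiesDate

print(UtilitiesDate.convert_iso8601_to_readable('2024-11-17T12:00:31Z'))
# November 17, 2024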

blasta/utilities/http.py (new file, 13 lines added)

@ -0,0 +1,13 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
class UtilitiesHttp:
def is_jid_matches_to_session(accounts, sessions, request):
jabber_id = request.cookies.get('jabber_id')
session_key = request.cookies.get('session_key')
if (jabber_id and
jabber_id in accounts and
jabber_id in sessions and
session_key == sessions[jabber_id]):
return jabber_id
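A minimal sketch of how a FastAPI route could use the extracted session check; the route, the accounts and sessions stores, and the JID are placeholders, while the cookie names match the ones read above:

from fastapi import FastAPI, Request
from blasta.utilities.http import UtilitiesHttp

app = FastAPI()
accounts = {'alice@example.org': object()}      # placeholder account store
sessions = {'alice@example.org': 'secret-key'}  # placeholder session store

@app.get('/profile')
async def profile(request: Request):
    jabber_id = UtilitiesHttp.is_jid_matches_to_session(accounts, sessions, request)
    return {'jid': jabber_id} if jabber_id else {'error': 'not signed in'}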


@ -3,7 +3,7 @@
import xml.etree.ElementTree as ET
class Syndication:
class UtilitiesSyndication:
def create_rfc4287_entry(feed_entry):
node_entry = ET.Element('entry')


@ -1,13 +1,12 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
class Xml:
class DataForm:
def create_setting_entry(xmpp_instance, key : str, value : str):
form = xmpp_instance['xep_0004'].make_form('form', 'Settings')
form['type'] = 'result'
form.add_field(var=key,
value=value)
form.add_field(var=key, value=value)
return form
# def create_setting_entry(value : str):


@ -21,7 +21,9 @@ classifiers = [
"Topic :: Office/Business :: News/Diary",
]
keywords = [
"annotation",
"atom",
"bibliography",
"bookmark",
"collaboration",
"gemini",
@ -37,11 +39,13 @@ keywords = [
dependencies = [
"fastapi",
"jinja2",
"lxml",
"python-dateutil",
"python-multipart",
"slixmpp",
"tomli", # Python 3.10
"tomli-w",
"uvicorn",
]
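The conditional tomli entry pairs with the fallback import already used in the data module above: Python 3.11+ ships tomllib in the standard library, while Python 3.10 gets the same interface from the tomli package, and tomli-w covers writing. A sketch of that pattern:

try:
    import tomllib           # Python 3.11+
except ModuleNotFoundError:
    import tomli as tomllib  # Python 3.10, provided by the "tomli" dependency

import tomli_w               # writing TOML, provided by "tomli-w"

data = tomllib.loads('title = "blasta"')
print(tomli_w.dumps(data))   # title = "blasta"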