forked from sch/Blasta
Change node names;
Soft-code (i.e. make configurable rather than hard-coded) node names; improve parsing of Netscape HTML bookmark files; add an IQ module; catch more exceptions; display the Jabber ID on the profile template.
parent a3d50fe8d8
commit 2103b061a2
8 changed files with 277 additions and 214 deletions
@@ -74,6 +74,12 @@
 This page provides a general survey of your XMPP account and
 stored bookmarks.
 </p>
+<h4 id="jid">
+Jabber ID
+</h4>
+<p>
+Your Jabber identifier is <a href="xmpp:{{jabber_id}}?message">{{jabber_id}}</a>.
+</p>
 <!--
 <h4 id="enrollment">
 Enrollment
@@ -471,7 +477,7 @@ retrieve items only if on a whitelist managed by the node owner.">
 to='{{jabber_id}}'
 id='delete1'>
 <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
-<delete node='urn:xmpp:bibliography:0'/>
+<delete node='blasta:annotation:0'/>
 </pubsub>
 </iq>
 </pre>
@@ -484,7 +490,7 @@ retrieve items only if on a whitelist managed by the node owner.">
 to='{{jabber_id}}'
 id='delete2'>
 <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
-<delete node='xmpp:bibliography:private:0'/>
+<delete node='blasta:annotation:private:0'/>
 </pubsub>
 </iq>
 </pre>
@@ -497,7 +503,7 @@ retrieve items only if on a whitelist managed by the node owner.">
 to='{{jabber_id}}'
 id='delete3'>
 <pubsub xmlns='http://jabber.org/protocol/pubsub#owner'>
-<delete node='xmpp:bibliography:read:0'/>
+<delete node='blasta:annotation:read:0'/>
 </pubsub>
 </iq>
 </pre>
@@ -13,22 +13,24 @@ journal = ""
 pubsub = ""
 
 # Bibliography
-node_id = "blasta:annotation:0"
-node_title = "Blasta"
-node_subtitle = "Annotation"
+node_public_id = "blasta:annotation:0"
+node_public_title = "Blasta (Public)"
+node_public_subtitle = "Public annotations"
 
 # Private bibliography
-node_id_private = "blasta:annotation:private:0"
-node_title_private = "Blasta (Private)"
-node_subtitle_private = "Private annotation"
+node_private_id = "blasta:annotation:private:0"
+node_private_title = "Blasta (Private)"
+node_private_subtitle = "Private annotations"
 
 # Reading list
-node_id_read = "blasta:annotation:read:0"
-node_title_read = "Blasta (Read)"
-node_subtitle_read = "Reading list"
+node_read_id = "blasta:annotation:read:0"
+node_read_title = "Blasta (Read)"
+node_read_subtitle = "Reading list"
 
 # Settings node
-node_settings = "blasta:settings:0"
+node_settings_id = "blasta:settings:0"
+node_settings_title = "Blasta (Settings)"
+node_settings_subtitle = "Blasta Settings Node"
 
 # Acceptable protocol types that would be aggregated to the Blasta database
 schemes = [
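Note: the renamed keys above are what the HTTP layer reads back at start-up (see the next hunks). As a hedged illustration only — the file name and the assumption that the keys sit at the top level are not shown in this diff — the soft-coded node names could be loaded like this:

import tomllib  # assumption: Python 3.11+ standard-library TOML reader

with open('settings.toml', 'rb') as settings_file:  # file name assumed
    settings = tomllib.load(settings_file)

# The keys match the renamed entries in this hunk.
node_public_id = settings['node_public_id']      # "blasta:annotation:0"
node_private_id = settings['node_private_id']    # "blasta:annotation:private:0"
node_read_id = settings['node_read_id']          # "blasta:annotation:read:0"
node_settings_id = settings['node_settings_id']  # "blasta:settings:0"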
@@ -11,6 +11,7 @@ from blasta.utilities.http import UtilitiesHttp
 from blasta.utilities.syndication import UtilitiesSyndication
 from blasta.xmpp.form import DataForm
 from blasta.xmpp.instance import XmppInstance
+from blasta.xmpp.iq import XmppIq
 from blasta.xmpp.pubsub import XmppPubsub
 from datetime import datetime
 from fastapi import Cookie, FastAPI, File, Form, HTTPException, Request, Response, UploadFile
@@ -73,33 +74,37 @@ class HttpInstance:
 jabber_id_pubsub = settings['pubsub']
 journal = settings['journal']
 
-node_id_public = settings['node_id']
-node_title_public = settings['node_title']
-node_subtitle_public = settings['node_subtitle']
+node_settings_id = settings['node_settings_id']
+node_settings_title = settings['node_settings_title']
+node_settings_subtitle = settings['node_settings_subtitle']
 
-node_id_private = settings['node_id_private']
-node_title_private = settings['node_title_private']
-node_subtitle_private = settings['node_subtitle_private']
+node_public_id = settings['node_public_id']
+node_public_title = settings['node_public_title']
+node_public_subtitle = settings['node_public_subtitle']
 
-node_id_read = settings['node_id_read']
-node_title_read = settings['node_title_read']
-node_subtitle_read = settings['node_subtitle_read']
+node_private_id = settings['node_private_id']
+node_private_title = settings['node_private_title']
+node_private_subtitle = settings['node_private_subtitle']
 
+node_read_id = settings['node_read_id']
+node_read_title = settings['node_read_title']
+node_read_subtitle = settings['node_read_subtitle']
+
 nodes = {
 'public' : {
-'name' : node_id_public,
-'title' : node_title_public,
-'subtitle' : node_subtitle_public,
+'name' : node_public_id,
+'title' : node_public_title,
+'subtitle' : node_public_subtitle,
 'access_model' : 'presence'},
 'private' : {
-'name' : node_id_private,
-'title' : node_title_private,
-'subtitle' : node_subtitle_private,
+'name' : node_private_id,
+'title' : node_private_title,
+'subtitle' : node_private_subtitle,
 'access_model' : 'whitelist'},
 'read' : {
-'name' : node_id_read,
-'title' : node_title_read,
-'subtitle' : node_subtitle_read,
+'name' : node_read_id,
+'title' : node_read_title,
+'subtitle' : node_read_subtitle,
 'access_model' : 'whitelist'}
 }
 
@@ -522,7 +527,7 @@ class HttpInstance:
 result, reason = await UtilitiesData.update_cache_and_database(
 db_file, directory_cache, xmpp_instance, jabber_id, node_type, node_id)
 if result == 'error':
-message = 'XMPP system message » {}.'.format(reason)
+message = f'XMPP system message » {reason}.'
 description = 'IQ Error'
 path = 'error'
 return result_post(request, jabber_id, description, message, path)
@@ -545,7 +550,7 @@ class HttpInstance:
 ask = invite = name = origin = start = ''
 # pubsub_jid = syndicate = jid
 # message = 'Find and share bookmarks with family and friends!'
-# description = 'Bookmarks of {}'.format(jid)
+# description = f'Bookmarks of {jid}'
 max_count = 10
 entries = None
 related_tags = None
@@ -585,43 +590,10 @@ class HttpInstance:
 if param_query:
 query = param_query
 entries_cache = UtilitiesData.open_file_toml(filename_items)
-entries_cache_node = entries_cache[node_type]
-filename_cache = os.path.join(directory_cache, 'data', jid + '_query.toml')
-UtilitiesData.cache_items_and_tags_search(directory_cache, entries_cache_node, jid, query)
-if os.path.exists(filename_cache) and os.path.getsize(filename_cache):
-data = UtilitiesData.open_file_toml(filename_cache)
-item_ids_all = data['item_ids']
-related_tags = data['tags']
-if len(item_ids_all) <= index_last:
-index_last = len(item_ids_all)
-page_next = None
-item_ids_selection = []
-for item_id in item_ids_all[index_first:index_last]:
-item_ids_selection.append(item_id)
-entries = []
-for entry in entries_cache_node:
-for item_id in item_ids_selection:
-if entry['url_hash'] == item_id:
-entries.append(entry)
-for entry in entries:
-entry['published_mod'] = UtilitiesDate.convert_iso8601_to_readable(entry['published'])
-entry['tags'] = entry['tags'][:5]
-description = 'Your {} bookmarks with "{}"'.format(node_type, query)
-message = 'Listing {} bookmarks {} - {} out of {}.'.format(node_type, index_first+1, index_last, len(item_ids_all))
-#item_id_next = entries[len(entries)-1]
-else:
-description = 'No {} bookmarks with "{}" were found for {}'.format(node_type, query, jid)
-message = 'Blasta system message » No entries.'
-page_next = None
-page_prev = None
-elif param_tags or param_tld or param_filetype or param_protocol:
-tags_list = param_tags.split('+')
-if len(tags_list) == 1:
-tag = param_tags
-entries_cache = UtilitiesData.open_file_toml(filename_items)
+if node_type in entries_cache:
 entries_cache_node = entries_cache[node_type]
-filename_cache = os.path.join(directory_cache, 'data', jid, tag + '.toml')
-UtilitiesData.cache_items_and_tags_filter(directory_cache, entries_cache_node, jid, tag)
+filename_cache = os.path.join(directory_cache, 'data', jid + '_query.toml')
+UtilitiesData.cache_items_and_tags_search(directory_cache, entries_cache_node, jid, query)
 if os.path.exists(filename_cache) and os.path.getsize(filename_cache):
 data = UtilitiesData.open_file_toml(filename_cache)
 item_ids_all = data['item_ids']
@@ -640,11 +612,56 @@ class HttpInstance:
 for entry in entries:
 entry['published_mod'] = UtilitiesDate.convert_iso8601_to_readable(entry['published'])
 entry['tags'] = entry['tags'][:5]
-description = 'Your {} bookmarks tagged with "{}"'.format(node_type, tag)
-message = 'Listing {} bookmarks {} - {} out of {}.'.format(node_type, index_first+1, index_last, len(item_ids_all))
+description = f'Your {node_type} bookmarks with "{query}"'
+message = f'Listing {node_type} bookmarks {index_first+1} - {index_last} out of {len(item_ids_all)}.'
 #item_id_next = entries[len(entries)-1]
 else:
-description = 'No {} bookmarks tagged with "{}" were found for {}'.format(node_type, tag, jid)
+description = f'No {node_type} bookmarks with "{query}" were found for {jid}'
+message = 'Blasta system message » No entries.'
+page_next = None
+page_prev = None
+else:
+description = f'No {node_type} bookmarks with "{query}" were found for {jid}'
+message = 'Blasta system message » No entries.'
+page_next = None
+page_prev = None
+elif param_tags or param_tld or param_filetype or param_protocol:
+tags_list = param_tags.split('+')
+if len(tags_list) == 1:
+tag = param_tags
+entries_cache = UtilitiesData.open_file_toml(filename_items)
+if node_type in entries_cache:
+entries_cache_node = entries_cache[node_type]
+filename_cache = os.path.join(directory_cache, 'data', jid, tag + '.toml')
+UtilitiesData.cache_items_and_tags_filter(directory_cache, entries_cache_node, jid, tag)
+if os.path.exists(filename_cache) and os.path.getsize(filename_cache):
+data = UtilitiesData.open_file_toml(filename_cache)
+item_ids_all = data['item_ids']
+related_tags = data['tags']
+if len(item_ids_all) <= index_last:
+index_last = len(item_ids_all)
+page_next = None
+item_ids_selection = []
+for item_id in item_ids_all[index_first:index_last]:
+item_ids_selection.append(item_id)
+entries = []
+for entry in entries_cache_node:
+for item_id in item_ids_selection:
+if entry['url_hash'] == item_id:
+entries.append(entry)
+for entry in entries:
+entry['published_mod'] = UtilitiesDate.convert_iso8601_to_readable(entry['published'])
+entry['tags'] = entry['tags'][:5]
+description = f'Your {node_type} bookmarks tagged with "{tag}"'
+message = f'Listing {node_type} bookmarks {index_first+1} - {index_last} out of {len(item_ids_all)}.'
+#item_id_next = entries[len(entries)-1]
+else:
+description = 'No {node_type} bookmarks tagged with "{tag}" were found for {jid}'
+message = 'Blasta system message » No entries.'
+page_next = None
+page_prev = None
+else:
+description = 'No {node_type} bookmarks tagged with "{tag}" were found for {jid}'
 message = 'Blasta system message » No entries.'
 page_next = None
 page_prev = None
@@ -654,31 +671,37 @@ class HttpInstance:
 else:
 name = jabber_id.split('@')[0]
 entries_cache = UtilitiesData.open_file_toml(filename_items)
-entries_cache_node = entries_cache[node_type]
-filename_cache = os.path.join(directory_cache, 'data', jabber_id + '.toml')
-#if len(entries_cache_node) and not os.path.exists(filename_cache):
-UtilitiesData.cache_items_and_tags(directory_cache, entries_cache_node, jabber_id)
-if os.path.exists(filename_cache) and os.path.getsize(filename_cache):
-data = UtilitiesData.open_file_toml(filename_cache)
-item_ids_all = data['item_ids']
-related_tags = data['tags']
-if len(item_ids_all) <= index_last:
-index_last = len(item_ids_all)
-page_next = None
-item_ids_selection = []
-for item_id in item_ids_all[index_first:index_last]:
-item_ids_selection.append(item_id)
-entries = []
-for entry in entries_cache_node:
-for item_id in item_ids_selection:
-if entry['url_hash'] == item_id:
-entries.append(entry)
-for entry in entries:
-entry['published_mod'] = UtilitiesDate.convert_iso8601_to_readable(entry['published'])
-entry['tags'] = entry['tags'][:5]
-description = 'Your {} bookmarks'.format(node_type)
-message = 'Listing {} bookmarks {} - {} out of {}.'.format(node_type, index_first+1, index_last, len(item_ids_all))
-#item_id_next = entries[len(entries)-1]
+if node_type in entries_cache:
+entries_cache_node = entries_cache[node_type]
+filename_cache = os.path.join(directory_cache, 'data', jabber_id + '.toml')
+#if len(entries_cache_node) and not os.path.exists(filename_cache):
+UtilitiesData.cache_items_and_tags(directory_cache, entries_cache_node, jabber_id)
+if os.path.exists(filename_cache) and os.path.getsize(filename_cache):
+data = UtilitiesData.open_file_toml(filename_cache)
+item_ids_all = data['item_ids']
+related_tags = data['tags']
+if len(item_ids_all) <= index_last:
+index_last = len(item_ids_all)
+page_next = None
+item_ids_selection = []
+for item_id in item_ids_all[index_first:index_last]:
+item_ids_selection.append(item_id)
+entries = []
+for entry in entries_cache_node:
+for item_id in item_ids_selection:
+if entry['url_hash'] == item_id:
+entries.append(entry)
+for entry in entries:
+entry['published_mod'] = UtilitiesDate.convert_iso8601_to_readable(
+entry['published'])
+entry['tags'] = entry['tags'][:5]
+description = f'Your {node_type} bookmarks'
+message = f'Listing {node_type} bookmarks {index_first+1} - {index_last} out of {len(item_ids_all)}.'
+#item_id_next = entries[len(entries)-1]
+else:
+description = 'Your bookmarks directory appears to be empty'
+message = 'Blasta system message » Zero count.'
+start = True
 else:
 description = 'Your bookmarks directory appears to be empty'
 message = 'Blasta system message » Zero count.'
@@ -692,19 +715,19 @@ class HttpInstance:
 xmpp_instance = accounts[jabber_id]
 tags_dict = {}
 if param_query:
-description = 'Bookmarks from {} with "{}"'.format(jid, param_query)
+description = f'Bookmarks from {jid} with "{param_query}"'
 entries_database = DatabaseSQLite.get_entries_by_jid_and_query(db_file, jid, param_query, index_first)
 entries_count = DatabaseSQLite.get_entries_count_by_jid_and_query(db_file, jid, param_query)
 for tag, instances in DatabaseSQLite.get_30_tags_by_jid_and_query(db_file, jid, param_query, index_first):
 tags_dict[tag] = instances
 elif param_tags:
-description = 'Bookmarks from {} tagged with "{}"'.format(jid, param_tags)
+description = f'Bookmarks from {jid} tagged with "{param_tags}"'
 entries_database = DatabaseSQLite.get_entries_by_jid_and_tag(db_file, jid, param_tags, index_first)
 entries_count = DatabaseSQLite.get_entries_count_by_jid_and_tag(db_file, jid, param_tags)
 for tag, instances in DatabaseSQLite.get_30_tags_by_jid_and_tag(db_file, jid, param_tags, index_first):
 tags_dict[tag] = instances
 else:
-description = 'Bookmarks from {}'.format(jid)
+description = f'Bookmarks from {jid}'
 entries_database = DatabaseSQLite.get_entries_by_jid(db_file, jid, index_first)
 entries_count = DatabaseSQLite.get_entries_count_by_jid(db_file, jid)
 for tag, instances in DatabaseSQLite.get_30_tags_by_jid(db_file, jid, index_first):
@@ -749,10 +772,10 @@ class HttpInstance:
 if entries_count <= index_last:
 index_last = entries_count
 page_next = None
-message = 'Listing bookmarks {} - {} out of {}.'.format(index_first+1, index_last, entries_count)
+message = f'Listing bookmarks {index_first+1} - {index_last} out of {entries_count}.'
 else:
 # TODO Check permission, so there is no unintended continuing to cached data which is not authorized for.
-iq = await XmppPubsub.get_node_item_ids(xmpp_instance, jid, node_id_public)
+iq = await XmppPubsub.get_node_item_ids(xmpp_instance, jid, node_public_id)
 if isinstance(iq, Iq):
 iq_items_remote = iq['disco_items']
 
@@ -776,19 +799,19 @@ class HttpInstance:
 for item_id in item_ids_all[index_first:index_last]:
 item_ids_selection.append(item_id)
 
-iq = await XmppPubsub.get_node_items(xmpp_instance, jid, node_id_public, item_ids_selection)
+iq = await XmppPubsub.get_node_items(xmpp_instance, jid, node_public_id, item_ids_selection)
 entries = UtilitiesData.extract_iq_items_extra(db_file, iq, jid)
 if entries:
 for entry in entries:
 entry['published_mod'] = UtilitiesDate.convert_iso8601_to_readable(entry['published'])
-message = 'Listing bookmarks {} - {} out of {}.'.format(index_first+1, index_last, len(item_ids_all))
-description = 'Bookmarks from {}'.format(jid)
+message = f'Listing bookmarks {index_first+1} - {index_last} out of {len(item_ids_all)}.'
+description = f'Bookmarks from {jid}'
 else:
 message = 'Blasta system message » Zero count.'
 description = 'Bookmarks directory appears to be empty'
 invite = True
 else:
-message = 'XMPP system message » {}.'.format(iq)
+message = f'XMPP system message » {iq}.'
 name = jid.split('@')[0]
 path = 'error'
 if not iq:
@@ -811,7 +834,7 @@ class HttpInstance:
 invite = True
 elif 'DNS lookup failed' in iq:
 domain = jid.split('@')[1] if '@' in jid else jid
-description = 'Blasta could not connect to server {}'.format(domain)
+description = f'Blasta could not connect to server {domain}'
 elif iq == 'Connection failed: connection refused':
 description = 'Connection with ' + name + ' has been refused'
 elif 'Timeout' in iq or 'timeout' in iq:
@@ -929,15 +952,15 @@ class HttpInstance:
 entries_count = DatabaseSQLite.get_entries_count_by_tag(db_file, param_tags)
 match page_type:
 case 'new':
-description = 'New bookmarks tagged with "{}"'.format(param_tags)
+description = f'New bookmarks tagged with "{param_tags}"'
 entries_database = DatabaseSQLite.get_entries_new_by_tag(db_file, param_tags, index_first)
 tags_of_entries = DatabaseSQLite.get_30_tags_by_entries_new_by_tag(db_file, param_tags, index_first)
 case 'popular':
-description = 'Popular bookmarks tagged with "{}"'.format(param_tags) # 'Most popular'
+description = f'Popular bookmarks tagged with "{param_tags}"' # 'Most popular'
 entries_database = DatabaseSQLite.get_entries_popular_by_tag(db_file, param_tags, index_first)
 tags_of_entries = DatabaseSQLite.get_30_tags_by_entries_popular_by_tag(db_file, param_tags, index_first)
 case 'recent':
-description = 'Recent bookmarks tagged with "{}"'.format(param_tags)
+description = f'Recent bookmarks tagged with "{param_tags}"'
 entries_database = DatabaseSQLite.get_entries_recent_by_tag(db_file, param_tags, index_first)
 tags_of_entries = DatabaseSQLite.get_30_tags_by_entries_recent_by_tag(db_file, param_tags, index_first)
 # TODO case 'query':
@@ -955,7 +978,7 @@ class HttpInstance:
 entries_count = DatabaseSQLite.get_entries_count(db_file)
 case 'query':
 node_id = syndicate = 'new'
-description = 'Posted bookmarks with "{}"'.format(param_query)
+description = f'Posted bookmarks with "{param_query}"'
 entries_database = DatabaseSQLite.get_entries_by_query(db_file, param_query, index_first)
 tags_of_entries = DatabaseSQLite.get_30_tags_by_entries_by_query_recent(db_file, param_query, index_first)
 entries_count = DatabaseSQLite.get_entries_count_by_query(db_file, param_query)
@@ -1010,7 +1033,7 @@ class HttpInstance:
 page_next = None
 #if page_type != 'new' or page_prev or param_tags or param_tld or param_filetype or param_protocol:
 if request.url.path != '/' or request.url.query:
-message = 'Listing bookmarks {} - {} out of {}.'.format(index_first+1, index_last, entries_count)
+message = f'Listing bookmarks {index_first+1} - {index_last} out of {entries_count}.'
 message_link = None
 else:
 message = ('Welcome to Blasta, an XMPP PubSub oriented social '
@@ -1052,8 +1075,8 @@ class HttpInstance:
 @self.app.get('/tag/{tag}')
 async def tag_tag_get(request: Request, tag):
 jabber_id = UtilitiesHttp.is_jid_matches_to_session(accounts, sessions, request)
-node_id = 'tag:{}'.format(tag)
-syndicate = '?tag={}'.format(tag)
+node_id = f'tag:{tag}'
+syndicate = f'?tag={tag}'
 path = 'tag'
 # NOTE Perhaps it would be beneficial to retrieve "published" and
 # tags ("category") of viewer to override the tags of Blasta
@@ -1123,7 +1146,7 @@ class HttpInstance:
 
 if jabber_id in accounts:
 xmpp_instance = accounts[jabber_id]
-#await xmpp_instance.plugin['xep_0060'].delete_node(jabber_id, node_id_public)
+#await xmpp_instance.plugin['xep_0060'].delete_node(jabber_id, node_public_id)
 
 for node_properties in nodes:
 properties = nodes[node_properties]
@@ -1132,22 +1155,22 @@ class HttpInstance:
 xmpp_instance, jabber_id, properties['name'],
 properties['title'], properties['subtitle'],
 properties['access_model'])
-await iq.send(timeout=15)
+await XmppIq.send(iq, 15)
 
-#await XmppPubsub.set_node_private(xmpp_instance, node_id_private)
-#await XmppPubsub.set_node_private(xmpp_instance, node_id_read)
+#await XmppPubsub.set_node_private(xmpp_instance, node_private_id)
+#await XmppPubsub.set_node_private(xmpp_instance, node_read_id)
 #configuration_form = await xmpp_instance['xep_0060'].get_node_config(jabber_id, properties['name'])
 #print(configuration_form)
 node_id = nodes['public']['name']
 result, reason = await UtilitiesData.update_cache_and_database(
 db_file, directory_cache, xmpp_instance, jabber_id, 'public', node_id)
 if result == 'error':
-message = 'XMPP system message » {}.'.format(reason)
+message = f'XMPP system message » {reason}.'
 description = 'IQ Error'
 path = 'error'
 return result_post(request, jabber_id, description, message, path)
 else:
-iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, 'xmpp:blasta:configuration:0', 'routine')
+iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, node_settings_id, 'routine')
 routine = None
 if isinstance(iq, Iq):
 payload = iq['pubsub']['items']['item']['payload']
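The XmppIq.send(iq, 15) calls introduced above replace the bare iq.send(timeout=15) calls and correspond to the new blasta.xmpp.iq module from the commit message ("Add IQ module; Catch more exceptions"). The module itself is not part of this excerpt; a hedged sketch of what such a wrapper might look like, using slixmpp's standard IQ exceptions:

import logging

from slixmpp.exceptions import IqError, IqTimeout


class XmppIq:

    # Called as XmppIq.send(iq, timeout), mirroring how the other blasta.xmpp
    # classes are used in this file; the exact signature is an assumption.
    async def send(iq, timeout):
        try:
            await iq.send(timeout=timeout)
        except IqError as e:
            # The failed stanza is available on the exception for logging.
            logging.error('IQ error: %s', e.iq['error']['condition'])
        except IqTimeout:
            logging.error('IQ timeout after %s seconds.', timeout)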
@@ -1182,7 +1205,7 @@ class HttpInstance:
 xmpp_instance = accounts[jabber_id]
 XmppMessage.send(xmpp_instance, jid, body)
 alias = jid.split('@')[0]
-message = 'Your message has been sent to {}.'.format(alias)
+message = f'Your message has been sent to {alias}.'
 description = 'Message has been sent'
 path = 'message'
 else:
@@ -1215,7 +1238,7 @@ class HttpInstance:
 result, reason = await UtilitiesData.update_cache_and_database(
 db_file, directory_cache, xmpp_instance, jabber_id, node_type, node_id)
 if result == 'error':
-message = 'Blasta system message » {}.'.format(reason)
+message = f'Blasta system message » {reason}.'
 description = 'Directory "private" appears to be empty'
 path = 'error'
 return result_post(request, jabber_id, description, message, path)
@@ -1233,18 +1256,19 @@ class HttpInstance:
 jabber_id = UtilitiesHttp.is_jid_matches_to_session(accounts, sessions, request)
 if jabber_id:
 xmpp_instance = accounts[jabber_id]
-if not await XmppPubsub.is_node_exist(xmpp_instance, 'xmpp:blasta:configuration:0'):
-iq = XmppPubsub.create_node_config(xmpp_instance, jabber_id)
-await iq.send(timeout=15)
+if not await XmppPubsub.is_node_exist(xmpp_instance, node_settings_id):
+iq = XmppPubsub.create_node_config(xmpp_instance, jabber_id, node_settings_id)
+await XmppIq.send(iq, 15)
 access_models = {}
 for node_type in nodes:
 node_id = nodes[node_type]['name']
 iq = await XmppPubsub.get_node_configuration(xmpp_instance, jabber_id, node_id)
-access_model = iq['pubsub_owner']['configure']['form']['values']['pubsub#access_model']
-access_models[node_type] = access_model
+if isinstance(iq, Iq):
+access_model = iq['pubsub_owner']['configure']['form']['values']['pubsub#access_model']
+access_models[node_type] = access_model
 settings = {}
 for setting in ['enrollment', 'routine']:
-iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, 'xmpp:blasta:configuration:0', setting)
+iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, node_settings_id, setting)
 if isinstance(iq, Iq):
 payload = iq['pubsub']['items']['item']['payload']
 if payload:
@@ -1276,16 +1300,16 @@ class HttpInstance:
 if jabber_id:
 xmpp_instance = accounts[jabber_id]
 if routine:
-message = 'The routine directory has been set to {}'.format(routine)
+message = f'The routine directory has been set to {routine}'
 payload = DataForm.create_setting_entry(xmpp_instance, 'routine', routine)
 iq = await XmppPubsub.publish_node_item( # NOTE Consider "configurations" as item ID (see Movim)
-xmpp_instance, jabber_id, 'xmpp:blasta:configuration:0', 'routine', payload)
+xmpp_instance, jabber_id, node_settings_id, 'routine', payload)
 if enroll:
 if enroll == '1': message = 'Your database is shared with the Blasta system'
 else: message = 'Your database is excluded from the Blasta system'
 payload = DataForm.create_setting_entry(xmpp_instance, 'enroll', enroll)
 iq = await XmppPubsub.publish_node_item(
-xmpp_instance, jabber_id, 'xmpp:blasta:configuration:0', 'enrollment', payload)
+xmpp_instance, jabber_id, node_settings_id, 'enrollment', payload)
 description = 'Setting has been saved'
 template_file = 'result.xhtml'
 template_dict = {
@@ -1318,9 +1342,9 @@ class HttpInstance:
 if entries:
 filename = os.path.join(directory_cache, 'export', jabber_id + '_' + node_type + '.' + filetype)
 #filename = 'export/' + jabber_id + '_' + node_type + '.' + filetype
-#filename = 'export/{}_{}.{}'.format(jabber_id, node_type, filetype)
+#filename = f'export/{jabber_id}_{node_type}.{filetype}'
 #filename = 'export_' + node_type + '/' + jabber_id + '_' + '.' + filetype
-#filename = 'export_{}/{}.{}'.format(node_type, jabber_id, filetype)
+#filename = f'export_{node_type}/{jabber_id}.{filetype}'
 match filetype:
 case 'json':
 UtilitiesData.save_to_json(filename, entries)
@@ -1360,7 +1384,7 @@ class HttpInstance:
 iq = XmppPubsub.create_node_atom(
 xmpp_instance, jabber_id, node_id, node_title,
 node_subtitle, node_access_model)
-await iq.send(timeout=15)
+await XmppIq.send(iq, 15)
 
 #return {"filename": file.filename}
 content = file.file.read().decode()
@@ -1408,10 +1432,10 @@ class HttpInstance:
 payload = UtilitiesSyndication.create_rfc4287_entry(entry_new)
 iq = await XmppPubsub.publish_node_item(
 xmpp_instance, jabber_id, node_id, item_id, payload)
-#await iq.send(timeout=15)
+#await XmppIq.send(iq, 15)
 counter += 1
 
-message = 'Blasta system message » Imported {} items.'.format(counter)
+message = f'Blasta system message » Imported {counter} items.'
 description = 'Import successful'
 path = 'profile'
 return result_post(request, jabber_id, description, message, path)
@@ -1441,7 +1465,7 @@ class HttpInstance:
 if (isinstance(iq, Iq) and
 url_hash == iq['pubsub']['items']['item']['id']):
 return RedirectResponse(url='/url/' + url_hash + '/edit')
-iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, 'xmpp:blasta:configuration:0', 'routine')
+iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, node_settings_id, 'routine')
 if isinstance(iq, Iq):
 payload = iq['pubsub']['items']['item']['payload']
 if payload:
@@ -1541,7 +1565,7 @@ class HttpInstance:
 result, reason = await UtilitiesData.update_cache_and_database(
 db_file, directory_cache, xmpp_instance, jabber_id, node_type, node_id)
 if result == 'error':
-message = 'Blasta system message » {}.'.format(reason)
+message = f'Blasta system message » {reason}.'
 description = 'Directory "read" appears to be empty'
 path = 'error'
 return result_post(request, jabber_id, description, message, path)
@@ -1626,7 +1650,7 @@ class HttpInstance:
 description = 'Search your own bookmarks'
 message = 'Search for bookmarks from your own directory.'
 else:
-description = 'Search bookmarks of {}'.format(jid)
+description = f'Search bookmarks of {jid}'
 message = 'Search for bookmarks of a given Jabber ID.'
 form_action = '/jid/' + jid
 input_id = input_name = label_for = 'q'
@@ -1712,7 +1736,7 @@ class HttpInstance:
 #if jabber_id == jid or node_type in ('private', 'read'):
 tag_list = DatabaseSQLite.get_500_tags_by_jid_sorted_by_name(db_file, jid)
 message = 'Common 500 tags sorted by name and sized by commonality.'
-description = 'Common tags of {}'.format(jid)
+description = f'Common tags of {jid}'
 template_file = 'tag.xhtml'
 template_dict = {
 'request' : request,
@@ -1734,7 +1758,7 @@ class HttpInstance:
 @self.app.get('/url/{url_hash}')
 async def url_hash_get(request: Request, url_hash):
 jabber_id = UtilitiesHttp.is_jid_matches_to_session(accounts, sessions, request)
-node_id = 'hash:{}'.format(url_hash)
+node_id = f'hash:{url_hash}'
 param_hash = url_hash
 syndicate = path = 'url'
 entries = []
@@ -1753,7 +1777,7 @@ class HttpInstance:
 exist = True
 break
 else:
-message = 'XMPP system message » Error: {}.'.format(iq)
+message = f'XMPP system message » Error: {iq}.'
 description = 'The requested bookmark could not be retrieved'
 path = 'error'
 return result_post(request, jabber_id, description, message, path)
@@ -1810,7 +1834,7 @@ class HttpInstance:
 'jid' : jid,
 'name' : jid, # jid.split('@')[0] if '@' in jid else jid,
 'instances' : instances})
-# message = 'XMPP system message » {}.'.format(iq)
+# message = f'XMPP system message » {iq}.'
 # if iq == 'Node not found':
 # description = 'An error has occurred'
 # else:
@@ -1853,7 +1877,7 @@ class HttpInstance:
 description = 'The requested bookmark does not exist'
 path = 'error'
 return result_post(request, jabber_id, description, message, path)
-message = 'Information for URI {}'.format(entries[0]['link']) # entry[2]
+message = f'Information for URI {entries[0]["link"]}' # entry[2]
 if not instances: instances = 0
 if instances > 1:
 description = 'Discover new resources and see who shares them'
@@ -1904,7 +1928,7 @@ class HttpInstance:
 tags_old: str = Form(''),
 title: str = Form(...),
 url: str = Form(...)):
-node_id = 'hash:{}'.format(url_hash)
+node_id = f'hash:{url_hash}'
 param_hash = url_hash
 syndicate = path = 'url'
 jabber_id = UtilitiesHttp.is_jid_matches_to_session(accounts, sessions, request)
@@ -1923,7 +1947,7 @@ class HttpInstance:
 'jid' : jabber_id,
 'name' : name,
 'instances' : instances or 1}
-message = 'Information for URL {}'.format(url)
+message = f'Information for URL {url}'
 description = 'Bookmark properties'
 xmpp_instance = accounts[jabber_id]
 payload = UtilitiesSyndication.create_rfc4287_entry(entry)
@@ -1938,29 +1962,29 @@ class HttpInstance:
 case 'private':
 print('Set item as private (XEP-0223)')
 #iq = await XmppPubsub.publish_node_item_private(
-# xmpp_instance, node_id_private, url_hash, iq)
-await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_public, url_hash)
+# xmpp_instance, node_private_id, url_hash, iq)
+await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_public_id, url_hash)
 UtilitiesData.remove_item_from_cache(directory_cache, jabber_id, 'public', url_hash)
-await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_read, url_hash)
+await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_read_id, url_hash)
 UtilitiesData.remove_item_from_cache(directory_cache, jabber_id, 'read', url_hash)
 case 'public':
-await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_private, url_hash)
+await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_private_id, url_hash)
 UtilitiesData.remove_item_from_cache(directory_cache, jabber_id, 'private', url_hash)
-await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_read, url_hash)
+await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_read_id, url_hash)
 UtilitiesData.remove_item_from_cache(directory_cache, jabber_id, 'read', url_hash)
 case 'read':
 #iq = await XmppPubsub.publish_node_item_private(
-# xmpp_instance, node_id_read, url_hash, iq)
-await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_public, url_hash)
+# xmpp_instance, node_read_id, url_hash, iq)
+await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_public_id, url_hash)
 UtilitiesData.remove_item_from_cache(directory_cache, jabber_id, 'public', url_hash)
-await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_private, url_hash)
+await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_private_id, url_hash)
 UtilitiesData.remove_item_from_cache(directory_cache, jabber_id, 'private', url_hash)
 if isinstance(iq, str):
 description = 'Could not save bookmark'
-message = 'XMPP system message » {}.'.format(iq)
+message = f'XMPP system message » {iq}.'
 path = 'error'
 return result_post(request, jabber_id, description, message, path)
-#await iq.send(timeout=15)
+#await XmppIq.send(iq, 15)
 # Save changes to cache file
 entries_cache_filename = os.path.join(directory_cache, 'items', jabber_id + '.toml')
 entries_cache = UtilitiesData.open_file_toml(entries_cache_filename)
@@ -2055,7 +2079,7 @@ class HttpInstance:
 @self.app.get('/url/{url_hash}/confirm')
 async def url_hash_confirm_get(request: Request, url_hash):
 jabber_id = UtilitiesHttp.is_jid_matches_to_session(accounts, sessions, request)
-node_id = 'hash:{}'.format(url_hash)
+node_id = f'hash:{url_hash}'
 param_hash = url_hash
 syndicate = path = 'url'
 if len(url_hash) == 32:
@@ -2073,7 +2097,7 @@ class HttpInstance:
 exist = True
 break
 else:
-message = 'XMPP system message » {}.'.format(iq)
+message = f'XMPP system message » {iq}.'
 if iq == 'Node not found':
 description = 'An error has occurred'
 else:
@@ -2093,7 +2117,7 @@ class HttpInstance:
 entry['published_mod'] = UtilitiesDate.convert_iso8601_to_readable(entry['published'])
 entries.append(entry)
 description = 'Confirm deletion of a bookmark'
-message = 'Details for bookmark {}'.format(entries[0]['link'])
+message = f'Details for bookmark {entries[0]["link"]}'
 template_file = 'browse.xhtml'
 template_dict = {
 'request' : request,
@@ -2127,7 +2151,7 @@ class HttpInstance:
 @self.app.get('/url/{url_hash}/delete')
 async def url_hash_delete_get(request: Request, url_hash):
 jabber_id = UtilitiesHttp.is_jid_matches_to_session(accounts, sessions, request)
-node_id = 'hash:{}'.format(url_hash)
+node_id = f'hash:{url_hash}'
 param_hash = url_hash
 syndicate = path = 'url'
 if len(url_hash) == 32:
@@ -2145,7 +2169,7 @@ class HttpInstance:
 exist = True
 break
 else:
-message = 'XMPP system message » {}.'.format(iq)
+message = f'XMPP system message » {iq}.'
 if iq == 'Node not found':
 description = 'An error has occurred'
 else:
@@ -2168,7 +2192,7 @@ class HttpInstance:
 # Set a title
 description = 'A bookmark has been deleted'
 # Set a message
-message = 'Details for bookmark {}'.format(entry['link'])
+message = f'Details for bookmark {entry["link"]}'
 
 # Create a link to restore bookmark
 link_save = ('/save?url=' + urllib.parse.quote(entry['link']) +
@@ -2222,7 +2246,7 @@ class HttpInstance:
 @self.app.post('/url/{url_hash}/edit')
 async def url_hash_edit_get(request: Request, url_hash):
 jabber_id = UtilitiesHttp.is_jid_matches_to_session(accounts, sessions, request)
-# node_id = 'hash:{}'.format(url_hash)
+# node_id = f'hash:{url_hash}'
 if len(url_hash) == 32:
 if jabber_id:
 xmpp_instance = accounts[jabber_id]
@@ -2241,7 +2265,7 @@ class HttpInstance:
 exist = True
 break
 else:
-message = 'XMPP system message » {}.'.format(iq)
+message = f'XMPP system message » {iq}.'
 if iq == 'Node not found':
 description = 'An error has occurred'
 else:
@@ -2281,7 +2305,7 @@ class HttpInstance:
 entry['name'] = name
 entry['url_hash'] = url_hash
 else:
-message = 'XMPP system message » {}.'.format(iq)
+message = f'XMPP system message » {iq}.'
 if iq == 'Node not found':
 description = 'An error has occurred'
 else:
@@ -6,8 +6,8 @@ from blasta.utilities.cryptography import UtilitiesCryptography
 from blasta.utilities.syndication import UtilitiesSyndication
 from blasta.xmpp.pubsub import XmppPubsub
 from datetime import datetime
+from lxml import etree
 import os
-import re
 from slixmpp.stanza.iq import Iq
 import time
 import tomli_w
@@ -116,8 +116,8 @@ class UtilitiesData:
 url_hash = UtilitiesCryptography.hash_url_to_md5(entry['link'])
 iq_item_id = iq_item['id']
 if iq_item_id != url_hash:
-logging.error('Item ID does not match MD5. id: {} hash: {}'.format(iq_item_id, url_hash))
-logging.warn('Item ID does not match MD5. id: {} hash: {}'.format(iq_item_id, url_hash))
+logging.error(f'Item ID does not match MD5. id: {iq_item_id} hash: {url_hash}')
+logging.warn(f'Item ID does not match MD5. id: {iq_item_id} hash: {url_hash}')
 instances = DatabaseSQLite.get_entry_instances_by_url_hash(db_file, url_hash)
 if entry:
 entry['instances'] = instances or 0
@ -136,44 +136,53 @@ class UtilitiesData:
|
||||||
def load_data_netscape(html: str) -> dict:
|
def load_data_netscape(html: str) -> dict:
|
||||||
bookmarks = []
|
bookmarks = []
|
||||||
current_summary = ""
|
current_summary = ""
|
||||||
|
parser = etree.XMLParser(recover=True)
|
||||||
|
|
||||||
lines = html.splitlines()
|
lines = html.splitlines()
|
||||||
for line in lines:
|
for line in lines:
|
||||||
line = line.strip()
|
line = line.strip()
|
||||||
|
if line:
|
||||||
|
# Parse given line
|
||||||
|
root = etree.fromstring(line, parser)
|
||||||
|
|
||||||
# Check for <DT> tag
|
# Check for <DT> tag
|
||||||
if line.startswith("<DT>"):
|
if line.startswith("<DT>"):
|
||||||
# Look for <A> tag within <DT>
|
# Look for <A> tag within <DT>
|
||||||
a_match = re.search(r'<A HREF="(.*?)" ADD_DATE="(.*?)" LAST_MODIFIED="(.*?)" PRIVATE="(.*?)" TAGS="(.*?)">(.*?)</A>', line)
|
a_element = root.find('.//A')
|
||||||
if a_match:
|
if a_element is not None:
|
||||||
link, published, updated, private, tags, title = a_match.groups()
|
link = a_element.get('HREF')
|
||||||
|
add_date = a_element.get('ADD_DATE') or time.time()
|
||||||
|
last_modified = a_element.get('LAST_MODIFIED') or time.time()
|
||||||
|
tags = a_element.get('TAGS')
|
||||||
|
title = a_element.text or link
|
||||||
|
|
||||||
# Convert timestamps from seconds since epoch to ISO format
|
# Convert timestamps from seconds since epoch to ISO format
|
||||||
published_date = datetime.fromtimestamp(int(published)).isoformat()
|
added_date = datetime.fromtimestamp(float(add_date)).isoformat()
|
||||||
updated_date = datetime.fromtimestamp(int(updated)).isoformat()
|
modified_date = datetime.fromtimestamp(float(last_modified)).isoformat()
|
||||||
|
|
||||||
# Create bookmark dictionary
|
# Create bookmark dictionary
|
||||||
bookmark = {
|
bookmark = {
|
||||||
'title': title,
|
'title': title,
|
||||||
'link': link,
|
'link': link,
|
||||||
'summary': current_summary,
|
'summary': current_summary,
|
||||||
'published': published_date,
|
'published': added_date,
|
||||||
'updated': updated_date,
|
'updated': modified_date,
|
||||||
'tags': [tag.strip() for tag in tags.split(',')] if tags else []
|
'tags': [tag.strip() for tag in tags.split(',')] if tags else ['unclassified']
|
||||||
}
|
}
|
||||||
|
|
||||||
# Append bookmark to the list
|
# Append bookmark to the list
|
||||||
bookmarks.append(bookmark)
|
bookmarks.append(bookmark)
|
||||||
|
|
||||||
# Reset summary for the next bookmark
|
# Reset summary for the next bookmark
|
||||||
current_summary = ""
|
current_summary = ""
|
||||||
|
|
||||||
# Check for <DD> tag
|
# Check for <DD> tag
|
||||||
elif line.startswith("<DD>"):
|
elif line.startswith("<DD>"):
|
||||||
# Extract summary from <DD>
|
# Extract summary from <DD>
|
||||||
summary_match = re.search(r'<DD>(.*?)</DD>|<DD>(.*?)(?=s*<DT>|$)', line)
|
bookmarks[len(bookmarks)-1]['summary'] = line[4:].strip()
|
||||||
if summary_match:
|
#dd_element = root.find('.//DD')
|
||||||
bookmarks[len(bookmarks)-1]['summary'] = summary_match.group(2).strip()
|
#if dd_element:
|
||||||
|
# bookmarks[len(bookmarks)-1]['summary'] = dd_element.text.strip()
|
||||||
|
|
||||||
return {'entries': bookmarks}
|
return {'entries': bookmarks}
|
||||||
|
|
||||||
|
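Netscape bookmark exports are not well-formed XML (the <DT> and <DD> tags are usually left unclosed), which is why the parser is created with recover=True and each line is inspected separately. A condensed sketch of the approach, assuming the usual HREF, ADD_DATE, LAST_MODIFIED and TAGS attributes:

from datetime import datetime
from lxml import etree
import time

def parse_netscape_line(line: str, parser=etree.XMLParser(recover=True)):
    # Returns a bookmark dict for a <DT> line, or None otherwise.
    # The default parser is created once and reused across calls.
    line = line.strip()
    if not line.startswith('<DT>'):
        return None
    root = etree.fromstring(line, parser)
    a_element = root.find('.//A') if root is not None else None
    if a_element is None:
        return None
    link = a_element.get('HREF')
    added = float(a_element.get('ADD_DATE') or time.time())
    modified = float(a_element.get('LAST_MODIFIED') or time.time())
    tags = a_element.get('TAGS')
    return {
        'title': a_element.text or link,
        'link': link,
        'published': datetime.fromtimestamp(added).isoformat(),
        'updated': datetime.fromtimestamp(modified).isoformat(),
        'tags': [t.strip() for t in tags.split(',')] if tags else ['unclassified'],
    }

# parse_netscape_line('<DT><A HREF="https://example.org" ADD_DATE="1700000000" TAGS="xmpp,pubsub">Example</A>')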
@ -195,16 +204,18 @@ class UtilitiesData:
|
||||||
|
|
||||||
def remove_item_from_cache(directory_cache, jabber_id, node, url_hash):
|
def remove_item_from_cache(directory_cache, jabber_id, node, url_hash):
|
||||||
filename_items = os.path.join(directory_cache, 'items', jabber_id + '.toml')
|
filename_items = os.path.join(directory_cache, 'items', jabber_id + '.toml')
|
||||||
entries_cache = UtilitiesData.open_file_toml(filename_items)
|
if os.path.exists(filename_items):
|
||||||
if node in entries_cache:
|
#if os.path.exists(filename_items) and os.path.getsize(filename_items):
|
||||||
entries_cache_node = entries_cache[node]
|
entries_cache = UtilitiesData.open_file_toml(filename_items)
|
||||||
for entry_cache in entries_cache_node:
|
if node in entries_cache:
|
||||||
if entry_cache['url_hash'] == url_hash:
|
entries_cache_node = entries_cache[node]
|
||||||
entry_cache_index = entries_cache_node.index(entry_cache)
|
for entry_cache in entries_cache_node:
|
||||||
del entries_cache_node[entry_cache_index]
|
if entry_cache['url_hash'] == url_hash:
|
||||||
break
|
entry_cache_index = entries_cache_node.index(entry_cache)
|
||||||
data_items = entries_cache
|
del entries_cache_node[entry_cache_index]
|
||||||
UtilitiesData.save_to_toml(filename_items, data_items)
|
break
|
||||||
|
data_items = entries_cache
|
||||||
|
UtilitiesData.save_to_toml(filename_items, data_items)
|
||||||
|
|
||||||
def save_to_json(filename: str, data) -> None:
|
def save_to_json(filename: str, data) -> None:
|
||||||
with open(filename, 'w') as f:
|
with open(filename, 'w') as f:
|
||||||
|
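The removal routine now checks that the per-account items file exists before opening it, and the cache is a TOML document keyed by node. A condensed sketch of that logic, using Python 3.11's tomllib and tomli_w directly in place of the project's open_file_toml and save_to_toml helpers:

import os
import tomllib
import tomli_w

def remove_cached_item(directory_cache: str, jabber_id: str, node: str, url_hash: str) -> None:
    filename = os.path.join(directory_cache, 'items', jabber_id + '.toml')
    if not os.path.exists(filename):
        return
    with open(filename, 'rb') as f:
        entries = tomllib.load(f)
    if node in entries:
        # Keep every entry except the one whose url_hash matches.
        entries[node] = [e for e in entries[node] if e.get('url_hash') != url_hash]
        with open(filename, 'wb') as f:
            tomli_w.dump(entries, f)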
@ -244,7 +255,8 @@ class UtilitiesData:
|
||||||
return ['error', iq]
|
return ['error', iq]
|
||||||
else:
|
else:
|
||||||
entries_cache = UtilitiesData.open_file_toml(filename_items)
|
entries_cache = UtilitiesData.open_file_toml(filename_items)
|
||||||
if not node_type in entries_cache: return ['error', 'Directory "{}" is empty'. format(node_type)]
|
if node_type not in entries_cache:
|
||||||
|
return ['error', f'Directory "{node_type}" is empty']
|
||||||
entries_cache_node = entries_cache[node_type]
|
entries_cache_node = entries_cache[node_type]
|
||||||
|
|
||||||
# Check whether items still exist on node
|
# Check whether items still exist on node
|
||||||
|
|
|
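Failures here are reported as a two-element list rather than an exception, so callers can branch on the first element. A small sketch of that convention; the success shape is illustrative only:

def node_entries_or_error(entries_cache: dict, node_type: str) -> list:
    # Same convention as above: failures come back as ['error', reason].
    if node_type not in entries_cache:
        return ['error', f'Directory "{node_type}" is empty']
    return ['entries', entries_cache[node_type]]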
@ -1,2 +1,2 @@
|
||||||
__version__ = '0.2'
|
__version__ = '0.3'
|
||||||
__version_info__ = (0, 2)
|
__version_info__ = (0, 3)
|
||||||
|
|
19
blasta/xmpp/iq.py
Normal file
|
@ -0,0 +1,19 @@
|
||||||
|
#!/usr/bin/python
|
||||||
|
# -*- coding: utf-8 -*-
|
||||||
|
|
||||||
|
from slixmpp.exceptions import IqError, IqTimeout
|
||||||
|
|
||||||
|
class XmppIq:
|
||||||
|
|
||||||
|
async def send(iq, timeout):
|
||||||
|
try:
|
||||||
|
await iq.send(timeout=timeout)
|
||||||
|
except IqError as e:
|
||||||
|
print(str(e))
|
||||||
|
raise Exception('IQ Error!')
|
||||||
|
except IqTimeout as e:
|
||||||
|
print(str(e))
|
||||||
|
raise Exception('IQ Timeout!')
|
||||||
|
except Exception as e:
|
||||||
|
print(str(e))
|
||||||
|
raise Exception('Error!')
|
|
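The new XmppIq.send helper gathers IQ submission and its error handling in one place. A hypothetical call site, using create_node_config as changed in the following hunk; the 15-second timeout is an assumption:

from blasta.xmpp.iq import XmppIq
from blasta.xmpp.pubsub import XmppPubsub

async def create_settings_node(xmpp_instance, jabber_id: str, node_settings_id: str) -> None:
    # Hypothetical helper: build the node-configuration IQ and submit it through XmppIq,
    # which converts IqError/IqTimeout into plain exceptions.
    iq = XmppPubsub.create_node_config(xmpp_instance, jabber_id, node_settings_id)
    await XmppIq.send(iq, timeout=15)  # timeout value is an assumption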
@ -29,7 +29,7 @@ class XmppPubsub:
|
||||||
value=subtitle)
|
value=subtitle)
|
||||||
form.addField('pubsub#max_items',
|
form.addField('pubsub#max_items',
|
||||||
ftype='text-single',
|
ftype='text-single',
|
||||||
value='255')
|
value=255)
|
||||||
form.addField('pubsub#notify_retract',
|
form.addField('pubsub#notify_retract',
|
||||||
ftype='boolean',
|
ftype='boolean',
|
||||||
value=1)
|
value=1)
|
||||||
|
@ -47,12 +47,12 @@ class XmppPubsub:
|
||||||
value='http://www.w3.org/2005/Atom')
|
value='http://www.w3.org/2005/Atom')
|
||||||
return iq
|
return iq
|
||||||
|
|
||||||
def create_node_config(xmpp_instance, jid):
|
def create_node_config(xmpp_instance, jid, node_settings_id):
|
||||||
jid_from = str(xmpp_instance.boundjid) if xmpp_instance.is_component else None
|
jid_from = str(xmpp_instance.boundjid) if xmpp_instance.is_component else None
|
||||||
iq = xmpp_instance.Iq(stype='set',
|
iq = xmpp_instance.Iq(stype='set',
|
||||||
sto=jid,
|
sto=jid,
|
||||||
sfrom=jid_from)
|
sfrom=jid_from)
|
||||||
iq['pubsub']['create']['node'] = 'xmpp:blasta:configuration:0'
|
iq['pubsub']['create']['node'] = node_settings_id
|
||||||
form = iq['pubsub']['configure']['form']
|
form = iq['pubsub']['configure']['form']
|
||||||
form['type'] = 'submit'
|
form['type'] = 'submit'
|
||||||
form.addField('pubsub#access_model',
|
form.addField('pubsub#access_model',
|
||||||
|
@ -63,7 +63,7 @@ class XmppPubsub:
|
||||||
value=0)
|
value=0)
|
||||||
form.addField('pubsub#description',
|
form.addField('pubsub#description',
|
||||||
ftype='text-single',
|
ftype='text-single',
|
||||||
value='Settings of the Blasta PubSub bookmarks system')
|
value='Settings of the Blasta PubSub annotation system')
|
||||||
form.addField('pubsub#max_items',
|
form.addField('pubsub#max_items',
|
||||||
ftype='text-single',
|
ftype='text-single',
|
||||||
value='30')
|
value='30')
|
||||||
|
|
|
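create_node_config no longer hard-codes 'xmpp:blasta:configuration:0'; the node identifier is passed in by the caller, which allows it to come from configuration. A sketch of reading it from a TOML file, with the key name assumed:

import tomllib

def load_settings_node_id(path: str) -> str:
    # Hypothetical: read the settings-node identifier from the application's TOML
    # configuration instead of the previously hard-coded 'xmpp:blasta:configuration:0'.
    with open(path, 'rb') as f:
        settings = tomllib.load(f)
    return settings['node_settings_id']  # key name is an assumption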
@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
|
||||||
|
|
||||||
[project]
|
[project]
|
||||||
name = "Blasta"
|
name = "Blasta"
|
||||||
version = "0.2"
|
version = "0.3"
|
||||||
description = "A collaborative annotation management system for XMPP"
|
description = "A collaborative annotation management system for XMPP"
|
||||||
authors = [{name = "Schimon Zachary", email = "sch@fedora.email"}]
|
authors = [{name = "Schimon Zachary", email = "sch@fedora.email"}]
|
||||||
license = {text = "AGPL-3.0"}
|
license = {text = "AGPL-3.0"}
|
||||||
|
|