forked from sch/Blasta
Fix aggregation of annotations of a connecting Jabber ID.
This commit is contained in:
parent 24dbadf7dc
commit 799cd80ebe
1 changed file with 22 additions and 22 deletions
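The diff suggests this is a follow-up to class renames (`Data` → `UtilitiesData`, `SQLite` → `DatabaseSQLite`): the aggregation path still referenced the pre-rename class names, so annotations for a connecting Jabber ID failed to aggregate. For orientation, a minimal sketch of the per-entry aggregation step the hunks below touch; `get_entry_instances_by_url_hash` and the `instances`/`jid` keys appear in the diff, while the surrounding function is an illustrative assumption, not Blasta's actual code:

def annotate_entries(db_file, entries, jabber_id):
    # Sketch only: attach aggregate data to each cached entry of the
    # connecting Jabber ID, mirroring the per-item logic in the hunk at 115.
    for entry in entries:
        url_hash = entry['url_hash']
        # Call seen in the diff; assumed to return a count or None.
        instances = DatabaseSQLite.get_entry_instances_by_url_hash(db_file, url_hash)
        entry['instances'] = instances or 0
        entry['jid'] = jabber_id
    return entries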
@@ -37,7 +37,7 @@ class UtilitiesData:
         data = {
             'item_ids' : item_ids,
             'tags' : tags}
-        Data.save_to_toml(filename, data)
+        UtilitiesData.save_to_toml(filename, data)
 
     def cache_items_and_tags_filter(directory_cache, entries, jid, tag):
         """Create a cache file of node items and tags."""
@@ -67,7 +67,7 @@ class UtilitiesData:
         data = {
             'item_ids' : item_ids,
             'tags' : tags}
-        Data.save_to_toml(filename, data)
+        UtilitiesData.save_to_toml(filename, data)
 
     def cache_items_and_tags(directory_cache, entries, jid):
        """Create a cache file of node items and tags."""
@@ -89,7 +89,7 @@ class UtilitiesData:
         data = {
             'item_ids' : item_ids,
             'tags' : tags}
-        Data.save_to_toml(filename, data)
+        UtilitiesData.save_to_toml(filename, data)
 
     def extract_iq_items(iq, jabber_id):
         iq_items = iq['pubsub']['items']
@@ -115,7 +115,7 @@ class UtilitiesData:
             if iq_item_id != url_hash:
                 logging.error('Item ID does not match MD5. id: {} hash: {}'.format(iq_item_id, url_hash))
                 logging.warn('Item ID does not match MD5. id: {} hash: {}'.format(iq_item_id, url_hash))
-            instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
+            instances = DatabaseSQLite.get_entry_instances_by_url_hash(db_file, url_hash)
             if entry:
                 entry['instances'] = instances or 0
                 entry['jid'] = jabber_id
@@ -145,7 +145,7 @@ class UtilitiesData:
 
     def remove_item_from_cache(directory_cache, jabber_id, node, url_hash):
         filename_items = os.path.join(directory_cache, 'items', jabber_id + '.toml')
-        entries_cache = Data.open_file_toml(filename_items)
+        entries_cache = UtilitiesData.open_file_toml(filename_items)
         if node in entries_cache:
             entries_cache_node = entries_cache[node]
             for entry_cache in entries_cache_node:
@@ -154,7 +154,7 @@ class UtilitiesData:
                     del entries_cache_node[entry_cache_index]
                     break
         data_items = entries_cache
-        Data.save_to_toml(filename_items, data_items)
+        UtilitiesData.save_to_toml(filename_items, data_items)
 
     def save_to_json(filename: str, data) -> None:
         with open(filename, 'w') as f:
@@ -186,14 +186,14 @@ class UtilitiesData:
         if not os.path.exists(filename_items) or os.path.getsize(filename_items) in (0, 13):
             iq = await XmppPubsub.get_node_items(xmpp_instance, jabber_id, node_id)
             if isinstance(iq, Iq):
-                entries_cache_node = Data.extract_iq_items_extra(iq, jabber_id)
+                entries_cache_node = UtilitiesData.extract_iq_items_extra(iq, jabber_id)
                 data_items = {node_type : entries_cache_node}
-                Data.save_to_toml(filename_items, data_items)
+                UtilitiesData.save_to_toml(filename_items, data_items)
                 return ['fine', iq] # TODO Remove this line
             else:
                 return ['error', iq]
         else:
-            entries_cache = Data.open_file_toml(filename_items)
+            entries_cache = UtilitiesData.open_file_toml(filename_items)
             if not node_type in entries_cache: return ['error', 'Directory "{}" is empty'. format(node_type)]
             entries_cache_node = entries_cache[node_type]
 
@@ -206,7 +206,7 @@ class UtilitiesData:
                         iq_item_remote_exist = True
                         break
                 if url_hash and not iq_item_remote_exist:
-                    await SQLite.delete_combination_row_by_jid_and_url_hash(
+                    await DatabaseSQLite.delete_combination_row_by_jid_and_url_hash(
                         db_file, url_hash, jabber_id)
                     entry_index = entries_cache_node.index(entry)
                     del entries_cache_node[entry_index]
@@ -223,7 +223,7 @@ class UtilitiesData:
                 iq = await XmppPubsub.get_node_item(
                     xmpp_instance, jabber_id, node_id, url_hash)
                 if isinstance(iq, Iq):
-                    entries_iq = Data.extract_iq_items_extra(iq, jabber_id)
+                    entries_iq = UtilitiesData.extract_iq_items_extra(iq, jabber_id)
                     entries_cache_node_new += entries_iq
                 else:
                     # TODO
@@ -234,23 +234,23 @@ class UtilitiesData:
 
             if node_type == 'public':
                 # Fast (low I/O)
-                if not SQLite.get_jid_id_by_jid(db_file, jabber_id):
-                    await SQLite.set_jid(db_file, jabber_id)
-                #await SQLite.add_new_entries(db_file, entries)
-                await SQLite.add_tags(db_file, entries_cache_node)
+                if not DatabaseSQLite.get_jid_id_by_jid(db_file, jabber_id):
+                    await DatabaseSQLite.set_jid(db_file, jabber_id)
+                #await DatabaseSQLite.add_new_entries(db_file, entries)
+                await DatabaseSQLite.add_tags(db_file, entries_cache_node)
                 # Slow (high I/O)
                 for entry in entries_cache_node:
                     url_hash = entry['url_hash']
-                    if not SQLite.get_entry_id_by_url_hash(db_file, url_hash):
-                        await SQLite.add_new_entries(db_file, entries_cache_node)
-                        await SQLite.associate_entries_tags_jids(db_file, entry)
-                    #elif not SQLite.is_jid_associated_with_url_hash(db_file, jabber_id, url_hash):
-                    #    await SQLite.associate_entries_tags_jids(db_file, entry)
+                    if not DatabaseSQLite.get_entry_id_by_url_hash(db_file, url_hash):
+                        await DatabaseSQLite.add_new_entries(db_file, entries_cache_node)
+                        await DatabaseSQLite.associate_entries_tags_jids(db_file, entry)
+                    #elif not DatabaseSQLite.is_jid_associated_with_url_hash(db_file, jabber_id, url_hash):
+                    #    await DatabaseSQLite.associate_entries_tags_jids(db_file, entry)
                     else:
-                        await SQLite.associate_entries_tags_jids(db_file, entry)
+                        await DatabaseSQLite.associate_entries_tags_jids(db_file, entry)
 
             data_items = entries_cache
-            Data.save_to_toml(filename_items, data_items)
+            UtilitiesData.save_to_toml(filename_items, data_items)
             return ['fine', iq] # TODO Remove this line
         else:
             return ['error', iq]
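For orientation, the `UtilitiesData` TOML helpers that the renamed calls resolve to could look roughly like the sketch below. The method names and the class-level call style (no `self`) come from the diff above; the bodies, and the choice of `tomllib`/`tomli_w`, are assumptions rather than Blasta's actual implementation.

try:
    import tomllib  # stdlib TOML reader on Python 3.11+
except ModuleNotFoundError:
    import tomli as tomllib  # assumed backport for older interpreters

import tomli_w  # assumed third-party writer; tomllib only reads


class UtilitiesData:

    def open_file_toml(filename: str) -> dict:
        # Read a cached node's entries and tags from a TOML file.
        with open(filename, 'rb') as f:
            return tomllib.load(f)

    def save_to_toml(filename: str, data: dict) -> None:
        # Persist entries and tags back to the TOML cache file.
        with open(filename, 'wb') as f:
            tomli_w.dump(data, f)

Under those assumptions, UtilitiesData.save_to_toml(filename, {'item_ids' : item_ids, 'tags' : tags}) matches the call sites in the hunks above.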