diff --git a/blasta/utilities/data.py b/blasta/utilities/data.py
index 8bdc56c..9583467 100644
--- a/blasta/utilities/data.py
+++ b/blasta/utilities/data.py
@@ -37,7 +37,7 @@ class UtilitiesData:
         data = {
             'item_ids' : item_ids,
             'tags' : tags}
-        Data.save_to_toml(filename, data)
+        UtilitiesData.save_to_toml(filename, data)
 
     def cache_items_and_tags_filter(directory_cache, entries, jid, tag):
         """Create a cache file of node items and tags."""
@@ -67,7 +67,7 @@ class UtilitiesData:
         data = {
             'item_ids' : item_ids,
             'tags' : tags}
-        Data.save_to_toml(filename, data)
+        UtilitiesData.save_to_toml(filename, data)
 
     def cache_items_and_tags(directory_cache, entries, jid):
         """Create a cache file of node items and tags."""
@@ -89,7 +89,7 @@ class UtilitiesData:
         data = {
             'item_ids' : item_ids,
             'tags' : tags}
-        Data.save_to_toml(filename, data)
+        UtilitiesData.save_to_toml(filename, data)
 
     def extract_iq_items(iq, jabber_id):
         iq_items = iq['pubsub']['items']
@@ -115,7 +115,7 @@ class UtilitiesData:
             if iq_item_id != url_hash:
                 logging.error('Item ID does not match MD5. id: {} hash: {}'.format(iq_item_id, url_hash))
                 logging.warn('Item ID does not match MD5. id: {} hash: {}'.format(iq_item_id, url_hash))
-            instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
+            instances = DatabaseSQLite.get_entry_instances_by_url_hash(db_file, url_hash)
             if entry:
                 entry['instances'] = instances or 0
                 entry['jid'] = jabber_id
@@ -145,7 +145,7 @@ class UtilitiesData:
 
     def remove_item_from_cache(directory_cache, jabber_id, node, url_hash):
         filename_items = os.path.join(directory_cache, 'items', jabber_id + '.toml')
-        entries_cache = Data.open_file_toml(filename_items)
+        entries_cache = UtilitiesData.open_file_toml(filename_items)
         if node in entries_cache:
             entries_cache_node = entries_cache[node]
             for entry_cache in entries_cache_node:
@@ -154,7 +154,7 @@ class UtilitiesData:
                     del entries_cache_node[entry_cache_index]
                     break
         data_items = entries_cache
-        Data.save_to_toml(filename_items, data_items)
+        UtilitiesData.save_to_toml(filename_items, data_items)
 
     def save_to_json(filename: str, data) -> None:
         with open(filename, 'w') as f:
@@ -186,14 +186,14 @@ class UtilitiesData:
         if not os.path.exists(filename_items) or os.path.getsize(filename_items) in (0, 13):
             iq = await XmppPubsub.get_node_items(xmpp_instance, jabber_id, node_id)
             if isinstance(iq, Iq):
-                entries_cache_node = Data.extract_iq_items_extra(iq, jabber_id)
+                entries_cache_node = UtilitiesData.extract_iq_items_extra(iq, jabber_id)
                 data_items = {node_type : entries_cache_node}
-                Data.save_to_toml(filename_items, data_items)
+                UtilitiesData.save_to_toml(filename_items, data_items)
                 return ['fine', iq] # TODO Remove this line
             else:
                 return ['error', iq]
         else:
-            entries_cache = Data.open_file_toml(filename_items)
+            entries_cache = UtilitiesData.open_file_toml(filename_items)
             if not node_type in entries_cache:
                 return ['error', 'Directory "{}" is empty'.format(node_type)]
             entries_cache_node = entries_cache[node_type]
@@ -206,7 +206,7 @@ class UtilitiesData:
                             iq_item_remote_exist = True
                             break
                     if url_hash and not iq_item_remote_exist:
-                        await SQLite.delete_combination_row_by_jid_and_url_hash(
+                        await DatabaseSQLite.delete_combination_row_by_jid_and_url_hash(
                             db_file, url_hash, jabber_id)
                         entry_index = entries_cache_node.index(entry)
                         del entries_cache_node[entry_index]
@@ -223,7 +223,7 @@ class UtilitiesData:
                     iq = await XmppPubsub.get_node_item(
                         xmpp_instance, jabber_id, node_id, url_hash)
                     if isinstance(iq, Iq):
-                        entries_iq = Data.extract_iq_items_extra(iq, jabber_id)
+                        entries_iq = UtilitiesData.extract_iq_items_extra(iq, jabber_id)
                         entries_cache_node_new += entries_iq
                     else:
                         # TODO
@@ -234,23 +234,23 @@ class UtilitiesData:
                 if node_type == 'public':
 
                     # Fast (low I/O)
-                    if not SQLite.get_jid_id_by_jid(db_file, jabber_id):
-                        await SQLite.set_jid(db_file, jabber_id)
-                    #await SQLite.add_new_entries(db_file, entries)
-                    await SQLite.add_tags(db_file, entries_cache_node)
+                    if not DatabaseSQLite.get_jid_id_by_jid(db_file, jabber_id):
+                        await DatabaseSQLite.set_jid(db_file, jabber_id)
+                    #await DatabaseSQLite.add_new_entries(db_file, entries)
+                    await DatabaseSQLite.add_tags(db_file, entries_cache_node)
 
                     # Slow (high I/O)
                     for entry in entries_cache_node:
                         url_hash = entry['url_hash']
-                        if not SQLite.get_entry_id_by_url_hash(db_file, url_hash):
-                            await SQLite.add_new_entries(db_file, entries_cache_node)
-                            await SQLite.associate_entries_tags_jids(db_file, entry)
-                        #elif not SQLite.is_jid_associated_with_url_hash(db_file, jabber_id, url_hash):
-                        #    await SQLite.associate_entries_tags_jids(db_file, entry)
+                        if not DatabaseSQLite.get_entry_id_by_url_hash(db_file, url_hash):
+                            await DatabaseSQLite.add_new_entries(db_file, entries_cache_node)
+                            await DatabaseSQLite.associate_entries_tags_jids(db_file, entry)
+                        #elif not DatabaseSQLite.is_jid_associated_with_url_hash(db_file, jabber_id, url_hash):
+                        #    await DatabaseSQLite.associate_entries_tags_jids(db_file, entry)
                         else:
-                            await SQLite.associate_entries_tags_jids(db_file, entry)
+                            await DatabaseSQLite.associate_entries_tags_jids(db_file, entry)
                 data_items = entries_cache
-                Data.save_to_toml(filename_items, data_items)
+                UtilitiesData.save_to_toml(filename_items, data_items)
                 return ['fine', iq] # TODO Remove this line
             else:
                 return ['error', iq]
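
Every hunk above fixes the same class of bug: the `Data` and `SQLite` classes were evidently renamed to `UtilitiesData` and `DatabaseSQLite`, but call sites inside the class bodies still referenced the old names. Because Python resolves names when a line executes rather than at import time, the module imported cleanly and the `NameError` only surfaced once one of these methods actually ran. A minimal sketch of the failure mode and the fix, with hypothetical, simplified names:

```python
class UtilitiesData:
    """Formerly named `Data`; its methods refer to the class by name."""

    @staticmethod
    def save_to_toml(filename: str, data: dict) -> None:
        print('would write', data, 'to', filename)

    @staticmethod
    def cache_items_and_tags(filename: str, item_ids: list, tags: list) -> None:
        data = {'item_ids': item_ids, 'tags': tags}
        # Pre-patch, this line read `Data.save_to_toml(filename, data)`.
        # The module still imported without error, but calling this method
        # raised NameError: name 'Data' is not defined, because Python looks
        # the name up only when this line runs.
        UtilitiesData.save_to_toml(filename, data)


UtilitiesData.cache_items_and_tags('items.toml', ['abc123'], ['tag'])
```

Switching these helpers to `@classmethod` and calling `cls.save_to_toml(...)` (or using plain module-level functions) would make the call sites rename-proof; that may be worth considering if these classes exist only as namespaces for static helpers.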