#!/usr/bin/env python3
# -*- coding: utf-8 -*-

"""

TODO

* Delete cookie if session does not match

* Delete entry/tag/jid combination row upon removal of a tag.

"""

import asyncio
from asyncio import Lock
from datetime import datetime
from fastapi import Cookie, FastAPI, File, Form, HTTPException, Request, Response, UploadFile
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import FileResponse, HTMLResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
import hashlib
import json
import logging
from os import mkdir
from os.path import getsize, exists
import random
import slixmpp
from slixmpp import ClientXMPP
from slixmpp.exceptions import IqError, IqTimeout
import slixmpp.plugins.xep_0060.stanza.pubsub as pubsub
import slixmpp.plugins.xep_0059.rsm as rsm
from sqlite3 import connect, Error, IntegrityError
from starlette.responses import RedirectResponse
import sys
import time
import tomli_w
from typing import Optional
import urllib.parse
import uvicorn
import webbrowser
import xml.etree.ElementTree as ET

try:
    import tomllib
except ImportError:
    import tomli as tomllib

DBLOCK = Lock()
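# NOTE (editor): DBLOCK is a module-level asyncio lock; it appears to be meant
# to serialize writes to the SQLite database from concurrent request handlers
# (the SQLite helper class referenced below is defined outside this section).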

class Data:

    def cache_items_and_tags(entries, jid, tag=None):
        """Create a cache file of node items and tags."""
        item_ids = []
        tags = {}
        for entry in entries:
            entry_tags = entry['tags']
            entry_url_hash = entry['url_hash']
            tags_to_include = []
            if tag:
                if tag in entry_tags:
                    item_ids.append(entry_url_hash)
                    tags_to_include += entry_tags
                    for tag_to_include in tags_to_include:
                        tags[tag_to_include] = tags[tag_to_include]+1 if tag_to_include in tags else 1
            else:
                item_ids.append(entry_url_hash)
                tags_to_include += entry_tags
                for tag_to_include in tags_to_include:
                    tags[tag_to_include] = tags[tag_to_include]+1 if tag_to_include in tags else 1
        if tags:
            tags = dict(sorted(tags.items(), key=lambda item: (-item[1], item[0])))
            tags = dict(list(tags.items())[:30])
            if tag: del tags[tag]
        if item_ids:
            directory = 'data/{}/'.format(jid)
            if not exists(directory):
                mkdir(directory)
            if tag:
                filename = 'data/{}/{}.toml'.format(jid, tag)
                # Add support for search query
                #if tag:
                #    filename = 'data/{}/query:{}.toml'.format(jid, query)
                #if tag:
                #    filename = 'data/{}/tag:{}.toml'.format(jid, tag)
            else:
                filename = 'data/{}.toml'.format(jid)
            data = {
                'item_ids' : item_ids,
                'tags' : tags}
            Data.save_to_toml(filename, data)

def extract_iq_items(iq, jabber_id):
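        """
        Extract entries from the items of a PubSub IQ result.

        Each item payload is parsed with Syndication.extract_items, and the
        resulting list is reversed (see the XEP-0059 TODO below).
        """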
        iq_items = iq['pubsub']['items']
        entries = []
        name = jabber_id.split('@')[0]
        for iq_item in iq_items:
            item_payload = iq_item['payload']
            entry = Syndication.extract_items(item_payload)
            entries.append(entry)
        # TODO Handle this with XEP-0059 (reverse: bool), instead of reversing it.
        entries.reverse()
        return entries

def extract_iq_items_extra(iq, jabber_id, limit=None):
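        """
        Extract entries from the items of a PubSub IQ result, and enrich each
        entry with instance count, Jabber ID, display name and URL hash.

        Items whose identifier does not match the MD5 hash of their link are
        logged as errors.
        """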
        iq_items = iq['pubsub']['items']
        entries = []
        name = jabber_id.split('@')[0]
        for iq_item in iq_items:
            item_payload = iq_item['payload']
            entry = Syndication.extract_items(item_payload, limit)
            url_hash = Utilities.hash_url_to_md5(entry['link'])
            iq_item_id = iq_item['id']
            if iq_item_id != url_hash:
                logging.error('Item ID does not match MD5. id: {} hash: {}'.format(iq_item_id, url_hash))
            db_file = 'main.sqlite'
            instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
            if entry:
                entry['instances'] = instances or 0
                entry['jid'] = jabber_id
                entry['name'] = name
                entry['url_hash'] = url_hash
                entries.append(entry)
        # TODO Handle this with XEP-0059 (reverse: bool), instead of reversing it.
        entries.reverse()
        return entries

def open_file_toml(filename: str) -> dict:
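        """Read a TOML file and return its content as a dictionary."""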
        with open(filename, mode="rb") as fn:
            data = tomllib.load(fn)
        return data

def organize_tags(tags):
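        """
        Split a comma-separated string of tags into a sorted list of unique,
        lowercase tags, e.g. 'Python, XMPP, python' -> ['python', 'xmpp'].
        """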
        tags_organized = []
        tags = tags.split(',')
        #tags = sorted(set(tags))
        for tag in tags:
            tag = tag.lower().strip()
            if tag and tag not in tags_organized:
                tags_organized.append(tag)
        return sorted(tags_organized)

def remove_item_from_cache(jabber_id, node, url_hash):
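        """Remove the entry identified by url_hash from the cached items of the given node."""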
        filename_items = 'items/' + jabber_id + '.toml'
        entries_cache = Data.open_file_toml(filename_items)
        if node in entries_cache:
            entries_cache_node = entries_cache[node]
            for entry_cache in entries_cache_node:
                if entry_cache['url_hash'] == url_hash:
                    entry_cache_index = entries_cache_node.index(entry_cache)
                    del entries_cache_node[entry_cache_index]
                    break
            data_items = entries_cache
            Data.save_to_toml(filename_items, data_items)

def save_to_json(filename: str, data) -> None:
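        """Write data to a JSON file."""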
        with open(filename, 'w') as f:
            json.dump(data, f)

def save_to_toml(filename: str, data: dict) -> None:
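        """Write a dictionary to a TOML file."""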
        with open(filename, 'w') as fn:
            data_as_string = tomli_w.dumps(data)
            fn.write(data_as_string)

async def update_cache_and_database(xmpp_instance, jabber_id: str, node_type: str, node_id: str):
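        """
        Synchronize the local cache (and, for public nodes, the SQLite
        database) with the items of a PubSub node.

        Returns a two-item list: ['fine', iq] on success, or ['error', reason]
        when the IQ request fails.
        """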
        # Download identifiers of node items.
        iq = await XmppPubsub.get_node_item_ids(xmpp_instance, jabber_id, node_id)
        if isinstance(iq, slixmpp.stanza.iq.Iq):
            iq_items_remote = iq['disco_items']

            # Cache a list of identifiers of node items to a file.
            iq_items_remote_name = []
            for iq_item_remote in iq_items_remote:
                iq_item_remote_name = iq_item_remote['name']
                iq_items_remote_name.append(iq_item_remote_name)

            #data_item_ids = {'iq_items' : iq_items_remote_name}
            #filename_item_ids = 'item_ids/' + jabber_id + '.toml'
            #Data.save_to_toml(filename_item_ids, data_item_ids)

            filename_items = 'items/' + jabber_id + '.toml'
            if not exists(filename_items) or getsize(filename_items) == 13:
                iq = await XmppPubsub.get_node_items(xmpp_instance, jabber_id, node_id)
                if isinstance(iq, slixmpp.stanza.iq.Iq):
                    entries_cache_node = Data.extract_iq_items_extra(iq, jabber_id)
                    data_items = {node_type : entries_cache_node}
                    Data.save_to_toml(filename_items, data_items)
                else:
                    logging.error('IQ problem while retrieving node items: {}'.format(iq))
            else:
                entries_cache = Data.open_file_toml(filename_items)
                if node_type not in entries_cache: return ['error', 'Directory "{}" is empty'.format(node_type)]
                entries_cache_node = entries_cache[node_type]
                db_file = 'main.sqlite'

                # Check whether items still exist on node
                for entry in entries_cache_node:
                    iq_item_remote_exist = False
                    url_hash = None
                    for url_hash in iq_items_remote_name:
                        if url_hash == entry['url_hash']:
                            iq_item_remote_exist = True
                            break
                    if url_hash and not iq_item_remote_exist:
                        await SQLite.delete_combination_row_by_jid_and_url_hash(
                            db_file, url_hash, jabber_id)
                        entry_index = entries_cache_node.index(entry)
                        del entries_cache_node[entry_index]

                # Check for new items on node
                entries_cache_node_new = []
                for url_hash in iq_items_remote_name:
                    iq_item_local_exist = False
                    for entry in entries_cache_node:
                        if url_hash == entry['url_hash']:
                            iq_item_local_exist = True
                            break
                    if not iq_item_local_exist:
                        iq = await XmppPubsub.get_node_item(
                            xmpp_instance, jabber_id, node_id, url_hash)
                        if isinstance(iq, slixmpp.stanza.iq.Iq):
                            entries_iq = Data.extract_iq_items_extra(iq, jabber_id)
                            entries_cache_node_new += entries_iq
                        else:
                            logging.error('IQ problem while retrieving item {}: {}'.format(url_hash, iq))

                entries_cache_node += entries_cache_node_new

                if node_type == 'public':
                    # Fast (low I/O)
                    if not SQLite.get_jid_id_by_jid(db_file, jabber_id):
                        await SQLite.set_jid(db_file, jabber_id)
                    #await SQLite.add_new_entries(db_file, entries)
                    await SQLite.add_tags(db_file, entries_cache_node)
                    # Slow (high I/O)
                    for entry in entries_cache_node:
                        url_hash = entry['url_hash']
                        if not SQLite.get_entry_id_by_url_hash(db_file, url_hash):
                            await SQLite.add_new_entries(db_file, entries_cache_node)
                            await SQLite.associate_entries_tags_jids(db_file, entry)
                        #elif not SQLite.is_jid_associated_with_url_hash(db_file, jabber_id, url_hash):
                        #    await SQLite.associate_entries_tags_jids(db_file, entry)
                        else:
                            await SQLite.associate_entries_tags_jids(db_file, entry)

                data_items = entries_cache
                Data.save_to_toml(filename_items, data_items)
            return ['fine', iq] # TODO Remove this line
        else:
            return ['error', iq]

class HttpInstance:

    def __init__(self, accounts, sessions):

        self.app = FastAPI()
        templates = Jinja2Templates(directory='template')

        self.app.mount('/data', StaticFiles(directory='data'), name='data')
        self.app.mount('/export', StaticFiles(directory='export'), name='export')
        self.app.mount('/graphic', StaticFiles(directory='graphic'), name='graphic')
        self.app.mount('/script', StaticFiles(directory='script'), name='script')
        self.app.mount('/stylesheet', StaticFiles(directory='stylesheet'), name='stylesheet')

        filename_configuration = 'configuration.toml'
data = Data.open_file_toml(filename_configuration)
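        # The keys read below imply a configuration.toml of roughly this shape
        # (values are illustrative placeholders, not actual defaults):
        #
        #   [contacts]
        #   email = "admin@example.org"
        #   irc_channel = "#blasta"
        #   irc_server = "irc.example.org"
        #   mix = "blasta@mix.example.org"
        #   muc = "blasta@conference.example.org"
        #   xmpp = "admin@example.org"
        #
        #   [settings]
        #   pubsub = "pubsub.example.org"
        #   journal = ""
        #   node_id = "blasta_public"
        #   node_title = "Public bookmarks"
        #   node_subtitle = "..."
        #   node_id_private = "blasta_private"
        #   node_title_private = "Private bookmarks"
        #   node_subtitle_private = "..."
        #   node_id_read = "blasta_read"
        #   node_title_read = "Read later"
        #   node_subtitle_read = "..."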

        contacts = data['contacts']
        contact_email = contacts['email']
        contact_irc_channel = contacts['irc_channel']
        contact_irc_server = contacts['irc_server']
        contact_mix = contacts['mix']
        contact_muc = contacts['muc']
        contact_xmpp = contacts['xmpp']

        settings = data['settings']
        jabber_id_pubsub = settings['pubsub']
        journal = settings['journal']

        node_id_public = settings['node_id']
        node_title_public = settings['node_title']
        node_subtitle_public = settings['node_subtitle']

        node_id_private = settings['node_id_private']
        node_title_private = settings['node_title_private']
        node_subtitle_private = settings['node_subtitle_private']

        node_id_read = settings['node_id_read']
        node_title_read = settings['node_title_read']
        node_subtitle_read = settings['node_subtitle_read']

        nodes = {
            'public' : {
                'name' : node_id_public,
                'title' : node_title_public,
                'subtitle' : node_subtitle_public,
                'access_model' : 'presence'},
            'private' : {
                'name' : node_id_private,
                'title' : node_title_private,
                'subtitle' : node_subtitle_private,
                'access_model' : 'whitelist'},
            'read' : {
                'name' : node_id_read,
                'title' : node_title_read,
                'subtitle' : node_subtitle_read,
                'access_model' : 'whitelist'}
}
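        # NOTE (editor): the 'public' node relies on the presence access model,
        # while the 'private' and 'read' nodes are whitelisted; the node
        # identifiers and titles all come from configuration.toml above.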

        origins = [
            "http://localhost",
            "http://localhost:8080",
            "http://127.0.0.1",
            "http://127.0.0.1:8080",
        ]

        self.app.add_middleware(
            CORSMiddleware,
            allow_origins=origins,
            allow_credentials=True,
            allow_methods=["*"],
            allow_headers=["*"],
        )

        # This is a workaround for a "cookie" issue.
        # It appears that there is a problem to set or send a "cookie" when a template is returned.
        @self.app.middleware('http')
        async def middleware_handler(request: Request, call_next):

            # Handle URL query
            if request.url.path != '/save':
                param_url = request.query_params.get('url', '') or None
                param_hash = request.query_params.get('hash', '') or None
                if param_hash:
                    return RedirectResponse(url='/url/' + param_hash)
                if param_url:
                    url_hash = Utilities.hash_url_to_md5(param_url)
                    return RedirectResponse(url='/url/' + url_hash)

            response = await call_next(request)
            jabber_id = session_key = None

            infoo = {
                'accounts' : accounts,
                'sessions' : sessions
            }
            print(infoo)

            # Handle credentials (i.e. so called "cookies")
            if request.url.path == '/disconnect':
                jid = request.cookies.get('jabber_id')
                if jid in accounts: del accounts[jid]
                if jid in sessions: del sessions[jid]
                response.delete_cookie('session_key')
                response.delete_cookie('jabber_id')
            else:
                try:
                    # Access the variable from the request state
                    jabber_id = request.app.state.jabber_id
                except Exception as e:
                    print(request.cookies.get('jabber_id'))
                    print(e)
                    pass
                try:
                    # Access the variable from the request state
                    session_key = request.app.state.session_key
                except Exception as e:
                    print(request.cookies.get('session_key'))
                    print(e)
                    pass
                if jabber_id and session_key:
                    print(['Establishing a session for:', jabber_id, session_key])
                    response.set_cookie(key='jabber_id', value=jabber_id)
                    response.set_cookie(key='session_key', value=session_key)
                    # del request.app.state.jabber_id
                    # del request.app.state.session_key
                    request.app.state.jabber_id = request.app.state.session_key = None
            return response

            # response.set_cookie(key='session', value=str(jid) + '/' + str(session_key))
            # response.set_cookie(key='session',
            #                     value=jid + '/' + session_key,
            #                     expires=datetime.now().replace(tzinfo=timezone.utc) + timedelta(days=30),
            #                     max_age=3600,
            #                     domain='localhost',
            #                     path='/',
            #                     secure=True,
            #                     httponly=False, # True
            #                     samesite='lax')

        @self.app.exception_handler(404)
        def not_found_exception_handler(request: Request, exc: HTTPException):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            message = 'Blasta system message » Not Found.'
            description = 'Not found (404)'
            path = 'error'
            return result_post(request, jabber_id, description, message, path)

        @self.app.exception_handler(405)
        def not_allowed_exception_handler(request: Request, exc: HTTPException):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            message = 'Blasta system message » Method Not Allowed.'
            description = 'Not allowed (405)'
            path = 'error'
            return result_post(request, jabber_id, description, message, path)

        @self.app.exception_handler(500)
        def internal_error_exception_handler(request: Request, exc: HTTPException):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            message = 'Blasta system message » Internal Server Error.'
            description = 'Internal error (500)'
            path = 'error'
            return result_post(request, jabber_id, description, message, path)

        @self.app.get('/connect')
        def connect_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            if jabber_id:
                response = RedirectResponse(url='/jid/' + jabber_id)
            else:
                template_file = 'connect.xhtml'
                template_dict = {
                    'request' : request,
                    'journal' : journal}
                response = templates.TemplateResponse(template_file, template_dict)
                response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/contact')
        def contact_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'contact.xhtml'
            template_dict = {
                'contact_email' : contact_email,
                'contact_irc_channel' : contact_irc_channel,
                'contact_irc_server' : contact_irc_server,
                'contact_mix' : contact_mix,
                'contact_muc' : contact_muc,
                'contact_xmpp' : contact_xmpp,
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/disconnect')
        def disconnect_get(request: Request,
                           response: Response,
                           jabber_id: str = Cookie(None),
                           session_key: str = Cookie(None)):
            # response.set_cookie(max_age=0, value='', key='jabber_id')
            # response.set_cookie(max_age=0, value='', key='session_key')
            response = RedirectResponse(url='/')
            response.delete_cookie('session_key')
            response.delete_cookie('jabber_id')
            return response

        @self.app.get('/favicon.ico', include_in_schema=False)
        def favicon_get():
            return FileResponse('graphic/blasta.ico')

        @self.app.get('/help')
        def help_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'help.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about')
        def help_about_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'about.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/folksonomy')
        def help_about_folksonomies_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'folksonomy.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/ideas')
        def help_about_ideas_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            protocol = request.url.scheme
            hostname = request.url.hostname + ':' + str(request.url.port)
            origin = protocol + '://' + hostname
            template_file = 'ideas.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal,
                'origin' : origin}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/philosophy')
        def help_about_philosophy_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'philosophy.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/projects')
        def help_about_projects_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'projects.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/software')
        def help_about_software_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'software.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/thanks')
        def help_about_thanks_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'thanks.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/xmpp')
        def help_about_xmpp_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'xmpp.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/xmpp/atomsub')
        def help_about_xmpp_atomsub_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'atomsub.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/xmpp/libervia')
        def help_about_xmpp_libervia_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'libervia.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/xmpp/movim')
        def help_about_xmpp_movim_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'movim.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/about/xmpp/pubsub')
        def help_about_xmpp_pubsub_get(request: Request):
            date_now_iso = datetime.now().isoformat()
            date_now_readable = Utilities.convert_iso8601_to_readable(date_now_iso)
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'pubsub.xhtml'
            template_dict = {
                'request' : request,
                'date_now_iso' : date_now_iso,
                'date_now_readable' : date_now_readable,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/feeds')
        def help_about_feeds_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'feeds.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/questions')
        def help_questions_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'questions.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/syndication')
        def help_syndication_get(request: Request):
            hostname = request.url.hostname + ':' + str(request.url.port)
            protocol = request.url.scheme
            origin = protocol + '://' + hostname
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            template_file = 'syndication.xhtml'
            template_dict = {
                'request' : request,
                'jabber_id' : jabber_id,
                'journal' : journal,
                'origin' : origin,
                'pubsub_jid' : jabber_id_pubsub}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/help/utilities')
        def help_utilities_get(request: Request):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            protocol = request.url.scheme
            hostname = request.url.netloc
            origin = protocol + '://' + hostname
bookmarklet = 'location.href=`' + origin + '/save?url=${encodeURIComponent(window.location.href)}&title=${encodeURIComponent(document.title)}`;'
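            # The bookmarklet above is a JavaScript one-liner which redirects the
            # browser to /save with the current page URL and title as query parameters.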
            template_file = 'utilities.xhtml'
            template_dict = {
                'request' : request,
                'bookmarklet' : bookmarklet,
                'jabber_id' : jabber_id,
                'journal' : journal}
            response = templates.TemplateResponse(template_file, template_dict)
            response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/jid', response_class=HTMLResponse)
        @self.app.post('/jid')
        async def jid_get(request: Request, response : Response):
            node_type = 'public'
            path = 'jid'
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            if jabber_id:
                xmpp_instance = accounts[jabber_id]
                node_id = nodes[node_type]['name']
                result, reason = await Data.update_cache_and_database(xmpp_instance, jabber_id, node_type, node_id)
                if result == 'error':
                    message = 'XMPP system message » {}.'.format(reason)
                    description = 'IQ Error'
                    path = 'error'
                    return result_post(request, jabber_id, description, message, path)
                else:
                    response = await jid_main_get(request, node_type, path, jid=jabber_id)
                    return response
            else:
                description = 'An XMPP account is required'
                message = 'Blasta system message » Please connect with your XMPP account to view this directory.'
                path = 'error'
                return result_post(request, jabber_id, description, message, path)

        @self.app.get('/jid/{jid}')
        @self.app.post('/jid/{jid}')
        async def jid_jid_get(request: Request, response : Response, jid):
            response = await jid_main_get(request, node_type='public', path='jid', jid=jid)
            return response

        async def jid_main_get(request: Request, node_type=None, path=None, jid=None):
            ask = invite = name = origin = start = ''
            # pubsub_jid = syndicate = jid
            # message = 'Find and share bookmarks with family and friends!'
            # description = 'Bookmarks of {}'.format(jid)
            max_count = 10
            entries = None
            related_tags = None
            tags_dict = None
            param_filetype = request.query_params.get('filetype', '') or None
            param_mode = request.query_params.get('mode', '') or None
            param_page = request.query_params.get('page', '') or None
            param_protocol = request.query_params.get('protocol', '') or None
            param_query = request.query_params.get('q', '') or None
            if param_query: param_query = param_query.strip()
            param_tags = request.query_params.get('tags', '') or None
            param_tld = request.query_params.get('tld', '') or None
            if param_page and param_mode != 'feed':
                try:
                    page = int(param_page)
                    page_next = page + 1
                    page_prev = page - 1
                except ValueError:
                    page = 1
                    page_next = 2
                    page_prev = page - 1
            else:
                page = 1
                page_next = 2
                page_prev = page - 1
            index_first = (page - 1)*10
index_last = index_first+10
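            # Pagination: ten entries per page, so page N covers the slice
            # [(N - 1) * 10 : N * 10] of the item identifiers.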
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            if jabber_id == jid or node_type in ('private', 'read'):
                xmpp_instance = accounts[jabber_id]
                # NOTE You need something other than an iterator (XEP-0059).
                # You need a PubSub key that would hold tags.
                filename_items = 'items/' + jabber_id + '.toml'
                # NOTE Does it work?
                # It does not seem to actually filter tags.
                # NOTE Yes. It does work.
                # See function "cache_items_and_tags".

                # TODO Search by query
                #if param_query:

                if param_tags or param_tld or param_filetype or param_protocol:
                    tags_list = param_tags.split('+')
                    if len(tags_list) == 1:
                        tag = param_tags
                        entries_cache = Data.open_file_toml(filename_items)
                        entries_cache_node = entries_cache[node_type]
                        filename_cache = 'data/{}/{}.toml'.format(jid, tag)
                        Data.cache_items_and_tags(entries_cache_node, jid, tag)
                        if exists(filename_cache) and getsize(filename_cache):
                            data = Data.open_file_toml(filename_cache)
                            item_ids_all = data['item_ids']
                            related_tags = data['tags']
                            if len(item_ids_all) <= index_last:
                                index_last = len(item_ids_all)
                                page_next = None
                            item_ids_selection = []
                            for item_id in item_ids_all[index_first:index_last]:
                                item_ids_selection.append(item_id)
                            entries = []
                            for entry in entries_cache_node:
                                for item_id in item_ids_selection:
                                    if entry['url_hash'] == item_id:
                                        entries.append(entry)
                            for entry in entries:
                                entry['published_mod'] = Utilities.convert_iso8601_to_readable(entry['published'])
                                entry['tags'] = entry['tags'][:5]
                            description = 'Your {} bookmarks tagged with "{}"'.format(node_type, tag)
                            message = 'Listing {} bookmarks {} - {} out of {}.'.format(node_type, index_first+1, index_last, len(item_ids_all))
                            #item_id_next = entries[len(entries)-1]
                        else:
                            description = 'No {} bookmarks tagged with "{}" were found for {}'.format(node_type, tag, jid)
                            message = 'Blasta system message » No entries.'
                            page_next = None
                            page_prev = None
                    elif len(tags_list) > 1:
                        pass #TODO Support multiple tags
                # if not param_tags and not param_tld and not param_filetype and not param_protocol and not param_url and not param_hash:
                else:
                    name = jabber_id.split('@')[0]
                    entries_cache = Data.open_file_toml(filename_items)
                    entries_cache_node = entries_cache[node_type]
                    filename_cache = 'data/{}.toml'.format(jid)
                    #if len(entries_cache_node) and not exists(filename_cache):
                    Data.cache_items_and_tags(entries_cache_node, jid)
                    if exists(filename_cache) and getsize(filename_cache):
                        data = Data.open_file_toml(filename_cache)
                        item_ids_all = data['item_ids']
                        related_tags = data['tags']
                        if len(item_ids_all) <= index_last:
                            index_last = len(item_ids_all)
                            page_next = None
                        item_ids_selection = []
                        for item_id in item_ids_all[index_first:index_last]:
                            item_ids_selection.append(item_id)
                        entries = []
                        for entry in entries_cache_node:
                            for item_id in item_ids_selection:
                                if entry['url_hash'] == item_id:
                                    entries.append(entry)
                        for entry in entries:
                            entry['published_mod'] = Utilities.convert_iso8601_to_readable(entry['published'])
                            entry['tags'] = entry['tags'][:5]
                        description = 'Your {} bookmarks'.format(node_type)
                        message = 'Listing {} bookmarks {} - {} out of {}.'.format(node_type, index_first+1, index_last, len(item_ids_all))
                        #item_id_next = entries[len(entries)-1]
                    else:
                        description = 'Your bookmarks directory appears to be empty'
                        message = 'Blasta system message » Zero count.'
                        start = True
            elif jabber_id in accounts:
                # NOTE Keep this IQ function call as an exception.
                # If one wants to see contents of someone else, an
                # authorization is required.
                # NOTE It might be wiser to use cached items or item identifiers
                # provided that the viewer is authorized to view items.
                xmpp_instance = accounts[jabber_id]
                db_file = 'main.sqlite'
                tags_dict = {}
                if param_query:
                    description = 'Bookmarks from {} with "{}"'.format(jid, param_query)
                    entries_database = SQLite.get_entries_by_jid_and_query(db_file, jid, param_query, index_first)
                    entries_count = SQLite.get_entries_count_by_jid_and_query(db_file, jid, param_query)
                    for tag, instances in SQLite.get_30_tags_by_jid_and_query(db_file, jid, param_query, index_first):
                        tags_dict[tag] = instances
                elif param_tags:
                    description = 'Bookmarks from {} tagged with "{}"'.format(jid, param_tags)
                    entries_database = SQLite.get_entries_by_jid_and_tag(db_file, jid, param_tags, index_first)
                    entries_count = SQLite.get_entries_count_by_jid_and_tag(db_file, jid, param_tags)
                    for tag, instances in SQLite.get_30_tags_by_jid_and_tag(db_file, jid, param_tags, index_first):
                        tags_dict[tag] = instances
                else:
                    description = 'Bookmarks from {}'.format(jid)
                    entries_database = SQLite.get_entries_by_jid(db_file, jid, index_first)
                    entries_count = SQLite.get_entries_count_by_jid(db_file, jid)
                    for tag, instances in SQLite.get_30_tags_by_jid(db_file, jid, index_first):
                        tags_dict[tag] = instances
                if not entries_database:
                    message = 'Blasta system message » Error: No entries were found.'
                    description = 'No results'
                    path = 'error'
                    return result_post(request, jabber_id, description, message, path)
                if entries_count:
                    entries = []
                    for entry in entries_database:
                        tags_sorted = []
                        for tag in SQLite.get_tags_by_entry_id(db_file, entry[0]):
                            tags_sorted.append(tag[0])
                        entry_jid = SQLite.get_jid_by_jid_id(db_file, entry[5])
                        url_hash = Utilities.hash_url_to_md5(entry[2])
                        instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
                        entries.append(
                            {'title' : entry[3],
                             'link' : entry[2],
                             'summary' : entry[4],
                             'published' : entry[6],
                             'updated' : entry[7],
                             'tags' : tags_sorted,
                             'url_hash' : url_hash,
                             'jid' : entry_jid,
                             'name' : entry_jid, # jid.split('@')[0] if '@' in jid else jid,
                             'instances' : instances})
                    for entry in entries:
                        try:
                            date_iso = entry['published']
                            date_wrt = Utilities.convert_iso8601_to_readable(date_iso)
                            entry['published_mod'] = date_wrt
                        except:
                            print('ERROR: Probably due to an attempt to convert a non ISO 8601.')
                            print(entry['published'])
                            print(entry.get('published_mod'))
                            print(entry)
                    index_last = index_first+len(entries_database)
                    if entries_count <= index_last:
                        index_last = entries_count
                        page_next = None
                    message = 'Listing bookmarks {} - {} out of {}.'.format(index_first+1, index_last, entries_count)
                else:
                    # TODO Check permission, so there is no unintended continuing to cached data which is not authorized for.
                    iq = await XmppPubsub.get_node_item_ids(xmpp_instance, jid, node_id_public)
                    if isinstance(iq, slixmpp.stanza.iq.Iq):
                        iq_items_remote = iq['disco_items']

                        # Cache a list of identifiers of node items to a file.
                        iq_items_remote_name = []
                        for iq_item_remote in iq_items_remote:
                            iq_item_remote_name = iq_item_remote['name']
                            iq_items_remote_name.append(iq_item_remote_name)

                        #data_item_ids = {'iq_items' : iq_items_remote_name}
                        #filename_item_ids = 'item_ids/' + jid + '.toml'
                        #Data.save_to_toml(filename_item_ids, data_item_ids)

                        item_ids_all = iq_items_remote_name
                        #item_ids_all = data['item_ids']
                        #related_tags = data['tags']
                        if len(item_ids_all) <= index_last:
                            page_next = None
                            index_last = len(item_ids_all)
                        item_ids_selection = []
                        for item_id in item_ids_all[index_first:index_last]:
                            item_ids_selection.append(item_id)

                        iq = await XmppPubsub.get_node_items(xmpp_instance, jid, node_id_public, item_ids_selection)
                        entries = Data.extract_iq_items_extra(iq, jid)
                        if entries:
                            for entry in entries:
                                entry['published_mod'] = Utilities.convert_iso8601_to_readable(entry['published'])
                            message = 'Listing bookmarks {} - {} out of {}.'.format(index_first+1, index_last, len(item_ids_all))
                            description = 'Bookmarks from {}'.format(jid)
                        else:
                            message = 'Blasta system message » Zero count.'
                            description = 'Bookmarks directory appears to be empty'
                            invite = True
                    else:
                        message = 'XMPP system message » {}.'.format(iq)
                        name = jid.split('@')[0]
                        path = 'error'
                        if not iq:
                            message = 'XMPP system message » Empty.'
                            description = 'An unknown error has occurred'
                            invite = True
                        elif iq == 'Item not found':
                            description = 'Bookmarks directory appears to be empty'
                            invite = True
                        elif iq == 'forbidden':
                            description = 'Access forbidden'
                        elif iq == 'item-not-found':
                            description = 'Jabber ID does not appear to exist'
                        elif iq == 'not-authorized':
                            description = 'You have no authorization to view ' + name + '\'s bookmarks.'
                            ask = True
                        elif iq == 'Node not found':
                            description = name + '\'s bookmarks directory appears to be empty.'
                            invite = True
                        elif 'DNS lookup failed' in iq:
                            domain = jid.split('@')[1] if '@' in jid else jid
                            description = 'Blasta could not connect to server {}'.format(domain)
                        elif iq == 'Connection failed: connection refused':
                            description = 'Connection with ' + name + ' has been refused'
                        elif 'Timeout' in iq or 'timeout' in iq:
                            description = 'Connection with ' + name + ' has been timed out'
                        else:
                            logging.error('Unhandled IQ response: {}'.format(iq))
                            description = 'An unknown error has occurred'
                    if invite:
                        hostname = request.url.hostname + ':' + str(request.url.port)
                        protocol = request.url.scheme
                        origin = protocol + '://' + hostname
                        template_file = 'ask.xhtml'
                        template_dict = {
                            'request': request,
                            'ask' : ask,
                            'alias' : jabber_id.split('@')[0],
                            'description': description,
                            'invite' : invite,
                            'jabber_id': jabber_id,
                            'jid': jid,
                            'journal': journal,
                            'message': message,
                            'name': name,
                            'origin': origin,
                            'path': path}
                        response = templates.TemplateResponse(template_file, template_dict)
                        response.headers["Content-Type"] = "application/xhtml+xml"
                        return response
            else:
                description = 'An XMPP account is required'
                message = 'Blasta system message » Please connect with your XMPP account to view this directory.'
                path = 'error'
                return result_post(request, jabber_id, description, message, path)
            template_dict = {
                'request': request,
                'description': description,
                'entries': entries,
                'jabber_id': jabber_id,
                'jid': jid,
                'journal': journal,
                'message': message,
                'node_type': node_type,
                'page_next': page_next,
                'page_prev': page_prev,
                'pager' : True,
                'param_query' : param_query,
                'param_tags': param_tags,
                'path': path,
                'pubsub_jid': jid,
                'node_id': nodes[node_type]['name'],
                'start': start,
                'syndicate': jid,
                'tags' : tags_dict or related_tags or ''}
            if param_mode == 'feed':
                template_file = 'browse.atom'
                response = templates.TemplateResponse(template_file, template_dict)
                response.headers["Content-Type"] = "application/xml"
            else:
                template_file = 'browse.xhtml'
                response = templates.TemplateResponse(template_file, template_dict)
                response.headers["Content-Type"] = "application/xhtml+xml"
            return response

        @self.app.get('/blasta.svg')
        def logo_get():
            return FileResponse('graphic/blasta.svg')

        @self.app.get('/', response_class=HTMLResponse)
        @self.app.get('/new', response_class=HTMLResponse)
        async def root_get_new(request: Request, response : Response):
            response = await root_main_get(request, response, page_type='new')
            return response

        @self.app.get('/popular', response_class=HTMLResponse)
        async def root_get_popular(request: Request, response : Response):
            response = await root_main_get(request, response, page_type='popular')
            return response

        @self.app.get('/query', response_class=HTMLResponse)
        async def root_get_query(request: Request, response : Response):
            response = await root_main_get(request, response, page_type='query')
            return response

        @self.app.get('/recent', response_class=HTMLResponse)
        async def root_get_recent(request: Request, response : Response):
            response = await root_main_get(request, response, page_type='recent')
            return response

        async def root_main_get(request: Request, response : Response, page_type=None):
            jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
            node_id = path = syndicate = page_type
            param_filetype = request.query_params.get('filetype', '') or None
            param_mode = request.query_params.get('mode', '') or None
            param_page = request.query_params.get('page', '') or None
            param_protocol = request.query_params.get('protocol', '') or None
            param_query = request.query_params.get('q', '') or None
            if param_query: param_query = param_query.strip()
            param_tags = request.query_params.get('tags', '') or None
            param_tld = request.query_params.get('tld', '') or None
            if param_page and param_mode != 'feed':
                try:
                    page = int(param_page)
                    page_next = page + 1
                    page_prev = page - 1
                except ValueError:
                    page = 1
                    page_next = 2
                    page_prev = page - 1
            else:
                page = 1
                page_next = 2
                page_prev = page - 1
            index_first = (page - 1)*10
            db_file = 'main.sqlite'
            if param_tags or param_tld or param_filetype or param_protocol:
                entries_count = SQLite.get_entries_count_by_tag(db_file, param_tags)
                match page_type:
                    case 'new':
                        description = 'New bookmarks tagged with "{}"'.format(param_tags)
                        entries_database = SQLite.get_entries_new_by_tag(db_file, param_tags, index_first)
                        tags_of_entries = SQLite.get_30_tags_by_entries_new_by_tag(db_file, param_tags, index_first)
                    case 'popular':
                        description = 'Popular bookmarks tagged with "{}"'.format(param_tags) # 'Most popular'
                        entries_database = SQLite.get_entries_popular_by_tag(db_file, param_tags, index_first)
                        tags_of_entries = SQLite.get_30_tags_by_entries_popular_by_tag(db_file, param_tags, index_first)
                    case 'recent':
                        description = 'Recent bookmarks tagged with "{}"'.format(param_tags)
                        entries_database = SQLite.get_entries_recent_by_tag(db_file, param_tags, index_first)
                        tags_of_entries = SQLite.get_30_tags_by_entries_recent_by_tag(db_file, param_tags, index_first)
                    # TODO case 'query':
            else:
                match page_type:
                    case 'new':
                        description = 'New bookmarks'
                        entries_database = SQLite.get_entries_new(db_file, index_first)
                        tags_of_entries = SQLite.get_30_tags_by_entries_new(db_file, index_first)
                        entries_count = SQLite.get_entries_count(db_file)
                    case 'popular':
                        description = 'Popular bookmarks' # 'Most popular'
                        entries_database = SQLite.get_entries_popular(db_file, index_first)
                        tags_of_entries = SQLite.get_30_tags_by_entries_popular(db_file, index_first)
                        entries_count = SQLite.get_entries_count(db_file)
                    case 'query':
                        node_id = syndicate = 'new'
                        description = 'Posted bookmarks with "{}"'.format(param_query)
                        entries_database = SQLite.get_entries_by_query(db_file, param_query, index_first)
                        tags_of_entries = SQLite.get_30_tags_by_entries_by_query_recent(db_file, param_query, index_first)
                        entries_count = SQLite.get_entries_count_by_query(db_file, param_query)
                    case 'recent':
                        description = 'Recent bookmarks'
                        entries_database = SQLite.get_entries_recent(db_file, index_first)
                        tags_of_entries = SQLite.get_30_tags_by_entries_recent(db_file, index_first)
                        entries_count = SQLite.get_entries_count(db_file)
            if not entries_database:
                message = 'Blasta system message » Error: No entries were found.'
                description = 'No results'
                path = 'error'
                return result_post(request, jabber_id, description, message, path)
            tags_dict = {}
            #for tag, instances in SQLite.get_tags_30(db_file):
            for tag, instances in tags_of_entries:
                tags_dict[tag] = instances
            entries = []
            for entry in entries_database:
                tags_sorted = []
                for tag in SQLite.get_tags_by_entry_id(db_file, entry[0]):
                    tags_sorted.append(tag[0])
                jid = SQLite.get_jid_by_jid_id(db_file, entry[5])
                url_hash = Utilities.hash_url_to_md5(entry[2])
                instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
                entries.append(
                    {'title' : entry[3],
                     'link' : entry[2],
                     'summary' : entry[4],
                     'published' : entry[6],
                     'updated' : entry[7],
                     'tags' : tags_sorted,
                     'url_hash' : url_hash, #entry[1]
                     'jid' : jid,
                     'name' : jid, # jid.split('@')[0] if '@' in jid else jid,
                     'instances' : instances})
            for entry in entries:
                try:
                    date_iso = entry['published']
                    date_wrt = Utilities.convert_iso8601_to_readable(date_iso)
                    entry['published_mod'] = date_wrt
                except:
                    print('ERROR: Probably due to an attempt to convert a non ISO 8601.')
                    print(entry['published'])
                    print(entry.get('published_mod'))
                    print(entry)
            index_last = index_first+len(entries_database)
            if entries_count <= index_last:
                # NOTE Did you forget to modify index_last?
                # NOTE No. It appears that it is probably not needed: index_last = entries_count
                page_next = None
            #if page_type != 'new' or page_prev or param_tags or param_tld or param_filetype or param_protocol:
            if request.url.path != '/' or request.url.query:
                message = 'Listing bookmarks {} - {} out of {}.'.format(index_first+1, index_last, entries_count)
                message_link = None
            else:
                message = ('Welcome to Blasta, an XMPP PubSub oriented social '
                           'bookmarks manager for organizing online content.')
                message_link = {'href' : '/help/about', 'text' : 'Learn more'}
            template_dict = {
                'request' : request,
                'description' : description,
                'entries' : entries,
                'jabber_id' : jabber_id,
                'journal' : journal,
                'message' : message,
                'message_link' : message_link,
                'node_id' : node_id,
                'page_next' : page_next,
                'page_prev' : page_prev,
                'page_type' : page_type,
                'pager' : True,
                'param_query' : param_query,
                'param_tags' : param_tags,
                'path' : path,
                'pubsub_jid' : jabber_id_pubsub,
                'syndicate' : syndicate,
                'tags' : tags_dict}
            if param_mode == 'feed':
                # NOTE Consider scheme "feed" in order to prompt news
                # reader 'feed://' + request.url.netloc + request.url.path
                template_file = 'browse.atom'
                response = templates.TemplateResponse(template_file, template_dict)
                response.headers["Content-Type"] = "application/xml"
            else:
                template_file = 'browse.xhtml'
                response = templates.TemplateResponse(template_file, template_dict)
                response.headers["Content-Type"] = "application/xhtml+xml"
            return response

"""
|
|
|
|
# TODO Return to code /tag and / (root) once SQLite database is ready.
|
|
|
|
@self.app.get('/tag/{tag}')
|
|
|
|
async def tag_tag_get(request: Request, tag):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
node_id = 'tag:{}'.format(tag)
|
|
|
|
syndicate = '?tag={}'.format(tag)
|
|
|
|
path = 'tag'
|
|
|
|
# NOTE Perhaps it would be beneficial to retrieve "published" and
|
|
|
|
# tags ("category") of viewer to override the tags of Blasta
|
|
|
|
# TODO If URL exist in visitor's bookmarks, display its properties
|
|
|
|
# (summary, tags title etc.) before data of others.
|
|
|
|
# if Utilities.is_jid_matches_to_session(accounts, sessions, request):
|
|
|
|
page = request.query_params.get('page', '') or None
|
|
|
|
if page:
|
|
|
|
try:
|
|
|
|
page = int(page)
|
|
|
|
page_next = page + 1
|
|
|
|
page_prev = page - 1
|
|
|
|
except:
|
|
|
|
page = 1
|
|
|
|
page_next = 2
|
|
|
|
else:
|
|
|
|
page = 1
|
|
|
|
page_next = 2
|
|
|
|
page_prev = page - 1
|
|
|
|
index_first = (page - 1)*10
|
|
|
|
index_last = index_first+10
|
|
|
|
tags_dict = {}
|
|
|
|
for entry in entries_database:
|
|
|
|
for entry_tag in entry['tags']:
|
|
|
|
if entry_tag in tags_dict:
|
|
|
|
tags_dict[entry_tag] = tags_dict[entry_tag]+1
|
|
|
|
else:
|
|
|
|
tags_dict[entry_tag] = 1
|
|
|
|
tags_dict = dict(sorted(tags_dict.items(), key=lambda item: (-item[1], item[0])))
|
|
|
|
tags_dict = dict(list(tags_dict.items())[:30])
|
|
|
|
#tags_dict = dict(sorted(tags_dict.items(), key=lambda item: (-item[1], item[0]))[:30])
|
|
|
|
print(tags_dict)
|
|
|
|
entries = []
|
|
|
|
for entry in entries_database:
|
|
|
|
if tag in entry['tags']:
|
|
|
|
entries.append(entry)
|
|
|
|
for entry in entries:
|
|
|
|
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
|
|
|
|
"""
|
|
|
|
|
|
|
|
        @self.app.post('/', response_class=HTMLResponse)
        async def root_post(request: Request,
                            response: Response,
                            jabber_id: str = Form(...),
                            password: str = Form(...)):
            if not Utilities.is_jid_matches_to_session(accounts, sessions, request):
                # Store a variable in the request's state
                request.app.state.jabber_id = jabber_id
                session_key = str(random.random())
                request.app.state.session_key = session_key
accounts[jabber_id] = XmppInstance(jabber_id + '/blasta', password)
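                # A new XMPP client is connected with the resource '/blasta'
                # appended to the bare Jabber ID; XmppInstance is presumably the
                # slixmpp ClientXMPP wrapper defined elsewhere in this project.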
                # accounts[jabber_id].authenticated
                # dir(accounts[jabber_id])
                # accounts[jabber_id].failed_auth
                # accounts[jabber_id].event_when_connected
                sessions[jabber_id] = session_key
                # Check if the user and password are present and valid
                # If not valid, return "Could not connect to JID"

                # FIXME Instead of an arbitrary number (i.e. 5 seconds), write a
                # while loop with a timeout of 10 seconds.

                # Check whether an account is connected.
                # Wait for 5 seconds to connect.
                await asyncio.sleep(5)
                #if jabber_id in accounts and accounts[jabber_id].connection_accepted:

                if jabber_id in accounts:
                    xmpp_instance = accounts[jabber_id]
                    #await xmpp_instance.plugin['xep_0060'].delete_node(jabber_id, node_id_public)

                    for node_properties in nodes:
                        properties = nodes[node_properties]
                        if not await XmppPubsub.is_node_exist(xmpp_instance, properties['name']):
                            iq = XmppPubsub.create_node_atom(
                                xmpp_instance, jabber_id, properties['name'],
                                properties['title'], properties['subtitle'],
                                properties['access_model'])
                            await iq.send(timeout=15)

                    #await XmppPubsub.set_node_private(xmpp_instance, node_id_private)
                    #await XmppPubsub.set_node_private(xmpp_instance, node_id_read)
                    #configuration_form = await xmpp_instance['xep_0060'].get_node_config(jabber_id, properties['name'])
                    #print(configuration_form)
                    node_id = nodes['public']['name']
                    result, reason = await Data.update_cache_and_database(xmpp_instance, jabber_id, 'public', node_id)
                    if result == 'error':
                        message = 'XMPP system message » {}.'.format(reason)
                        description = 'IQ Error'
                        path = 'error'
                        return result_post(request, jabber_id, description, message, path)
                    else:
                        iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, 'xmpp:blasta:settings:0', 'routine')
                        if isinstance(iq, slixmpp.stanza.iq.Iq):
                            payload = iq['pubsub']['items']['item']['payload']
                            routine = payload.text if payload else None
                        else:
                            routine = None
                        match routine:
                            case 'private':
                                response = RedirectResponse(url='/private')
                            case 'read':
                                response = RedirectResponse(url='/read')
                            case _:
                                response = RedirectResponse(url='/jid/' + jabber_id)

                else:
                    #del accounts[jabber_id]
                    #del sessions[jabber_id]
                    message = 'Blasta system message » Authorization has failed.'
                    description = 'Connection has failed'
                    path = 'error'
                    return result_post(request, jabber_id, description, message, path)
            return response

@self.app.post('/message')
|
|
|
|
async def message_post(request: Request,
|
|
|
|
jid: str = Form(...),
|
|
|
|
body: str = Form(...)):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
#headline = 'This is a message from Blasta'
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
XmppMessage.send(xmpp_instance, jid, body)
|
|
|
|
alias = jid.split('@')[0]
|
|
|
|
message = 'Your message has been sent to {}.'.format(alias)
|
|
|
|
description = 'Message has been sent'
|
|
|
|
path = 'message'
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
|
|
|
|
@self.app.get('/now')
|
|
|
|
def now_get(request: Request):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
template_file = 'now.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/private', response_class=HTMLResponse)
|
|
|
|
@self.app.post('/private')
|
|
|
|
async def private_get(request: Request, response : Response):
|
|
|
|
node_type = 'private'
|
|
|
|
path = 'private'
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
node_id = nodes[node_type]['name']
|
|
|
|
result, reason = await Data.update_cache_and_database(xmpp_instance, jabber_id, node_type, node_id)
|
|
|
|
if result == 'error':
|
|
|
|
message = 'Blasta system message » {}.'.format(reason)
|
|
|
|
description = 'Directory "private" appears to be empty'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
else:
|
|
|
|
response = await jid_main_get(request, node_type, path)
|
|
|
|
return response
|
|
|
|
else:
|
|
|
|
description = 'An XMPP account is required'
|
|
|
|
message = 'Blasta system message » Please connect with your XMPP account to view this directory.'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
|
|
|
|
@self.app.get('/profile')
|
|
|
|
async def profile_get(request: Request):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
if not await XmppPubsub.is_node_exist(xmpp_instance, 'xmpp:blasta:settings:0'):
|
|
|
|
iq = XmppPubsub.create_node_config(xmpp_instance, jabber_id)
|
|
|
|
await iq.send(timeout=15)
|
|
|
|
access_models = {}
|
|
|
|
for node_type in nodes:
|
|
|
|
node_id = nodes[node_type]['name']
|
|
|
|
iq = await XmppPubsub.get_node_configuration(xmpp_instance, jabber_id, node_id)
|
|
|
|
access_model = iq['pubsub_owner']['configure']['form']['values']['pubsub#access_model']
|
|
|
|
access_models[node_type] = access_model
|
|
|
|
settings = {}
|
|
|
|
for setting in ['enrollment', 'routine']:
|
|
|
|
iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, 'xmpp:blasta:settings:0', setting)
|
|
|
|
if isinstance(iq, slixmpp.stanza.iq.Iq):
|
|
|
|
payload = iq['pubsub']['items']['item']['payload']
|
|
|
|
if payload: settings[setting] = payload.text
|
|
|
|
template_file = 'profile.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'access_models' : access_models,
|
|
|
|
'enroll' : settings['enrollment'] if 'enrollment' in settings else None,
|
|
|
|
'request' : request,
|
|
|
|
'routine' : settings['routine'] if 'routine' in settings else None,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.post('/profile')
|
|
|
|
async def profile_post(request: Request,
|
|
|
|
routine: str = Form(None),
|
|
|
|
enroll: str = Form(None)):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
if routine:
|
|
|
|
message = 'The routine directory has been set to {}'.format(routine)
|
|
|
|
payload = Xml.create_setting_entry(routine)
|
|
|
|
iq = await XmppPubsub.publish_node_item(
|
|
|
|
xmpp_instance, jabber_id, 'xmpp:blasta:settings:0', 'routine', payload)
|
|
|
|
if enroll:
|
|
|
|
if enroll == '1': message = 'Your database is shared with the Blasta system'
|
|
|
|
else: message = 'Your database is excluded from the Blasta system'
|
|
|
|
payload = Xml.create_setting_entry(enroll)
|
|
|
|
iq = await XmppPubsub.publish_node_item(
|
|
|
|
xmpp_instance, jabber_id, 'xmpp:blasta:settings:0', 'enrollment', payload)
|
|
|
|
description = 'Setting has been saved'
|
|
|
|
template_file = 'result.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'description' : description,
|
|
|
|
'enroll' : enroll,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'message' : message,
|
|
|
|
'request' : request,
|
|
|
|
'routine' : routine}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/profile/export/{node_type}/{filetype}')
|
|
|
|
async def profile_export_get(request: Request, node_type, filetype):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
node_id = nodes[node_type]['name']
|
|
|
|
iq = await XmppPubsub.get_node_items(xmpp_instance, jabber_id, node_id)
|
|
|
|
if isinstance(iq, slixmpp.stanza.iq.Iq):
|
|
|
|
entries = Data.extract_iq_items(iq, jabber_id)
|
|
|
|
# TODO Append a bookmark or bookmarks of Blasta
|
|
|
|
if entries:
|
|
|
|
filename = 'export/' + jabber_id + '_' + node_type + '.' + filetype
|
|
|
|
#filename = 'export/{}_{}.{}'.format(jabber_id, node_type, filetype)
|
|
|
|
#filename = 'export_' + node_type + '/' + jabber_id + '_' + '.' + filetype
|
|
|
|
#filename = 'export_{}/{}.{}'.format(node_type, jabber_id, filetype)
|
|
|
|
match filetype:
|
|
|
|
case 'json':
|
|
|
|
Data.save_to_json(filename, entries)
|
|
|
|
case 'toml':
|
|
|
|
# NOTE Should the dict be named with 'entries' or 'private'/'public'/'read'?
|
|
|
|
data = {'entries' : entries}
|
|
|
|
Data.save_to_toml(filename, data)
|
|
|
|
response = FileResponse(filename)
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
return response
|
|
|
|
|
|
|
|
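        # Import handler: read an uploaded TOML export, ensure the target node exists, publish every entry to it, and record the entries, tags, and JID associations in the SQLite database.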
@self.app.post('/profile/import')
|
|
|
|
# def profile_import_post(file: UploadFile = File(...)):
|
|
|
|
async def profile_import_post(request: Request,
|
|
|
|
file: UploadFile | None = None,
|
|
|
|
merge: str = Form(None),
|
|
|
|
node: str = Form(...),
|
|
|
|
override: str = Form(None)):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
if file:
|
|
|
|
|
|
|
|
# TODO If node does not exist, redirect to result page with
|
|
|
|
# a message that bookmarks are empty.
|
|
|
|
# NOTE No.
|
|
|
|
|
|
|
|
node_id = nodes[node]['name']
|
|
|
|
node_title = nodes[node]['title']
|
|
|
|
node_subtitle = nodes[node]['subtitle']
|
|
|
|
node_access_model = nodes[node]['access_model']
|
|
|
|
if not await XmppPubsub.is_node_exist(xmpp_instance, node_id):
|
|
|
|
iq = XmppPubsub.create_node_atom(
|
|
|
|
xmpp_instance, jabber_id, node_id, node_title,
|
|
|
|
node_subtitle, node_access_model)
|
|
|
|
await iq.send(timeout=15)
|
|
|
|
|
|
|
|
#return {"filename": file.filename}
|
|
|
|
content = file.file.read().decode()
|
|
|
|
|
|
|
|
# TODO Add match/case for filetype.
|
|
|
|
|
|
|
|
entries = tomllib.loads(content)
|
|
|
|
# entries_node = entries[node]
|
|
|
|
|
|
|
|
#breakpoint()
|
|
|
|
#for entry in entries: print(entry)
|
|
|
|
|
|
|
|
name = jabber_id.split('@')[0]
|
|
|
|
# timestamp = datetime.now().isoformat()
|
|
|
|
db_file = 'main.sqlite'
|
|
|
|
counter = 0
|
|
|
|
|
|
|
|
for entry_type in entries:
|
|
|
|
for entry in entries[entry_type]:
|
|
|
|
url_hash = item_id = Utilities.hash_url_to_md5(entry['link'])
|
|
|
|
instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
|
|
|
|
entry_new = {
|
|
|
|
'title' : entry['title'],
|
|
|
|
'link' : entry['link'],
|
|
|
|
'summary' : entry['summary'],
|
|
|
|
'published' : entry['published'],
|
|
|
|
'updated' : entry['published'],
|
|
|
|
#'updated' : entry['updated'],
|
|
|
|
'tags' : entry['tags'],
|
|
|
|
'url_hash' : url_hash,
|
|
|
|
'jid' : jabber_id,
|
|
|
|
'name' : name,
|
|
|
|
'instances' : instances}
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
payload = Syndication.create_rfc4287_entry(entry_new)
|
|
|
|
iq = await XmppPubsub.publish_node_item(
|
|
|
|
xmpp_instance, jabber_id, node_id, item_id, payload)
|
|
|
|
#await iq.send(timeout=15)
|
|
|
|
counter += 1
|
|
|
|
|
|
|
|
message = 'Blasta system message » Imported {} items.'.format(counter)
|
|
|
|
description = 'Import successful'
|
|
|
|
path = 'profile'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No upload file sent.'
|
|
|
|
description = 'Import error'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
|
|
|
|
@self.app.get('/save')
|
|
|
|
async def save_get(request: Request):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
param_url = request.query_params.get('url', '')
|
|
|
|
url_hash = Utilities.hash_url_to_md5(param_url)
|
|
|
|
for node_type in nodes:
|
|
|
|
node_id = nodes[node_type]['name']
|
|
|
|
iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, node_id, url_hash)
|
|
|
|
#if len(iq['pubsub']['items']):
|
|
|
|
if (isinstance(iq, slixmpp.stanza.iq.Iq) and
|
|
|
|
url_hash == iq['pubsub']['items']['item']['id']):
|
|
|
|
return RedirectResponse(url='/url/' + url_hash + '/edit')
|
|
|
|
iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, 'xmpp:blasta:settings:0', 'routine')
|
|
|
|
if isinstance(iq, slixmpp.stanza.iq.Iq):
|
|
|
|
payload = iq['pubsub']['items']['item']['payload']
|
|
|
|
routine = payload.text if payload else None
|
|
|
|
else:
|
|
|
|
routine = None
|
|
|
|
# NOTE Is "message" missing?
|
|
|
|
description = 'Add a new bookmark' # 'Enter properties for a bookmark'
|
|
|
|
param_title = request.query_params.get('title', '')
|
|
|
|
param_tags = request.query_params.get('tags', '')
|
|
|
|
param_summary = request.query_params.get('summary', '')
|
|
|
|
path = 'save'
|
|
|
|
if request.query_params:
|
|
|
|
message = message_link = None
|
|
|
|
else:
|
|
|
|
message = 'For greater ease, you might want to try our'
|
|
|
|
message_link = {'href' : '/help/utilities#buttons', 'text' : 'bookmarklets'}
|
|
|
|
template_file = 'edit.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'description' : description,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'message' : message,
|
|
|
|
'message_link' : message_link,
|
|
|
|
'path' : path,
|
|
|
|
'routine' : routine,
|
|
|
|
'summary' : param_summary,
|
|
|
|
'tags' : param_tags,
|
|
|
|
'title' : param_title,
|
|
|
|
'url' : param_url}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.post('/save')
|
|
|
|
async def save_post(request: Request,
|
|
|
|
node: str = Form(...),
|
|
|
|
summary: str = Form(''),
|
|
|
|
tags: str = Form(''),
|
|
|
|
title: str = Form(...),
|
|
|
|
url: str = Form(...)):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
url_hash = Utilities.hash_url_to_md5(url)
|
|
|
|
for node_type in nodes:
|
|
|
|
node_id = nodes[node_type]['name']
|
|
|
|
iq = await XmppPubsub.get_node_item(
|
|
|
|
xmpp_instance, jabber_id, node_id, url_hash)
|
|
|
|
if (isinstance(iq, slixmpp.stanza.iq.Iq) and
|
|
|
|
url_hash == iq['pubsub']['items']['item']['id']):
|
|
|
|
return RedirectResponse(url='/url/' + url_hash + '/edit')
|
|
|
|
description = 'Confirm properties of a bookmark'
|
|
|
|
path = 'save'
|
|
|
|
published = datetime.now().isoformat()
|
|
|
|
template_file = 'edit.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'confirm' : True,
|
|
|
|
'description' : description,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'node' : node,
|
|
|
|
'path' : path,
|
|
|
|
'published' : published,
|
|
|
|
'summary' : summary,
|
|
|
|
'tags' : tags,
|
|
|
|
'title' : title,
|
|
|
|
'url' : url,
|
|
|
|
'url_hash' : url_hash}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
return response
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
|
|
|
|
@self.app.get('/read', response_class=HTMLResponse)
|
|
|
|
@self.app.post('/read')
|
|
|
|
async def read_get(request: Request, response : Response):
|
|
|
|
node_type = 'read'
|
|
|
|
path = 'read'
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
node_id = nodes[node_type]['name']
|
|
|
|
result, reason = await Data.update_cache_and_database(xmpp_instance, jabber_id, node_type, node_id)
|
|
|
|
if result == 'error':
|
|
|
|
message = 'Blasta system message » {}.'.format(reason)
|
|
|
|
description = 'Directory "read" appears to be empty'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
else:
|
|
|
|
response = await jid_main_get(request, node_type, path)
|
|
|
|
return response
|
|
|
|
else:
|
|
|
|
description = 'An XMPP account is required'
|
|
|
|
message = 'Blasta system message » Please connect with your XMPP account to view this directory.'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
|
|
|
|
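        # Render the generic result page (result.xhtml) with a description, a message, and a path hint.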
def result_post(request: Request, jabber_id: str, description: str, message: str, path: str, http_code=None):
|
|
|
|
template_file = 'result.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'description' : description,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'message' : message,
|
|
|
|
'path' : path,
|
|
|
|
'request' : request}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/register')
|
|
|
|
def register_get(request: Request):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
template_file = 'register.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/rss')
|
|
|
|
def rss(request: Request):
|
|
|
|
return RedirectResponse(url='/help/syndication')
|
|
|
|
|
|
|
|
@self.app.get('/search')
|
|
|
|
async def search_get(request: Request):
|
|
|
|
response = RedirectResponse(url='/search/all')
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/search/all')
|
|
|
|
async def search_all_get(request: Request):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
description = 'Search for public bookmarks'
|
|
|
|
form_action = '/query'
|
|
|
|
input_id = input_name = label_for = 'q'
|
|
|
|
input_placeholder = 'Enter a search query.'
|
|
|
|
input_type = 'search'
|
|
|
|
label = 'Search'
|
|
|
|
message = 'Search for bookmarks in the Blasta system.'
|
|
|
|
path = 'all'
|
|
|
|
template_file = 'search.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'description' : description,
|
|
|
|
'form_action' : form_action,
|
|
|
|
'input_id' : input_id,
|
|
|
|
'input_name' : input_name,
|
|
|
|
'input_placeholder' : input_placeholder,
|
|
|
|
'input_type' : input_type,
|
|
|
|
'label' : label,
|
|
|
|
'label_for' : label_for,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'message' : message,
|
|
|
|
'path' : path}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/search/jid/{jid}')
|
|
|
|
async def search_jid_get(request: Request, jid):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
if jabber_id == jid:
|
|
|
|
description = 'Search your own bookmarks'
|
|
|
|
message = 'Search for bookmarks from your own directory.'
|
|
|
|
else:
|
|
|
|
description = 'Search bookmarks of {}'.format(jid)
|
|
|
|
message = 'Search for bookmarks of a given Jabber ID.'
|
|
|
|
form_action = '/jid/' + jid
|
|
|
|
input_id = input_name = label_for = 'q'
|
|
|
|
input_placeholder = 'Enter a search query.'
|
|
|
|
input_type = 'search'
|
|
|
|
label = 'Search'
|
|
|
|
path = 'jid'
|
|
|
|
template_file = 'search.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'description' : description,
|
|
|
|
'form_action' : form_action,
|
|
|
|
'input_id' : input_id,
|
|
|
|
'input_name' : input_name,
|
|
|
|
'input_placeholder' : input_placeholder,
|
|
|
|
'input_type' : input_type,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'jid' : jid,
|
|
|
|
'label' : label,
|
|
|
|
'label_for' : label_for,
|
|
|
|
'journal' : journal,
|
|
|
|
'message' : message,
|
|
|
|
'path' : path}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
else:
|
|
|
|
response = RedirectResponse(url='/search/all')
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/search/url')
|
|
|
|
async def search_url_get(request: Request):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
description = 'Search for a bookmark'
|
|
|
|
form_action = None # This is not relevant due to function middleware. Maybe / or /url.
|
|
|
|
input_id = input_name = label_for = 'url'
|
|
|
|
input_placeholder = 'Enter a URL.'
|
|
|
|
input_type = 'url'
|
|
|
|
label = 'URL'
|
|
|
|
message = 'Search for a bookmark by a URL.'
|
|
|
|
path = 'url'
|
|
|
|
template_file = 'search.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'description' : description,
|
|
|
|
# 'form_action' : form_action,
|
|
|
|
'input_id' : input_id,
|
|
|
|
'input_name' : input_name,
|
|
|
|
'input_placeholder' : input_placeholder,
|
|
|
|
'input_type' : input_type,
|
|
|
|
'label' : label,
|
|
|
|
'label_for' : label_for,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'message' : message,
|
|
|
|
'path' : path}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/tag')
|
|
|
|
def tag_get(request: Request):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
db_file = 'main.sqlite'
|
|
|
|
tag_list = SQLite.get_tags_500(db_file)
|
|
|
|
message = 'The 500 most common tags, sorted by name and sized by commonality.'
|
|
|
|
description = 'Common tags'
|
|
|
|
template_file = 'tag.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'description' : description,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'message' : message,
|
|
|
|
'tag_list' : tag_list}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/tag/{jid}')
|
|
|
|
def tag_get_jid(request: Request, jid):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
# NOTE Consider retrieval of tags from cache file.
|
|
|
|
# This is relevant to private and read nodes.
|
|
|
|
#if jabber_id == jid or node_type in ('private', 'read'):
|
|
|
|
db_file = 'main.sqlite'
|
|
|
|
tag_list = SQLite.get_500_tags_by_jid_sorted_by_name(db_file, jid)
|
|
|
|
message = 'The 500 most common tags, sorted by name and sized by commonality.'
|
|
|
|
description = 'Common tags of {}'.format(jid)
|
|
|
|
template_file = 'tag.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'description' : description,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'jid' : jid,
|
|
|
|
'journal' : journal,
|
|
|
|
'message' : message,
|
|
|
|
'tag_list' : tag_list}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/url')
|
|
|
|
async def url_get(request: Request):
|
|
|
|
response = RedirectResponse(url='/search/url')
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/url/{url_hash}')
|
|
|
|
async def url_hash_get(request: Request, url_hash):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
node_id = 'hash:{}'.format(url_hash)
|
|
|
|
param_hash = url_hash
|
|
|
|
syndicate = path = 'url'
|
|
|
|
db_file = 'main.sqlite'
|
|
|
|
entries = []
|
|
|
|
exist = False
|
|
|
|
if len(url_hash) == 32:
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
for node in nodes:
|
|
|
|
node_id = nodes[node]['name']
|
|
|
|
iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, node_id, url_hash)
|
|
|
|
if isinstance(iq, slixmpp.stanza.iq.Iq):
|
|
|
|
# TODO If URL exist in visitor's bookmarks, display its properties (summary, tags title etc.) before data of others.
|
|
|
|
iq_item = iq['pubsub']['items']['item']
|
|
|
|
item_payload = iq_item['payload']
|
|
|
|
if item_payload:
|
|
|
|
exist = True
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
message = 'XMPP system message » Error: {}.'.format(iq)
|
|
|
|
description = 'The requested bookmark could not be retrieved'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
if exist:
|
|
|
|
# TODO Perhaps adding a paragraph with "your tags" and "who else has tagged this link"
|
|
|
|
# and keep the (5 item) limit.
|
|
|
|
#entry = Syndication.extract_items(item_payload)
|
|
|
|
# NOTE Display only 5 items, as all the other tags appear at the list of "Related tags".
|
|
|
|
entry = Syndication.extract_items(item_payload, limit=True)
|
|
|
|
if entry:
|
|
|
|
#url_hash = iq_item['id']
|
|
|
|
url_hash = Utilities.hash_url_to_md5(entry['link'])
|
|
|
|
# TODO Add a check: if iq_item['id'] == url_hash:
|
|
|
|
instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
|
|
|
|
entry['instances'] = instances
|
|
|
|
entry['jid'] = jabber_id
|
|
|
|
name = jabber_id.split('@')[0]
|
|
|
|
entry['name'] = name
|
|
|
|
entry['url_hash'] = url_hash
|
|
|
|
entry['published_mod'] = Utilities.convert_iso8601_to_readable(entry['published'])
|
|
|
|
#entry['tags'] = entry['tags'][:5]
|
|
|
|
entries.append(entry)
|
|
|
|
tags_list = {}
|
|
|
|
tags_and_instances = SQLite.get_tags_and_instances_by_url_hash(db_file, url_hash)
|
|
|
|
for tag, tag_instances in tags_and_instances: tags_list[tag] = tag_instances
|
|
|
|
else: # NOTE Is it possible to activate this else statement? Consider removal.
|
|
|
|
# https://fastapi.tiangolo.com/tutorial/handling-errors/
|
|
|
|
#raise HTTPException(status_code=404, detail="Item not found")
|
|
|
|
message = 'Blasta system message » Error: Not found.'
|
|
|
|
description = 'The requested bookmark does not exist'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
return response
|
|
|
|
else:
|
|
|
|
entry = SQLite.get_entry_by_url_hash(db_file, url_hash)
|
|
|
|
tags_sorted = []
|
|
|
|
if entry:
|
|
|
|
for tag in SQLite.get_tags_by_entry_id(db_file, entry[0]):
|
|
|
|
tags_sorted.append(tag[0])
|
|
|
|
tags_list = {}
|
|
|
|
tags_and_instances = SQLite.get_tags_and_instances_by_entry_id(db_file, entry[0])
|
|
|
|
for tag, tag_instances in tags_and_instances: tags_list[tag] = tag_instances
|
|
|
|
jid = SQLite.get_jid_by_jid_id(db_file, entry[5])
|
|
|
|
instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
|
|
|
|
entries.append(
|
|
|
|
{'title' : entry[3],
|
|
|
|
'link' : entry[2],
|
|
|
|
'summary' : entry[4],
|
|
|
|
'published' : entry[6],
|
|
|
|
'published_mod' : Utilities.convert_iso8601_to_readable(entry[6]),
|
|
|
|
'updated' : entry[7],
|
|
|
|
'tags' : tags_sorted,
|
|
|
|
'url_hash' : url_hash,
|
|
|
|
'jid' : jid,
|
|
|
|
'name' : jid, # jid.split('@')[0] if '@' in jid else jid,
|
|
|
|
'instances' : instances})
|
|
|
|
# message = 'XMPP system message » {}.'.format(iq)
|
|
|
|
# if iq == 'Node not found':
|
|
|
|
# description = 'An error has occurred'
|
|
|
|
# else:
|
|
|
|
# description = 'An unknown error has occurred'
|
|
|
|
else:
|
|
|
|
# https://fastapi.tiangolo.com/tutorial/handling-errors/
|
|
|
|
#raise HTTPException(status_code=404, detail="Item not found")
|
|
|
|
message = 'Blasta system message » Error: Not found.'
|
|
|
|
description = 'The requested bookmark does not exist'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
return response
|
|
|
|
else:
|
|
|
|
entry = SQLite.get_entry_by_url_hash(db_file, url_hash)
|
|
|
|
if entry:
|
|
|
|
tags_sorted = []
|
|
|
|
for tag in SQLite.get_tags_by_entry_id(db_file, entry[0]):
|
|
|
|
tags_sorted.append(tag[0])
|
|
|
|
tags_list = {}
|
|
|
|
tags_and_instances = SQLite.get_tags_and_instances_by_entry_id(db_file, entry[0])
|
|
|
|
for tag, tag_instances in tags_and_instances: tags_list[tag] = tag_instances
|
|
|
|
jid = SQLite.get_jid_by_jid_id(db_file, entry[5])
|
|
|
|
instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
|
|
|
|
entries.append(
|
|
|
|
{'title' : entry[3],
|
|
|
|
'link' : entry[2],
|
|
|
|
'summary' : entry[4],
|
|
|
|
'published' : entry[6],
|
|
|
|
'published_mod' : Utilities.convert_iso8601_to_readable(entry[6]),
|
|
|
|
'updated' : entry[7],
|
|
|
|
'tags' : tags_sorted,
|
|
|
|
'url_hash' : url_hash,
|
|
|
|
'jid' : jid,
|
|
|
|
'name' : jid, # jid.split('@')[0] if '@' in jid else jid,
|
|
|
|
'instances' : instances})
|
|
|
|
else:
|
|
|
|
# https://fastapi.tiangolo.com/tutorial/handling-errors/
|
|
|
|
#raise HTTPException(status_code=404, detail="Item not found")
|
|
|
|
message = 'Blasta system message » Error: Not found.'
|
|
|
|
description = 'The requested bookmark does not exist'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
message = 'Information for URI {}'.format(entries[0]['link']) # entry[2]
|
|
|
|
if instances > 1:
|
|
|
|
description = 'Discover new resources and see who shares them'
|
|
|
|
template_file = 'people.xhtml'
|
|
|
|
people_list = {}
|
|
|
|
jids_and_tags = SQLite.get_jids_and_tags_by_url_hash(db_file, url_hash)
|
|
|
|
for jid, tag in jids_and_tags:
|
|
|
|
if jid in people_list and isinstance(people_list[jid], list):
|
|
|
|
people_list[jid].append(tag)
|
|
|
|
else:
|
|
|
|
people_list[jid] = [tag]
|
|
|
|
else:
|
|
|
|
people_list = None
|
|
|
|
description = 'Resource properties'
|
|
|
|
template_file = 'browse.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'description' : description,
|
|
|
|
'entries' : entries,
|
|
|
|
'exist' : exist,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'message' : message,
|
|
|
|
'node_id' : node_id,
|
|
|
|
'param_hash' : param_hash,
|
|
|
|
'path' : path,
|
|
|
|
'people' : people_list,
|
|
|
|
'pubsub_jid' : jabber_id_pubsub,
|
|
|
|
'syndicate' : syndicate,
|
|
|
|
'tags' : tags_list}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: MD5 message-digest algorithm.'
|
|
|
|
description = 'The argument for URL does not appear to be a valid MD5 Checksum'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
return response
|
|
|
|
|
|
|
|
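        # Save/update handler for a single bookmark: publish the entry to the selected node, remove it from the other nodes and their caches, refresh the per-JID cache file, and, for public items, update the SQLite tables.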
@self.app.post('/url/{url_hash}')
|
|
|
|
async def url_hash_post(request: Request,
|
|
|
|
url_hash,
|
|
|
|
node: str = Form(...),
|
|
|
|
published: str = Form(...),
|
|
|
|
summary: str = Form(''),
|
|
|
|
tags: str = Form(''),
|
|
|
|
#tags_old: str = Form(...),
|
|
|
|
tags_old: str = Form(''),
|
|
|
|
title: str = Form(...),
|
|
|
|
url: str = Form(...)):
|
|
|
|
node_id = 'hash:{}'.format(url_hash)
|
|
|
|
param_hash = url_hash
|
|
|
|
syndicate = path = 'url'
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
if jabber_id:
|
|
|
|
name = jabber_id.split('@')[0]
|
|
|
|
db_file = 'main.sqlite'
|
|
|
|
instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
|
|
|
|
timestamp = datetime.now().isoformat()
|
|
|
|
tags_new = Data.organize_tags(tags) if tags else ''
|
|
|
|
entry = {'title' : title.strip(),
|
|
|
|
'link' : url.strip(),
|
|
|
|
'summary' : summary.strip() if summary else '',
|
|
|
|
'published' : published,
|
|
|
|
'updated' : timestamp,
|
|
|
|
'tags' : tags_new,
|
|
|
|
'url_hash' : url_hash,
|
|
|
|
'jid' : jabber_id,
|
|
|
|
'name' : name,
|
|
|
|
'instances' : instances or 1}
|
|
|
|
message = 'Information for URL {}'.format(url)
|
|
|
|
description = 'Bookmark properties'
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
payload = Syndication.create_rfc4287_entry(entry)
|
|
|
|
# TODO Add try/except for IQ
|
|
|
|
print('Publish item')
|
|
|
|
# TODO Check.
|
|
|
|
# NOTE You might not need to append to an open node before appending to a whitelist node.
|
|
|
|
node_id = nodes[node]['name']
|
|
|
|
iq = await XmppPubsub.publish_node_item(
|
|
|
|
xmpp_instance, jabber_id, node_id, url_hash, payload)
|
|
|
|
match node:
|
|
|
|
case 'private':
|
|
|
|
print('Set item as private (XEP-0223)')
|
|
|
|
#iq = await XmppPubsub.publish_node_item_private(
|
|
|
|
# xmpp_instance, node_id_private, url_hash, iq)
|
|
|
|
await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_public, url_hash)
|
|
|
|
Data.remove_item_from_cache(jabber_id, 'public', url_hash)
|
|
|
|
await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_read, url_hash)
|
|
|
|
Data.remove_item_from_cache(jabber_id, 'read', url_hash)
|
|
|
|
case 'public':
|
|
|
|
await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_private, url_hash)
|
|
|
|
Data.remove_item_from_cache(jabber_id, 'private', url_hash)
|
|
|
|
await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_read, url_hash)
|
|
|
|
Data.remove_item_from_cache(jabber_id, 'read', url_hash)
|
|
|
|
case 'read':
|
|
|
|
#iq = await XmppPubsub.publish_node_item_private(
|
|
|
|
# xmpp_instance, node_id_read, url_hash, iq)
|
|
|
|
await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_public, url_hash)
|
|
|
|
Data.remove_item_from_cache(jabber_id, 'public', url_hash)
|
|
|
|
await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id_private, url_hash)
|
|
|
|
Data.remove_item_from_cache(jabber_id, 'private', url_hash)
|
|
|
|
if isinstance(iq, str):
|
|
|
|
description = 'Could not save bookmark'
|
|
|
|
message = 'XMPP system message » {}.'.format(iq)
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
#await iq.send(timeout=15)
|
|
|
|
# Save changes to cache file
|
|
|
|
entries_cache_filename = 'items/' + jabber_id + '.toml'
|
|
|
|
entries_cache = Data.open_file_toml(entries_cache_filename)
|
|
|
|
entries_cache_node = entries_cache[node] if node in entries_cache else []
|
|
|
|
entries_cache_mod = []
|
|
|
|
#for entry_cache in entries_cache_node:
|
|
|
|
# if entry_cache['url_hash'] == url_hash:
|
|
|
|
# entry_cache = entry
|
|
|
|
# break
|
|
|
|
is_entry_modified = False
|
|
|
|
# You already have this code in the HTML form, which indicates that this is an edit of an existing item
|
|
|
|
# <input type="hidden" id="update" name="update" value="yes" required/>
|
|
|
|
for entry_cache in entries_cache_node:
|
|
|
|
if entry_cache['url_hash'] == url_hash:
|
|
|
|
is_entry_modified = True
|
|
|
|
entries_cache_mod.append(entry)
|
|
|
|
else:
|
|
|
|
entries_cache_mod.append(entry_cache)
|
|
|
|
if not is_entry_modified: entries_cache_mod.append(entry)
|
|
|
|
entries_cache[node] = entries_cache_mod
|
|
|
|
entries_cache_data = entries_cache
|
|
|
|
Data.save_to_toml(entries_cache_filename, entries_cache_data)
|
|
|
|
# Save changes to database
|
|
|
|
if node == 'public':
|
|
|
|
tags_valid = []
|
|
|
|
tags_invalid = []
|
|
|
|
#tags_list_new = tags.split(',')
|
|
|
|
tags_list_new = tags_new
|
|
|
|
tags_list_old = tags_old.split(', ')
|
|
|
|
for tag in tags_list_old:
|
|
|
|
tag_trim = tag.strip()
|
|
|
|
if tag_trim not in tags_list_new:
|
|
|
|
tags_invalid.append(tag_trim)
|
|
|
|
for tag in tags_list_new:
|
|
|
|
if tag:
|
|
|
|
tag_trim = tag.strip()
|
|
|
|
if tag_trim not in tags_list_old:
|
|
|
|
tags_valid.append(tag_trim)
|
|
|
|
# FIXME Variable tags_valid is not in use.
|
|
|
|
# NOTE Variable tags_valid might not be needed. See function associate_entries_tags_jids.
|
|
|
|
entry['tags'] = tags_valid
|
|
|
|
await SQLite.add_tags(db_file, [entry])
|
|
|
|
# Slow (high I/O)
|
|
|
|
entry_id = SQLite.get_entry_id_by_url_hash(db_file, url_hash)
|
|
|
|
if not entry_id:
|
|
|
|
await SQLite.add_new_entries(db_file, [entry]) # Is this line needed?
|
|
|
|
await SQLite.associate_entries_tags_jids(db_file, entry)
|
|
|
|
#elif not SQLite.is_jid_associated_with_url_hash(db_file, jabber_id, url_hash):
|
|
|
|
# await SQLite.associate_entries_tags_jids(db_file, entry)
|
|
|
|
else:
|
|
|
|
await SQLite.associate_entries_tags_jids(db_file, entry)
|
|
|
|
print('tags_new')
|
|
|
|
print(tags_new)
|
|
|
|
print('tags_old')
|
|
|
|
print(tags_old)
|
|
|
|
print('tags_valid')
|
|
|
|
print(tags_valid)
|
|
|
|
print('tags_invalid')
|
|
|
|
print(tags_invalid)
|
|
|
|
print(url_hash)
|
|
|
|
print(jabber_id)
|
|
|
|
await SQLite.delete_combination_row_by_url_hash_and_tag_and_jid(db_file, url_hash, tags_invalid, jabber_id)
|
|
|
|
# Entry for HTML
|
|
|
|
entry['published_mod'] = Utilities.convert_iso8601_to_readable(published)
|
|
|
|
entry['updated_mod'] = Utilities.convert_iso8601_to_readable(timestamp)
|
|
|
|
entry['tags'] = entry['tags'][:5]
|
|
|
|
entries = [entry]
|
|
|
|
template_file = 'browse.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request': request,
|
|
|
|
'description': description,
|
|
|
|
'entries': entries,
|
|
|
|
'exist': True,
|
|
|
|
'jabber_id': jabber_id,
|
|
|
|
'journal': journal,
|
|
|
|
'message': message,
|
|
|
|
'node_id': node_id,
|
|
|
|
'param_hash': param_hash,
|
|
|
|
'path': path,
|
|
|
|
'pubsub_jid': jabber_id_pubsub,
|
|
|
|
'syndicate': syndicate,
|
|
|
|
'tags' : tags_new}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
return response
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
|
|
|
|
@self.app.get('/url/{url_hash}/confirm')
|
|
|
|
async def url_hash_confirm_get(request: Request, url_hash):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
node_id = 'hash:{}'.format(url_hash)
|
|
|
|
param_hash = url_hash
|
|
|
|
syndicate = path = 'url'
|
|
|
|
if len(url_hash) == 32:
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
exist = False
|
|
|
|
for node in nodes:
|
|
|
|
node_id = nodes[node]['name']
|
|
|
|
iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, node_id, url_hash)
|
|
|
|
if isinstance(iq, slixmpp.stanza.iq.Iq):
|
|
|
|
# TODO If URL exist in visitor's bookmarks, display its properties (summary, tags title etc.) before data of others.
|
|
|
|
iq_item = iq['pubsub']['items']['item']
|
|
|
|
item_payload = iq_item['payload']
|
|
|
|
if item_payload:
|
|
|
|
exist = True
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
message = 'XMPP system message » {}.'.format(iq)
|
|
|
|
if iq == 'Node not found':
|
|
|
|
description = 'An error has occurred'
|
|
|
|
else:
|
|
|
|
description = 'An unknown error has occurred'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
if exist:
|
|
|
|
# TODO Add a check: if iq_item['id'] == url_hash:
|
|
|
|
entries = []
|
|
|
|
entry = Syndication.extract_items(item_payload)
|
|
|
|
db_file = 'main.sqlite'
|
|
|
|
instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
|
|
|
|
entry['instances'] = instances
|
|
|
|
entry['jid'] = jabber_id
|
|
|
|
name = jabber_id.split('@')[0]
|
|
|
|
entry['name'] = name
|
|
|
|
entry['url_hash'] = url_hash
|
|
|
|
entry['published_mod'] = Utilities.convert_iso8601_to_readable(entry['published'])
|
|
|
|
entries.append(entry)
|
|
|
|
description = 'Confirm deletion of a bookmark'
|
|
|
|
message = 'Details for bookmark {}'.format(entries[0]['link'])
|
|
|
|
template_file = 'browse.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'delete' : True,
|
|
|
|
'description' : description,
|
|
|
|
'entries' : entries,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'message' : message,
|
|
|
|
'node_id' : node_id,
|
|
|
|
'param_hash' : param_hash,
|
|
|
|
'path' : path,
|
|
|
|
'pubsub_jid' : jabber_id_pubsub,
|
|
|
|
'syndicate' : syndicate}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
else:
|
|
|
|
response = RedirectResponse(url='/jid/' + jabber_id)
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: MD5 message-digest algorithm.'
|
|
|
|
description = 'The argument for URL does not appear to be a valid MD5 Checksum'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/url/{url_hash}/delete')
|
|
|
|
async def url_hash_delete_get(request: Request, url_hash):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
node_id = 'hash:{}'.format(url_hash)
|
|
|
|
param_hash = url_hash
|
|
|
|
syndicate = path = 'url'
|
|
|
|
if len(url_hash) == 32:
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
exist = False
|
|
|
|
for node_type in nodes:
|
|
|
|
node_id = nodes[node_type]['name']
|
|
|
|
iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, node_id, url_hash)
|
|
|
|
if isinstance(iq, slixmpp.stanza.iq.Iq):
|
|
|
|
# TODO If URL exist in visitor's bookmarks, display its properties (summary, tags title etc.) before data of others.
|
|
|
|
iq_item = iq['pubsub']['items']['item']
|
|
|
|
item_payload = iq_item['payload']
|
|
|
|
if item_payload:
|
|
|
|
exist = True
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
message = 'XMPP system message » {}.'.format(iq)
|
|
|
|
if iq == 'Node not found':
|
|
|
|
description = 'An error has occurred'
|
|
|
|
else:
|
|
|
|
description = 'An unknown error has occurred'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
if exist:
|
|
|
|
# TODO Add a check: if iq_item['id'] == url_hash:
|
|
|
|
entries = []
|
|
|
|
entry = Syndication.extract_items(item_payload)
|
|
|
|
db_file = 'main.sqlite'
|
|
|
|
instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
|
|
|
|
entry['instances'] = instances
|
|
|
|
entry['jid'] = jabber_id
|
|
|
|
name = jabber_id.split('@')[0]
|
|
|
|
entry['name'] = name
|
|
|
|
entry['url_hash'] = url_hash
|
|
|
|
entry['published_mod'] = Utilities.convert_iso8601_to_readable(entry['published'])
|
|
|
|
entries.append(entry)
|
|
|
|
|
|
|
|
# Set a title
|
|
|
|
description = 'A bookmark has been deleted'
|
|
|
|
# Set a message
|
|
|
|
message = 'Details for bookmark {}'.format(entry['link'])
|
|
|
|
|
|
|
|
# Create a link to restore bookmark
|
|
|
|
link_save = ('/save?url=' + urllib.parse.quote(entry['link']) +
|
|
|
|
'&title=' + urllib.parse.quote(entry['title']) +
|
|
|
|
'&summary=' + urllib.parse.quote(entry['summary']) +
|
|
|
|
'&tags=' + urllib.parse.quote(','.join(entry['tags'])))
|
|
|
|
|
|
|
|
# Remove the item from node
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
await XmppPubsub.del_node_item(xmpp_instance, jabber_id, node_id, url_hash)
|
|
|
|
|
|
|
|
# Remove the item association from database
|
|
|
|
await SQLite.delete_combination_row_by_jid_and_url_hash(db_file, url_hash, jabber_id)
|
|
|
|
#await SQLite.delete_combination_row_by_url_hash_and_tag_and_jid(db_file, url_hash, entry['tags'], jabber_id)
|
|
|
|
|
|
|
|
# Remove the item from cache
|
|
|
|
Data.remove_item_from_cache(jabber_id, node_type, url_hash)
|
|
|
|
|
|
|
|
template_file = 'browse.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'description' : description,
|
|
|
|
'entries' : entries,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'link_save' : link_save,
|
|
|
|
'message' : message,
|
|
|
|
'node_id' : node_id,
|
|
|
|
'param_hash' : param_hash,
|
|
|
|
'path' : path,
|
|
|
|
'pubsub_jid' : jabber_id_pubsub,
|
|
|
|
'restore' : True,
|
|
|
|
'syndicate' : syndicate}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
else:
|
|
|
|
response = RedirectResponse(url='/jid/' + jabber_id)
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: MD5 message-digest algorithm.'
|
|
|
|
description = 'The argument for URL does not appear to be a valid MD5 Checksum'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
return response
|
|
|
|
|
|
|
|
@self.app.get('/url/{url_hash}/edit')
|
|
|
|
@self.app.post('/url/{url_hash}/edit')
|
|
|
|
async def url_hash_edit_get(request: Request, url_hash):
|
|
|
|
jabber_id = Utilities.is_jid_matches_to_session(accounts, sessions, request)
|
|
|
|
# node_id = 'hash:{}'.format(url_hash)
|
|
|
|
if len(url_hash) == 32:
|
|
|
|
if jabber_id:
|
|
|
|
xmpp_instance = accounts[jabber_id]
|
|
|
|
exist = False
|
|
|
|
for node in nodes:
|
|
|
|
node_id = nodes[node]['name']
|
|
|
|
iq = await XmppPubsub.get_node_item(xmpp_instance, jabber_id, node_id, url_hash)
|
|
|
|
if isinstance(iq, slixmpp.stanza.iq.Iq):
|
|
|
|
name = jabber_id.split('@')[0]
|
|
|
|
iq_item = iq['pubsub']['items']['item']
|
|
|
|
# TODO Add a check: if iq_item['id'] == url_hash:
|
|
|
|
# Is this valid entry['url_hash'] = iq['id'] or should it be iq_item['id']
|
|
|
|
db_file = 'main.sqlite'
|
|
|
|
entry = None
|
|
|
|
item_payload = iq_item['payload']
|
|
|
|
if item_payload:
|
|
|
|
exist = True
|
|
|
|
break
|
|
|
|
else:
|
|
|
|
message = 'XMPP system message » {}.'.format(iq)
|
|
|
|
if iq == 'Node not found':
|
|
|
|
description = 'An error has occurred'
|
|
|
|
else:
|
|
|
|
description = 'An unknown error has occurred'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
|
|
|
|
if exist:
|
|
|
|
path = 'edit'
|
|
|
|
description = 'Edit an existing bookmark'
|
|
|
|
entry = Syndication.extract_items(item_payload)
|
|
|
|
entry['instances'] = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
|
|
|
|
print(jabber_id)
|
|
|
|
print(entry['tags'])
|
|
|
|
else:
|
|
|
|
# TODO Consider redirect to path /save (function save_get)
|
|
|
|
# NOTE This seems to be the best thing to do, although perhaps the pathname should be /save instead of /url/hash/edit.
|
|
|
|
path = 'save' # 'add'
|
|
|
|
description = 'Add a new bookmark'
|
|
|
|
result = SQLite.get_entry_by_url_hash(db_file, url_hash)
|
|
|
|
tags_sorted = []
|
|
|
|
if result:
|
|
|
|
for tag in SQLite.get_tags_by_entry_id(db_file, result[0]):
|
|
|
|
tags_sorted.append(tag[0])
|
|
|
|
entry = {'title' : result[3],
|
|
|
|
'link' : result[2],
|
|
|
|
'summary' : result[4],
|
|
|
|
'published' : result[6],
|
|
|
|
'updated' : result[7],
|
|
|
|
'tags' : tags_sorted}
|
|
|
|
#'instances' : result[8],
|
|
|
|
#'jid' = jabber_id,
|
|
|
|
#'name' : name,
|
|
|
|
#'url_hash' : url_hash
|
|
|
|
if entry:
|
|
|
|
entry['jid'] = jabber_id
|
|
|
|
entry['name'] = name
|
|
|
|
entry['url_hash'] = url_hash
|
|
|
|
else:
|
|
|
|
message = 'XMPP system message » {}.'.format(iq)
|
|
|
|
if iq == 'Node not found':
|
|
|
|
description = 'An error has occurred'
|
|
|
|
else:
|
|
|
|
description = 'An unknown error has occurred'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
template_file = 'edit.xhtml'
|
|
|
|
template_dict = {
|
|
|
|
'request' : request,
|
|
|
|
'description' : description,
|
|
|
|
'edit' : True,
|
|
|
|
'jabber_id' : jabber_id,
|
|
|
|
'journal' : journal,
|
|
|
|
'node' : node,
|
|
|
|
'path' : path,
|
|
|
|
'published' : entry['published'],
|
|
|
|
'summary' : entry['summary'],
|
|
|
|
'tags' : ', '.join(entry['tags']),
|
|
|
|
'title' : entry['title'],
|
|
|
|
'url' : entry['link'],
|
|
|
|
'url_hash' : url_hash}
|
|
|
|
response = templates.TemplateResponse(template_file, template_dict)
|
|
|
|
response.headers["Content-Type"] = "application/xhtml+xml"
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: No active session.'
|
|
|
|
description = 'You are not connected'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
else:
|
|
|
|
message = 'Blasta system message » Error: MD5 message-digest algorithm.'
|
|
|
|
description = 'The argument for URL does not appear to be a valid MD5 Checksum'
|
|
|
|
path = 'error'
|
|
|
|
return result_post(request, jabber_id, description, message, path)
|
|
|
|
return response
|
|
|
|
|
|
|
|
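# Static-style helpers for the SQLite database (main.sqlite): connection handling, schema creation, and the queries used by the web routes.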
class SQLite:
|
|
|
|
|
|
|
|
#from slixfeed.log import Logger
|
|
|
|
#from slixfeed.utilities import DateAndTime, Url
|
|
|
|
|
|
|
|
# DBLOCK = Lock()
|
|
|
|
|
|
|
|
#logger = Logger(__name__)
|
|
|
|
|
|
|
|
def create_connection(db_file):
|
|
|
|
"""
|
|
|
|
Create a database connection to the SQLite database
|
|
|
|
specified by db_file.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
conn : object
|
|
|
|
Connection object or None.
|
|
|
|
"""
|
|
|
|
time_begin = time.time()
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# message_log = '{}'
|
|
|
|
# logger.debug(message_log.format(function_name))
|
|
|
|
conn = None
|
|
|
|
try:
|
|
|
|
conn = connect(db_file)
|
|
|
|
conn.execute("PRAGMA foreign_keys = ON")
|
|
|
|
# return conn
|
|
|
|
except Error as e:
|
|
|
|
print(e)
|
|
|
|
# logger.warning('Error creating a connection to database {}.'.format(db_file))
|
|
|
|
# logger.error(e)
|
|
|
|
time_end = time.time()
|
|
|
|
difference = time_end - time_begin
|
|
|
|
if difference > 1: print('{} (time: {})'.format(function_name,  # logger is commented out above; print matches the error handling in this function
|
|
|
|
difference))
|
|
|
|
return conn
|
|
|
|
|
|
|
|
|
|
|
|
def create_tables(db_file):
|
|
|
|
"""
|
|
|
|
Create SQLite tables.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {}'
|
|
|
|
# .format(function_name, db_file))
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
sql_table_main_entries = (
|
|
|
|
"""
|
|
|
|
CREATE TABLE IF NOT EXISTS main_entries (
|
|
|
|
id INTEGER NOT NULL,
|
|
|
|
url_hash TEXT NOT NULL UNIQUE,
|
|
|
|
url TEXT NOT NULL UNIQUE,
|
|
|
|
title TEXT NOT NULL,
|
|
|
|
summary TEXT,
|
|
|
|
jid_id TEXT NOT NULL,
|
|
|
|
date_first TEXT NOT NULL,
|
|
|
|
date_last TEXT NOT NULL,
|
|
|
|
instances INTEGER NOT NULL DEFAULT 1,
|
|
|
|
PRIMARY KEY ("id")
|
|
|
|
);
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
sql_table_main_jids = (
|
|
|
|
"""
|
|
|
|
CREATE TABLE IF NOT EXISTS main_jids (
|
|
|
|
id INTEGER NOT NULL,
|
|
|
|
jid TEXT NOT NULL UNIQUE,
|
|
|
|
opt_in INTEGER NOT NULL DEFAULT 0,
|
|
|
|
PRIMARY KEY ("id")
|
|
|
|
);
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
sql_table_main_tags = (
|
|
|
|
"""
|
|
|
|
CREATE TABLE IF NOT EXISTS main_tags (
|
|
|
|
id INTEGER NOT NULL,
|
|
|
|
tag TEXT NOT NULL UNIQUE,
|
|
|
|
instances INTEGER NOT NULL DEFAULT 1,
|
|
|
|
PRIMARY KEY ("id")
|
|
|
|
);
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
sql_table_main_statistics = (
|
|
|
|
"""
|
|
|
|
CREATE TABLE IF NOT EXISTS main_statistics (
|
|
|
|
id INTEGER NOT NULL,
|
|
|
|
type TEXT NOT NULL UNIQUE,
|
|
|
|
count INTEGER NOT NULL DEFAULT 0,
|
|
|
|
PRIMARY KEY ("id")
|
|
|
|
);
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
sql_table_combination_entries_tags_jids = (
|
|
|
|
"""
|
|
|
|
CREATE TABLE IF NOT EXISTS combination_entries_tags_jids (
|
|
|
|
id INTEGER NOT NULL,
|
|
|
|
entry_id INTEGER NOT NULL,
|
|
|
|
tag_id INTEGER NOT NULL,
|
|
|
|
jid_id INTEGER NOT NULL,
|
|
|
|
FOREIGN KEY ("entry_id") REFERENCES "main_entries" ("id")
|
|
|
|
ON UPDATE CASCADE
|
|
|
|
ON DELETE CASCADE,
|
|
|
|
FOREIGN KEY ("tag_id") REFERENCES "main_tags" ("id")
|
|
|
|
ON UPDATE CASCADE
|
|
|
|
ON DELETE CASCADE,
|
|
|
|
FOREIGN KEY ("jid_id") REFERENCES "main_jids" ("id")
|
|
|
|
ON UPDATE CASCADE
|
|
|
|
ON DELETE CASCADE,
|
|
|
|
PRIMARY KEY ("id")
|
|
|
|
);
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
# NOTE Digit for JID which is authorized;
|
|
|
|
# Zero (0) for private;
|
|
|
|
# Empty (no row) for public.
|
|
|
|
sql_table_authorization_entries_jids = (
|
|
|
|
"""
|
|
|
|
CREATE TABLE IF NOT EXISTS authorization_entries_jids (
|
|
|
|
id INTEGER NOT NULL,
|
|
|
|
entry_id INTEGER NOT NULL,
|
|
|
|
jid_id INTEGER NOT NULL,
|
|
|
|
authorization INTEGER NOT NULL,
|
|
|
|
FOREIGN KEY ("entry_id") REFERENCES "main_entries" ("id")
|
|
|
|
ON UPDATE CASCADE
|
|
|
|
ON DELETE CASCADE,
|
|
|
|
FOREIGN KEY ("jid_id") REFERENCES "main_jids" ("id")
|
|
|
|
ON UPDATE CASCADE
|
|
|
|
ON DELETE CASCADE,
|
|
|
|
PRIMARY KEY ("id")
|
|
|
|
);
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
sql_table_report_entries = (
|
|
|
|
"""
|
|
|
|
CREATE TABLE IF NOT EXISTS report_entries (
|
|
|
|
id INTEGER NOT NULL,
|
|
|
|
url_hash_subject TEXT NOT NULL,
|
|
|
|
jid_reporter TEXT NOT NULL,
|
|
|
|
type TEXT,
|
|
|
|
comment TEXT,
|
|
|
|
PRIMARY KEY ("id")
|
|
|
|
);
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
sql_table_report_jids = (
|
|
|
|
"""
|
|
|
|
CREATE TABLE IF NOT EXISTS report_jids (
|
|
|
|
id INTEGER NOT NULL,
|
|
|
|
jid_subject TEXT NOT NULL,
|
|
|
|
jid_reporter TEXT NOT NULL,
|
|
|
|
type TEXT,
|
|
|
|
comment TEXT,
|
|
|
|
PRIMARY KEY ("id")
|
|
|
|
);
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
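            # Triggers that keep main_entries.instances equal to the number of distinct JIDs referencing each entry in combination_entries_tags_jids.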
        sql_trigger_instances_entry_decrease = (
            """
            CREATE TRIGGER instances_entry_decrease
            AFTER DELETE ON combination_entries_tags_jids
            FOR EACH ROW
            BEGIN
                UPDATE main_entries
                SET instances = (
                    SELECT COUNT(DISTINCT jid_id)
                    FROM combination_entries_tags_jids
                    WHERE entry_id = OLD.entry_id
                )
                WHERE id = OLD.entry_id;
            END;
            """
            )
        sql_trigger_instances_entry_increase = (
            """
            CREATE TRIGGER instances_entry_increase
            AFTER INSERT ON combination_entries_tags_jids
            FOR EACH ROW
            BEGIN
                UPDATE main_entries
                SET instances = (
                    SELECT COUNT(DISTINCT jid_id)
                    FROM combination_entries_tags_jids
                    WHERE entry_id = NEW.entry_id
                )
                WHERE id = NEW.entry_id;
            END;
            """
            )
        sql_trigger_instances_entry_update = (
            """
            CREATE TRIGGER instances_entry_update
            AFTER UPDATE ON combination_entries_tags_jids
            FOR EACH ROW
            BEGIN
                -- Decrease instances for the old tag_id
                UPDATE main_entries
                SET instances = (
                    SELECT COUNT(DISTINCT jid_id)
                    FROM combination_entries_tags_jids
                    WHERE entry_id = OLD.entry_id
                )
                WHERE id = OLD.entry_id;

                -- Increase instances for the new tag_id
                UPDATE main_entries
                SET instances = (
                    SELECT COUNT(DISTINCT jid_id)
                    FROM combination_entries_tags_jids
                    WHERE entry_id = NEW.entry_id
                )
                WHERE id = NEW.entry_id;
            END;
            """
            )
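        # NOTE How the entry instances counter behaves (worked example,
        # assuming two JIDs that both bookmark the same entry):
        #
        #     INSERT row (entry_id=1, tag_id=7, jid_id=1)  -> instances = 1
        #     INSERT row (entry_id=1, tag_id=3, jid_id=2)  -> instances = 2
        #     DELETE row (entry_id=1, tag_id=7, jid_id=1)  -> instances = 1
        #
        # The counter is recomputed with COUNT(DISTINCT jid_id), so several
        # tags from the same JID still count as a single instance.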
        sql_trigger_instances_tag_decrease = (
            """
            CREATE TRIGGER instances_tag_decrease
            AFTER DELETE ON combination_entries_tags_jids
            FOR EACH ROW
            BEGIN
                UPDATE main_tags
                SET instances = (
                    SELECT COUNT(*)
                    FROM combination_entries_tags_jids
                    WHERE tag_id = OLD.tag_id
                )
                WHERE id = OLD.tag_id;
            END;
            """
            )
        sql_trigger_instances_tag_increase = (
            """
            CREATE TRIGGER instances_tag_increase
            AFTER INSERT ON combination_entries_tags_jids
            FOR EACH ROW
            BEGIN
                UPDATE main_tags
                SET instances = (
                    SELECT COUNT(*)
                    FROM combination_entries_tags_jids
                    WHERE tag_id = NEW.tag_id
                )
                WHERE id = NEW.tag_id;
            END;
            """
            )
        sql_trigger_instances_tag_update = (
            """
            CREATE TRIGGER instances_tag_update
            AFTER UPDATE ON combination_entries_tags_jids
            FOR EACH ROW
            BEGIN
                -- Decrease instances for the old tag_id
                UPDATE main_tags
                SET instances = (
                    SELECT COUNT(*)
                    FROM combination_entries_tags_jids
                    WHERE tag_id = OLD.tag_id
                )
                WHERE id = OLD.tag_id;

                -- Increase instances for the new tag_id
                UPDATE main_tags
                SET instances = (
                    SELECT COUNT(*)
                    FROM combination_entries_tags_jids
                    WHERE tag_id = NEW.tag_id
                )
                WHERE id = NEW.tag_id;
            END;
            """
            )
        sql_trigger_entry_count_increase = (
            """
            CREATE TRIGGER entry_count_increase
            AFTER INSERT ON main_entries
            BEGIN
                UPDATE main_statistics
                SET count = (
                    SELECT COUNT(*)
                    FROM main_entries
                )
                WHERE type = 'entries';
            END;
            """
            )
        sql_trigger_entry_count_decrease = (
            """
            CREATE TRIGGER entry_count_decrease
            AFTER DELETE ON main_entries
            BEGIN
                UPDATE main_statistics
                SET count = (
                    SELECT COUNT(*)
                    FROM main_entries
                )
                WHERE type = 'entries';
            END;
            """
            )
        sql_trigger_entry_count_update = (
            """
            CREATE TRIGGER entry_count_update
            AFTER UPDATE ON main_entries
            BEGIN
                UPDATE main_statistics
                SET count = (
                    SELECT COUNT(*)
                    FROM main_entries
                )
                WHERE type = 'entries';
            END;
            """
            )
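        # NOTE The *_count triggers only UPDATE existing rows in
        # main_statistics; they never INSERT.  The 'entries', 'jids' and
        # 'tags' rows are therefore seeded once by add_statistics() right
        # after the tables are created, otherwise the counters would stay
        # empty.  A quick way to inspect them:
        #
        #     SELECT type, count FROM main_statistics;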
        sql_trigger_entry_remove = (
            """
            CREATE TRIGGER entry_remove
            AFTER UPDATE ON main_entries
            FOR EACH ROW
            WHEN NEW.instances < 1
            BEGIN
                DELETE FROM main_entries WHERE id = OLD.id;
            END;
            """
            )
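        # NOTE entry_remove chains off instances_entry_decrease: deleting the
        # last combination row of an entry recomputes main_entries.instances
        # to 0, that UPDATE fires this trigger (WHEN NEW.instances < 1), and
        # the orphaned entry is purged; with foreign keys enforced, its
        # remaining combination and authorization rows follow via
        # ON DELETE CASCADE.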
        sql_trigger_jid_count_increase = (
            """
            CREATE TRIGGER jid_count_increase
            AFTER INSERT ON main_jids
            BEGIN
                UPDATE main_statistics
                SET count = (
                    SELECT COUNT(*)
                    FROM main_jids
                )
                WHERE type = 'jids';
            END;
            """
            )
        sql_trigger_jid_count_decrease = (
            """
            CREATE TRIGGER jid_count_decrease
            AFTER DELETE ON main_jids
            BEGIN
                UPDATE main_statistics
                SET count = (
                    SELECT COUNT(*)
                    FROM main_jids
                )
                WHERE type = 'jids';
            END;
            """
            )
        sql_trigger_jid_count_update = (
            """
            CREATE TRIGGER jid_count_update
            AFTER UPDATE ON main_jids
            BEGIN
                UPDATE main_statistics
                SET count = (
                    SELECT COUNT(*)
                    FROM main_jids
                )
                WHERE type = 'jids';
            END;
            """
            )
        sql_trigger_tag_count_increase = (
            """
            CREATE TRIGGER tag_count_increase
            AFTER INSERT ON main_tags
            BEGIN
                UPDATE main_statistics
                SET count = (
                    SELECT COUNT(*)
                    FROM main_tags
                )
                WHERE type = 'tags';
            END;
            """
            )
        sql_trigger_tag_count_decrease = (
            """
            CREATE TRIGGER tag_count_decrease
            AFTER DELETE ON main_tags
            BEGIN
                UPDATE main_statistics
                SET count = (
                    SELECT COUNT(*)
                    FROM main_tags
                )
                WHERE type = 'tags';
            END;
            """
            )
        sql_trigger_tag_count_update = (
            """
            CREATE TRIGGER tag_count_update
            AFTER UPDATE ON main_tags
            BEGIN
                UPDATE main_statistics
                SET count = (
                    SELECT COUNT(*)
                    FROM main_tags
                )
                WHERE type = 'tags';
            END;
            """
            )
        sql_trigger_tag_remove = (
            """
            CREATE TRIGGER tag_remove
            AFTER UPDATE ON main_tags
            FOR EACH ROW
            WHEN NEW.instances < 1
            BEGIN
                DELETE FROM main_tags WHERE id = OLD.id;
            END;
            """
            )
        cur = conn.cursor()
        cur.execute(sql_table_main_entries)
        cur.execute(sql_table_main_jids)
        cur.execute(sql_table_main_tags)
        cur.execute(sql_table_main_statistics)
        cur.execute(sql_table_combination_entries_tags_jids)
        cur.execute(sql_table_authorization_entries_jids)
        cur.execute(sql_table_report_entries)
        cur.execute(sql_table_report_jids)
        cur.execute(sql_trigger_instances_entry_decrease)
        cur.execute(sql_trigger_instances_entry_increase)
        cur.execute(sql_trigger_instances_entry_update)
        cur.execute(sql_trigger_instances_tag_decrease)
        cur.execute(sql_trigger_instances_tag_increase)
        cur.execute(sql_trigger_instances_tag_update)
        cur.execute(sql_trigger_entry_count_increase)
        cur.execute(sql_trigger_entry_count_decrease)
        cur.execute(sql_trigger_entry_count_update)
        cur.execute(sql_trigger_entry_remove)
        cur.execute(sql_trigger_jid_count_increase)
        cur.execute(sql_trigger_jid_count_decrease)
        cur.execute(sql_trigger_jid_count_update)
        cur.execute(sql_trigger_tag_count_increase)
        cur.execute(sql_trigger_tag_count_decrease)
        cur.execute(sql_trigger_tag_count_update)
        cur.execute(sql_trigger_tag_remove)

    def add_statistics(db_file):
        """
        Seed the statistics table with the 'entries', 'jids' and 'tags'
        counter rows.

        Parameters
        ----------
        db_file : str
            Path to database file.

        Returns
        -------
        None.

        Note
        ----
        This function is executed immediately after the creation of the database
        and, therefore, the directive "async with DBLOCK:" is not necessary.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {}'
        #              .format(function_name, db_file))
        sql = (
            """
            INSERT
            INTO main_statistics(
                type)
            VALUES ('entries'),
                   ('jids'),
                   ('tags');
            """
            )
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            try:
                cur.execute(sql)
            except IntegrityError as e:
                print(e)

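    # NOTE Typical bootstrap order (illustrative sketch; the database file
    # name is hypothetical and the table-creation routine above is assumed
    # to be exposed as SQLite.create_tables):
    #
    #     SQLite.create_tables('blasta.sqlite')
    #     SQLite.add_statistics('blasta.sqlite')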
    async def associate_entries_tags_jids(db_file, entry):
        """
        Associate an entry with its tags and its Jabber ID by inserting the
        missing rows into table combination_entries_tags_jids.
        """
        async with DBLOCK:
            with SQLite.create_connection(db_file) as conn:
                cur = conn.cursor()
                jid = entry['jid']
                url_hash = entry['url_hash']
                entry_id = SQLite.get_entry_id_by_url_hash(db_file, url_hash)
                jid_id = SQLite.get_jid_id_by_jid(db_file, jid)
                if entry_id:
                    for tag in entry['tags']:
                        tag_id = SQLite.get_tag_id_by_tag(db_file, tag)
                        cet_id = SQLite.get_combination_id_by_entry_id_tag_id_jid_id(db_file, entry_id, tag_id, jid_id)
                        if not cet_id:
                            sql = (
                                """
                                INSERT
                                INTO combination_entries_tags_jids (
                                    entry_id, tag_id, jid_id)
                                VALUES (
                                    ?, ?, ?);
                                """
                                )
                            par = (entry_id, tag_id, jid_id)
                            try:
                                cur.execute(sql, par)
                            except IntegrityError as e:
                                print('associate_entries_tags_jids')
                                print(e)

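    # NOTE Shape of the 'entry' mapping expected by
    # associate_entries_tags_jids() and add_new_entries(), as read from the
    # pubsub payload (values are illustrative only):
    #
    #     entry = {
    #         'jid': 'alice@example.org',
    #         'url_hash': '0cc175b9c0f1b6a831c399e269772661',
    #         'link': 'https://example.org/article',
    #         'title': 'An article',
    #         'summary': 'Short description',
    #         'published': '2024-08-22T16:09:04+02:00',
    #         'tags': ['xmpp', 'pubsub'],
    #     }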
    async def add_tags(db_file, entries):
        """
        Batch insertion of tags.

        Parameters
        ----------
        db_file : str
            Path to database file.
        entries : list
            Set of entries.

        Returns
        -------
        None.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {}'
        #              .format(function_name, db_file))
        async with DBLOCK:
            with SQLite.create_connection(db_file) as conn:
                cur = conn.cursor()
                for entry in entries:
                    tags = entry['tags']
                    for tag in tags:
                        # sql = (
                        #     """
                        #     INSERT OR IGNORE INTO main_tags(tag) VALUES (?);
                        #     """
                        #     )
                        if not SQLite.get_tag_id_by_tag(db_file, tag):
                            sql = (
                                """
                                INSERT INTO main_tags(tag) VALUES(?);
                                """
                                )
                            par = (tag,)
                            try:
                                cur.execute(sql, par)
                            except IntegrityError as e:
                                print(e)

    async def add_new_entries(db_file, entries):
        """
        Batch insert of new entries into table entries.

        Parameters
        ----------
        db_file : str
            Path to database file.
        entries : list
            Set of entries.

        Returns
        -------
        None.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {}'
        #              .format(function_name, db_file))
        async with DBLOCK:

            with SQLite.create_connection(db_file) as conn:
                cur = conn.cursor()

                for entry in entries:
                    url_hash = entry['url_hash']
                    url = entry['link']
                    title = entry['title']
                    summary = entry['summary']
                    jid = entry['jid']
                    date_first = entry['published']
                    date_last = entry['published']
                    # instances = entry['instances']

                    # Import entries
                    jid_id = SQLite.get_jid_id_by_jid(db_file, jid)
                    sql = (
                        """
                        INSERT
                        INTO main_entries(
                            url_hash, url, title, summary, jid_id, date_first, date_last)
                        VALUES(
                            ?, ?, ?, ?, ?, ?, ?);
                        """
                        )
                    par = (url_hash, url, title, summary, jid_id, date_first, date_last)

                    try:
                        cur.execute(sql, par)
                    except IntegrityError as e:
                        print(e)
                        print(jid_id)
                        print(entry)
                        # logger.warning("Skipping: " + str(url))
                        # logger.error(e)

    # TODO An additional function to associate jid_id (jid) with entry_id (url_hash)
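    # NOTE url_hash is an MD5 hex digest of the URL (see the docstrings
    # below).  A minimal sketch of how the hash is presumably derived
    # elsewhere in the application:
    #
    #     url_hash = hashlib.md5(url.encode('utf-8')).hexdigest()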
    async def set_jid(db_file, jid):
        """
        Add a JID to database.

        Parameters
        ----------
        db_file : str
            Path to database file.
        jid : str
            A Jabber ID.

        Returns
        -------
        None.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} jid: {}'
        #              .format(function_name, db_file, jid))
        sql = (
            """
            INSERT
            INTO main_jids(
                jid)
            VALUES(
                ?);
            """
            )
        par = (jid, )
        async with DBLOCK:
            with SQLite.create_connection(db_file) as conn:
                cur = conn.cursor()
                try:
                    cur.execute(sql, par)
                except IntegrityError as e:
                    print(e)
                    # logger.warning("Skipping: " + str(url))
                    # logger.error(e)

    def get_entries_count(db_file):
        """
        Get entries count.

        Parameters
        ----------
        db_file : str
            Path to database file.

        Returns
        -------
        result : tuple
            Number.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {}'
        #              .format(function_name, db_file))
        sql = (
            """
            SELECT count
            FROM main_statistics
            WHERE type = "entries";
            """
            )
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql).fetchone()
        return result[0] if result and len(result) == 1 else result

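    # NOTE The getters below share one convention: fetchone() yields a
    # one-element tuple when a single column is selected, so the expression
    # `result[0] if result and len(result) == 1 else result` returns the
    # bare value in that case and the raw row (or None) otherwise.
    # Example (file name is illustrative):
    #
    #     count = SQLite.get_entries_count('blasta.sqlite')  # -> int or None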
    def get_combination_id_by_entry_id_tag_id_jid_id(db_file, entry_id, tag_id, jid_id):
        """
        Get ID by a given Entry ID and a given Tag ID and a given Jabber ID.

        Parameters
        ----------
        db_file : str
            Path to database file.
        entry_id : str
            Entry ID.
        tag_id : str
            Tag ID.
        jid_id : str
            Jabber ID.

        Returns
        -------
        result : tuple
            ID.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} entry_id: {} tag_id: {} jid_id: {}'
        #              .format(function_name, db_file, entry_id, tag_id, jid_id))
        sql = (
            """
            SELECT id
            FROM combination_entries_tags_jids
            WHERE entry_id = :entry_id AND tag_id = :tag_id AND jid_id = :jid_id;
            """
            )
        par = {
            "entry_id": entry_id,
            "tag_id": tag_id,
            "jid_id": jid_id
            }
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql, par).fetchone()
        return result[0] if result and len(result) == 1 else result

    async def delete_combination_row_by_url_hash_and_tag_and_jid(db_file, url_hash, tags, jid):
        """
        Delete a row by a given entry ID and a given Jabber ID and given tags.

        Parameters
        ----------
        db_file : str
            Path to database file.
        url_hash : str
            URL hash.
        tags : list
            Tags.
        jid : str
            Jabber ID.

        Returns
        -------
        None.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} url_hash: {} tag_id: {} jid_id: {}'
        #              .format(function_name, db_file, url_hash, tag_id, jid_id))
        sql = (
            """
            DELETE
            FROM combination_entries_tags_jids
            WHERE
                entry_id = (SELECT id FROM main_entries WHERE url_hash = :url_hash) AND
                tag_id = (SELECT id FROM main_tags WHERE tag = :tag) AND
                jid_id = (SELECT id FROM main_jids WHERE jid = :jid);
            """
            )
        async with DBLOCK:
            with SQLite.create_connection(db_file) as conn:
                for tag in tags:
                    par = {
                        "url_hash": url_hash,
                        "tag": tag,
                        "jid": jid
                        }
                    cur = conn.cursor()
                    cur.execute(sql, par)

    def get_tag_id_and_instances_by_tag(db_file, tag):
        """
        Get a tag ID and instances by a given tag.

        Parameters
        ----------
        db_file : str
            Path to database file.
        tag : str
            Tag.

        Returns
        -------
        result : tuple
            Tag ID and instances.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} tag: {}'
        #              .format(function_name, db_file, tag))
        sql = (
            """
            SELECT id, instances
            FROM main_tags
            WHERE tag = ?;
            """
            )
        par = (tag,)
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql, par).fetchone()
        # return result[0] if result else None, None
        if not result: result = None, None
        return result

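    # NOTE get_tag_id_and_instances_by_tag() always yields a two-element
    # tuple, so callers can unpack it directly even when the tag is unknown
    # (the tag name here is illustrative):
    #
    #     tag_id, instances = SQLite.get_tag_id_and_instances_by_tag(db_file, 'xmpp')
    #     if tag_id is None:
    #         ...  # tag is not present yet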
def get_tags_and_instances_by_url_hash(db_file, url_hash):
|
|
|
|
"""
|
|
|
|
Get tags and instances by a given URL hash.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
url_hash : str
|
|
|
|
A hash of a URL.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT mt.tag, mt.instances
|
|
|
|
FROM main_tags AS mt
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON mt.id = co.tag_id
|
|
|
|
INNER JOIN main_entries AS me ON me.id = co.entry_id
|
|
|
|
WHERE me.url_hash = ?
|
|
|
|
ORDER BY mt.instances DESC;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (url_hash,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_tags_and_instances_by_entry_id(db_file, entry_id):
|
|
|
|
"""
|
|
|
|
Get tags and instances by a given ID entry.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
entry_id : str
|
|
|
|
An ID of an entry.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT main_tags.tag, main_tags.instances
|
|
|
|
FROM main_tags
|
|
|
|
INNER JOIN combination_entries_tags_jids ON main_tags.id = combination_entries_tags_jids.tag_id
|
|
|
|
WHERE combination_entries_tags_jids.entry_id = ?
|
|
|
|
ORDER BY main_tags.instances DESC;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (entry_id,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
2024-08-28 14:03:19 +02:00
|
|
|
def get_jids_and_tags_by_entry_id(db_file, entry_id):
|
|
|
|
"""
|
|
|
|
Get JIDs and tags by a given ID entry.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
entry_id : str
|
|
|
|
An ID of an entry.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
JIDs and tags.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT main_jids.jid, main_tags.tag
|
|
|
|
FROM main_tags
|
|
|
|
INNER JOIN combination_entries_tags_jids ON main_tags.id = combination_entries_tags_jids.tag_id
|
|
|
|
INNER JOIN main_jids ON main_jids.id = combination_entries_tags_jids.jid_id
|
|
|
|
WHERE combination_entries_tags_jids.entry_id = ?
|
|
|
|
ORDER BY main_tags.instances DESC;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (entry_id,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_jids_and_tags_by_url_hash(db_file, url_hash):
|
|
|
|
"""
|
|
|
|
Get JIDs and tags by a given URI hash.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
url_hash : str
|
|
|
|
A URL hash of an entry.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
JIDs and tags.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT main_jids.jid, main_tags.tag
|
|
|
|
FROM main_tags
|
|
|
|
INNER JOIN combination_entries_tags_jids ON main_tags.id = combination_entries_tags_jids.tag_id
|
|
|
|
INNER JOIN main_jids ON main_jids.id = combination_entries_tags_jids.jid_id
|
|
|
|
INNER JOIN main_entries ON main_entries.id = combination_entries_tags_jids.entry_id
|
|
|
|
WHERE main_entries.url_hash = ?
|
|
|
|
ORDER BY main_tags.instances DESC;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (url_hash,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
2024-08-22 16:09:04 +02:00
|
|
|
def get_tag_id_by_tag(db_file, tag):
|
|
|
|
"""
|
|
|
|
Get a tag ID by a given tag.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
tag : str
|
|
|
|
Tag.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tag ID.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} tag: {}'
|
|
|
|
# .format(function_name, db_file, tag))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT id
|
|
|
|
FROM main_tags
|
|
|
|
WHERE tag = ?;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (tag,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchone()
|
|
|
|
return result[0] if result and len(result) == 1 else result
|
|
|
|
|
|
|
|
def get_entry_id_by_url_hash(db_file, url_hash):
|
|
|
|
"""
|
|
|
|
Get an entry ID by a given URL hash.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
url_hash : str
|
|
|
|
MD5 hash of URL.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entry ID.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} url_hash: {}'
|
|
|
|
# .format(function_name, db_file, url_hash))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT id
|
|
|
|
FROM main_entries
|
|
|
|
WHERE url_hash = ?;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (url_hash,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchone()
|
|
|
|
return result[0] if result and len(result) == 1 else result
|
|
|
|
|
|
|
|
def get_entry_instances_by_url_hash(db_file, url_hash):
|
|
|
|
"""
|
|
|
|
Get value of entry instances by a given URL hash.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
url_hash : str
|
|
|
|
MD5 hash of URL.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Value of entry instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} url_hash: {}'
|
|
|
|
# .format(function_name, db_file, url_hash))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT instances
|
|
|
|
FROM main_entries
|
|
|
|
WHERE url_hash = ?;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (url_hash,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchone()
|
|
|
|
return result[0] if result and len(result) == 1 else result
|
|
|
|
|
|
|
|
def get_entry_by_url_hash(db_file, url_hash):
|
|
|
|
"""
|
|
|
|
Get entry of a given URL hash.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
url_hash : str
|
|
|
|
MD5 hash of URL.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entry properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} url_hash: {}'
|
|
|
|
# .format(function_name, db_file, url_hash))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT *
|
|
|
|
FROM main_entries
|
|
|
|
WHERE url_hash = ?;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (url_hash,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchone()
|
|
|
|
return result[0] if result and len(result) == 1 else result
|
|
|
|
|
|
|
|
def get_entries_new(db_file, index_first):
|
|
|
|
"""
|
|
|
|
Get new entries.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT *
|
|
|
|
FROM main_entries
|
|
|
|
ORDER BY date_first DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET ?;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (index_first,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_entries_popular(db_file, index_first):
|
|
|
|
"""
|
|
|
|
Get popular entries.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT *
|
|
|
|
FROM main_entries
|
|
|
|
ORDER BY instances DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET ?;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (index_first,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_entries_recent(db_file, index_first):
|
|
|
|
"""
|
|
|
|
Get recent entries.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT *
|
|
|
|
FROM main_entries
|
|
|
|
ORDER BY date_last DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET ?;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (index_first,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_entries_by_query(db_file, query, index_first):
|
|
|
|
"""
|
|
|
|
Get entries by a query.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
query : str
|
|
|
|
Search query.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT *
|
|
|
|
FROM main_entries
|
|
|
|
WHERE title LIKE :query OR url LIKE :query OR summary LIKE :query
|
|
|
|
ORDER BY instances DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"query": f'%{query}%',
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_entries_count_by_query(db_file, query):
|
|
|
|
"""
|
|
|
|
Get entries count by a query.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
query : str
|
|
|
|
Search query.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {}'
|
|
|
|
# .format(function_name, db_file))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT COUNT(id)
|
|
|
|
FROM main_entries
|
|
|
|
WHERE title LIKE :query OR url LIKE :query OR summary LIKE :query
|
|
|
|
ORDER BY instances DESC;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"query": f'%{query}%',
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchone()
|
|
|
|
return result[0] if result and len(result) == 1 else result
|
|
|
|
|
|
|
|
def get_entries_by_jid_and_tag(db_file, jid, tag, index_first):
|
|
|
|
"""
|
|
|
|
Get entries by a tag and a Jabber ID.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
tag : str
|
|
|
|
Tag.
|
|
|
|
jid : str
|
|
|
|
Jabber ID.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} tag: {} jid: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, tag, jid, index_first))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT me.*
|
|
|
|
FROM main_entries AS me
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON co.entry_id = me.id
|
|
|
|
INNER JOIN main_jids AS mj ON mj.id = co.jid_id
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE mj.jid = :jid AND mt.tag = :tag
|
|
|
|
ORDER BY instances DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"jid": jid,
|
|
|
|
"tag": tag,
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_entries_count_by_jid_and_tag(db_file, jid, tag):
|
|
|
|
"""
|
|
|
|
Get entries count by a tag and a Jabber ID.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
tag : str
|
|
|
|
Tag.
|
|
|
|
jid : str
|
|
|
|
Jabber ID.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} tag: {} jid: {}'
|
|
|
|
# .format(function_name, db_file, tag, jid))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT COUNT(DISTINCT me.id)
|
|
|
|
FROM main_entries AS me
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON co.entry_id = me.id
|
|
|
|
INNER JOIN main_jids AS mj ON mj.id = co.jid_id
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE mj.jid = :jid AND mt.tag = :tag;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"jid": jid,
|
|
|
|
"tag": tag
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchone()
|
|
|
|
return result[0] if result and len(result) == 1 else result
|
|
|
|
|
|
|
|
def get_entries_by_jid_and_query(db_file, jid, query, index_first):
|
|
|
|
"""
|
|
|
|
Get entries by a query and a Jabber ID.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
query : str
|
|
|
|
Search query.
|
|
|
|
jid : str
|
|
|
|
Jabber ID.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} query: {} jid: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, query, jid, index_first))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT me.*
|
|
|
|
FROM main_entries AS me
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON co.entry_id = me.id
|
|
|
|
INNER JOIN main_jids AS mj ON mj.id = co.jid_id
|
|
|
|
WHERE mj.jid = :jid AND (title LIKE :query OR url LIKE :query OR summary LIKE :query)
|
|
|
|
ORDER BY instances DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"jid": jid,
|
|
|
|
"query": f'%{query}%',
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_entries_count_by_jid_and_query(db_file, jid, query):
|
|
|
|
"""
|
|
|
|
Get entries count by a query and a Jabber ID.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
query : str
|
|
|
|
Search query.
|
|
|
|
jid : str
|
|
|
|
Jabber ID.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} query: {} jid: {}'
|
|
|
|
# .format(function_name, db_file, query, jid))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT COUNT(DISTINCT me.id)
|
|
|
|
FROM main_entries AS me
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON co.entry_id = me.id
|
|
|
|
INNER JOIN main_jids AS mj ON mj.id = co.jid_id
|
|
|
|
WHERE mj.jid = :jid AND (title LIKE :query OR url LIKE :query OR summary LIKE :query);
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"jid": jid,
|
|
|
|
"query": f'%{query}%'
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchone()
|
|
|
|
return result[0] if result and len(result) == 1 else result
|
|
|
|
|
|
|
|
def get_entries_by_jid(db_file, jid, index_first):
|
|
|
|
"""
|
|
|
|
Get entries by a Jabber ID.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
jid : str
|
|
|
|
Jabber ID.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} jid: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, jid, index_first))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT me.*
|
|
|
|
FROM main_entries AS me
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON co.entry_id = me.id
|
|
|
|
INNER JOIN main_jids AS mj ON mj.id = co.jid_id
|
|
|
|
WHERE mj.jid = :jid
|
|
|
|
ORDER BY instances DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"jid": jid,
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_entries_count_by_jid(db_file, jid):
|
|
|
|
"""
|
|
|
|
Get entries count by a Jabber ID.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
jid : str
|
|
|
|
Jabber ID.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries properties.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} jid: {}'
|
|
|
|
# .format(function_name, db_file, jid))
|
|
|
|
# NOTE Consider date_first
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT COUNT(DISTINCT me.id)
|
|
|
|
FROM main_entries AS me
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON co.entry_id = me.id
|
|
|
|
INNER JOIN main_jids AS mj ON mj.id = co.jid_id
|
|
|
|
WHERE mj.jid = :jid;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"jid": jid
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchone()
|
|
|
|
return result[0] if result and len(result) == 1 else result
|
|
|
|
|
|
|
|
    def get_entries_count_by_tag(db_file, tag):
        """
        Get entries count by a given tag.

        Parameters
        ----------
        db_file : str
            Path to database file.
        tag : str
            A tag.

        Returns
        -------
        result : tuple
            Entries.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} tag: {}'
        #              .format(function_name, db_file, tag))
        sql = (
            """
            SELECT COUNT(entries.id)
            FROM main_entries AS entries
            INNER JOIN combination_entries_tags_jids AS co ON entries.id = co.entry_id
            INNER JOIN main_tags AS tags ON tags.id = co.tag_id
            WHERE tags.tag = :tag;
            """
            )
        # The query uses a named placeholder, so the parameters must be
        # passed as a mapping rather than as a positional tuple.
        par = {
            "tag": tag
            }
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql, par).fetchone()
        return result[0] if result and len(result) == 1 else result

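    # NOTE COUNT(entries.id) counts one row per matching combination row, so
    # an entry tagged by several JIDs is counted once per JID here, whereas
    # the *_by_jid_and_tag variants use COUNT(DISTINCT me.id) and count
    # unique entries.  If unique entries are wanted here as well, the
    # aggregate would presumably be COUNT(DISTINCT entries.id).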
def get_entries_popular_by_tag(db_file, tag, index_first):
|
|
|
|
"""
|
|
|
|
Get popular entries by a given tag.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
tag : str
|
|
|
|
A tag.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} tag: {}'
|
|
|
|
# .format(function_name, db_file, tag))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT entries.*
|
|
|
|
FROM main_entries AS entries
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON entries.id = co.entry_id
|
|
|
|
INNER JOIN main_tags AS tags ON tags.id = co.tag_id
|
|
|
|
WHERE tags.tag = :tag
|
|
|
|
ORDER BY entries.instances DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"tag": tag,
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_entries_recent_by_tag(db_file, tag, index_first):
|
|
|
|
"""
|
|
|
|
Get recent entries by a given tag.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
tag : str
|
|
|
|
A tag.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} tag: {}'
|
|
|
|
# .format(function_name, db_file, tag))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT entries.*
|
|
|
|
FROM main_entries AS entries
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON entries.id = co.entry_id
|
|
|
|
INNER JOIN main_tags AS tags ON tags.id = co.tag_id
|
|
|
|
WHERE tags.tag = :tag
|
|
|
|
ORDER BY date_last DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"tag": tag,
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_entries_new_by_tag(db_file, tag, index_first):
|
|
|
|
"""
|
|
|
|
Get new entries by a given tag.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
tag : str
|
|
|
|
A tag.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Entries.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} tag: {}'
|
|
|
|
# .format(function_name, db_file, tag))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT entries.*
|
|
|
|
FROM main_entries AS entries
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON entries.id = co.entry_id
|
|
|
|
INNER JOIN main_tags AS tags ON tags.id = co.tag_id
|
|
|
|
WHERE tags.tag = :tag
|
|
|
|
ORDER BY date_first DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"tag": tag,
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_tags_30(db_file):
|
|
|
|
"""
|
|
|
|
Get 30 tags.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT tag, instances
|
|
|
|
FROM main_tags
|
|
|
|
ORDER BY instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_30_tags_by_entries_popular(db_file, index_first):
|
|
|
|
"""
|
|
|
|
Get 30 tags by currently viewed popular entries.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT mt.tag, mt.instances
|
|
|
|
FROM combination_entries_tags_jids AS co
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE co.entry_id IN (
|
|
|
|
SELECT id
|
|
|
|
FROM main_entries
|
|
|
|
ORDER BY instances DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET ?
|
|
|
|
)
|
|
|
|
ORDER BY mt.instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (index_first,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_30_tags_by_entries_new_by_tag(db_file, tag, index_first):
|
|
|
|
"""
|
|
|
|
Get 30 tags by currently viewed new entries by a given tag.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT mt.tag, mt.instances
|
|
|
|
FROM combination_entries_tags_jids AS co
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE co.entry_id IN (
|
|
|
|
SELECT DISTINCT entries.id
|
|
|
|
FROM main_entries AS entries
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON entries.id = co.entry_id
|
|
|
|
INNER JOIN main_tags AS tags ON tags.id = co.tag_id
|
|
|
|
WHERE tags.tag = :tag
|
|
|
|
ORDER BY date_first DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first
|
|
|
|
)
|
|
|
|
ORDER BY mt.instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"tag": tag,
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_30_tags_by_entries_popular_by_tag(db_file, tag, index_first):
|
|
|
|
"""
|
|
|
|
Get 30 tags by currently viewed popular entries by a given tag.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT mt.tag, mt.instances
|
|
|
|
FROM combination_entries_tags_jids AS co
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE co.entry_id IN (
|
|
|
|
SELECT DISTINCT entries.id
|
|
|
|
FROM main_entries AS entries
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON entries.id = co.entry_id
|
|
|
|
INNER JOIN main_tags AS tags ON tags.id = co.tag_id
|
|
|
|
WHERE tags.tag = :tag
|
|
|
|
ORDER BY entries.instances DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first
|
|
|
|
)
|
|
|
|
ORDER BY mt.instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"tag": tag,
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_30_tags_by_entries_recent_by_tag(db_file, tag, index_first):
|
|
|
|
"""
|
|
|
|
Get 30 tags by currently viewed recent entries by a given tag.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT mt.tag, mt.instances
|
|
|
|
FROM combination_entries_tags_jids AS co
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE co.entry_id IN (
|
|
|
|
SELECT DISTINCT entries.id
|
|
|
|
FROM main_entries AS entries
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON entries.id = co.entry_id
|
|
|
|
INNER JOIN main_tags AS tags ON tags.id = co.tag_id
|
|
|
|
WHERE tags.tag = :tag
|
|
|
|
ORDER BY date_last DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first
|
|
|
|
)
|
|
|
|
ORDER BY mt.instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"tag": tag,
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_30_tags_by_entries_new(db_file, index_first):
|
|
|
|
"""
|
|
|
|
Get 30 tags by currently viewed new entries.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT mt.tag, mt.instances
|
|
|
|
FROM combination_entries_tags_jids AS co
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE co.entry_id IN (
|
|
|
|
SELECT id
|
|
|
|
FROM main_entries
|
|
|
|
ORDER BY date_first DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET ?
|
|
|
|
)
|
|
|
|
ORDER BY mt.instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (index_first,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_30_tags_by_entries_recent(db_file, index_first):
|
|
|
|
"""
|
|
|
|
Get 30 tags by currently viewed recent entries.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT mt.tag, mt.instances
|
|
|
|
FROM combination_entries_tags_jids AS co
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE co.entry_id IN (
|
|
|
|
SELECT id
|
|
|
|
FROM main_entries
|
|
|
|
ORDER BY date_last DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET ?
|
|
|
|
)
|
|
|
|
ORDER BY mt.instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = (index_first,)
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_30_tags_by_entries_by_query_recent(db_file, query, index_first):
|
|
|
|
"""
|
|
|
|
Get 30 tags by currently viewed entries by query.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
query : str
|
|
|
|
A search query.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT mt.tag, mt.instances
|
|
|
|
FROM combination_entries_tags_jids AS co
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE co.entry_id IN (
|
|
|
|
SELECT id
|
|
|
|
FROM main_entries
|
|
|
|
WHERE title LIKE :query OR url LIKE :query OR summary LIKE :query
|
|
|
|
ORDER BY instances DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first
|
|
|
|
)
|
|
|
|
ORDER BY mt.instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"query": f'%{query}%',
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_30_tags_by_jid_and_tag(db_file, jid, tag, index_first):
|
|
|
|
"""
|
|
|
|
Get 30 tags by Jabber ID and tags.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
jid : str
|
|
|
|
Jabber ID.
|
|
|
|
tag : str
|
|
|
|
A tag.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT mt.tag, mt.instances
|
|
|
|
FROM combination_entries_tags_jids AS co
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE co.entry_id IN (
|
|
|
|
SELECT co.entry_id
|
|
|
|
FROM main_entries AS me
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON co.entry_id = me.id
|
|
|
|
INNER JOIN main_jids AS mj ON mj.id = co.jid_id
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE mj.jid = :jid AND mt.tag = :tag
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first
|
|
|
|
)
|
|
|
|
ORDER BY mt.instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"jid": jid,
|
|
|
|
"tag": tag,
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_30_tags_by_jid_and_query(db_file, jid, query, index_first):
|
|
|
|
"""
|
|
|
|
Get 30 tags by Jabber ID and query.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
jid : str
|
|
|
|
Jabber ID.
|
|
|
|
query : str
|
|
|
|
A search query.
|
|
|
|
index_first : str
|
|
|
|
.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT mt.tag, mt.instances
|
|
|
|
FROM combination_entries_tags_jids AS co
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE co.entry_id IN (
|
|
|
|
SELECT co.entry_id
|
|
|
|
FROM main_entries AS me
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON co.entry_id = me.id
|
|
|
|
INNER JOIN main_jids AS mj ON mj.id = co.jid_id
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE mj.jid = :jid AND (title LIKE :query OR url LIKE :query OR summary LIKE :query)
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first
|
|
|
|
)
|
|
|
|
ORDER BY mt.instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"jid": jid,
|
|
|
|
"query": f'%{query}%',
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
def get_30_tags_by_jid(db_file, jid, index_first):
|
|
|
|
"""
|
|
|
|
Get 30 tags by Jabber ID.
|
|
|
|
|
|
|
|
Parameters
|
|
|
|
----------
|
|
|
|
db_file : str
|
|
|
|
Path to database file.
|
|
|
|
jid : str
|
|
|
|
Jabber ID.
|
|
|
|
|
|
|
|
Returns
|
|
|
|
-------
|
|
|
|
result : tuple
|
|
|
|
Tags and number of instances.
|
|
|
|
"""
|
|
|
|
function_name = sys._getframe().f_code.co_name
|
|
|
|
# logger.debug('{}: db_file: {} index_first: {}'
|
|
|
|
# .format(function_name, db_file, index_first))
|
|
|
|
sql = (
|
|
|
|
"""
|
|
|
|
SELECT DISTINCT mt.tag, mt.instances
|
|
|
|
FROM combination_entries_tags_jids AS co
|
|
|
|
INNER JOIN main_tags AS mt ON mt.id = co.tag_id
|
|
|
|
WHERE co.entry_id IN (
|
|
|
|
SELECT DISTINCT me.id
|
|
|
|
FROM main_entries AS me
|
|
|
|
INNER JOIN combination_entries_tags_jids AS co ON co.entry_id = me.id
|
|
|
|
INNER JOIN main_jids AS mj ON mj.id = co.jid_id
|
|
|
|
WHERE mj.jid = :jid
|
|
|
|
ORDER BY instances DESC
|
|
|
|
LIMIT 10
|
|
|
|
OFFSET :index_first
|
|
|
|
)
|
|
|
|
ORDER BY mt.instances DESC
|
|
|
|
LIMIT 30;
|
|
|
|
"""
|
|
|
|
)
|
|
|
|
par = {
|
|
|
|
"jid": jid,
|
|
|
|
"index_first": index_first
|
|
|
|
}
|
|
|
|
with SQLite.create_connection(db_file) as conn:
|
|
|
|
cur = conn.cursor()
|
|
|
|
result = cur.execute(sql, par).fetchall()
|
|
|
|
return result
|
|
|
|
|
|
|
|
    def get_tags_500(db_file):
        """
        Get 500 tags.

        Parameters
        ----------
        db_file : str
            Path to database file.

        Returns
        -------
        result : tuple
            Tags and number of instances.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {}'
        #              .format(function_name, db_file))
        sql = (
            """
            WITH Common500Tags AS (
                SELECT tag, instances
                FROM main_tags
                ORDER BY instances DESC
                LIMIT 500
            )
            SELECT tag, instances
            FROM Common500Tags
            ORDER BY tag ASC;
            """
            )
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql).fetchall()
        return result

    def get_500_tags_by_jid_sorted_by_name(db_file, jid):
        """
        Get 500 tags by Jabber ID, sorted by name.

        Parameters
        ----------
        db_file : str
            Path to database file.
        jid : str
            Jabber ID.

        Returns
        -------
        result : tuple
            Tags and number of instances.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} jid: {}'
        #              .format(function_name, db_file, jid))
        sql = (
            """
            SELECT mt.tag, COUNT(*) AS instances
            FROM main_tags mt
            JOIN combination_entries_tags_jids combination ON mt.id = combination.tag_id
            JOIN main_jids mj ON combination.jid_id = mj.id
            WHERE mj.jid = :jid
            GROUP BY mt.tag
            ORDER BY mt.tag ASC
            LIMIT 500;
            """
            )
        par = {
            "jid": jid
            }
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql, par).fetchall()
        return result

    def get_500_tags_by_jid_sorted_by_instance(db_file, jid):
        """
        Get 500 tags by Jabber ID, sorted by instance.

        Parameters
        ----------
        db_file : str
            Path to database file.
        jid : str
            Jabber ID.

        Returns
        -------
        result : tuple
            Tags and number of instances.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} jid: {}'
        #              .format(function_name, db_file, jid))
        sql = (
            """
            SELECT mt.tag, COUNT(*) AS instances
            FROM main_tags mt
            JOIN combination_entries_tags_jids combination ON mt.id = combination.tag_id
            JOIN main_jids mj ON combination.jid_id = mj.id
            WHERE mj.jid = :jid
            GROUP BY mt.tag
            ORDER BY instances DESC
            LIMIT 500;
            """
            )
        par = {
            "jid": jid
            }
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql, par).fetchall()
        return result

    # FIXME It appears that the wrong table is fetched.
    # The table to be fetched is combination_entries_tags_jids.
    def is_jid_associated_with_url_hash(db_file, jid, url_hash):
        """
        Check whether a given Jabber ID is associated with a given URL hash.

        Parameters
        ----------
        db_file : str
            Path to database file.
        jid : str
            A Jabber ID.
        url_hash : str
            An MD5 checksum of a URL.

        Returns
        -------
        result : tuple
            Jabber ID and URL hash, if an association exists.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} jid: {} url_hash: {}'
        #              .format(function_name, db_file, jid, url_hash))
        sql = (
            """
            SELECT mj.jid, me.url_hash
            FROM main_jids AS mj
            INNER JOIN combination_entries_tags_jids AS co ON mj.id = co.jid_id
            INNER JOIN main_entries AS me ON me.id = co.entry_id
            WHERE mj.jid = :jid AND me.url_hash = :url_hash;
            """
            )
        par = {
            "jid": jid,
            "url_hash": url_hash
            }
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql, par).fetchone()
        return result[0] if result and len(result) == 1 else result

    # deassociate_entry_from_jid
    async def delete_combination_row_by_jid_and_url_hash(db_file, url_hash, jid):
        """
        Remove association of a given Jabber ID and a given URL hash.

        Parameters
        ----------
        db_file : str
            Path to database file.
        jid : str
            A Jabber ID.
        url_hash : str
            An MD5 checksum of a URL.

        Returns
        -------
        None.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} jid: {} url_hash: {}'
        #              .format(function_name, db_file, jid, url_hash))
        sql = (
            """
            DELETE FROM combination_entries_tags_jids
            WHERE id IN (
                SELECT co.id
                FROM combination_entries_tags_jids co
                JOIN main_entries me ON co.entry_id = me.id
                JOIN main_jids mj ON co.jid_id = mj.id
                WHERE me.url_hash = :url_hash AND mj.jid = :jid
            );
            """
            )
        par = {
            "jid": jid,
            "url_hash": url_hash
            }
        async with DBLOCK:
            with SQLite.create_connection(db_file) as conn:
                cur = conn.cursor()
                cur.execute(sql, par)

    # NOTE The result was ordered by number of instances
    #      ORDER BY main_tags.instances DESC
    #      and has been changed to alphabetical order
    #      ORDER BY main_tags.tag ASC

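    # A minimal sketch (commented out) of combining the association check above with
    # the removal coroutine; the Jabber ID and URL are hypothetical, 'main.sqlite' is
    # the database created by main(), and the await has to run inside an event loop
    # because delete_combination_row_by_jid_and_url_hash acquires DBLOCK.
    #
    #     db_file = 'main.sqlite'
    #     jid = 'alice@example.org'
    #     url_hash = Utilities.hash_url_to_md5('https://xmpp.org/')
    #     if SQLite.is_jid_associated_with_url_hash(db_file, jid, url_hash):
    #         await SQLite.delete_combination_row_by_jid_and_url_hash(db_file, url_hash, jid)
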
    def get_tags_by_entry_id(db_file, entry_id):
        """
        Get tags by an entry ID.

        Parameters
        ----------
        db_file : str
            Path to database file.
        entry_id : str
            An ID of an entry.

        Returns
        -------
        result : tuple
            Tags.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} entry_id: {}'
        #              .format(function_name, db_file, entry_id))
        sql = (
            """
            SELECT DISTINCT main_tags.tag
            FROM main_tags
            INNER JOIN combination_entries_tags_jids ON main_tags.id = combination_entries_tags_jids.tag_id
            WHERE combination_entries_tags_jids.entry_id = ?
            ORDER BY main_tags.tag ASC
            LIMIT 5;
            """
            )
        par = (entry_id,)
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql, par).fetchall()
        return result

    def get_jid_id_by_jid(db_file, jid):
        """
        Get id of a given jid.

        Parameters
        ----------
        db_file : str
            Path to database file.
        jid : str
            Jabber ID.

        Returns
        -------
        result : tuple
            ID.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} jid: {}'
        #              .format(function_name, db_file, jid))
        sql = (
            """
            SELECT id
            FROM main_jids
            WHERE jid = ?;
            """
            )
        par = (jid,)
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql, par).fetchone()
        return result[0] if result and len(result) == 1 else result

    def get_jid_by_jid_id(db_file, jid_id):
        """
        Get jid of a given jid_id.

        Parameters
        ----------
        db_file : str
            Path to database file.
        jid_id : str
            ID of Jabber ID.

        Returns
        -------
        result : tuple
            Jabber ID.
        """
        function_name = sys._getframe().f_code.co_name
        # logger.debug('{}: db_file: {} jid_id: {}'
        #              .format(function_name, db_file, jid_id))
        sql = (
            """
            SELECT jid
            FROM main_jids
            WHERE id = ?;
            """
            )
        par = (jid_id,)
        with SQLite.create_connection(db_file) as conn:
            cur = conn.cursor()
            result = cur.execute(sql, par).fetchone()
        return result[0] if result and len(result) == 1 else result

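    # A small round-trip sketch (commented out), assuming these lookups are methods of
    # the SQLite class referenced above; the Jabber ID is a hypothetical example.
    #
    #     jid_id = SQLite.get_jid_id_by_jid('main.sqlite', 'alice@example.org')
    #     if jid_id:
    #         assert SQLite.get_jid_by_jid_id('main.sqlite', jid_id) == 'alice@example.org'

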
class Syndication:

    def create_rfc4287_entry(feed_entry):
        node_entry = ET.Element('entry')
        node_entry.set('xmlns', 'http://www.w3.org/2005/Atom')
        # Title
        title = ET.SubElement(node_entry, 'title')
        title.set('type', 'text')
        title.text = feed_entry['title']
        # Summary
        summary = ET.SubElement(node_entry, 'summary') # TODO Try 'content'
        summary.set('type', 'text')
        #summary.set('lang', feed_entry['summary_lang'])
        summary.text = feed_entry['summary']
        # Tags
        if feed_entry['tags']:
            for term in feed_entry['tags']:
                tag = ET.SubElement(node_entry, 'category')
                tag.set('term', term)
        # Link
        link = ET.SubElement(node_entry, "link")
        link.set('href', feed_entry['link'])
        # Links
        # for feed_entry_link in feed_entry['links']:
        #     link = ET.SubElement(node_entry, "link")
        #     link.set('href', feed_entry_link['url'])
        #     link.set('type', feed_entry_link['type'])
        #     link.set('rel', feed_entry_link['rel'])
        # Date saved
        if 'published' in feed_entry and feed_entry['published']:
            published = ET.SubElement(node_entry, 'published')
            published.text = feed_entry['published']
        # Date edited
        if 'updated' in feed_entry and feed_entry['updated']:
            updated = ET.SubElement(node_entry, 'updated')
            updated.text = feed_entry['updated']
        return node_entry

    def extract_items(item_payload, limit=False):
        namespace = '{http://www.w3.org/2005/Atom}'
        title = item_payload.find(namespace + 'title')
        links = item_payload.find(namespace + 'link')
        if (not isinstance(title, ET.Element) and
            not isinstance(links, ET.Element)): return None
        title_text = '' if title is None else title.text
        link_href = ''
        if isinstance(links, ET.Element):
            for link in item_payload.findall(namespace + 'link'):
                link_href = link.attrib['href'] if 'href' in link.attrib else ''
                if link_href: break
        contents = item_payload.find(namespace + 'summary')
        summary_text = ''
        if isinstance(contents, ET.Element):
            for summary in item_payload.findall(namespace + 'summary'):
                summary_text = summary.text or ''
                if summary_text: break
        published = item_payload.find(namespace + 'published')
        published_text = '' if published is None else published.text
        categories = item_payload.find(namespace + 'category')
        tags = []
        if isinstance(categories, ET.Element):
            for category in item_payload.findall(namespace + 'category'):
                if 'term' in category.attrib and category.attrib['term']:
                    category_term = category.attrib['term']
                    if len(category_term) < 20:
                        tags.append(category_term)
                    elif len(category_term) < 50:
                        tags.append(category_term)
                if limit and len(tags) > 4: break

        identifier = item_payload.find(namespace + 'id')
        if identifier is not None and identifier.attrib: print(identifier.attrib)
        identifier_text = '' if identifier is None else identifier.text

        instances = '' # TODO Check the Blasta database for instances.

        entry = {'title' : title_text,
                 'link' : link_href,
                 'summary' : summary_text,
                 'published' : published_text,
                 'updated' : published_text, # TODO "Updated" is missing
                 'tags' : tags}
        return entry

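    # A round-trip sketch (commented out): build an Atom entry with create_rfc4287_entry,
    # serialize and re-parse it as done for PubSub payloads, then read it back with
    # extract_items.  The dictionary keys follow the ones accessed above; the values
    # are hypothetical.
    #
    #     feed_entry = {'title': 'XMPP',
    #                   'link': 'https://xmpp.org/',
    #                   'summary': 'The universal messaging standard.',
    #                   'tags': ['xmpp', 'jabber'],
    #                   'published': '2024-08-22T16:09:04+02:00',
    #                   'updated': '2024-08-22T16:09:04+02:00'}
    #     element = Syndication.create_rfc4287_entry(feed_entry)
    #     payload = ET.fromstring(ET.tostring(element))
    #     entry = Syndication.extract_items(payload)
    #     print(entry['title'], entry['tags'])

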
class Utilities:

    def convert_iso8601_to_readable(timestamp):
        old_date_format = datetime.fromisoformat(timestamp.replace("Z", "+00:00"))
        new_date_format = old_date_format.strftime("%B %d, %Y")
        return new_date_format

    def hash_url_to_md5(url):
        url_encoded = url.encode()
        url_hashed = hashlib.md5(url_encoded)
        url_digest = url_hashed.hexdigest()
        return url_digest

    def is_jid_matches_to_session(accounts, sessions, request):
        jabber_id = request.cookies.get('jabber_id')
        session_key = request.cookies.get('session_key')
        if (jabber_id and
            jabber_id in accounts and
            jabber_id in sessions and
            session_key == sessions[jabber_id]):
            return jabber_id

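    # A short sketch (commented out) of the URL-to-MD5 convention: the hexadecimal
    # digest serves as the url_hash passed to the SQLite helpers and as the PubSub
    # item id, as the calls elsewhere in this module suggest; the URL is a
    # hypothetical example.
    #
    #     url_hash = Utilities.hash_url_to_md5('https://xmpp.org/')
    #     print(url_hash)  # 32 hexadecimal characters

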
class Xml:

    def create_setting_entry(value: str):
        element = ET.Element('value')
        element.text = value
        return element


class Configuration:

    def instantiate_database(db_file):
        # db_dir = get_default_data_directory()
        # if not os.path.isdir(db_dir):
        #     os.mkdir(db_dir)
        # if not os.path.isdir(db_dir + "/sqlite"):
        #     os.mkdir(db_dir + "/sqlite")
        # db_file = os.path.join(db_dir, "sqlite", r"{}.db".format(jid_file))
        SQLite.create_tables(db_file)
        SQLite.add_statistics(db_file)
        return db_file


class XmppInstance(ClientXMPP):

    def __init__(self, jid, password):
        super().__init__(jid, password)
        #self.add_event_handler("connection_failed", self.on_connection_failed)
        #self.add_event_handler("failed_auth", self.on_failed_auth)
        self.add_event_handler("session_start", self.on_session_start)
        self.register_plugin('xep_0004') # XEP-0004: Data Forms
        self.register_plugin('xep_0030') # XEP-0030: Service Discovery
        self.register_plugin('xep_0059') # XEP-0059: Result Set Management
        self.register_plugin('xep_0060') # XEP-0060: Publish-Subscribe
        self.register_plugin('xep_0078') # XEP-0078: Non-SASL Authentication
        self.register_plugin('xep_0163') # XEP-0163: Personal Eventing Protocol
        self.register_plugin('xep_0223') # XEP-0223: Persistent Storage of Private Data via PubSub
        self.connect()
        # self.process(forever=False)

        self.connection_accepted = False

    # def on_connection_failed(self, event):
    #     self.connection_accepted = False

    # def on_failed_auth(self, event):
    #     self.connection_accepted = False

    def on_session_start(self, event):
        self.connection_accepted = True


class XmppMessage:

    def send(self, jid, message_body):
        jid_from = str(self.boundjid) if self.is_component else None
        self.send_message(
            mto=jid,
            mfrom=jid_from,
            mbody=message_body,
            mtype='chat')

    # NOTE It appears to not work.
    def send_headline(self, jid, message_subject, message_body):
        jid_from = str(self.boundjid) if self.is_component else None
        self.send_message(
            mto=jid,
            mfrom=jid_from,
            msubject=message_subject,
            mbody=message_body,
            mtype='headline')


class XmppPubsub:

    # TODO max-items might be limited (CanChat: 255), so iterate from a bigger number to a smaller.
    # NOTE This function was copied from atomtopubsub
    def create_node_atom(xmpp_instance, jid, node, title, subtitle, access_model):
        jid_from = str(xmpp_instance.boundjid) if xmpp_instance.is_component else None
        iq = xmpp_instance.Iq(stype='set',
                              sto=jid,
                              sfrom=jid_from)
        iq['pubsub']['create']['node'] = node
        form = iq['pubsub']['configure']['form']
        form['type'] = 'submit'
        form.addField('pubsub#access_model',
                      ftype='list-single',
                      value=access_model)
        form.addField('pubsub#deliver_payloads',
                      ftype='boolean',
                      value=0)
        form.addField('pubsub#description',
                      ftype='text-single',
                      value=subtitle)
        form.addField('pubsub#max_items',
                      ftype='text-single',
                      value='255')
        form.addField('pubsub#notify_retract',
                      ftype='boolean',
                      value=1)
        form.addField('pubsub#persist_items',
                      ftype='boolean',
                      value=1)
        form.addField('pubsub#send_last_published_item',
                      ftype='text-single',
                      value='never')
        form.addField('pubsub#title',
                      ftype='text-single',
                      value=title)
        form.addField('pubsub#type',
                      ftype='text-single',
                      value='http://www.w3.org/2005/Atom')
        return iq

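    # A minimal sketch (commented out) of sending the node-creation request from an
    # established XmppInstance; the node name, title and subtitle are hypothetical,
    # and iq.send() must be awaited inside the running slixmpp event loop.
    #
    #     iq = XmppPubsub.create_node_atom(xmpp_instance,
    #                                      xmpp_instance.boundjid.bare,
    #                                      'urn:xmpp:bibliography:0',
    #                                      'Bookmarks',
    #                                      'Public bookmarks',
    #                                      'open')
    #     await iq.send(timeout=5)
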
    def create_node_config(xmpp_instance, jid):
        jid_from = str(xmpp_instance.boundjid) if xmpp_instance.is_component else None
        iq = xmpp_instance.Iq(stype='set',
                              sto=jid,
                              sfrom=jid_from)
        iq['pubsub']['create']['node'] = 'xmpp:blasta:settings:0'
        form = iq['pubsub']['configure']['form']
        form['type'] = 'submit'
        form.addField('pubsub#access_model',
                      ftype='list-single',
                      value='whitelist')
        form.addField('pubsub#deliver_payloads',
                      ftype='boolean',
                      value=0)
        form.addField('pubsub#description',
                      ftype='text-single',
                      value='Settings of the Blasta PubSub bookmarks system')
        form.addField('pubsub#max_items',
                      ftype='text-single',
                      value='30')
        form.addField('pubsub#notify_retract',
                      ftype='boolean',
                      value=1)
        form.addField('pubsub#persist_items',
                      ftype='boolean',
                      value=1)
        form.addField('pubsub#send_last_published_item',
                      ftype='text-single',
                      value='never')
        form.addField('pubsub#title',
                      ftype='text-single',
                      value='Blasta Settings')
        form.addField('pubsub#type',
                      ftype='text-single',
                      value='settings')
        return iq

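    # A sketch (commented out) of how the settings node might be created and then
    # restricted with XEP-0223, mirroring the whitelist access model configured
    # above; whether the application does exactly this elsewhere is an assumption.
    #
    #     iq = XmppPubsub.create_node_config(xmpp_instance, xmpp_instance.boundjid.bare)
    #     await iq.send(timeout=5)
    #     await XmppPubsub.set_node_private(xmpp_instance, 'xmpp:blasta:settings:0')
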
    async def del_node_item(xmpp_instance, pubsub, node, item_id):
        try:
            iq = await xmpp_instance.plugin['xep_0060'].retract(
                pubsub, node, item_id, timeout=5, notify=None)
            result = iq
        except IqError as e:
            result = e.iq['error']['text']
            print(e)
        except IqTimeout as e:
            result = 'Timeout'
            print(e)
        print(result)
        return result

    def get_iterator(xmpp_instance, pubsub, node, max_items, iterator):
        iterator = xmpp_instance.plugin['xep_0060'].get_items(
            pubsub, node, timeout=5, max_items=max_items, iterator=iterator)
        return iterator

    async def get_node_configuration(xmpp_instance, pubsub, node):
        try:
            iq = await xmpp_instance.plugin['xep_0060'].get_node_config(
                pubsub, node)
            return iq
        except (IqError, IqTimeout) as e:
            print(e)

    async def get_node_item(xmpp_instance, pubsub, node, item_id):
        try:
            iq = await xmpp_instance.plugin['xep_0060'].get_item(
                pubsub, node, item_id, timeout=5)
            result = iq
        except IqError as e:
            result = e.iq['error']['text']
            print(e)
        except IqTimeout as e:
            result = 'Timeout'
            print(e)
        return result

    async def get_node_item_ids(xmpp_instance, pubsub, node):
        try:
            iq = await xmpp_instance.plugin['xep_0030'].get_items(
                pubsub, node)
            # Broken. See https://codeberg.org/poezio/slixmpp/issues/3548
            #iq = await xmpp_instance.plugin['xep_0060'].get_item_ids(
            #    pubsub, node, timeout=5)
            result = iq
        except IqError as e:
            if e.iq['error']['text'] == 'Node not found':
                result = 'Node not found'
            elif e.iq['error']['condition'] == 'item-not-found':
                result = 'Item not found'
            else:
                result = None
            print(e)
        except IqTimeout as e:
            result = 'Timeout'
            print(e)
        return result

    async def get_node_item_private(xmpp_instance, node, item_id):
        try:
            iq = await xmpp_instance.plugin['xep_0223'].retrieve(
                node, item_id, timeout=5)
            result = iq
        except IqError as e:
            result = e.iq['error']['text']
            print(e)
        except IqTimeout as e:
            result = 'Timeout'
            print(e)
        return result

    async def get_node_items(xmpp_instance, pubsub, node, item_ids=None, max_items=None):
        try:
            if max_items:
                iq = await xmpp_instance.plugin['xep_0060'].get_items(
                    pubsub, node, timeout=5)
                it = xmpp_instance.plugin['xep_0060'].get_items(
                    pubsub, node, timeout=5, max_items=max_items, iterator=True)
                q = rsm.Iq()
                q['to'] = pubsub
                q['disco_items']['node'] = node
                async for item in rsm.ResultIterator(q, 'disco_items', '10'):
                    print(item['disco_items']['items'])

            else:
                iq = await xmpp_instance.plugin['xep_0060'].get_items(
                    pubsub, node, timeout=5, item_ids=item_ids)
            result = iq
        except IqError as e:
            if e.iq['error']['text'] == 'Node not found':
                result = 'Node not found'
            elif e.iq['error']['condition'] == 'item-not-found':
                result = 'Item not found'
            else:
                result = None
            print(e)
        except IqTimeout as e:
            result = 'Timeout'
            print(e)
        return result

    async def get_nodes(xmpp_instance):
        try:
            iq = await xmpp_instance.plugin['xep_0060'].get_nodes()
            return iq
        except (IqError, IqTimeout) as e:
            print(e)

    async def is_node_exist(xmpp_instance, node_name):
        iq = await XmppPubsub.get_nodes(xmpp_instance)
        nodes = iq['disco_items']['items']
        for node in nodes:
            if node[1] == node_name:
                return True

    async def publish_node_item(xmpp_instance, jid, node, item_id, payload):
        try:
            iq = await xmpp_instance.plugin['xep_0060'].publish(
                jid, node, id=item_id, payload=payload)
            print(iq)
            return iq
        except (IqError, IqTimeout) as e:
            print(e)

    async def publish_node_item_private(xmpp_instance, node, item_id, stanza):
        try:
            iq = await xmpp_instance.plugin['xep_0223'].store(
                stanza, node, item_id)
            print(iq)
            return iq
        except (IqError, IqTimeout) as e:
            print(e)
            if e.iq['error']['text'] == 'Field does not match: access_model':
                return 'Error: Could not set private bookmark due to Access Model mismatch'

    async def set_node_private(xmpp_instance, node):
        try:
            iq = await xmpp_instance.plugin['xep_0223'].configure(node)
            print(iq)
            return iq
        except (IqError, IqTimeout) as e:
            print(e)


def main():
    if not exists('main.sqlite') or not getsize('main.sqlite'):
        Configuration.instantiate_database('main.sqlite')
    accounts = {}
    sessions = {}
    http_instance = HttpInstance(accounts, sessions)
    return http_instance.app


app = main()

webbrowser.open('http://localhost:8000/help/about')
# TODO Check first time
webbrowser.open_new_tab('http://localhost:8000')


# FIXME
if __name__ == '__main__':
    uvicorn.run(app, host='localhost', port=8000, reload=True)

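
# A launch note: if this module is saved as blasta.py (an assumption), the server can
# also be started from the command line; passing the import string instead of the app
# object is what makes uvicorn's --reload option effective.
#
#     uvicorn blasta:app --host localhost --port 8000 --reload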