Modularize code;
Update the README document.
57
README.md
|
@ -17,8 +17,8 @@ desire.
|
|||
|
||||
## Screenshots
|
||||
|
||||
[<img alt="browse view" src="graphic/browse.png" width="200px"/>](screenshot/browse.png)
|
||||
[<img alt="tags view" src="graphic/tag.png" width="200px"/>](screenshot/tag.png)
|
||||
[<img alt="browse view" src="blasta/screenshot/browse.png" width="200px"/>](blasta/screenshot/browse.png)
|
||||
[<img alt="tags view" src="blasta/screenshot/tag.png" width="200px"/>](blasta/screenshot/tag.png)
|
||||
|
||||
## Technicalities
|
||||
|
||||
|
@ -61,21 +61,60 @@ The connection to the Blasta system is made with XMPP accounts.
|
|||
* Python >= 3.5
|
||||
* fastapi
|
||||
* lxml
|
||||
* python-dateutil
|
||||
* python-multipart
|
||||
* slixmpp
|
||||
* tomli (Python < 3.11, where the standard-library tomllib is unavailable)
* tomli-w
|
||||
* uvicorn
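For a manual setup outside of the pip/pipx packaging described below, the list above can also be installed directly. This is only a sketch; `tomli` is needed only on Python < 3.11:

```
$ pip install fastapi lxml python-dateutil python-multipart slixmpp tomli tomli-w uvicorn
```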
|
||||
|
||||
## Instructions
|
||||
## Installation
|
||||
|
||||
Use the following commands to start Blasta.
|
||||
It is possible to install Blasta using pip or pipx.
|
||||
|
||||
```shell
|
||||
$ git clone https://git.xmpp-it.net/sch/Blasta
|
||||
$ cd Blasta/
|
||||
$ python -m uvicorn blasta:app --reload
|
||||
#### pip inside venv
|
||||
|
||||
```
|
||||
$ python3 -m venv .venv
|
||||
$ source .venv/bin/activate
|
||||
```
|
||||
|
||||
Open URL http://localhost:8000/ and connect with your Jabber ID.
|
||||
##### Install
|
||||
|
||||
```
|
||||
$ pip install git+https://git.xmpp-it.net/sch/Blasta
|
||||
```
|
||||
|
||||
#### pipx
|
||||
|
||||
##### Install
|
||||
|
||||
```
|
||||
$ pipx install git+https://git.xmpp-it.net/sch/Blasta
|
||||
```
|
||||
|
||||
##### Update
|
||||
|
||||
```
|
||||
$ pipx uninstall blasta
|
||||
$ pipx install git+https://git.xmpp-it.net/sch/Blasta
|
||||
```
|
||||
|
||||
### Configure
|
||||
|
||||
Copy the file `settings.toml` to `~/.config/blasta/`.
|
||||
|
||||
Copy directories `stylesheet`, `graphic`, `template`, and `script` to
|
||||
`~/.local/share/blasta/`.
|
||||
|
||||
Create directories `data`, `export`, `items` under `~/.cache/blasta/`.
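A minimal sketch of these configuration steps from a local checkout; the `blasta/assets/` prefix is an assumption based on the paths appearing in this change, so adjust it to wherever the directories actually live:

```
$ mkdir -p ~/.config/blasta/ ~/.local/share/blasta/ ~/.cache/blasta/
$ cp settings.toml ~/.config/blasta/
$ cp -r blasta/assets/stylesheet blasta/assets/graphic blasta/assets/template blasta/assets/script ~/.local/share/blasta/
$ mkdir -p ~/.cache/blasta/data ~/.cache/blasta/export ~/.cache/blasta/items
```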
|
||||
|
||||
### Start
|
||||
|
||||
```
|
||||
$ blasta
|
||||
```
|
||||
|
||||
Open URL http://localhost:8000 and connect with your Jabber ID.
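The entry point added in `blasta/__main__.py` also defines `-p`/`--port` and `-o`/`--open` options, so, as a sketch, a non-default invocation could look like this:

```
$ blasta --port 8080 --open
```

With `--port 8080`, the address to open becomes http://localhost:8080 instead.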
|
||||
|
||||
## License
|
||||
|
||||
|
|
3
blasta/__init__.py
Normal file
|
@ -0,0 +1,3 @@
|
|||
from blasta.version import __version__, __version_info__
|
||||
|
||||
print('Blasta', __version__)
|
61
blasta/__main__.py
Normal file
|
@ -0,0 +1,61 @@
|
|||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
|
||||
TODO
|
||||
|
||||
* Delete cookie if session does not match
|
||||
|
||||
* Delete entry/tag/jid combination row upon removal of a tag.
|
||||
|
||||
"""
|
||||
|
||||
import argparse
|
||||
from blasta.http.instance import HttpInstance
|
||||
from blasta.sqlite import SQLite
|
||||
import json
|
||||
import logging
|
||||
from os.path import getsize, exists
|
||||
import sys
|
||||
import time
|
||||
from typing import Optional
|
||||
import urllib.parse
|
||||
import uvicorn
|
||||
import webbrowser
|
||||
|
||||
|
||||
try:
|
||||
import tomllib
|
||||
except ImportError:
|
||||
import tomli as tomllib
|
||||
|
||||
|
||||
def main():
|
||||
if not exists('main.sqlite') or not getsize('main.sqlite'):
|
||||
SQLite.instantiate_database('main.sqlite')
|
||||
accounts = {}
|
||||
sessions = {}
|
||||
http_instance = HttpInstance(accounts, sessions)
|
||||
return http_instance.app
|
||||
|
||||
app = main()
|
||||
|
||||
# FIXME
|
||||
if __name__ == 'blasta.__main__':
|
||||
parser = argparse.ArgumentParser(
|
||||
prog='blasta',
|
||||
description='Blasta - A collaborative annotation system.',
|
||||
usage='%(prog)s [OPTION]...')
|
||||
parser.add_argument('-v', '--version', help='print version',
|
||||
action='version', version='0.1')
|
||||
parser.add_argument('-p', '--port', help='port number', dest='port')
|
||||
parser.add_argument('-o', '--open', help='open an html browser', action='store_const', const=True, dest='open')
|
||||
args = parser.parse_args()
|
||||
port = int(args.port) if args.port else 8000
|
||||
uvicorn.run(app, host='localhost', port=port)
|
||||
if args.open:
|
||||
# TODO Check first time
|
||||
webbrowser.open('http://localhost:{}/help/about'.format(port))
|
||||
webbrowser.open_new_tab('http://localhost:{}'.format(port))
|
||||
|
19
blasta/assets/graphic/syndicate.svg
Normal file
|
@ -0,0 +1,19 @@
|
|||
<svg xmlns="http://www.w3.org/2000/svg" width="128" height="128" viewBox="0 0 256 256">
|
||||
<defs>
|
||||
<linearGradient id="RSSg" x1="0.085" y1="0.085" x2="0.915" y2="0.915">
|
||||
<stop offset="0" stop-color="#E3702D" />
|
||||
<stop offset="0.1071" stop-color="#EA7D31" />
|
||||
<stop offset="0.3503" stop-color="#F69537" />
|
||||
<stop offset="0.5" stop-color="#FB9E3A" />
|
||||
<stop offset="0.7016" stop-color="#EA7C31" />
|
||||
<stop offset="0.8866" stop-color="#DE642B" />
|
||||
<stop offset="1" stop-color="#D95B29" />
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect width="256" height="256" rx="55" ry="55" x="0" y="0" fill="#CC5D15"/>
|
||||
<rect width="246" height="246" rx="50" ry="50" x="5" y="5" fill="#F49C52"/>
|
||||
<rect width="236" height="236" rx="47" ry="47" x="10" y="10" fill="url(#RSSg)"/>
|
||||
<circle cx="68" cy="189" r="24" fill="#FFF"/>
|
||||
<path d="M160 213h-34a82 82 0 0 0 -82 -82v-34a116 116 0 0 1 116 116z" fill="#FFF"/>
|
||||
<path d="M184 213A140 140 0 0 0 44 73V38a175 175 0 0 1 175 175z" fill="#FFF"/>
|
||||
</svg>
|
|
@ -76,7 +76,9 @@
|
|||
Information and resources about Blasta, collaborative
|
||||
bookmarks with an Irish manner.
|
||||
</p>
|
||||
<h3>About Blasta</h3>
|
||||
<h3>
|
||||
About Blasta
|
||||
</h3>
|
||||
<p>
|
||||
Blasta is a collaborative bookmarks manager for organizing
|
||||
online content. It allows you to add links to your personal
|
||||
|
@ -114,7 +116,12 @@
|
|||
monero, mms, news, sip, udp, xmpp and any scheme and type
|
||||
that you desire.
|
||||
</p>
|
||||
<h4>Why Blasta?</h4>
|
||||
<p>
|
||||
Blasta was inspired by projects Movim and Rivista.
|
||||
</p>
|
||||
<h4>
|
||||
Why Blasta?
|
||||
</h4>
|
||||
<p>
|
||||
Corporate search engines are archaic and outdated, and often
|
||||
prioritize their own interests, leading to censorship and
|
||||
|
@ -128,14 +135,18 @@
|
|||
references and resources that you need in order to be
|
||||
productive and get what you need.
|
||||
</p>
|
||||
<h4>The things that you can do with Blasta are endless</h4>
|
||||
<h4>
|
||||
The things that you can do with Blasta are endless
|
||||
</h4>
|
||||
<p>
|
||||
Blasta is an open-ended indexing system, and, as such, it
|
||||
provides a versatile platform with which you have the
|
||||
ability to tailor its usage according to your desired
|
||||
preferences. <a href="/help/about/ideas">Learn more</a>.
|
||||
</p>
|
||||
<h4>The difference from other services</h4>
|
||||
<h4>
|
||||
The difference from other services
|
||||
</h4>
|
||||
<p>
|
||||
Unlike some so called "social" bookmarking systems, Blasta
|
||||
does not own your information; your bookmarks are
|
||||
|
@ -151,7 +162,9 @@
|
|||
your personal XMPP account under PubSub node
|
||||
<code>urn:xmpp:bibliography:0</code>.
|
||||
</p>
|
||||
<h4>Information that is stored by Blasta</h4>
|
||||
<h4>
|
||||
Information that is stored by Blasta
|
||||
</h4>
|
||||
<p>
|
||||
In order for Blasta to facilitate sharing of information and
|
||||
accessibility to information, Blasta aggregates your own
|
||||
|
@ -166,14 +179,18 @@
|
|||
all of their owners as private and no one else has stored
|
||||
them in a public fashion (i.e. not classified private).
|
||||
</p>
|
||||
<h4>Blasta source code</h4>
|
||||
<h4>
|
||||
Blasta source code
|
||||
</h4>
|
||||
<p>
|
||||
The source code of Blasta is available under the terms of
|
||||
the license <a href="/license/agpl-3.0.txt">AGPL-3.0</a> at
|
||||
<a href="https://git.xmpp-it.net/sch/Blasta">
|
||||
git.xmpp-it.net</a>.
|
||||
</p>
|
||||
<h4>Our motives</h4>
|
||||
<h4>
|
||||
Our motives
|
||||
</h4>
|
||||
<p>
|
||||
We are adopting the attitude towards life and towards death,
|
||||
which was implicit in the old Vikings' and in Schopenhauer's
|
||||
|
@ -186,7 +203,9 @@
|
|||
particular for and through his racial community, which is
|
||||
eternal.
|
||||
</p>
|
||||
<h4>About us</h4>
|
||||
<h4>
|
||||
About us
|
||||
</h4>
|
||||
<p>
|
||||
Blasta was proudly made in the Republic of Ireland, by a
|
||||
group of bible loving, religious, and stylish Irish men, who
|
||||
|
@ -200,12 +219,16 @@
|
|||
proceeding year, and he was the one who has initiated the
|
||||
idea of XMPP PubSub bookmarks.
|
||||
</p>
|
||||
<h4>Conclusion</h4>
|
||||
<h4>
|
||||
Conclusion
|
||||
</h4>
|
||||
<p>
|
||||
Blasta is for you to enjoy, excite, instigate, investigate,
|
||||
learn and research.
|
||||
</p>
|
||||
<p>We hope you would have productive outcomes with Blasta.</p>
|
||||
<p>
|
||||
We hope you would have productive outcomes with Blasta.
|
||||
</p>
|
||||
<br/>
|
||||
<p class="quote bottom">
|
||||
“All you can take with you; is that which you have given
|
|
@ -168,7 +168,7 @@
|
|||
xmpp.org
|
||||
</a>
|
||||
​ 
|
||||
<a href="https://libervia.org/">
|
||||
<a href="https://libervia.org">
|
||||
libervia.org
|
||||
</a>
|
||||
</p>
|
||||
|
@ -188,13 +188,15 @@
|
|||
xmpp.org
|
||||
</a>
|
||||
​ 
|
||||
<a href="https://movim.eu/">
|
||||
<a href="https://movim.eu">
|
||||
movim.eu
|
||||
</a>
|
||||
</p>
|
||||
</li>
|
||||
</ul>
|
||||
<h4>Of note</h4>
|
||||
<h4>
|
||||
Of note
|
||||
</h4>
|
||||
<p>
|
||||
These types of technologies have been public information for over
|
||||
a couple of decades (i.e. more than 20 years); and people
|
|
@ -69,7 +69,7 @@
|
|||
<label for="remember">Remember</label -->
|
||||
</form>
|
||||
<p>
|
||||
Log in to Blasta with your XMPP account or
|
||||
Connect to Blasta with your XMPP account or
|
||||
<a href="/register">register</a> for an account.
|
||||
</p>
|
||||
</div>
|
|
@ -226,7 +226,9 @@
|
|||
</p>
|
||||
<br/>
|
||||
<p class="quote bottom">
|
||||
Blasta was inspired by Movim and Rivista.
|
||||
“Talent hits a target no one else can hit.
|
||||
Genius hits a target no one else can see.”
|
||||
― Arthur Schopenhauer
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
|
@ -278,11 +278,19 @@
|
|||
</li>
|
||||
</ul>
|
||||
<br/>
|
||||
<p class="quote bottom"
|
||||
title="Arthur Schopenhauer speaks about Bob Wyman, Jérôme Poisson, Joe Hildebrand, Peter Saint-Andre, and Timothée Jaussoin.">
|
||||
“Talent hits a target no one else can hit.
|
||||
Genius hits a target no one else can see.”
|
||||
― Arthur Schopenhauer
|
||||
<p class="quote bottom">
|
||||
“Technology is extremely powerful and has the potential to
|
||||
change the world; however, it cannot realize its full
|
||||
potential unless people feel the need to use it. Some
|
||||
researchers agree, that to ensure the success of new
|
||||
technology, the focus should be on the people’s perspective
|
||||
rather than on the technology itself. Designing a new
|
||||
experience is a process that facilitates the relationship
|
||||
between technology and people; thus, balanced research
|
||||
should be conducted from both perspectives.”
|
||||
― <a href="https://www.diva.exchange/en/privacy/trust-in-the-cryptocurrency-economy-resolving-the-problem-experience-of-diva-exchange-part-2/">
|
||||
DIVA.EXCHANGE
|
||||
</a>
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
|
@ -76,8 +76,9 @@
|
|||
</h3>
|
||||
<p>
|
||||
As with email, you need an account with a service provider
|
||||
to operate Blasta, so if you already have an XMPP account,
|
||||
you can <a href="/connect">connect</a> and start to Blasta.
|
||||
to utilize Blasta; if you already have an XMPP account, you
|
||||
can <a href="/connect">connect</a> and start to utilize
|
||||
Blasta.
|
||||
</p>
|
||||
<p>
|
||||
If you do not have an XMPP account, yet, you can use a
|
116
blasta/config.py
Normal file
|
@ -0,0 +1,116 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
"""
|
||||
Functions get_directory() were taken from project jarun/buku.
|
||||
By Arun Prakash Jana (jarun) and Dmitry Marakasov (AMDmi3).
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
try:
|
||||
import tomllib
|
||||
except ImportError:
|
||||
import tomli as tomllib
|
||||
|
||||
class Settings:
|
||||
|
||||
def get_directory():
|
||||
"""
|
||||
Determine the directory path where setting files are stored.
|
||||
|
||||
* If $XDG_CONFIG_HOME is defined, use it;
|
||||
* else if $HOME exists, use it;
|
||||
* else if the platform is Windows, use %APPDATA%;
|
||||
* else use the current directory.
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
Path to configuration directory.
|
||||
"""
|
||||
# config_home = xdg.BaseDirectory.xdg_config_home
|
||||
config_home = os.environ.get('XDG_CONFIG_HOME')
|
||||
if config_home is None:
|
||||
if os.environ.get('HOME') is None:
|
||||
if sys.platform == 'win32':
|
||||
config_home = os.environ.get('APPDATA')
|
||||
if config_home is None:
|
||||
return os.path.abspath('.')
|
||||
else:
|
||||
return os.path.abspath('.')
|
||||
else:
|
||||
config_home = os.path.join(
|
||||
os.environ.get('HOME'), '.config'
|
||||
)
|
||||
return os.path.join(config_home, 'blasta')
|
||||
|
||||
def get_setting(filename, section):
|
||||
with open(filename, mode="rb") as settings:
|
||||
result = tomllib.load(settings)[section]
|
||||
return result
|
||||
|
||||
|
||||
class Share:
|
||||
|
||||
def get_directory():
|
||||
"""
|
||||
Determine the directory path where data files are stored.
|
||||
|
||||
* If $XDG_DATA_HOME is defined, use it;
|
||||
* else if $HOME exists, use it;
|
||||
* else if the platform is Windows, use %APPDATA%;
|
||||
* else use the current directory.
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
Path to data directory.
|
||||
"""
|
||||
# data_home = xdg.BaseDirectory.xdg_data_home
|
||||
data_home = os.environ.get('XDG_DATA_HOME')
|
||||
if data_home is None:
|
||||
if os.environ.get('HOME') is None:
|
||||
if sys.platform == 'win32':
|
||||
data_home = os.environ.get('APPDATA')
|
||||
if data_home is None:
|
||||
return os.path.abspath('.blasta/data')
|
||||
else:
|
||||
return os.path.abspath('.blasta/data')
|
||||
else:
|
||||
data_home = os.path.join(
|
||||
os.environ.get('HOME'), '.local', 'share'
|
||||
)
|
||||
return os.path.join(data_home, 'blasta')
|
||||
|
||||
class Cache:
|
||||
|
||||
def get_directory():
|
||||
"""
|
||||
Determine the directory path where cache files are stored.
|
||||
|
||||
* If $XDG_CACHE_HOME is defined, use it;
|
||||
* else if $HOME exists, use it;
|
||||
* else if the platform is Windows, use %APPDATA%;
|
||||
* else use the current directory.
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
Path to cache directory.
|
||||
"""
|
||||
# cache_home = xdg.BaseDirectory.xdg_cache_home
|
||||
cache_home = os.environ.get('XDG_CACHE_HOME')
|
||||
if cache_home is None:
|
||||
if os.environ.get('HOME') is None:
|
||||
if sys.platform == 'win32':
|
||||
cache_home = os.environ.get('APPDATA')
|
||||
if cache_home is None:
|
||||
return os.path.abspath('.blasta/cache')
|
||||
else:
|
||||
return os.path.abspath('.blasta/cache')
|
||||
else:
|
||||
cache_home = os.path.join(
|
||||
os.environ.get('HOME'), '.cache'
|
||||
)
|
||||
return os.path.join(cache_home, 'blasta')
|
257
blasta/helpers/data.py
Normal file
|
@ -0,0 +1,257 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from blasta.helpers.utilities import Utilities
|
||||
from blasta.sqlite import SQLite
|
||||
from blasta.xml.syndication import Syndication
|
||||
from blasta.xmpp.pubsub import XmppPubsub
|
||||
import json
import logging
import os
|
||||
from slixmpp.stanza.iq import Iq
|
||||
import tomli_w
|
||||
|
||||
try:
|
||||
import tomllib
|
||||
except ImportError:
|
||||
import tomli as tomllib
|
||||
|
||||
class Data:
|
||||
|
||||
def cache_items_and_tags_search(directory_cache, entries, jid, query):
|
||||
"""Create a cache file of node items and tags."""
|
||||
item_ids = []
|
||||
tags = {}
|
||||
for entry in entries:
|
||||
entry_tags = entry['tags']
|
||||
entry_url_hash = entry['url_hash']
|
||||
tags_to_include = []
|
||||
if query in ' '.join([entry['title'], entry['link'], entry['summary'], ' '.join(entry_tags)]):
|
||||
item_ids.append(entry_url_hash)
|
||||
tags_to_include += entry_tags
|
||||
for tag_to_include in tags_to_include:
|
||||
tags[tag_to_include] = tags[tag_to_include]+1 if tag_to_include in tags else 1
|
||||
if tags:
|
||||
tags = dict(sorted(tags.items(), key=lambda item: (-item[1], item[0])))
|
||||
tags = dict(list(tags.items())[:30])
|
||||
if item_ids:
|
||||
filename = os.path.join(directory_cache, 'data', jid + '_query.toml')
|
||||
data = {
|
||||
'item_ids' : item_ids,
|
||||
'tags' : tags}
|
||||
Data.save_to_toml(filename, data)
|
||||
|
||||
def cache_items_and_tags_filter(directory_cache, entries, jid, tag):
|
||||
"""Create a cache file of node items and tags."""
|
||||
item_ids = []
|
||||
tags = {}
|
||||
for entry in entries:
|
||||
entry_tags = entry['tags']
|
||||
entry_url_hash = entry['url_hash']
|
||||
tags_to_include = []
|
||||
if tag in entry_tags:
|
||||
item_ids.append(entry_url_hash)
|
||||
tags_to_include += entry_tags
|
||||
for tag_to_include in tags_to_include:
|
||||
tags[tag_to_include] = tags[tag_to_include]+1 if tag_to_include in tags else 1
|
||||
if tags:
|
||||
tags = dict(sorted(tags.items(), key=lambda item: (-item[1], item[0])))
|
||||
tags = dict(list(tags.items())[:30])
|
||||
del tags[tag]
|
||||
if item_ids:
|
||||
directory = os.path.join(directory_cache, 'data', jid)
|
||||
if not os.path.exists(directory):
|
||||
os.mkdir(directory)
|
||||
filename = os.path.join(directory, tag)
|
||||
# Add support for search query
|
||||
#filename = 'data/{}/query:{}.toml'.format(jid, query)
|
||||
#filename = 'data/{}/tag:{}.toml'.format(jid, tag)
|
||||
data = {
|
||||
'item_ids' : item_ids,
|
||||
'tags' : tags}
|
||||
Data.save_to_toml(filename, data)
|
||||
|
||||
def cache_items_and_tags(directory_cache, entries, jid):
|
||||
"""Create a cache file of node items and tags."""
|
||||
item_ids = []
|
||||
tags = {}
|
||||
for entry in entries:
|
||||
entry_tags = entry['tags']
|
||||
entry_url_hash = entry['url_hash']
|
||||
tags_to_include = []
|
||||
item_ids.append(entry_url_hash)
|
||||
tags_to_include += entry_tags
|
||||
for tag_to_include in tags_to_include:
|
||||
tags[tag_to_include] = tags[tag_to_include]+1 if tag_to_include in tags else 1
|
||||
if tags:
|
||||
tags = dict(sorted(tags.items(), key=lambda item: (-item[1], item[0])))
|
||||
tags = dict(list(tags.items())[:30])
|
||||
if item_ids:
|
||||
filename = os.path.join(directory_cache, 'data', jid + '.toml')
|
||||
data = {
|
||||
'item_ids' : item_ids,
|
||||
'tags' : tags}
|
||||
Data.save_to_toml(filename, data)
|
||||
|
||||
def extract_iq_items(iq, jabber_id):
|
||||
iq_items = iq['pubsub']['items']
|
||||
entries = []
|
||||
name = jabber_id.split('@')[0]
|
||||
for iq_item in iq_items:
|
||||
item_payload = iq_item['payload']
|
||||
entry = Syndication.extract_items(item_payload)
|
||||
entries.append(entry)
|
||||
# TODO Handle this with XEP-0059 (reverse: bool), instead of reversing it.
|
||||
entries.reverse()
|
||||
return entries
|
||||
|
||||
def extract_iq_items_extra(iq, jabber_id, limit=None):
|
||||
iq_items = iq['pubsub']['items']
|
||||
entries = []
|
||||
name = jabber_id.split('@')[0]
|
||||
for iq_item in iq_items:
|
||||
item_payload = iq_item['payload']
|
||||
entry = Syndication.extract_items(item_payload, limit)
|
||||
url_hash = Utilities.hash_url_to_md5(entry['link'])
|
||||
iq_item_id = iq_item['id']
|
||||
if iq_item_id != url_hash:
|
||||
logging.error('Item ID does not match MD5. id: {} hash: {}'.format(iq_item_id, url_hash))
|
||||
db_file = 'main.sqlite'
|
||||
instances = SQLite.get_entry_instances_by_url_hash(db_file, url_hash)
|
||||
if entry:
|
||||
entry['instances'] = instances or 0
|
||||
entry['jid'] = jabber_id
|
||||
entry['name'] = name
|
||||
entry['url_hash'] = url_hash
|
||||
entries.append(entry)
|
||||
# TODO Handle this with XEP-0059 (reverse: bool), instead of reversing it.
|
||||
entries.reverse()
|
||||
result = entries
|
||||
return result
|
||||
|
||||
def open_file_toml(filename: str) -> dict:
|
||||
with open(filename, mode="rb") as fn:
|
||||
data = tomllib.load(fn)
|
||||
return data
|
||||
|
||||
def organize_tags(tags):
|
||||
tags_organized = []
|
||||
tags = tags.split(',')
|
||||
#tags = sorted(set(tags))
|
||||
for tag in tags:
|
||||
if tag:
|
||||
tag = tag.lower().strip()
|
||||
if tag not in tags_organized:
|
||||
tags_organized.append(tag)
|
||||
return sorted(tags_organized)
|
||||
|
||||
def remove_item_from_cache(directory_cache, jabber_id, node, url_hash):
|
||||
filename_items = os.path.join(directory_cache, 'items', jabber_id + '.toml')
|
||||
entries_cache = Data.open_file_toml(filename_items)
|
||||
if node in entries_cache:
|
||||
entries_cache_node = entries_cache[node]
|
||||
for entry_cache in entries_cache_node:
|
||||
if entry_cache['url_hash'] == url_hash:
|
||||
entry_cache_index = entries_cache_node.index(entry_cache)
|
||||
del entries_cache_node[entry_cache_index]
|
||||
break
|
||||
data_items = entries_cache
|
||||
Data.save_to_toml(filename_items, data_items)
|
||||
|
||||
def save_to_json(filename: str, data) -> None:
|
||||
with open(filename, 'w') as f:
|
||||
json.dump(data, f)
|
||||
|
||||
def save_to_toml(filename: str, data: dict) -> None:
|
||||
with open(filename, 'w') as fn:
|
||||
data_as_string = tomli_w.dumps(data)
|
||||
fn.write(data_as_string)
|
||||
|
||||
async def update_cache_and_database(directory_cache, xmpp_instance, jabber_id: str, node_type: str, node_id: str):
|
||||
# Download identifiers of node items.
|
||||
iq = await XmppPubsub.get_node_item_ids(xmpp_instance, jabber_id, node_id)
|
||||
if isinstance(iq, Iq):
|
||||
iq_items_remote = iq['disco_items']
|
||||
|
||||
# Cache a list of identifiers of node items to a file.
|
||||
iq_items_remote_name = []
|
||||
for iq_item_remote in iq_items_remote:
|
||||
iq_item_remote_name = iq_item_remote['name']
|
||||
iq_items_remote_name.append(iq_item_remote_name)
|
||||
|
||||
#data_item_ids = {'iq_items' : iq_items_remote_name}
|
||||
#filename_item_ids = 'item_ids/' + jabber_id + '.toml'
|
||||
#Data.save_to_toml(filename_item_ids, data_item_ids)
|
||||
|
||||
filename_items = os.path.join(directory_cache, 'items', jabber_id + '.toml')
|
||||
if not os.path.exists(filename_items) or os.path.getsize(filename_items) in (0, 13):
|
||||
iq = await XmppPubsub.get_node_items(xmpp_instance, jabber_id, node_id)
|
||||
if isinstance(iq, Iq):
|
||||
entries_cache_node = Data.extract_iq_items_extra(iq, jabber_id)
|
||||
data_items = {node_type : entries_cache_node}
|
||||
Data.save_to_toml(filename_items, data_items)
|
||||
return ['fine', iq] # TODO Remove this line
|
||||
else:
|
||||
return ['error', iq]
|
||||
else:
|
||||
entries_cache = Data.open_file_toml(filename_items)
|
||||
if node_type not in entries_cache: return ['error', 'Directory "{}" is empty'.format(node_type)]
|
||||
entries_cache_node = entries_cache[node_type]
|
||||
db_file = 'main.sqlite'
|
||||
|
||||
# Check whether items still exist on node
|
||||
for entry in entries_cache_node:
|
||||
iq_item_remote_exist = False
|
||||
url_hash = None
|
||||
for url_hash in iq_items_remote_name:
|
||||
if url_hash == entry['url_hash']:
|
||||
iq_item_remote_exist = True
|
||||
break
|
||||
if url_hash and not iq_item_remote_exist:
|
||||
await SQLite.delete_combination_row_by_jid_and_url_hash(
|
||||
db_file, url_hash, jabber_id)
|
||||
entry_index = entries_cache_node.index(entry)
|
||||
del entries_cache_node[entry_index]
|
||||
|
||||
# Check for new items on node
|
||||
entries_cache_node_new = []
|
||||
for url_hash in iq_items_remote_name:
|
||||
iq_item_local_exist = False
|
||||
for entry in entries_cache_node:
|
||||
if url_hash == entry['url_hash']:
|
||||
iq_item_local_exist = True
|
||||
break
|
||||
if not iq_item_local_exist:
|
||||
iq = await XmppPubsub.get_node_item(
|
||||
xmpp_instance, jabber_id, node_id, url_hash)
|
||||
if isinstance(iq, Iq):
|
||||
entries_iq = Data.extract_iq_items_extra(iq, jabber_id)
|
||||
entries_cache_node_new += entries_iq
|
||||
else:
|
||||
# TODO
|
||||
# Handle this concern in a different fashion,
|
||||
# instead of stopping the whole operation.
|
||||
return ['error', iq]
|
||||
entries_cache_node += entries_cache_node_new
|
||||
|
||||
if node_type == 'public':
|
||||
# Fast (low I/O)
|
||||
if not SQLite.get_jid_id_by_jid(db_file, jabber_id):
|
||||
await SQLite.set_jid(db_file, jabber_id)
|
||||
#await SQLite.add_new_entries(db_file, entries)
|
||||
await SQLite.add_tags(db_file, entries_cache_node)
|
||||
# Slow (high I/O)
|
||||
for entry in entries_cache_node:
|
||||
url_hash = entry['url_hash']
|
||||
if not SQLite.get_entry_id_by_url_hash(db_file, url_hash):
|
||||
await SQLite.add_new_entries(db_file, entries_cache_node)
|
||||
await SQLite.associate_entries_tags_jids(db_file, entry)
|
||||
#elif not SQLite.is_jid_associated_with_url_hash(db_file, jabber_id, url_hash):
|
||||
# await SQLite.associate_entries_tags_jids(db_file, entry)
|
||||
else:
|
||||
await SQLite.associate_entries_tags_jids(db_file, entry)
|
||||
|
||||
data_items = entries_cache
|
||||
Data.save_to_toml(filename_items, data_items)
|
||||
return ['fine', iq] # TODO Remove this line
|
||||
else:
|
||||
return ['error', iq]
|
27
blasta/helpers/utilities.py
Normal file
|
@ -0,0 +1,27 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from datetime import datetime
|
||||
import hashlib
|
||||
|
||||
class Utilities:
|
||||
|
||||
def convert_iso8601_to_readable(timestamp):
|
||||
old_date_format = datetime.fromisoformat(timestamp.replace("Z", "+00:00"))
|
||||
new_date_format = old_date_format.strftime("%B %d, %Y")
|
||||
return new_date_format
|
||||
|
||||
def hash_url_to_md5(url):
|
||||
url_encoded = url.encode()
|
||||
url_hashed = hashlib.md5(url_encoded)
|
||||
url_digest = url_hashed.hexdigest()
|
||||
return url_digest
|
||||
|
||||
def is_jid_matches_to_session(accounts, sessions, request):
|
||||
jabber_id = request.cookies.get('jabber_id')
|
||||
session_key = request.cookies.get('session_key')
|
||||
if (jabber_id and
|
||||
jabber_id in accounts and
|
||||
jabber_id in sessions and
|
||||
session_key == sessions[jabber_id]):
|
||||
return jabber_id
|
16
blasta/helpers/xml.py
Normal file
|
@ -0,0 +1,16 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
class Xml:
|
||||
|
||||
def create_setting_entry(xmpp_instance, key : str, value : str):
|
||||
form = xmpp_instance['xep_0004'].make_form('form', 'Settings')
|
||||
form['type'] = 'result'
|
||||
form.add_field(var=key,
|
||||
value=value)
|
||||
return form
|
||||
|
||||
# def create_setting_entry(value : str):
|
||||
# element = ET.Element('value')
|
||||
# element.text = value
|
||||
# return element
|
2482
blasta/sqlite.py
Normal file
2
blasta/version.py
Normal file
|
@ -0,0 +1,2 @@
|
|||
__version__ = '0.1'
|
||||
__version_info__ = (0, 1)
|
88
blasta/xml/syndication.py
Normal file
|
@ -0,0 +1,88 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
class Syndication:
|
||||
|
||||
def create_rfc4287_entry(feed_entry):
|
||||
node_entry = ET.Element('entry')
|
||||
node_entry.set('xmlns', 'http://www.w3.org/2005/Atom')
|
||||
# Title
|
||||
title = ET.SubElement(node_entry, 'title')
|
||||
title.set('type', 'text')
|
||||
title.text = feed_entry['title']
|
||||
# Summary
|
||||
summary = ET.SubElement(node_entry, 'summary') # TODO Try 'content'
|
||||
summary.set('type', 'text')
|
||||
#summary.set('lang', feed_entry['summary_lang'])
|
||||
summary.text = feed_entry['summary']
|
||||
# Tags
|
||||
if feed_entry['tags']:
|
||||
for term in feed_entry['tags']:
|
||||
tag = ET.SubElement(node_entry, 'category')
|
||||
tag.set('term', term)
|
||||
# Link
|
||||
link = ET.SubElement(node_entry, "link")
|
||||
link.set('href', feed_entry['link'])
|
||||
# Links
|
||||
# for feed_entry_link in feed_entry['links']:
|
||||
# link = ET.SubElement(node_entry, "link")
|
||||
# link.set('href', feed_entry_link['url'])
|
||||
# link.set('type', feed_entry_link['type'])
|
||||
# link.set('rel', feed_entry_link['rel'])
|
||||
# Date saved
|
||||
if 'published' in feed_entry and feed_entry['published']:
|
||||
published = ET.SubElement(node_entry, 'published')
|
||||
published.text = feed_entry['published']
|
||||
# Date edited
|
||||
if 'updated' in feed_entry and feed_entry['updated']:
|
||||
updated = ET.SubElement(node_entry, 'updated')
|
||||
updated.text = feed_entry['updated']
|
||||
return node_entry
|
||||
|
||||
def extract_items(item_payload, limit=False):
|
||||
namespace = '{http://www.w3.org/2005/Atom}'
|
||||
title = item_payload.find(namespace + 'title')
|
||||
links = item_payload.find(namespace + 'link')
|
||||
if (not isinstance(title, ET.Element) and
|
||||
not isinstance(links, ET.Element)): return None
|
||||
title_text = '' if title is None else title.text
|
||||
link_href = ''
if isinstance(links, ET.Element):
|
||||
for link in item_payload.findall(namespace + 'link'):
|
||||
link_href = link.attrib['href'] if 'href' in link.attrib else ''
|
||||
if link_href: break
|
||||
contents = item_payload.find(namespace + 'summary')
|
||||
summary_text = ''
|
||||
if isinstance(contents, ET.Element):
|
||||
for summary in item_payload.findall(namespace + 'summary'):
|
||||
summary_text = summary.text or ''
|
||||
if summary_text: break
|
||||
published = item_payload.find(namespace + 'published')
|
||||
published_text = '' if published is None else published.text
|
||||
categories = item_payload.find(namespace + 'category')
|
||||
tags = []
|
||||
if isinstance(categories, ET.Element):
|
||||
for category in item_payload.findall(namespace + 'category'):
|
||||
if 'term' in category.attrib and category.attrib['term']:
|
||||
category_term = category.attrib['term']
|
||||
if len(category_term) < 20:
|
||||
tags.append(category_term)
|
||||
elif len(category_term) < 50:
|
||||
tags.append(category_term)
|
||||
if limit and len(tags) > 4: break
|
||||
|
||||
|
||||
identifier = item_payload.find(namespace + 'id')
|
||||
if identifier and identifier.attrib: print(identifier.attrib)
|
||||
identifier_text = '' if identifier is None else identifier.text
|
||||
|
||||
instances = '' # TODO Check the Blasta database for instances.
|
||||
|
||||
entry = {'title' : title_text,
|
||||
'link' : link_href,
|
||||
'summary' : summary_text,
|
||||
'published' : published_text,
|
||||
'updated' : published_text, # TODO "Updated" is missing
|
||||
'tags' : tags}
|
||||
return entry
|
31
blasta/xmpp/instance.py
Normal file
|
@ -0,0 +1,31 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from slixmpp import ClientXMPP
|
||||
|
||||
class XmppInstance(ClientXMPP):
|
||||
def __init__(self, jid, password):
|
||||
super().__init__(jid, password)
|
||||
#self.add_event_handler("connection_failed", self.on_connection_failed)
|
||||
#self.add_event_handler("failed_auth", self.on_failed_auth)
|
||||
self.add_event_handler("session_start", self.on_session_start)
|
||||
self.register_plugin('xep_0004') # XEP-0004: Data Forms
|
||||
self.register_plugin('xep_0030') # XEP-0030: Service Discovery
|
||||
self.register_plugin('xep_0059') # XEP-0059: Result Set Management
|
||||
self.register_plugin('xep_0060') # XEP-0060: Publish-Subscribe
|
||||
self.register_plugin('xep_0078') # XEP-0078: Non-SASL Authentication
|
||||
self.register_plugin('xep_0163') # XEP-0163: Personal Eventing Protocol
|
||||
self.register_plugin('xep_0223') # XEP-0223: Persistent Storage of Private Data via PubSub
|
||||
self.connect()
|
||||
# self.process(forever=False)
|
||||
|
||||
self.connection_accepted = False
|
||||
|
||||
# def on_connection_failed(self, event):
|
||||
# self.connection_accepted = False
|
||||
|
||||
# def on_failed_auth(self, event):
|
||||
# self.connection_accepted = False
|
||||
|
||||
def on_session_start(self, event):
|
||||
self.connection_accepted = True
|
22
blasta/xmpp/message.py
Normal file
|
@ -0,0 +1,22 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
class XmppMessage:
|
||||
|
||||
def send(self, jid, message_body):
|
||||
jid_from = str(self.boundjid) if self.is_component else None
|
||||
self.send_message(
|
||||
mto=jid,
|
||||
mfrom=jid_from,
|
||||
mbody=message_body,
|
||||
mtype='chat')
|
||||
|
||||
# NOTE It appears to not work.
|
||||
def send_headline(self, jid, message_subject, message_body):
|
||||
jid_from = str(self.boundjid) if self.is_component else None
|
||||
self.send_message(
|
||||
mto=jid,
|
||||
mfrom=jid_from,
|
||||
msubject=message_subject,
|
||||
mbody=message_body,
|
||||
mtype='headline')
|
231
blasta/xmpp/pubsub.py
Normal file
|
@ -0,0 +1,231 @@
|
|||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import slixmpp
|
||||
from slixmpp.exceptions import IqError, IqTimeout
|
||||
#import slixmpp.plugins.xep_0060.stanza.pubsub as pubsub
|
||||
import slixmpp.plugins.xep_0059.rsm as rsm
|
||||
|
||||
class XmppPubsub:
|
||||
|
||||
# TODO max-items might be limited (CanChat: 255), so iterate from a bigger number to a smaller.
|
||||
# NOTE This function was copied from atomtopubsub
|
||||
def create_node_atom(xmpp_instance, jid, node, title, subtitle, access_model):
|
||||
jid_from = str(xmpp_instance.boundjid) if xmpp_instance.is_component else None
|
||||
iq = xmpp_instance.Iq(stype='set',
|
||||
sto=jid,
|
||||
sfrom=jid_from)
|
||||
iq['pubsub']['create']['node'] = node
|
||||
form = iq['pubsub']['configure']['form']
|
||||
form['type'] = 'submit'
|
||||
form.addField('pubsub#access_model',
|
||||
ftype='list-single',
|
||||
value=access_model)
|
||||
form.addField('pubsub#deliver_payloads',
|
||||
ftype='boolean',
|
||||
value=0)
|
||||
form.addField('pubsub#description',
|
||||
ftype='text-single',
|
||||
value=subtitle)
|
||||
form.addField('pubsub#max_items',
|
||||
ftype='text-single',
|
||||
value='255')
|
||||
form.addField('pubsub#notify_retract',
|
||||
ftype='boolean',
|
||||
value=1)
|
||||
form.addField('pubsub#persist_items',
|
||||
ftype='boolean',
|
||||
value=1)
|
||||
form.addField('pubsub#send_last_published_item',
|
||||
ftype='text-single',
|
||||
value='never')
|
||||
form.addField('pubsub#title',
|
||||
ftype='text-single',
|
||||
value=title)
|
||||
form.addField('pubsub#type',
|
||||
ftype='text-single',
|
||||
value='http://www.w3.org/2005/Atom')
|
||||
return iq
|
||||
|
||||
def create_node_config(xmpp_instance, jid):
|
||||
jid_from = str(xmpp_instance.boundjid) if xmpp_instance.is_component else None
|
||||
iq = xmpp_instance.Iq(stype='set',
|
||||
sto=jid,
|
||||
sfrom=jid_from)
|
||||
iq['pubsub']['create']['node'] = 'xmpp:blasta:configuration:0'
|
||||
form = iq['pubsub']['configure']['form']
|
||||
form['type'] = 'submit'
|
||||
form.addField('pubsub#access_model',
|
||||
ftype='list-single',
|
||||
value='whitelist')
|
||||
form.addField('pubsub#deliver_payloads',
|
||||
ftype='boolean',
|
||||
value=0)
|
||||
form.addField('pubsub#description',
|
||||
ftype='text-single',
|
||||
value='Settings of the Blasta PubSub bookmarks system')
|
||||
form.addField('pubsub#max_items',
|
||||
ftype='text-single',
|
||||
value='30')
|
||||
form.addField('pubsub#notify_retract',
|
||||
ftype='boolean',
|
||||
value=1)
|
||||
form.addField('pubsub#persist_items',
|
||||
ftype='boolean',
|
||||
value=1)
|
||||
form.addField('pubsub#send_last_published_item',
|
||||
ftype='text-single',
|
||||
value='never')
|
||||
form.addField('pubsub#title',
|
||||
ftype='text-single',
|
||||
value='Blasta Settings')
|
||||
form.addField('pubsub#type',
|
||||
ftype='text-single',
|
||||
value='settings')
|
||||
return iq
|
||||
|
||||
async def del_node_item(xmpp_instance, pubsub, node, item_id):
|
||||
try:
|
||||
iq = await xmpp_instance.plugin['xep_0060'].retract(
|
||||
pubsub, node, item_id, timeout=5, notify=None)
|
||||
result = iq
|
||||
except IqError as e:
|
||||
result = e.iq['error']['text']
|
||||
print(e)
|
||||
except IqTimeout as e:
|
||||
result = 'Timeout'
|
||||
print(e)
|
||||
print(result)
|
||||
return result
|
||||
|
||||
def get_iterator(xmpp_instance, pubsub, node, max_items, iterator):
|
||||
iterator = xmpp_instance.plugin['xep_0060'].get_items(
|
||||
pubsub, node, timeout=5, max_items=max_items, iterator=iterator)
|
||||
return iterator
|
||||
|
||||
async def get_node_configuration(xmpp_instance, pubsub, node):
|
||||
try:
|
||||
iq = await xmpp_instance.plugin['xep_0060'].get_node_config(
|
||||
pubsub, node)
|
||||
return iq
|
||||
except (IqError, IqTimeout) as e:
|
||||
print(e)
|
||||
|
||||
async def get_node_item(xmpp_instance, pubsub, node, item_id):
|
||||
try:
|
||||
iq = await xmpp_instance.plugin['xep_0060'].get_item(
|
||||
pubsub, node, item_id, timeout=5)
|
||||
result = iq
|
||||
except IqError as e:
|
||||
result = e.iq['error']['text']
|
||||
print(e)
|
||||
except IqTimeout as e:
|
||||
result = 'Timeout'
|
||||
print(e)
|
||||
return result
|
||||
|
||||
async def get_node_item_ids(xmpp_instance, pubsub, node):
|
||||
try:
|
||||
iq = await xmpp_instance.plugin['xep_0030'].get_items(
|
||||
pubsub, node)
|
||||
# Broken. See https://codeberg.org/poezio/slixmpp/issues/3548
|
||||
#iq = await xmpp_instance.plugin['xep_0060'].get_item_ids(
|
||||
# pubsub, node, timeout=5)
|
||||
result = iq
|
||||
except IqError as e:
|
||||
if e.iq['error']['text'] == 'Node not found':
|
||||
result = 'Node not found'
|
||||
elif e.iq['error']['condition'] == 'item-not-found':
|
||||
result = 'Item not found'
|
||||
else:
|
||||
result = None
|
||||
print(e)
|
||||
except IqTimeout as e:
|
||||
result = 'Timeout'
|
||||
print(e)
|
||||
return result
|
||||
|
||||
async def get_node_item_private(xmpp_instance, node, item_id):
|
||||
try:
|
||||
iq = await xmpp_instance.plugin['xep_0223'].retrieve(
|
||||
node, item_id, timeout=5)
|
||||
result = iq
|
||||
except IqError as e:
|
||||
result = e.iq['error']['text']
|
||||
print(e)
|
||||
except IqTimeout as e:
|
||||
result = 'Timeout'
|
||||
print(e)
|
||||
return result
|
||||
|
||||
async def get_node_items(xmpp_instance, pubsub, node, item_ids=None, max_items=None):
|
||||
try:
|
||||
if max_items:
|
||||
iq = await xmpp_instance.plugin['xep_0060'].get_items(
|
||||
pubsub, node, timeout=5)
|
||||
it = xmpp_instance.plugin['xep_0060'].get_items(
|
||||
pubsub, node, timeout=5, max_items=max_items, iterator=True)
|
||||
q = rsm.Iq()
|
||||
q['to'] = pubsub
|
||||
q['disco_items']['node'] = node
|
||||
async for item in rsm.ResultIterator(q, 'disco_items', '10'):
|
||||
print(item['disco_items']['items'])
|
||||
|
||||
else:
|
||||
iq = await xmpp_instance.plugin['xep_0060'].get_items(
|
||||
pubsub, node, timeout=5, item_ids=item_ids)
|
||||
result = iq
|
||||
except IqError as e:
|
||||
if e.iq['error']['text'] == 'Node not found':
|
||||
result = 'Node not found'
|
||||
elif e.iq['error']['condition'] == 'item-not-found':
|
||||
result = 'Item not found'
|
||||
else:
|
||||
result = None
|
||||
print(e)
|
||||
except IqTimeout as e:
|
||||
result = 'Timeout'
|
||||
print(e)
|
||||
return result
|
||||
|
||||
async def get_nodes(xmpp_instance):
|
||||
try:
|
||||
iq = await xmpp_instance.plugin['xep_0060'].get_nodes()
|
||||
return iq
|
||||
except (IqError, IqTimeout) as e:
|
||||
print(e)
|
||||
|
||||
async def is_node_exist(xmpp_instance, node_name):
|
||||
iq = await XmppPubsub.get_nodes(xmpp_instance)
|
||||
nodes = iq['disco_items']['items']
|
||||
for node in nodes:
|
||||
if node[1] == node_name:
|
||||
return True
|
||||
|
||||
async def publish_node_item(xmpp_instance, jid, node, item_id, payload):
|
||||
try:
|
||||
iq = await xmpp_instance.plugin['xep_0060'].publish(
|
||||
jid, node, id=item_id, payload=payload)
|
||||
print(iq)
|
||||
return iq
|
||||
except (IqError, IqTimeout) as e:
|
||||
print(e)
|
||||
|
||||
async def publish_node_item_private(xmpp_instance, node, item_id, stanza):
|
||||
try:
|
||||
iq = await xmpp_instance.plugin['xep_0223'].store(
|
||||
stanza, node, item_id)
|
||||
print(iq)
|
||||
return iq
|
||||
except (IqError, IqTimeout) as e:
|
||||
print(e)
|
||||
if e.iq['error']['text'] == 'Field does not match: access_model':
|
||||
return 'Error: Could not set private bookmark due to Access Model mismatch'
|
||||
|
||||
async def set_node_private(xmpp_instance, node):
|
||||
try:
|
||||
iq = await xmpp_instance.plugin['xep_0223'].configure(node)
|
||||
print(iq)
|
||||
return iq
|
||||
except (IqError, IqTimeout) as e:
|
||||
print(e)
|
|
@ -1 +0,0 @@
|
|||
This directory is meant to store hashes and tags per JID as TOML.
|
|
@ -1 +0,0 @@
|
|||
This directory contains exported nodes.
|
|
@ -1,18 +0,0 @@
|
|||
<?xml version="1.0"?>
|
||||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
|
||||
<svg xmlns="http://www.w3.org/2000/svg" version="1.1" width="128px" height="128px" id="RSSicon" viewBox="0 0 256 256">
|
||||
<defs>
|
||||
<linearGradient x1="0.085" y1="0.085" x2="0.915" y2="0.915" id="RSSg">
|
||||
<stop offset="0.0" stop-color="#E3702D"/><stop offset="0.1071" stop-color="#EA7D31"/>
|
||||
<stop offset="0.3503" stop-color="#F69537"/><stop offset="0.5" stop-color="#FB9E3A"/>
|
||||
<stop offset="0.7016" stop-color="#EA7C31"/><stop offset="0.8866" stop-color="#DE642B"/>
|
||||
<stop offset="1.0" stop-color="#D95B29"/>
|
||||
</linearGradient>
|
||||
</defs>
|
||||
<rect width="256" height="256" rx="55" ry="55" x="0" y="0" fill="#CC5D15"/>
|
||||
<rect width="246" height="246" rx="50" ry="50" x="5" y="5" fill="#F49C52"/>
|
||||
<rect width="236" height="236" rx="47" ry="47" x="10" y="10" fill="url(#RSSg)"/>
|
||||
<circle cx="68" cy="189" r="24" fill="#FFF"/>
|
||||
<path d="M160 213h-34a82 82 0 0 0 -82 -82v-34a116 116 0 0 1 116 116z" fill="#FFF"/>
|
||||
<path d="M184 213A140 140 0 0 0 44 73 V 38a175 175 0 0 1 175 175z" fill="#FFF"/>
|
||||
</svg>
|
|
@ -1 +0,0 @@
|
|||
This directory is meant to cache nodes per JID as TOML.
|
60
pyproject.toml
Normal file
|
@ -0,0 +1,60 @@
|
|||
[build-system]
|
||||
requires = ["setuptools>=61.2"]
|
||||
build-backend = "setuptools.build_meta"
|
||||
|
||||
[project]
|
||||
name = "Blasta"
|
||||
version = "1.0"
|
||||
description = "A collaborative annotation management system for XMPP"
|
||||
authors = [{name = "Schimon Zachary", email = "sch@fedora.email"}]
|
||||
license = {text = "AGPL-3.0"}
|
||||
classifiers = [
|
||||
"Framework :: slixmpp",
|
||||
"Intended Audience :: End Users/Desktop",
|
||||
"License :: OSI Approved :: AGPL-3.0 License",
|
||||
"Natural Language :: English",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3.10",
|
||||
"Topic :: Internet :: Extensible Messaging and Presence Protocol (XMPP)",
|
||||
"Topic :: Internet :: WWW/HTTP :: Dynamic Content :: News/Diary",
|
||||
"Topic :: Internet :: XMPP",
|
||||
"Topic :: Office/Business :: News/Diary",
|
||||
]
|
||||
keywords = [
|
||||
"atom",
|
||||
"bookmark",
|
||||
"collaboration",
|
||||
"gemini",
|
||||
"index",
|
||||
"jabber",
|
||||
"journal",
|
||||
"news",
|
||||
"social",
|
||||
"syndication",
|
||||
"xml",
|
||||
"xmpp",
|
||||
]
|
||||
|
||||
dependencies = [
|
||||
"fastapi",
|
||||
"lxml",
|
||||
"python-dateutil",
|
||||
"python-multipart",
|
||||
"slixmpp",
|
||||
"tomli", # Python 3.10
|
||||
"uvicorn",
|
||||
]
|
||||
|
||||
[project.urls]
|
||||
Homepage = "https://schapps.woodpeckersnest.eu/blasta/"
|
||||
Repository = "https://git.xmpp-it.net/sch/Blasta"
|
||||
Issues = "https://git.xmpp-it.net/sch/Blasta/issues"
|
||||
|
||||
[project.scripts]
|
||||
blasta = "blasta.__main__:main"
|
||||
|
||||
[tool.setuptools]
|
||||
platforms = ["any"]
|
||||
|
||||
[tool.setuptools.package-data]
|
||||
"*" = ["*.toml"]
|
|
@ -13,22 +13,22 @@ journal = ""
|
|||
pubsub = ""
|
||||
|
||||
# Bibliography
|
||||
node_id = "urn:xmpp:bibliography:0"
|
||||
node_id = "blasta:annotation:0"
|
||||
node_title = "Blasta"
|
||||
node_subtitle = "Bibliography"
|
||||
node_subtitle = "Annotation"
|
||||
|
||||
# Private bibliography
|
||||
node_id_private = "xmpp:bibliography:private:0"
|
||||
node_id_private = "blasta:annotation:private:0"
|
||||
node_title_private = "Blasta (Private)"
|
||||
node_subtitle_private = "Private bibliography"
|
||||
node_subtitle_private = "Private annotation"
|
||||
|
||||
# Reading list
|
||||
node_id_read = "xmpp:bibliography:read:0"
|
||||
node_id_read = "blasta:annotation:read:0"
|
||||
node_title_read = "Blasta (Read)"
|
||||
node_subtitle_read = "Reading list"
|
||||
|
||||
# Settings node
|
||||
node_settings = "xmpp:blasta:settings:0"
|
||||
node_settings = "blasta:settings:0"
|
||||
|
||||
# Acceptable protocol types that would be aggregated to the Blasta database
|
||||
schemes = [
|