2023-07-16 17:23:44 +02:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
# -*- coding: utf-8 -*-
|
|
|
|
|
2023-11-13 14:45:10 +01:00
|
|
|
"""
|
|
|
|
|
|
|
|
TODO
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
0) Function "mark_feed_as_read": see function "maintain_archive"
|
|
|
|
|
2024-01-10 21:06:56 +01:00
|
|
|
1) Function to open connection (receive db_file).
|
|
|
|
Function to close connection.
|
|
|
|
All other functions to receive cursor.
|
2023-11-13 14:45:10 +01:00
|
|
|
|
2024-01-10 21:06:56 +01:00
|
|
|
2) Merge function add_metadata into function import_feeds.
|
2024-02-10 18:53:53 +01:00
|
|
|
|
|
|
|
3) SQL prepared statements.
|
|
|
|
|
|
|
|
4) Support categories;
|
|
|
|
|
2023-11-13 14:45:10 +01:00
|
|
|
"""
|
|
|
|
|
2023-12-04 15:41:02 +01:00
|
|
|
from asyncio import Lock
|
2023-12-28 15:50:23 +01:00
|
|
|
# from slixfeed.data import join_url
|
2024-03-03 15:13:01 +01:00
|
|
|
from slixfeed.log import Logger
|
2024-01-06 23:03:08 +01:00
|
|
|
from sqlite3 import connect, Error, IntegrityError
|
2024-03-03 15:13:01 +01:00
|
|
|
import sys
|
2024-01-26 12:34:07 +01:00
|
|
|
import time
|
2023-07-16 17:23:44 +02:00
|
|
|
|
2023-09-29 13:49:24 +02:00
|
|
|
# from eliot import start_action, to_file
|
2023-11-13 14:45:10 +01:00
|
|
|
# # with start_action(action_type="list_feeds()", db=db_file):
|
2023-09-29 13:49:24 +02:00
|
|
|
# # with start_action(action_type="last_entries()", num=num):
|
2023-11-13 14:45:10 +01:00
|
|
|
# # with start_action(action_type="get_feeds()"):
|
2023-09-29 13:49:24 +02:00
|
|
|
# # with start_action(action_type="remove_entry()", source=source):
|
|
|
|
# # with start_action(action_type="search_entries()", query=query):
|
|
|
|
# # with start_action(action_type="check_entry()", link=link):
|
2023-07-16 17:23:44 +02:00
|
|
|
|
2024-03-03 15:13:01 +01:00
|
|
|
# Per-database cache of sqlite3 cursors, keyed by database file path
# (populated lazily by get_cursor).
CURSORS = {}
|
|
|
|
|
2023-07-16 17:23:44 +02:00
|
|
|
# aiosqlite
|
2023-12-04 15:41:02 +01:00
|
|
|
# Asynchronous lock serializing write access to the databases
# (held by the writer coroutines before they open a connection).
DBLOCK = Lock()
|
2023-07-16 17:23:44 +02:00
|
|
|
|
2024-03-03 15:13:01 +01:00
|
|
|
# Module-level logger (Logger wrapper imported from slixfeed.log).
logger = Logger(__name__)
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
def create_connection(db_file):
    """
    Create a database connection to the SQLite database
    specified by db_file.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    conn : object
        Connection object or None.
    """
    time_begin = time.time()
    function_name = sys._getframe().f_code.co_name
    logger.debug(function_name)
    conn = None
    try:
        conn = connect(db_file)
        # Enforce referential integrity for the FOREIGN KEY clauses
        # declared in create_tables (SQLite has this off by default).
        conn.execute("PRAGMA foreign_keys = ON")
    except Error as e:
        logger.warning('Error creating a connection to database {}.'.format(db_file))
        logger.error(e)
    # Warn when opening a connection is unexpectedly slow (> 1 second).
    time_end = time.time()
    difference = time_end - time_begin
    if difference > 1:
        logger.warning('{} (time: {})'.format(function_name, difference))
    return conn
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
|
|
|
def create_tables(db_file):
    """
    Create SQLite tables.

    Parameters
    ----------
    db_file : str
        Path to database file.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        entries_properties_authors_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS entries_properties_authors (
                id INTEGER NOT NULL,
                entry_id INTEGER NOT NULL,
                name TEXT,
                url TEXT,
                email TEXT,
                FOREIGN KEY ("entry_id") REFERENCES "entries_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        entries_properties_contributors_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS entries_properties_contributors (
                id INTEGER NOT NULL,
                entry_id INTEGER NOT NULL,
                name TEXT,
                url TEXT,
                email TEXT,
                FOREIGN KEY ("entry_id") REFERENCES "entries_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        entries_properties_contents_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS entries_properties_contents (
                id INTEGER NOT NULL,
                entry_id INTEGER NOT NULL,
                text TEXT,
                type TEXT,
                base TEXT,
                lang TEXT,
                FOREIGN KEY ("entry_id") REFERENCES "entries_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        entries_properties_links_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS entries_properties_links (
                id INTEGER NOT NULL,
                entry_id INTEGER NOT NULL,
                url TEXT,
                type TEXT,
                rel TEXT,
                size INTEGER,
                FOREIGN KEY ("entry_id") REFERENCES "entries_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        entries_properties_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS entries_properties (
                id INTEGER NOT NULL,
                feed_id INTEGER NOT NULL,
                identifier TEXT,
                link TEXT,
                title TEXT,
                title_type TEXT,
                summary_text TEXT,
                summary_lang TEXT,
                summary_type TEXT,
                summary_base TEXT,
                category TEXT,
                href TEXT,
                comments TEXT,
                rating TEXT,
                published TEXT,
                updated TEXT,
                FOREIGN KEY ("feed_id") REFERENCES "feeds_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        entries_state_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS entries_state (
                id INTEGER NOT NULL,
                entry_id INTEGER NOT NULL,
                rejected INTEGER NOT NULL DEFAULT 0,
                read INTEGER NOT NULL DEFAULT 0,
                archived INTEGER NOT NULL DEFAULT 0,
                FOREIGN KEY ("entry_id") REFERENCES "entries_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        entries_properties_tags_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS entries_properties_tags (
                id INTEGER NOT NULL,
                entry_id INTEGER NOT NULL,
                term TEXT,
                scheme TEXT,
                label TEXT,
                FOREIGN KEY ("entry_id") REFERENCES "entries_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        # TODO Rethink!
        # Albeit, probably, more expensive, we might want to have feed_id
        # as foreign key, as it is with feeds_properties and feeds_state
        feeds_preferences_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS feeds_preferences (
                id INTEGER NOT NULL,
                feed_id INTEGER NOT NULL UNIQUE,
                enabled INTEGER NOT NULL DEFAULT 1,
                mutable INTEGER NOT NULL DEFAULT 0,
                filter INTEGER NOT NULL DEFAULT 1,
                priority INTEGER NOT NULL DEFAULT 0,
                FOREIGN KEY ("feed_id") REFERENCES "feeds_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        feeds_properties_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS feeds_properties (
                id INTEGER NOT NULL,
                url TEXT NOT NULL UNIQUE,
                identifier TEXT,
                title TEXT,
                title_type TEXT,
                subtitle TEXT,
                subtitle_type TEXT,
                version TEXT,
                encoding TEXT,
                language TEXT,
                rating TEXT,
                entries INTEGER,
                icon TEXT,
                image TEXT,
                logo TEXT,
                ttl TEXT,
                updated TEXT,
                PRIMARY KEY ("id")
            );
            """
        )
        feeds_properties_links_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS feeds_properties_links (
                id INTEGER NOT NULL,
                feed_id INTEGER NOT NULL,
                url TEXT,
                type TEXT,
                rel TEXT,
                size INTEGER,
                FOREIGN KEY ("feed_id") REFERENCES "feeds_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        feeds_rules_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS feeds_rules (
                id INTEGER NOT NULL,
                feed_id INTEGER NOT NULL UNIQUE,
                type TEXT NOT NULL,
                keywords TEXT,
                FOREIGN KEY ("feed_id") REFERENCES "feeds_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        feeds_state_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS feeds_state (
                id INTEGER NOT NULL,
                feed_id INTEGER NOT NULL UNIQUE,
                renewed TEXT,
                scanned TEXT,
                status_code INTEGER,
                valid INTEGER,
                FOREIGN KEY ("feed_id") REFERENCES "feeds_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        # NOTE(review): the original declared the column "entries" twice,
        # which is a SQL error (duplicate column name); the duplicate has
        # been removed.  The table is not created yet (execute below is
        # commented out), so no migration is needed.
        feeds_statistics_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS statistics (
                id INTEGER NOT NULL,
                feed_id INTEGER NOT NULL UNIQUE,
                offline INTEGER,
                entries INTEGER,
                FOREIGN KEY ("feed_id") REFERENCES "feeds_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        feeds_properties_tags_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS feeds_properties_tags (
                id INTEGER NOT NULL,
                feed_id INTEGER NOT NULL,
                term TEXT,
                scheme TEXT,
                label TEXT,
                FOREIGN KEY ("feed_id") REFERENCES "feeds_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        # TODO
        # Consider parameter unique:
        # entry_id TEXT NOT NULL UNIQUE,
        # Will eliminate function:
        # check_entry_exist
        filters_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS filters (
                id INTEGER NOT NULL,
                key TEXT NOT NULL,
                value TEXT,
                PRIMARY KEY ("id")
            );
            """
        )
        settings_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS settings (
                id INTEGER NOT NULL,
                key TEXT NOT NULL,
                value INTEGER,
                PRIMARY KEY ("id")
            );
            """
        )
        status_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS status (
                id INTEGER NOT NULL,
                key TEXT NOT NULL,
                value INTEGER,
                PRIMARY KEY ("id")
            );
            """
        )
        # NOTE(review): the feed_id foreign key originally referenced table
        # "feeds", which is never created anywhere in this module; with
        # PRAGMA foreign_keys = ON every insert into tagged_feeds would
        # fail.  It now references "feeds_properties" like every other
        # feed_id foreign key in this schema.
        tagged_feeds_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS tagged_feeds (
                id INTEGER NOT NULL,
                feed_id INTEGER NOT NULL,
                tag_id INTEGER NOT NULL,
                FOREIGN KEY ("feed_id") REFERENCES "feeds_properties" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                FOREIGN KEY ("tag_id") REFERENCES "tags" ("id")
                    ON UPDATE CASCADE
                    ON DELETE CASCADE,
                PRIMARY KEY ("id")
            );
            """
        )
        tags_table_sql = (
            """
            CREATE TABLE IF NOT EXISTS tags (
                id INTEGER NOT NULL,
                tag TEXT NOT NULL UNIQUE,
                PRIMARY KEY ("id")
            );
            """
        )
        cur = conn.cursor()
        # Parent tables first, so the FOREIGN KEY targets already exist.
        cur.execute(entries_properties_table_sql)
        cur.execute(entries_properties_authors_table_sql)
        cur.execute(entries_properties_contents_table_sql)
        cur.execute(entries_properties_contributors_table_sql)
        cur.execute(entries_properties_links_table_sql)
        cur.execute(entries_properties_tags_table_sql)
        cur.execute(entries_state_table_sql)
        cur.execute(feeds_properties_table_sql)
        cur.execute(feeds_properties_links_table_sql)
        cur.execute(feeds_properties_tags_table_sql)
        cur.execute(feeds_preferences_table_sql)
        cur.execute(feeds_rules_table_sql)
        cur.execute(feeds_state_table_sql)
        cur.execute(filters_table_sql)
        # cur.execute(feeds_statistics_table_sql)
        cur.execute(settings_table_sql)
        cur.execute(status_table_sql)
        cur.execute(tagged_feeds_table_sql)
        cur.execute(tags_table_sql)
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
|
|
|
def get_cursor(db_file):
    """
    Allocate a cursor to connection per database.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    CURSORS[db_file] : object
        Cursor.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    if db_file not in CURSORS:
        # First request for this database: open a connection and
        # cache its cursor for all subsequent calls.
        with create_connection(db_file) as conn:
            CURSORS[db_file] = conn.cursor()
    return CURSORS[db_file]
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-01-06 23:03:08 +01:00
|
|
|
async def import_feeds(db_file, feeds):
    """
    Insert a new feed into the feeds table.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feeds : list
        Set of feeds (Title and URL).
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    # The statement is identical for every feed, so it is built once.
    sql = (
        """
        INSERT
        INTO feeds_properties(
            title, url)
        VALUES(
            ?, ?)
        """
    )
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            for feed in feeds:
                logger.debug('{}: feed: {}'
                             .format(function_name, feed))
                url = feed['url']
                title = feed['title']
                try:
                    cur.execute(sql, (title, url))
                except IntegrityError as e:
                    # The url column is UNIQUE; skip duplicates and go on.
                    logger.warning("Skipping: " + str(url))
                    logger.error(e)
|
2024-01-06 23:03:08 +01:00
|
|
|
|
|
|
|
|
2024-01-10 21:06:56 +01:00
|
|
|
async def add_metadata(db_file):
    """
    Insert a new feed into the feeds table.

    Parameters
    ----------
    db_file : str
        Path to database file.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = (
                """
                SELECT id
                FROM feeds_properties
                ORDER BY id ASC
                """
            )
            # Backfill state and preferences rows for every known feed.
            for (feed_id,) in cur.execute(sql).fetchall():
                insert_feed_status(cur, feed_id)
                insert_feed_preferences(cur, feed_id)
|
2024-01-10 21:06:56 +01:00
|
|
|
|
|
|
|
|
|
|
|
def insert_feed_status(cur, feed_id):
    """
    Set feed status.

    Parameters
    ----------
    cur : object
        Cursor object.
    feed_id : int
        Feed ID.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: feed_id: {}'
                 .format(function_name, feed_id))
    sql = """
        INSERT
        INTO feeds_state(
            feed_id)
        VALUES(
            ?)
        """
    try:
        cur.execute(sql, (feed_id,))
    except IntegrityError as e:
        # feed_id is UNIQUE in feeds_state; a row already exists.
        logger.warning(
            "Skipping feed_id {} for table feeds_state".format(feed_id))
        logger.error(e)
|
2024-01-10 21:06:56 +01:00
|
|
|
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
def insert_feed_preferences(cur, feed_id):
    """
    Set feed preferences.

    Parameters
    ----------
    cur : object
        Cursor object.
    feed_id : int
        Feed ID.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: feed_id: {}'
                 .format(function_name, feed_id))
    sql = """
        INSERT
        INTO feeds_preferences(
            feed_id)
        VALUES(
            ?)
        """
    try:
        cur.execute(sql, (feed_id,))
    except IntegrityError as e:
        # feed_id is UNIQUE in feeds_preferences; a row already exists.
        logger.warning(
            "Skipping feed_id {} for table feeds_preferences".format(feed_id))
        logger.error(e)
|
2024-01-10 21:06:56 +01:00
|
|
|
|
|
|
|
|
2024-05-14 21:39:58 +02:00
|
|
|
# TODO Test
|
2024-04-05 17:25:04 +02:00
|
|
|
def insert_feed_properties(cur, feed_id):
    """
    Set feed properties.

    Parameters
    ----------
    cur : object
        Cursor object.
    feed_id : int
        Feed ID.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: feed_id: {}'
                 .format(function_name, feed_id))
    sql = """
        INSERT
        INTO feeds_properties(
            id)
        VALUES(
            ?)
        """
    try:
        cur.execute(sql, (feed_id,))
    except IntegrityError as e:
        # A feeds_properties row with this id already exists.
        logger.warning(
            "Skipping feed_id {} for table feeds_properties".format(feed_id))
        logger.error(e)
|
|
|
|
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
async def insert_feed(db_file, url, title, identifier, entries=None, version=None,
                      encoding=None, language=None, status_code=None,
                      updated=None):
    """
    Insert a new feed into the feeds table.

    Parameters
    ----------
    db_file : str
        Path to database file.
    url : str
        URL.
    title : str
        Feed title.
    identifier : str
        Feed identifier.
    entries : int, optional
        Number of entries. The default is None.
    version : str, optional
        Type of feed. The default is None.
    encoding : str, optional
        Encoding of feed. The default is None.
    language : str, optional
        Language code of feed. The default is None.
    status_code : str, optional
        HTTP status code. The default is None.
    updated : ???, optional
        Date feed was last updated. The default is None.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} url: {}'
                 .format(function_name, db_file, url))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = (
                """
                INSERT
                INTO feeds_properties(
                    url, title, identifier, entries, version, encoding, language, updated)
                VALUES(
                    ?, ?, ?, ?, ?, ?, ?, ?)
                """
            )
            par = (url, title, identifier, entries, version, encoding, language, updated)
            cur.execute(sql, par)
            # Look up the row ID of the feed that was just inserted.
            sql = (
                """
                SELECT id
                FROM feeds_properties
                WHERE url = :url
                """
            )
            par = (url,)
            feed_id = cur.execute(sql, par).fetchone()[0]
            # Record the fetch state; the feed starts out marked valid.
            sql = (
                """
                INSERT
                INTO feeds_state(
                    feed_id, status_code, valid)
                VALUES(
                    ?, ?, ?)
                """
            )
            par = (feed_id, status_code, 1)
            cur.execute(sql, par)
            # Create a preferences row with the schema defaults.
            sql = (
                """
                INSERT
                INTO feeds_preferences(
                    feed_id)
                VALUES(
                    ?)
                """
            )
            par = (feed_id,)
            cur.execute(sql, par)
|
2024-01-06 23:03:08 +01:00
|
|
|
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
async def insert_feed_(db_file, url, title):
    """
    Insert a new feed into the feeds table.

    Parameters
    ----------
    db_file : str
        Path to database file.
    url : str
        URL.
    title : str, optional
        Feed title. The default is None.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} url: {}'
                 .format(function_name, db_file, url))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = (
                """
                INSERT
                INTO feeds_properties(
                    title, url)
                VALUES(
                    ?, ?)
                """
            )
            cur.execute(sql, (title, url))
            # Fetch the row ID of the feed that was just added.
            sql = (
                """
                SELECT id
                FROM feeds_properties
                WHERE url = :url
                """
            )
            feed_id = cur.execute(sql, (url,)).fetchone()[0]
            insert_feed_status(cur, feed_id)
            insert_feed_preferences(cur, feed_id)
|
2024-01-10 21:06:56 +01:00
|
|
|
|
|
|
|
|
2024-01-06 23:03:08 +01:00
|
|
|
async def remove_feed_by_url(db_file, url):
    """
    Delete a feed by feed URL.

    Parameters
    ----------
    db_file : str
        Path to database file.
    url : str
        URL of feed.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} url: {}'
                 .format(function_name, db_file, url))
    # Acquire the write lock before opening the connection, consistent
    # with the other writers in this module (import_feeds, insert_feed).
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            # Dependent rows (state, preferences, entries, ...) are
            # removed automatically via ON DELETE CASCADE.
            sql = (
                """
                DELETE
                FROM feeds_properties
                WHERE url = ?
                """
            )
            par = (url,)
            cur.execute(sql, par)
|
2023-09-29 13:49:24 +02:00
|
|
|
|
2023-07-16 17:23:44 +02:00
|
|
|
|
2024-01-06 23:03:08 +01:00
|
|
|
async def remove_feed_by_index(db_file, ix):
    """
    Delete a feed by feed ID.

    Parameters
    ----------
    db_file : str
        Path to database file.
    ix : str
        Index of feed.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} ix: {}'
                 .format(function_name, db_file, ix))
    # Acquire the write lock before opening the connection, consistent
    # with the other writers in this module (import_feeds, insert_feed).
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            # Entries, archive and other dependent rows are removed
            # automatically via ON DELETE CASCADE (see create_tables),
            # so only the feeds_properties row is deleted explicitly.
            sql = (
                """
                DELETE
                FROM feeds_properties
                WHERE id = ?
                """
            )
            par = (ix,)
            cur.execute(sql, par)
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-02-29 18:08:53 +01:00
|
|
|
def get_feeds_by_tag_id(db_file, tag_id):
    """
    Get feeds of given tag.

    Parameters
    ----------
    db_file : str
        Path to database file.
    tag_id : str
        Tag ID.

    Returns
    -------
    result : tuple
        List of tags.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} tag_id: {}'
                 .format(function_name, db_file, tag_id))
    sql = (
        """
        SELECT feeds_properties.*
        FROM feeds_properties
        INNER JOIN tagged_feeds ON feeds_properties.id = tagged_feeds.feed_id
        INNER JOIN tags ON tags.id = tagged_feeds.tag_id
        WHERE tags.id = ?
        ORDER BY feeds_properties.title;
        """
    )
    with create_connection(db_file) as conn:
        return conn.cursor().execute(sql, (tag_id,)).fetchall()
|
|
|
|
|
|
|
|
|
2024-02-25 20:21:10 +01:00
|
|
|
def get_tags_by_feed_id(db_file, feed_id):
    """
    Get tags of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed ID.

    Returns
    -------
    result : tuple
        Tag names associated with the feed, ordered alphabetically.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT tags.tag
            FROM tags
            INNER JOIN tagged_feeds ON tags.id = tagged_feeds.tag_id
            INNER JOIN feeds_properties ON feeds_properties.id = tagged_feeds.feed_id
            WHERE feeds_properties.id = ?
            ORDER BY tags.tag;
            """
            )
        parameters = (feed_id,)
        rows = cursor.execute(sql, parameters).fetchall()
        return rows
|
|
|
|
|
|
|
|
|
|
|
|
async def set_feed_id_and_tag_id(db_file, feed_id, tag_id):
    """
    Set Feed ID and Tag ID.

    Associate the given feed with the given tag by inserting a row
    into the tagged_feeds join table.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed ID
    tag_id : str
        Tag ID
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {} tag_id: {}'
                 .format(function_name, db_file, feed_id, tag_id))
    # Serialize writes through the module-wide lock.
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                INSERT
                INTO tagged_feeds(
                 feed_id, tag_id)
                VALUES(
                 :feed_id, :tag_id)
                """
                )
            parameters = {"feed_id": feed_id,
                          "tag_id": tag_id}
            cursor.execute(sql, parameters)
|
|
|
|
|
|
|
|
|
2024-03-26 17:23:22 +01:00
|
|
|
def get_feed_properties(db_file, feed_id):
    """
    Get properties of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed ID.

    Returns
    -------
    properties : tuple or None
        Row of feed properties, or None if the feed does not exist.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        # Use a qmark placeholder: binding a sequence to a named
        # placeholder (":feed_id") is deprecated since Python 3.12.
        sql = (
            """
            SELECT *
            FROM feeds_properties
            WHERE id = ?
            """
            )
        par = (feed_id,)
        properties = cur.execute(sql, par).fetchone()
        return properties
|
2024-03-26 17:23:22 +01:00
|
|
|
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
def get_feed_identifier(db_file, feed_id):
    """
    Get identifier of given feed ID.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed ID.

    Returns
    -------
    identifier : tuple or None
        Row containing the identifier name, or None when not found.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT identifier
            FROM feeds_properties
            WHERE id = ?
            """
            )
        row = cursor.execute(sql, (feed_id,)).fetchone()
        return row
|
2024-03-26 17:23:22 +01:00
|
|
|
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
def check_identifier_exist(db_file, identifier):
    """
    Check whether given identifier exist.

    Parameters
    ----------
    db_file : str
        Path to database file.
    identifier : str
        Identifier name.

    Returns
    -------
    result : tuple or None
        Row containing the identifier when it exists, otherwise None.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} identifier: {}'
                 .format(function_name, db_file, identifier))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT identifier
            FROM feeds_properties
            WHERE identifier = ?
            """
            )
        # Bind into a fresh name instead of shadowing the parameter.
        row = cursor.execute(sql, (identifier,)).fetchone()
        return row
|
2024-03-26 17:23:22 +01:00
|
|
|
|
|
|
|
|
2024-02-29 18:08:53 +01:00
|
|
|
def get_tag_id(db_file, tag_name):
    """
    Get ID of given tag. Check whether tag exist.

    Parameters
    ----------
    db_file : str
        Path to database file.
    tag_name : str
        Tag name.

    Returns
    -------
    ix : tuple or None
        Row containing the tag ID, or None when the tag does not exist.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} tag_name: {}'
                 .format(function_name, db_file, tag_name))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT id
            FROM tags
            WHERE tag = ?
            """
            )
        row = cursor.execute(sql, (tag_name,)).fetchone()
        return row
|
|
|
|
|
|
|
|
|
2024-02-29 18:08:53 +01:00
|
|
|
def get_tag_name(db_file, ix):
    """
    Get name of given tag. Check whether tag exist.

    Parameters
    ----------
    db_file : str
        Path to database file.
    ix : str
        Tag ID.

    Returns
    -------
    tag_name : tuple or None
        Row containing the tag name, or None when the ID is unknown.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} ix: {}'
                 .format(function_name, db_file, ix))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT tag
            FROM tags
            WHERE id = ?
            """
            )
        row = cursor.execute(sql, (ix,)).fetchone()
        return row
|
|
|
|
|
|
|
|
|
2024-02-25 20:21:10 +01:00
|
|
|
def is_tag_id_associated(db_file, tag_id):
    """
    Check whether tag_id is associated with any feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    tag_id : str
        Tag ID.

    Returns
    -------
    tag_id : tuple or None
        Row containing the tag ID when at least one association
        exists, otherwise None.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} tag_id: {}'
                 .format(function_name, db_file, tag_id))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT tag_id
            FROM tagged_feeds
            WHERE tag_id = :tag_id
            """
            )
        parameters = {"tag_id": tag_id}
        row = cursor.execute(sql, parameters).fetchone()
        return row
|
|
|
|
|
|
|
|
|
|
|
|
async def delete_tag_by_index(db_file, ix):
    """
    Delete tag by given index.

    NOTE(review): this removes only the row from table "tags";
    rows of table "tagged_feeds" referencing this tag appear to be
    handled elsewhere — confirm against callers.

    Parameters
    ----------
    db_file : str
        Path to database file.
    ix : str
        Tag ID.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} ix: {}'
                 .format(function_name, db_file, ix))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                DELETE
                FROM tags
                WHERE id = :id
                """
                )
            parameters = {"id": ix}
            cursor.execute(sql, parameters)
|
|
|
|
|
|
|
|
|
|
|
|
def is_tag_id_of_feed_id(db_file, tag_id, feed_id):
    """
    Check whether given tag is related with given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed ID.
    tag_id : str
        Tag ID.

    Returns
    -------
    tag_id : tuple or None
        Row containing the tag ID when the association exists,
        otherwise None.
    """
    function_name = sys._getframe().f_code.co_name
    # Fix: arguments were passed as (feed_id, tag_id), swapping the
    # values in the log message.
    logger.debug('{}: db_file: {} tag_id: {} feed_id: {}'
                 .format(function_name, db_file, tag_id, feed_id))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
            """
            SELECT tag_id
            FROM tagged_feeds
            WHERE tag_id = :tag_id AND feed_id = :feed_id
            """
            )
        par = {
            "tag_id": tag_id,
            "feed_id": feed_id
            }
        tag_id = cur.execute(sql, par).fetchone()
        return tag_id
|
|
|
|
|
|
|
|
|
|
|
|
async def delete_feed_id_tag_id(db_file, feed_id, tag_id):
    """
    Remove association between given feed and given tag.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed ID.
    tag_id : str
        Tag ID.
    """
    function_name = sys._getframe().f_code.co_name
    # Fix: arguments were passed as (feed_id, tag_id), swapping the
    # values in the log message.
    logger.debug('{}: db_file: {} tag_id: {} feed_id: {}'
                 .format(function_name, db_file, tag_id, feed_id))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = (
                """
                DELETE
                FROM tagged_feeds
                WHERE tag_id = :tag_id AND feed_id = :feed_id
                """
                )
            par = {
                "tag_id": tag_id,
                "feed_id": feed_id
                }
            cur.execute(sql, par)
|
|
|
|
|
|
|
|
|
|
|
|
async def set_new_tag(db_file, tag):
    """
    Set new Tag

    Parameters
    ----------
    db_file : str
        Path to database file.
    tag : str
        Tag
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} tag: {}'
                 .format(function_name, db_file, tag))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                INSERT
                INTO tags(
                 tag)
                VALUES(
                 :tag)
                """
                )
            parameters = {"tag": tag}
            cursor.execute(sql, parameters)
|
|
|
|
|
|
|
|
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_feed_id_and_name(db_file, url):
    """
    Get Id and Name of feed.
    Check whether a feed exists.
    Query for feeds by given url.

    Parameters
    ----------
    db_file : str
        Path to database file.
    url : str
        URL.

    Returns
    -------
    result : tuple or None
        Row of (id, title) of the feed, or None when no feed matches.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} url: {}'
                 .format(function_name, db_file, url))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT id, title
            FROM feeds_properties
            WHERE url = ?
            """
            )
        row = cursor.execute(sql, (url,)).fetchone()
        return row
|
2023-09-29 13:49:24 +02:00
|
|
|
|
|
|
|
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_number_of_items(db_file, table):
    """
    Return number of entries or feeds.

    Parameters
    ----------
    db_file : str
        Path to database file.
    table : str
        "entries_properties" or "feeds_properties".

    Returns
    -------
    count : int
        Number of rows.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} table: {}'
                 .format(function_name, db_file, table))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        # NOTE(review): the table name is interpolated into the SQL
        # text (identifiers cannot be bound); callers are expected to
        # pass internal constants only — confirm no user input reaches
        # this parameter.
        sql = (
            """
            SELECT count(id)
            FROM {}
            """
            ).format(table)
        count = cursor.execute(sql).fetchone()[0]
        return count
|
|
|
|
|
|
|
|
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_number_of_feeds_active(db_file):
    """
    Return number of active feeds.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    count : int
        Number of rows.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT count(id)
            FROM feeds_preferences
            WHERE enabled = 1
            """
            )
        count = cursor.execute(sql).fetchone()[0]
        return count
|
2023-10-04 14:37:31 +02:00
|
|
|
|
|
|
|
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_number_of_entries_unread(db_file):
    """
    Return number of unread items.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    count : int
        Number of rows.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT count(id)
            FROM entries_state
            WHERE read = 0
            """
            )
        count = cursor.execute(sql).fetchone()[0]
        return count
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-03-08 15:21:22 +01:00
|
|
|
def get_entries(db_file, num):
    """
    Extract information from entries.

    Parameters
    ----------
    db_file : str
        Path to database file.
    num : str, optional
        Number. The default is None.

    Returns
    -------
    result : tuple
        News items.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} num: {}'
                 .format(function_name, db_file, num))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        # Use a qmark placeholder: binding a sequence to a named
        # placeholder ("LIMIT :num") is deprecated since Python 3.12.
        sql = (
            """
            SELECT id, title, link, summary_text, feed_id, published
            FROM entries_properties
            ORDER BY published DESC
            LIMIT ?
            """
            )
        par = (num,)
        result = cur.execute(sql, par).fetchall()
        return result
|
|
|
|
|
|
|
|
|
|
|
|
def get_entries_rejected(db_file, num):
    """
    Extract information from rejected entries.

    Parameters
    ----------
    db_file : str
        Path to database file.
    num : str, optional
        Number. The default is None.

    Returns
    -------
    result : tuple
        News items.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} num: {}'
                 .format(function_name, db_file, num))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        # Use a qmark placeholder: binding a sequence to a named
        # placeholder ("LIMIT :num") is deprecated since Python 3.12.
        sql = (
            """
            SELECT entries_properties.id, title, link, summary_text, feed_id, published
            FROM entries_properties
            INNER JOIN entries_state ON entries_properties.id = entries_state.entry_id
            WHERE entries_state.rejected = 1
            ORDER BY published DESC
            LIMIT ?
            """
            )
        par = (num,)
        result = cur.execute(sql, par).fetchall()
        return result
|
|
|
|
|
|
|
|
|
2024-04-14 14:56:45 +02:00
|
|
|
def get_enclosure_by_entry_id(db_file, entry_id):
    """
    Get enclosure URL of given entry.

    Parameters
    ----------
    db_file : str
        Path to database file.
    entry_id : str
        Entry ID.

    Returns
    -------
    result : tuple or None
        Row with the enclosure URL, or None when none is recorded.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} entry_id: {}'
                 .format(function_name, db_file, entry_id))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        # Fixes: qmark placeholder instead of ":entry_id" bound with a
        # sequence (deprecated since Python 3.12), and single quotes
        # for the string literal — double quotes denote identifiers in
        # SQL; SQLite only tolerates them as strings as a misfeature.
        sql = (
            """
            SELECT url
            FROM entries_properties_links
            WHERE entry_id = ? AND rel = 'enclosure'
            """
            )
        par = (entry_id,)
        result = cur.execute(sql, par).fetchone()
        return result
|
|
|
|
|
|
|
|
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_unread_entries(db_file, num):
    """
    Extract information from unread entries.

    Parameters
    ----------
    db_file : str
        Path to database file.
    num : str, optional
        Number. The default is None.

    Returns
    -------
    result : tuple
        News items.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} num: {}'
                 .format(function_name, db_file, num))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        # Use a qmark placeholder: binding a sequence to a named
        # placeholder ("LIMIT :num") is deprecated since Python 3.12.
        sql = (
            """
            SELECT entries_properties.id, title, link, summary_text, feed_id, published
            FROM entries_properties
            INNER JOIN entries_state ON entries_properties.id = entries_state.entry_id
            WHERE entries_state.read = 0
            ORDER BY published DESC
            LIMIT ?
            """
            )
        par = (num,)
        result = cur.execute(sql, par).fetchall()
        return result
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-02-22 15:09:13 +01:00
|
|
|
def get_feed_id_by_entry_index(db_file, ix):
    """
    Get feed id by entry index.

    Parameters
    ----------
    db_file : str
        Path to database file.
    ix : str
        Index.

    Returns
    -------
    feed_id : tuple or None
        Row with the feed index, or None when the entry is unknown.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} ix: {}'
                 .format(function_name, db_file, ix))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        # Use a qmark placeholder: binding a sequence to a named
        # placeholder (":ix") is deprecated since Python 3.12.
        sql = (
            """
            SELECT feed_id
            FROM entries_properties
            WHERE id = ?
            """
            )
        par = (ix,)
        feed_id = cur.execute(sql, par).fetchone()
        return feed_id
|
|
|
|
|
|
|
|
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_feed_id(db_file, url):
    """
    Get index of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    url : str
        URL.

    Returns
    -------
    feed_id : tuple or None
        Row with the feed index, or None when no feed matches.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} url: {}'
                 .format(function_name, db_file, url))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        # Use a qmark placeholder: binding a sequence to a named
        # placeholder (":url") is deprecated since Python 3.12.
        sql = (
            """
            SELECT id
            FROM feeds_properties
            WHERE url = ?
            """
            )
        par = (url,)
        feed_id = cur.execute(sql, par).fetchone()
        return feed_id
|
2024-01-07 10:57:54 +01:00
|
|
|
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
def is_entry_archived(cur, ix):
    """
    Check whether a given entry is archived.

    Parameters
    ----------
    cur : object
        Cursor object.
    ix : str
        Index of entry.

    Returns
    -------
    result : tuple or None
        Row with the entry state ID when archived, otherwise None.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: ix: {}'
                 .format(function_name, ix))
    sql = (
        """
        SELECT id
        FROM entries_state
        WHERE archived = 1 AND entry_id = ?
        """
        )
    row = cur.execute(sql, (ix,)).fetchone()
    return row
|
|
|
|
|
2024-01-04 13:38:22 +01:00
|
|
|
async def mark_entry_as_read(cur, ix):
    """
    Set read status of entry as read.

    Parameters
    ----------
    cur : object
        Cursor object.
    ix : str
        Index of entry.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: ix: {}'
                 .format(function_name, ix))
    sql = (
        """
        UPDATE entries_state
        SET read = 1
        WHERE entry_id = ?
        """
        )
    cur.execute(sql, (ix,))
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-03-26 17:23:22 +01:00
|
|
|
def get_status_information_of_feed(db_file, feed_id):
    """
    Get status information of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.

    Returns
    -------
    result : tuple or None
        State row of the feed, or None when no state is recorded.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT *
            FROM feeds_state
            WHERE feed_id = ?
            """
            )
        # The query yields a state row, not a count.
        row = cursor.execute(sql, (feed_id,)).fetchone()
        return row
|
|
|
|
|
|
|
|
|
|
|
|
def get_unread_entries_of_feed(db_file, feed_id):
    """
    Get entries of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.

    Returns
    -------
    result : tuple
        Unread entry rows of the feed.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT *
            FROM entries_properties
            INNER JOIN entries_state ON entries_properties.id = entries_state.entry_id
            WHERE entries_state.read = 0 AND feed_id = ?
            """
            )
        rows = cursor.execute(sql, (feed_id,)).fetchall()
        return rows
|
2024-03-26 17:23:22 +01:00
|
|
|
|
|
|
|
|
2024-02-13 20:34:37 +01:00
|
|
|
def get_number_of_unread_entries_by_feed(db_file, feed_id):
    """
    Count entries of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.

    Returns
    -------
    count : tuple
        Single-element row with the number of unread entries.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT count(entries_properties.id)
            FROM entries_properties
            INNER JOIN entries_state ON entries_properties.id = entries_state.entry_id
            WHERE entries_state.read = 0 AND feed_id = ?
            """
            )
        count = cursor.execute(sql, (feed_id,)).fetchone()
        return count
|
|
|
|
|
|
|
|
|
2024-02-04 18:08:12 +01:00
|
|
|
async def mark_feed_as_read(db_file, feed_id):
    """
    Set read status of entries of given feed as read.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed ID.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            # Single statement instead of SELECT followed by one
            # UPDATE per row: let SQLite match the feed's entries
            # through a subquery.
            sql = (
                """
                UPDATE entries_state
                SET read = 1
                WHERE entry_id IN (
                 SELECT id
                 FROM entries_properties
                 WHERE feed_id = ?)
                """
                )
            par = (feed_id,)
            cur.execute(sql, par)
|
2023-11-29 16:32:35 +01:00
|
|
|
|
|
|
|
|
2024-01-04 13:38:22 +01:00
|
|
|
async def delete_entry_by_id(db_file, ix):
    """
    Delete entry by Id.

    Parameters
    ----------
    db_file : str
        Path to database file.
    ix : str
        Index.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} ix: {}'
                 .format(function_name, db_file, ix))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            # Use a qmark placeholder: binding a sequence to a named
            # placeholder (":ix") is deprecated since Python 3.12.
            sql = (
                """
                DELETE
                FROM entries_properties
                WHERE id = ?
                """
                )
            par = (ix,)
            cur.execute(sql, par)
|
2024-01-04 13:38:22 +01:00
|
|
|
|
|
|
|
|
|
|
|
async def archive_entry(db_file, ix):
    """
    Insert entry to archive and delete entry.

    Parameters
    ----------
    db_file : str
        Path to database file.
    ix : str
        Index.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} ix: {}'
                 .format(function_name, db_file, ix))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            # Use a qmark placeholder: binding a sequence to a named
            # placeholder (":ix") is deprecated since Python 3.12.
            sql = (
                """
                UPDATE entries_state
                SET archived = 1
                WHERE entry_id = ?
                """
                )
            par = (ix,)
            cur.execute(sql, par)
|
2024-01-09 16:53:19 +01:00
|
|
|
|
2024-01-04 13:38:22 +01:00
|
|
|
|
2024-05-14 21:39:58 +02:00
|
|
|
def get_feed_title(db_file, feed_id):
    """
    Get title of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.

    Returns
    -------
    tuple
        Row containing the feed title, or None when not found.
    """
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(sys._getframe().f_code.co_name, db_file, feed_id))
    query = (
        """
        SELECT title
        FROM feeds_properties
        WHERE id = :feed_id
        """
        )
    with create_connection(db_file) as conn:
        row = conn.cursor().execute(query, (feed_id,)).fetchone()
    return row
|
2024-01-02 19:11:36 +01:00
|
|
|
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
def get_feed_subtitle(db_file, feed_id):
    """
    Get subtitle of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.

    Returns
    -------
    subtitle : tuple
        Row containing the feed subtitle, or None when not found.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
            """
            SELECT subtitle
            FROM feeds_properties
            WHERE id = :feed_id
            """
            )
        par = (feed_id,)
        # FIX: the local was previously named "title" (copy-paste from
        # get_feed_title); renamed to match what is actually selected.
        subtitle = cur.execute(sql, par).fetchone()
        return subtitle
|
|
|
|
|
|
|
|
|
|
|
|
async def set_feed_title(db_file, feed_id, title):
    """
    Set new title for feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Index of feed.
    title : str
        New title.
    """
    # FIX: docstring previously documented a parameter "name" which
    # does not exist; the actual parameter is "title".
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {} title: {}'
                 .format(function_name, db_file, feed_id, title))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = (
                """
                UPDATE feeds_properties
                SET title = :title
                WHERE id = :feed_id
                """
                )
            par = {
                "title": title,
                "feed_id": feed_id
                }
            cur.execute(sql, par)
|
|
|
|
|
|
|
|
|
2024-04-14 14:56:45 +02:00
|
|
|
def get_entry_properties(db_file, ix):
    """
    Get all properties of given entry.

    Parameters
    ----------
    db_file : str
        Path to database file.
    ix : str
        Index.

    Returns
    -------
    tuple
        Full entry row, or None when not found.
    """
    logger.debug('{}: db_file: {} ix: {}'
                 .format(sys._getframe().f_code.co_name, db_file, ix))
    query = (
        """
        SELECT *
        FROM entries_properties
        WHERE id = :ix
        """
        )
    with create_connection(db_file) as conn:
        row = conn.cursor().execute(query, (ix,)).fetchone()
    return row
|
|
|
|
|
|
|
|
|
2024-02-19 21:50:53 +01:00
|
|
|
def get_entry_title(db_file, ix):
    """
    Get title of given entry.

    Parameters
    ----------
    db_file : str
        Path to database file.
    ix : str
        Index.

    Returns
    -------
    tuple
        Row containing the entry title, or None when not found.
    """
    logger.debug('{}: db_file: {} ix: {}'
                 .format(sys._getframe().f_code.co_name, db_file, ix))
    query = (
        """
        SELECT title
        FROM entries_properties
        WHERE id = :ix
        """
        )
    with create_connection(db_file) as conn:
        row = conn.cursor().execute(query, (ix,)).fetchone()
    return row
|
|
|
|
|
|
|
|
|
2024-01-09 16:53:19 +01:00
|
|
|
def get_entry_url(db_file, ix):
    """
    Get link of given entry.

    Parameters
    ----------
    db_file : str
        Path to database file.
    ix : str
        Index.

    Returns
    -------
    tuple
        Row containing the entry link, or None when not found.
    """
    logger.debug('{}: db_file: {} ix: {}'
                 .format(sys._getframe().f_code.co_name, db_file, ix))
    query = (
        """
        SELECT link
        FROM entries_properties
        WHERE id = :ix
        """
        )
    with create_connection(db_file) as conn:
        row = conn.cursor().execute(query, (ix,)).fetchone()
    return row
|
|
|
|
|
|
|
|
|
2024-05-14 21:39:58 +02:00
|
|
|
def get_feed_url(db_file, feed_id):
    """
    Get URL of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.

    Returns
    -------
    tuple
        Row containing the feed URL, or None when not found.
    """
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(sys._getframe().f_code.co_name, db_file, feed_id))
    query = (
        """
        SELECT url
        FROM feeds_properties
        WHERE id = :feed_id
        """
        )
    with create_connection(db_file) as conn:
        row = conn.cursor().execute(query, (feed_id,)).fetchone()
    return row
|
|
|
|
|
|
|
|
|
2024-01-02 19:11:36 +01:00
|
|
|
async def mark_as_read(db_file, ix):
    """
    Mark entry as read; an archived entry is deleted instead.

    Parameters
    ----------
    db_file : str
        Path to database file.
    ix : str
        Index.
    """
    logger.debug('{}: db_file: {} ix: {}'
                 .format(sys._getframe().f_code.co_name, db_file, ix))
    # TODO While `async with DBLOCK` does work well from outside of
    # functions, it would be better practice to place it within the
    # functions.
    # NOTE One DBLOCK acquisition covers both branches, because IDs are
    # exclusive and only one of them can ever apply.
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            if not is_entry_archived(cur, ix):
                await mark_entry_as_read(cur, ix)
            else:
                await delete_entry(cur, ix)
|
|
|
|
|
2024-01-04 13:38:22 +01:00
|
|
|
|
2024-01-02 19:11:36 +01:00
|
|
|
|
2023-11-29 16:32:35 +01:00
|
|
|
async def mark_all_as_read(db_file):
    """
    Set read status of all entries as read.

    Archived entries are deleted outright instead of being kept.

    Parameters
    ----------
    db_file : str
        Path to database file.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = (
                """
                UPDATE entries_state
                SET read = 1
                """
                )
            cur.execute(sql)
            # Collect only the row id of every archived entry.
            # FIX: the previous version selected six columns per row and
            # then passed each whole row to a one-parameter DELETE,
            # which raises sqlite3.ProgrammingError ("Incorrect number
            # of bindings supplied").
            sql = (
                """
                SELECT entries_properties.id
                FROM entries_properties
                INNER JOIN entries_state ON entries_properties.id = entries_state.entry_id
                WHERE entries_state.archived = 1
                """
                )
            ixs = cur.execute(sql).fetchall()
            sql = (
                """
                DELETE
                FROM entries_properties
                WHERE id = ?
                """
                )
            # Each ix is a one-element tuple, matching the single "?".
            for ix in ixs: cur.execute(sql, ix)
|
2023-11-29 16:32:35 +01:00
|
|
|
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
async def delete_entry(cur, ix):
    """
    Delete entry.

    Parameters
    ----------
    cur : object
        Cursor object.
    ix : str
        Index of entry.
    """
    # FIX: docstring previously documented "db_file" although this
    # function receives an already-open cursor.
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: ix: {}'
                 .format(function_name, ix))
    sql = (
        """
        DELETE
        FROM entries_properties
        WHERE id = ?
        """
        )
    par = (ix,)
    cur.execute(sql, par)
|
2023-11-15 15:00:49 +01:00
|
|
|
|
|
|
|
|
2023-10-04 14:37:31 +02:00
|
|
|
async def update_statistics(cur):
    """
    Update table statistics.

    Counts feeds, entries and unread entries and writes each total into
    the statistics table, inserting the row when it does not exist yet.

    Parameters
    ----------
    cur : object
        Cursor object.
    """
    logger.debug('{}'.format(sys._getframe().f_code.co_name))
    totals = {
        "feeds": get_number_of_items(cur, 'feeds_properties'),
        "entries": get_number_of_items(cur, 'entries_properties'),
        "unread": get_number_of_entries_unread(cur=cur),
        }
    for key in totals:
        existing = cur.execute(
            "SELECT id "
            "FROM statistics "
            "WHERE title = ?",
            (key,)).fetchone()
        if existing:
            cur.execute(
                "UPDATE statistics "
                "SET number = :num "
                "WHERE title = :title",
                {"title": key, "num": totals[key]})
        else:
            # Manually derive the next row id from the current count.
            count = cur.execute(
                "SELECT count(id) "
                "FROM statistics").fetchone()[0]
            cur.execute(
                "INSERT INTO statistics "
                "VALUES(?,?,?)",
                (count + 1, key, totals[key]))
|
2023-10-04 14:37:31 +02:00
|
|
|
|
|
|
|
|
2024-02-07 23:24:59 +01:00
|
|
|
async def set_enabled_status(db_file, feed_id, status):
    """
    Set status of feed to enabled or not enabled (i.e. disabled).

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Index of feed.
    status : int
        0 or 1.
    """
    logger.debug('{}: db_file: {} feed_id: {} status: {}'
                 .format(sys._getframe().f_code.co_name, db_file, feed_id,
                         status))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            query = (
                """
                UPDATE feeds_preferences
                SET enabled = :status
                WHERE feed_id = :feed_id
                """
                )
            values = {
                "status": status,
                "feed_id": feed_id
                }
            conn.cursor().execute(query, values)
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-01-06 23:03:08 +01:00
|
|
|
"""
|
|
|
|
TODO
|
|
|
|
|
|
|
|
Investigate what causes date to be int 0
|
|
|
|
|
|
|
|
NOTE
|
|
|
|
|
|
|
|
When time functions of slixfeed.timedate
|
|
|
|
were async, there were errors of coroutines
|
|
|
|
|
|
|
|
"""
|
2024-03-03 15:13:01 +01:00
|
|
|
async def add_entry(db_file, title, link, entry_id, feed_id, date,
                    read_status):
    """
    Add a new entry row into the entries table.

    Parameters
    ----------
    db_file : str
        Path to database file.
    title : str
        Title.
    link : str
        Link.
    entry_id : str
        Entry index.
    feed_id : str
        Feed Id.
    date : str
        Date.
    read_status : str
        0 or 1.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} title: {} link: {} entry_id: {} feed_id: {} date: {} read_status: {}'
                 .format(function_name, db_file, title, link, entry_id, feed_id, date, read_status))
    query = (
        """
        INSERT
        INTO entries(
            title, link, entry_id, feed_id, published, read)
        VALUES(
            :title, :link, :entry_id, :feed_id, :published, :read)
        """
        )
    values = {
        "title": title,
        "link": link,
        "entry_id": entry_id,
        "feed_id": feed_id,
        "published": date,
        "read": read_status
        }
    async with DBLOCK:
        with create_connection(db_file) as conn:
            conn.cursor().execute(query, values)
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
async def add_entries_and_update_feed_state(db_file, feed_id, new_entries):
    """
    Add new entries and update feed state.

    Inserts each entry into entries_properties, creates a matching
    entries_state row, stores the per-entry authors, contributors,
    contents, tags and links, and finally stamps the feed as renewed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.
    new_entries : tuple
        Set of entries as dict.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            for new_entry in new_entries:
                logger.debug('{}: db_file: {} feed_id: {}'
                             .format(function_name, db_file, feed_id))
                sql = (
                    """
                    INSERT
                    INTO entries_properties(
                        feed_id, identifier, link, title, title_type, summary_text, summary_lang, summary_type, summary_base, category, comments, published, updated)
                    VALUES(
                        :feed_id, :identifier, :link, :title, :title_type, :summary_text, :summary_lang, :summary_type, :summary_base, :category, :comments, :published, :updated)
                    """
                    )
                entry_properties = new_entry['entry_properties']
                par = {
                    "feed_id": feed_id,
                    "identifier": entry_properties["identifier"],
                    "link": entry_properties["link"],
                    "title": entry_properties["title"],
                    "title_type": entry_properties["title_type"],
                    "summary_text": entry_properties['summary_text'],
                    "summary_lang": entry_properties['summary_lang'],
                    "summary_type": entry_properties['summary_type'],
                    "summary_base": entry_properties['summary_base'],
                    "category": entry_properties['category'],
                    "comments": entry_properties["comments"],
                    "published": entry_properties["published"],
                    "updated": entry_properties["updated"],
                    }
                cur.execute(sql, par)
                # Row id of the entry just inserted; foreign key for all
                # of the per-entry detail tables below.
                entry_id = cur.lastrowid
                sql = (
                    """
                    INSERT
                    INTO entries_state(
                        entry_id)
                    VALUES(
                        :entry_id)
                    """
                    )
                par = {
                    "entry_id": entry_id,
                    }
                cur.execute(sql, par)
                entry_authors = new_entry['entry_authors']
                for entry_author in entry_authors:
                    sql = (
                        """
                        INSERT
                        INTO entries_properties_authors(
                            entry_id, name, url, email)
                        VALUES(
                            :entry_id, :name, :url, :email)
                        """
                        )
                    par = {
                        "entry_id": entry_id,
                        "name": entry_author['name'],
                        "url": entry_author['url'],
                        "email": entry_author['email'],
                        }
                    cur.execute(sql, par)
                entry_contributors = new_entry['entry_contributors']
                for entry_contributor in entry_contributors:
                    sql = (
                        """
                        INSERT
                        INTO entries_properties_contributors(
                            entry_id, name, url, email)
                        VALUES(
                            :entry_id, :name, :url, :email)
                        """
                        )
                    par = {
                        "entry_id": entry_id,
                        "name": entry_contributor['name'],
                        "url": entry_contributor['url'],
                        "email": entry_contributor['email'],
                        }
                    cur.execute(sql, par)
                entry_contents = new_entry['entry_contents']
                for entry_content in entry_contents:
                    sql = (
                        """
                        INSERT
                        INTO entries_properties_contents(
                            entry_id, text, type, base, lang)
                        VALUES(
                            :entry_id, :text, :type, :base, :lang)
                        """
                        )
                    par = {
                        "entry_id": entry_id,
                        "text": entry_content['text'],
                        "type": entry_content['type'],
                        "base": entry_content['base'],
                        "lang": entry_content['lang'],
                        }
                    cur.execute(sql, par)
                # FIX: these locals were previously named entry_links /
                # entry_link although they iterate over the tags.
                entry_tags = new_entry['entry_tags']
                for entry_tag in entry_tags:
                    sql = (
                        """
                        INSERT
                        INTO entries_properties_tags(
                            entry_id, term, scheme, label)
                        VALUES(
                            :entry_id, :term, :scheme, :label)
                        """
                        )
                    par = {
                        "entry_id": entry_id,
                        "term": entry_tag['term'],
                        "scheme": entry_tag['scheme'],
                        "label": entry_tag['label'],
                        }
                    cur.execute(sql, par)
                entry_links = new_entry['entry_links']
                for entry_link in entry_links:
                    sql = (
                        """
                        INSERT
                        INTO entries_properties_links(
                            entry_id, url, type, rel, size)
                        VALUES(
                            :entry_id, :url, :type, :rel, :size)
                        """
                        )
                    par = {
                        "entry_id": entry_id,
                        "url": entry_link['url'],
                        "rel": entry_link['rel'],
                        "type": entry_link['type'],
                        "size": entry_link['length'],
                        }
                    cur.execute(sql, par)
            sql = (
                """
                UPDATE feeds_state
                SET renewed = :renewed
                WHERE feed_id = :feed_id
                """
                )
            par = {
                "renewed": time.time(),
                "feed_id": feed_id
                }
            cur.execute(sql, par)
|
2024-01-10 21:06:56 +01:00
|
|
|
|
|
|
|
|
2024-01-14 22:43:23 +01:00
|
|
|
async def set_date(db_file, feed_id):
    """
    Set renewed date of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.
    """
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(sys._getframe().f_code.co_name, db_file, feed_id))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            query = (
                """
                UPDATE feeds_state
                SET renewed = :renewed
                WHERE feed_id = :feed_id
                """
                )
            values = {
                "renewed": time.time(),
                "feed_id": feed_id
                }
            conn.cursor().execute(query, values)
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
async def update_feed_identifier(db_file, feed_id, identifier):
    """
    Set identifier of given feed.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.
    identifier : str
        Identifier.
    """
    logger.debug('{}: db_file: {} feed_id: {} identifier: {}'
                 .format(sys._getframe().f_code.co_name, db_file, feed_id,
                         identifier))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            query = (
                """
                UPDATE feeds_properties
                SET identifier = :identifier
                WHERE id = :feed_id
                """
                )
            values = {
                "identifier": identifier,
                "feed_id": feed_id
                }
            conn.cursor().execute(query, values)
|
|
|
|
|
|
|
|
|
2024-01-14 22:43:23 +01:00
|
|
|
async def update_feed_status(db_file, feed_id, status_code):
    """
    Set status_code of feed_id in table status.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.
    status_code : str
        Status ID or message.
    """
    # FIX: docstring previously documented "url" and "status"; the
    # actual parameters are "feed_id" and "status_code".
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {} status_code: {}'
                 .format(function_name, db_file, feed_id, status_code))
    if status_code != 200:
        # FIX: was a bare print(); route non-200 results through the
        # module logger instead of stdout.
        logger.warning('{}: db_file: {} feed_id: {} status_code: {}'
                       .format(function_name, db_file, feed_id, status_code))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = (
                """
                UPDATE feeds_state
                SET status_code = :status_code, scanned = :scanned
                WHERE feed_id = :feed_id
                """
                )
            par = {
                "status_code": status_code,
                "scanned": time.time(),
                "feed_id": feed_id
                }
            cur.execute(sql, par)
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-01-14 22:43:23 +01:00
|
|
|
async def update_feed_validity(db_file, feed_id, valid):
    """
    Set validity status of feed_id in table status.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.
    valid : boolean
        0 or 1.
    """
    # FIX: docstring previously documented "url"; the actual parameter
    # is "feed_id".
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {} valid: {}'
                 .format(function_name, db_file, feed_id, valid))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = (
                """
                UPDATE feeds_state
                SET valid = :valid
                WHERE feed_id = :feed_id
                """
                )
            par = {
                "valid": valid,
                "feed_id": feed_id
                }
            cur.execute(sql, par)
|
2023-07-16 17:23:44 +02:00
|
|
|
|
2023-12-01 14:22:03 +01:00
|
|
|
|
2024-04-05 17:25:04 +02:00
|
|
|
async def update_feed_properties(db_file, feed_id, feed_properties):
    """
    Update properties of given feed in table feeds_properties.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.
    feed_properties : dict
        Feed properties.
    """
    # FIX: docstring previously documented a stale "url" parameter; the
    # actual parameters are "feed_id" and "feed_properties".
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {} feed_properties: {}'
                 .format(function_name, db_file, feed_id, feed_properties))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = (
                """
                UPDATE feeds_properties
                SET version = :version, encoding = :encoding,
                    language = :language, rating = :rating,
                    entries = :entries, icon = :icon, image = :image,
                    logo = :logo, ttl = :ttl, updated = :updated
                WHERE id = :feed_id
                """
                )
            par = {
                "version": feed_properties['version'],
                "encoding": feed_properties['encoding'],
                "language": feed_properties['language'],
                "rating": feed_properties['rating'],
                # NOTE The dict key is "entries_count" while the column
                # is named "entries".
                "entries": feed_properties['entries_count'],
                "icon": feed_properties['icon'],
                "image": feed_properties['image'],
                "logo": feed_properties['logo'],
                "ttl": feed_properties['ttl'],
                "updated": feed_properties['updated'],
                "feed_id": feed_id
                }
            cur.execute(sql, par)
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-01-04 13:38:22 +01:00
|
|
|
async def maintain_archive(db_file, limit):
    """
    Maintain list of archived entries equal to specified number of items.

    Parameters
    ----------
    db_file : str
        Path to database file.
    limit : int
        Number of maximum entries to store.  May arrive as a numeric
        string (see FIXME below); it is coerced with int().
    """
    # FIX: docstring previously typed "limit" as str although it is
    # used as an integer bound.
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} limit: {}'
                 .format(function_name, db_file, limit))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cur = conn.cursor()
            sql = (
                """
                SELECT count(id)
                FROM entries_state
                WHERE archived = 1
                """
                )
            count = cur.execute(sql).fetchone()[0]
            # FIXME Upon first time joining to a groupchat and then
            # adding a URL, variable "limit" becomes a string in one of
            # the iterations; int() papers over that until the root
            # cause is found.
            difference = count - int(limit)
            if difference > 0:
                # Drop the oldest archived entries beyond the limit.
                sql = (
                    """
                    DELETE
                    FROM entries_properties
                    WHERE id
                    IN (
                        SELECT entry_id
                        FROM entries_state
                        INNER JOIN entries_properties ON entries_state.entry_id = entries_properties.id
                        WHERE archived = 1
                        ORDER BY published ASC
                        LIMIT :difference
                        )
                    """
                    )
                par = {
                    "difference": difference
                    }
                cur.execute(sql, par)
|
2023-11-13 14:45:10 +01:00
|
|
|
|
|
|
|
|
2024-04-14 14:56:45 +02:00
|
|
|
def get_authors_by_entry_id(db_file, entry_id):
    """
    Get authors of given entry.

    Parameters
    ----------
    db_file : str
        Path to database file.
    entry_id : str
        Entry Id.

    Returns
    -------
    list
        Author rows, sorted by name (descending).
    """
    logger.debug('{} db_file: {} entry_id: {}'
                 .format(sys._getframe().f_code.co_name, db_file, entry_id))
    query = (
        """
        SELECT *
        FROM entries_properties_authors
        WHERE entry_id = :entry_id
        ORDER BY name DESC
        """
        )
    with create_connection(db_file) as conn:
        rows = conn.cursor().execute(query, (entry_id,)).fetchall()
    return rows
|
|
|
|
|
|
|
|
|
|
|
|
def get_contributors_by_entry_id(db_file, entry_id):
    """
    Get contributors of given entry.

    Parameters
    ----------
    db_file : str
        Path to database file.
    entry_id : str
        Entry Id.

    Returns
    -------
    list
        Contributor rows, sorted by name (descending).
    """
    logger.debug('{} db_file: {} entry_id: {}'
                 .format(sys._getframe().f_code.co_name, db_file, entry_id))
    query = (
        """
        SELECT *
        FROM entries_properties_contributors
        WHERE entry_id = :entry_id
        ORDER BY name DESC
        """
        )
    with create_connection(db_file) as conn:
        rows = conn.cursor().execute(query, (entry_id,)).fetchall()
    return rows
|
|
|
|
|
|
|
|
|
|
|
|
def get_links_by_entry_id(db_file, entry_id):
    """
    Get links of given entry.

    Parameters
    ----------
    db_file : str
        Path to database file.
    entry_id : str
        Entry Id.

    Returns
    -------
    list
        Link rows.
    """
    logger.debug('{}: db_file: {} entry_id: {}'
                 .format(sys._getframe().f_code.co_name, db_file, entry_id))
    query = (
        """
        SELECT *
        FROM entries_properties_links
        WHERE entry_id = :entry_id
        """
        )
    with create_connection(db_file) as conn:
        rows = conn.cursor().execute(query, (entry_id,)).fetchall()
    return rows
|
|
|
|
|
|
|
|
|
|
|
|
def get_tags_by_entry_id(db_file, entry_id):
    """
    Get tags of given entry.

    Parameters
    ----------
    db_file : str
        Path to database file.
    entry_id : str
        Entry Id.

    Returns
    -------
    result : tuple
        Tag rows of table entries_properties_tags.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} entry_id: {}'
                 .format(function_name, db_file, entry_id))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
            """
            SELECT *
            FROM entries_properties_tags
            WHERE entry_id = :entry_id
            """
            )
        # Bind by mapping to match the named placeholder; sequence
        # binding to named placeholders is deprecated (error in 3.14).
        par = {"entry_id": entry_id}
        result = cur.execute(sql, par).fetchall()
        return result
|
|
|
|
|
|
|
|
|
|
|
|
def get_contents_by_entry_id(db_file, entry_id):
    """
    Get contents of given entry.

    Parameters
    ----------
    db_file : str
        Path to database file.
    entry_id : str
        Entry Id.

    Returns
    -------
    result : tuple
        Content rows of table entries_properties_contents.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} entry_id: {}'
                 .format(function_name, db_file, entry_id))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
            """
            SELECT *
            FROM entries_properties_contents
            WHERE entry_id = :entry_id
            """
            )
        # Bind by mapping to match the named placeholder; sequence
        # binding to named placeholders is deprecated (error in 3.14).
        par = {"entry_id": entry_id}
        result = cur.execute(sql, par).fetchall()
        return result
|
|
|
|
|
|
|
|
|
2023-11-13 14:45:10 +01:00
|
|
|
# TODO Move entries that don't exist into table archive.
|
|
|
|
# NOTE Entries that are read from archive are deleted.
|
|
|
|
# NOTE Unlike entries from table entries, entries from
|
|
|
|
# table archive are not marked as read.
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_entries_of_feed(db_file, feed_id):
    """
    Fetch every entry that belongs to a given feed, newest first.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.

    Returns
    -------
    tuple
        Rows of (id, title, link, identifier, published).
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{} db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT id, title, link, identifier, published
            FROM entries_properties
            WHERE feed_id = ?
            ORDER BY published DESC
            """
            )
        return cursor.execute(sql, (feed_id,)).fetchall()
|
2023-11-13 14:45:10 +01:00
|
|
|
|
|
|
|
|
2024-01-02 14:19:27 +01:00
|
|
|
# TODO What is this function for? 2024-01-02
|
2024-03-08 10:14:36 +01:00
|
|
|
# def get_feeds(db_file):
|
2024-01-02 14:19:27 +01:00
|
|
|
# """
|
|
|
|
# Query table feeds for Title, URL, Categories, Tags.
|
|
|
|
|
|
|
|
# Parameters
|
|
|
|
# ----------
|
|
|
|
# db_file : str
|
|
|
|
# Path to database file.
|
|
|
|
|
|
|
|
# Returns
|
|
|
|
# -------
|
2024-02-29 18:08:53 +01:00
|
|
|
# result : tuple
|
2024-01-02 14:19:27 +01:00
|
|
|
# Title, URL, Categories, Tags of feeds.
|
|
|
|
# """
|
|
|
|
# with create_connection(db_file) as conn:
|
|
|
|
# cur = conn.cursor()
|
|
|
|
# sql = (
|
|
|
|
# "SELECT name, address, type, categories, tags "
|
|
|
|
# "FROM feeds"
|
|
|
|
# )
|
|
|
|
# result = cur.execute(sql).fetchall()
|
|
|
|
# return result
|
2023-07-16 17:23:44 +02:00
|
|
|
|
2024-01-06 23:03:08 +01:00
|
|
|
# TODO select by "feed_id" (of table "status") from
|
|
|
|
# "feed" urls that are enabled in table "status"
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_feeds_url(db_file):
    """
    Query table feeds for URLs.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    tuple
        One-column rows holding feed URLs.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{} db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT url
            FROM feeds_properties
            """
            )
        return cursor.execute(sql).fetchall()
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-02-29 18:08:53 +01:00
|
|
|
def get_feeds_by_enabled_state(db_file, enabled_state):
    """
    Query table feeds filtered by enabled state.

    Parameters
    ----------
    db_file : str
        Path to database file.
    enabled_state : boolean
        False or True.

    Returns
    -------
    tuple
        Matching rows of table feeds_properties.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} enabled_state: {}'
                 .format(function_name, db_file, enabled_state))
    # Normalize the flag to the 0/1 integers stored in the table.
    flag = 1 if enabled_state else 0
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT feeds_properties.*
            FROM feeds_properties
            INNER JOIN feeds_preferences ON feeds_properties.id = feeds_preferences.feed_id
            WHERE feeds_preferences.enabled = ?
            """
            )
        return cursor.execute(sql, (flag,)).fetchall()
|
|
|
|
|
|
|
|
|
2024-03-26 17:23:22 +01:00
|
|
|
def get_feeds_and_enabled_state(db_file):
    """
    Select table feeds joined with column enabled, sorted by title.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    tuple
        Rows of feeds_properties.* plus feeds_preferences.enabled.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT feeds_properties.*, feeds_preferences.enabled
            FROM feeds_properties
            INNER JOIN feeds_preferences ON feeds_properties.id = feeds_preferences.feed_id
            ORDER BY feeds_properties.title ASC
            """
            )
        return cursor.execute(sql).fetchall()
|
|
|
|
|
|
|
|
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_active_feeds_url(db_file):
    """
    Query table feeds for URLs of enabled feeds only.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    tuple
        One-column rows holding URLs of enabled feeds.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT feeds_properties.url
            FROM feeds_properties
            INNER JOIN feeds_preferences ON feeds_properties.id = feeds_preferences.feed_id
            WHERE feeds_preferences.enabled = 1
            """
            )
        return cursor.execute(sql).fetchall()
|
|
|
|
|
|
|
|
|
2024-02-29 18:08:53 +01:00
|
|
|
def get_tags(db_file):
    """
    Query table tags and list items, sorted alphabetically.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    tuple
        Rows of (tag, id).
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT tag, id
            FROM tags
            ORDER BY tag
            """
            )
        return cursor.execute(sql).fetchall()
|
|
|
|
|
|
|
|
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_feeds(db_file):
    """
    Query table feeds and list items, sorted by title.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    tuple
        Rows of (id, title, url).
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    # TODO
    # 1) Select id from table feeds
    #    Select name, url (feeds) updated, enabled, feed_id (status)
    # 2) Sort feeds by id. Sort status by feed_id
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT id, title, url
            FROM feeds_properties
            ORDER BY title
            """
            )
        return cursor.execute(sql).fetchall()
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_last_entries(db_file, num):
    """
    Query the most recent unread entries.

    Parameters
    ----------
    db_file : str
        Path to database file.
    num : str
        Number of entries to return.

    Returns
    -------
    result : tuple
        Rows of (title, link, published), newest first.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} num: {}'
                 .format(function_name, db_file, num))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
            """
            SELECT title, link, published
            FROM entries_properties
            INNER JOIN entries_state ON entries_properties.id = entries_state.entry_id
            WHERE entries_state.read = 0
            ORDER BY published DESC
            LIMIT :num
            """
            )
        # SQL uses the named placeholder :num, so bind by mapping;
        # binding a sequence to named placeholders is deprecated and
        # becomes an error in Python 3.14.
        par = {"num": num}
        result = cur.execute(sql, par).fetchall()
        return result
|
2023-09-29 13:49:24 +02:00
|
|
|
|
|
|
|
|
2024-02-10 18:53:53 +01:00
|
|
|
def search_feeds(db_file, query):
    """
    Query feeds by keyword over title and URL.

    Parameters
    ----------
    db_file : str
        Path to database file.
    query : str
        Search query.

    Returns
    -------
    tuple
        Up to 50 rows of (title, id, url) matching the query.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} query: {}'
                 .format(function_name, db_file, query))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT title, id, url
            FROM feeds_properties
            WHERE title LIKE ?
            OR url LIKE ?
            LIMIT 50
            """
            )
        pattern = f'%{query}%'
        return cursor.execute(sql, [pattern, pattern]).fetchall()
|
2023-07-16 17:23:44 +02:00
|
|
|
|
|
|
|
|
|
|
|
async def search_entries(db_file, query):
    """
    Query entries by keyword over title.

    Parameters
    ----------
    db_file : str
        Path to database file.
    query : str
        Search query.

    Returns
    -------
    result : tuple
        Up to 50 rows of (title, link) matching the query.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} query: {}'
                 .format(function_name, db_file, query))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
            """
            SELECT title, link
            FROM entries_properties
            WHERE title LIKE ?
            LIMIT 50
            """
            )
        # FIX The SQL has a single placeholder; supplying two bindings
        # raised sqlite3.ProgrammingError ("Incorrect number of
        # bindings") on every call.
        par = (f'%{query}%',)
        result = cur.execute(sql, par).fetchall()
        return result
|
2023-09-29 13:49:24 +02:00
|
|
|
|
2024-01-07 10:57:54 +01:00
|
|
|
|
2023-11-22 12:47:34 +01:00
|
|
|
"""
|
2023-12-01 14:22:03 +01:00
|
|
|
FIXME
|
|
|
|
|
|
|
|
Error due to missing date, but it appears that date is present:
|
2023-11-23 17:55:36 +01:00
|
|
|
ERROR DATE: source = https://blog.heckel.io/feed/
|
|
|
|
ERROR DATE: date = 2008-05-13T13:51:50+00:00
|
|
|
|
ERROR DATE: result = https://blog.heckel.io/feed/
|
2023-12-01 14:22:03 +01:00
|
|
|
|
|
|
|
19:32:05 ERROR DATE: source = https://mwl.io/feed
|
|
|
|
19:32:05 ERROR DATE: date = 2023-11-30T10:56:39+00:00
|
|
|
|
19:32:05 ERROR DATE: result = https://mwl.io/feed
|
|
|
|
19:32:05 ERROR DATE: source = https://mwl.io/feed
|
|
|
|
19:32:05 ERROR DATE: date = 2023-11-22T16:59:08+00:00
|
|
|
|
19:32:05 ERROR DATE: result = https://mwl.io/feed
|
|
|
|
19:32:06 ERROR DATE: source = https://mwl.io/feed
|
|
|
|
19:32:06 ERROR DATE: date = 2023-11-16T10:33:57+00:00
|
|
|
|
19:32:06 ERROR DATE: result = https://mwl.io/feed
|
|
|
|
19:32:06 ERROR DATE: source = https://mwl.io/feed
|
|
|
|
19:32:06 ERROR DATE: date = 2023-11-09T07:37:57+00:00
|
|
|
|
19:32:06 ERROR DATE: result = https://mwl.io/feed
|
|
|
|
|
2023-11-22 12:47:34 +01:00
|
|
|
"""
|
2024-04-05 17:25:04 +02:00
|
|
|
def check_entry_exist(db_file, feed_id, identifier=None, title=None, link=None,
                      published=None):
    """
    Check whether an entry exists.
    If entry has an ID, check by ID.
    If entry has timestamp (published), check by title, link and date.
    Otherwise, check by title and link.

    Parameters
    ----------
    db_file : str
        Path to database file.
    feed_id : str
        Feed Id.
    identifier : str, optional
        Entry ID. The default is None.
    title : str, optional
        Entry title. The default is None.
    link : str, optional
        Entry URL. The default is None.
    published : str, optional
        Entry Timestamp. The default is None.

    Returns
    -------
    bool
        True or False.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} feed_id: {}'
                 .format(function_name, db_file, feed_id))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        exist = False
        if identifier:
            sql = (
                """
                SELECT id
                FROM entries_properties
                WHERE identifier = :identifier and feed_id = :feed_id
                """
                )
            par = {
                "identifier": identifier,
                "feed_id": feed_id
                }
            result = cur.execute(sql, par).fetchone()
            if result: exist = True
        elif published:
            sql = (
                """
                SELECT id
                FROM entries_properties
                WHERE title = :title AND link = :link AND published = :date
                """
                )
            par = {
                "title": title,
                "link": link,
                "date": published
                }
            try:
                result = cur.execute(sql, par).fetchone()
                if result: exist = True
            # FIX Formerly a bare "except:" which also swallowed
            # KeyboardInterrupt/SystemExit; narrowed to Exception.
            # The log values are formatted instead of concatenated,
            # because "str + non-str" raised TypeError for a numeric
            # feed_id and masked the original error.
            except Exception:
                logger.error('source = {}'.format(feed_id))
                logger.error('published = {}'.format(published))
        else:
            sql = (
                """
                SELECT id
                FROM entries_properties
                WHERE title = :title AND link = :link
                """
                )
            par = {
                "title": title,
                "link": link
                }
            result = cur.execute(sql, par).fetchone()
            if result: exist = True
        return exist
|
2023-11-02 06:14:01 +01:00
|
|
|
|
|
|
|
|
2024-02-16 13:12:06 +01:00
|
|
|
async def set_setting_value(db_file, key_value):
    """
    Insert a key/value pair into table settings.

    Parameters
    ----------
    db_file : str
        Path to database file.
    key_value : list
        key : str
            enabled, interval, masters, quantum, random.
        value : int
            Numeric value.
    """
    key, val = key_value[0], key_value[1]
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} key: {} val: {}'
                 .format(function_name, db_file, key, val))
    # NOTE With TOML (unlike the former INI file) integer values stay
    # integers, so a falsy check like "if not val" would wrongly reject
    # a legitimate value of 0; no such validation is performed here.
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                INSERT
                INTO settings(
                    key, value)
                VALUES(
                    :key, :val)
                """
                )
            cursor.execute(sql, {"key": key, "val": val})
|
|
|
|
|
|
|
|
|
2024-02-16 13:12:06 +01:00
|
|
|
async def update_setting_value(db_file, key_value):
    """
    Update an existing value in table settings.

    Parameters
    ----------
    db_file : str
        Path to database file.
    key_value : list
        key : str
            enabled, interval, masters, quantum, random.
        value : int
            Numeric value.
    """
    key, val = key_value[0], key_value[1]
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} key: {} val: {}'
                 .format(function_name, db_file, key, val))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                UPDATE settings
                SET value = :val
                WHERE key = :key
                """
                )
            cursor.execute(sql, {"key": key, "val": val})
|
2023-11-02 06:14:01 +01:00
|
|
|
|
|
|
|
|
2024-02-16 13:12:06 +01:00
|
|
|
async def delete_filter(db_file, key):
    """
    Delete the row of given key from table filters.

    Parameters
    ----------
    db_file : str
        Path to database file.
    key : str
        Filter key to remove.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} key: {}'
                 .format(function_name, db_file, key))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                DELETE
                FROM filters
                WHERE key = ?
                """
                )
            cursor.execute(sql, (key,))
|
|
|
|
|
|
|
|
|
|
|
|
async def delete_setting(db_file, key):
    """
    Delete the row of given key from table settings.

    Parameters
    ----------
    db_file : str
        Path to database file.
    key : str
        Setting key to remove.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} key: {}'
                 .format(function_name, db_file, key))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                DELETE
                FROM settings
                WHERE key = ?
                """
                )
            cursor.execute(sql, (key,))
|
|
|
|
|
|
|
|
|
2024-02-11 22:31:31 +01:00
|
|
|
async def delete_settings(db_file):
    """
    Delete all rows from table settings.

    Parameters
    ----------
    db_file : str
        Path to database file.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                DELETE
                FROM settings
                """
                )
            cursor.execute(sql)
|
|
|
|
|
2024-02-16 13:12:06 +01:00
|
|
|
|
|
|
|
def get_setting_value(db_file, key):
    """
    Get settings value.

    Parameters
    ----------
    db_file : str
        Path to database file.
    key : str
        Key: archive, enabled, filter-allow, filter-deny,
        interval, length, old, quantum, random.

    Returns
    -------
    tuple or None
        Single-column row holding the value, or None when the key is
        absent.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} key: {}'
                 .format(function_name, db_file, key))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT value
            FROM settings
            WHERE key = ?
            """
            )
        return cursor.execute(sql, (key,)).fetchone()
|
|
|
|
|
2023-12-11 10:04:45 +01:00
|
|
|
|
2024-02-16 13:12:06 +01:00
|
|
|
def is_setting_key(db_file, key):
    """
    Check whether setting key exists.

    Parameters
    ----------
    db_file : str
        Path to database file.
    key : str
        Key: allow, deny.

    Returns
    -------
    tuple or None
        Row holding the key when present, otherwise None.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} key: {}'
                 .format(function_name, db_file, key))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT key
            FROM settings
            WHERE key = ?
            """
            )
        return cursor.execute(sql, (key,)).fetchone()
|
|
|
|
|
|
|
|
|
|
|
|
async def set_filter_value(db_file, key_value):
    """
    Insert a key/value pair into table filters.

    Parameters
    ----------
    db_file : str
        Path to database file.
    key_value : list
        key : str
            allow, deny, replace.
        value : int
            Numeric value.
    """
    key, val = key_value[0], key_value[1]
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} key: {} val: {}'
                 .format(function_name, db_file, key, val))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                INSERT
                INTO filters(
                    key, value)
                VALUES(
                    :key, :val)
                """
                )
            cursor.execute(sql, {"key": key, "val": val})
|
|
|
|
|
|
|
|
|
2024-02-16 13:12:06 +01:00
|
|
|
async def update_filter_value(db_file, key_value):
    """
    Update an existing value in table filters.

    Parameters
    ----------
    db_file : str
        Path to database file.
    key_value : list
        key : str
            allow, deny, replace.
        value : int
            Numeric value.
    """
    key, val = key_value[0], key_value[1]
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} key: {} val: {}'
                 .format(function_name, db_file, key, val))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                UPDATE filters
                SET value = :value
                WHERE key = :key
                """
                )
            cursor.execute(sql, {"key": key, "value": val})
|
2023-12-11 10:04:45 +01:00
|
|
|
|
|
|
|
|
2024-02-16 13:12:06 +01:00
|
|
|
def is_filter_key(db_file, key):
    """
    Check whether filter key exists.

    Parameters
    ----------
    db_file : str
        Path to database file.
    key : str
        Key: allow, deny.

    Returns
    -------
    tuple or None
        Row holding the key when present, otherwise None.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} key: {}'
                 .format(function_name, db_file, key))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT key
            FROM filters
            WHERE key = ?
            """
            )
        return cursor.execute(sql, (key,)).fetchone()
|
2024-02-16 03:59:01 +01:00
|
|
|
|
|
|
|
|
2024-02-16 13:12:06 +01:00
|
|
|
def get_filter_value(db_file, key):
    """
    Get filter value.

    Parameters
    ----------
    db_file : str
        Path to database file.
    key : str
        Key: allow, deny.

    Returns
    -------
    tuple or None
        Single-column row holding the value, or None when the key is
        absent.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} key: {}'
                 .format(function_name, db_file, key))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT value
            FROM filters
            WHERE key = ?
            """
            )
        return cursor.execute(sql, (key,)).fetchone()
|
2024-01-26 12:34:07 +01:00
|
|
|
|
|
|
|
|
|
|
|
async def set_last_update_time(db_file):
    """
    Set value of last_update.

    Insert the initial last_update record (current UNIX time) into the
    status table.  Serialized via DBLOCK against concurrent writers.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    None.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                INSERT
                INTO status(
                    key, value)
                VALUES(
                    :key, :value)
                """
                )
            cursor.execute(sql, {"key": "last_update",
                                 "value": time.time()})
|
2024-01-26 12:34:07 +01:00
|
|
|
|
|
|
|
|
2024-03-08 10:14:36 +01:00
|
|
|
def get_last_update_time(db_file):
    """
    Get value of last_update.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    value : str or None
        Time (UNIX timestamp as string), or None when not set.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        try:
            sql = (
                """
                SELECT value
                FROM status
                WHERE key = "last_update"
                """
                )
            # fetchone() returns None when no row matches; subscripting
            # it raises TypeError, handled below.
            value = cur.execute(sql).fetchone()[0]
            value = str(value)
        except (Error, TypeError):
            # Narrowed from a bare "except:", which also swallowed
            # SystemExit/KeyboardInterrupt.  Error covers a missing
            # status table; TypeError covers an absent last_update row.
            value = None
            logger.debug(
                "No specific value set for key last_update.")
        return value
|
|
|
|
|
|
|
|
|
|
|
|
async def update_last_update_time(db_file):
    """
    Update value of last_update.

    Overwrite the last_update record in the status table with the
    current UNIX time.  Serialized via DBLOCK against concurrent
    writers.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    None.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    async with DBLOCK:
        with create_connection(db_file) as conn:
            cursor = conn.cursor()
            sql = (
                """
                UPDATE status
                SET value = :value
                WHERE key = "last_update"
                """
                )
            cursor.execute(sql, {"value": time.time()})
|
2024-02-19 21:50:53 +01:00
|
|
|
|
|
|
|
########################################
|
|
|
|
|
|
|
|
######### EXPERIMENTAL TABLE ###########
|
|
|
|
|
|
|
|
########################################
|
|
|
|
|
|
|
|
def get_categories(db_file):
    """
    Get list of categories.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    categories : list
        Distinct categories as single-column row tuples, sorted
        ascending.
    """
    # Fix: docstring previously declared db_file as a tuple; it is a
    # path string, consistent with every sibling function here.
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
            """
            SELECT DISTINCT category
            FROM entries
            ORDER BY category ASC
            """
            )
        categories = cur.execute(sql).fetchall()
        return categories
|
|
|
|
|
|
|
|
|
|
|
|
def get_locales(db_file):
    """
    Get list of locales.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    locales : list
        Distinct locales as single-column row tuples, in ascending
        order.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT DISTINCT locale
            FROM entries
            ORDER BY locale ASC
            """
            )
        rows = cursor.execute(sql).fetchall()
    return rows
|
|
|
|
|
|
|
|
|
|
|
|
def get_nations(db_file):
    """
    Get list of nations.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    nations : list
        Distinct nations as single-column row tuples, in ascending
        order.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
            """
            SELECT DISTINCT nation
            FROM entries
            ORDER BY nation ASC
            """
            )
        # Fix: result was bound to a misnamed local "locales"
        # (copy-paste from get_locales); renamed to match the
        # docstring and the function's purpose.
        nations = cur.execute(sql).fetchall()
        return nations
|
|
|
|
|
|
|
|
|
2024-02-29 18:08:53 +01:00
|
|
|
# def get_tags(db_file):
|
|
|
|
# """
|
|
|
|
# Get list of title and urls.
|
2024-02-19 21:50:53 +01:00
|
|
|
|
2024-02-29 18:08:53 +01:00
|
|
|
# Parameters
|
|
|
|
# ----------
|
|
|
|
# db_file : str
|
|
|
|
# Path to database file.
|
2024-02-19 21:50:53 +01:00
|
|
|
|
2024-02-29 18:08:53 +01:00
|
|
|
# Returns
|
|
|
|
# -------
|
|
|
|
# titles_urls : tuple
|
|
|
|
# List of titles and urls.
|
|
|
|
# """
|
|
|
|
# with create_connection(db_file) as conn:
|
|
|
|
# cur = conn.cursor()
|
|
|
|
# sql = (
|
|
|
|
# """
|
|
|
|
# SELECT tags
|
|
|
|
# FROM entries
|
|
|
|
# ORDER BY tags ASC
|
|
|
|
# """
|
|
|
|
# )
|
|
|
|
# titles_urls = cur.execute(sql).fetchall()
|
|
|
|
# return titles_urls
|
2024-02-19 21:50:53 +01:00
|
|
|
|
|
|
|
|
|
|
|
def get_titles_tags_urls(db_file):
    """
    Get list of title and urls.

    Parameters
    ----------
    db_file : str
        Path to database file.

    Returns
    -------
    titles_tags_urls : list
        Up to 800 (title, tags, url) row tuples, sorted by title.
    """
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {}'
                 .format(function_name, db_file))
    with create_connection(db_file) as conn:
        cursor = conn.cursor()
        sql = (
            """
            SELECT title, tags, url
            FROM entries
            ORDER BY title ASC
            LIMIT 800
            """
            )
        rows = cursor.execute(sql).fetchall()
    return rows
|
|
|
|
|
|
|
|
|
|
|
|
def get_titles_tags_urls_by_category(db_file, category):
    """
    Get list of title and urls of given category.

    Parameters
    ----------
    db_file : str
        Path to database file.
    category : str
        Category to filter entries by.

    Returns
    -------
    titles_tags_urls : list
        (title, tags, url) row tuples of the given category, sorted
        by title.
    """
    # Fix: docstring previously omitted the category parameter.
    function_name = sys._getframe().f_code.co_name
    logger.debug('{}: db_file: {} category: {}'
                 .format(function_name, db_file, category))
    with create_connection(db_file) as conn:
        cur = conn.cursor()
        sql = (
            """
            SELECT title, tags, url
            FROM entries
            WHERE category = :category
            ORDER BY title ASC
            """
            )
        par = {
            "category": category
            }
        titles_tags_urls = cur.execute(sql, par).fetchall()
        return titles_tags_urls
|