Fixed adding of URLs via chat (Thank you roughnecks)

This commit is contained in:
Schimon Jehudah 2024-04-05 15:59:24 +00:00
parent 60756dbdd2
commit b6ca4aaa46
4 changed files with 48 additions and 6 deletions

View file

@ -915,9 +915,20 @@ async def add_feed(self, jid_bare, db_file, url, identifier):
else:
    feed_updated = None
entries_count = len(feed.entries)
feed_properties = {
"version" : '',
"encoding" : '',
"language" : '',
"rating" : '',
"entries_count" : entries_count,
"icon" : '',
"image" : '',
"logo" : '',
"ttl" : '',
"updated" : feed_updated,
}
await sqlite.update_feed_properties(db_file, feed_id, await sqlite.update_feed_properties(db_file, feed_id,
entries_count, feed_properties)
feed_updated)
feed_id = sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
new_entries = get_properties_of_entries(

View file

@ -352,6 +352,12 @@ async def check_updates(self, jid_bare):
url = url[0]
print('STA',url)
# # Skip Reddit
# if 'reddit.com' in str(url).lower():
# print('Reddit Atom Syndication feeds are not supported by Slixfeed.')
# print('Skipping URL:', url)
# continue
result = await fetch.http(url)
status_code = result['status_code']
feed_id = sqlite.get_feed_id(db_file, url)
@ -361,6 +367,20 @@ async def check_updates(self, jid_bare):
document = result['content']
feed = parse(document)
# # Skip Librarian
# if 'librarian' in feed.feed.title.lower():
# print('Librarian RSS feeds are not supported by Slixfeed.')
# print('Ask the Librarian developers to migrate to Atom Syndication.')
# print('Skipping URL:', url)
# continue
# # Skip Mastodon
# if 'mastodon' in feed.feed.generator.lower():
# print('Mastodon RSS feeds are not supported by Slixfeed.')
# print('Ask the Mastodon developers to migrate to Atom Syndication.')
# print('Skipping URL:', url)
# continue
feed_valid = 0 if feed.bozo else 1
await sqlite.update_feed_validity(db_file, feed_id, feed_valid)

View file

@ -1,2 +1,2 @@
__version__ = '0.1.53' __version__ = '0.1.54'
__version_info__ = (0, 1, 53) __version_info__ = (0, 1, 54)

View file

@ -352,9 +352,20 @@ async def message(self, message):
else:
    feed_updated = None
entries_count = len(feed.entries)
feed_properties = {
"version" : '',
"encoding" : '',
"language" : '',
"rating" : '',
"entries_count" : entries_count,
"icon" : '',
"image" : '',
"logo" : '',
"ttl" : '',
"updated" : feed_updated,
}
await sqlite.update_feed_properties(db_file, feed_id, await sqlite.update_feed_properties(db_file, feed_id,
entries_count, feed_properties)
feed_updated)
feed_id = sqlite.get_feed_id(db_file, url)
feed_id = feed_id[0]
new_entries = action.get_properties_of_entries(