forked from sch/Slixfeed
Remove a reference to module slixfeed.dt, which does not exist (thank you, lorenzo).
parent 5babb02cf8
commit 22a3921915
5 changed files with 17 additions and 10 deletions
@@ -3,6 +3,8 @@
 
 """
 
+TODO Rename module to console or print
+
 To use this class, first, instantiate Logger with the name of your module
 or class, then call the appropriate logging methods on that instance.
 
@@ -47,3 +49,10 @@ class Logger:
 # def check_difference(function_name, difference):
 #     if difference > 1:
 #         Logger.warning(message)
+
+
+class Message:
+
+
+    def printer(text):
+        print(text, end='\r')
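The new Message.printer ends its output with a carriage return ('\r') rather than a newline, so each call moves the cursor back to the start of the line and the next call overwrites it, giving one self-updating status line instead of scrolling output. A minimal, self-contained usage sketch; the loop and URLs are illustrative only, and the class body simply repeats what the hunk above adds:

    import time

    class Message:

        def printer(text):
            # '\r' returns the cursor to column 0 without advancing a line;
            # note it does not flush line-buffered stdout by itself.
            print(text, end='\r')

    # Each iteration overwrites the previous status line in place.
    for url in ('https://example.org/feed1', 'https://example.org/feed2'):
        Message.printer('Scanning {} ...'.format(url))
        time.sleep(1)  # stand-in for real fetch work
    print()  # end with a newline so the next shell prompt starts clean

Because printer takes no self parameter, it is meant to be called through the class, Message.printer(...), exactly as later hunks do; calling it on an instance would pass the instance as text.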
@@ -32,8 +32,7 @@ from slixfeed.config import Config
 import slixfeed.fetch as fetch
 from slixfeed.log import Logger
 import slixfeed.sqlite as sqlite
-from slixfeed.utilities import DateAndTime, String, Url
-from slixfeed.utilities import Html, MD, String, Utilities
+from slixfeed.utilities import DateAndTime, Html, MD, String, Url, Utilities
 from slixmpp.xmlstream import ET
 import sys
 from urllib.parse import urlsplit
@@ -1333,7 +1332,7 @@ class FeedTask:
 url, entry_identifier, entry)
 # new_entries.append(new_entry)
 new_entries.extend([new_entry])
-print(url)
+print(url, end='\r')
 if new_entries:
 await sqlite.add_entries_and_update_feed_state(db_file, feed_id, new_entries)
 limit = Config.get_setting_value(self.settings, jid_bare, 'archive')
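A side note on the context lines kept in the hunk above: new_entries.extend([new_entry]) and the commented-out new_entries.append(new_entry) do the same thing, since extending a list with a one-element list appends that single element. A tiny self-contained check (the entry dict is a made-up placeholder, not Slixfeed's real entry structure):

    a, b = [], []
    new_entry = {'title': 'Example entry'}  # hypothetical placeholder
    a.extend([new_entry])
    b.append(new_entry)
    assert a == b == [new_entry]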
@@ -1357,7 +1356,6 @@ class FeedTask:
 continue
 if not valid: ixs_invalid[ix] = read_status
 if len(ixs_invalid):
-print('erasing {}/{}'.format(len(ixs_invalid), len(feed.entries)))
 await sqlite.process_invalid_entries(db_file, ixs_invalid)
 # TODO return number of archived entries and add if statement to run archive maintainence function
 await sqlite.maintain_archive(db_file, limit)
@@ -47,7 +47,6 @@ from lxml import etree, html
 import os
 import random
 import slixfeed.config as config
-import slixfeed.dt as dt
 import slixfeed.fetch as fetch
 from slixfeed.log import Logger
 import sys
@@ -282,7 +281,7 @@ class MD:
 file.write('- [{}]({})\n'.format(result[1], result[2]))
 file.write('\n\n* * *\n\nThis list was saved on {} from xmpp:{} using '
 '[Slixfeed](https://slixfeed.woodpeckersnest.space/)\n'
-.format(dt.current_date(), jid))
+.format(DateAndTime.current_date(), jid))
 
 
 def log_to_markdown(timestamp, filename, jid, message):
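These two hunks are the heart of the commit: the import of the non-existent slixfeed.dt module is dropped, and the one call site that used it now goes through DateAndTime.current_date(), a helper that slixfeed.utilities provides (it also appears in the consolidated import in an earlier hunk). The sketch below is only a hypothetical stand-in to illustrate the call-site change; the real helper is not shown in this diff, and the date format here is an assumption:

    from datetime import datetime

    class DateAndTime:

        # Hypothetical stand-in for slixfeed.utilities.DateAndTime.current_date();
        # the actual return format in Slixfeed may differ.
        def current_date():
            return datetime.now().strftime('%Y-%m-%d')

    # Old call site:  .format(dt.current_date(), jid)
    # New call site:  .format(DateAndTime.current_date(), jid)
    print('This list was saved on {} from xmpp:{}'.format(
        DateAndTime.current_date(), 'user@example.org'))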
@@ -18,7 +18,7 @@ FIXME
 """
 from slixfeed.xmpp.bookmark import XmppBookmark
 from slixfeed.xmpp.muc import XmppMuc
-from slixfeed.log import Logger
+from slixfeed.log import Logger, Message
 
 logger = Logger(__name__)
 
@@ -34,8 +34,9 @@ class XmppGroupchat:
 'bookmark {}'.format(bookmark['name']))
 alias = bookmark["nick"]
 muc_jid = bookmark["jid"]
+
+Message.printer('Joining MUC {} ...'.format(muc_jid))
 result = await XmppMuc.join(self, muc_jid, alias)
-print(result)
 if result == 'ban':
 await XmppBookmark.remove(self, muc_jid)
 logger.warning('{} is banned from {}'.format(self.alias, muc_jid))
@@ -57,7 +57,7 @@ class XmppMuc:
 seconds=0,
 since=0,
 timeout=30)
-result = 'joined ' + jid
+result = 'success'
 except IqError as e:
 logger.error('Error XmppIQ')
 logger.error(str(e))
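XmppMuc.join previously built its success result as 'joined ' + jid; it now uses the fixed sentinel 'success', which callers can compare against directly, alongside values such as 'ban' handled in the groupchat hunk above. A hypothetical caller sketch, assuming only the 'success' and 'ban' values visible in this diff (other return values, for example on IQ errors, are not covered):

    from slixfeed.log import Logger, Message
    from slixfeed.xmpp.muc import XmppMuc

    logger = Logger(__name__)

    async def join_and_report(client, muc_jid, alias):
        # client is the bot instance that Slixfeed's own code passes as self.
        Message.printer('Joining MUC {} ...'.format(muc_jid))
        result = await XmppMuc.join(client, muc_jid, alias)
        if result == 'ban':
            logger.warning('{} is banned from {}'.format(alias, muc_jid))
        return result == 'success'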