forked from sch/Slixfeed
Do not fetch past messages upon joining a MUC, so that these past messages are no longer replayed over and over (thank you, roughnecks)
This commit is contained in:
parent
820b37fec3
commit
ad78295f27
5 changed files with 11 additions and 7 deletions
|
@ -1587,7 +1587,7 @@ def get_properties_of_entries(jid_bare, db_file, feed_url, feed_id, feed):
|
|||
url : str, optional
|
||||
URL.
|
||||
"""
|
||||
print('MID', feed_url, jid_bare, 'get_properties_of_entries')
|
||||
# print('MID', feed_url, jid_bare, 'get_properties_of_entries')
|
||||
function_name = sys._getframe().f_code.co_name
|
||||
logger.debug('{}: feed_id: {} url: {}'
|
||||
.format(function_name, feed_id, feed_url))
|
||||
|
|
|
@ -2378,7 +2378,7 @@ async def update_feed_status(db_file, feed_id, status_code):
|
|||
status : str
|
||||
Status ID or message.
|
||||
"""
|
||||
print('MID', feed_id, 'update_feed_status')
|
||||
# print('MID', feed_id, 'update_feed_status')
|
||||
function_name = sys._getframe().f_code.co_name
|
||||
logger.debug('{}: db_file: {} feed_id: {} status_code: {}'
|
||||
.format(function_name, db_file, feed_id, status_code))
|
||||
|
@ -2449,7 +2449,7 @@ async def update_feed_properties(db_file, feed_id, feed_properties):
|
|||
feed_properties : dict
|
||||
Feed properties.
|
||||
"""
|
||||
print('MID', feed_id, 'update_feed_properties')
|
||||
# print('MID', feed_id, 'update_feed_properties')
|
||||
function_name = sys._getframe().f_code.co_name
|
||||
logger.debug('{}: db_file: {} feed_id: {} feed_properties: {}'
|
||||
.format(function_name, db_file, feed_id, feed_properties))
|
||||
|
|
|
@ -342,7 +342,7 @@ async def check_updates(self, jid_bare):
|
|||
jid : str
|
||||
Jabber ID.
|
||||
"""
|
||||
print('Scanning for updates for JID {}'.format(jid_bare))
|
||||
# print('Scanning for updates for JID {}'.format(jid_bare))
|
||||
logging.info('Scanning for updates for JID {}'.format(jid_bare))
|
||||
while True:
|
||||
jid_file = jid_bare.replace('/', '_')
|
||||
|
@ -350,7 +350,7 @@ async def check_updates(self, jid_bare):
|
|||
urls = sqlite.get_active_feeds_url(db_file)
|
||||
for url in urls:
|
||||
url = url[0]
|
||||
print('STA',url)
|
||||
# print('STA',url)
|
||||
|
||||
# # Skip Reddit
|
||||
# if 'reddit.com' in str(url).lower():
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
__version__ = '0.1.64'
|
||||
__version_info__ = (0, 1, 64)
|
||||
__version__ = '0.1.65'
|
||||
__version_info__ = (0, 1, 65)
|
||||
|
|
|
@ -51,6 +51,10 @@ class XmppGroupchat:
|
|||
alias,
|
||||
presence_options = {"pfrom" : jid_from},
|
||||
password=password,
|
||||
maxchars=0,
|
||||
maxstanzas=0,
|
||||
seconds=0,
|
||||
since=0,
|
||||
timeout=30)
|
||||
result = 'joined ' + jid
|
||||
except IqError as e:
|
||||
|
|
Loading…
Reference in a new issue