forked from sch/Slixfeed
Disable redundant print statements.
This commit is contained in:
parent
fb4ca2c852
commit
382b5a52be
2 changed files with 20 additions and 37 deletions
|
@@ -2718,10 +2718,10 @@ def get_invalid_entries(db_file, url, feed):
|
|||
# TODO better check and don't repeat code
|
||||
if entry.has_key("id") and entry_id:
|
||||
if entry.id == entry_id:
|
||||
print(url)
|
||||
print("compare entry.id == entry_id:", entry.id)
|
||||
print("compare entry.id == entry_id:", entry_id)
|
||||
print("============")
|
||||
# print(url)
|
||||
# print("compare entry.id == entry_id:", entry.id)
|
||||
# print("compare entry.id == entry_id:", entry_id)
|
||||
# print("============")
|
||||
# items_valid.append(ix)
|
||||
break
|
||||
else:
|
||||
|
@@ -2737,10 +2737,10 @@ def get_invalid_entries(db_file, url, feed):
|
|||
link = url
|
||||
# Compare date, link and title
|
||||
if entry.has_key("published") and timestamp:
|
||||
print(url)
|
||||
print("compare published:", title, link, time)
|
||||
print("compare published:", entry_title, entry_link, timestamp)
|
||||
print("============")
|
||||
# print(url)
|
||||
# print("compare published:", title, link, time)
|
||||
# print("compare published:", entry_title, entry_link, timestamp)
|
||||
# print("============")
|
||||
time = dt.rfc2822_to_iso8601(entry.published)
|
||||
if (entry_title == title and
|
||||
entry_link == link and
|
||||
|
@@ -2751,17 +2751,17 @@ def get_invalid_entries(db_file, url, feed):
|
|||
# Compare link and title
|
||||
if (entry_title == title and
|
||||
entry_link == link):
|
||||
print(url)
|
||||
print("compare entry_link == link:", title, link)
|
||||
print("compare entry_title == title:", entry_title, entry_link)
|
||||
print("============")
|
||||
# print(url)
|
||||
# print("compare entry_link == link:", title, link)
|
||||
# print("compare entry_title == title:", entry_title, entry_link)
|
||||
# print("============")
|
||||
# items_valid.append(ix)
|
||||
break
|
||||
# print('invalid entry:')
|
||||
# print(entry)
|
||||
# TODO better check and don't repeat code
|
||||
ixs[ix] = read_status
|
||||
print(ixs)
|
||||
# print(ixs)
|
||||
return ixs
|
||||
|
||||
|
||||
|
@@ -2790,7 +2790,7 @@ async def process_invalid_entries(db_file, ixs):
|
|||
for ix in ixs:
|
||||
logger.debug('{}: ix: {}'.format(function_name, ix))
|
||||
if ixs[ix] == 1:
|
||||
print('index {} ({}) be deleted'.format(ix, ixs[ix]))
|
||||
# print('index {} ({}) be deleted'.format(ix, ixs[ix]))
|
||||
sql = (
|
||||
"""
|
||||
DELETE
|
||||
|
@@ -2799,7 +2799,7 @@ async def process_invalid_entries(db_file, ixs):
|
|||
"""
|
||||
)
|
||||
else:
|
||||
print('index {} ({}) be archived'.format(ix, ixs[ix]))
|
||||
# print('index {} ({}) be archived'.format(ix, ixs[ix]))
|
||||
sql = (
|
||||
"""
|
||||
UPDATE entries_state
|
||||
|
@@ -2808,12 +2808,8 @@ async def process_invalid_entries(db_file, ixs):
|
|||
"""
|
||||
)
|
||||
par = (ix,)
|
||||
# cur.execute(sql, par)
|
||||
try:
|
||||
print('cur')
|
||||
cur.execute(sql, par)
|
||||
except Exception as e:
|
||||
logger.error(e)
|
||||
cur.execute(sql, par)
|
||||
|
||||
|
||||
# TODO Move entries that don't exist into table archive.
|
||||
# NOTE Entries that are read from archive are deleted.
|
||||
|
|
|
@@ -842,7 +842,7 @@ class FeedTask:
|
|||
urls = sqlite.get_active_feeds_url(db_file)
|
||||
for url in urls:
|
||||
url = url[0]
|
||||
print('sta : ' + url)
|
||||
print('start scan\nurl {}\ndatabase {}'.format(url, db_file))
|
||||
# print('STA',url)
|
||||
|
||||
# # Skip Reddit
|
||||
|
@@ -855,40 +855,27 @@ class FeedTask:
|
|||
status_code = result['status_code']
|
||||
feed_id = sqlite.get_feed_id(db_file, url)
|
||||
feed_id = feed_id[0]
|
||||
print('feed_id')
|
||||
print(feed_id)
|
||||
if not result['error']:
|
||||
await sqlite.update_feed_status(db_file, feed_id, status_code)
|
||||
document = result['content']
|
||||
feed = parse(document)
|
||||
feed_valid = 0 if feed.bozo else 1
|
||||
print('feed_valid')
|
||||
print(feed_valid)
|
||||
await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
|
||||
feed_properties = Feed.get_properties_of_feed(
|
||||
db_file, feed_id, feed)
|
||||
print('feed_properties')
|
||||
print(feed_properties)
|
||||
await sqlite.update_feed_properties(
|
||||
db_file, feed_id, feed_properties)
|
||||
new_entries = Feed.get_properties_of_entries(
|
||||
jid_bare, db_file, url, feed_id, feed)
|
||||
print('new_entries')
|
||||
print(new_entries)
|
||||
print('if new_entries')
|
||||
if new_entries:
|
||||
print('if new_entries (YES)')
|
||||
print('{}: {} new_entries: {} ({})'.format(jid_bare, len(new_entries), url, feed_id))
|
||||
await sqlite.add_entries_and_update_feed_state(db_file, feed_id, new_entries)
|
||||
limit = Config.get_setting_value(self.settings, jid_bare, 'archive')
|
||||
ixs = sqlite.get_invalid_entries(db_file, url, feed)
|
||||
await sqlite.process_invalid_entries(db_file, ixs)
|
||||
await sqlite.maintain_archive(db_file, limit)
|
||||
# await sqlite.process_invalid_entries(db_file, ixs)
|
||||
print('end : ' + url)
|
||||
limit2 = Config.get_setting_value(self.settings, jid_bare, 'archive')
|
||||
await sqlite.maintain_archive(db_file, limit2)
|
||||
# await asyncio.sleep(50)
|
||||
print('end scan\nurl {}\ndatabase {}'.format(url, db_file))
|
||||
await asyncio.sleep(50)
|
||||
val = Config.get_setting_value(self.settings, jid_bare, 'check')
|
||||
await asyncio.sleep(60 * float(val))
|
||||
# Schedule to call this function again in 90 minutes
|
||||
|
|
Loading…
Reference in a new issue