forked from sch/Slixfeed
Disable redundant print directives.
parent fb4ca2c852
commit 382b5a52be
2 changed files with 20 additions and 37 deletions
@@ -2718,10 +2718,10 @@ def get_invalid_entries(db_file, url, feed):
 # TODO better check and don't repeat code
 if entry.has_key("id") and entry_id:
 if entry.id == entry_id:
-print(url)
-print("compare entry.id == entry_id:", entry.id)
-print("compare entry.id == entry_id:", entry_id)
-print("============")
+# print(url)
+# print("compare entry.id == entry_id:", entry.id)
+# print("compare entry.id == entry_id:", entry_id)
+# print("============")
 # items_valid.append(ix)
 break
 else:
@@ -2737,10 +2737,10 @@ def get_invalid_entries(db_file, url, feed):
 link = url
 # Compare date, link and title
 if entry.has_key("published") and timestamp:
-print(url)
-print("compare published:", title, link, time)
-print("compare published:", entry_title, entry_link, timestamp)
-print("============")
+# print(url)
+# print("compare published:", title, link, time)
+# print("compare published:", entry_title, entry_link, timestamp)
+# print("============")
 time = dt.rfc2822_to_iso8601(entry.published)
 if (entry_title == title and
 entry_link == link and
@@ -2751,17 +2751,17 @@ def get_invalid_entries(db_file, url, feed):
 # Compare link and title
 if (entry_title == title and
 entry_link == link):
-print(url)
-print("compare entry_link == link:", title, link)
-print("compare entry_title == title:", entry_title, entry_link)
-print("============")
+# print(url)
+# print("compare entry_link == link:", title, link)
+# print("compare entry_title == title:", entry_title, entry_link)
+# print("============")
 # items_valid.append(ix)
 break
 # print('invalid entry:')
 # print(entry)
 # TODO better check and don't repeat code
 ixs[ix] = read_status
-print(ixs)
+# print(ixs)
 return ixs


@@ -2790,7 +2790,7 @@ async def process_invalid_entries(db_file, ixs):
 for ix in ixs:
 logger.debug('{}: ix: {}'.format(function_name, ix))
 if ixs[ix] == 1:
-print('index {} ({}) be deleted'.format(ix, ixs[ix]))
+# print('index {} ({}) be deleted'.format(ix, ixs[ix]))
 sql = (
 """
 DELETE
@@ -2799,7 +2799,7 @@ async def process_invalid_entries(db_file, ixs):
 """
 )
 else:
-print('index {} ({}) be archived'.format(ix, ixs[ix]))
+# print('index {} ({}) be archived'.format(ix, ixs[ix]))
 sql = (
 """
 UPDATE entries_state
@@ -2808,12 +2808,8 @@ async def process_invalid_entries(db_file, ixs):
 """
 )
 par = (ix,)
-# cur.execute(sql, par)
-try:
-print('cur')
-cur.execute(sql, par)
-except Exception as e:
-logger.error(e)
+cur.execute(sql, par)

 # TODO Move entries that don't exist into table archive.
 # NOTE Entries that are read from archive are deleted.
@@ -842,7 +842,7 @@ class FeedTask:
 urls = sqlite.get_active_feeds_url(db_file)
 for url in urls:
 url = url[0]
-print('sta : ' + url)
+print('start scan\nurl {}\ndatabase {}'.format(url, db_file))
 # print('STA',url)

 # # Skip Reddit
@@ -855,40 +855,27 @@ class FeedTask:
 status_code = result['status_code']
 feed_id = sqlite.get_feed_id(db_file, url)
 feed_id = feed_id[0]
-print('feed_id')
-print(feed_id)
 if not result['error']:
 await sqlite.update_feed_status(db_file, feed_id, status_code)
 document = result['content']
 feed = parse(document)
 feed_valid = 0 if feed.bozo else 1
-print('feed_valid')
-print(feed_valid)
 await sqlite.update_feed_validity(db_file, feed_id, feed_valid)
 feed_properties = Feed.get_properties_of_feed(
 db_file, feed_id, feed)
-print('feed_properties')
-print(feed_properties)
 await sqlite.update_feed_properties(
 db_file, feed_id, feed_properties)
 new_entries = Feed.get_properties_of_entries(
 jid_bare, db_file, url, feed_id, feed)
-print('new_entries')
-print(new_entries)
-print('if new_entries')
 if new_entries:
-print('if new_entries (YES)')
-print('{}: {} new_entries: {} ({})'.format(jid_bare, len(new_entries), url, feed_id))
 await sqlite.add_entries_and_update_feed_state(db_file, feed_id, new_entries)
 limit = Config.get_setting_value(self.settings, jid_bare, 'archive')
 ixs = sqlite.get_invalid_entries(db_file, url, feed)
 await sqlite.process_invalid_entries(db_file, ixs)
 await sqlite.maintain_archive(db_file, limit)
 # await sqlite.process_invalid_entries(db_file, ixs)
-print('end : ' + url)
-limit2 = Config.get_setting_value(self.settings, jid_bare, 'archive')
-await sqlite.maintain_archive(db_file, limit2)
-# await asyncio.sleep(50)
+print('end scan\nurl {}\ndatabase {}'.format(url, db_file))
+await asyncio.sleep(50)
 val = Config.get_setting_value(self.settings, jid_bare, 'check')
 await asyncio.sleep(60 * float(val))
 # Schedule to call this function again in 90 minutes
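
The hunks above silence ad-hoc print() tracing by commenting it out, while the surrounding code already reports through logger.debug() and logger.error(). As a rough sketch of that existing pattern (not part of this commit; the logger setup and the helper name report_scan are illustrative assumptions), the 'start scan' trace could instead be emitted like this:

    import logging

    # Module-level logger; how the project actually configures logging is not shown in this diff.
    logger = logging.getLogger(__name__)

    def report_scan(url, db_file):
        # Hypothetical helper mirroring the 'start scan' print above;
        # the message is only emitted when DEBUG logging is enabled.
        logger.debug('start scan url %s database %s', url, db_file)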