proper sort order when reading more than one entry at a time
Put entries in the feed sorted by the retrieved date first, then break ties on the published date.
parent 5f1baa228f
commit 127035e328
5 changed files with 23 additions and 22 deletions
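The idea in miniature, as a hedged sketch (standalone Python, not code from this repo): entries fetched in the same pull share a `retrieved` timestamp, so sorting on the tuple `(retrieved, published)` in reverse lets `published` break the tie.

    import datetime

    # Sketch of the tie-breaking sort this commit introduces; plain dicts
    # stand in for the real Entry model.
    now = datetime.datetime.utcnow()
    entries = [
        {'permalink': '/older', 'retrieved': now,
         'published': datetime.datetime(2015, 3, 19)},
        {'permalink': '/newer', 'retrieved': now,
         'published': datetime.datetime(2015, 3, 20)},
    ]
    entries.sort(key=lambda e: (e['retrieved'], e['published']), reverse=True)
    assert [e['permalink'] for e in entries] == ['/newer', '/older']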
@@ -7,4 +7,4 @@ module=woodwind.wsgi
 import=timers
 attach-daemon=rqworker
 attach-daemon=python -m woodwind.websocket_server
-python-auto-reload=true
+py-autoreload=3
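Note: uWSGI's py-autoreload option takes a scan interval in seconds rather than a boolean, so the old `python-auto-reload=true` likely never enabled the reloader; `py-autoreload=3` rescans Python modules every 3 seconds (development use only).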
@@ -83,9 +83,7 @@ $(function(){
  };
  ws.onmessage = function(event) {
    var data = JSON.parse(event.data);
-    data.entries.forEach(function(entryHtml) {
-      $('body main').prepend(entryHtml);
-    });
+    $('body main').prepend(data.entries.join('\n'));
    attachListeners();
  };
 }
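Why one prepend instead of a loop: jQuery's .prepend() inserts at the top of the element, so prepending entries one at a time reverses their order relative to the array. Joining them and prepending once keeps the order the server sent, which now matters because the server sorts before pushing.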
@@ -87,11 +87,11 @@ def process_feed(session, feed):
         check_push_subscription(session, feed, response)
         backfill = len(feed.entries) == 0  # backfill if this is the first pull
         if feed.type == 'xml':
-            result = process_xml_feed_for_new_entries(session, feed,
-                                                      response, backfill)
+            result = process_xml_feed_for_new_entries(
+                session, feed, response, backfill, now)
         elif feed.type == 'html':
-            result = process_html_feed_for_new_entries(session, feed,
-                                                       response, backfill)
+            result = process_html_feed_for_new_entries(
+                session, feed, response, backfill, now)
         else:
             result = []
@@ -121,7 +121,7 @@ def process_feed(session, feed):

         for entry in new_entries:
             for in_reply_to in entry.get_property('in-reply-to', []):
-                fetch_reply_context(entry.id, in_reply_to)
+                fetch_reply_context(entry.id, in_reply_to, now)

     finally:
         feed.last_checked = now
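Note on the new `now` argument: this diff doesn't show where `now` is assigned in process_feed, but the `feed.last_checked = now` line confirms it is already in scope. Capturing one timestamp per pull and threading it through the helpers gives every entry from that pull an identical `retrieved` value, which is what makes the `published` tie-break meaningful. A minimal sketch of the pattern, with a dict standing in for the Entry model:

    import datetime

    def pull_entries(parsed_entries):
        # One timestamp per pull, shared by every entry built from it.
        now = datetime.datetime.utcnow()
        return [{'retrieved': now, 'published': p.get('published')}
                for p in parsed_entries]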
@@ -204,6 +204,9 @@ def notify_feed_updated(session, feed, entries):
     import flask.ext.login as flask_login
     flask_app = create_app()

+    entries = sorted(entries, key=lambda e: (e.retrieved, e.published),
+                     reverse=True)
+
     for user in feed.users:
         with flask_app.test_request_context():
             flask_login.login_user(user, remember=True)
@@ -241,10 +244,9 @@ def is_content_equal(e1, e2):
             and e1.properties == e2.properties)


-def process_xml_feed_for_new_entries(session, feed, response, backfill):
+def process_xml_feed_for_new_entries(session, feed, response, backfill, now):
     logger.debug('fetching xml feed: %s', feed)

-    now = datetime.datetime.utcnow()
     parsed = feedparser.parse(get_response_content(response))
     feed_props = parsed.get('feed', {})
     default_author_url = feed_props.get('author_detail', {}).get('href')
@@ -252,7 +254,9 @@ def process_xml_feed_for_new_entries(session, feed, response, backfill):
     default_author_photo = feed_props.get('logo')

     logger.debug('found {} entries'.format(len(parsed.entries)))
-    for p_entry in parsed.entries:
+
+    # work from the bottom up (oldest first, usually)
+    for p_entry in reversed(parsed.entries):
         logger.debug('processing entry {}'.format(str(p_entry)[:256]))
         permalink = p_entry.get('link')
         uid = p_entry.get('id') or permalink
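Iterating reversed(parsed.entries) walks the feed bottom-up: XML feeds conventionally list newest entries first, so this handles the oldest first and insertion order tracks chronology ("usually", as the new comment hedges, because not every feed follows the convention).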
@@ -309,21 +313,20 @@ def process_xml_feed_for_new_entries(session, feed, response, backfill):
         yield entry


-def process_html_feed_for_new_entries(session, feed, response, backfill):
+def process_html_feed_for_new_entries(session, feed, response, backfill, now):
     doc = get_response_content(response)
     parsed = mf2util.interpret_feed(
         mf2py.parse(url=feed.feed, doc=doc), feed.feed)
     hfeed = parsed.get('entries', [])

     for hentry in hfeed:
-        entry = hentry_to_entry(hentry, feed, backfill)
+        entry = hentry_to_entry(hentry, feed, backfill, now)
         if entry:
             logger.debug('built entry: %s', entry.permalink)
             yield entry


-def hentry_to_entry(hentry, feed, backfill):
-    now = datetime.datetime.utcnow()
+def hentry_to_entry(hentry, feed, backfill, now):
     permalink = url = hentry.get('url')
     uid = hentry.get('uid') or url
     if not uid:
|
||||||
return entry
|
return entry
|
||||||
|
|
||||||
|
|
||||||
def fetch_reply_context(entry_id, in_reply_to):
|
def fetch_reply_context(entry_id, in_reply_to, now):
|
||||||
with session_scope() as session:
|
with session_scope() as session:
|
||||||
entry = session.query(Entry).get(entry_id)
|
entry = session.query(Entry).get(entry_id)
|
||||||
context = session.query(Entry)\
|
context = session.query(Entry)\
|
||||||
|
@@ -378,10 +381,9 @@ def fetch_reply_context(entry_id, in_reply_to):
         if not context:
             logger.info('fetching in-reply-to url: %s', in_reply_to)
             parsed = mf2util.interpret(
-                mf2py.Parser(url=proxy_url(in_reply_to)).to_dict(),
-                in_reply_to)
+                mf2py.parse(url=proxy_url(in_reply_to)), in_reply_to)
             if parsed:
-                context = hentry_to_entry(parsed, in_reply_to, False)
+                context = hentry_to_entry(parsed, in_reply_to, False, now)

             if context:
                 entry.reply_context.append(context)
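The mf2py.Parser(url=...).to_dict() call is replaced by mf2py.parse(url=...), mf2py's module-level convenience wrapper for the same operation; the result should be equivalent, just terser.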
@@ -4,7 +4,7 @@
 {% if ws_topic %}
 <script>var WS_TOPIC = "{{ ws_topic }}";</script>
 {% endif %}
-<script src="{{url_for('static', filename='feed.js', version='2015-03-16')}}"></script>
+<script src="{{url_for('static', filename='feed.js', version='2015-03-20')}}"></script>

 {% if current_user and current_user.settings
    and current_user.settings.get('reply-method') == 'indie-config' %}
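Bumping the version query parameter from 2015-03-16 to 2015-03-20 is a cache-buster: the changed query string makes browsers re-fetch the updated feed.js instead of serving a stale copy.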
@@ -47,7 +47,8 @@ def index():
     else:
         ws_topic = 'user:{}'.format(flask_login.current_user.id)

-    entries = entry_query.order_by(Entry.retrieved.desc())\
+    entries = entry_query.order_by(Entry.retrieved.desc(),
+                                   Entry.published.desc())\
         .offset(offset).limit(per_page).all()

     entries = dedupe_copies(entries)
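With both columns in order_by, SQLAlchemy emits roughly ORDER BY retrieved DESC, published DESC, so the index page applies the same retrieved-then-published ordering as the in-Python sort in notify_feed_updated.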