2016-11-16 00:56:29 +09:00
|
|
|
# frozen_string_literal: true
|
|
|
|
|
2016-03-09 04:16:11 +09:00
|
|
|
class Feed
  # A per-account timeline (e.g. home) backed by a Redis sorted set of
  # status IDs (scored by ID), hydrated into Status records on read.
  #
  # @param type [Symbol, String] timeline type; used to pick the
  #   `Status.as_<type>_timeline` scope and the Redis feed key
  # @param account [Account] owner of the feed
  def initialize(type, account)
    @type    = type
    @account = account
  end

  # Fetches up to +limit+ statuses with IDs strictly between +since_id+
  # and +max_id+, newest first.
  #
  # When the Redis feed is empty and no pagination bounds were given,
  # schedules an async feed regeneration and falls back to querying the
  # database directly.
  #
  # @param limit [Integer] maximum number of statuses to return
  # @param max_id [Integer, nil] exclusive upper bound (nil = unbounded)
  # @param since_id [Integer, nil] exclusive lower bound (nil = unbounded)
  # @return [Array<Status>, ActiveRecord::Relation] the hydrated statuses
  def get(limit, max_id = nil, since_id = nil)
    # Redis range queries accept +/-inf as open bounds; the "(" prefix
    # below makes the given bounds exclusive.
    max_id   = '+inf' if max_id.blank?
    since_id = '-inf' if since_id.blank?

    # Single pass over the [member, score] pairs (was map(&:last).map(&:to_i)).
    unhydrated = redis.zrevrangebyscore(key, "(#{max_id}", "(#{since_id}", limit: [0, limit], with_scores: true).map { |pair| pair.last.to_i }

    # If we're after most recent items and none are there, we need to precompute the feed
    if unhydrated.empty? && max_id == '+inf' && since_id == '-inf'
      RegenerationWorker.perform_async(@account.id, @type)
      @statuses = Status.send("as_#{@type}_timeline", @account).cache_ids.paginate_by_max_id(limit, nil, nil)
    else
      # Hydrate the IDs from the database, preserving the Redis ordering
      # and dropping IDs whose statuses no longer exist.
      status_map = Status.where(id: unhydrated).cache_ids.map { |s| [s.id, s] }.to_h
      @statuses  = unhydrated.map { |id| status_map[id] }.compact
    end

    @statuses
  end

  private

  # Redis key of this account's feed, delegated to FeedManager.
  def key
    FeedManager.instance.key(@type, @account.id)
  end

  def redis
    Redis.current
  end
end
|