
IndexingScheduler: fetch and import in batches (#24285)

Co-authored-by: Claire <claire.github-309c@sitedethib.com>
Vyr Cossont 2023-03-31 05:38:47 -07:00 committed by GitHub
parent 500d6f93be
commit 7646ad8a2b


@@ -6,17 +6,21 @@ class Scheduler::IndexingScheduler
 
   sidekiq_options retry: 0
 
+  IMPORT_BATCH_SIZE = 1000
+  SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE
+
   def perform
     return unless Chewy.enabled?
 
     indexes.each do |type|
       with_redis do |redis|
-        ids = redis.smembers("chewy:queue:#{type.name}")
-
-        type.import!(ids)
-
-        redis.pipelined do |pipeline|
-          ids.each { |id| pipeline.srem("chewy:queue:#{type.name}", id) }
+        redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE) do |ids|
+          redis.pipelined do
+            ids.each_slice(IMPORT_BATCH_SIZE) do |slice_ids|
+              type.import!(slice_ids)
+              redis.srem("chewy:queue:#{type.name}", slice_ids)
+            end
+          end
         end
       end
     end
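
For context: the old scheduler loaded every queued id at once with SMEMBERS and then removed them one by one inside a Redis pipeline, while the new code walks the queue set with SSCAN and imports fixed-size slices, removing each slice right after it is imported. Below is a minimal standalone sketch of that scan-then-slice drain pattern using the redis gem; the queue key, batch sizes, and process_batch step are hypothetical placeholders rather than Mastodon code, and the diff's redis.pipelined wrapper is left out for brevity.

# Sketch only: drain a Redis set in batches instead of a single SMEMBERS call.
require 'redis'

IMPORT_BATCH_SIZE = 1000
SCAN_BATCH_SIZE   = 10 * IMPORT_BATCH_SIZE

redis = Redis.new
queue_key = 'chewy:queue:ExampleIndex' # hypothetical queue set name

# Stand-in for type.import!(ids): index the records behind these ids.
def process_batch(ids)
  puts "importing #{ids.size} records"
end

# SSCAN pages through the set instead of materializing it all at once;
# each_slice groups the scanned members into fixed-size import batches.
redis.sscan_each(queue_key, count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
  process_batch(ids)
  redis.srem(queue_key, ids) # drop only the ids that were just imported
end

Compared with one SMEMBERS call, this keeps the worker's memory bounded by the batch size, and because ids are removed per slice, a failure mid-run leaves the unprocessed remainder in the set for the next scheduler run.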