From 7646ad8a2b7d0809657a4f2a228443b4c992d070 Mon Sep 17 00:00:00 2001
From: Vyr Cossont <VyrCossont@users.noreply.github.com>
Date: Fri, 31 Mar 2023 05:38:47 -0700
Subject: [PATCH] IndexingScheduler: fetch and import in batches (#24285)

Co-authored-by: Claire <claire.github-309c@sitedethib.com>
---
 app/workers/scheduler/indexing_scheduler.rb | 14 ++++++++------
 1 file changed, 8 insertions(+), 6 deletions(-)

diff --git a/app/workers/scheduler/indexing_scheduler.rb b/app/workers/scheduler/indexing_scheduler.rb
index c423966297..1bbe9cd5d0 100644
--- a/app/workers/scheduler/indexing_scheduler.rb
+++ b/app/workers/scheduler/indexing_scheduler.rb
@@ -6,17 +6,19 @@ class Scheduler::IndexingScheduler
 
   sidekiq_options retry: 0
 
+  IMPORT_BATCH_SIZE = 1000
+  SCAN_BATCH_SIZE = 10 * IMPORT_BATCH_SIZE
+
   def perform
     return unless Chewy.enabled?
 
     indexes.each do |type|
       with_redis do |redis|
-        ids = redis.smembers("chewy:queue:#{type.name}")
-
-        type.import!(ids)
-
-        redis.pipelined do |pipeline|
-          ids.each { |id| pipeline.srem("chewy:queue:#{type.name}", id) }
+        redis.sscan_each("chewy:queue:#{type.name}", count: SCAN_BATCH_SIZE).each_slice(IMPORT_BATCH_SIZE) do |ids|
+          type.import!(ids)
+          redis.pipelined do |pipeline|
+            pipeline.srem("chewy:queue:#{type.name}", ids)
+          end
         end
       end
     end
-- 
GitLab