Skip to content

Commit

Permalink
Add option to exclude specific queues
Browse files Browse the repository at this point in the history
If we have a queue that has more than 10M records
sitting there, this whole mechanism would cause the
container to get killed for overuse of memory.
  • Loading branch information
vaot committed May 27, 2021
1 parent 680c400 commit 3ccf3a6
Showing 1 changed file with 5 additions and 4 deletions.
9 changes: 5 additions & 4 deletions lib/sidekiq_unique_digests_cleaner.rb
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
# frozen_string_literal: true

class SidekiqUniqueDigestsCleaner
def self.clean_unique_digests
def self.clean_unique_digests(exclude_queues: [])
Sidekiq.logger.info('######## Starting cleaning up locked unique digests...')
digests = stuck_digests
digests = stuck_digests(exclude_queues)
if digests.any?
Sidekiq.logger.info('######## Looks like those digests are stuck: ' + digests.join(', '))
Sidekiq.logger.info('######## Unlocking digests...')
Expand All @@ -14,11 +14,12 @@ def self.clean_unique_digests
end
end

def self.stuck_digests
def self.stuck_digests(exclude_queues)
digests_with_lock = [
Sidekiq::ScheduledSet.new.map(&:value),
Sidekiq::RetrySet.new.map(&:value),
Sidekiq::Queue.all.map { |queue| queue.map(&:value) },
Sidekiq::Queue.all.filter { |queue| !exclude_queues.include?(queue.name) }
.map { |queue| queue.map(&:value) }
].flatten.map { |job_value| JSON.parse(job_value, symbolize_names: true)[:unique_digest] }

digests_with_lock += Sidekiq::Workers.new.map { |_pid, _tid, job| job['unique_digest'] }
Expand Down

0 comments on commit 3ccf3a6

Please sign in to comment.