Skip to content

Commit

Permalink
Add option to exclude specific queues
Browse files Browse the repository at this point in the history
If we have a queue that has more than 10M records
sitting there, this whole mechanism would cause the
container to get killed for overuse of memory.
  • Loading branch information
vaot committed May 27, 2021
1 parent 136f8cc commit 7debe87
Showing 1 changed file with 6 additions and 5 deletions.
11 changes: 6 additions & 5 deletions lib/sidekiq_unique_digests_cleaner.rb
Original file line number Diff line number Diff line change
@@ -1,9 +1,9 @@
# frozen_string_literal: true

class SidekiqUniqueDigestsCleaner
def self.clean_unique_digests
def self.clean_unique_digests(exclude_queues: [])
Sidekiq.logger.info('######## Starting cleaning up locked unique digests...')
digests = stuck_digests
digests = stuck_digests(exclude_queues)
if digests.any?
Sidekiq.logger.info('######## Looks like those digests are stuck: ' + digests.join(', '))
Sidekiq.logger.info('######## Unlocking digests...')
Expand All @@ -14,13 +14,14 @@ def self.clean_unique_digests
end
end

# Returns unique-job digests that are still locked but no longer referenced
# by any job Sidekiq knows about (scheduled set, retry set, enqueued queues,
# or currently-running workers). These are "stuck" and safe to unlock.
#
# @param exclude_queues [Array<String>] names of queues to skip when scanning
#   enqueued jobs. Scanning a very large queue (e.g. 10M+ jobs) loads every
#   payload into memory and can get the container OOM-killed, so callers can
#   opt such queues out of the scan.
# @return [Array<String>] digests present in SidekiqUniqueJobs but absent
#   from every scanned job payload.
def self.stuck_digests(exclude_queues)
  digests_with_lock = [
    Sidekiq::ScheduledSet.new.map(&:value),
    Sidekiq::RetrySet.new.map(&:value),
    # `reject` the excluded queues before materializing their job payloads.
    Sidekiq::Queue.all.reject { |queue| exclude_queues.include?(queue.name) }
                  .map { |queue| queue.map(&:value) },
    Sidekiq::Workers.new.map { |_pid, _tid, job| job.value },
  ].flatten.map { |job_value| JSON.parse(job_value, symbolize_names: true)[:unique_digest] }
  SidekiqUniqueJobs::Digests.all - digests_with_lock
end
end
end

0 comments on commit 7debe87

Please sign in to comment.