Skip to content

Commit

Permalink
Merge pull request #516 from danmayer/paging
Browse files Browse the repository at this point in the history
Add experimental reporting for large projects via paging
  • Loading branch information
danmayer authored Apr 20, 2024
2 parents e815dcd + 7a183da commit 4670d07
Show file tree
Hide file tree
Showing 23 changed files with 432 additions and 168 deletions.
4 changes: 3 additions & 1 deletion coverband.gemspec
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,9 @@ Gem::Specification.new do |spec|
spec.add_development_dependency "capybara"
spec.add_development_dependency "m"
spec.add_development_dependency "memory_profiler"
# breaking change in minitest and mocha
# breaking change in minitest and mocha...
# note: we are also adding 'spy' as mocha doesn't want us to spy on redis calls...
# ^^^ probably need a large test cleanup refactor
spec.add_development_dependency "minitest", "= 5.18.1"
spec.add_development_dependency "minitest-fork_executor"
spec.add_development_dependency "minitest-stub-const"
Expand Down
10 changes: 5 additions & 5 deletions lib/coverband/adapters/base.rb
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ def save_coverage
raise ABSTRACT_KEY
end

# Abstract: subclasses return stored coverage data for the given type.
# opts carries store-specific options (e.g. paging or filename filters).
# Raises to force adapters to implement it.
def coverage(_local_type = nil, opts = {})
  raise ABSTRACT_KEY
end

Expand All @@ -51,9 +51,9 @@ def save_report(_report)
raise "abstract"
end

# Builds the full coverage report across all coverage types, then merges
# in the combined (MERGED_TYPE) view.
# options are forwarded to the store's split_coverage (e.g. paging
# options such as :page / :page_size for stores that support them).
def get_coverage_report(options = {})
  coverage_cache = {}
  data = Coverband.configuration.store.split_coverage(Coverband::TYPES, coverage_cache, options)
  data.merge(Coverband::MERGED_TYPE => Coverband.configuration.store.merged_coverage(Coverband::TYPES, coverage_cache))
end

Expand All @@ -67,12 +67,12 @@ def raw_store

protected

# Builds a hash of coverage data keyed by type, memoizing each type's
# result in coverage_cache. When simulate_oneshot_lines_coverage is
# enabled, runtime coverage is served from simulated data instead of
# the raw store. options are forwarded to #coverage (store-specific).
def split_coverage(types, coverage_cache, options = {})
  types.reduce({}) do |data, type|
    if type == Coverband::RUNTIME_TYPE && Coverband.configuration.simulate_oneshot_lines_coverage
      data.update(type => coverage_cache[type] ||= simulated_runtime_coverage)
    else
      data.update(type => coverage_cache[type] ||= coverage(type, options))
    end
  end
end
Expand Down
2 changes: 1 addition & 1 deletion lib/coverband/adapters/file_store.rb
Original file line number Diff line number Diff line change
Expand Up @@ -52,7 +52,7 @@ def migrate!
raise NotImplementedError, "FileStore doesn't support migrations"
end

def coverage(_local_type = nil)
def coverage(_local_type = nil, opts = {})
if merge_mode
data = {}
Dir[path.sub(/\.\d+/, ".*")].each do |path|
Expand Down
113 changes: 97 additions & 16 deletions lib/coverband/adapters/hash_redis_store.rb
Original file line number Diff line number Diff line change
Expand Up @@ -56,7 +56,7 @@ def clear!(local_types = Coverband::TYPES)
# sleep in between to avoid holding other redis commands..
# with a small random offset so runtime and eager types can be processed "at the same time"
# Sleep interval (seconds) used between deferred redis batches, with a
# small random offset so runtime and eager types can be processed
# "at the same time" without holding other redis commands.
def deferred_time
  rand(2.0..3.0)
end

def del(local_type)
Expand Down Expand Up @@ -89,7 +89,7 @@ def unlock!(local_type)
# used to store data to redis. It is changed only when breaking changes to our
# redis format are required.
###
REDIS_STORAGE_FORMAT_VERSION = "coverband_hash_3_3"
REDIS_STORAGE_FORMAT_VERSION = "coverband_hash_4_0"

JSON_PAYLOAD_EXPIRATION = 5 * 60

Expand All @@ -116,8 +116,8 @@ def initialize(redis, opts = {})

# True when the connected redis server is recent enough (>= 2.6.0) for
# the features this store needs. When redis is unreachable we log and
# return nil instead of raising, so setup tasks (e.g. assets:precompile)
# don't fail hard.
def supported?
  Gem::Version.new(@redis.info["redis_version"]) >= Gem::Version.new("2.6.0")
rescue Redis::CannotConnectError => e
  Coverband.configuration.logger.info "Redis is not available (#{e}), Coverband not configured"
  Coverband.configuration.logger.info "If this is a setup task like assets:precompile feel free to ignore"
end

Expand All @@ -128,6 +128,7 @@ def clear!
file_keys = files_set
@redis.del(*file_keys) if file_keys.any?
@redis.del(files_key)
@redis.del(files_key(type))
@get_coverage_cache.clear!(type)
end
self.type = old_type
Expand All @@ -148,7 +149,7 @@ def save_report(report)
updated_time = (type == Coverband::EAGER_TYPE) ? nil : report_time
keys = []
report.each_slice(@save_report_batch_size) do |slice|
files_data = slice.map { |(file, data)|
files_data = slice.map do |(file, data)|
relative_file = @relative_file_converter.convert(file)
file_hash = file_hash(relative_file)
key = key(relative_file, file_hash: file_hash)
Expand All @@ -161,7 +162,7 @@ def save_report(report)
report_time: report_time,
updated_time: updated_time
)
}
end
next unless files_data.any?

arguments_key = [@redis_namespace, SecureRandom.uuid].compact.join(".")
Expand All @@ -171,12 +172,24 @@ def save_report(report)
@redis.sadd(files_key, keys) if keys.any?
end

def coverage(local_type = nil)
# NOTE: This method should be used for full coverage or filename coverage look ups
# When paging code should use coverage_for_types and pull eager and runtime together as matched pairs
def coverage(local_type = nil, opts = {})
page_size = opts[:page_size] || 250
cached_results = @get_coverage_cache.fetch(local_type || type) do |sleep_time|
files_set = files_set(local_type)

# use batches with a sleep in between to avoid overloading redis
files_set.each_slice(250).flat_map do |key_batch|
files_set = if opts[:page]
raise "call coverage_for_types with paging"
elsif opts[:filename]
type_key_prefix = key_prefix(local_type)
# NOTE: a better way to extract filename from key would be better
files_set(local_type).select do |cache_key|
cache_key.sub(type_key_prefix, "").match(short_name(opts[:filename]))
end || {}
else
files_set(local_type)
end
# below uses batches with a sleep in between to avoid overloading redis
files_set.each_slice(page_size).flat_map do |key_batch|
sleep sleep_time
@redis.pipelined do |pipeline|
key_batch.each do |key|
Expand All @@ -191,6 +204,70 @@ def coverage(local_type = nil)
end
end

# Overrides Base#split_coverage: when a page is requested (and no
# filename filter is given), runtime and eager coverage are pulled
# together as matched pairs via coverage_for_types and cached;
# otherwise falls back to the default per-type lookup.
def split_coverage(types, coverage_cache, options = {})
  use_paging = types.is_a?(Array) && !options[:filename] && options[:page]
  return super unless use_paging

  data = coverage_for_types(types, options)
  coverage_cache[Coverband::RUNTIME_TYPE] = data[Coverband::RUNTIME_TYPE]
  coverage_cache[Coverband::EAGER_TYPE] = data[Coverband::EAGER_TYPE]
  data
end

# Fetches runtime and eager coverage together for one page of files.
# Only the requested page of the runtime file set is loaded; eager
# coverage is then restricted to files appearing in that runtime page,
# so the two types line up as matched pairs.
# NOTE(review): assumes opts[:page] is a 1-based positive integer —
# a page of 0 would index -1 (the last slice); confirm callers.
def coverage_for_types(_types, opts = {})
page_size = opts[:page_size] || 250
hash_data = {}

runtime_file_set = files_set(Coverband::RUNTIME_TYPE)
# record the total runtime file count (read back via cached_file_count)
@cached_file_count = runtime_file_set.length
# keep only the requested page; out-of-range pages yield []
runtime_file_set = runtime_file_set.each_slice(page_size).to_a[opts[:page] - 1] || []

# batch the redis hash reads through a pipeline to reduce round trips
hash_data[Coverband::RUNTIME_TYPE] = runtime_file_set.each_slice(page_size).flat_map do |key_batch|
@redis.pipelined do |pipeline|
key_batch.each do |key|
pipeline.hgetall(key)
end
end
end

eager_key_pre = key_prefix(Coverband::EAGER_TYPE)
runtime_key_pre = key_prefix(Coverband::RUNTIME_TYPE)
# keep only eager keys whose file portion matches a runtime key on this
# page (keys are compared with their type prefixes stripped)
matched_file_set = files_set(Coverband::EAGER_TYPE)
.select do |eager_key, _val|
runtime_file_set.any? do |runtime_key|
(eager_key.sub(eager_key_pre, "") == runtime_key.sub(runtime_key_pre, ""))
end
end || []
hash_data[Coverband::EAGER_TYPE] = matched_file_set.each_slice(page_size).flat_map do |key_batch|
@redis.pipelined do |pipeline|
key_batch.each do |key|
pipeline.hgetall(key)
end
end
end
# convert the raw redis hashes into the standard coverage hash format
hash_data[Coverband::RUNTIME_TYPE] = hash_data[Coverband::RUNTIME_TYPE].each_with_object({}) do |data_from_redis, hash|
add_coverage_for_file(data_from_redis, hash)
end
hash_data[Coverband::EAGER_TYPE] = hash_data[Coverband::EAGER_TYPE].each_with_object({}) do |data_from_redis, hash|
add_coverage_for_file(data_from_redis, hash)
end
hash_data
end

# Converts an absolute file path into a short, project-relative name.
# Regexp.escape guards against regex metacharacters (".", "+", etc.) in
# the configured root being interpreted as pattern syntax when stripped.
def short_name(filename)
  filename.sub(/^#{Regexp.escape(Coverband.configuration.root)}/, ".")
    .gsub(%r{^\./}, "")
end

# Counts tracked files of the given type, excluding any file that
# matches one of the configured ignore patterns.
def file_count(local_type = nil)
  ignore_patterns = Coverband.configuration.ignore
  files_set(local_type).count do |tracked_file|
    ignore_patterns.none? { |pattern| tracked_file.match(pattern) }
  end
end

# Memoized count of runtime-tracked files. coverage_for_types also
# pre-populates @cached_file_count, so this may reflect the most recent
# paged lookup rather than a fresh redis query.
def cached_file_count
@cached_file_count ||= file_count(Coverband::RUNTIME_TYPE)
end

# Exposes the underlying redis client for callers that need direct
# access to the raw store.
def raw_store
@redis
end
Expand All @@ -212,9 +289,13 @@ def add_coverage_for_file(data_from_redis, hash)
return unless file_hash(file) == data_from_redis[FILE_HASH]

data = coverage_data_from_redis(data_from_redis)
hash[file] = data_from_redis.select { |meta_data_key, _value| META_DATA_KEYS.include?(meta_data_key) }.merge!("data" => data)
hash[file][LAST_UPDATED_KEY] = (hash[file][LAST_UPDATED_KEY].nil? || hash[file][LAST_UPDATED_KEY] == "") ? nil : hash[file][LAST_UPDATED_KEY].to_i
hash[file].merge!(LAST_UPDATED_KEY => hash[file][LAST_UPDATED_KEY], FIRST_UPDATED_KEY => hash[file][FIRST_UPDATED_KEY].to_i)
hash[file] = data_from_redis.select do |meta_data_key, _value|
META_DATA_KEYS.include?(meta_data_key)
end.merge!("data" => data)
hash[file][LAST_UPDATED_KEY] =
(hash[file][LAST_UPDATED_KEY].nil? || hash[file][LAST_UPDATED_KEY] == "") ? nil : hash[file][LAST_UPDATED_KEY].to_i
hash[file].merge!(LAST_UPDATED_KEY => hash[file][LAST_UPDATED_KEY],
FIRST_UPDATED_KEY => hash[file][FIRST_UPDATED_KEY].to_i)
end

def coverage_data_from_redis(data_from_redis)
Expand All @@ -226,9 +307,9 @@ def coverage_data_from_redis(data_from_redis)
end

def script_input(key:, file:, file_hash:, data:, report_time:, updated_time:)
coverage_data = data.each_with_index.each_with_object({}) { |(coverage, index), hash|
coverage_data = data.each_with_index.each_with_object({}) do |(coverage, index), hash|
hash[index] = coverage if coverage
}
end
meta = {
first_updated_at: report_time,
file: file,
Expand Down
2 changes: 1 addition & 1 deletion lib/coverband/adapters/null_store.rb
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@ def migrate!
raise NotImplementedError, "NullStore doesn't support migrations"
end

def coverage(_local_type = nil)
def coverage(_local_type = nil, opts = {})
{}
end

Expand Down
2 changes: 1 addition & 1 deletion lib/coverband/adapters/stdout_store.rb
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@ def migrate!
raise NotImplementedError, "StdoutStore doesn't support migrations"
end

def coverage(_local_type = nil)
def coverage(_local_type = nil, opts = {})
{}
end

Expand Down
35 changes: 27 additions & 8 deletions lib/coverband/configuration.rb
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
# frozen_string_literal: true

module Coverband
###
# Configuration parsing and options for the coverband gem.
###
class Configuration
attr_accessor :root_paths, :root,
:verbose,
Expand All @@ -17,7 +20,7 @@ class Configuration
:s3_secret_access_key, :password, :api_key, :service_url, :coverband_timeout, :service_dev_mode,
:service_test_mode, :process_type, :track_views, :redis_url,
:background_reporting_sleep_seconds, :reporting_wiggle,
:send_deferred_eager_loading_data
:send_deferred_eager_loading_data, :paged_reporting

attr_reader :track_gems, :ignore, :use_oneshot_lines_coverage

Expand Down Expand Up @@ -132,8 +135,8 @@ def railtie!
trackers << Coverband.configuration.view_tracker
end
trackers.each { |tracker| tracker.railtie! }
rescue Redis::CannotConnectError => error
Coverband.configuration.logger.info "Redis is not available (#{error}), Coverband not configured"
rescue Redis::CannotConnectError => e
Coverband.configuration.logger.info "Redis is not available (#{e}), Coverband not configured"
Coverband.configuration.logger.info "If this is a setup task like assets:precompile feel free to ignore"
end

Expand Down Expand Up @@ -170,7 +173,10 @@ def reporting_wiggle

def store
@store ||= if service?
raise "invalid configuration: unclear default store coverband expects either api_key or redis_url" if ENV["COVERBAND_REDIS_URL"]
if ENV["COVERBAND_REDIS_URL"]
raise "invalid configuration: unclear default store coverband expects either api_key or redis_url"
end

require "coverband/adapters/web_service_store"
Coverband::Adapters::WebServiceStore.new(service_url)
else
Expand All @@ -180,8 +186,14 @@ def store

# Assigns the coverage storage adapter, validating configuration:
# - must be a Coverband::Adapters::Base subclass instance
# - an api_key is only valid with the WebServiceStore
# - the service store must not also have a redis url configured
def store=(store)
  raise "Pass in an instance of Coverband::Adapters" unless store.is_a?(Coverband::Adapters::Base)
  if api_key && store.class.to_s != "Coverband::Adapters::WebServiceStore"
    raise "invalid configuration: only coverband service expects an API Key"
  end
  if ENV["COVERBAND_REDIS_URL"] &&
      defined?(::Coverband::Adapters::WebServiceStore) &&
      store.instance_of?(::Coverband::Adapters::WebServiceStore)
    raise "invalid configuration: coverband service shouldn't have redis url set"
  end

  @store = store
end
Expand Down Expand Up @@ -241,7 +253,10 @@ def to_h
end

# Guards the one-shot coverage flag: raises when enabling it on a ruby
# that doesn't implement oneshot line coverage (< 2.6); disabling is
# always allowed.
def use_oneshot_lines_coverage=(value)
  unless one_shot_coverage_implemented_in_ruby_version? || !value
    raise(StandardError,
      "One shot line coverage is only available in ruby >= 2.6")
  end

  @use_oneshot_lines_coverage = value
end
Expand Down Expand Up @@ -294,6 +309,10 @@ def send_deferred_eager_loading_data?
@send_deferred_eager_loading_data
end

# Whether experimental paged reporting is enabled; coerces the stored
# setting to a strict boolean.
def paged_reporting
  @paged_reporting ? true : false
end

def service_disabled_dev_test_env?
return false unless service?

Expand All @@ -318,7 +337,7 @@ def s3_secret_access_key
end

# Deprecated no-op kept for backward compatibility: gem tracking was
# removed, so assigning this only prints a deprecation warning.
def track_gems=(_value)
  puts "gem tracking is deprecated, setting this will be ignored & eventually removed"
end

private
Expand Down
9 changes: 5 additions & 4 deletions lib/coverband/reporters/base.rb
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,9 @@ class Base
class << self
DATA_KEY = "data"

def report(store, _options = {})
def report(store, options = {})
all_roots = Coverband.configuration.all_root_paths
get_current_scov_data_imp(store, all_roots)
get_current_scov_data_imp(store, all_roots, options)

# These are extremely verbose but useful during coverband development, not generally for users
# Only available by uncommenting this mode is never released
Expand Down Expand Up @@ -85,12 +85,13 @@ def merge_arrays(first, second)
# why do we need to merge covered files data?
# basically because paths on machines or deployed hosts could be different, so
# two different keys could point to the same filename or `line_key`
# this happens when a deployment has a dynamic path or the path changes during deployment (hot code reload)
# TODO: think we are filtering based on ignore while sending to the store
# and as we also pull it out here
###
def get_current_scov_data_imp(store, roots)
def get_current_scov_data_imp(store, roots, options = {})
scov_style_report = {}
store.get_coverage_report.each_pair do |name, data|
store.get_coverage_report(options).each_pair do |name, data|
data.each_pair do |key, line_data|
next if Coverband.configuration.ignore.any? { |i| key.match(i) }
next unless line_data
Expand Down
Loading

0 comments on commit 4670d07

Please sign in to comment.