From b20a5ab48aa1a4aa10dabe6c29ead20087f53409 Mon Sep 17 00:00:00 2001
From: James Harton
Date: Mon, 2 Sep 2024 04:37:32 +1200
Subject: [PATCH] improvement: Don't just list all the values when there may
 be a large number of values. (#106)

---
 lib/spark/options/options.ex | 16 ++++++++++++++++
 1 file changed, 16 insertions(+)

diff --git a/lib/spark/options/options.ex b/lib/spark/options/options.ex
index d278088..e0eb5e9 100644
--- a/lib/spark/options/options.ex
+++ b/lib/spark/options/options.ex
@@ -604,6 +604,22 @@ defmodule Spark.Options do
   defp document_values(opts) do
     case opts[:type] do
+      {in_type, range}
+      when in_type in [:in, :one_of] and is_struct(range, Range) and range.step > 0 ->
+        Keyword.update!(
+          opts,
+          :doc,
+          &"#{&1} Valid values are between #{range.first} and #{range.last}"
+        )
+
+      {in_type, range}
+      when in_type in [:in, :one_of] and is_struct(range, Range) and range.step < 0 ->
+        Keyword.update!(
+          opts,
+          :doc,
+          &"#{&1} Valid values are between #{range.last} and #{range.first}"
+        )
+
       {in_type, values} when in_type in [:in, :one_of] ->
         values = Enum.map_join(values, ", ", &inspect/1)
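
Note (illustrative, not part of the patch): a minimal sketch of the behaviour
this change adds, assuming the usual Spark.Options.docs/1 entry point and a
made-up :retries option. When an :in / :one_of type is backed by a Range, the
generated documentation now states the bounds instead of enumerating every
value.

    # Hypothetical schema, for illustration only.
    schema = [
      retries: [
        type: {:in, 1..10},
        doc: "Number of retry attempts."
      ]
    ]

    # Before this patch the docs listed 1, 2, 3, ... 10 individually; with it,
    # the documentation for :retries ends in something like:
    #   "Number of retry attempts. Valid values are between 1 and 10"
    # A decreasing range (e.g. {:in, 10..1//-1}) has its bounds printed in
    # swapped order, via the range.step < 0 clause.
    Spark.Options.docs(schema) |> IO.puts()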