Convert Array_Like_Helpers.map to a builtin to reduce stack size (#11363)

The ultimate goal is to reduce the number of method calls necessary for `Vector.map`.

# Important Notes
- Reduced the number of Java stack frames needed for each `Vector.map` call from **150** to **22** (see the discussion in #11363).
- Introduced a `Stack_Size_Spec` regression test that ensures the number of Java stack frames needed for a `Vector.map` call does not exceed **40** (see the sketch below for the general measurement idea).
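
The `Stack_Size_Spec` test itself is not included in the hunks shown below, so the following Java snippet is only a minimal, hypothetical sketch of the measurement idea behind such a test: capture the Java stack depth at the call site and again inside the mapped callback, then assert that the difference stays within a frame budget (the real test uses **40**). The `StackDepthProbe` class, its `map` helper, and the exact counting approach are illustrative assumptions, not the actual Enso test code.

```java
import java.util.function.IntUnaryOperator;

// Hypothetical sketch only: counts Java stack frames around a map-style
// callback, the way a stack-size regression test could bound per-call overhead.
public final class StackDepthProbe {

    /** Returns the number of live Java stack frames at the point of the call. */
    static int currentDepth() {
        return StackWalker.getInstance().walk(frames -> (int) frames.count());
    }

    /** Stand-in for a map implementation whose per-call overhead we want to bound. */
    static int[] map(int[] input, IntUnaryOperator f) {
        int[] out = new int[input.length];
        for (int i = 0; i < input.length; i++) {
            out[i] = f.applyAsInt(input[i]);
        }
        return out;
    }

    public static void main(String[] args) {
        int baseline = currentDepth();
        int[] depthInsideCallback = new int[1];
        map(new int[] {1, 2, 3}, x -> {
            depthInsideCallback[0] = currentDepth();
            return x + 1;
        });
        // Both measurements include the overhead of currentDepth() itself,
        // so the difference approximates the frames added by the map machinery.
        int overhead = depthInsideCallback[0] - baseline;
        System.out.println("Frames between call site and callback: " + overhead);
        // A regression test would assert a budget, e.g. `overhead <= 40`.
        if (overhead > 40) {
            throw new AssertionError("map overhead exceeded the frame budget: " + overhead);
        }
    }
}
```

Bounding the per-call frame overhead is what allows the `-Xss16M` flag to be dropped from the launcher scripts in the first two hunks below.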
Akirathan authored and GregoryTravis committed Nov 6, 2024
1 parent a8f221f commit 09d75b5
Showing 46 changed files with 578 additions and 128 deletions.
2 changes: 1 addition & 1 deletion distribution/bin/enso
@@ -1,5 +1,5 @@
COMP_PATH=$(dirname "$0")/../component

JAVA_OPTS="--add-opens=java.base/java.nio=ALL-UNNAMED -Xss16M $JAVA_OPTS"
JAVA_OPTS="--add-opens=java.base/java.nio=ALL-UNNAMED $JAVA_OPTS"
exec java --module-path $COMP_PATH $JAVA_OPTS -m org.enso.runner/org.enso.runner.Main "$@"
exit
2 changes: 1 addition & 1 deletion distribution/bin/enso.bat
@@ -1,5 +1,5 @@
@echo off
set comp-dir=%~dp0\..\component
-set JAVA_OPTS=%JAVA_OPTS% --add-opens=java.base/java.nio=ALL-UNNAMED -Xss16M
+set JAVA_OPTS=%JAVA_OPTS% --add-opens=java.base/java.nio=ALL-UNNAMED
java --module-path %comp-dir% -Dpolyglot.compiler.IterativePartialEscape=true %JAVA_OPTS% -m org.enso.runner/org.enso.runner.Main %*
exit /B %errorlevel%
@@ -173,7 +173,7 @@ type JS_Object
mapper = ObjectMapper.new
new_object = mapper.createObjectNode
keys = Vector.build builder->
-pairs.map on_problems=No_Wrap pair->
+pairs.map on_problems=No_Wrap.Value pair->
case pair.first of
text : Text ->
## Ensure that any dataflow errors that could be stored in `pair.second` are propagated.
@@ -190,7 +190,7 @@ type Statistic
compute_bulk : Vector -> Vector Statistic -> Vector Any
compute_bulk data statistics:Vector=[Statistic.Count, Statistic.Sum] =
resolved_stats = statistics.map (r-> r:Statistic)
-moment_order = resolved_stats.map on_problems=No_Wrap .order
+moment_order = resolved_stats.map on_problems=No_Wrap.Value .order
has_min_max = resolved_stats.any (s-> s == Statistic.Minimum || s == Statistic.Maximum)
has_product = resolved_stats.any (s-> s == Statistic.Product)
max_moment_order = moment_order.filter (v-> v != Nothing) . fold 0 .max
@@ -202,7 +202,7 @@ type Statistic
Error.throw (Illegal_Argument.Error ("Can only compute " + stat.to_text + " on numerical data sets."))

if max_moment_order > 0 && counter.moments.is_nothing then report_error resolved_stats else
-resolved_stats.map on_problems=No_Wrap statistic-> case statistic of
+resolved_stats.map on_problems=No_Wrap.Value statistic-> case statistic of
Statistic.Covariance series -> check_if_empty counter.count <| calculate_correlation_statistics data series . covariance
Statistic.Pearson series -> check_if_empty counter.count <| calculate_correlation_statistics data series . pearsonCorrelation
Statistic.R_Squared series -> check_if_empty counter.count <| calculate_correlation_statistics data series . rSquared
@@ -229,7 +229,7 @@ type Statistic
running_bulk data statistics=[Statistic.Count, Statistic.Sum] =
resolved_stats = statistics.map (r-> r:Statistic)
check_running_support resolved_stats <|
-moment_order = resolved_stats.map on_problems=No_Wrap .order
+moment_order = resolved_stats.map on_problems=No_Wrap.Value .order
has_min_max = resolved_stats.any (s-> s == Statistic.Minimum || s == Statistic.Maximum)
has_product = resolved_stats.any (s-> s == Statistic.Product)
max_moment_order = moment_order.filter (v-> v != Nothing) . fold 0 .max
@@ -241,7 +241,7 @@ type Statistic
data.fold counter current->value->
result = compute_fold current value

-row = Panic.rethrow_wrapped_if_error <| resolved_stats.map on_problems=No_Wrap s-> case s of
+row = Panic.rethrow_wrapped_if_error <| resolved_stats.map on_problems=No_Wrap.Value s-> case s of
Statistic.Maximum -> if result.count == 0 then Nothing else result.maximum
Statistic.Minimum -> if result.count == 0 then Nothing else result.minimum
_ -> result.compute s
6 changes: 4 additions & 2 deletions distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso
@@ -71,7 +71,7 @@ type Vector a

Vector.new my_vec.length (ix -> my_vec.at ix)
new : Integer -> (Integer -> Any) -> Vector Any
-new length constructor = Array_Like_Helpers.vector_from_function length constructor
+new length constructor = Array_Like_Helpers.vector_from_function length constructor Problem_Behavior.Report_Error

## PRIVATE
ADVANCED
@@ -700,7 +700,7 @@ type Vector a
[1, 2, 3] . map +1
map : (Any -> Any) -> Problem_Behavior | No_Wrap -> Vector Any
map self function on_problems:(Problem_Behavior | No_Wrap)=..Report_Error =
-Array_Like_Helpers.map self function on_problems
+@Tail_Call Array_Like_Helpers.map self function on_problems

## ICON union
Applies a function to each element of the vector, returning the `Vector`
@@ -1556,7 +1556,9 @@ type Map_Error

## PRIVATE
Indicates that a method should not wrap thrown errors in `Map_Error`.
+@Builtin_Type
type No_Wrap
+Value

## PRIVATE
Wrapped_Error.from (that : Map_Error) = Wrapped_Error.Value that that.inner_error
@@ -510,6 +510,7 @@ type Missing_Argument
Error.throw (Missing_Argument.Error argument_name function_name call_location)

## Warning when additional warnings occurred.
+@Builtin_Type
type Additional_Warnings
## PRIVATE
Error (count:Integer)
@@ -4,6 +4,7 @@ import project.Error.Error
import project.Warning.Warning

## Specifies how to handle problems.
+@Builtin_Type
type Problem_Behavior
## Ignore the problem and attempt to complete the operation
Ignore
@@ -45,9 +45,6 @@ at array_like index = @Builtin_Method "Array_Like_Helpers.at"
vector_to_array : (Vector | Array) -> Array
vector_to_array array_like = @Builtin_Method "Array_Like_Helpers.vector_to_array"

-vector_from_function_primitive : Integer -> (Integer -> Any) -> Vector Any
-vector_from_function_primitive length constructor = @Builtin_Method "Array_Like_Helpers.vector_from_function"
-
flatten : (Vector | Array) -> Vector
flatten array_like = @Builtin_Method "Array_Like_Helpers.flatten"

@@ -84,33 +81,7 @@ slice vector start end = @Builtin_Method "Array_Like_Helpers.slice"
- Ignore: The result is `Nothing`, and the error is
ignored.
vector_from_function : Integer -> (Integer -> Any) -> Problem_Behavior | No_Wrap -> Vector Any
-vector_from_function length function on_problems:(Problem_Behavior | No_Wrap)=..Report_Error =
-num_errors = Ref.new 0
-wrapped_function i =
-result = function i
-if result.is_error.not then result else
-case on_problems of
-Problem_Behavior.Ignore ->
-Nothing
-Problem_Behavior.Report_Error ->
-result.catch_primitive caught->
-Error.throw (Map_Error.Error i caught)
-No_Wrap -> result
-Problem_Behavior.Report_Warning ->
-with_error_maybe = if num_errors.get >= MAX_MAP_WARNINGS then Nothing else
-result.catch_primitive caught->
-Warning.attach caught Nothing
-num_errors.modify (_+1)
-with_error_maybe
-results = vector_from_function_primitive length wrapped_function
-if num_errors.get <= MAX_MAP_WARNINGS then results else
-err = Additional_Warnings.Error num_errors.get-MAX_MAP_WARNINGS
-Warning.attach err results
-
-## PRIVATE
-The maximum number of warnings attached to result values in
-`vector_from_function`.
-MAX_MAP_WARNINGS = 10
+vector_from_function length constructor on_problems = @Builtin_Method "Array_Like_Helpers.vector_from_function"

## PRIVATE
Creates a new vector where for each range, a corresponding section of the
@@ -258,7 +229,7 @@ transpose vec_of_vecs =
Vector.from_polyglot_array proxy

map vector function on_problems =
-vector_from_function vector.length (i-> function (vector.at i)) on_problems
+@Tail_Call vector_from_function vector.length (i-> function (vector.at i)) on_problems

map_with_index vector function on_problems =
vector_from_function vector.length (i-> function i (vector.at i)) on_problems
@@ -22,6 +22,7 @@ polyglot java import java.time.LocalDate
polyglot java import java.time.LocalDateTime
polyglot java import java.util.function.Function
polyglot java import java.lang.Exception as JException
+polyglot java import java.lang.RuntimeException as JRuntimeException
polyglot java import java.lang.Thread
polyglot java import java.lang.Thread.State
polyglot java import java.lang.Float
@@ -39,3 +40,4 @@ CaseFoldedString=JCaseFoldedString
Text_Utils=JText_Utils
BreakIterator=JBreakIterator
Exception=JException
+RuntimeException=JRuntimeException
@@ -165,7 +165,7 @@ type HTTP
# Create Unified Header list
boundary_header_list = if resolved_body.boundary.is_nothing then [] else [Header.multipart_form_data resolved_body.boundary]
all_headers = headers + boundary_header_list
-mapped_headers = all_headers.map on_problems=No_Wrap .to_java_pair
+mapped_headers = all_headers.map on_problems=No_Wrap.Value .to_java_pair

response = Response.Value (EnsoSecretHelper.makeRequest (self.make_client self resolved_body.hash) builder req.uri.to_java_representation mapped_headers (cache_policy.should_use_cache req))
if error_on_failure_code.not || response.code.is_success then response else
@@ -30,7 +30,7 @@ type Header
`Header` values.
unify_vector : Vector (Header | Pair Text Text | Vector) -> Vector Header
unify_vector headers:Vector =
-headers . map on_problems=No_Wrap h-> case h of
+headers . map on_problems=No_Wrap.Value h-> case h of
_ : Vector -> Header.new (h.at 0) (h.at 1)
_ : Pair -> Header.new (h.at 0) (h.at 1)
_ : Function -> h:Header
2 changes: 1 addition & 1 deletion distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso
@@ -296,7 +296,7 @@ set_array value warnings = @Builtin_Method "Warning.set_array"
map_attached_warnings_helper : (Any -> Maybe Any) -> Any -> Integer -> Any
map_attached_warnings_helper mapper value frames_to_drop =
warnings = Warning.get_all value
-mapped_warnings = warnings.map on_problems=No_Wrap warning->
+mapped_warnings = warnings.map on_problems=No_Wrap.Value warning->
case mapper warning.value of
Maybe.Some new_payload ->
self_call_name = "Warning.map_attached_warnings_helper"
@@ -71,7 +71,7 @@ run_transaction_with_tables connection (tables : Vector Transactional_Table_Desc
## PRIVATE
private create_tables_inside_transaction connection (tables : Vector Transactional_Table_Description) (callback : Vector DB_Table -> Any) -> Any =
connection.jdbc_connection.run_within_transaction <|
-created = tables.map on_problems=No_Wrap t-> t.create connection
+created = tables.map on_problems=No_Wrap.Value t-> t.create connection
created.if_not_error <|
result = callback created

@@ -89,7 +89,7 @@ private create_tables_outside_transaction connection (tables : Vector Transactio
Panic.throw caught_panic

Panic.catch Any handler=handle_panic <|
-created = tables.map on_problems=No_Wrap t->
+created = tables.map on_problems=No_Wrap.Value t->
table = t.create connection
# We only register a table for cleanup if it was successfully created.
table.if_not_error <|
@@ -160,8 +160,8 @@ type Context
rewrite_internal_column column =
Internal_Column.Value column.name column.sql_type_reference (SQL_Expression.Column alias column.name)

-new_columns = column_lists.map on_problems=No_Wrap columns->
-columns.map on_problems=No_Wrap rewrite_internal_column
+new_columns = column_lists.map on_problems=No_Wrap.Value columns->
+columns.map on_problems=No_Wrap.Value rewrite_internal_column

encapsulated_columns = column_lists.flat_map columns->
columns.map column-> [column.name, column.expression]
@@ -34,7 +34,7 @@ check_target_table_for_update target_table ~action = case target_table of
resolve_primary_key structure primary_key = case primary_key of
Nothing -> Nothing
_ : Vector -> if primary_key.is_empty then Nothing else
-validated = primary_key.map on_problems=No_Wrap key->
+validated = primary_key.map on_problems=No_Wrap.Value key->
if key.is_a Text then key else
Error.throw (Illegal_Argument.Error ("Primary key must be a vector of column names, instead got a " + (Meta.type_of key . to_display_text)))
validated.if_not_error <|
@@ -74,6 +74,6 @@ check_update_arguments_structure_match source_table target_table key_columns upd
if missing_target_key_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_target_key_columns.to_vector "the target table") else
if (update_action != Update_Action.Insert) && key_columns.is_empty then Error.throw (Illegal_Argument.Error "For the `update_action = "+update_action.to_text+"`, the `key_columns` must be specified to define how to match the records.") else
# Verify type matching
-problems = source_table.columns.flat_map on_problems=No_Wrap check_source_column
+problems = source_table.columns.flat_map on_problems=No_Wrap.Value check_source_column
problems.if_not_error <|
on_problems.attach_problems_before problems action
@@ -30,7 +30,7 @@ make_batched_insert_template connection table_name column_names =
prepare_create_table_statement : Connection -> Text -> Vector Column_Description -> Vector Text -> Boolean -> Problem_Behavior -> SQL_Statement
prepare_create_table_statement connection table_name columns primary_key temporary on_problems:Problem_Behavior =
type_mapping = connection.dialect.get_type_mapping
-column_descriptors = columns.map on_problems=No_Wrap def->
+column_descriptors = columns.map on_problems=No_Wrap.Value def->
sql_type = type_mapping.value_type_to_sql def.value_type on_problems
sql_type_text = type_mapping.sql_type_to_text sql_type
Create_Column_Descriptor.Value def.name sql_type_text def.constraints
@@ -31,7 +31,7 @@ take_drop_helper take_drop table selector:(Index_Sub_Range | Range | Integer) =
row_column_name = table.make_temp_column_name
table_with_row_number = table.add_row_number name=row_column_name from=0

-subqueries = ranges.map on_problems=No_Wrap range->
+subqueries = ranges.map on_problems=No_Wrap.Value range->
generate_subquery table_with_row_number row_column_name range
combined = subqueries.reduce (a-> b-> a.union b)
combined.remove_columns row_column_name
4 changes: 2 additions & 2 deletions distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso
@@ -2101,7 +2101,7 @@ type Column
map : (Any -> Any) -> Boolean -> Value_Type | Auto -> Column ! Invalid_Value_Type
map self function skip_nothing=True expected_value_type=Auto =
new_fn = if skip_nothing then (x-> if x.is_nothing then Nothing else function x) else function
-new_st = self.to_vector.map on_problems=No_Wrap new_fn
+new_st = self.to_vector.map on_problems=No_Wrap.Value new_fn
Column.from_vector self.name new_st value_type=expected_value_type

## ALIAS combine, join by row position, merge
@@ -2149,7 +2149,7 @@ type Column
function x y
False -> function
new_name = naming_helper.binary_operation_name "x" self that
-vec = self.to_vector.zip on_problems=No_Wrap that.to_vector new_fn
+vec = self.to_vector.zip on_problems=No_Wrap.Value that.to_vector new_fn
Column.from_vector new_name vec value_type=expected_value_type

## GROUP Standard.Base.Metadata
@@ -129,7 +129,7 @@ type Data_Formatter
vector = case formats of
v : Vector -> v
singleton -> [singleton]
-converted = vector.map on_problems=No_Wrap elem->
+converted = vector.map on_problems=No_Wrap.Value elem->
## Ensure the element is a `Date_Time_Formatter` or is converted to it.
We need to convert _each_ element - we cannot perform a 'bulk' conversion like `vector : Vector Date_Time_Formatter` because of erasure.
checked = elem : Date_Time_Formatter
@@ -216,17 +216,17 @@ type Data_Formatter
## PRIVATE
make_date_parser self = self.wrap_base_parser <|
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
-DateParser.new (self.date_formats.map on_problems=No_Wrap .get_java_formatter_for_parsing)
+DateParser.new (self.date_formats.map on_problems=No_Wrap.Value .get_java_formatter_for_parsing)

## PRIVATE
make_date_time_parser self = self.wrap_base_parser <|
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
-DateTimeParser.new (self.datetime_formats.map on_problems=No_Wrap .get_java_formatter_for_parsing)
+DateTimeParser.new (self.datetime_formats.map on_problems=No_Wrap.Value .get_java_formatter_for_parsing)

## PRIVATE
make_time_of_day_parser self = self.wrap_base_parser <|
Panic.catch JException handler=(caught_panic-> Error.throw (Illegal_Argument.Error caught_panic.payload.getMessage)) <|
-TimeOfDayParser.new (self.time_formats.map on_problems=No_Wrap .get_java_formatter_for_parsing)
+TimeOfDayParser.new (self.time_formats.map on_problems=No_Wrap.Value .get_java_formatter_for_parsing)

## PRIVATE
make_identity_parser self = self.wrap_base_parser IdentityParser.new
@@ -67,7 +67,7 @@ prepare_aggregate_columns naming_helper group_by aggregates table error_on_missi
assert (resolved_keys.contains Nothing . not)
problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns
columns = if old_style then group_by else keys+aggregates
-valid_resolved_aggregate_columns = columns.map on_problems=No_Wrap (resolve_aggregate table problem_builder) . filter x-> x.is_nothing.not
+valid_resolved_aggregate_columns = columns.map on_problems=No_Wrap.Value (resolve_aggregate table problem_builder) . filter x-> x.is_nothing.not

# Grouping Key
key_columns = resolved_keys.map .column
@@ -80,7 +80,7 @@ prepare_aggregate_columns naming_helper group_by aggregates table error_on_missi
The second pass resolves the default names, ensuring that they do not
clash with the user-specified names (ensuring that user-specified names
take precedence).
-pass_1 = valid_resolved_aggregate_columns.map on_problems=No_Wrap c-> if c.as == "" then "" else
+pass_1 = valid_resolved_aggregate_columns.map on_problems=No_Wrap.Value c-> if c.as == "" then "" else
# Verify if the user-provided name is valid and if not, throw an error.
naming_helper.ensure_name_is_valid c.as <|
unique.make_unique c.as
@@ -89,7 +89,7 @@ unpack_problem_summary problem_summary =
## TODO [RW, GT] In the next iterations we will want to remove
`translate_problem` in favour of constructing Enso problem instances
directly in Java code. To do so, we will need https://github.com/enso-org/enso/issues/7797
-parsed = problems_array . map on_problems=No_Wrap translate_problem
+parsed = problems_array . map on_problems=No_Wrap.Value translate_problem
if count == parsed.length then parsed else
parsed + [Additional_Warnings.Error (count - parsed.length)]
