From c2c4b95116daf45f2b9397642b6ab02979a51c07 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Fri, 5 Jul 2024 19:41:36 +0100 Subject: [PATCH] Final step removing the Problem_Behavior publishing. (#10461) - Remove publishing the constructors. - Fix any missed use in libs. - Alter tests to generally use auto-scoped calls. - `on_incomparable` to `on_problems`. --- .../Base/0.0.0-dev/src/Data/Array.enso | 23 +++--- .../Base/0.0.0-dev/src/Data/Vector.enso | 23 +++--- .../src/Errors/Problem_Behavior.enso | 8 -- .../src/Internal/Array_Like_Helpers.enso | 12 ++- .../lib/Standard/Base/0.0.0-dev/src/Main.enso | 1 - .../Database/0.0.0-dev/src/DB_Table.enso | 6 +- .../Standard/Table/0.0.0-dev/src/Table.enso | 4 +- .../enso/compiler/core/EnsoParserTest.java | 2 +- test/Base_Tests/src/Data/Ordering_Spec.enso | 14 ++-- .../src/Data/Text/Encoding_Spec.enso | 20 ++--- test/Base_Tests/src/Data/Vector_Spec.enso | 26 +++--- test/Base_Tests/src/System/File_Spec.enso | 32 ++++---- .../System/Reporting_Stream_Decoder_Spec.enso | 10 +-- .../System/Reporting_Stream_Encoder_Spec.enso | 18 ++-- .../src/Table/Enso_Callback.enso | 6 +- .../src/Table/Helpers.enso | 2 +- .../Add_Row_Number_Spec.enso | 4 +- .../Aggregate_Spec.enso | 22 ++--- .../Conversion_Spec.enso | 2 +- .../Common_Table_Operations/Core_Spec.enso | 6 +- .../Cross_Tab_Spec.enso | 2 +- .../Distinct_Spec.enso | 26 +++--- .../Expression_Spec.enso | 26 +++--- .../Common_Table_Operations/Filter_Spec.enso | 20 ++--- .../Integration_Tests.enso | 6 +- .../Join/Cross_Join_Spec.enso | 4 +- .../Join/Join_Spec.enso | 14 ++-- .../Join/Lookup_Spec.enso | 2 +- .../Join/Union_Spec.enso | 2 +- .../Join/Zip_Spec.enso | 8 +- .../Select_Columns_Spec.enso | 38 ++++----- .../Transpose_Spec.enso | 2 +- .../src/Formatting/Parse_Values_Spec.enso | 6 +- .../src/IO/Delimited_Read_Spec.enso | 10 +-- .../src/IO/Delimited_Write_Spec.enso | 82 ++++++++++--------- test/Table_Tests/src/IO/Excel_Spec.enso | 76 ++++++++--------- .../src/In_Memory/Aggregate_Column_Spec.enso | 2 +- .../src/In_Memory/Builders_Spec.enso | 4 +- .../Table_Tests/src/In_Memory/Table_Spec.enso | 9 +- .../src/In_Memory/Table_Xml_Spec.enso | 14 +++- test/Table_Tests/src/Util.enso | 2 +- 41 files changed, 304 insertions(+), 292 deletions(-) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso index 7a5f351891a2..6ede8c854250 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso @@ -85,8 +85,8 @@ type Array elements, returning an an `Ordering` if the two elements are comparable or `Nothing` if they are not. If set to `Nothing` (the default argument), `Ordering.compare _ _` method will be used. - - on_incomparable: A `Problem_Behavior` specifying what should happen if - two incomparable values are encountered. + - on_problems: A `Problem_Behavior` specifying what should happen if two + incomparable values are encountered. By default, elements are sorted in ascending order. @@ -121,10 +121,9 @@ type Array for the default comparator, which is always the first group. Additionally, an `Incomparable_Values` dataflow error will be returned - if the `on_incomparable` parameter is set to `Problem_Behavior.Report_Error`, - or a warning attached if the `on_incomparable` parameter is set to - `Problem_Behavior.Report_Warning` in case of encountering incomparable - values. 
+ if the `on_problems` parameter is set to `Report_Error`, or a warning + attached if the `on_problems` parameter is set to `Report_Warning` in + case of encountering incomparable values. It takes equal advantage of ascending and descending runs in the array, making it much simpler to merge two or more sorted arrays: simply @@ -147,8 +146,8 @@ type Array [My_Type.Value 'hello', 1].to_array.sort == [1, My_Type.Value 'hello'].to_array sort : Sort_Direction -> (Any -> Any)|Nothing -> (Any -> Any -> (Ordering|Nothing))|Nothing -> Problem_Behavior -> Vector Any ! Incomparable_Values - sort self (order = Sort_Direction.Ascending) on=Nothing by=Nothing on_incomparable:Problem_Behavior=..Ignore = - Array_Like_Helpers.sort self order on by on_incomparable + sort self (order = Sort_Direction.Ascending) on=Nothing by=Nothing on_problems:Problem_Behavior=..Ignore = + Array_Like_Helpers.sort self order on by on_problems ## ALIAS first, head, last, limit, sample, slice, tail, top, keep GROUP Selections @@ -390,7 +389,7 @@ type Array Errors that are thrown when executing the supplied function are wrapped in `Map_Error`, which contains the index at which the error occurred. - In the `Problem_Behavior.Report_Warning` case, only `MAX_MAP_WARNINGS` + In the `Report_Warning` case, only `MAX_MAP_WARNINGS` warnings are attached to result values. After that, the warnings are dropped, but a count of the additional warnings is attached in an `Additional_Warnings` warning. @@ -429,7 +428,7 @@ type Array Errors that are thrown when executing the supplied function are wrapped in `Map_Error`, which contains the index at which the error occurred. - In the `Problem_Behavior.Report_Warning` case, only `MAX_MAP_WARNINGS` + In the `Report_Warning` case, only `MAX_MAP_WARNINGS` warnings are attached to result values. After that, the warnings are dropped, but a count of the additional warnings is attached in an `Additional_Warnings` warning. @@ -676,7 +675,7 @@ type Array Errors that are thrown when executing the supplied function are wrapped in `Map_Error`, which contains the index at which the error occurred. - In the `Problem_Behavior.Report_Warning` case, only `MAX_MAP_WARNINGS` + In the `Report_Warning` case, only `MAX_MAP_WARNINGS` warnings are attached to result values. After that, the warnings are dropped, but a count of the additional warnings is attached in an `Additional_Warnings` warning. @@ -899,7 +898,7 @@ type Array Errors that are thrown when executing the supplied function are wrapped in `Map_Error`, which contains the index at which the error occurred. - In the `Problem_Behavior.Report_Warning` case, only `MAX_MAP_WARNINGS` + In the `Report_Warning` case, only `MAX_MAP_WARNINGS` warnings are attached to result values. After that, the warnings are dropped, but a count of the additional warnings is attached in an `Additional_Warnings` warning. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso index 3e67bcec54de..a0d3aba3c3e2 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso @@ -689,7 +689,7 @@ type Vector a Errors that are thrown when executing the supplied function are wrapped in `Map_Error`, which contains the index at which the error occurred. - In the `Problem_Behavior.Report_Warning` case, only `MAX_MAP_WARNINGS` + In the `Report_Warning` case, only `MAX_MAP_WARNINGS` warnings are attached to result values. 
After that, the warnings are dropped, but a count of the additional warnings is attached in an `Additional_Warnings` warning. @@ -728,7 +728,7 @@ type Vector a Errors that are thrown when executing the supplied function are wrapped in `Map_Error`, which contains the index at which the error occurred. - In the `Problem_Behavior.Report_Warning` case, only `MAX_MAP_WARNINGS` + In the `Report_Warning` case, only `MAX_MAP_WARNINGS` warnings are attached to result values. After that, the warnings are dropped, but a count of the additional warnings is attached in an `Additional_Warnings` warning. @@ -782,7 +782,7 @@ type Vector a Errors that are thrown when executing the supplied function are wrapped in `Map_Error`, which contains the index at which the error occurred. - In the `Problem_Behavior.Report_Warning` case, only `MAX_MAP_WARNINGS` + In the `Report_Warning` case, only `MAX_MAP_WARNINGS` warnings are attached to result values. After that, the warnings are dropped, but a count of the additional warnings is attached in an `Additional_Warnings` warning. @@ -1024,7 +1024,7 @@ type Vector a Errors that are thrown when executing the supplied function are wrapped in `Map_Error`, which contains the index at which the error occurred. - In the `Problem_Behavior.Report_Warning` case, only `MAX_MAP_WARNINGS` + In the `Report_Warning` case, only `MAX_MAP_WARNINGS` warnings are attached to result values. After that, the warnings are dropped, but a count of the additional warnings is attached in an `Additional_Warnings` warning. @@ -1121,8 +1121,8 @@ type Vector a elements, returning an an `Ordering` if the two elements are comparable or `Nothing` if they are not. If set to `Nothing` (the default argument), `Ordering.compare _ _` method will be used. - - on_incomparable: A `Problem_Behavior` specifying what should happen if - two incomparable values are encountered. + - on_problems: A `Problem_Behavior` specifying what should happen if two + incomparable values are encountered. By default, elements are sorted in ascending order. @@ -1157,10 +1157,9 @@ type Vector a for the default comparator, which is always the first group. Additionally, an `Incomparable_Values` dataflow error will be returned - if the `on_incomparable` parameter is set to `Problem_Behavior.Report_Error`, - or a warning attached if the `on_incomparable` parameter is set to - `Problem_Behavior.Report_Warning` in case of encountering incomparable - values. + if the `on_problems` parameter is set to `Report_Error`, or a warning + attached if the `on_problems` parameter is set to `Report_Warning` in + case of encountering incomparable values. It takes equal advantage of ascending and descending runs in the array, making it much simpler to merge two or more sorted arrays: simply @@ -1183,8 +1182,8 @@ type Vector a [My_Type.Value 'hello', 1].sort == [1, My_Type.Value 'hello'] sort : Sort_Direction -> (Any -> Any)|Nothing -> (Any -> Any -> (Ordering|Nothing))|Nothing -> Problem_Behavior -> Vector Any ! 
Incomparable_Values - sort self (order = Sort_Direction.Ascending) on=Nothing by=Nothing on_incomparable:Problem_Behavior=..Ignore = - Array_Like_Helpers.sort self order on by on_incomparable + sort self (order = Sort_Direction.Ascending) on=Nothing by=Nothing on_problems:Problem_Behavior=..Ignore = + Array_Like_Helpers.sort self order on by on_problems ## ALIAS deduplicate, unique GROUP Selections diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Problem_Behavior.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Problem_Behavior.enso index 07b05d6c165a..262a8bdd4759 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Problem_Behavior.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Problem_Behavior.enso @@ -148,11 +148,3 @@ type Problem_Behavior warnings = Warning.get_all result . map .value cleared_result = Warning.set result [] self.attach_problems_after cleared_result warnings - - ## PRIVATE - Returns a mapping of Problem_Behavior constructors to an integer. - Used for sending the number to Java, rather than sending the atom. - to_number self = case self of - Problem_Behavior.Ignore -> 0 - Problem_Behavior.Report_Warning -> 1 - Problem_Behavior.Report_Error -> 2 diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso index 739402ce07fd..12f640ec5f18 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso @@ -148,12 +148,20 @@ check_start_valid start length function = if used_start < 0 || used_start > length then Error.throw (Index_Out_Of_Bounds.Error start length+1) else function used_start -sort vector order on by on_incomparable = +sort vector order on by on_problems:Problem_Behavior = comps = case on == Nothing of True -> vector.map it-> Comparable.from it False -> vector.map it-> Comparable.from (on it) compare_funcs = comps.map (it-> it.compare) - vector.sort_builtin order.to_sign comps compare_funcs by on on_incomparable.to_number + vector.sort_builtin order.to_sign comps compare_funcs by on (on_problems_to_number on_problems) + +## PRIVATE + Returns a mapping of Problem_Behavior constructors to an integer. + Used for sending the number to Java, rather than sending the atom. 
+private on_problems_to_number on_problems:Problem_Behavior = case on_problems of + Problem_Behavior.Ignore -> 0 + Problem_Behavior.Report_Warning -> 1 + Problem_Behavior.Report_Error -> 2 distinct vector on = Vector.build builder-> diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso index f1ab8b3384d3..6cef76e08b4e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso @@ -101,7 +101,6 @@ from project.Data.Range.Extensions export all from project.Data.Statistics.Extensions export all from project.Data.Text.Extensions export all from project.Data.Text.Regex export regex -from project.Errors.Problem_Behavior.Problem_Behavior export all from project.Function export all from project.Meta.Enso_Project export enso_project from project.Network.Extensions export all diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso index 75af674fe84a..282ab7e42d57 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso @@ -1405,7 +1405,7 @@ type DB_Table their layout in memory. Database tables may not impose a deterministic ordering. If the table defines a primary key, it is used to by default to ensure deterministic ordering. That can be overridden by specifying - a different ordering using `Table.order_by`. If no primary key was + a different ordering using `Table.sort`. If no primary key was defined nor any ordering was specified explicitly by the user, the order of columns is undefined and the operation will fail, reporting a `Undefined_Column_Order` problem and returning an empty table. @@ -1526,7 +1526,7 @@ type DB_Table their layout in memory. Database tables may not impose a deterministic ordering. If the table defines a primary key, it is used to by default to ensure deterministic ordering. That can be overridden by specifying - a different ordering using `Table.order_by`. If no primary key was + a different ordering using `Table.sort`. If no primary key was defined nor any ordering was specified explicitly by the user, the order of columns is undefined and the operation will fail, reporting a `Undefined_Column_Order` problem and returning an empty table. @@ -1729,7 +1729,7 @@ type DB_Table their layout in memory. Database tables may not impose a deterministic ordering. If the table defines a primary key, it is used to by default to ensure deterministic ordering. That can be overridden by specifying - a different ordering using `Table.order_by`. If no primary key was + a different ordering using `Table.sort`. If no primary key was defined nor any ordering was specified explicitly by the user, the order of columns is undefined and the operation will fail, reporting a `Undefined_Column_Order` problem and returning an empty table. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso index 145f8a6fb927..9f4bc83f7283 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso @@ -2329,7 +2329,7 @@ type Table their layout in memory. Database tables may not impose a deterministic ordering. If the table defines a primary key, it is used to by default to ensure deterministic ordering. 
That can be overridden by specifying - a different ordering using `Table.order_by`. If no primary key was + a different ordering using `Table.sort`. If no primary key was defined nor any ordering was specified explicitly by the user, the order of columns is undefined and the operation will fail, reporting a `Undefined_Column_Order` problem and returning an empty table. @@ -3177,7 +3177,7 @@ concat_columns column_set all_tables result_type result_row_count needs_cast on_ storage_builder.appendNulls null_row_count _ : Integer -> column = parent_table.at i - converted = if needs_cast then column.cast result_type on_problems=Report_Error else column + converted = if needs_cast then column.cast result_type on_problems=..Report_Error else column storage = converted.java_column.getStorage storage_builder.appendBulkStorage storage sealed_storage = storage_builder.seal diff --git a/engine/runtime-parser/src/test/java/org/enso/compiler/core/EnsoParserTest.java b/engine/runtime-parser/src/test/java/org/enso/compiler/core/EnsoParserTest.java index 781f51a5d443..249151bec5e2 100644 --- a/engine/runtime-parser/src/test/java/org/enso/compiler/core/EnsoParserTest.java +++ b/engine/runtime-parser/src/test/java/org/enso/compiler/core/EnsoParserTest.java @@ -1188,7 +1188,7 @@ public void testShouldQuoteValuesContainingTheCommentSymbol() { suite = Test.specify "should quote values containing the comment symbol if comments are enabled" <| format = Delimited_Format.Delimited ',' . with_comments - table.write file format on_problems=Report_Error . should_succeed + table.write file format on_problems=..Report_Error . should_succeed expected_text_2 = normalize_lines <| \""" "#",B b, diff --git a/test/Base_Tests/src/Data/Ordering_Spec.enso b/test/Base_Tests/src/Data/Ordering_Spec.enso index 2a1b1434acd8..382d1f98ee1e 100644 --- a/test/Base_Tests/src/Data/Ordering_Spec.enso +++ b/test/Base_Tests/src/Data/Ordering_Spec.enso @@ -173,8 +173,8 @@ add_specs suite_builder = expect_no_warns [[1]].sort group_builder.specify "should produce warnings when sorting nested vectors" <| - expect_incomparable_warn [1] [2] <| [[1], [2]].sort on_incomparable=Problem_Behavior.Report_Warning . should_equal [[1], [2]] - expect_incomparable_warn [2] [1] <| [[2], [1]].sort on_incomparable=Problem_Behavior.Report_Warning . should_equal [[2], [1]] + expect_incomparable_warn [1] [2] <| [[1], [2]].sort on_problems=..Report_Warning . should_equal [[1], [2]] + expect_incomparable_warn [2] [1] <| [[2], [1]].sort on_problems=..Report_Warning . should_equal [[2], [1]] group_builder.specify "should be able to sort primitive values in atoms" pending=topo_sort_pending <| [Ord.Value Nothing, Ord.Value 20, Ord.Value 10].sort . should_equal [Ord.Value 10, Ord.Value 20, Ord.Value Nothing] @@ -186,13 +186,13 @@ add_specs suite_builder = [(UPair.Value 1 2), (UPair.Value 3 4)].sort . 
should_fail_with Incomparable_Values group_builder.specify "should attach warning when trying to sort incomparable values" <| - expect_incomparable_warn Nothing Number.nan <| [Nothing, Number.nan].sort on_incomparable=Problem_Behavior.Report_Warning - expect_incomparable_warn 1 "hello" <| [1, "hello"].sort on_incomparable=Problem_Behavior.Report_Warning + expect_incomparable_warn Nothing Number.nan <| [Nothing, Number.nan].sort on_problems=..Report_Warning + expect_incomparable_warn 1 "hello" <| [1, "hello"].sort on_problems=..Report_Warning group_builder.specify "should respect previous warnings on a vector" <| Problems.expect_warning "my_warn" <| (Warning.attach "my_warn" [3, 2]) . sort Problems.expect_warning "my_warn" <| (Warning.attach "my_warn" [3, Number.nan]) . sort - expect_incomparable_warn 3 Number.nan <| (Warning.attach "my_warn" [3, Number.nan]) . sort on_incomparable=Problem_Behavior.Report_Warning + expect_incomparable_warn 3 Number.nan <| (Warning.attach "my_warn" [3, Number.nan]) . sort on_problems=..Report_Warning group_builder.specify "should respect previous warnings on vectors" pending="https://github.com/enso-org/enso/issues/6070" <| Problems.expect_warning "my_warn" <| [3, Warning.attach "my_warn" 2].sort @@ -216,13 +216,13 @@ add_specs suite_builder = group_builder.specify "should produce warning when sorting types with different comparators" <| [Ord.Value 1, 1].sort . should_equal [1, Ord.Value 1] - sorted = [Ord.Value 1, 1].sort on_incomparable=Problem_Behavior.Report_Warning + sorted = [Ord.Value 1, 1].sort on_problems=..Report_Warning Warning.get_all sorted . at 0 . value . starts_with "Different comparators" . should_be_true group_builder.specify "should merge groups of values with custom comparators based on the comparators FQN" <| [Ord.Value 1, My_Type.Value 1].sort . should_equal [My_Type.Value 1, Ord.Value 1] [My_Type.Value 1, Ord.Value 1].sort . should_equal [My_Type.Value 1, Ord.Value 1] - sorted = [Ord.Value 1, My_Type.Value 1].sort on_incomparable=Problem_Behavior.Report_Warning + sorted = [Ord.Value 1, My_Type.Value 1].sort on_problems=..Report_Warning Warning.get_all sorted . at 0 . value . starts_with "Different comparators" . should_be_true group_builder.specify "should be stable when sorting values with different comparators" <| diff --git a/test/Base_Tests/src/Data/Text/Encoding_Spec.enso b/test/Base_Tests/src/Data/Text/Encoding_Spec.enso index fc20b087a373..16e14c904deb 100644 --- a/test/Base_Tests/src/Data/Text/Encoding_Spec.enso +++ b/test/Base_Tests/src/Data/Text/Encoding_Spec.enso @@ -126,7 +126,7 @@ add_specs suite_builder = This is based on assumption that it is very unlikely for a valid Windows-1252 encoded file to start with characters `` or `ÿþ` (the Win-1252 representations of UTF-8 and UTF-16 BOMs). bytes1 = [-17, -69, -65] + [-30, -55, -1] - r1 = Text.from_bytes bytes1 Encoding.default Problem_Behavior.Report_Warning + r1 = Text.from_bytes bytes1 Encoding.default ..Report_Warning r1.should_equal "���" # We've got 3 characters that failed to decode. The BOM is stripped, so it is not counted. r1.length . should_equal 3 @@ -135,7 +135,7 @@ add_specs suite_builder = w1.to_display_text . should_contain "BOM" bytes2 = [-2, -1] + [0, 65, -1] - r2 = Text.from_bytes bytes2 Encoding.default Problem_Behavior.Report_Warning + r2 = Text.from_bytes bytes2 Encoding.default ..Report_Warning r2.should_equal "A�" # We have 1 correct character (A), one invalid character (odd number of bytes). The BOM is not counted. r2.length . 
should_equal 2 @@ -147,7 +147,7 @@ add_specs suite_builder = empty.should_equal "" Problems.assume_no_problems empty - txt = Text.from_bytes [-1] Encoding.default Problem_Behavior.Report_Warning + txt = Text.from_bytes [-1] Encoding.default ..Report_Warning txt.should_equal 'ÿ' # No problems, as falling back to Windows-1252. Problems.assume_no_problems txt @@ -236,7 +236,7 @@ add_specs suite_builder = ## A UTF-16 BOM representation does not make sense in UTF-8, so we can detect that situation and report it. group_builder.specify "should report a clearer error when UTF-16 BOM is encountered" <| bytes = [-2, -1] + [0, 65, 1, 5, 1, 25] - as_utf = Text.from_bytes bytes Encoding.utf_8 Problem_Behavior.Report_Warning + as_utf = Text.from_bytes bytes Encoding.utf_8 ..Report_Warning warnings = Problems.get_attached_warnings as_utf . map .to_display_text warnings.find (t-> t.contains "BOM") . should_succeed @@ -246,14 +246,14 @@ add_specs suite_builder = empty.should_equal "" Problems.assume_no_problems empty - txt = Text.from_bytes [-1] Encoding.utf_8 Problem_Behavior.Report_Warning + txt = Text.from_bytes [-1] Encoding.utf_8 ..Report_Warning txt.should_equal '\ufffd' w = Problems.expect_only_warning Encoding_Error txt w.to_display_text . should_contain "Failed to decode 1 code units (at positions: 0)." group_builder.specify "should report only a few example positions if many errors are encountered" <| bytes = Vector.fill 10000 -1 - txt = Text.from_bytes bytes Encoding.utf_8 Problem_Behavior.Report_Warning + txt = Text.from_bytes bytes Encoding.utf_8 ..Report_Warning txt.length . should_equal 10000 w = Problems.expect_only_warning Encoding_Error txt w.to_display_text . should_contain "Failed to decode 10000 code units" @@ -281,7 +281,7 @@ add_specs suite_builder = group_builder.specify "should warn if an inverted BOM is encountered" <| bytes = [-1, -2] + [0, 65, 1, 5, 1, 25] - txt = Text.from_bytes bytes Encoding.utf_16_be Problem_Behavior.Report_Warning + txt = Text.from_bytes bytes Encoding.utf_16_be ..Report_Warning txt.should_equal "￾Aąę" w = Problems.expect_only_warning Encoding_Error txt w.to_display_text . should_contain "BOM" @@ -291,7 +291,7 @@ add_specs suite_builder = empty.should_equal "" Problems.assume_no_problems empty - txt = Text.from_bytes [-1] Encoding.utf_16_be Problem_Behavior.Report_Warning + txt = Text.from_bytes [-1] Encoding.utf_16_be ..Report_Warning w = Problems.expect_only_warning Encoding_Error txt w.to_display_text . should_contain "Failed to decode 1 code units (at positions: 0)." @@ -317,7 +317,7 @@ add_specs suite_builder = # We cannot warn on UTF-8 BOM because it actually represents valid text: [-17, -69, -65] + [65] decoded as UTF-16 LE is "믯䆿". group_builder.specify "should warn if an inverted BOM is encountered" <| bytes = [-2, -1] + [0, 65, 1, 5, 1, 25] - txt = Text.from_bytes bytes Encoding.utf_16_le Problem_Behavior.Report_Warning + txt = Text.from_bytes bytes Encoding.utf_16_le ..Report_Warning txt.should_equal "￾䄀ԁᤁ" w = Problems.expect_only_warning Encoding_Error txt w.to_display_text . should_contain "BOM" @@ -327,7 +327,7 @@ add_specs suite_builder = empty.should_equal "" Problems.assume_no_problems empty - txt = Text.from_bytes [-1] Encoding.utf_16_le Problem_Behavior.Report_Warning + txt = Text.from_bytes [-1] Encoding.utf_16_le ..Report_Warning w = Problems.expect_only_warning Encoding_Error txt w.to_display_text . should_contain "Failed to decode 1 code units (at positions: 0)." 
diff --git a/test/Base_Tests/src/Data/Vector_Spec.enso b/test/Base_Tests/src/Data/Vector_Spec.enso index 33e8f692e4cd..407d18961383 100644 --- a/test/Base_Tests/src/Data/Vector_Spec.enso +++ b/test/Base_Tests/src/Data/Vector_Spec.enso @@ -747,9 +747,9 @@ type_spec suite_builder name alter = suite_builder.group name group_builder-> small_expected = [T.Value -20 0, T.Value -1 1, T.Value -1 10, T.Value 1 8, T.Value 1 3, T.Value 4 0] small_vec.sort . should_equal small_expected - group_builder.specify "should fail the sort if Report_Error problem_behavior specified" <| - alter [T.Value 1 8, Nothing] . sort on_incomparable=Problem_Behavior.Report_Error . should_fail_with Incomparable_Values - alter [Nothing, Number.nan] . sort on_incomparable=Problem_Behavior.Report_Error . should_fail_with Incomparable_Values + group_builder.specify "should fail the sort if `Report_Error` problem_behavior specified" <| + alter [T.Value 1 8, Nothing] . sort on_problems=..Report_Error . should_fail_with Incomparable_Values + alter [Nothing, Number.nan] . sort on_problems=..Report_Error . should_fail_with Incomparable_Values group_builder.specify "should be able to use a custom element projection" <| small_vec = alter [T.Value 1 8, T.Value 1 3, T.Value -20 0, T.Value -1 1, T.Value -1 10, T.Value 4 0] @@ -913,7 +913,7 @@ type_spec suite_builder name alter = suite_builder.group name group_builder-> 8 -> Time_Of_Day.new _ -> x input = 0.up_to 500 . map gen - sorted = input.sort on_incomparable=Problem_Behavior.Report_Warning + sorted = input.sort on_problems=..Report_Warning Warning.get_all sorted . length . should_equal 10 Warning.limit_reached sorted . should_equal True @@ -954,29 +954,29 @@ type_spec suite_builder name alter = suite_builder.group name group_builder-> group_builder.specify "nested Map_Error indicies should be in the correct order" <| nested_vector = [[10, 20, 30, 40], [30, 10, 20, 30]] expected_warnings = [(Map_Error.Error 1 (Map_Error.Error 3 (My_Error.Error 30))), (Map_Error.Error 1 (Map_Error.Error 0 (My_Error.Error 30))), (Map_Error.Error 0 (Map_Error.Error 2 (My_Error.Error 30)))] - Warning.get_all wrap_errors=True (nested_vector.map (_.map map_fun on_problems=Problem_Behavior.Report_Warning) on_problems=Problem_Behavior.Report_Warning) . map .value . should_equal expected_warnings + Warning.get_all wrap_errors=True (nested_vector.map (_.map map_fun on_problems=..Report_Warning) on_problems=..Report_Warning) . map .value . should_equal expected_warnings group_builder.specify "a Nothing thrown inside map should be caught as Nothing" <| map_fun a = if a == 30 then Error.throw Nothing else a+1 alter [10, 20, 30, 40] . map map_fun . should_fail_with (Map_Error.Error 2 Nothing) unwrap_errors=False - group_builder.specify "an error thrown inside map with on_problems=Problem_Behavior.Ignore should be ignored" <| - results = alter [10, 20, 30, 40] . map map_fun on_problems=Problem_Behavior.Ignore + group_builder.specify "an error thrown inside map with on_problems=..Ignore should be ignored" <| + results = alter [10, 20, 30, 40] . map map_fun on_problems=..Ignore results . should_equal [11, 21, Nothing, 41] Warning.get_all results wrap_errors=True . should_equal [] - group_builder.specify "an error thrown inside map with on_problems=Problem_Behavior.Report_Warning should be attached as a warning" <| - result = alter [10, 20, 30, 40] . 
map map_fun on_problems=Problem_Behavior.Report_Warning + group_builder.specify "an error thrown inside map with on_problems=..Report_Warning should be attached as a warning" <| + result = alter [10, 20, 30, 40] . map map_fun on_problems=..Report_Warning result . should_equal [11, 21, Nothing, 41] Warning.get_all result wrap_errors=True . map .value . should_equal [Map_Error.Error 2 (My_Error.Error 30)] - group_builder.specify "multiple errors thrown inside map with on_problems=Problem_Behavior.Report_Warning should be attached as warnings" <| - result = alter [10, 30, 20, 30, 40, 30] . map map_fun on_problems=Problem_Behavior.Report_Warning + group_builder.specify "multiple errors thrown inside map with on_problems=..Report_Warning should be attached as warnings" <| + result = alter [10, 30, 20, 30, 40, 30] . map map_fun on_problems=..Report_Warning result . should_equal [11, Nothing, 21, Nothing, 41, Nothing] Warning.get_all result wrap_errors=True . map .value . should_equal ([5, 3, 1].map i-> Map_Error.Error i (My_Error.Error 30)) - group_builder.specify "many errors thrown inside map with on_problems=Problem_Behavior.Report_Warning should be attached as warnings and an Additional_Warnings" <| - result = alter ([10, 20] + (Vector.fill 12 30) + [40]) . map map_fun on_problems=Problem_Behavior.Report_Warning + group_builder.specify "many errors thrown inside map with on_problems=..Report_Warning should be attached as warnings and an Additional_Warnings" <| + result = alter ([10, 20] + (Vector.fill 12 30) + [40]) . map map_fun on_problems=..Report_Warning result . should_equal ([11, 21] + (Vector.fill 12 Nothing) + [41]) expected_warnings_regular = 11.down_to 1 . map i-> Map_Error.Error i (My_Error.Error 30) expected_warnings = expected_warnings_regular + [(Additional_Warnings.Error 2)] diff --git a/test/Base_Tests/src/System/File_Spec.enso b/test/Base_Tests/src/System/File_Spec.enso index cb5b15b77bf7..a85231799b0c 100644 --- a/test/Base_Tests/src/System/File_Spec.enso +++ b/test/Base_Tests/src/System/File_Spec.enso @@ -517,8 +517,8 @@ add_specs suite_builder = group_builder.specify "should allow appending text to a file" <| f = transient / "work.txt" f.delete_if_exists - "line 1!".write f on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed . should_equal f - '\nline 2!'.write f on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed . should_equal f + "line 1!".write f on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed . should_equal f + '\nline 2!'.write f on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed . should_equal f f.read_text.should_equal 'line 1!\nline 2!' f.delete f.exists.should_be_false @@ -526,10 +526,10 @@ add_specs suite_builder = group_builder.specify "should perform a dry run appending text to a file if Context.Output is disabled" <| f = transient / "work.txt" f.delete_if_exists - "line 1!".write f on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed . should_equal f + "line 1!".write f on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed . 
should_equal f Context.Output.with_disabled <| - r = '\nline 2!'.write f on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error + r = '\nline 2!'.write f on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error Problems.expect_only_warning Dry_Run_Operation r r.exists.should_be_true r.read_text.should_equal 'line 1!\nline 2!' @@ -542,11 +542,11 @@ add_specs suite_builder = f.delete_if_exists Context.Output.with_disabled <| - r = "line 1!".write f on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error + r = "line 1!".write f on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error Problems.expect_only_warning Dry_Run_Operation r r.exists.should_be_true - s = '\nline 2!'.write f on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error + s = '\nline 2!'.write f on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error Problems.expect_only_warning Dry_Run_Operation s s.exists.should_be_true @@ -565,12 +565,12 @@ add_specs suite_builder = f.delete_if_exists Context.Output.with_disabled <| - dry_run_file = "line 1!".write f on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error + dry_run_file = "line 1!".write f on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error Problems.expect_only_warning Dry_Run_Operation dry_run_file dry_run_file.exists.should_be_true # Contrary to example above, we write to the returned file, not just `f`. - s = '\nline 2!'.write dry_run_file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error + s = '\nline 2!'.write dry_run_file on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error Problems.expect_only_warning Dry_Run_Operation s s.exists.should_be_true @@ -587,10 +587,10 @@ add_specs suite_builder = f = transient / "work.txt" f.delete_if_exists f.exists.should_be_false - "line 1!".write f on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed . should_equal f + "line 1!".write f on_existing_file=Existing_File_Behavior.Overwrite on_problems=..Report_Error . should_succeed . should_equal f f.exists.should_be_true f.read_text.should_equal "line 1!" - "line 2!".write f on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed . should_equal f + "line 2!".write f on_existing_file=Existing_File_Behavior.Overwrite on_problems=..Report_Error . should_succeed . should_equal f f.read_text.should_equal "line 2!" f.delete f.exists.should_be_false @@ -599,7 +599,7 @@ add_specs suite_builder = f = transient / "work.txt" f.delete_if_exists f.exists.should_be_false - "line 1!".write f on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed . should_equal f + "line 1!".write f on_existing_file=Existing_File_Behavior.Overwrite on_problems=..Report_Error . should_succeed . should_equal f f.exists.should_be_true r = Context.Output.with_disabled <| @@ -615,7 +615,7 @@ add_specs suite_builder = f = transient / "work.txt" f.delete_if_exists f.exists.should_be_false - "line 1!".write f on_existing_file=Existing_File_Behavior.Error on_problems=Report_Error . should_succeed . should_equal f + "line 1!".write f on_existing_file=Existing_File_Behavior.Error on_problems=..Report_Error . should_succeed . should_equal f f.exists.should_be_true f.read_text.should_equal "line 1!" 
@@ -637,7 +637,7 @@ add_specs suite_builder = f = transient / "work.txt" f.delete_if_exists f.exists.should_be_false - "line 1!".write f on_problems=Report_Error . should_succeed . should_equal f + "line 1!".write f on_problems=..Report_Error . should_succeed . should_equal f if f.exists.not then Test.fail "The file should have been created." f.read_text.should_equal "line 1!" @@ -655,7 +655,7 @@ add_specs suite_builder = "new content".write n on_existing_file=Existing_File_Behavior.Overwrite n3.delete_if_exists - "line 2!".write f on_problems=Report_Error . should_succeed . should_equal f + "line 2!".write f on_problems=..Report_Error . should_succeed . should_equal f f.read_text.should_equal "line 2!" bak.read_text.should_equal "line 1!" if n3.exists then @@ -668,7 +668,7 @@ add_specs suite_builder = f = transient / "work.txt" f.delete_if_exists f.exists.should_be_false - "line 1!".write f on_problems=Report_Error . should_succeed . should_equal f + "line 1!".write f on_problems=..Report_Error . should_succeed . should_equal f r = Context.Output.with_disabled <| s = "New Content!".write f on_existing_file=Existing_File_Behavior.Backup @@ -843,7 +843,7 @@ add_specs suite_builder = f.delete_if_exists "Initial Content".write f on_existing_file=Existing_File_Behavior.Overwrite big_content = "Hello " * 2000 + input_content - r2 = big_content.write f encoding on_problems=Problem_Behavior.Report_Error + r2 = big_content.write f encoding on_problems=..Report_Error r2.should_fail_with Encoding_Error f.read . should_equal "Initial Content" f.delete diff --git a/test/Base_Tests/src/System/Reporting_Stream_Decoder_Spec.enso b/test/Base_Tests/src/System/Reporting_Stream_Decoder_Spec.enso index 907ab95b2710..4d7a8b50ac6a 100644 --- a/test/Base_Tests/src/System/Reporting_Stream_Decoder_Spec.enso +++ b/test/Base_Tests/src/System/Reporting_Stream_Decoder_Spec.enso @@ -40,7 +40,7 @@ add_specs suite_builder = result = f.with_input_stream [File_Access.Read] stream-> available_bytes_counts = Vector.build available_bytes_counter-> - stream.with_stream_decoder Encoding.utf_8 Problem_Behavior.Report_Error decoder-> + stream.with_stream_decoder Encoding.utf_8 ..Report_Error decoder-> read_chars decoder 1 . should_equal "H".codepoints read_chars decoder 2 . should_equal "el".codepoints read_chars decoder 3 . should_equal "lo ".codepoints @@ -93,12 +93,12 @@ add_specs suite_builder = encoding = Encoding.ascii expected_contents = 'Hello World! $\uFFFD\uFFFD\uFFFD' expected_problems = [Encoding_Error.Error "Failed to decode 3 code units (at positions: 14, 15, 16)."] - contents_1 = read_file_one_by_one windows_file encoding expected_contents.length on_problems=Problem_Behavior.Report_Warning + contents_1 = read_file_one_by_one windows_file encoding expected_contents.length on_problems=..Report_Warning contents_1.should_equal expected_contents Problems.get_attached_warnings contents_1 . 
should_equal expected_problems contents_2 = windows_file.with_input_stream [File_Access.Read] stream-> - stream.with_stream_decoder encoding Problem_Behavior.Report_Warning reporting_stream_decoder-> + stream.with_stream_decoder encoding ..Report_Warning reporting_stream_decoder-> codepoint_1 = reporting_stream_decoder.read codepoints_1 = read_characters reporting_stream_decoder 5 codepoints_2 = read_characters reporting_stream_decoder 3 @@ -110,10 +110,10 @@ add_specs suite_builder = group_builder.specify "should work correctly if no data is read from it" <| result = windows_file.with_input_stream [File_Access.Read] stream-> - stream.with_stream_decoder Encoding.ascii Problem_Behavior.Report_Error _->Nothing + stream.with_stream_decoder Encoding.ascii ..Report_Error _->Nothing result.should_succeed -read_file_one_by_one file encoding expected_size on_problems=Problem_Behavior.Report_Error = +read_file_one_by_one file encoding expected_size on_problems=..Report_Error = file.with_input_stream [File_Access.Read] stream-> stream.with_stream_decoder encoding on_problems reporting_stream_decoder-> codepoints = 0.up_to expected_size . map _-> diff --git a/test/Base_Tests/src/System/Reporting_Stream_Encoder_Spec.enso b/test/Base_Tests/src/System/Reporting_Stream_Encoder_Spec.enso index 4499e80b318a..c4c379bf1561 100644 --- a/test/Base_Tests/src/System/Reporting_Stream_Encoder_Spec.enso +++ b/test/Base_Tests/src/System/Reporting_Stream_Encoder_Spec.enso @@ -16,7 +16,7 @@ add_specs suite_builder = f.exists.should_be_false contents = 1.up_to 7 . map _->'Cześc\u0301 😎🚀🚧!' . join '\n' f.with_output_stream [File_Access.Write, File_Access.Create_New] stream-> - stream.with_stream_encoder Encoding.utf_8 Problem_Behavior.Report_Error reporting_stream_encoder-> + stream.with_stream_encoder Encoding.utf_8 ..Report_Error reporting_stream_encoder-> contents.char_vector.each char-> reporting_stream_encoder.write char f.read_text.should_equal contents @@ -28,7 +28,7 @@ add_specs suite_builder = encoding = Encoding.utf_16_be big = 1.up_to 7 . map _->'Cześc\u0301 😎🚀🚧!' . join '\n' f.with_output_stream [File_Access.Write, File_Access.Create_New] stream-> - stream.with_stream_encoder encoding Problem_Behavior.Report_Error reporting_stream_encoder-> + stream.with_stream_encoder encoding ..Report_Error reporting_stream_encoder-> reporting_stream_encoder.write "A" reporting_stream_encoder.write "Abc" reporting_stream_encoder.write "Defghi" @@ -48,7 +48,7 @@ add_specs suite_builder = f.delete_if_exists f.with_output_stream [File_Access.Write, File_Access.Create_New] stream-> - stream.with_stream_encoder encoding Problem_Behavior.Report_Error reporting_stream_encoder-> + stream.with_stream_encoder encoding ..Report_Error reporting_stream_encoder-> reporting_stream_encoder.write contents f.read_text encoding . should_equal contents @@ -59,7 +59,7 @@ add_specs suite_builder = contents = 'Sło\u0301wka!' f.delete_if_exists result = f.with_output_stream [File_Access.Write, File_Access.Create_New] stream-> - stream.with_stream_encoder encoding Problem_Behavior.Report_Warning reporting_stream_encoder-> + stream.with_stream_encoder encoding ..Report_Warning reporting_stream_encoder-> reporting_stream_encoder.write contents result.should_succeed Problems.get_attached_warnings result . 
should_equal [Encoding_Error.Error "Encoding issues at codepoints 1, 3."] @@ -67,7 +67,7 @@ add_specs suite_builder = f.delete_if_exists result_2 = f.with_output_stream [File_Access.Write, File_Access.Create_New] stream-> - stream.with_stream_encoder encoding Problem_Behavior.Report_Warning reporting_stream_encoder-> + stream.with_stream_encoder encoding ..Report_Warning reporting_stream_encoder-> reporting_stream_encoder.write "ABC" reporting_stream_encoder.write "ą" reporting_stream_encoder.write "foo" @@ -83,7 +83,7 @@ add_specs suite_builder = encoding = Encoding.ascii f.delete_if_exists result = f.with_output_stream [File_Access.Write, File_Access.Create_New] stream-> - stream.with_stream_encoder encoding Problem_Behavior.Report_Error _->Nothing + stream.with_stream_encoder encoding ..Report_Error _->Nothing result.should_succeed f.read_text encoding . should_equal "" @@ -92,7 +92,7 @@ add_specs suite_builder = encoding = Encoding.ascii f.delete_if_exists result = f.with_output_stream [File_Access.Write, File_Access.Create_New] stream-> - stream.with_stream_encoder encoding Problem_Behavior.Ignore rse-> + stream.with_stream_encoder encoding ..Ignore rse-> rse.write "BAR" Error.throw (Illegal_State.Error "FOO") result.should_fail_with Illegal_State @@ -104,7 +104,7 @@ add_specs suite_builder = encoding = Encoding.ascii f.delete_if_exists result = f.with_output_stream [File_Access.Write, File_Access.Create_New] stream-> - stream.with_stream_encoder encoding Problem_Behavior.Ignore rse-> + stream.with_stream_encoder encoding ..Ignore rse-> rse.write "BAZ" result = Warning.attach "warn:1" <| Warning.attach "warn:2" <| 42 rse.write "23" @@ -119,7 +119,7 @@ add_specs suite_builder = encoding = Encoding.ascii f.delete_if_exists result = f.with_output_stream [File_Access.Write, File_Access.Create_New] stream-> - stream.with_stream_encoder encoding Problem_Behavior.Ignore rse-> + stream.with_stream_encoder encoding ..Ignore rse-> rse.write "BAZ" result = Warning.attach "warn:1" <| Warning.attach "warn:2" <| Nothing rse.write "23" diff --git a/test/Exploratory_Benchmarks/src/Table/Enso_Callback.enso b/test/Exploratory_Benchmarks/src/Table/Enso_Callback.enso index 0f9be2c0772c..7a6dde5fda9d 100644 --- a/test/Exploratory_Benchmarks/src/Table/Enso_Callback.enso +++ b/test/Exploratory_Benchmarks/src/Table/Enso_Callback.enso @@ -25,7 +25,7 @@ type Boxed_Enso_Callback_Test java_roundtrip self = expected_type = Storage.from_value_type_strict Value_Type.Char Column.from_storage "result" <| - Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator-> + Java_Problems.with_problem_aggregator ..Report_Warning java_problem_aggregator-> MapHelpers.mapCallback (Storage.get_storage_for_column self.text_column) self.fn expected_type java_problem_aggregator enso_map_as_vector self convert_polyglot_dates = @@ -67,7 +67,7 @@ type Primitive_Enso_Callback_Test java_roundtrip self = expected_type = Storage.from_value_type_strict Value_Type.Integer Column.from_storage "result" <| - Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator-> + Java_Problems.with_problem_aggregator ..Report_Warning java_problem_aggregator-> MapHelpers.mapCallback (Storage.get_storage_for_column self.int_column) self.fn expected_type java_problem_aggregator enso_map_as_vector_inferred_builder self convert_polyglot_dates = @@ -80,7 +80,7 @@ type Primitive_Enso_Callback_Test mapped = vector_proxy.map self.fn # No expected storage will use inferred builder. 
expected_storage_type = Storage.from_value_type_strict Value_Type.Integer - java_column = Java_Problems.with_problem_aggregator Problem_Behavior.Report_Error java_problem_aggregator-> + java_column = Java_Problems.with_problem_aggregator ..Report_Error java_problem_aggregator-> Java_Column.fromItemsNoDateConversion "result" mapped expected_storage_type java_problem_aggregator Column.from_storage java_column.getName java_column.getStorage diff --git a/test/Exploratory_Benchmarks/src/Table/Helpers.enso b/test/Exploratory_Benchmarks/src/Table/Helpers.enso index 213f06222332..a5f5fe8980a6 100644 --- a/test/Exploratory_Benchmarks/src/Table/Helpers.enso +++ b/test/Exploratory_Benchmarks/src/Table/Helpers.enso @@ -15,7 +15,7 @@ column_from_vector : Text -> Vector -> Boolean -> Column column_from_vector name items convert_polyglot_dates = expected_storage_type = Nothing Illegal_Argument.handle_java_exception <| - Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator-> + Java_Problems.with_problem_aggregator ..Report_Warning java_problem_aggregator-> java_column = case convert_polyglot_dates of True -> Java_Column.fromItems name items expected_storage_type java_problem_aggregator diff --git a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso index a57a7ef10655..dfb371175da2 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso @@ -46,7 +46,7 @@ add_specs suite_builder setup = t2.at "Y" . to_vector . should_equal [1, 2] Problems.expect_warning Duplicate_Output_Column_Names t2 - r3 = t1.add_row_number name="X" on_problems=Problem_Behavior.Report_Error + r3 = t1.add_row_number name="X" on_problems=..Report_Error r3.should_fail_with Duplicate_Output_Column_Names group_builder.specify "should allow to order the row numbers by some columns" <| @@ -73,7 +73,7 @@ add_specs suite_builder setup = Problems.expect_warning Floating_Point_Equality t1 t1.at "Row" . to_vector . should_equal [1, 1, 2, 1, 2] - r2 = t.add_row_number group_by=["X"] order_by=["row_id"] on_problems=Problem_Behavior.Report_Error + r2 = t.add_row_number group_by=["X"] order_by=["row_id"] on_problems=..Report_Error r2.should_fail_with Floating_Point_Equality t3 = t.add_row_number order_by=["X"] |> materialize |> _.sort "row_id" diff --git a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso index 7592192ff9a2..08da210f1ca2 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso @@ -1454,9 +1454,9 @@ add_specs suite_builder setup = problems1 = [Invalid_Aggregate_Column.Error "[MISSING]*[MISSING]" (No_Such_Column.Error "MISSING")] Problems.test_problem_handling action1 problems1 tester1 - t2 = data.table.aggregate ["Index"] [Sum "Value", Sum (expr "[[[")] on_problems=Problem_Behavior.Ignore + t2 = data.table.aggregate ["Index"] [Sum "Value", Sum (expr "[[[")] on_problems=..Ignore expect_column_names ["Index", "Sum Value"] t2 - err3 = data.table.aggregate ["Index"] [Sum "Value", Sum (expr "[[[")] on_problems=Problem_Behavior.Report_Error + err3 = data.table.aggregate ["Index"] [Sum "Value", Sum (expr "[[[")] on_problems=..Report_Error err3.should_fail_with Invalid_Aggregate_Column err3.catch.name . 
should_equal "[[[" err3.catch.expression_error . should_be_a Expression_Error.Syntax_Error @@ -1495,11 +1495,11 @@ add_specs suite_builder setup = t3.catch.criteria.should_equal [42] group_builder.specify "should raise a warning when can't find a column by name, but a hard error if the missing column is in group_by" <| - err1 = table.aggregate ["Missing", "Index", "Other_Missing"] on_problems=Problem_Behavior.Ignore + err1 = table.aggregate ["Missing", "Index", "Other_Missing"] on_problems=..Ignore err1.should_fail_with Invalid_Aggregate_Column err1.catch.name . should_equal "Missing" - t1 = table.aggregate ["Index"] [Sum "Value", Sum "Missing"] on_problems=Problem_Behavior.Report_Warning + t1 = table.aggregate ["Index"] [Sum "Value", Sum "Missing"] on_problems=..Report_Warning t1.column_names . should_equal ["Index", "Sum Value"] warnings = Problems.get_attached_warnings t1 warnings.not_empty . should_be_true @@ -1508,15 +1508,15 @@ add_specs suite_builder setup = ## Even if there are missing columns both for group-by and aggregations, the groupby errors are reported separately. - err2 = table.aggregate ["Index", "Unknown", "Other Missing"] [Sum "Value", Sum "Missing"] on_problems=Problem_Behavior.Report_Error + err2 = table.aggregate ["Index", "Unknown", "Other Missing"] [Sum "Value", Sum "Missing"] on_problems=..Report_Error err2.should_fail_with Invalid_Aggregate_Column err2.catch.name.should_equal "Unknown" - err3 = table.aggregate ["Index"] [Sum "Value", Sum "Missing"] on_problems=Problem_Behavior.Ignore error_on_missing_columns=True + err3 = table.aggregate ["Index"] [Sum "Value", Sum "Missing"] on_problems=..Ignore error_on_missing_columns=True err3.should_fail_with Invalid_Aggregate_Column err3.catch.name.should_equal "Missing" - err4 = table.aggregate [100, "Index", -42] on_problems=Problem_Behavior.Ignore + err4 = table.aggregate [100, "Index", -42] on_problems=..Ignore err4.should_fail_with Missing_Input_Columns err4.catch.criteria.should_equal [100, -42] @@ -1526,11 +1526,11 @@ add_specs suite_builder setup = Problems.test_problem_handling action2 problems2 tester2 # As above, missing errors from group-by take precedence over aggregates. - err5 = table.aggregate ["Index", 55, -33] [Sum "Value", Sum 144] on_problems=Problem_Behavior.Report_Error + err5 = table.aggregate ["Index", 55, -33] [Sum "Value", Sum 144] on_problems=..Report_Error err5.should_fail_with Missing_Input_Columns err5.catch.criteria.should_equal [55, -33] - err6 = table.aggregate ["Index"] [Sum "Value", Sum 42] on_problems=Problem_Behavior.Ignore error_on_missing_columns=True + err6 = table.aggregate ["Index"] [Sum "Value", Sum 42] on_problems=..Ignore error_on_missing_columns=True err6.catch . 
should_equal (Missing_Input_Columns.Error [42]) group_builder.specify "should raise a warning when a duplicate column name" <| @@ -1590,7 +1590,7 @@ add_specs suite_builder setup = False -> group_builder.specify "should error if unsupported operations are selected" <| t1 = table_builder [["X", [1.5, 2.0, 1.5, 1.0]]] - t2 = t1.aggregate columns=[Mode "X"] on_problems=Problem_Behavior.Ignore + t2 = t1.aggregate columns=[Mode "X"] on_problems=..Ignore t2.should_fail_with No_Output_Columns group_builder.specify "should check types" <| @@ -1643,7 +1643,7 @@ add_specs suite_builder setup = group_builder.specify "should not fail if trying concatenate unquoted delimiters with no separator" <| column = Concatenate "Text" separator="" t = table_builder [["Text", ["A", "BC", "def"]]] - result = t.aggregate columns=[column] on_problems=Report_Error + result = t.aggregate columns=[column] on_problems=..Report_Error Problems.assume_no_problems result result.column_names . should_equal ["Concatenate Text"] result.at "Concatenate Text" . to_vector . should_equal ["ABCdef"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso index 3aa38b13fecd..4fbd71a8c387 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso @@ -190,7 +190,7 @@ add_specs suite_builder setup = w2.affected_rows_count . should_equal 1 w2.to_display_text . should_contain "out of the range of the target type" - r3 = t2.at "X" . cast (Value_Type.Integer Bits.Bits_16) on_problems=Problem_Behavior.Report_Error + r3 = t2.at "X" . cast (Value_Type.Integer Bits.Bits_16) on_problems=..Report_Error r3.should_fail_with Conversion_Failure # Now converting the 16-bit column `c` into 32 bits. diff --git a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso index eb99f8077d06..aa96cab90e3e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso @@ -203,15 +203,15 @@ add_specs suite_builder setup = group_builder.specify "should forward expression problems" <| t1 = table_builder [["X", [1.5, 2.0, 0.0]]] - r1 = t1.set (expr "([X] == 2) || ([X] + 0.5 == 2)") on_problems=Problem_Behavior.Ignore + r1 = t1.set (expr "([X] == 2) || ([X] + 0.5 == 2)") on_problems=..Ignore Problems.assume_no_problems r1 r1.at -1 . to_vector . should_equal [True, True, False] - r2 = t1.set (expr "([X] == 2) || ([X] + 0.5 == 2)") on_problems=Problem_Behavior.Report_Warning + r2 = t1.set (expr "([X] == 2) || ([X] + 0.5 == 2)") on_problems=..Report_Warning Problems.expect_warning Floating_Point_Equality r2 r2.at -1 . to_vector . should_equal [True, True, False] - err3 = t1.set (expr "([X] == 2) || ([X] + 0.5 == 2)") on_problems=Problem_Behavior.Report_Error + err3 = t1.set (expr "([X] == 2) || ([X] + 0.5 == 2)") on_problems=..Report_Error err3.should_fail_with Floating_Point_Equality # These errors currently only work in in-memory. diff --git a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso index 425bcdfa61d4..8a2debbc5611 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso @@ -176,7 +176,7 @@ add_specs suite_builder setup = err2.catch.expression_error . 
should_be_a Expression_Error.Syntax_Error group_builder.specify "should not allow Group_By for values" <| - err1 = data.table.cross_tab [] "Key" values=[Aggregate_Column.Count, Aggregate_Column.Group_By "Value"] on_problems=Problem_Behavior.Ignore + err1 = data.table.cross_tab [] "Key" values=[Aggregate_Column.Count, Aggregate_Column.Group_By "Value"] on_problems=..Ignore err1.should_fail_with Illegal_Argument group_builder.specify "should gracefully handle duplicate aggregate names" <| diff --git a/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso index f9ffef0b9bd7..3521e6dcb78a 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso @@ -36,7 +36,7 @@ add_specs suite_builder setup = a = ["A", ["a", "b", "a", "b", "a", "b"]] b = ["B", [2, 1, 2, 2, 2, 1]] t = table_builder [a, b] - r = t.distinct on_problems=Report_Error |> materialize |> _.sort ["A", "B"] + r = t.distinct on_problems=..Report_Error |> materialize |> _.sort ["A", "B"] r.at "A" . to_vector . should_equal ["a", "b", "b"] r.at "B" . to_vector . should_equal [2, 1, 2] @@ -46,12 +46,12 @@ add_specs suite_builder setup = c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] t = table_builder [a, b, c] - r1 = t.distinct ["A"] on_problems=Report_Error |> materialize + r1 = t.distinct ["A"] on_problems=..Report_Error |> materialize r1.at "A" . to_vector . should_equal ["a"] r1.at "B" . to_vector . should_equal [1] r1.at "C" . to_vector . should_equal [0.1] - r2 = t.distinct ["A", "B"] on_problems=Report_Error |> materialize |> _.sort "B" + r2 = t.distinct ["A", "B"] on_problems=..Report_Error |> materialize |> _.sort "B" r2.at "A" . to_vector . should_equal ["a", "a"] r2.at "B" . to_vector . should_equal [1, 2] cv = r2.at "C" . to_vector @@ -65,7 +65,7 @@ add_specs suite_builder setup = c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] t = table_builder [a, b, c] . sort ([(..Name "C" ..Descending)]) - r2 = t.distinct ["A", "B"] on_problems=Report_Error |> materialize |> _.sort "B" + r2 = t.distinct ["A", "B"] on_problems=..Report_Error |> materialize |> _.sort "B" r2.at "A" . to_vector . should_equal ["a", "a"] r2.at "B" . to_vector . should_equal [1, 2] r2.at "C" . to_vector . should_equal [0.5, 0.6] @@ -73,10 +73,10 @@ add_specs suite_builder setup = group_builder.specify "should allow to control case-sensitivity of keys" <| x = ["X", ['A', 'a', 'enso', 'Enso', 'A']] t1 = table_builder [x] - d1 = t1.distinct ["X"] on_problems=Report_Error |> materialize |> _.sort ["X"] + d1 = t1.distinct ["X"] on_problems=..Report_Error |> materialize |> _.sort ["X"] d1.at "X" . to_vector . should_equal ['A', 'Enso', 'a', 'enso'] - d2 = t1.distinct ["X"] case_sensitivity=Case_Sensitivity.Insensitive on_problems=Report_Error |> materialize |> _.sort ["X"] + d2 = t1.distinct ["X"] case_sensitivity=Case_Sensitivity.Insensitive on_problems=..Report_Error |> materialize |> _.sort ["X"] v = d2.at "X" . to_vector v.length . should_equal 2 v.filter (_.equals_ignore_case "enso") . length . should_equal 1 @@ -96,7 +96,7 @@ add_specs suite_builder setup = a = ["A", ["a", Nothing, "b", "a", "b", Nothing, "a", "b"]] b = ["B", [1, 2, 3, 4, 5, 6, 7, 8]] t = table_builder [a, b] - r = t.distinct ["A"] on_problems=Report_Error |> materialize |> _.sort "A" + r = t.distinct ["A"] on_problems=..Report_Error |> materialize |> _.sort "A" va = r.at "A" . to_vector vb = r.at "B" . to_vector va . 
should_equal [Nothing, "a", "b"] @@ -142,7 +142,7 @@ add_specs suite_builder setup = a = ["A", ["a", "a", "b", "b", "c"]] b = ["B", [1, 1, 1, 2, 1]] t = table_builder [a, b] - r = t.duplicates on_problems=Report_Error |> materialize |> _.sort ["A", "B"] + r = t.duplicates on_problems=..Report_Error |> materialize |> _.sort ["A", "B"] r.at "A" . to_vector . should_equal ["a", "a"] r.at "B" . to_vector . should_equal [1, 1] @@ -152,12 +152,12 @@ add_specs suite_builder setup = c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] t = table_builder [a, b, c] - r1 = t.duplicates ["A"] on_problems=Report_Error |> materialize + r1 = t.duplicates ["A"] on_problems=..Report_Error |> materialize r1.at "A" . to_vector . should_equal ["a", "a", "a", "a", "a", "a"] r1.at "B" . to_vector . should_equal [1, 1, 2, 2, 1, 3] r1.at "C" . to_vector . should_equal [0.1, 0.2, 0.3, 0.4, 0.5, 0.6] - r2 = t.duplicates ["A", "B"] on_problems=Report_Error |> materialize + r2 = t.duplicates ["A", "B"] on_problems=..Report_Error |> materialize r2.at "A" . to_vector . should_equal ["a", "a", "a", "a", "a"] r2.at "B" . to_vector . should_equal [1, 1, 2, 2, 1] r2.at "C" . to_vector . should_equal [0.1, 0.2, 0.3, 0.4, 0.5] @@ -165,10 +165,10 @@ add_specs suite_builder setup = group_builder.specify "should allow to control case-sensitivity of keys" <| x = ["X", ['A', 'a', 'enso', 'Enso', 'A']] t1 = table_builder [x] - d1 = t1.duplicates ["X"] on_problems=Report_Error |> materialize |> _.sort ["X"] + d1 = t1.duplicates ["X"] on_problems=..Report_Error |> materialize |> _.sort ["X"] d1.at "X" . to_vector . should_equal ['A', 'A'] - d2 = t1.duplicates ["X"] case_sensitivity=Case_Sensitivity.Insensitive on_problems=Report_Error |> materialize |> _.sort ["X"] + d2 = t1.duplicates ["X"] case_sensitivity=Case_Sensitivity.Insensitive on_problems=..Report_Error |> materialize |> _.sort ["X"] d2.at "X" . to_vector . should_equal ['A', 'A', 'Enso', 'a', 'enso'] group_builder.specify "should report a warning if the key contains floating point values" <| @@ -185,7 +185,7 @@ add_specs suite_builder setup = a = ["A", ["a", Nothing, "b", Nothing]] b = ["B", [1, 2, 3, 4]] t = table_builder [a, b] - r = t.duplicates ["A"] on_problems=Report_Error |> materialize |> _.sort "B" + r = t.duplicates ["A"] on_problems=..Report_Error |> materialize |> _.sort "B" r.at "A" . to_vector . should_equal [Nothing, Nothing] r.at "B" . to_vector . should_equal [2, 4] diff --git a/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso index cc3d9cfd6ee0..53136103c712 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso @@ -370,16 +370,16 @@ add_specs suite_builder detailed setup = group_builder.specify "should report floating point equality" <| t1 = table_builder [["X", [1.5, 2.0, 0.0]]] - r1 = t1.evaluate_expression "([X] == 2) || ([X] + 0.5 == 2)" on_problems=Problem_Behavior.Ignore + r1 = t1.evaluate_expression "([X] == 2) || ([X] + 0.5 == 2)" on_problems=..Ignore Problems.assume_no_problems r1 r1.to_vector . should_equal [True, True, False] - r2 = t1.evaluate_expression "([X] == 2) || ([X] + 0.5 == 2)" on_problems=Problem_Behavior.Report_Warning + r2 = t1.evaluate_expression "([X] == 2) || ([X] + 0.5 == 2)" on_problems=..Report_Warning Problems.get_attached_warnings r2 . each warn-> (Error.unwrap warn).should_be_a Floating_Point_Equality r2.to_vector . 
should_equal [True, True, False] - err3 = t1.evaluate_expression "([X] == 2) || ([X] + 0.5 == 2)" on_problems=Problem_Behavior.Report_Error + err3 = t1.evaluate_expression "([X] == 2) || ([X] + 0.5 == 2)" on_problems=..Report_Error err3.should_fail_with Floating_Point_Equality r4 = t1.evaluate_expression 'if ([X] != 2) && ([X] + 0.5 != 2) then "A" else "B"' @@ -398,24 +398,24 @@ add_specs suite_builder detailed setup = group_builder.specify "already existing warnings should not be escalated to errors in error handling mode (1)" pending=db_pending <| t1 = table_builder [["X", [1.5, 2.0, 0.0]]] - c1 = t1.evaluate_expression "3 / [X]" on_problems=Problem_Behavior.Report_Warning + c1 = t1.evaluate_expression "3 / [X]" on_problems=..Report_Warning Problems.expect_warning Arithmetic_Error c1 t2 = t1.set c1 as="Y" Problems.expect_warning Arithmetic_Error t2 Problems.expect_warning Arithmetic_Error (t2.at "Y") - c2 = t2.evaluate_expression "[Y] >= 42.0" on_problems=Problem_Behavior.Report_Error + c2 = t2.evaluate_expression "[Y] >= 42.0" on_problems=..Report_Error Problems.expect_warning Arithmetic_Error c2 c2.to_vector . should_equal [False, False, True] - c3 = t2.evaluate_expression "[Y] == 42.0" on_problems=Problem_Behavior.Report_Error + c3 = t2.evaluate_expression "[Y] == 42.0" on_problems=..Report_Error c3.should_fail_with Floating_Point_Equality - t3 = t2.set (expr 'if [Y] == 42.0 then "A" else "B"') as="Z" on_problems=Problem_Behavior.Report_Error + t3 = t2.set (expr 'if [Y] == 42.0 then "A" else "B"') as="Z" on_problems=..Report_Error t3.should_fail_with Floating_Point_Equality - t4 = t2.set (expr 'if [Y] >= 42.0 then "A" else "B"') as="Z" on_problems=Problem_Behavior.Report_Error + t4 = t2.set (expr 'if [Y] >= 42.0 then "A" else "B"') as="Z" on_problems=..Report_Error t4.at "Z" . to_vector . should_equal ["B", "B", "A"] # Should still keep the inherited warning from "Y". Problems.expect_warning Arithmetic_Error t4 @@ -429,23 +429,23 @@ add_specs suite_builder detailed setup = Problems.expect_warning Illegal_State t2 Problems.expect_warning Illegal_State (t2.at "Y") - c2 = t2.evaluate_expression "[Y] >= 3.5" on_problems=Problem_Behavior.Report_Error + c2 = t2.evaluate_expression "[Y] >= 3.5" on_problems=..Report_Error Problems.expect_warning Illegal_State c2 c2.to_vector . should_equal [True, True, False] - c3 = t2.evaluate_expression "[Y] == 42.0" on_problems=Problem_Behavior.Report_Error + c3 = t2.evaluate_expression "[Y] == 42.0" on_problems=..Report_Error c3.should_fail_with Floating_Point_Equality - c4 = t2.evaluate_expression "[Y] == 3.0" on_problems=Problem_Behavior.Report_Warning + c4 = t2.evaluate_expression "[Y] == 3.0" on_problems=..Report_Warning c4.to_vector . should_equal [False, False, True] Problems.expect_warning Floating_Point_Equality c4 # Should still keep the inherited warning from "Y". Problems.expect_warning Illegal_State c4 - t3 = t2.set (expr 'if [Y] == 42.0 then "A" else "B"') as="Z" on_problems=Problem_Behavior.Report_Error + t3 = t2.set (expr 'if [Y] == 42.0 then "A" else "B"') as="Z" on_problems=..Report_Error t3.should_fail_with Floating_Point_Equality - t4 = t2.set (expr 'if [Y] >= 3.5 then "A" else "B"') as="Z" on_problems=Problem_Behavior.Report_Error + t4 = t2.set (expr 'if [Y] >= 3.5 then "A" else "B"') as="Z" on_problems=..Report_Error t4.at "Z" . to_vector . should_equal ["A", "A", "B"] # Should still keep the inherited warning from "Y". 
Problems.expect_warning Illegal_State t4 diff --git a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso index be5d61f608ec..84b9e1633177 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso @@ -402,15 +402,15 @@ add_specs suite_builder setup = group_builder.specify "should report issues: floating point equality" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, 2.0]]] - r1 = t.filter "X" (Filter_Condition.Equal 2) on_problems=Problem_Behavior.Ignore + r1 = t.filter "X" (Filter_Condition.Equal 2) on_problems=..Ignore r1.at "ix" . to_vector . should_equal [3, 5] Problems.assume_no_problems r1 - r2 = t.filter "X" (Filter_Condition.Equal 2) on_problems=Problem_Behavior.Report_Warning + r2 = t.filter "X" (Filter_Condition.Equal 2) on_problems=..Report_Warning r2.at "ix" . to_vector . should_equal [3, 5] Problems.expect_warning Floating_Point_Equality r2 - r3 = t.filter "X" (Filter_Condition.Equal 2) on_problems=Problem_Behavior.Report_Error + r3 = t.filter "X" (Filter_Condition.Equal 2) on_problems=..Report_Error r3.should_fail_with Floating_Point_Equality r4 = t.filter "X" (Filter_Condition.Not_Equal 2) @@ -430,12 +430,12 @@ add_specs suite_builder setup = Problems.expect_warning Floating_Point_Equality r1 r1.at "ix" . to_vector . should_equal [2] - i1 = t2.filter "Y" (Filter_Condition.Equal 5) on_problems=Problem_Behavior.Ignore + i1 = t2.filter "Y" (Filter_Condition.Equal 5) on_problems=..Ignore Problems.expect_warning Illegal_State i1 Problems.not_expect_warning Floating_Point_Equality i1 i1.at "ix" . to_vector . should_equal [2] - err1 = t2.filter "Y" (Filter_Condition.Equal 5) on_problems=Problem_Behavior.Report_Error + err1 = t2.filter "Y" (Filter_Condition.Equal 5) on_problems=..Report_Error err1.should_fail_with Floating_Point_Equality r2 = t2.filter "Y" (Filter_Condition.Not_Equal 5) @@ -443,12 +443,12 @@ add_specs suite_builder setup = Problems.expect_warning Floating_Point_Equality r2 r2.at "ix" . to_vector . should_equal [1, 3] - i2 = t2.filter "Y" (Filter_Condition.Not_Equal 5) on_problems=Problem_Behavior.Ignore + i2 = t2.filter "Y" (Filter_Condition.Not_Equal 5) on_problems=..Ignore Problems.expect_warning Illegal_State i2 Problems.not_expect_warning Floating_Point_Equality i2 i2.at "ix" . to_vector . should_equal [1, 3] - err2 = t2.filter "Y" (Filter_Condition.Not_Equal 5) on_problems=Problem_Behavior.Report_Error + err2 = t2.filter "Y" (Filter_Condition.Not_Equal 5) on_problems=..Report_Error err2.should_fail_with Floating_Point_Equality group_builder.specify "should attach a warning when Nothing is used as a value in a comparison or `is_in` `Filter_Condition`" <| @@ -516,16 +516,16 @@ add_specs suite_builder setup = group_builder.specify "should report issues: floating point equality" <| t = table_builder [["ix", [1, 2, 3, 4, 5]], ["X", [10.0, 2.0001, 2.0, 4.5, -2.0]]] - r1 = t.filter (expr "[X] * [X] == 4.0") on_problems=Problem_Behavior.Ignore + r1 = t.filter (expr "[X] * [X] == 4.0") on_problems=..Ignore Problems.assume_no_problems r1 r1.at "ix" . to_vector . should_equal [3, 5] - r2 = t.filter (expr "[X] * [X] == 4.0") on_problems=Problem_Behavior.Report_Warning + r2 = t.filter (expr "[X] * [X] == 4.0") on_problems=..Report_Warning r2.at "ix" . to_vector . 
should_equal [3, 5] Problems.expect_warning Floating_Point_Equality r2 - r3 = t.filter (expr "[X] * [X] == 4.0") on_problems=Problem_Behavior.Report_Error + r3 = t.filter (expr "[X] * [X] == 4.0") on_problems=..Report_Error r3.should_fail_with Floating_Point_Equality r4 = t.filter (expr "[X] * [X] != 4.0") diff --git a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso index 1a638d713364..11ebdf90167a 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso @@ -113,7 +113,7 @@ add_specs suite_builder setup = c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] t = (table_builder [a, b, c]) . sort ([(..Name "C" ..Descending)]) - t2 = t.distinct ["A", "B"] on_problems=Report_Error + t2 = t.distinct ["A", "B"] on_problems=..Report_Error # Now, reverse the order! ## But the distinct was taken under descending order, so that should be preserved - we will still have _last_ rows from @@ -136,7 +136,7 @@ add_specs suite_builder setup = c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5]] t = table_builder [a, b, c] . sort "C" - t2 = t.distinct ["A"] on_problems=Report_Error + t2 = t.distinct ["A"] on_problems=..Report_Error r2 = t2 |> materialize r2.at "A" . to_vector . should_equal ["a", "b"] r2.at "B" . to_vector . should_equal [1, 5] @@ -147,7 +147,7 @@ add_specs suite_builder setup = r3.at "B" . to_vector . should_equal [5] t4 = t.filter "B" (Filter_Condition.Equal 5) - t5 = t4.distinct ["A"] on_problems=Report_Error + t5 = t4.distinct ["A"] on_problems=..Report_Error r5 = t5 |> materialize r5.at "A" . to_vector . should_contain_the_same_elements_as ["b", "a"] r5.at "B" . to_vector . should_equal [5, 5] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso index 1faae6f2eba4..73f466c91883 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso @@ -87,7 +87,7 @@ add_specs suite_builder setup = Problems.test_problem_handling action problems tester t2.cross_join t101 right_row_limit=Nothing . row_count . should_equal 202 - t2.cross_join t3 right_row_limit=2 on_problems=Problem_Behavior.Report_Error . should_fail_with Cross_Join_Row_Limit_Exceeded + t2.cross_join t3 right_row_limit=2 on_problems=..Report_Error . should_fail_with Cross_Join_Row_Limit_Exceeded group_builder.specify "should ensure 1-1 mapping even with duplicate rows" <| t1 = table_builder [["X", [2, 1, 2, 2]], ["Y", [5, 4, 5, 5]]] @@ -138,7 +138,7 @@ add_specs suite_builder setup = t3.at "Right X" . to_vector . should_equal ['a'] t3.at "Right Y 1" . to_vector . should_equal ['d'] - t1.cross_join t2 on_problems=Problem_Behavior.Report_Error . should_fail_with Duplicate_Output_Column_Names + t1.cross_join t2 on_problems=..Report_Error . 
should_fail_with Duplicate_Output_Column_Names expect_column_names ["X", "Y", "Right Y", "X 1", "Y 1"] (t1.cross_join t2 right_prefix="") diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso index fbb5b2950564..0fde4d58ad9c 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso @@ -413,13 +413,13 @@ add_specs suite_builder setup = r1.catch.to_display_text.should_equal "The criteria 'X' did not match any columns in the right table." conditions2 = [Join_Condition.Equals "foo" 42, Join_Condition.Equals "X" -3, Join_Condition.Equals -1 "baz"] - r2 = t1.join t2 on=conditions2 on_problems=Problem_Behavior.Ignore + r2 = t1.join t2 on=conditions2 on_problems=..Ignore r2.should_fail_with Missing_Input_Columns r2.catch.criteria.should_equal ["foo"] r2.catch.to_display_text.should_equal "The criteria 'foo' did not match any columns in the left table." conditions3 = [Join_Condition.Equals "Y" 42, Join_Condition.Equals "X" -3, Join_Condition.Equals -1 "baz"] - r3 = t1.join t2 on=conditions3 on_problems=Problem_Behavior.Ignore + r3 = t1.join t2 on=conditions3 on_problems=..Ignore r3.should_fail_with Missing_Input_Columns r3.catch.criteria.should_equal ["baz", 42, -3] r3.catch.to_display_text.should_equal "The criteria 'baz', 42 (index), -3 (index) did not match any columns in the right table." @@ -433,15 +433,15 @@ add_specs suite_builder setup = result.catch.expected.should_equal "Char" test <| - t1.join t2 on=(Join_Condition.Equals_Ignore_Case "X" "W") on_problems=Problem_Behavior.Ignore + t1.join t2 on=(Join_Condition.Equals_Ignore_Case "X" "W") on_problems=..Ignore test <| - t1.join t2 on=(Join_Condition.Equals_Ignore_Case "Y" "Z") on_problems=Problem_Behavior.Ignore + t1.join t2 on=(Join_Condition.Equals_Ignore_Case "Y" "Z") on_problems=..Ignore group_builder.specify "should report Invalid_Value_Type if incompatible types are correlated" <| t1 = table_builder [["X", ["1", "2", "c"]]] t2 = table_builder [["X", [1, 2, 3]]] - r1 = t1.join t2 on_problems=Problem_Behavior.Ignore + r1 = t1.join t2 on_problems=..Ignore r1.should_fail_with Invalid_Value_Type group_builder.specify "should report Invalid_Value_Type if incompatible columns types are correlated in Between" <| @@ -485,7 +485,7 @@ add_specs suite_builder setup = if setup.supports_custom_objects then t1 = table_builder [["X", [My_Type.Value 1 2, 2.0, 2]], ["Y", [10, 20, 30]]] t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] - r3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Z") on_problems=Problem_Behavior.Report_Warning + r3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Z") on_problems=..Report_Warning r3.column_names.should_equal ["X", "Y", "Z", "W"] r4 = r3.sort ["Y", "W"] r4.at "X" . to_vector . should_equal [2.0, 2.0, 2, 2] @@ -614,7 +614,7 @@ add_specs suite_builder setup = t3.at "Right X" . to_vector . should_equal [1, 2] t3.at "Right Y 1" . to_vector . should_equal [2, 2] - err1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Y") on_problems=Problem_Behavior.Report_Error + err1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Y") on_problems=..Report_Error err1.should_fail_with Duplicate_Output_Column_Names err1.catch.column_names . 
should_equal ["Right Y"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso index 30454fe1b1b0..01c88d0cc04b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso @@ -84,7 +84,7 @@ add_specs suite_builder setup = w2.to_display_text . should_contain "unexpected columns were ignored" w2.to_display_text . should_contain "status" - err2 = my_table.merge lookup key_columns=["code"] add_new_columns=False on_problems=Problem_Behavior.Report_Error + err2 = my_table.merge lookup key_columns=["code"] add_new_columns=False on_problems=..Report_Error err2.should_fail_with Unexpected_Extra_Columns err2.catch.columns . should_equal ["status"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso index 4a15f52745aa..245ecdb1b08b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso @@ -388,7 +388,7 @@ run_union_tests group_builder setup call_union = t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]], ["C", [True, False, Nothing]]] t2 = table_builder [["C", ["x", "Y", "Z"]], ["A", [4, 5, 6]], ["B", [1, 2, 3]]] - r1 = call_union [t1, t2] on_problems=Problem_Behavior.Report_Error + r1 = call_union [t1, t2] on_problems=..Report_Error r1.should_fail_with No_Common_Type r1.catch.to_display_text . should_contain "converted to text" diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso index 32d86f729bce..877923eece8e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso @@ -98,7 +98,7 @@ add_specs suite_builder setup = t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", ['a']], ["W", ['x']]] - t3 = t1.zip t2 keep_unmatched=True on_problems=Problem_Behavior.Report_Error + t3 = t1.zip t2 keep_unmatched=True on_problems=..Report_Error Problems.assume_no_problems t3 expect_column_names ["X", "Y", "Z", "W"] t3 t3.at "X" . to_vector . should_equal [1, 2, 3] @@ -110,7 +110,7 @@ add_specs suite_builder setup = t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", ['a']], ["W", ['x']]] - t3 = t1.zip t2 keep_unmatched=False on_problems=Problem_Behavior.Report_Error + t3 = t1.zip t2 keep_unmatched=False on_problems=..Report_Error Problems.assume_no_problems t3 expect_column_names ["X", "Y", "Z", "W"] t3 t3.at "X" . to_vector . should_equal [1] @@ -154,7 +154,7 @@ add_specs suite_builder setup = group_builder.specify "should not report unmatched rows for rows that simply are all null" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", ['a', Nothing, Nothing]], ["W", ['b', Nothing, Nothing]]] - t3 = t1.zip t2 on_problems=Problem_Behavior.Report_Error + t3 = t1.zip t2 on_problems=..Report_Error Problems.assume_no_problems t3 expect_column_names ["X", "Y", "Z", "W"] t3 t3.at "X" . to_vector . should_equal [1, 2, 3] @@ -176,7 +176,7 @@ add_specs suite_builder setup = t3.at "Right X" . to_vector . should_equal ['a', Nothing] t3.at "Right Y 1" . to_vector . should_equal ['d', Nothing] - t1.zip t2 keep_unmatched=False on_problems=Problem_Behavior.Report_Error . 
should_fail_with Duplicate_Output_Column_Names + t1.zip t2 keep_unmatched=False on_problems=..Report_Error . should_fail_with Duplicate_Output_Column_Names expect_column_names ["X", "Y", "Right Y", "X 1", "Y 1"] (t1.zip t2 right_prefix="") diff --git a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso index 6225070d9d1c..e2df843fd368 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso @@ -158,7 +158,7 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] - t = data.table.select_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.select_columns selector on_problems=..Report_Error expect_column_names ["foo"] t expect_column_names ["foo", "bar"] <| @@ -166,12 +166,12 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -6, 1, -7] - t = data.table.select_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.select_columns selector on_problems=..Report_Error expect_column_names ["foo", "bar"] t group_builder.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] - t = data.table.select_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.select_columns selector on_problems=..Report_Error expect_column_names ["foo"] t expect_column_names ["foo", "bar"] <| @@ -185,7 +185,7 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| selector = ["FOO", "foo"] - t = data.table.select_columns selector case_sensitivity=Case_Sensitivity.Insensitive on_problems=Problem_Behavior.Report_Error + t = data.table.select_columns selector case_sensitivity=Case_Sensitivity.Insensitive on_problems=..Report_Error expect_column_names ["foo"] t expect_column_names ["bar", "foo"] <| @@ -199,12 +199,12 @@ add_specs suite_builder setup = problems = [Missing_Input_Columns.Error ["hmm", weird_name]] Problems.test_problem_handling action problems tester - err = data.table.select_columns selector on_problems=Problem_Behavior.Ignore + err = data.table.select_columns selector on_problems=..Ignore err.should_fail_with Missing_Input_Columns err.catch.criteria . should_equal ["hmm", weird_name] group_builder.specify "should correctly handle problems in mixed case" <| - err = data.table.select_columns ["foo", "hmm", 99] on_problems=Problem_Behavior.Ignore + err = data.table.select_columns ["foo", "hmm", 99] on_problems=..Ignore err.should_fail_with Missing_Input_Columns err.catch.criteria . 
should_equal ["hmm", 99] @@ -320,22 +320,22 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] - t = data.table.remove_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.remove_columns selector on_problems=..Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t group_builder.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -7, -6, 1] - t = data.table.remove_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.remove_columns selector on_problems=..Report_Error expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t group_builder.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] - t = data.table.remove_columns selector on_problems=Problem_Behavior.Report_Error + t = data.table.remove_columns selector on_problems=..Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t group_builder.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| selector = ["FOO", "foo"] - t = data.table.remove_columns selector Case_Sensitivity.Insensitive on_problems=Problem_Behavior.Report_Error + t = data.table.remove_columns selector Case_Sensitivity.Insensitive on_problems=..Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t group_builder.specify "should correctly handle problems: unmatched names" <| @@ -346,7 +346,7 @@ add_specs suite_builder setup = problems = [Missing_Input_Columns.Error ["hmm", weird_name]] Problems.test_problem_handling action problems tester - err = data.table.remove_columns selector error_on_missing_columns=True on_problems=Problem_Behavior.Ignore + err = data.table.remove_columns selector error_on_missing_columns=True on_problems=..Ignore err.should_fail_with Missing_Input_Columns group_builder.specify "should correctly handle problems: no columns in the output" <| @@ -409,17 +409,17 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] - t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error + t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=..Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] t group_builder.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -7, -6, 1] - t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error + t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=..Report_Error expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] t group_builder.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] - t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error + t = data.table.reorder_columns selector Position.After_Other_Columns on_problems=..Report_Error expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] t group_builder.specify "should correctly handle problems: unmatched names" <| @@ -616,12 +616,12 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle edge-cases: aliased indices" <| map1 = Map.from_vector [[1, 
"FirstColumn"], [-3, "FirstColumn"]] - t1 = data.table.rename_columns map1 on_problems=Problem_Behavior.Report_Error + t1 = data.table.rename_columns map1 on_problems=..Report_Error Problems.assume_no_problems t1 expect_column_names ["alpha", "FirstColumn", "gamma", "delta"] t1 map2 = Map.from_vector [[1, "FirstColumn"], [-3, "DifferentName!"]] - t2 = data.table.rename_columns map2 on_problems=Problem_Behavior.Report_Error + t2 = data.table.rename_columns map2 on_problems=..Report_Error t2.should_fail_with Ambiguous_Column_Rename err = t2.catch . inner_error err.column_name . should_equal "beta" @@ -630,12 +630,12 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle edge-cases: aliased selectors" <| t = table_builder [["alpha", [1,2,3]], ["bet", [4,5,6]]] map1 = Map.from_vector [["a.*".to_regex, "AA"], [".*a".to_regex, "AA"]] - t1 = t.rename_columns map1 on_problems=Problem_Behavior.Report_Error + t1 = t.rename_columns map1 on_problems=..Report_Error Problems.assume_no_problems t1 expect_column_names ["AA", "bet"] t1 map2 = Map.from_vector [["a.*".to_regex, "StartsWithA"], [".*a".to_regex, "EndsWithA"]] - t2 = t.rename_columns map2 on_problems=Problem_Behavior.Report_Error + t2 = t.rename_columns map2 on_problems=..Report_Error t2.should_fail_with Ambiguous_Column_Rename err = t2.catch . inner_error err.column_name . should_equal "alpha" @@ -648,7 +648,7 @@ add_specs suite_builder setup = same column, if the resulting rename is unambiguous, no error is raised. map3 = Map.from_vector [["a(.*)".to_regex, "$1A"], ["(.*)aa".to_regex, "$1aA"]] - t4 = t3.rename_columns map3 on_problems=Problem_Behavior.Report_Error + t4 = t3.rename_columns map3 on_problems=..Report_Error Problems.assume_no_problems t4 expect_column_names ["aaA", "bbb"] t4 diff --git a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso index b3d9c6cf6c87..01e97c73992d 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso @@ -126,6 +126,6 @@ add_specs suite_builder setup = Problems.test_problem_handling action2 problems2 tester2 # No clash with the columns that are removed by transpose. - t2 = t1.transpose ["X"] attribute_column_name="Y" value_column_name="Z" on_problems=Problem_Behavior.Report_Error + t2 = t1.transpose ["X"] attribute_column_name="Y" value_column_name="Z" on_problems=..Report_Error Problems.assume_no_problems t2 t2.column_names . should_equal ["X", "Y", "Z"] diff --git a/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso b/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso index 4b1121032a5d..f045dbbb1054 100644 --- a/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso +++ b/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso @@ -318,7 +318,7 @@ add_specs suite_builder = group_builder.specify "should correctly handle problems: missing input columns" <| t1 = Table.new [["A", ["1", "2", "3"]]] - r1 = t1.parse columns=["A", "B", "C", "E"] on_problems=Problem_Behavior.Ignore + r1 = t1.parse columns=["A", "B", "C", "E"] on_problems=..Ignore r1.should_fail_with Missing_Input_Columns r1.catch.criteria . should_equal ["B", "C", "E"] @@ -381,11 +381,11 @@ add_specs suite_builder = r2 . 
should_equal t1 Problems.expect_warning No_Input_Columns_Selected r2 - r3 = t1.parse columns=[] error_on_missing_columns=False on_problems=Problem_Behavior.Ignore + r3 = t1.parse columns=[] error_on_missing_columns=False on_problems=..Ignore r3 . should_equal t1 Problems.assume_no_problems r3 - r4 = t1.parse columns=["nonexistent column :D", -42] error_on_missing_columns=False on_problems=Problem_Behavior.Report_Warning + r4 = t1.parse columns=["nonexistent column :D", -42] error_on_missing_columns=False on_problems=..Report_Warning r4 . should_equal t1 Problems.expect_warning No_Input_Columns_Selected r4 Problems.expect_warning (Missing_Input_Columns.Error ["nonexistent column :D", -42]) r4 diff --git a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso index c04b5975990b..e805ffc61488 100644 --- a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso @@ -96,7 +96,7 @@ add_specs suite_builder = r1.catch.should_be_a File_Error.Not_Found directory = enso_project.data - r2 = Data.read directory (..Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error + r2 = Data.read directory (..Delimited "," headers=True value_formatter=Nothing) ..Report_Error r2.should_fail_with Illegal_Argument r2.catch.message.should_equal "Cannot `read` a directory, use `Data.list`." @@ -108,7 +108,7 @@ add_specs suite_builder = text.write (path name) test_file name = - table = Data.read (path name) (..Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error + table = Data.read (path name) (..Delimited "," headers=True value_formatter=Nothing) ..Report_Error table.columns.map .name . should_equal ['a', 'b', 'c'] table.at 'a' . to_vector . should_equal ['d', '1'] table.at 'b' . to_vector . should_equal ['e', '2'] @@ -123,7 +123,7 @@ add_specs suite_builder = # Currently mixed line endings are not supported. 'a,b,c\nd,e,f\r1,2,3'.write (path 'mixed.csv') - Data.read (path 'mixed.csv') (..Delimited "," headers=True value_formatter=Nothing) Problem_Behavior.Report_Error . should_fail_with Invalid_Row + Data.read (path 'mixed.csv') (..Delimited "," headers=True value_formatter=Nothing) ..Report_Error . should_fail_with Invalid_Row ['crlf.csv', 'lf.csv', 'cr.csv', 'mixed.csv'].each (path >> .delete) @@ -155,14 +155,14 @@ add_specs suite_builder = file_2.delete group_builder.specify "should work with Windows-1252 encoding" <| - table = Data.read (enso_project.data / "windows.csv") (..Delimited "," headers=True encoding=Encoding.windows_1252) Problem_Behavior.Report_Error + table = Data.read (enso_project.data / "windows.csv") (..Delimited "," headers=True encoding=Encoding.windows_1252) ..Report_Error table.columns.map .name . should_equal ['a', 'b', 'c'] table.at 'a' . to_vector . should_equal ['$¢'] table.at 'b' . to_vector . should_equal ['¤'] table.at 'c' . to_vector . should_equal ['¥'] group_builder.specify "should work with UTF-16 encoding" <| - table = Data.read (enso_project.data / "utf16.csv") (..Delimited "," headers=True encoding=Encoding.utf_16_be) Problem_Behavior.Report_Error + table = Data.read (enso_project.data / "utf16.csv") (..Delimited "," headers=True encoding=Encoding.utf_16_be) ..Report_Error table.columns.map .name . should_equal ['ą', '🚀b', 'ć😎'] table.at 'ą' . to_vector . should_equal ['ą'] table.at '🚀b' . to_vector . 
should_equal ['✨🚀🚧😍😃😍😎😙😉☺'] diff --git a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso index 5e0f08e33248..855ef66551f6 100644 --- a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso @@ -28,12 +28,13 @@ add_specs suite_builder = table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]], ["D", ["a", 2, My_Type.Value 10]]] file = (enso_project.data / "transient" / "written.csv") file.delete_if_exists - table.write file on_problems=Report_Error . should_succeed . should_equal file + table.write file on_problems=..Report_Error . should_succeed . should_equal file expected_text = normalize_lines <| """ A,B,C,D 1,1.0,x,a 2,1.5,y,2 3,2.2,z,[[[My Type :: 10]]] + ## """ text = Data.read_text file text.should_equal expected_text file.delete @@ -45,7 +46,7 @@ add_specs suite_builder = style=setting.first separator=setting.second file = (enso_project.data / "transient" / "endings.csv") - table.write file (..Delimited ',' line_endings=style) on_problems=Report_Error . should_succeed + table.write file (..Delimited ',' line_endings=style) on_problems=..Report_Error . should_succeed text = Data.read_text file text.should_equal (lines.join separator suffix=separator) file.delete @@ -55,7 +56,7 @@ add_specs suite_builder = table = Table.new [['The Column "Name"', ["foo","'bar'",'"baz"', 'one, two, three', 'a\nb']], ["Hello, Column?", [1.0, 1000000.5, 2.2, -1.5, 0.0]]] file = (enso_project.data / "transient" / "quotes1.csv") file.delete_if_exists - table.write file (..Delimited "," value_formatter=data_formatter) on_problems=Report_Error . should_succeed + table.write file (..Delimited "," value_formatter=data_formatter) on_problems=..Report_Error . should_succeed expected_text = normalize_lines <| """ "The Column ""Name""","Hello, Column?" foo,"1,0" @@ -64,6 +65,7 @@ add_specs suite_builder = "one, two, three","-1,5" "a b","0,0" + ## """ text = Data.read_text file text.should_equal expected_text file.delete @@ -73,7 +75,7 @@ add_specs suite_builder = table = Table.new [['"A"', ["foo",'!"baz" ', 'one, two, three', "a;b; c ", "a\b", 'n\nm']], ["B", [1000000.5, 1000.0, 0.0, -1.2, Nothing, 33]]] file = (enso_project.data / "transient" / "quotes2.csv") file.delete_if_exists - table.write file (Delimited_Format.Delimited ";" value_formatter=data_formatter . with_quotes quote='"' quote_escape='\\') on_problems=Report_Error . should_succeed + table.write file (Delimited_Format.Delimited ";" value_formatter=data_formatter . with_quotes quote='"' quote_escape='\\') on_problems=..Report_Error . should_succeed expected_text = normalize_lines <| """ "\"A\"";B foo;1'000'000.5 @@ -83,6 +85,7 @@ add_specs suite_builder = "a\\b"; "n m";33.0 + ## """ text = Data.read_text file text.should_equal expected_text file.delete @@ -92,7 +95,7 @@ add_specs suite_builder = table = Table.new [['"A"', [Nothing,"The 'thing'.", 'one, "two", three', 'a\tb', 'x\ny', 'w\vz']], ["B\C", [1000000.5, 1000.0, Nothing, -1.2, 2.0, 42.0]]] file = (enso_project.data / "transient" / "quotes3.csv") file.delete_if_exists - table.write file (Delimited_Format.Delimited '\t' value_formatter=data_formatter . with_quotes quote='\'' quote_escape='\'') on_problems=Report_Error . should_succeed + table.write file (Delimited_Format.Delimited '\t' value_formatter=data_formatter . with_quotes quote='\'' quote_escape='\'') on_problems=..Report_Error . 
should_succeed expected_text = normalize_lines <| ''' "A"\tB\\C \t'1''000''000.5' @@ -102,6 +105,7 @@ add_specs suite_builder = 'x y'\t2.0 w\vz\t42.0 + ## ''' text = Data.read_text file text.should_equal expected_text file.delete @@ -110,12 +114,13 @@ add_specs suite_builder = table = Table.new [["A", [1,Nothing,3]], ["B", [Nothing,"","abc"]]] file = (enso_project.data / "transient" / "empty_vs_null.csv") file.delete_if_exists - table.write file on_problems=Report_Error . should_succeed + table.write file on_problems=..Report_Error . should_succeed expected_text = normalize_lines <| """ A,B 1, ,"" 3,abc + ## """ text = Data.read_text file text.should_equal expected_text file.delete @@ -124,14 +129,14 @@ add_specs suite_builder = table = Table.new [["#", ['b', 'x', '#']], ["B", [Nothing,"#","abc"]]] file = (enso_project.data / "transient" / "comments.csv") file.delete_if_exists - table.write file on_problems=Report_Error . should_succeed + table.write file on_problems=..Report_Error . should_succeed expected_text = join_lines ['#,B','b,', 'x,#', '#,abc'] text = Data.read_text file text.should_equal expected_text file.delete format = Delimited_Format.Delimited ',' . with_comments - table.write file format on_problems=Report_Error . should_succeed + table.write file format on_problems=..Report_Error . should_succeed expected_text_2 = normalize_lines <| """ "#",B b, @@ -146,7 +151,7 @@ add_specs suite_builder = table = Table.new [['The Column "Name"', ["foo","'bar'",'"baz"', 'one, two, three']], ["Hello, Column?", [1.0, 1000000.5, 2.2, -1.5]]] file = (enso_project.data / "transient" / "quote_disabled.csv") file.delete_if_exists - r = table.write file format on_problems=Report_Error + r = table.write file format on_problems=..Report_Error r.should_equal file warnings = Problems.get_attached_warnings r ## Only the 3rd row of the first column warns. Since quoting is @@ -163,6 +168,7 @@ add_specs suite_builder = 'bar',1000000,5 "baz",2,2 one, two, three,-1,5 + ## """ text = Data.read_text file text.should_equal expected_text file.delete @@ -172,12 +178,13 @@ add_specs suite_builder = table = Table.new [['The Column "Name"', ["foo","'bar'",'"baz"', 'one, two, three']], ["B", [1.0, 1000000.5, 2.2, -1.5]], ["C", ["foo", My_Type.Value 44, (Date.new 2022 06 21), 42]], ["D", [1,2,3,4000]], ["E", [Nothing, (Time_Of_Day.new 13 55), Nothing, Nothing]]] file = (enso_project.data / "transient" / "quote_always.csv") file.delete_if_exists - table.write file format on_problems=Report_Error . should_succeed + table.write file format on_problems=..Report_Error . should_succeed expected_text1 = normalize_lines <| """ "The Column \"Name\"","B","C","D","E" "foo",1.0,"foo",1, "'bar'","1\"000\"000.5","[[[My Type :: 44]]]",2,13:55:00 "\"baz\"",2.2,"Tuesday, 21 Jun 2022",3, + ## """ expected_text = expected_text1 + '"one, two, three",-1.5,42,"4\\"000",\n' text = Data.read_text file text.should_equal expected_text @@ -187,7 +194,7 @@ add_specs suite_builder = table = Table.new [["ąęćś", [0]], ["ß", ["żółw 🐢"]]] file = (enso_project.data / "transient" / "utf16.csv") file.delete_if_exists - table.write file (..Delimited "," encoding=Encoding.utf_16_be) on_problems=Report_Error . should_succeed + table.write file (..Delimited "," encoding=Encoding.utf_16_be) on_problems=..Report_Error . 
should_succeed expected_text = normalize_lines <| """ ąęćś,ß 0,żółw 🐢 @@ -218,11 +225,12 @@ add_specs suite_builder = table_1 = Table.new [["A", ["x", "y"]], ["B", ["z", "w"]]] file_1 = (enso_project.data / "transient" / "textonly.csv") file_1.delete_if_exists - result_1 = table_1.write file_1 format on_problems=Report_Error . should_succeed + result_1 = table_1.write file_1 format on_problems=..Report_Error . should_succeed expected_text = normalize_lines <| """ A,B x,z y,w + ## """ text_1 = Data.read_text file_1 text_1.should_equal expected_text result_1 . should_equal file_1 @@ -242,7 +250,7 @@ add_specs suite_builder = table = Table.new [["A", [1,2,3]], ["B", [1.0,1.5,2.2]], ["C", ["x","y","z"]]] file = (enso_project.data / "transient" / "append_nonexistent.csv") file.delete_if_exists - table.write file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + table.write file on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed got_table = file.read got_table.should_equal table file.delete @@ -252,7 +260,7 @@ add_specs suite_builder = file = (enso_project.data / "transient" / "append_empty.csv") file.delete_if_exists "".write file - table.write file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + table.write file on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed got_table = file.read got_table.should_equal table file.delete @@ -262,7 +270,7 @@ add_specs suite_builder = file = (enso_project.data / "transient" / "append_missing_newline.csv") file.delete_if_exists 'A,B,C\r0,0,0'.write file - table.write file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + table.write file on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed text = Data.read_text file expected_lines = ["A,B,C", "0,0,0", "1,1.0,x", "2,1.5,y", "3,2.2,z"] text.should_equal (expected_lines.join '\r' suffix='\r') @@ -273,8 +281,8 @@ add_specs suite_builder = appending_table = Table.new [["B", [33,44]], ["A", [Nothing, 0]], ["C", ["a","BB"]]] file = (enso_project.data / "transient" / "append_by_name.csv") file.delete_if_exists - existing_table.write file on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed - appending_table.write file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + existing_table.write file on_existing_file=Existing_File_Behavior.Overwrite on_problems=..Report_Error . should_succeed + appending_table.write file on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed got_table = file.read expected_table = Table.new [["A", [1,2,Nothing,0]], ["B", [1.0,1.5,33,44]], ["C", ["x","y","a","BB"]]] got_table.should_equal expected_table @@ -285,9 +293,9 @@ add_specs suite_builder = appending_table = Table.new [["B1", [33,44]], ["0", [Nothing, 0]], ["C", ["a","BB"]]] file = (enso_project.data / "transient" / "append_by_name_2.csv") file.delete_if_exists - existing_table.write file on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed + existing_table.write file on_existing_file=Existing_File_Behavior.Overwrite on_problems=..Report_Error . should_succeed format = Delimited_Format.Delimited "," . with_headers - appending_table.write file format on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . 
should_succeed + appending_table.write file format on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed got_table = file.read format expected_table = Table.new [["0", [1,2,Nothing,0]], ["B1", [1.0,1.5,33,44]], ["C", ["x","y","a","BB"]]] got_table.should_equal expected_table @@ -333,8 +341,8 @@ add_specs suite_builder = test_append initial_file_format append_format expected_table = file = (enso_project.data / "transient" / "append_by_position.csv") file.delete_if_exists - existing_table.write file initial_file_format on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed - appending_table.write file append_format match_columns=Match_Columns.By_Position on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + existing_table.write file initial_file_format on_existing_file=Existing_File_Behavior.Overwrite on_problems=..Report_Error . should_succeed + appending_table.write file append_format match_columns=Match_Columns.By_Position on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed read_format = initial_file_format got_table = file.read read_format got_table.should_equal expected_table @@ -382,8 +390,8 @@ add_specs suite_builder = style=setting.first separator=setting.second file = (enso_project.data / "transient" / "endings.csv") - initial_table.write file (..Delimited ',' line_endings=style) on_problems=Report_Error . should_succeed - table_to_append.write file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + initial_table.write file (..Delimited ',' line_endings=style) on_problems=..Report_Error . should_succeed + table_to_append.write file on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed text = Data.read_text file text.should_equal (expected_lines.join separator suffix=separator) file.delete @@ -395,8 +403,8 @@ add_specs suite_builder = nonexistent_file.delete_if_exists table_to_append = Table.new [["a", ["x", "y"]], ["d", ["z", "w"]]] - table_to_append.write nonexistent_file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed - table_to_append.write empty_file on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + table_to_append.write nonexistent_file on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed + table_to_append.write empty_file on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed expected_lines = ["a,d", "x,z", "y,w"] expected_text = join_lines expected_lines @@ -413,7 +421,7 @@ add_specs suite_builder = file.delete_if_exists (initial_lines.join separator suffix=separator).write file format = Delimited_Format.Delimited ',' . with_comments - table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed text = Data.read_text file expected_text = expected_lines.join separator suffix=separator text.should_equal expected_text @@ -429,7 +437,7 @@ add_specs suite_builder = file.delete_if_exists (initial_lines.join separator).write file format = Delimited_Format.Delimited ',' . with_comments - table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . 
should_succeed + table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed text = Data.read_text file expected_text = expected_lines.join separator suffix=separator text.should_equal expected_text @@ -449,25 +457,25 @@ add_specs suite_builder = line_ending_pairs.each setting-> separator=setting.second (base_line+separator).write file - table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . should_succeed text = Data.read_text file expected_text = expected_lines_1.join separator suffix=separator text.should_equal expected_text file.delete base_line.write file - table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . should_succeed Data.read_text file . should_equal <| normalize_lines base_line+'\n1\n2\n' file.delete # 1 character without trailing newline "#".write file - table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . should_succeed Data.read_text file . should_equal <| normalize_lines '#\n1\n2\n' file.delete "#".write file - table.write file format.with_comments on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + table.write file format.with_comments on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . should_succeed Data.read_text file . should_equal <| normalize_lines '#\n1\n2\n' file.delete @@ -477,7 +485,7 @@ add_specs suite_builder = [format.with_comments, format].each format-> separator=setting.second ("#"+separator).write file - table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . should_succeed text = Data.read_text file expected_text = expected_lines_2.join separator suffix=separator text.should_equal expected_text @@ -490,7 +498,7 @@ add_specs suite_builder = line_ending_pairs.each setting-> separator=setting.second ("A"+separator+middle_line).write file - table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . 
should_succeed text = Data.read_text file expected_text = expected_lines_3.join separator suffix=separator text.should_equal expected_text @@ -500,7 +508,7 @@ add_specs suite_builder = line_ending_pairs.each setting-> separator=setting.second ("A"+separator+middle_line+separator).write file - table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + table.write file format on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . should_succeed text = Data.read_text file expected_text = expected_lines_3.join separator suffix=separator text.should_equal expected_text @@ -516,7 +524,7 @@ add_specs suite_builder = file.delete_if_exists (initial_line+separator).write file format = Delimited_Format.Delimited ',' . with_comments - table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed text = Data.read_text file expected_text = expected_lines.join separator suffix=separator text.should_equal expected_text @@ -530,7 +538,7 @@ add_specs suite_builder = file.delete_if_exists (join_lines initial_lines trailing_newline=False).write file format = Delimited_Format.Delimited ',' . with_comments - table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + table_to_append.write file format on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed text = Data.read_text file expected_text = join_lines expected_lines text.should_equal expected_text @@ -641,7 +649,7 @@ add_specs suite_builder = f.delete_if_exists "Initial Content".write f on_existing_file=Existing_File_Behavior.Overwrite big_table = Table.new [["X", 0.up_to 5000 . to_vector + ["😊"]]] - r2 = big_table.write f format on_problems=Problem_Behavior.Report_Error + r2 = big_table.write f format on_problems=..Report_Error r2.should_fail_with Encoding_Error r2.catch.to_display_text . should_contain "Encoding issues" f.read ..Plain_Text . should_equal "Initial Content" diff --git a/test/Table_Tests/src/IO/Excel_Spec.enso b/test/Table_Tests/src/IO/Excel_Spec.enso index 6f3a5611a05e..08d54310c1d7 100644 --- a/test/Table_Tests/src/IO/Excel_Spec.enso +++ b/test/Table_Tests/src/IO/Excel_Spec.enso @@ -151,7 +151,7 @@ spec_write suite_builder suffix test_sheet_name = group_builder.specify 'should write a table to non-existent file as a new sheet with headers; and return the file object on success' <| out = data.create_out - data.table.write out on_problems=Report_Error . should_succeed . should_equal out + data.table.write out on_problems=..Report_Error . should_succeed . should_equal out written = out.read written.sheet_count . should_equal 1 written.sheet_names . should_equal ['EnsoSheet'] @@ -169,7 +169,7 @@ spec_write suite_builder suffix test_sheet_name = group_builder.specify 'should write a table to non-existent file in append mode as a new sheet with headers' <| out = data.create_out - data.table.write out on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + data.table.write out on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed written = out.read written.sheet_count . should_equal 1 written.sheet_names . 
should_equal ['EnsoSheet'] @@ -178,8 +178,8 @@ spec_write suite_builder suffix test_sheet_name = group_builder.specify 'should write a table to existing file overriding EnsoSheet' <| out = data.create_out - data.table.write out on_problems=Report_Error . should_succeed - data.table.write out on_problems=Report_Error . should_succeed + data.table.write out on_problems=..Report_Error . should_succeed + data.table.write out on_problems=..Report_Error . should_succeed written_workbook = out.read written_workbook.sheet_count . should_equal 1 written_workbook.sheet_names . should_equal ['EnsoSheet'] @@ -189,21 +189,21 @@ spec_write suite_builder suffix test_sheet_name = group_builder.specify 'should write a table to existing file in overwrite mode as a new sheet with headers' <| out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - data.table.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed + data.table.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Overwrite on_problems=..Report_Error . should_succeed written = out.read (..Sheet "Another") written.should_equal data.table group_builder.specify 'should write a table to existing file in overwrite mode as a new sheet without headers' <| out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - data.table.write out (..Sheet "NoHeaders") on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed + data.table.write out (..Sheet "NoHeaders") on_existing_file=Existing_File_Behavior.Overwrite on_problems=..Report_Error . should_succeed written = out.read (..Sheet "NoHeaders") written.should_equal (data.table.rename_columns ['A', 'B', 'C', 'D', 'E', 'F']) group_builder.specify 'should create new sheets at the start if index is 0' <| out = data.create_out - data.table.write out (..Sheet 0) on_problems=Report_Error . should_succeed - data.clothes.write out (..Sheet 0) on_problems=Report_Error . should_succeed + data.table.write out (..Sheet 0) on_problems=..Report_Error . should_succeed + data.clothes.write out (..Sheet 0) on_problems=..Report_Error . should_succeed read_1 = out.read (..Sheet "Sheet1") read_1 . should_equal data.table read_2 = out.read (..Sheet "Sheet2") @@ -216,21 +216,21 @@ spec_write suite_builder suffix test_sheet_name = group_builder.specify 'should write a table to specific single cell location of an existing sheet' <| out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - data.table.write out (..Range "Another!G1") on_problems=Report_Error . should_succeed + data.table.write out (..Range "Another!G1") on_problems=..Report_Error . should_succeed written = out.read (..Range "Another!G1") written.should_equal data.table group_builder.specify 'should clear out an existing fixed range and replace' <| out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - data.sub_clothes.write out (..Range "Another!A1:D20") on_problems=Report_Error . should_succeed + data.sub_clothes.write out (..Range "Another!A1:D20") on_problems=..Report_Error . should_succeed written = out.read (..Range "Another!A1") written.should_equal data.sub_clothes group_builder.specify 'should clear out an existing range and replace' <| out = data.create_out (enso_project.data / test_sheet_name) . copy_to out - data.sub_clothes.write out (..Range "Another!A1") on_problems=Report_Error . 
should_succeed + data.sub_clothes.write out (..Range "Another!A1") on_problems=..Report_Error . should_succeed written = out.read (..Range "Another!A1") written.should_equal data.sub_clothes @@ -273,7 +273,7 @@ spec_write suite_builder suffix test_sheet_name = group_builder.specify 'should write a table to non-existent file as a new sheet without headers' <| out = data.create_out - data.table.write out (..Sheet "Sheet1" headers=False) on_problems=Report_Error . should_succeed + data.table.write out (..Sheet "Sheet1" headers=False) on_problems=..Report_Error . should_succeed written = out.read written.sheet_count . should_equal 1 written.sheet_names . should_equal ['Sheet1'] @@ -287,7 +287,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] - extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed written = out.read (..Sheet "Another") . select_columns [0, 1, 2] written.should_equal expected @@ -296,7 +296,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] - extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . should_succeed written = out.read (..Sheet "Another") . select_columns [0, 1, 2] written.should_equal expected @@ -305,7 +305,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] - extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + extra_another.write out (..Sheet "Another") on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed written = out.read (..Sheet "Another") . select_columns [0, 1, 2] written.should_equal expected @@ -314,7 +314,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] - extra_another.write out (..Range "Another!A1") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . 
should_succeed + extra_another.write out (..Range "Another!A1") on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed written = out.read (..Sheet "Another") . select_columns [0, 1, 2] written.should_equal expected @@ -323,7 +323,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] - extra_another.write out (..Range "Another!A1") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + extra_another.write out (..Range "Another!A1") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . should_succeed written = out.read (..Sheet "Another") . select_columns [0, 1, 2] written.should_equal expected @@ -332,7 +332,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] - extra_another.write out (..Range "Another!A1") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + extra_another.write out (..Range "Another!A1") on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed written = out.read (..Sheet "Another") . select_columns [0, 1, 2] written.should_equal expected @@ -341,7 +341,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['AA', ['d', 'e']], ['BB', [4, 5]], ['CC', [True, False]], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a', 'b', 'c', 'd', 'e']], ['BB', [1, 2, 3, 4, 5]], ['CC', [True, False, False, True, False]]] - extra_another.write out (..Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + extra_another.write out (..Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed written = out.read (..Sheet "Another") . select_columns [0, 1, 2] written.should_equal expected @@ -350,7 +350,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] - extra_another.write out (..Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + extra_another.write out (..Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . should_succeed written = out.read (..Sheet "Another") . select_columns [0, 1, 2] written.should_equal expected @@ -359,7 +359,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . 
copy_to out extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['CC',[True, False]], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['CC',[True, False, False, True, False]]] - extra_another.write out (..Range "Random!K9") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + extra_another.write out (..Range "Random!K9") on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed written = out.read (..Range "Random!K9") . select_columns [0, 1, 2] written.should_equal expected @@ -368,7 +368,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['AA 1',[True, False]], ['BB 1', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['AA 1',[True, False, False, True, False]]] - extra_another.write out (..Range "Random!S3") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + extra_another.write out (..Range "Random!S3") on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed written = out.read (..Range "Random!S3") . select_columns [0, 1, 2] written.should_equal expected @@ -377,7 +377,7 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['A', ['d', 'e']], ['B',[4, 5]], ['C',[True, False]], ['D', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['CC',[True, False, False, True, False]]] - extra_another.write out (..Range "Random!K9") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=Report_Error . should_succeed + extra_another.write out (..Range "Random!K9") on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position on_problems=..Report_Error . should_succeed written = out.read (..Range "Random!K9") . select_columns [0, 1, 2] written.should_equal expected @@ -386,19 +386,19 @@ spec_write suite_builder suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']]] expected = Table.new [['AA', ['a','b','c','d', 'e']], ['BB',[1,2,3,4,5]], ['CC',[True, False, False, True, False]]] - extra_another.write out (..Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed + extra_another.write out (..Range "Another!A1:D6") on_existing_file=Existing_File_Behavior.Append on_problems=..Report_Error . should_succeed written = out.read (..Sheet "Another") . select_columns [0, 1, 2] written.should_equal expected group_builder.specify 'should error gracefully if an unknown extension' <| out = data.create_out suffix="notxls" - data.table.write out format=..Workbook on_problems=Report_Error . should_fail_with Illegal_Argument - data.table.write out format=..Sheet on_problems=Report_Error . should_fail_with Illegal_Argument + data.table.write out format=..Workbook on_problems=..Report_Error . should_fail_with Illegal_Argument + data.table.write out format=..Sheet on_problems=..Report_Error . 
should_fail_with Illegal_Argument group_builder.specify 'should be able to write to a new dry run file' <| out = data.create_out temp = Context.Output.with_disabled <| - result = data.table.write out on_problems=Report_Error . should_succeed + result = data.table.write out on_problems=..Report_Error . should_succeed Problems.expect_only_warning Dry_Run_Operation result result.exists.should_be_true @@ -416,7 +416,7 @@ spec_write suite_builder suffix test_sheet_name = out = data.create_out out.exists.should_be_false temp = Context.Output.with_disabled <| - result = data.table.write out on_problems=Report_Error . should_succeed + result = data.table.write out on_problems=..Report_Error . should_succeed Problems.expect_only_warning Dry_Run_Operation result result.exists.should_be_true result @@ -427,7 +427,7 @@ spec_write suite_builder suffix test_sheet_name = opened_temp.sheet_names . should_equal ['EnsoSheet'] temp2 = Context.Output.with_disabled <| - result = data.table.write out (..Sheet "Another") on_problems=Report_Error . should_succeed + result = data.table.write out (..Sheet "Another") on_problems=..Report_Error . should_succeed Problems.expect_only_warning Dry_Run_Operation result result.exists.should_be_true result @@ -447,7 +447,7 @@ spec_write suite_builder suffix test_sheet_name = group_builder.specify "should be able to write to a dry-run file multiple times if the dry-run file object is threaded through" <| out = data.create_out temp1 = Context.Output.with_disabled <| - result = data.table.write out on_problems=Report_Error . should_succeed + result = data.table.write out on_problems=..Report_Error . should_succeed Problems.expect_only_warning Dry_Run_Operation result result.exists.should_be_true result @@ -457,7 +457,7 @@ spec_write suite_builder suffix test_sheet_name = opened_temp.sheet_names . should_equal ['EnsoSheet'] temp2 = Context.Output.with_disabled <| - result = data.table.write temp1 (..Sheet "Another") on_problems=Report_Error . should_succeed + result = data.table.write temp1 (..Sheet "Another") on_problems=..Report_Error . should_succeed Problems.expect_only_warning Dry_Run_Operation result result.exists.should_be_true result @@ -476,11 +476,11 @@ spec_write suite_builder suffix test_sheet_name = bak = out.parent / (out.name+".bak") t1 = Table.new [["X", [1]]] - t1.write out on_existing_file=Existing_File_Behavior.Backup on_problems=Report_Error . should_succeed + t1.write out on_existing_file=Existing_File_Behavior.Backup on_problems=..Report_Error . should_succeed bak.exists.should_be_false t2 = Table.new [["X", [2]]] - t2.write out on_existing_file=Existing_File_Behavior.Backup on_problems=Report_Error . should_succeed + t2.write out on_existing_file=Existing_File_Behavior.Backup on_problems=..Report_Error . should_succeed bak.exists.should_be_true opened_out = out.read @@ -491,7 +491,7 @@ spec_write suite_builder suffix test_sheet_name = opened_backup.read 'EnsoSheet' . should_equal t1 t3 = Table.new [["X", [3]]] - t3.write out on_existing_file=Existing_File_Behavior.Backup on_problems=Report_Error . should_succeed + t3.write out on_existing_file=Existing_File_Behavior.Backup on_problems=..Report_Error . should_succeed opened_out.read 'EnsoSheet' . should_equal t3 # The backup should actually have been updated @@ -508,7 +508,7 @@ spec_write suite_builder suffix test_sheet_name = out_bak = out.parent / (out.name+".bak") - data.table.write out on_problems=Report_Error . should_succeed . should_equal out + data.table.write out on_problems=..Report_Error . 
should_succeed . should_equal out written = out.read written.sheet_count . should_equal 1 written.sheet_names . should_equal ['EnsoSheet'] @@ -603,7 +603,7 @@ spec_write suite_builder suffix test_sheet_name = group_builder.specify "should allow to write to a workbook that is open, and reflect that changes when the sheet is read again" <| out = data.create_out - data.table.write out on_problems=Report_Error . should_succeed + data.table.write out on_problems=..Report_Error . should_succeed workbook = out.read workbook.sheet_names.should_equal ["EnsoSheet"] @@ -1023,7 +1023,7 @@ add_specs suite_builder = big_file = enso_project.data / "transient" / "big.xlsx" big_file.delete_if_exists - table.write big_file on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed + table.write big_file on_existing_file=Existing_File_Behavior.Overwrite on_problems=..Report_Error . should_succeed IO.println "Done "+Time_Of_Day.now.to_text # Verify that the file is as big as we expected. @@ -1055,7 +1055,7 @@ add_specs suite_builder = big_file = enso_project.data / "transient" / "big.xls" big_file.delete_if_exists - table.write big_file on_existing_file=Existing_File_Behavior.Overwrite on_problems=Report_Error . should_succeed + table.write big_file on_existing_file=Existing_File_Behavior.Overwrite on_problems=..Report_Error . should_succeed IO.println "Done "+Time_Of_Day.now.to_text # Verify that the file is as big as we expected. diff --git a/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso b/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso index b006a1e77a23..d70afbaf9770 100644 --- a/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso @@ -36,7 +36,7 @@ add_specs suite_builder = suite_builder.group "Aggregate Columns" group_builder- result = acc = Aggregate_Column_Helper.java_aggregator "Name" resolved indexes = Vector.new table.row_count v->v - Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator-> + Java_Problems.with_problem_aggregator ..Report_Warning java_problem_aggregator-> Illegal_Argument.handle_java_exception <| acc.aggregate indexes java_problem_aggregator diff --git a/test/Table_Tests/src/In_Memory/Builders_Spec.enso b/test/Table_Tests/src/In_Memory/Builders_Spec.enso index af4f98931a01..b3bedf3dd366 100644 --- a/test/Table_Tests/src/In_Memory/Builders_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Builders_Spec.enso @@ -23,7 +23,7 @@ add_specs suite_builder = suite_builder.group "[In-Memory] Storage Builders" gro other ones, selecting the types automatically. elem = ['X', 42, 1.5, My.Data 1 2, Date.new 2022 8 27, Time_Of_Day.new 18 00, Date_Time.new 2022 8 27 11 22 25, "a", Nothing] elem.each e-> Test.with_clue "{"+e.to_text+"}: " <| - r = Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator-> + r = Java_Problems.with_problem_aggregator ..Report_Warning java_problem_aggregator-> builder = make_inferred_builder 1 java_problem_aggregator vector = Vector.fill 10 e + Vector.fill 1000 Nothing + Vector.fill 5 e 0.up_to 10 . 
each _-> @@ -38,7 +38,7 @@ add_specs suite_builder = suite_builder.group "[In-Memory] Storage Builders" gro group_builder.specify "Inferred Builder should correctly resize when retyping to a mixed column, with an underestimated initial size" <| mixed_values = [10, 11, 22, 23, 24, 25, '2020-02-28'] - r = Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator-> + r = Java_Problems.with_problem_aggregator ..Report_Warning java_problem_aggregator-> builder = make_inferred_builder 3 java_problem_aggregator mixed_values.map v-> builder.append v storage = builder.seal diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index 09956f0058df..e69c5a631b0c 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -187,6 +187,7 @@ add_specs suite_builder = 0 | 10 | False 1 | 20 | True 2 | 30 | False + ## """ expected_text = raw_expected_text.lines.map .trim . join '\n' raw_got_text = t.display got_text = raw_got_text.lines.map .trim . join '\n' @@ -741,7 +742,7 @@ add_specs suite_builder = c = ["C", [0.1, 0.2, 0.3, 0.4, 0.5, 0.6]] t = Table.new [a, b, c] - r2 = t.distinct ["A", "B"] on_problems=Report_Error + r2 = t.distinct ["A", "B"] on_problems=..Report_Error r2.at "A" . to_vector . should_equal ["a", "a"] r2.at "B" . to_vector . should_equal [1, 2] r2.at "C" . to_vector . should_equal [0.1, 0.3] @@ -750,7 +751,7 @@ add_specs suite_builder = a = ["A", ["a", Nothing, "b", "a", "b", Nothing, "a", "b"]] b = ["B", [1, 2, 3, 4, 5, 6, 7, 8]] t = Table.new [a, b] - r = t.distinct ["A"] on_problems=Report_Error + r = t.distinct ["A"] on_problems=..Report_Error r.at "A" . to_vector . should_equal ["a", Nothing, "b"] r.at "B" . to_vector . should_equal [1, 2, 3] @@ -762,11 +763,11 @@ add_specs suite_builder = x = ["X", ['A', 'a', 'enso', 'śledź', 'Enso', 'A', 's\u0301ledz\u0301']] y = ["Y", [1, 2, 3, 4, 5, 6, 7]] t1 = Table.new [x, y] - d1 = t1.distinct ["X"] on_problems=Report_Error + d1 = t1.distinct ["X"] on_problems=..Report_Error d1.at "X" . to_vector . should_equal ['A', 'a', 'enso', 'śledź', 'Enso'] d1.at "Y" . to_vector . should_equal [1, 2, 3, 4, 5] - d2 = t1.distinct ["X"] case_sensitivity=Case_Sensitivity.Insensitive on_problems=Report_Error + d2 = t1.distinct ["X"] case_sensitivity=Case_Sensitivity.Insensitive on_problems=..Report_Error d2.at "X" . to_vector . should_equal ['A', 'enso', 'śledź'] d2.at "Y" . to_vector . should_equal [1, 3, 4] diff --git a/test/Table_Tests/src/In_Memory/Table_Xml_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Xml_Spec.enso index 04817fe3ddea..b0c0fde0ef90 100644 --- a/test/Table_Tests/src/In_Memory/Table_Xml_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Xml_Spec.enso @@ -181,11 +181,12 @@ add_specs suite_builder = 1859 + ## ''' r.outer_xml.should_equal (depretty e) Problems.expect_warning (Unexpected_Extra_Columns.Warning ["Last Borrowed"]) r group_builder.specify "Will not warn if not all incoming columns specified, but warnings are ignored" <| t = data.table . take 1 - r = t.to_xml ["Year"] ["Author", "Price"] "Title" on_problems=Problem_Behavior.Ignore + r = t.to_xml ["Year"] ["Author", "Price"] "Title" on_problems=..Ignore e = ''' @@ -193,11 +194,12 @@ add_specs suite_builder = 1859
- r.outer_xml.should_equal (depretty e) + ## ''' + r.outer_xml.should_equal (depretty e) Problems.assume_no_problems r - group_builder.specify "Will error if not all incoming columns specified and on_problems set to Report_Error" <| + group_builder.specify "Will error if not all incoming columns specified and on_problems set to `Report_Error`" <| t = data.table - r = t.to_xml ["Year"] ["Author", "Price"] "Title" on_problems=Problem_Behavior.Report_Error + r = t.to_xml ["Year"] ["Author", "Price"] "Title" on_problems=..Report_Error r.should_fail_with (Unexpected_Extra_Columns.Warning ["Last Borrowed"]) group_builder.specify "Will error if configured with a element column that is not in the table" <| t = data.table @@ -221,6 +223,7 @@ add_specs suite_builder = 1859 + ## ''' r.outer_xml.should_equal (depretty e) group_builder.specify "Illegal xml names are cleaned up" <| numeric_column_name = ["1", [10]] @@ -239,6 +242,7 @@ add_specs suite_builder = 1999-01-02T03:40Z[UTC] + ## ''' r.outer_xml.should_equal (depretty e) group_builder.specify "Empty strings are empty attributes/elements. Nothing values omit the entire attribute/element" <| desc_column = ["desc", ["Col1 and Col2 both have values", "Col1 has value, Col2 Nothing", "Col1 empty string, Col2 has value", "Col1 and Col2 both blank string", "Col1 and Col2 both Nothing" ]] @@ -274,6 +278,7 @@ add_specs suite_builder = + ## ''' r.outer_xml.should_equal (depretty e) group_builder.specify "Panic if wrong types passed in element_columns" <| t = data.table @@ -326,6 +331,7 @@ add_specs suite_builder = 👩‍🔬 + ## ''' r.outer_xml.should_equal (depretty e) group_builder.specify "xml or special characters get escaped" <| xml_column = ["xml", ["", "12", '']] diff --git a/test/Table_Tests/src/Util.enso b/test/Table_Tests/src/Util.enso index 4a7fe6123a49..723b29c7d43d 100644 --- a/test/Table_Tests/src/Util.enso +++ b/test/Table_Tests/src/Util.enso @@ -130,7 +130,7 @@ Any.should_have_relative_ordering self (example : Vector) = got_piece = pieces.at ix expected_piece = example.at ix are_consistent = Panic.rethrow <| - (got_piece.sort on_incomparable=Problem_Behavior.Report_Error) == (expected_piece.sort on_incomparable=Problem_Behavior.Report_Error) + (got_piece.sort on_problems=..Report_Error) == (expected_piece.sort on_problems=..Report_Error) if are_consistent.not then offset = pieces.take ix . fold 0 acc-> p-> acc+p.length Test.fail "Expected the run of vector elements starting at offset "+offset.to_text+" to be a permutation of "+expected_piece.to_display_text+" but got "+got_piece.to_display_text+" (at "+loc+")."
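
The rewritten tests rely on auto-scoped constructors: `..Report_Error` is resolved against the `Problem_Behavior` type expected by the `on_problems` parameter, so the constructor no longer needs to be written out in full. A minimal sketch of the new calling convention (the `main` wrapper and sample vector are illustrative only, not part of this patch):

    from Standard.Base import all

    main =
        v = [3, 1, 2]
        # `..Report_Error` is auto-scoped to `Problem_Behavior.Report_Error`
        # because `on_problems` is typed as `Problem_Behavior`.
        sorted = v.sort on_problems=..Report_Error
        IO.println sorted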