From 0ad70c63328c4e165f4b837bdf94c8c6177662de Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Fri, 2 Dec 2022 18:08:14 +0000 Subject: [PATCH] Tidy Standard.Base part 5 of n ... (hopefully the end...) (#3929) - Moved `Any`, `Error` and `Panic` to `Standard.Base`. - Separated `Json` and `Range` extensions into own modules. - Tidied `Case`, `Case_Sensitivity`, `Encoding`, `Matching`, `Regex_Matcher`, `Span`, `Text_Matcher`, `Text_Ordering` and `Text_Sub_Range` in `Standard.Base.Data.Text`. - Tidied `Standard.Base.Data.Text.Extensions` and stopped it re-exporting anything. - Tidied `Regex_Mode`. Renamed `Option` to `Regex_Option` and added type to export. - Tidied up `Regex` space. - Tidied up `Meta` space. - Remove `Matching` from export. - Moved `Standard.Base.Data.Boolean` to `Standard.Base.Boolean`. # Important Notes - Moved `to_json` and `to_default_visualization_data` from base types to extension methods. --- .../lib/Standard/Base/0.0.0-dev/package.yaml | 13 +- .../Base/0.0.0-dev/src/{Data => }/Any.enso | 40 +- .../lib/Standard/Base/0.0.0-dev/src/Data.enso | 2 +- .../Base/0.0.0-dev/src/Data/Array.enso | 23 +- .../Base/0.0.0-dev/src/Data/Array_Proxy.enso | 2 +- .../Base/0.0.0-dev/src/Data/Boolean.enso | 2 +- .../0.0.0-dev/src/Data/Filter_Condition.enso | 2 +- .../0.0.0-dev/src/Data/Index_Sub_Range.enso | 21 +- .../Base/0.0.0-dev/src/Data/Json.enso | 78 +-- .../0.0.0-dev/src/Data/Json/Extensions.enso | 194 +++++++ .../0.0.0-dev/src/Data/Json/Internal.enso | 12 +- .../Base/0.0.0-dev/src/Data/List.enso | 4 +- .../Base/0.0.0-dev/src/Data/Locale.enso | 20 +- .../Standard/Base/0.0.0-dev/src/Data/Map.enso | 4 +- .../Base/0.0.0-dev/src/Data/Map/Internal.enso | 2 +- .../Base/0.0.0-dev/src/Data/Maybe.enso | 2 +- .../Base/0.0.0-dev/src/Data/Noise.enso | 7 +- .../Base/0.0.0-dev/src/Data/Numbers.enso | 15 +- .../src/Data/Ordering/Comparator.enso | 6 +- .../Ordering/Vector_Lexicographic_Order.enso | 2 +- .../Base/0.0.0-dev/src/Data/Pair.enso | 2 +- .../Base/0.0.0-dev/src/Data/Range.enso | 45 +- .../0.0.0-dev/src/Data/Range/Extensions.enso | 37 ++ .../Base/0.0.0-dev/src/Data/Regression.enso | 20 +- .../Base/0.0.0-dev/src/Data/Statistics.enso | 16 +- .../Base/0.0.0-dev/src/Data/Text.enso | 5 +- .../Base/0.0.0-dev/src/Data/Text/Case.enso | 17 - .../src/Data/Text/Case_Sensitivity.enso | 12 +- .../0.0.0-dev/src/Data/Text/Encoding.enso | 43 +- .../0.0.0-dev/src/Data/Text/Extensions.enso | 107 ++-- .../0.0.0-dev/src/Data/Text/Matching.enso | 34 +- .../src/Data/Text/Prim_Text_Helper.enso | 2 +- .../Base/0.0.0-dev/src/Data/Text/Regex.enso | 108 ++-- .../0.0.0-dev/src/Data/Text/Regex/Engine.enso | 193 +------ .../src/Data/Text/Regex/Engine/Default.enso | 216 +++---- .../0.0.0-dev/src/Data/Text/Regex/Match.enso | 105 ++++ .../src/Data/Text/Regex/Pattern.enso | 88 +++ .../src/Data/Text/Regex/Regex_Mode.enso | 11 +- .../Regex/{Option.enso => Regex_Option.enso} | 13 +- .../src/Data/Text/Regex_Matcher.enso | 19 +- .../Base/0.0.0-dev/src/Data/Text/Span.enso | 49 +- .../0.0.0-dev/src/Data/Text/Text_Matcher.enso | 21 +- .../src/Data/Text/Text_Ordering.enso | 14 +- .../src/Data/Text/Text_Sub_Range.enso | 174 +++--- .../Base/0.0.0-dev/src/Data/Time/Date.enso | 18 +- .../0.0.0-dev/src/Data/Time/Date_Time.enso | 6 +- .../src/Data/Time/Day_Of_Week_From.enso | 7 +- .../0.0.0-dev/src/Data/Time/Duration.enso | 20 +- .../Base/0.0.0-dev/src/Data/Time/Period.enso | 18 +- .../0.0.0-dev/src/Data/Time/Time_Of_Day.enso | 6 +- .../0.0.0-dev/src/Data/Time/Time_Zone.enso | 6 +- .../Base/0.0.0-dev/src/Data/Vector.enso | 53 +- 
.../Standard/Base/0.0.0-dev/src/Error.enso | 142 +++++ .../Base/0.0.0-dev/src/Error/Common.enso | 540 +++--------------- .../0.0.0-dev/src/Error/Problem_Behavior.enso | 6 +- .../Standard/Base/0.0.0-dev/src/Function.enso | 2 +- .../lib/Standard/Base/0.0.0-dev/src/IO.enso | 2 +- .../lib/Standard/Base/0.0.0-dev/src/Main.enso | 99 ++-- .../lib/Standard/Base/0.0.0-dev/src/Meta.enso | 253 ++++---- .../0.0.0-dev/src/Network/Extensions.enso | 16 + .../src/Network/{Http.enso => HTTP.enso} | 439 ++++---------- .../Base/0.0.0-dev/src/Network/HTTP/Form.enso | 94 +++ .../src/Network/HTTP/HTTP_Method.enso | 38 ++ .../src/Network/HTTP/HTTP_Status_Code.enso | 166 ++++++ .../src/Network/HTTP/HTTP_Version.enso | 6 + .../0.0.0-dev/src/Network/HTTP/Header.enso | 181 ++++++ .../0.0.0-dev/src/Network/HTTP/Request.enso | 240 ++++++++ .../Body.enso => HTTP/Request_Body.enso} | 8 +- .../src/Network/{Http => HTTP}/Response.enso | 25 +- .../Body.enso => HTTP/Response_Body.enso} | 15 +- .../Base/0.0.0-dev/src/Network/Http/Form.enso | 119 ---- .../0.0.0-dev/src/Network/Http/Header.enso | 186 ------ .../0.0.0-dev/src/Network/Http/Method.enso | 29 - .../0.0.0-dev/src/Network/Http/Request.enso | 238 -------- .../src/Network/Http/Status_Code.enso | 173 ------ .../0.0.0-dev/src/Network/Http/Version.enso | 11 - .../Base/0.0.0-dev/src/Network/Proxy.enso | 38 +- .../Base/0.0.0-dev/src/Network/URI.enso | 91 ++- .../0.0.0-dev/src/Network/URI/Internal.enso | 12 - .../Standard/Base/0.0.0-dev/src/Nothing.enso | 2 +- .../Standard/Base/0.0.0-dev/src/Panic.enso | 268 +++++++++ .../Standard/Base/0.0.0-dev/src/Polyglot.enso | 2 +- .../Base/0.0.0-dev/src/Polyglot/Java.enso | 2 +- .../Standard/Base/0.0.0-dev/src/Random.enso | 2 +- .../Standard/Base/0.0.0-dev/src/Runtime.enso | 2 +- .../Base/0.0.0-dev/src/Runtime/Debug.enso | 2 +- .../src/Runtime/Managed_Resource.enso | 2 +- .../Base/0.0.0-dev/src/Runtime/Ref.enso | 2 +- .../Base/0.0.0-dev/src/Runtime/State.enso | 2 +- .../Base/0.0.0-dev/src/Runtime/Thread.enso | 2 +- .../Base/0.0.0-dev/src/Runtime/Unsafe.enso | 2 +- .../Base/0.0.0-dev/src/System/File.enso | 59 +- .../System/File/Existing_File_Behavior.enso | 9 +- .../src/System/File/Write_Extensions.enso | 13 +- .../0.0.0-dev/src/System/File_Format.enso | 14 +- .../src/System/Process/Exit_Code.enso | 4 +- .../Standard/Base/0.0.0-dev/src/Warning.enso | 6 +- .../0.0.0-dev/src/Connection/Connection.enso | 16 +- .../src/Connection/Postgres_Options.enso | 5 +- .../src/Connection/Redshift_Options.enso | 3 - .../src/Connection/SQLite_Options.enso | 3 - .../Database/0.0.0-dev/src/Data/Column.enso | 19 +- .../Database/0.0.0-dev/src/Data/Dialect.enso | 9 +- .../Database/0.0.0-dev/src/Data/SQL_Type.enso | 4 +- .../Database/0.0.0-dev/src/Data/Table.enso | 59 +- .../Database/0.0.0-dev/src/Errors.enso | 73 ++- .../src/Internal/Aggregate_Helper.enso | 8 +- .../src/Internal/Base_Generator.enso | 30 +- .../0.0.0-dev/src/Internal/Helpers.enso | 24 +- .../0.0.0-dev/src/Internal/IR/Context.enso | 4 +- .../0.0.0-dev/src/Internal/IR/From_Spec.enso | 4 +- .../src/Internal/JDBC_Connection.enso | 12 +- .../Postgres/Postgres_Connection.enso | 2 +- .../Internal/Postgres/Postgres_Dialect.enso | 10 +- .../Internal/SQLite/SQLite_Connection.enso | 2 +- .../src/Internal/SQLite/SQLite_Dialect.enso | 14 +- .../Standard/Database/0.0.0-dev/src/Main.enso | 35 +- .../Standard/Examples/0.0.0-dev/src/Main.enso | 26 +- .../Standard/Searcher/0.0.0-dev/src/Main.enso | 6 +- .../Searcher/0.0.0-dev/src/Network.enso | 22 +- .../Searcher/0.0.0-dev/src/Network/Http.enso | 22 +- 
.../Table/0.0.0-dev/src/Data/Column.enso | 8 +- .../0.0.0-dev/src/Data/Data_Formatter.enso | 14 +- .../Table/0.0.0-dev/src/Data/Table.enso | 57 +- .../0.0.0-dev/src/Data/Table_Conversions.enso | 16 +- .../src/Delimited/Delimited_Format.enso | 11 +- .../src/Delimited/Delimited_Reader.enso | 10 +- .../src/Delimited/Delimited_Writer.enso | 18 +- .../0.0.0-dev/src/Excel/Excel_Format.enso | 16 +- .../0.0.0-dev/src/Excel/Excel_Range.enso | 4 +- .../0.0.0-dev/src/Excel/Excel_Reader.enso | 13 +- .../0.0.0-dev/src/Excel/Excel_Section.enso | 3 - .../0.0.0-dev/src/Excel/Excel_Writer.enso | 20 +- .../src/Internal/Parse_Values_Helper.enso | 2 +- .../0.0.0-dev/src/Internal/Table_Helpers.enso | 18 +- .../Standard/Table/0.0.0-dev/src/Main.enso | 53 +- .../Visualization/0.0.0-dev/src/Helpers.enso | 15 + .../0.0.0-dev/src/SQL/Visualization.enso | 13 +- .../languageserver/search/Suggestions.scala | 2 +- .../test/instrument/ReplTest.scala | 2 +- .../test/instrument/RuntimeErrorsTest.scala | 18 +- .../test/instrument/RuntimeServerTest.scala | 6 +- .../RuntimeVisualizationsTest.scala | 4 +- .../node/expression/builtin/Error.java | 2 +- .../node/expression/builtin/error/Panic.java | 2 +- .../org/enso/compiler/EnsoCompilerTest.java | 2 +- .../org/enso/compiler/ParseStdLibTest.java | 2 +- .../semantic/CompileDiagnosticsTest.scala | 6 +- .../test/semantic/DataflowErrorsTest.scala | 20 +- .../test/semantic/InteropTest.scala | 2 +- .../test/semantic/MethodsTest.scala | 6 +- .../interpreter/test/semantic/TextTest.scala | 2 + .../java/org/enso/base/Array_Builder.java | 4 + test/Benchmarks/src/Equality.enso | 4 +- .../Aggregate_Spec.enso | 6 +- .../Expression_Spec.enso | 2 - .../Common_Table_Operations/Filter_Spec.enso | 10 +- .../Missing_Values_Spec.enso | 4 +- .../Take_Drop_Spec.enso | 16 +- .../src/Database/Codegen_Spec.enso | 10 +- .../Table_Tests/src/Database/Common_Spec.enso | 4 +- .../Helpers/Fake_Test_Connection.enso | 2 +- .../src/Database/Postgres_Spec.enso | 2 +- .../Table_Tests/src/Database/SQLite_Spec.enso | 12 +- .../src/Formatting/Data_Formatter_Spec.enso | 10 +- .../src/Formatting/Parse_Values_Spec.enso | 2 - .../src/IO/Delimited_Read_Spec.enso | 8 +- .../src/IO/Delimited_Write_Spec.enso | 8 +- test/Table_Tests/src/IO/Excel_Spec.enso | 56 +- .../src/In_Memory/Aggregate_Column_Spec.enso | 2 +- .../Table_Tests/src/In_Memory/Table_Spec.enso | 20 +- test/Tests/src/Data/Array_Spec.enso | 4 +- test/Tests/src/Data/Noise/Generator_Spec.enso | 2 +- test/Tests/src/Data/Range_Spec.enso | 40 +- test/Tests/src/Data/Regression_Spec.enso | 4 +- test/Tests/src/Data/Statistics_Spec.enso | 56 +- .../Data/Text/Default_Regex_Engine_Spec.enso | 110 ++-- test/Tests/src/Data/Text/Encoding_Spec.enso | 8 +- test/Tests/src/Data/Text/Matching_Spec.enso | 5 +- test/Tests/src/Data/Text/Regex_Spec.enso | 11 +- test/Tests/src/Data/Text/Span_Spec.enso | 37 +- test/Tests/src/Data/Text/Utils_Spec.enso | 4 +- test/Tests/src/Data/Text_Spec.enso | 130 ++--- test/Tests/src/Data/Time/Date_Spec.enso | 2 +- test/Tests/src/Data/Time/Date_Time_Spec.enso | 6 +- .../Tests/src/Data/Time/Time_Of_Day_Spec.enso | 2 +- test/Tests/src/Data/Vector_Spec.enso | 10 +- test/Tests/src/Network/Http/Header_Spec.enso | 6 +- test/Tests/src/Network/Http/Request_Spec.enso | 22 +- test/Tests/src/Network/Http_Spec.enso | 110 ++-- test/Tests/src/Network/URI_Spec.enso | 6 +- .../src/Runtime/Managed_Resource_Spec.enso | 16 +- test/Tests/src/Semantic/Conversion_Spec.enso | 2 +- test/Tests/src/Semantic/Error_Spec.enso | 48 +- test/Tests/src/Semantic/Meta_Spec.enso | 10 +- 
test/Tests/src/System/File_Spec.enso | 28 +- test/Visualization_Tests/src/Table_Spec.enso | 21 + .../Base/0.0.0-dev/src/{Data => }/Any.enso | 0 .../Standard/Base/0.0.0-dev/src/Error.enso | 9 + .../Base/0.0.0-dev/src/Error/Common.enso | 15 - .../lib/Standard/Base/0.0.0-dev/src/Main.enso | 10 +- .../Standard/Base/0.0.0-dev/src/Panic.enso | 4 + .../Base/0.0.0-dev/src/Runtime/Resource.enso | 3 + 203 files changed, 3516 insertions(+), 3686 deletions(-) rename distribution/lib/Standard/Base/0.0.0-dev/src/{Data => }/Any.enso (89%) create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range/Extensions.enso create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Match.enso create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Pattern.enso rename distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/{Option.enso => Regex_Option.enso} (80%) create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Error.enso create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/Extensions.enso rename distribution/lib/Standard/Base/0.0.0-dev/src/Network/{Http.enso => HTTP.enso} (52%) create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Form.enso create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Method.enso create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Status_Code.enso create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Version.enso create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Header.enso create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request.enso rename distribution/lib/Standard/Base/0.0.0-dev/src/Network/{Http/Request/Body.enso => HTTP/Request_Body.enso} (77%) rename distribution/lib/Standard/Base/0.0.0-dev/src/Network/{Http => HTTP}/Response.enso (70%) rename distribution/lib/Standard/Base/0.0.0-dev/src/Network/{Http/Response/Body.enso => HTTP/Response_Body.enso} (85%) delete mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Form.enso delete mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Header.enso delete mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Method.enso delete mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Request.enso delete mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Status_Code.enso delete mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Version.enso delete mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI/Internal.enso create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso rename test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/{Data => }/Any.enso (100%) create mode 100644 test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Error.enso create mode 100644 test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso diff --git a/distribution/lib/Standard/Base/0.0.0-dev/package.yaml b/distribution/lib/Standard/Base/0.0.0-dev/package.yaml index 7387d797cc4a..a10d73bad707 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/package.yaml +++ b/distribution/lib/Standard/Base/0.0.0-dev/package.yaml @@ -18,22 +18,13 @@ component-groups: - Standard.Base.Data.list_directory - Web: exports: - - Standard.Base.Network.Http.new - - Standard.Base.Network.Http.fetch 
- - Standard.Base.Network.Http.get - - Standard.Base.Network.Http.post - - Standard.Base.Network.Http.post_form - - Standard.Base.Network.Http.post_json - - Standard.Base.Network.Http.put - - Standard.Base.Network.Http.put_json - - Standard.Base.Network.Http.head - - Standard.Base.Network.Http.options + - Standard.Base.Network.HTTP.HTTP.new + - Standard.Base.Network.HTTP.HTTP.fetch - Parse: exports: - Standard.Base.Data.Json.Json.parse - Standard.Base.Data.Text.Regex.compile - Standard.Base.Data.Text.Regex.escape - - Standard.Base.Data.Text.Regex.from_flags - Select: exports: - Standard.Base.Data.Vector.Vector.tail diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Any.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso similarity index 89% rename from distribution/lib/Standard/Base/0.0.0-dev/src/Data/Any.enso rename to distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso index 43562a63d5dd..33e0878424b7 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Any.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso @@ -1,13 +1,12 @@ import project.Data.Ordering.Ordering import project.Data.Pair.Pair +import project.Data.Range.Extensions import project.Data.Text.Text +import project.Error.Error import project.Nothing.Nothing import project.Meta from project.Data.Boolean import Boolean, True, False -from project.Data.Json import all -from project.Data.Range import all -from project.Error.Common import Error, dataflow_error_handler ## Any is the universal top-type, with all other types being subsumed by it. @@ -83,18 +82,18 @@ type Any self_meta = Meta.meta self that_meta = Meta.meta that case Pair.new self_meta that_meta of - Pair.Value (Meta.Atom_Data _) (Meta.Atom_Data _) -> + Pair.Value (Meta.Atom.Value _) (Meta.Atom.Value _) -> c_1 = self_meta.constructor ... c_2 = that_meta.constructor ... if Meta.is_same_object c_1 c_2 . not then False else f_1 = self_meta.fields f_2 = that_meta.fields 0.up_to f_1.length . all i-> (f_1.at i) == (f_2.at i) - Pair.Value (Meta.Error_Data _) (Meta.Error_Data _) -> self_meta.payload == that_meta.payload - Pair.Value (Meta.Polyglot_Data o_1) (Meta.Polyglot_Data o_2) -> - langs_match = (self_meta.get_language == Meta.Java) && (that_meta.get_language == Meta.Java) + Pair.Value (Meta.Error.Value _) (Meta.Error.Value _) -> self_meta.payload == that_meta.payload + Pair.Value (Meta.Polyglot.Value o_1) (Meta.Polyglot.Value o_2) -> + langs_match = (self_meta.get_language == Meta.Language.Java) && (that_meta.get_language == Meta.Language.Java) if langs_match.not then False else o_1.equals o_2 - Pair.Value (Meta.Unresolved_Symbol_Data _) (Meta.Unresolved_Symbol_Data _) -> + Pair.Value (Meta.Unresolved_Symbol.Value _) (Meta.Unresolved_Symbol.Value _) -> (self_meta.name == that_meta.name) && (self_meta.scope == that_meta.scope) ## Constructor comparison is covered by the identity equality. Primitive objects should define their own equality. @@ -261,13 +260,13 @@ type Any matching type. By default this is identity. > Example - Catching an `Illegal_Argument_Error` and returning its message. + Catching an `Illegal_Argument` and returning its message. from Standard.Base import all example_catch = - error = Error.throw (Illegal_Argument_Error_Data "My message") - error.catch Illegal_Argument_Error_Data (err -> err.message) + error = Error.throw (Illegal_Argument.Error "My message") + error.catch Illegal_Argument.Error (err -> err.message) > Example Catching any dataflow error and turning it into a regular value. 
@@ -394,22 +393,3 @@ type Any (+1 >> *2) 2 >> : (Any -> Any) -> (Any -> Any) -> Any -> Any >> self ~that = x -> that (self x) - - ## UNSTABLE - ADVANCED - - Returns a Text used to display this value in the IDE. - - The particular representation is left unspecified and subject to change in - the future. The current implementation uses JSON serialization as the - default. - - Types defining their own versions of this method should ensure that the - result is reasonably small and that the operation is quick to compute. - - > Example - Converting the number `2` into visualization data. - - 2.to_default_visualization_data - to_default_visualization_data : Text - to_default_visualization_data self = self.to_json.to_text diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso index 1bd2cc4a0ccb..c6f32a0e47fe 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Text.Encoding.Encoding import project.Data.Text.Text import project.Data.Vector.Vector diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso index 6ed1785671ed..11c3d2906ce8 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso @@ -1,14 +1,14 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Numbers.Integer import project.Data.Ordering.Ordering +import project.Data.Range.Extensions import project.Data.Text.Text import project.Data.Vector.Vector -import project.Error.Common.Panic import project.Meta import project.Nothing.Nothing +import project.Panic.Panic from project.Data.Boolean import Boolean, False -from project.Data.Range import all ## The type of primitive mutable arrays. @Builtin_Type @@ -168,20 +168,3 @@ type Array eq_at i = self.at i == that.at i Panic.catch_primitive handler=(_ -> False) if self.length == that.length then 0.up_to self.length . all eq_at else False - - ## UNSTABLE - ADVANCED - - Returns a Text used to display this value in the IDE. - - The particular representation is left unspecified and subject to change in - the future. The current implementation uses JSON serialization as the - default. - - > Example - Converting an array to its default visualization representation. - - [1, 2, 3, 4].to_array.to_default_visualization_data - to_default_visualization_data : Text - to_default_visualization_data self = - Vector.from_polyglot_array self . 
to_default_visualization_data diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array_Proxy.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array_Proxy.enso index 261ab2c7c768..8830c2d317c8 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array_Proxy.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array_Proxy.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Array.Array import project.Data.Numbers.Integer diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Boolean.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Boolean.enso index b230ecc8efa4..ec91639c5c08 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Boolean.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Boolean.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Ordering.Ordering import project.Nothing.Nothing diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso index 20e1fb78df72..4349bcfcd979 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Text.Extensions import project.Data.Text.Regex import project.Data.Text.Text diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso index 73ab48c8b7fd..d85141f626bf 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso @@ -1,13 +1,16 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Numbers.Integer +import project.Data.Range.Range +import project.Data.Range.Extensions import project.Data.Vector.Vector +import project.Error.Error import project.Math +import project.Panic.Panic import project.Random import project.Runtime.Ref.Ref from project.Data.Boolean import Boolean, True, False -from project.Data.Range import all -from project.Error.Common import Error, Panic, Index_Out_Of_Bounds_Error_Data, Illegal_Argument_Error_Data +from project.Error.Common import Index_Out_Of_Bounds_Error_Data, Illegal_Argument type Index_Sub_Range ## Select the first `count` items. @@ -64,8 +67,8 @@ resolve_ranges ranges length = if (actual_index < 0) || (actual_index >= length) then Panic.throw (Index_Out_Of_Bounds_Error_Data descriptor length) else actual_index Range.Value start end step -> - if step <= 0 then Panic.throw (Illegal_Argument_Error_Data "Range step must be positive.") else - if (start < 0) || (end < 0) then Panic.throw (Illegal_Argument_Error_Data "Range start and end must not be negative.") else + if step <= 0 then Panic.throw (Illegal_Argument.Error "Range step must be positive.") else + if (start < 0) || (end < 0) then Panic.throw (Illegal_Argument.Error "Range start and end must not be negative.") else if start >= length then Panic.throw (Index_Out_Of_Bounds_Error_Data start length) else actual_end = Math.min end length if actual_end < start then start.up_to start . with_step step else @@ -161,7 +164,7 @@ take_helper length at single_slice slice_ranges index_sub_range = case index_sub end = 0.up_to length . 
find i-> (predicate (at i)).not true_end = if end.is_nothing then length else end single_slice 0 true_end - Index_Sub_Range.By_Index one_or_many_descriptors -> Panic.recover [Index_Out_Of_Bounds_Error_Data, Illegal_Argument_Error_Data] <| + Index_Sub_Range.By_Index one_or_many_descriptors -> Panic.recover [Index_Out_Of_Bounds_Error_Data, Illegal_Argument.Error] <| indices = case one_or_many_descriptors of _ : Vector -> one_or_many_descriptors _ -> [one_or_many_descriptors] @@ -172,7 +175,7 @@ take_helper length at single_slice slice_ranges index_sub_range = case index_sub indices_to_take = Random.random_indices length count rng take_helper length at single_slice slice_ranges (Index_Sub_Range.By_Index indices_to_take) Index_Sub_Range.Every step start -> - if step <= 0 then Error.throw (Illegal_Argument_Error_Data "Step within Every must be positive.") else + if step <= 0 then Error.throw (Illegal_Argument.Error "Step within Every must be positive.") else if start >= length then single_slice 0 0 else range = start.up_to length . with_step step take_helper length at single_slice slice_ranges (Index_Sub_Range.By_Index range) @@ -211,7 +214,7 @@ drop_helper length at single_slice slice_ranges index_sub_range = case index_sub end = 0.up_to length . find i-> (predicate (at i)).not true_end = if end.is_nothing then length else end single_slice true_end length - Index_Sub_Range.By_Index one_or_many_descriptors -> Panic.recover [Index_Out_Of_Bounds_Error_Data, Illegal_Argument_Error_Data] <| + Index_Sub_Range.By_Index one_or_many_descriptors -> Panic.recover [Index_Out_Of_Bounds_Error_Data, Illegal_Argument.Error] <| indices = case one_or_many_descriptors of _ : Vector -> one_or_many_descriptors _ -> [one_or_many_descriptors] @@ -224,7 +227,7 @@ drop_helper length at single_slice slice_ranges index_sub_range = case index_sub indices_to_drop = Random.random_indices length count rng drop_helper length at single_slice slice_ranges (Index_Sub_Range.By_Index indices_to_drop) Index_Sub_Range.Every step start -> - if step <= 0 then Error.throw (Illegal_Argument_Error_Data "Step within Every must be positive.") else + if step <= 0 then Error.throw (Illegal_Argument.Error "Step within Every must be positive.") else if start >= length then single_slice 0 length else range = start.up_to length . with_step step drop_helper length at single_slice slice_ranges (Index_Sub_Range.By_Index range) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json.enso index 513d23d67f5d..f7b66641673c 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json.enso @@ -1,15 +1,17 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Json.Internal import project.Data.Map.Map import project.Data.Map.No_Value_For_Key +import project.Data.Range.Extensions import project.Data.Text.Text import project.Data.Vector.Vector +import project.Error.Error import project.Meta import project.Nothing.Nothing +import project.Panic.Panic from project.Data.Boolean import Boolean, True, False -from project.Data.Range import all -from project.Error.Common import Panic, Error, Illegal_Argument_Error_Data +from project.Error.Common import Illegal_Argument ## Represents a JSON structure. type Json @@ -164,7 +166,7 @@ type Json Json.Object _ -> self.fields.get field . 
map_error case _ of No_Value_For_Key.Error _ -> No_Such_Field.Error field x -> x - _ -> Error.throw (Illegal_Argument_Error_Data "Json.get: self must be an Object") + _ -> Error.throw (Illegal_Argument.Error "Json.get: self must be an Object") ## UNSTABLE @@ -237,71 +239,3 @@ type Marshalling_Error "Type mismatch error: the json with type `" + json_text + "` did not match the format `" + format_text + "`." Marshalling_Error.Missing_Field _ field _ -> "Missing field in Json: the field `" + field.to_text "` was missing in the json." - -## ALIAS To JSON - - Generically converts an atom into a JSON object. - - The input atom is converted into a JSON object, with a `"type"` field set to - the atom's type name and all other fields serialized with their name as - object key and the value as the object value. - - > Example - Convert a vector to JSON. - [1, 2, 3, 4].to_json -Any.to_json : Json -Any.to_json self = - m = Meta.meta self - case m of - Meta.Atom_Data _ -> - cons = Meta.Constructor_Data m.constructor - fs = m.fields - fnames = cons.fields - json_fs = 0.up_to fnames.length . fold Map.empty m-> i-> - m.insert (fnames.at i) (fs.at i . to_json) - with_tp = json_fs . insert "type" (Json.String cons.name) - Json.Object with_tp - Meta.Constructor_Data _ -> - Json.Object (Map.empty . insert "type" (Json.String m.name)) - - ## The following two cases cannot be handled generically and should - instead define their own `to_json` implementations. - Meta.Polyglot_Data _ -> Json.Null - Meta.Primitive_Data _ -> Json.Null - -## Text to JSON conversion. - - > Example - Convert the text "Hello World!" to JSON. - - "Hello World!".to_json - > Example - Convert the text "cześć" to JSON. - - "cześć".to_json -Text.to_json : Json -Text.to_json self = Json.String self - -## Method used by object builders to convert a value into a valid JSON key. - - > Example - Ensure that the text "foo" is a JSON key. - "foo".to_json_key -Text.to_json_key : Text -Text.to_json_key self = self - -## Convert a boolean to JSON. - - > Example - Convert `True` to JSON. - True.to_json -Boolean.to_json : Json -Boolean.to_json self = Json.Boolean self - -## Convert `Nothing` to JSON. - - > Example - Convert `Nothing` to JSON. - Nothing.to_json -Nothing.to_json : Json -Nothing.to_json self = Json.Null diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso new file mode 100644 index 000000000000..c6c49362e76e --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso @@ -0,0 +1,194 @@ +import project.Any.Any +import project.Data.Array.Array +import project.Data.Json.Json +import project.Data.Locale.Locale +import project.Data.Map.Map +import project.Data.Numbers.Number +import project.Data.Range.Extensions +import project.Data.Text.Text +import project.Data.Vector.Vector +import project.Error.Error +import project.Meta +import project.Nothing.Nothing + +from project.Data.Boolean import Boolean, True, False + +## ALIAS To JSON + + Generically converts an atom into a JSON object. + + The input atom is converted into a JSON object, with a `"type"` field set to + the atom's type name and all other fields serialized with their name as + object key and the value as the object value. + + > Example + Convert a vector to JSON. 
+ [1, 2, 3, 4].to_json +Any.to_json : Json +Any.to_json self = + m = Meta.meta self + case m of + _ : Meta.Atom -> + cons = Meta.Constructor.Value m.constructor + fs = m.fields + fnames = cons.fields + json_fs = 0.up_to fnames.length . fold Map.empty m-> i-> + m.insert (fnames.at i) (fs.at i . to_json) + with_tp = json_fs . insert "type" (Json.String cons.name) + Json.Object with_tp + _ : Meta.Constructor -> + Json.Object (Map.empty . insert "type" (Json.String m.name)) + + ## The following two cases cannot be handled generically and should + instead define their own `to_json` implementations. + _ : Meta.Polyglot -> Json.Null + _ : Meta.Primitive -> Json.Null + +## UNSTABLE + ADVANCED + + Returns a Text used to display this value in the IDE. + + The particular representation is left unspecified and subject to change in + the future. The current implementation uses JSON serialization as the + default. + + Types defining their own versions of this method should ensure that the + result is reasonably small and that the operation is quick to compute. + + > Example + Converting the number `2` into visualization data. + + 2.to_default_visualization_data +Any.to_default_visualization_data : Text +Any.to_default_visualization_data self = self.to_json.to_text + +## UNSTABLE + + Returns a JSON representation of the dataflow error. + + > Example + Converting a dataflow error to JSON. + + import Standard.Examples + + example_to_json = Examples.throw_error.to_json +Error.to_json : Json +Error.to_json self = + error_type = ["type", "Error"] + caught = self.catch + error_content = ["content", caught.to_json] + error_message = ["message", caught.to_display_text] + Json.from_pairs [error_type, error_content, error_message] + +## UNSTABLE + + Returns a display representation of the dataflow error on which it is called. + + > Example + Displaying a dataflow error. + + import Standard.Examples + + example_display = Examples.throw_error.to_default_visualization_data +Error.to_default_visualization_data : Text +Error.to_default_visualization_data self = self.catch Any .to_default_visualization_data + +## Text to JSON conversion. + + > Example + Convert the text "Hello World!" to JSON. + + "Hello World!".to_json + > Example + Convert the text "cześć" to JSON. + + "cześć".to_json +Text.to_json : Json +Text.to_json self = Json.String self + +## Method used by object builders to convert a value into a valid JSON key. + + > Example + Ensure that the text "foo" is a JSON key. + "foo".to_json_key +Text.to_json_key : Text +Text.to_json_key self = self + +## Convert a boolean to JSON. + + > Example + Convert `True` to JSON. + True.to_json +Boolean.to_json : Json +Boolean.to_json self = Json.Boolean self + +## Convert `Nothing` to JSON. + + > Example + Convert `Nothing` to JSON. + Nothing.to_json +Nothing.to_json : Json +Nothing.to_json self = Json.Null + + +## Number to JSON conversion. + + > Example + Convert the number 8 to JSON. + + 8.to_json +Number.to_json : Json +Number.to_json self = Json.Number self + +## Vector to JSON conversion. + + > Example + Convert a vector of numbers to JSON. + + [1, 2, 3].to_json +Vector.to_json : Json +Vector.to_json self = Json.Array (self.map .to_json) + +## UNSTABLE + + Transform the vector into text for displaying as part of its default + visualization. +Vector.to_default_visualization_data : Text +Vector.to_default_visualization_data self = + json = self.take 100 . to_json + json.to_text + +## UNSTABLE + ADVANCED + + Returns a Text used to display this value in the IDE. 
+ + The particular representation is left unspecified and subject to change in + the future. The current implementation uses JSON serialization as the + default. + + > Example + Converting an array to its default visualization representation. + + [1, 2, 3, 4].to_array.to_default_visualization_data +Array.to_default_visualization_data : Text +Array.to_default_visualization_data self = + Vector.from_polyglot_array self . to_default_visualization_data + +## A Locale to Json conversion + + > Example + Convert the default locale to JSON. + + import Standard.Base.Data.Locale.Locale + + example_to_json = Locale.default.to_json +Locale.to_json : Json +Locale.to_json self = + b = Vector.new_builder + b.append ["type", "Locale"] + if self.language.is_nothing.not then b.append ["language", self.language] + if self.country.is_nothing.not then b.append ["country", self.country] + if self.variant.is_nothing.not then b.append ["variant", self.variant] + Json.from_pairs b.to_vector diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Internal.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Internal.enso index 70cf61ef807c..f691efe5476c 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Internal.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Internal.enso @@ -1,4 +1,7 @@ -import project.Data.Any.Any +import project.Any.Any +import project.Data.Json.Json +import project.Data.Json.Extensions +import project.Data.Json.Marshalling_Error import project.Data.List.List import project.Data.Map.Map import project.Data.Numbers.Decimal @@ -8,11 +11,10 @@ import project.Data.Text.Text import project.Data.Vector.Vector import project.Meta import project.Nothing.Nothing +import project.Panic.Panic import project.Runtime.Ref.Ref from project.Data.Boolean import Boolean, True, False -from project.Data.Json import Json, Marshalling_Error -from project.Error.Common import Panic polyglot java import org.enso.base.json.Parser polyglot java import org.enso.base.json.Printer @@ -310,9 +312,9 @@ into_helper fmt json = case fmt of _ -> m = Meta.meta fmt case m of - Meta.Atom_Data _ -> case json of + _ : Meta.Atom -> case json of Json.Object json_fields -> - cons = Meta.Constructor_Data m.constructor + cons = Meta.Constructor.Value m.constructor field_names = cons.fields field_formats = m.fields field_values = field_names.zip field_formats n-> inner_format-> diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/List.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/List.enso index 45ee9bd0a215..8b0b1b3b31d0 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/List.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/List.enso @@ -1,15 +1,15 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Filter_Condition.Filter_Condition import project.Data.Numbers.Integer import project.Data.Numbers.Number import project.Data.Text.Text import project.Data.Vector.Vector +import project.Error.Error import project.Function.Function import project.Nothing.Nothing import project.Runtime.Unsafe from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error from project.Data.List.List import Nil, Cons diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso index 3e3eda1885d7..a58cfcd17577 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso +++ 
b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso @@ -1,5 +1,4 @@ -import project.Data.Any.Any -import project.Data.Json.Json +import project.Any.Any import project.Data.Text.Text import project.Data.Vector.Vector import project.Nothing.Nothing @@ -422,23 +421,6 @@ type Locale to_text : Text | Nothing to_text self = self.java_locale.toLanguageTag - ## A Locale to Json conversion - - > Example - Convert the default locale to JSON. - - import Standard.Base.Data.Locale.Locale - - example_to_json = Locale.default.to_json - to_json : Json - to_json self = - b = Vector.new_builder - b.append ["type", "Locale"] - if self.language.is_nothing.not then b.append ["language", self.language] - if self.country.is_nothing.not then b.append ["country", self.country] - if self.variant.is_nothing.not then b.append ["variant", self.variant] - Json.from_pairs b.to_vector - ## Compares two locales for equality. == : Any -> Boolean == self other = case other of diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso index eb5bc3abfd64..9f3d2f66a5e5 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso @@ -1,14 +1,14 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Numbers.Integer import project.Data.Ordering.Ordering import project.Data.Map.Internal import project.Data.Pair.Pair import project.Data.Text.Text import project.Data.Vector.Vector +import project.Error.Error import project.Nothing.Nothing from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error ## A key-value store. This type assumes all keys are pairwise comparable, using the `<`, `>` and `==` operators. 
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map/Internal.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map/Internal.enso index da1a315292c3..b25945550a48 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map/Internal.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map/Internal.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Map.Map import project.Data.Numbers.Integer import project.Data.Ordering.Ordering diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Maybe.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Maybe.enso index 93b294102353..314cff7325c9 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Maybe.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Maybe.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any from project.Data.Boolean import Boolean, True, False diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Noise.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Noise.enso index 567d0dad56a1..c1d28c2adfcd 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Noise.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Noise.enso @@ -1,9 +1,8 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Interval.Bound import project.Data.Interval.Interval import project.Data.Numbers.Number - -from project.Error.Common import unimplemented +import project.Error.Common.Unimplemented polyglot java import java.lang.Long polyglot java import java.util.Random @@ -27,7 +26,7 @@ type Generator The return type may be chosen freely by the generator implementation, as it usually depends on the generator and its intended use. step : Number -> Interval -> Any - step self _ _ = unimplemented "Only intended to demonstrate an interface." + step self _ _ = Unimplemented.throw "Only intended to demonstrate an interface." ## A noise generator that implements a seeded deterministic random peterbation of the input. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso index 864a07ca9bf0..5ac224577016 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso @@ -1,11 +1,11 @@ -import project.Data.Json.Json import project.Data.Ordering.Ordering import project.Data.Text.Text import project.Data.Locale.Locale -import project.Nothing +import project.Error.Error +import project.Nothing.Nothing +import project.Panic.Panic from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Panic,Error,Illegal_Argument_Error polyglot java import java.lang.Double polyglot java import java.lang.Math @@ -326,15 +326,6 @@ type Number max : Number -> Number max self that = if self > that then self else that - ## Number to JSON conversion. - - > Example - Convert the number 8 to JSON. - - 8.to_json - to_json : Json - to_json self = Json.Number self - ## A constant holding the floating-point positive infinity. 
positive_infinity : Decimal positive_infinity = Double.POSITIVE_INFINITY diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Ordering/Comparator.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Ordering/Comparator.enso index a0671b91797a..444a48af2070 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Ordering/Comparator.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Ordering/Comparator.enso @@ -1,12 +1,12 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Ordering.Natural_Order import project.Data.Ordering.Ordering import project.Data.Text.Case_Sensitivity.Case_Sensitivity import project.Data.Text.Text_Ordering.Text_Ordering +import project.Error.Common.Incomparable_Values_Error import project.Nothing.Nothing from project.Data.Boolean import True, False -from project.Data.Vector import handle_incomparable_value polyglot java import org.enso.base.ObjectComparator @@ -19,7 +19,7 @@ polyglot java import org.enso.base.ObjectComparator Otherwise can support a custom fallback comparator. new : Nothing | (Any -> Any -> Ordering) -> ObjectComparator new custom_comparator=Nothing = - comparator_to_java cmp x y = handle_incomparable_value (cmp x y . to_sign) + comparator_to_java cmp x y = Incomparable_Values_Error.handle_errors (cmp x y . to_sign) case custom_comparator of Nothing -> ObjectComparator.getInstance (comparator_to_java .compare_to) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Ordering/Vector_Lexicographic_Order.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Ordering/Vector_Lexicographic_Order.enso index 4a449d95e2b9..4b7c8ea72154 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Ordering/Vector_Lexicographic_Order.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Ordering/Vector_Lexicographic_Order.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Ordering.Ordering import project.Data.Vector.Vector diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Pair.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Pair.enso index 5bb59b4c5746..6e2bff166dc0 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Pair.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Pair.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any ## A pair of elements. type Pair diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso index f01bcdb9f2a0..c2abbdb69b81 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso @@ -1,13 +1,14 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Filter_Condition.Filter_Condition import project.Data.Numbers.Integer import project.Data.Numbers.Number import project.Data.Vector.Vector +import project.Error.Error import project.Function.Function import project.Nothing.Nothing from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error, Illegal_Argument_Error_Data, Illegal_State_Error_Data +from project.Error.Common import Illegal_Argument, Illegal_State ## Represents a right-exclusive range of integer values. 
type Range @@ -46,10 +47,10 @@ type Range with_step self new_step = case new_step of _ : Integer -> if new_step == 0 then throw_zero_step_error else - if new_step < 0 then Error.throw (Illegal_Argument_Error_Data "The step should be positive. A decreasing sequence will remain decreasing after updating it with positive step, as this operation only sets the magnitude without changing the sign.") else + if new_step < 0 then Error.throw (Illegal_Argument.Error "The step should be positive. A decreasing sequence will remain decreasing after updating it with positive step, as this operation only sets the magnitude without changing the sign.") else Range.Value self.start self.end self.step.signum*new_step _ -> - Error.throw (Illegal_Argument_Error_Data "Range step should be an integer.") + Error.throw (Illegal_Argument.Error "Range step should be an integer.") ## Returns the last element that is included within the range or `Nothing` if the range is empty. @@ -293,39 +294,7 @@ type Range `Range 0 10 . contains 3.0 == False` and get a type error for decimals instead. _ -> - Error.throw (Illegal_Argument_Error_Data "`Range.contains` only accepts Integers.") - -## ALIAS Range - - Creates an increasing right-exclusive range of integers from `self` to `n`. - - Arguments: - - n: The end of the range. - - > Example - Create a range containing the numbers 0, 1, 2, 3, 4. - - 0.up_to 5 -Integer.up_to : Integer -> Range -Integer.up_to self n = case n of - _ : Integer -> Range.Value self n 1 - _ -> Error.throw (Illegal_Argument_Error_Data "Expected range end to be an Integer.") - -## ALIAS Range - - Creates a decreasing right-exclusive range of integers from `self` to `n`. - - Arguments: - - n: The end of the range. - - > Example - Create a range containing the numbers 5, 4, 3, 2, 1. - - 5.down_to 0 -Integer.down_to : Integer -> Range -Integer.down_to self n = case n of - _ : Integer -> Range.Value self n -1 - _ -> Error.throw (Illegal_Argument_Error_Data "Expected range end to be an Integer.") + Error.throw (Illegal_Argument.Error "`Range.contains` only accepts Integers.") ## PRIVATE -throw_zero_step_error = Error.throw (Illegal_State_Error_Data "A range with step = 0 is ill-formed.") +throw_zero_step_error = Error.throw (Illegal_State.Error "A range with step = 0 is ill-formed.") diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range/Extensions.enso new file mode 100644 index 000000000000..d521ae73fb89 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range/Extensions.enso @@ -0,0 +1,37 @@ +import project.Data.Numbers.Integer +import project.Data.Range.Range +import project.Error.Error + +from project.Error.Common import Illegal_Argument + +## ALIAS Range + + Creates an increasing right-exclusive range of integers from `self` to `n`. + + Arguments: + - n: The end of the range. + + > Example + Create a range containing the numbers 0, 1, 2, 3, 4. + + 0.up_to 5 +Integer.up_to : Integer -> Range +Integer.up_to self n = case n of + _ : Integer -> Range.Value self n 1 + _ -> Error.throw (Illegal_Argument.Error "Expected range end to be an Integer.") + +## ALIAS Range + + Creates a decreasing right-exclusive range of integers from `self` to `n`. + + Arguments: + - n: The end of the range. + + > Example + Create a range containing the numbers 5, 4, 3, 2, 1. 
+ + 5.down_to 0 +Integer.down_to : Integer -> Range +Integer.down_to self n = case n of + _ : Integer -> Range.Value self n -1 + _ -> Error.throw (Illegal_Argument.Error "Expected range end to be an Integer.") diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Regression.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Regression.enso index 00b824148ae3..729c315db9de 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Regression.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Regression.enso @@ -1,11 +1,13 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Numbers.Number import project.Data.Statistics.Statistic import project.Data.Text.Text import project.Data.Vector.Vector +import project.Error.Error import project.Nothing.Nothing +import project.Panic.Panic -from project.Error.Common import Error, Panic, Illegal_Argument_Error_Data, Illegal_Argument_Error +from project.Error.Common import Illegal_Argument polyglot java import org.enso.base.statistics.Regression polyglot java import org.enso.base.statistics.FitError @@ -34,16 +36,16 @@ type Model ## PRIVATE Computes the natural log series as long as all values are positive. - ln_series : Vector -> Vector ! Illegal_Argument_Error + ln_series : Vector -> Vector ! Illegal_Argument ln_series xs series_name="Values" = ln_with_panic x = if x.is_nothing then Nothing else - if x <= 0 then Panic.throw (Illegal_Argument_Error_Data (series_name + " must be positive.")) else x.ln - Panic.recover Illegal_Argument_Error_Data <| xs.map ln_with_panic + if x <= 0 then Panic.throw (Illegal_Argument.Error (series_name + " must be positive.")) else x.ln + Panic.recover Illegal_Argument.Error <| xs.map ln_with_panic ## Use Least Squares to fit a line to the data. -fit_least_squares : Vector -> Vector -> Model -> Fitted_Model ! Illegal_Argument_Error | Fit_Error +fit_least_squares : Vector -> Vector -> Model -> Fitted_Model ! Illegal_Argument | Fit_Error fit_least_squares known_xs known_ys model=Model.Linear = - Illegal_Argument_Error.handle_java_exception <| Fit_Error.handle_java_exception <| case model of + Illegal_Argument.handle_java_exception <| Fit_Error.handle_java_exception <| case model of Model.Linear intercept -> fitted = if intercept.is_nothing then Regression.fit_linear known_xs.to_array known_ys.to_array else Regression.fit_linear known_xs.to_array known_ys.to_array intercept @@ -62,7 +64,7 @@ fit_least_squares known_xs known_ys model=Model.Linear = log_ys = Model.ln_series known_ys "Y-values" fitted = Regression.fit_linear log_xs.to_array log_ys.to_array Model.fitted_model_with_r_squared Fitted_Model.Power fitted.intercept.exp fitted.slope known_xs known_ys - _ -> Error.throw (Illegal_Argument_Error_Data "Unsupported model.") + _ -> Error.throw (Illegal_Argument.Error "Unsupported model.") type Fitted_Model ## Fitted line (y = slope x + intercept). 
@@ -94,7 +96,7 @@ type Fitted_Model Fitted_Model.Exponential a b _ -> a * (b * x).exp Fitted_Model.Logarithmic a b _ -> a * x.ln + b Fitted_Model.Power a b _ -> a * (x ^ b) - _ -> Error.throw (Illegal_Argument_Error_Data "Unsupported model.") + _ -> Error.throw (Illegal_Argument.Error "Unsupported model.") ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Statistics.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Statistics.enso index 332e8ac4633b..6835317cc65e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Statistics.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Statistics.enso @@ -1,16 +1,18 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Array.Array import project.Data.Numbers.Decimal import project.Data.Numbers.Number import project.Data.Ordering.Comparator +import project.Data.Range.Extensions import project.Data.Vector.Vector +import project.Error.Error import project.Meta import project.Nothing.Nothing +import project.Panic.Panic from project.Data.Boolean import Boolean, True, False -from project.Data.Range import all from project.Data.Vector import Empty_Error -from project.Error.Common import Error, Panic, Illegal_Argument_Error_Data, Illegal_Argument_Error, Unsupported_Argument_Types, Unsupported_Argument_Types_Data, Incomparable_Values_Error +from project.Error.Common import Illegal_Argument, Unsupported_Argument_Types, Unsupported_Argument_Types_Data, Incomparable_Values_Error polyglot java import org.enso.base.statistics.Moments polyglot java import org.enso.base.statistics.MomentStatistic @@ -52,7 +54,7 @@ type Rank_Method Rank_Method.Ordinal -> Rank.Method.ORDINAL Rank_Method.Dense -> Rank.Method.DENSE - report_nullpointer caught_panic = Error.throw (Illegal_Argument_Error_Data caught_panic.payload.cause.getMessage) + report_nullpointer caught_panic = Error.throw (Illegal_Argument.Error caught_panic.payload.cause.getMessage) handle_nullpointer = Panic.catch NullPointerException handler=report_nullpointer handle_classcast = Panic.catch ClassCastException handler=(_ -> Error.throw Incomparable_Values_Error) @@ -143,7 +145,7 @@ type Statistic report_invalid _ = statistics.map_with_index i->v-> if java_stats.at i . 
is_nothing then Nothing else - Error.throw (Illegal_Argument_Error_Data ("Can only compute " + v.to_text + " on numerical data sets.")) + Error.throw (Illegal_Argument.Error ("Can only compute " + v.to_text + " on numerical data sets.")) handle_unsupported = Panic.catch Unsupported_Argument_Types_Data handler=report_invalid empty_map s = if (s == Statistic.Count) || (s == Statistic.Sum) then 0 else @@ -230,10 +232,10 @@ to_moment_statistic s = case s of ## PRIVATE wrap_java_call : Any -> Any wrap_java_call ~function = - report_unsupported _ = Error.throw (Illegal_Argument_Error_Data ("Can only compute correlations on numerical data sets.")) + report_unsupported _ = Error.throw (Illegal_Argument.Error ("Can only compute correlations on numerical data sets.")) handle_unsupported = Panic.catch Unsupported_Argument_Types_Data handler=report_unsupported - handle_unsupported <| Illegal_Argument_Error.handle_java_exception <| function + handle_unsupported <| Illegal_Argument.handle_java_exception <| function ## PRIVATE Given two series, get a computed CorrelationStatistics object diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso index 86b020e5bdaa..645fe7e947d9 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso @@ -1,10 +1,11 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Locale.Locale import project.Data.Ordering.Ordering +import project.Error.Error import project.Meta from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error, Type_Error_Data +from project.Error.Common import Type_Error_Data polyglot java import org.enso.base.Text_Utils diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Case.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Case.enso index 4d5218dbf25e..1e2360090e94 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Case.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Case.enso @@ -1,11 +1,3 @@ -from Standard.Base import all - -polyglot java import org.enso.base.text.TextFoldingStrategy - -# TODO Dubious constructor export -from project.Data.Text.Case.Case import all -from project.Data.Text.Case.Case export all - ## Specifies the casing options for text conversion. type Case ## All letters in lower case. @@ -16,12 +8,3 @@ type Case ## First letter of each word in upper case, rest in lower case. Title - -## PRIVATE - Creates a Java `TextFoldingStrategy` from the case sensitivity setting. -folding_strategy : Case_Sensitivity -> TextFoldingStrategy -folding_strategy case_sensitivity = case case_sensitivity of - Case_Sensitivity.Sensitive -> TextFoldingStrategy.unicodeNormalizedFold - Case_Sensitivity.Insensitive locale -> - TextFoldingStrategy.caseInsensitiveFold locale.java_locale - diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Case_Sensitivity.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Case_Sensitivity.enso index c85b2a730fdc..9469c88be9e7 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Case_Sensitivity.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Case_Sensitivity.enso @@ -1,4 +1,6 @@ -from Standard.Base import all +import project.Data.Locale.Locale + +polyglot java import org.enso.base.text.TextFoldingStrategy type Case_Sensitivity ## Represents a case-sensitive comparison mode. 
@@ -9,3 +11,11 @@ type Case_Sensitivity Arguments: - locale: The locale used for the comparison. Insensitive locale=Locale.default + + ## PRIVATE + Creates a Java `TextFoldingStrategy` from the case sensitivity setting. + folding_strategy : Case_Sensitivity -> TextFoldingStrategy + folding_strategy case_sensitivity = case case_sensitivity of + Case_Sensitivity.Sensitive -> TextFoldingStrategy.unicodeNormalizedFold + Case_Sensitivity.Insensitive locale -> + TextFoldingStrategy.caseInsensitiveFold locale.java_locale diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso index eca34999ad19..6af97f9f087f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso @@ -1,23 +1,28 @@ -from Standard.Base import all +import project.Data.Text.Text +import project.Data.Vector.Vector +import project.Error.Error +import project.Panic.Panic + +from project.Error.Common import Illegal_Argument, Encoding_Error polyglot java import java.nio.charset.Charset polyglot java import java.nio.charset.UnsupportedCharsetException polyglot java import org.enso.base.Text_Utils -## Get all available character sets from Java as Encodings. - Used to provide auto completion in the UI. -all_character_sets : Vector Text -all_character_sets = - java_array = Charset.availableCharsets.keySet.toArray - Vector.from_polyglot_array java_array - -## Get all available Encodings. -all_encodings : Vector Encoding -all_encodings = - all_character_sets.map Encoding.Value - ## Represents a character encoding. type Encoding + ## Get all available character sets from Java as Encodings. + Used to provide auto completion in the UI. + all_character_sets : Vector Text + all_character_sets = + java_array = Charset.availableCharsets.keySet.toArray + Vector.from_polyglot_array java_array + + ## Get all available Encodings. + all_encodings : Vector Encoding + all_encodings = + Encoding.all_character_sets.map Encoding.Value + ## Create a new Encoding object. Arguments: @@ -29,7 +34,7 @@ type Encoding to_java_charset : Charset to_java_charset self = Panic.catch UnsupportedCharsetException (Charset.forName self.character_set) _-> - Error.throw (Illegal_Argument_Error_Data ("Unknown Character Set: " + self.character_set)) + Error.throw (Illegal_Argument.Error ("Unknown Character Set: " + self.character_set)) ## Encoding for ASCII. ascii : Encoding @@ -94,13 +99,3 @@ type Encoding ## Encoding for Vietnamese (Windows). windows_1258 : Encoding windows_1258 = Encoding.Value "windows-1258" - -## One or more byte sequences were not decodable using the Encoding. -type Encoding_Error - Error (message:Text) - - ## PRIVATE - - Provides a human-readable representation of the encoding error. - to_display_text : Text - to_display_text self = "Encoding_Error: " + self.message diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso index cf498b4f8992..60d9cea16326 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso @@ -1,24 +1,34 @@ ## Methods for operating on `Text` in Enso. 
-from Standard.Base import all - -import Standard.Base.Data.Array.Array -import Standard.Base.Data.Text.Regex - -import Standard.Base.Data.Text.Matching_Mode -import Standard.Base.Data.Text.Case -import Standard.Base.Data.Text.Location - -import project.Data.Index_Sub_Range as Index_Sub_Range_Module -import project.Data.Text.Text_Sub_Range as Text_Sub_Range_Module +import project.Any.Any +import project.Data.Array.Array +import project.Data.Index_Sub_Range.Index_Sub_Range +import project.Data.Locale.Locale +import project.Data.Numbers.Integer +import project.Data.Range.Extensions +import project.Data.Range.Range +import project.Data.Text.Case.Case +import project.Data.Text.Encoding.Encoding +import project.Data.Text.Location +import project.Data.Text.Matching_Mode +import project.Data.Text.Regex +import project.Data.Text.Regex.Regex_Mode.Regex_Mode +import project.Data.Text.Regex_Matcher.Regex_Matcher +import project.Data.Text.Span.Span +import project.Data.Text.Span.Utf_16_Span +import project.Data.Text.Text +import project.Data.Text.Text_Matcher.Text_Matcher import project.Data.Text.Text_Sub_Range.Codepoint_Ranges +import project.Data.Vector.Vector +import project.Error.Error +import project.Error.Problem_Behavior.Problem_Behavior +import project.Meta +import project.Nothing.Nothing -from Standard.Base.Error.Problem_Behavior import Report_Warning -import Standard.Base.Meta +from project.Data.Boolean import Boolean, True, False +from project.Error.Common import Index_Out_Of_Bounds_Error, Index_Out_Of_Bounds_Error_Data, Illegal_Argument, Encoding_Error -export Standard.Base.Data.Text.Matching_Mode -export Standard.Base.Data.Text.Case -export Standard.Base.Data.Text.Location +import project.Data.Index_Sub_Range as Index_Sub_Range_Module polyglot java import com.ibm.icu.lang.UCharacter polyglot java import com.ibm.icu.text.BreakIterator @@ -196,7 +206,7 @@ Text.is_match : Text -> (Text_Matcher | Regex_Matcher) -> Boolean ! Regex.Compil Text.is_match self pattern=".*" matcher=Regex_Matcher.Regex_Matcher_Data = case matcher of Text_Matcher.Case_Sensitive -> self == pattern Text_Matcher.Case_Insensitive locale -> self.equals_ignore_case pattern locale - _ : Regex_Matcher.Regex_Matcher -> + _ : Regex_Matcher -> compiled_pattern = matcher.compile pattern compiled_pattern.matches self @@ -228,9 +238,9 @@ Text.is_match self pattern=".*" matcher=Regex_Matcher.Regex_Matcher_Data = case 'abc def\tghi'.split '\\s+' Regex_Matcher.Regex_Matcher_Data == ["abc", "def", "ghi"] Text.split : Text -> (Text_Matcher | Regex_Matcher) -> Vector Text -Text.split self delimiter="," matcher=Text_Matcher.Case_Sensitive = if delimiter.is_empty then Error.throw (Illegal_Argument_Error_Data "The delimiter cannot be empty.") else +Text.split self delimiter="," matcher=Text_Matcher.Case_Sensitive = if delimiter.is_empty then Error.throw (Illegal_Argument.Error "The delimiter cannot be empty.") else case matcher of - _ : Text_Matcher.Text_Matcher -> + _ : Text_Matcher -> delimiters = Vector.from_polyglot_array <| case matcher of Text_Matcher.Case_Sensitive -> Text_Utils.span_of_all self delimiter @@ -242,7 +252,7 @@ Text.split self delimiter="," matcher=Text_Matcher.Case_Sensitive = if delimiter end = if i == delimiters.length then (Text_Utils.char_length self) else delimiters.at i . 
codeunit_start Text_Utils.substring self start end - _ : Regex_Matcher.Regex_Matcher -> + _ : Regex_Matcher -> compiled_pattern = matcher.compile delimiter compiled_pattern.split self mode=Regex_Mode.All @@ -322,10 +332,10 @@ Text.split self delimiter="," matcher=Text_Matcher.Case_Sensitive = if delimiter "aaa aaa".replace "aa" "c" matcher=Regex_Matcher . should_equal "ca ca" "aaa aaa".replace "aa" "c" mode=Matching_Mode.First matcher=Regex_Matcher . should_equal "ca aaa" "aaa aaa".replace "aa" "c" mode=Matching_Mode.Last matcher=Regex_Matcher . should_equal "aaa ca" -Text.replace : Text -> Text -> Matching_Mode | Regex_Mode -> (Text_Matcher | Regex_Matcher) -> Text +Text.replace : Text -> Text -> Matching_Mode.First | Matching_Mode.Last | Regex_Mode -> (Text_Matcher | Regex_Matcher) -> Text Text.replace self term="" new_text="" mode=Regex_Mode.All matcher=Text_Matcher.Case_Sensitive = if term.is_empty then self else case matcher of - _ : Text_Matcher.Text_Matcher -> + _ : Text_Matcher -> array_from_single_result result = case result of Nothing -> Array.empty _ -> Array.new_1 result @@ -337,7 +347,7 @@ Text.replace self term="" new_text="" mode=Regex_Mode.All matcher=Text_Matcher.C array_from_single_result <| Text_Utils.span_of self term Matching_Mode.Last -> array_from_single_result <| Text_Utils.last_span_of self term - _ -> Error.throw (Illegal_Argument_Error_Data "Invalid mode.") + _ -> Error.throw (Illegal_Argument.Error "Invalid mode.") Text_Matcher.Case_Insensitive locale -> case mode of Regex_Mode.All -> Text_Utils.span_of_all_case_insensitive self term locale.java_locale @@ -347,9 +357,9 @@ Text.replace self term="" new_text="" mode=Regex_Mode.All matcher=Text_Matcher.C Matching_Mode.Last -> array_from_single_result <| Text_Utils.span_of_case_insensitive self term locale.java_locale True - _ -> Error.throw (Illegal_Argument_Error_Data "Invalid mode.") + _ -> Error.throw (Illegal_Argument.Error "Invalid mode.") Text_Utils.replace_spans self spans_array new_text - _ : Regex_Matcher.Regex_Matcher -> + _ : Regex_Matcher -> compiled_pattern = matcher.compile term compiled_pattern.replace self new_text mode=mode @@ -511,7 +521,7 @@ Text.is_whitespace self = "Hello".bytes (Encoding.ascii) Text.bytes : Encoding -> Problem_Behavior -> Vector Byte -Text.bytes self encoding on_problems=Report_Warning = +Text.bytes self encoding on_problems=Problem_Behavior.Report_Warning = result = Encoding_Utils.get_bytes self (encoding . to_java_charset) vector = Vector.from_polyglot_array result.result if result.warnings.is_nothing then vector else @@ -534,7 +544,7 @@ Text.bytes self encoding on_problems=Report_Warning = "Hello".bytes (Encoding.ascii) Text.from_bytes : Vector Byte -> Encoding -> Text -Text.from_bytes bytes encoding on_problems=Report_Warning = +Text.from_bytes bytes encoding on_problems=Problem_Behavior.Report_Warning = result = Encoding_Utils.from_bytes bytes.to_array (encoding . to_java_charset) if result.warnings.is_nothing then result.result else on_problems.attach_problems_after result.result [Encoding_Error.Error result.warnings] @@ -557,7 +567,7 @@ Text.from_bytes bytes encoding on_problems=Report_Warning = "Hello".utf_8 Text.utf_8 : Problem_Behavior -> Vector Byte -Text.utf_8 self on_problems=Report_Warning = +Text.utf_8 self on_problems=Problem_Behavior.Report_Warning = self.bytes Encoding.utf_8 on_problems ## Takes a vector of bytes and returns Text resulting from decoding it as UTF-8. 
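With `Report_Warning` no longer imported unqualified, call sites that override the default now spell out the full `Problem_Behavior` constructor, as the new defaults above do. A short usage sketch of the byte-encoding round-trip, assuming the public import path mirrors the library-internal `project.Error.Problem_Behavior` import:

    from Standard.Base import all
    import Standard.Base.Error.Problem_Behavior.Problem_Behavior

    # Round-trip a text value through UTF-8 bytes, overriding the default
    # `Report_Warning` behavior at both ends.
    example_round_trip =
        bytes = "Hello".bytes Encoding.utf_8 on_problems=Problem_Behavior.Report_Error
        Text.from_bytes bytes Encoding.utf_8 on_problems=Problem_Behavior.Ignore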
@@ -578,7 +588,7 @@ Text.utf_8 self on_problems=Report_Warning = Text.from_utf_8 [-32, -92, -107, -32, -91, -115, -32, -92, -73, -32, -92, -65] Text.from_utf_8 : Vector Byte -> Problem_Behavior -> Text -Text.from_utf_8 bytes on_problems=Report_Warning = +Text.from_utf_8 bytes on_problems=Problem_Behavior.Report_Warning = Text.from_bytes bytes Encoding.utf_8 on_problems ## Returns a vector containing the UTF-16 characters that encode the input text. @@ -671,10 +681,10 @@ Text.starts_with self prefix matcher=Text_Matcher.Case_Sensitive = case matcher Text_Matcher.Case_Sensitive -> Text_Utils.starts_with self prefix Text_Matcher.Case_Insensitive locale -> self.take (Index_Sub_Range.First prefix.length) . equals_ignore_case prefix locale=locale - _ : Regex_Matcher.Regex_Matcher -> + _ : Regex_Matcher -> preprocessed_pattern = "\A(?:" + prefix + ")" compiled_pattern = matcher.compile preprocessed_pattern - match = compiled_pattern.match self Regex_Mode.First + match = compiled_pattern.match self Matching_Mode.First match.is_nothing.not ## ALIAS Check Suffix @@ -706,10 +716,10 @@ Text.ends_with self suffix matcher=Text_Matcher.Case_Sensitive = case matcher of Text_Matcher.Case_Sensitive -> Text_Utils.ends_with self suffix Text_Matcher.Case_Insensitive locale -> self.take (Index_Sub_Range.Last suffix.length) . equals_ignore_case suffix locale=locale - _ : Regex_Matcher.Regex_Matcher -> + _ : Regex_Matcher -> preprocessed_pattern = "(?:" + suffix + ")\z" compiled_pattern = matcher.compile preprocessed_pattern - match = compiled_pattern.match self Regex_Mode.First + match = compiled_pattern.match self Matching_Mode.First match.is_nothing.not ## ALIAS Contains @@ -768,9 +778,9 @@ Text.contains self term="" matcher=Text_Matcher.Case_Sensitive = case matcher of Text_Matcher.Case_Sensitive -> Text_Utils.contains self term Text_Matcher.Case_Insensitive locale -> Text_Utils.contains_case_insensitive self term locale.java_locale - _ : Regex_Matcher.Regex_Matcher -> + _ : Regex_Matcher -> compiled_pattern = matcher.compile term - match = compiled_pattern.match self Regex_Mode.First + match = compiled_pattern.match self Matching_Mode.First match.is_nothing.not ## Takes an integer and returns a new text, consisting of `count` concatenated @@ -850,8 +860,8 @@ Text.repeat self count=1 = "Hello World!".take (By_Index [Range 0 3, 6, Range 6 12 2]) == "HelWWrd" "Hello World!".take (Sample 3 seed=42) == "l d" Text.take : (Text_Sub_Range | Index_Sub_Range | Range | Integer) -> Text ! Index_Out_Of_Bounds_Error -Text.take self range=(First 1) = - ranges = Text_Sub_Range_Module.find_codepoint_ranges self range +Text.take self range=(Index_Sub_Range.First 1) = + ranges = Codepoint_Ranges.resolve self range case ranges of Range.Value start end 1 -> Text_Utils.substring self start end @@ -896,8 +906,8 @@ Text.take self range=(First 1) = "Hello World!".drop (By_Index [Range 0 3, 6, Range 6 12 2]) == "lo ol!" "Hello World!".drop (Sample 3 seed=42) == "HeloWorl!" Text.drop : (Text_Sub_Range | Index_Sub_Range | Range) -> Text ! 
Index_Out_Of_Bounds_Error -Text.drop self range=(First 1) = - ranges = Text_Sub_Range_Module.find_codepoint_ranges self range +Text.drop self range=(Index_Sub_Range.First 1) = + ranges = Codepoint_Ranges.resolve self range case ranges of Range.Value start end 1 -> if start == 0 then Text_Utils.drop_first self end else @@ -973,7 +983,7 @@ Text.to_case self case_option=Case.Lower locale=Locale.default = case case_optio Text.pad : Integer -> Text -> (Location.Start | Location.End) -> Text Text.pad self length=0 with_pad=' ' at=Location.End = with_pad_length = with_pad.length - if with_pad_length == 0 then Error.throw (Illegal_Argument_Error_Data "`with_pad` must not be an empty string.") else + if with_pad_length == 0 then Error.throw (Illegal_Argument.Error "`with_pad` must not be an empty string.") else pad_size = length - self.length if pad_size <= 0 then self else full_repetitions = pad_size.div with_pad_length @@ -1142,13 +1152,13 @@ Text.locate self term="" mode=Matching_Mode.First matcher=Text_Matcher.Case_Sens from our term, the `length` counted in grapheme clusters is guaranteed to be the same. end = start + term.length - Span_Data (start.up_to end) self + Span.Value (start.up_to end) self Text_Matcher.Case_Insensitive locale -> case term.is_empty of True -> case mode of - Matching_Mode.First -> Span_Data (0.up_to 0) self + Matching_Mode.First -> Span.Value (0.up_to 0) self Matching_Mode.Last -> end = self.length - Span_Data (end.up_to end) self + Span.Value (end.up_to end) self False -> search_for_last = case mode of Matching_Mode.First -> False @@ -1156,7 +1166,7 @@ Text.locate self term="" mode=Matching_Mode.First matcher=Text_Matcher.Case_Sens case Text_Utils.span_of_case_insensitive self term locale.java_locale search_for_last of Nothing -> Nothing grapheme_span -> - Span_Data (grapheme_span.grapheme_start.up_to grapheme_span.grapheme_end) self + Span.Value (grapheme_span.grapheme_start.up_to grapheme_span.grapheme_end) self _ -> case mode of Matching_Mode.First -> case matcher.compile term . match self Matching_Mode.First of @@ -1233,7 +1243,7 @@ Text.locate self term="" mode=Matching_Mode.First matcher=Text_Matcher.Case_Sens match_2 = ligatures . locate_all "ffiff" matcher=(Text_Matcher Case_Insensitive) match_2 . map .length == [2, 5] Text.locate_all : Text -> (Text_Matcher | Regex_Matcher) -> Vector Span -Text.locate_all self term="" matcher=Text_Matcher.Case_Sensitive = if term.is_empty then Vector.new (self.length + 1) (ix -> Span_Data (ix.up_to ix) self) else case matcher of +Text.locate_all self term="" matcher=Text_Matcher.Case_Sensitive = if term.is_empty then Vector.new (self.length + 1) (ix -> Span.Value (ix.up_to ix) self) else case matcher of Text_Matcher.Case_Sensitive -> codepoint_spans = Vector.from_polyglot_array <| Text_Utils.span_of_all self term grahpeme_ixes = Vector.from_polyglot_array <| Text_Utils.utf16_indices_to_grapheme_indices self (codepoint_spans.map .codeunit_start).to_array @@ -1243,12 +1253,12 @@ Text.locate_all self term="" matcher=Text_Matcher.Case_Sensitive = if term.is_em offset = term.length grahpeme_ixes . 
map start-> end = start+offset - Span_Data (start.up_to end) self + Span.Value (start.up_to end) self Text_Matcher.Case_Insensitive locale -> grapheme_spans = Vector.from_polyglot_array <| Text_Utils.span_of_all_case_insensitive self term locale.java_locale grapheme_spans.map grapheme_span-> - Span_Data (grapheme_span.grapheme_start.up_to grapheme_span.grapheme_end) self - _ : Regex_Matcher.Regex_Matcher -> + Span.Value (grapheme_span.grapheme_start.up_to grapheme_span.grapheme_end) self + _ : Regex_Matcher -> case matcher.compile term . match self Regex_Mode.All of Nothing -> [] matches -> matches.map m-> m.span 0 . to_grapheme_span @@ -1264,3 +1274,4 @@ slice_text text char_ranges = char_ranges.map char_range-> sb.append text char_range.start char_range.end sb.toString + diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Matching.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Matching.enso index 008a6b44b97b..7aa4e648eebd 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Matching.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Matching.enso @@ -1,27 +1,29 @@ -from Standard.Base import all +import project.Data.Numbers.Integer +import project.Data.Pair.Pair +import project.Data.Range.Extensions +import project.Data.Text.Text +import project.Data.Vector.Vector +import project.Error.Problem_Behavior.Problem_Behavior +import project.Panic.Panic +import project.Panic.Wrapped_Dataflow_Error -from Standard.Base.Error.Problem_Behavior import Report_Warning -from Standard.Base.Error.Common import Wrapped_Dataflow_Error_Data - -# TODO Dubious constructor export -from project.Data.Text.Matching.No_Matches_Found import all -from project.Data.Text.Matching.No_Matches_Found export all +from project.Data.Boolean import Boolean, True, False ## UNSTABLE An error indicating that some criteria did not match any names in the input. type No_Matches_Found - No_Matches_Found_Data (criteria : Vector Text) + Error (criteria : Vector Text) to_display_text : Text to_display_text self = "The criteria "+self.criteria.to_text+" did not match any names in the input." ## PRIVATE -match_criteria_implementation matcher objects criteria reorder=False name_mapper=(x->x) on_problems=Report_Warning = +match_criteria_implementation matcher objects criteria reorder=False name_mapper=(x->x) on_problems=Problem_Behavior.Report_Warning = result = internal_match_criteria_implementation matcher objects criteria reorder name_mapper unmatched_criteria = result.second problems = if unmatched_criteria.is_empty then [] else - [No_Matches_Found_Data unmatched_criteria] + [No_Matches_Found.Error unmatched_criteria] on_problems.attach_problems_after result.first problems ## PRIVATE @@ -31,14 +33,10 @@ match_criteria_callback matcher objects criteria problem_callback reorder=False problem_callback unmatched_criteria result.first -# TODO Dubious constructor export -from project.Data.Text.Matching.Match_Matrix import all -from project.Data.Text.Matching.Match_Matrix export all - type Match_Matrix ## PRIVATE A helper type holding a matrix of matches. - Match_Matrix_Data matrix criteria objects + Value matrix criteria objects # Checks if the ith object is matched by any criterion. 
is_object_matched_by_anything : Integer -> Boolean @@ -82,13 +80,13 @@ make_match_matrix matcher objects criteria object_name_mapper=(x->x) criterion_m matrix = objects.map obj-> criteria.map criterion-> matcher.match_single_criterion (object_name_mapper obj) (criterion_mapper criterion) - Match_Matrix_Data matrix criteria objects + Match_Matrix.Value matrix criteria objects ## PRIVATE -internal_match_criteria_implementation matcher objects criteria reorder=False name_mapper=(x->x) = Panic.catch Wrapped_Dataflow_Error_Data (handler = x-> x.payload.unwrap) <| +internal_match_criteria_implementation matcher objects criteria reorder=False name_mapper=(x->x) = Panic.catch Wrapped_Dataflow_Error.Error (handler = x-> x.payload.unwrap) <| ## TODO [RW] discuss: this line of code also shows an issue we had with ensuring input dataflow-errors are correctly propagated, later on we stopped doing that and testing for that as it was too cumbersome. Maybe it could be helped with an @Accepts_Error annotation similar to the one from the interpreter??? [matcher, objects, criteria, reorder, name_mapper] . each v-> - Panic.rethrow (v.map_error Wrapped_Dataflow_Error_Data) + Panic.rethrow (v.map_error Wrapped_Dataflow_Error.Error) match_matrix = make_match_matrix matcher objects criteria name_mapper unmatched_criteria = match_matrix.unmatched_criteria diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Prim_Text_Helper.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Prim_Text_Helper.enso index 4e6a4abff46e..a6b8c5b4edd1 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Prim_Text_Helper.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Prim_Text_Helper.enso @@ -1,6 +1,6 @@ ## Internal text utilities for inspecting text primitives. -from Standard.Base import Text +import project.Data.Text.Text ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex.enso index 48c00b445c53..1ceec4b3d719 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex.enso @@ -1,17 +1,19 @@ ## This module contains the basic interface to the more advanced functionality - of Enso's regular expression engine. TODO Examples -from Standard.Base import all +import project.Data.Boolean.Boolean +import project.Data.Numbers.Integer +import project.Data.Text.Regex.Engine.Engine +import project.Data.Text.Regex.Pattern.Pattern +import project.Data.Text.Regex.Engine.Default +import project.Data.Text.Regex.Regex_Option.Regex_Option +import project.Data.Text.Text +import project.Data.Vector.Vector +import project.Nothing.Nothing -import Standard.Base.Data.Text.Regex -import Standard.Base.Data.Text.Regex.Engine -# TODO[DB] Fix composite types #183857386 -import Standard.Base.Data.Text.Regex.Engine.Engine as Engine_Type -import Standard.Base.Data.Text.Regex.Engine.Default as Default_Engine -import Standard.Base.Data.Text.Regex.Option +from project.Error.Common import Compile_Error ## Compile the provided `expression` into a regex pattern that can be used for matching. @@ -37,10 +39,10 @@ import Standard.Base.Data.Text.Regex.Option - comments: Enables or disables the comments mode for the regular expression. 
In comments mode, the following changes apply: - Whitespace within the pattern is ignored, except when within a - character class or when preceeded by an unescaped backslash, or within + character class or when preceded by an unescaped backslash, or within grouping constructs (e.g. `(?...)`). - When a line contains a `#`, that is not in a character class and is not - preceeded by an unescaped backslash, all characters from the leftmost + preceded by an unescaped backslash, all characters from the leftmost such `#` to the end of the line are ignored. That is to say, they act as _comments_ in the regex. - extra_opts: Specifies additional options in a vector. This allows options @@ -64,8 +66,8 @@ import Standard.Base.Data.Text.Regex.Option useful to be able to manually retain a pattern that you have computed. This function exists so you can hold onto the resultant `Pattern` object, instead of immediately proceeding to match using it. -compile : Text -> Engine_Type -> Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Vector Option.Option -> Engine.Pattern ! Compile_Error -compile expression engine=Default_Engine.new match_ascii=Nothing case_insensitive=Nothing dot_matches_newline=Nothing multiline=Nothing comments=Nothing extra_opts=[] = +compile : Text -> Engine -> Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Vector Regex_Option -> Pattern ! Compile_Error +compile expression engine=Default.new match_ascii=Nothing case_insensitive=Nothing dot_matches_newline=Nothing multiline=Nothing comments=Nothing extra_opts=[] = options_vec = from_flags match_ascii case_insensitive dot_matches_newline multiline comments extra_opts engine.compile expression options_vec @@ -80,44 +82,40 @@ compile expression engine=Default_Engine.new match_ascii=Nothing case_insensitiv ! Matching Engines Care should be taken to ensure that you use the same engine for escaping and matching, as engine syntax may differ in certain cases. -escape : Text -> Engine_Type -> Text -escape expression engine=Default_Engine.new = engine.escape expression +escape : Text -> Engine -> Text +escape expression engine=Default.new = engine.escape expression ## PRIVATE Turns the options flags into a vector of options. 
-from_flags : Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Vector Option.Option -> Vector Option.Option +from_flags : Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Boolean | Nothing -> Vector Regex_Option -> Vector Regex_Option from_flags match_ascii case_insensitive dot_matches_newline multiline comments extra_opts = builder = Vector.new_builder - process_override : Boolean | Nothing -> Option -> Nothing + process_override : Boolean | Nothing -> Regex_Option -> Nothing process_override param option = case param of _ : Boolean -> if param then builder.append option Nothing -> if extra_opts.contains option then builder.append option - process_override match_ascii Option.Ascii_Matching - process_override case_insensitive Option.Case_Insensitive - process_override dot_matches_newline Option.Dot_Matches_Newline - process_override multiline Option.Multiline - process_override comments Option.Comments + process_override match_ascii Regex_Option.Ascii_Matching + process_override case_insensitive Regex_Option.Case_Insensitive + process_override dot_matches_newline Regex_Option.Dot_Matches_Newline + process_override multiline Regex_Option.Multiline + process_override comments Regex_Option.Comments ## Add any non-overridable options from extra_opts extra_opts.each opt-> - not_ascii = opt != Option.Ascii_Matching - not_insensitive = opt != Option.Case_Insensitive - not_dot_matches_newline = opt != Option.Dot_Matches_Newline - not_multiline = opt != Option.Multiline - not_comments = opt != Option.Comments + not_ascii = opt != Regex_Option.Ascii_Matching + not_insensitive = opt != Regex_Option.Case_Insensitive + not_dot_matches_newline = opt != Regex_Option.Dot_Matches_Newline + not_multiline = opt != Regex_Option.Multiline + not_comments = opt != Regex_Option.Comments if not_ascii && not_insensitive && not_dot_matches_newline && not_multiline && not_comments then builder.append opt builder.to_vector -# TODO Dubious constructor export -from project.Data.Text.Regex.No_Such_Group_Error import all -from project.Data.Text.Regex.No_Such_Group_Error export all - ## PRIVATE An error that is emitted when there is no such group in the match for the @@ -125,13 +123,57 @@ from project.Data.Text.Regex.No_Such_Group_Error export all Arguments: - id: The identifier of the group that was asked for but does not exist. -type No_Such_Group_Error - No_Such_Group_Error_Data (id : Text | Integer) +type No_Such_Group + Error (id : Text | Integer) ## PRIVATE - Provides a human-readable representation of the `No_Such_Group_Error`. + Provides a human-readable representation of the `No_Such_Group`. to_display_text : Text to_display_text self = case self.id of _ : Integer -> "No group exists with the index " + self.id.to_text + "." _ : Text -> "No group exists with the name " + self.id + "." + +## PRIVATE + + An error representing that one of the passed options was invalid. + + Arguments: + - opt: The option that was not valid for this regex engine. +type Invalid_Option + Error (opt : Any) + + ## PRIVATE + + Provides a human-readable representation of the invalid option error. + to_display_text : Text + to_display_text self = + "The option " + self.opt.to_text + " is not valid for the default regex engine." + +## PRIVATE + + An error representing that there is something wrong with the mode for a regex + match. + + Arguments: + - message: The text of the message to display to users. 
+type Mode_Error + Error (message : Text) + + ## PRIVATE + + Provides a human-readable representation of the mode error. + to_display_text : Text + to_display_text self = self.message.to_text + +## PRIVATE + + An error representing that the bounds for a match are invalid. +type Invalid_Bounds_Error + + ## PRIVATE + + Provides a human-readable representation of the invalid bounds error. + to_display_text : Text + to_display_text = + "The start bound cannot be greater than the end bound." diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Engine.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Engine.enso index ae95c386821a..f10b71be89d8 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Engine.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Engine.enso @@ -8,7 +8,7 @@ form for equality and the unicode versions of the standard character classes. - It must support the standard options specified in - `Standard.Base.Data.Text.Regex.Option`. It may specify additional, + `Standard.Base.Data.Text.Regex.Regex_Option`. It may specify additional, engine-specific options, but this is not required by the specification. - In the defining module, the engine implementation must provide a full specification of its syntax in the module documentation block. @@ -16,10 +16,12 @@ This file is _not executable_. It instead describes the interface for the customisable `Engine` and `Pattern` types. -from Standard.Base import all -import Standard.Base.Error.Common as Errors +import project.Data.Text.Text +import project.Data.Text.Regex.Regex_Option.Regex_Option +import project.Data.Text.Regex.Invalid_Option +import project.Data.Vector.Vector -from Standard.Base.Data.Text.Regex.Engine.Default export Default +from project.Error.Common import Compile_Error, Unimplemented ## The `Data.Text.Regex.Engine.Engine` interface. type Engine @@ -34,8 +36,8 @@ type Engine to compile. - options: The options to configure the matching process with. These are merged with the specific `engine_opts`. - compile : Text -> Vector Global_Option.Option -> Pattern ! (Regex.Compile_Error | Invalid_Option_Error) - compile self _ _ = Errors.unimplemented "This is an interface only." + compile : Text -> Vector Regex_Option -> Pattern ! (Compile_Error | Invalid_Option) + compile self _ _ = Unimplemented.throw "This is an interface only." ## PRIVATE @@ -45,181 +47,4 @@ type Engine Arguments: - expression: The expression to escape metacharacters in. escape : Text -> Text - escape self _ = Errors.unimplemented "This is an interface only." - -## The `Data.Text.Regex.Engine.Pattern` interface. -type Pattern - - ## PRIVATE - - Tries to match the provided `input` against the pattern `self`. - - Arguments: - - input: The text to match the pattern described by `self` against. - - mode: The matching mode to use. This must default to `Regex_Mode.All`. - - This method will _always_ return `Nothing` if it fails to match. - - ? Return Type - When asked to match in a mode that can only provide a single match, the - return type is either a single `Match` object. When asked to match in a - mode that permits multiple matches, it will always return a `Vector`, - even if only a single match is found. - match : Text -> (Regex_Mode | Matching_Mode) -> Match | Vector Match | Nothing - match self _ _ = Errors.unimplemented "This is an interface only." - - ## PRIVATE - - Returns `True` if the input matches against the pattern described by - `self`, otherwise `False`. 
- - Arguments: - - input: The text to check for matching. - matches : Text -> Boolean - matches self _ = Errors.unimplemented "This is an interface only." - - ## PRIVATE - - Tries to find the text in the `input` that matches against the pattern - `self`. - - Arguments: - - input: The text to find matches in. - - mode: The matching mode to use. This must default to `Regex_Mode.All` - - This method will _always_ return `Nothing` if it fails to find any - matches. - - ? Return Type - When asked to match in a mode that can only provide a single match, the - return type is either a single `Match` object. When asked to match in a - mode that permits multiple matches, it will always return a `Vector`, - even if only a single match is found. - find : Text -> (Regex_Mode | Matching_Mode) -> Text | Vector Text | Nothing - find self _ _ = Errors.unimplemented "This is an interface only." - - ## PRIVATE - - Splits the `input` text based on the pattern described by `self`. - - Arguments: - - input: The text to splut based on the pattern described by `self`. - - mode: The splitting mode to use. This must default to `Regex_Mode.All`. - - This method will _always_ return a vector. If no splits take place, the - vector will contain a single element. - split : Text -> (Regex_Mode.First | Integer | Regex_Mode.All) -> Vector Text - split self _ _ = Errors.unimplemented "This is an interface only." - - ## PRIVATE - - Replace all occurrences of the pattern described by `self` in the `input` - with the specified `replacement`. - - Arguments: - - input: The text in which to perform the replacement(s). - - replacement: The literal text with which to replace any matches. - - mode: The matching mode to use for finding candidates to replace. This - must default to `Regex_Mode.All`. - - If this method performs no replacements it will return the `input` text - unchanged. - replace : Text -> Text -> Regex_Mode | Matching_Mode | Integer -> Text - replace self _ _ _ = Errors.unimplemented "This is an interface only." - -## The `Data.Text.Regex.Engine.Match` interface. -type Match - - ## PRIVATE - - Gets the text matched by the group with the provided identifier, or - `Nothing` if the group did not participate in the match. If no such group - exists for the provided identifier, a `No_Such_Group_Error` is returned. - - Arguments: - - id: The index or name of that group. - - ? The Full Match - The group with index 0 is always the full match of the pattern. - - ? Named Groups by Index - If the regex contained named groups, these may also be accessed by - index based on their position in the pattern. - group : Integer | Text -> Text | Nothing ! Regex.No_Such_Group_Error - group self _ = Errors.unimplemented "This is an interface only." - - ## PRIVATE - - Gets a vector containing the results of _all_ of the capturing groups in - the pattern, replacing the value of groups that did not participate in - the match with `default`. - - Arguments: - - default: The value to return for a given index when the group at that - index did not participate in the match. The default for this argument - should be `Nothing`. - - ? The Full Match - The group with index 0 is always the full match of the pattern. - - ? Named Groups by Index - If the regex contained named groups, these may also be accessed by - index based on their position in the pattern. - groups : (a : Any) -> Vector (Text | a) - groups self _ = Errors.unimplemented "This is an interface only." 
- - ## PRIVATE - - Gets a map containing the named capturing groups for the pattern, - replacing the value for groups that did not participate in the match with - `default`. - - Arguments: - - default: The value to return for a given name when the group at that - index did not participate in the match. This should default to - `Nothing`. - named_groups : (a : Any) -> Map Text (Text | a) - named_groups self _ = Errors.unimplemented "This is an interface only." - - ## PRIVATE - - Gets the index of the first character captured by the group with the - given identifier, or `Nothing` if the group did not participate in the - match. - - Arguments: - - id: The identifier for the group to fetch the start index for. - start : Integer | Text -> Integer | Nothing ! Regex.No_Such_Group_Error - start self _ = Errors.unimplemented "This is an interface only." - - ## PRIVATE - - Gets the index of the first character after `start` that was not captured - by the group with the given identifier, or `Nothing` if the group did not - participate in the match. - - Arguments: - - id: The identifier for the group to fetch the end index for. - end : Integer | Text -> Integer | Nothing ! Regex.No_Such_Group_Error - end self _ = Errors.unimplemented "This is an intercace only." - - ## PRIVATE - - Returns the span matched by the group with the provided identifier, or - `Nothing` if the group did not participate in the match. - - Arguments: - - id: The identifier for the group to fetch the end index for. - span : Integer | Text -> Span | Nothing ! Regex.No_Such_Group_Error - span self _ = Errors.unimplemented "This is an interface only." - - ## PRIVATE - - Returns the start character index of the match's region. - start_position : Integer - start_position self = Errors.unimplemented "This is an interface only." - - ## Returns the end character index of the match's region. - end_position : Integer - end_position self = Errors.unimplemented "This is an interface only." - + escape self _ = Unimplemented.throw "This is an interface only." 
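The `Engine` interface above only declares `compile` and `escape`; the concrete behaviour lives in the default engine. A small sketch of compiling through the module-level `Regex.compile` helper with the default engine made explicit, assuming the public import paths mirror the `project.*` imports used above:

    import Standard.Base.Data.Text.Regex
    import Standard.Base.Data.Text.Regex.Engine.Default as Default_Engine

    # `Regex.compile` builds the option vector from the keyword flags and then
    # delegates to the engine's own `compile`.
    example_compile =
        pattern = Regex.compile "^a+$" engine=Default_Engine.new case_insensitive=True
        pattern.matches "AAA"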
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Engine/Default.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Engine/Default.enso index b83b6136a9f7..a922f7ff13ad 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Engine/Default.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Engine/Default.enso @@ -33,12 +33,26 @@ TBC -from Standard.Base import all - -import Standard.Base.Data.Text.Regex -import Standard.Base.Data.Text.Regex.Engine -import Standard.Base.Data.Text.Regex.Option as Global_Option -import Standard.Base.Polyglot.Java +import project.Any.Any +import project.Data.Map.Map +import project.Data.Numbers.Integer +import project.Data.Range.Extensions +import project.Data.Text.Matching_Mode +import project.Data.Text.Regex.Invalid_Option +import project.Data.Text.Regex.Invalid_Bounds_Error +import project.Data.Text.Regex.Mode_Error +import project.Data.Text.Regex.No_Such_Group +import project.Data.Text.Regex.Regex_Mode.Regex_Mode +import project.Data.Text.Regex.Regex_Option.Regex_Option +import project.Data.Text.Text +import project.Data.Text.Span.Utf_16_Span +import project.Data.Vector.Vector +import project.Meta +import project.Nothing.Nothing +import project.Panic.Panic + +from project.Data.Boolean import Boolean, True, False +from project.Error.Common import Compile_Error, Syntax_Error_Data, Polyglot_Error polyglot java import java.lang.IllegalArgumentException polyglot java import java.lang.IndexOutOfBoundsException @@ -62,17 +76,13 @@ polyglot java import org.enso.base.Text_Utils import Standard.Base.Data.Text.Regex.Engine.Default as Default_Engine example_new = - engine_opts = [Default_Engine.Literal_Pattern] + engine_opts = [Default_Engine.Option.Literal_Pattern] Default_Engine.new engine_opts -new : Vector Option -> Engine -new opts=[] = Engine_Data opts - -# TODO Dubious constructor export -from project.Data.Text.Regex.Engine.Default.Engine import all -from project.Data.Text.Regex.Engine.Default.Engine export all +new : Vector (Regex_Option | Option) -> Default_Engine +new opts=[] = Default_Engine.Value opts ## The default implementation of the `Data.Text.Regex.Engine.Engine` interface. -type Engine +type Default_Engine ## PRIVATE @@ -81,7 +91,7 @@ type Engine Arguments: - engine_opts: Options for regex matching that are specific to this engine. - Engine_Data (engine_opts : Vector Option) + Value (engine_opts : Vector (Regex_Option | Option)) ## ADVANCED @@ -105,14 +115,14 @@ type Engine consisting of a single "a". import Standard.Base.Data.Text.Regex.Engine.Default as Default_Engine - import Standard.Base.Data.Text.Regex.Option as Global_Option + import Standard.Base.Data.Text.Regex.Regex_Option.Regex_Option example_compile = expression = "^a$" - options = [Global_Option.Multiline] + options = [Regex_Option.Multiline] engine = Default_Engine.new engine.compile expression options - compile : Text -> Vector Global_Option.Option -> Pattern ! (Regex.Compile_Error | Invalid_Option_Error) + compile : Text -> Vector (Regex_Option | Option) -> Pattern ! (Compile_Error | Invalid_Option) compile self expression options = all_options = options + self.engine_opts options_bitmask = from_enso_options all_options @@ -122,12 +132,12 @@ type Engine Java_Pattern.compile (unicode_regex.transform expression) options_bitmask internal_pattern = maybe_java_pattern.map_error case _ of - Polyglot_Error_Data err -> + Polyglot_Error.Polyglot_Error_Data err -> if err.is_a PatternSyntaxException . 
not then err else Syntax_Error_Data ("The regex could not be compiled: " + err.getMessage) other -> other - Pattern_Data internal_pattern all_options self + Pattern.Value internal_pattern all_options self ## ADVANCED @@ -141,7 +151,7 @@ type Engine Turn a literal string into a regex that matches that string exactly. import Standard.Base.Data.Text.Regex.Engine.Default as Default_Engine - import Standard.Base.Data.Text.Regex.Option as Global_Option + import Standard.Base.Data.Text.Regex.Regex_Option.Regex_Option example_escape = literal_string = "\!\.|abcde" @@ -150,10 +160,6 @@ type Engine escape : Text -> Text escape self expression = Java_Pattern.quote expression -# TODO Dubious constructor export -from project.Data.Text.Regex.Engine.Default.Pattern import all -from project.Data.Text.Regex.Engine.Default.Pattern export all - ## The default implementation of the `Data.Text.Regex.Engine.Pattern` interface. type Pattern @@ -165,7 +171,7 @@ type Pattern - internal_pattern: The internal representation of the compiled pattern. - options: The vector of options with which this pattern was built. - engine: A handle to the engine that built this pattern. - Pattern_Data (internal_pattern : Java_Pattern) (options : Vector (Global_Option.Option | Option)) (engine : Engine) + Value (internal_pattern : Java_Pattern) (options : Vector (Regex_Option | Option)) (engine : Default_Engine) ## PRIVATE @@ -189,14 +195,14 @@ type Pattern in code unit space, normalization could shift these indices! This should be addressed when reviewing See: https://www.pivotaltracker.com/story/show/181524498 - #normalized_input = if self.options.contains Global_Option.Ascii_Matching then input else + #normalized_input = if self.options.contains Regex_Option.Ascii_Matching then input else # Text_Utils.normalize input normalized_input = input internal_matcher = self.internal_pattern.matcher normalized_input . region start end - if self.options.contains No_Anchoring_Bounds then + if self.options.contains Option.No_Anchoring_Bounds then internal_matcher.useAnchoringBounds False - if self.options.contains Transparent_Bounds then + if self.options.contains Option.Transparent_Bounds then internal_matcher.useTransparentBounds True internal_matcher @@ -226,7 +232,7 @@ type Pattern engine = Default.new pattern = engine.compile ".." [] input = "abcdefghij" - pattern.match input mode=Regex_Mode.First + pattern.match input mode=Matching_Mode.First > Example Match up to the first 3 instances of the pattern `".."` in the input. @@ -260,17 +266,17 @@ type Pattern pattern = engine.compile ".*" [] input = "abcdefghij" pattern.match input mode=Regex_Mode.Full - match : Text -> (Regex_Mode | Matching_Mode) -> Match | Vector Match | Nothing + match : Text -> (Regex_Mode | Matching_Mode.First | Matching_Mode.Last) -> Match | Vector Match | Nothing match self input mode=Regex_Mode.All = do_match_mode mode start end = case mode of Matching_Mode.First -> internal_matcher = self.build_matcher input start end if internal_matcher . find start . not then Nothing else - Match_Data internal_matcher start end input + Match.Value internal_matcher start end input _ : Integer -> if mode < 0 then Panic.throw <| - Mode_Error_Data "Cannot match a negative number of times." + Mode_Error.Error "Cannot match a negative number of times." 
builder = Vector.new_builder @@ -282,7 +288,7 @@ type Pattern found = internal_matcher.find offset if found.not then Nothing else - builder.append (Match_Data internal_matcher start end input) + builder.append (Match.Value internal_matcher start end input) match_end = internal_matcher.end 0 # Ensure progress even if the match is an empty string. new_offset = if match_end > offset then match_end else offset+1 @@ -302,7 +308,7 @@ type Pattern found = internal_matcher.find offset if found.not then Nothing else - builder.append (Match_Data internal_matcher start end input) + builder.append (Match.Value internal_matcher start end input) match_end = internal_matcher.end 0 # Ensure progress even if the match is an empty string. new_offset = if match_end > offset then match_end else offset+1 @@ -315,9 +321,9 @@ type Pattern Regex_Mode.Full -> internal_matcher = self.build_matcher input start end if internal_matcher.matches.not then Nothing else - Match_Data internal_matcher start end input + Match.Value internal_matcher start end input Regex_Mode.Bounded _ _ _ -> Panic.throw <| - Mode_Error_Data "Modes cannot be recursive." + Mode_Error.Error "Modes cannot be recursive." case mode of Regex_Mode.Bounded start end sub_mode -> @@ -345,7 +351,7 @@ type Pattern pattern.matches input matches : Text -> Boolean matches self input = case self.match input mode=Regex_Mode.Full of - Match_Data _ _ _ _ -> True + _ : Match -> True _ : Vector -> True _ -> False @@ -410,11 +416,11 @@ type Pattern pattern = engine.compile ".*" [] input = "abcdefghij" pattern.find input mode=Regex_Mode.Full - find : Text -> (Regex_Mode | Matching_Mode) -> Text | Vector Text | Nothing + find : Text -> (Regex_Mode | Matching_Mode.First | Matching_Mode.Last) -> Text | Vector Text | Nothing find self input mode=Regex_Mode.All = matches = self.match input mode case matches of - Match_Data _ _ _ _ -> matches.group 0 + _ : Match -> matches.group 0 _ : Vector -> matches.map (_.group 0) _ -> matches @@ -461,7 +467,7 @@ type Pattern pattern = engine.compile "a" [] input = "bacadaeaf" pattern.match input - split : Text -> Matching_Mode | Regex_Mode | Integer -> Vector Text + split : Text -> Matching_Mode.First | Matching_Mode.Last | Regex_Mode | Integer -> Vector Text split self input mode=Regex_Mode.All = # Java uses this to mean the max length of the resulting array, so we # add 1. @@ -469,16 +475,16 @@ type Pattern Matching_Mode.First -> 2 _ : Integer -> if mode < 0 then Panic.throw <| - Mode_Error_Data "Cannot match a negative number of times." + Mode_Error.Error "Cannot match a negative number of times." mode + 1 Regex_Mode.All -> -1 Regex_Mode.Full -> Panic.throw <| - Mode_Error_Data "Splitting on a full match yields an empty text." + Mode_Error.Error "Splitting on a full match yields an empty text." Regex_Mode.Bounded _ _ _ -> Panic.throw <| - Mode_Error_Data "Splitting on a bounded region is not well-defined." + Mode_Error.Error "Splitting on a bounded region is not well-defined." Matching_Mode.Last -> Panic.throw <| - Mode_Error_Data "Splitting on the last match is not supported." + Mode_Error.Error "Splitting on the last match is not supported." 
splits = self.internal_pattern.split input limit Vector.from_polyglot_array splits @@ -531,7 +537,7 @@ type Pattern pattern = engine.compile "aa [] input = "aabbaabbbbbaab" pattern.replace input "REPLACED" - replace : Text -> Text -> Regex_Mode | Matching_Mode | Integer -> Text + replace : Text -> Text -> Regex_Mode | Matching_Mode.First | Matching_Mode.Last | Integer -> Text replace self input replacement mode=Regex_Mode.All = do_replace_mode mode start end = case mode of Matching_Mode.First -> @@ -539,7 +545,7 @@ type Pattern internal_matcher.replaceFirst replacement _ : Integer -> if mode < 0 then Panic.throw <| - Mode_Error_Data "Cannot replace a negative number of times." + Mode_Error.Error "Cannot replace a negative number of times." internal_matcher = self.build_matcher input start end buffer = StringBuffer.new @@ -557,7 +563,7 @@ type Pattern internal_matcher.replaceAll replacement Regex_Mode.Full -> case self.match input mode=Regex_Mode.Full of - Match_Data _ _ _ _ -> self.replace input replacement Matching_Mode.First + _ : Match -> self.replace input replacement Matching_Mode.First Nothing -> input Matching_Mode.Last -> all_matches = self.match input @@ -578,17 +584,13 @@ type Pattern internal_matcher.appendTail buffer buffer.to_text Regex_Mode.Bounded _ _ _ -> Panic.throw <| - Mode_Error_Data "Modes cannot be recursive." + Mode_Error.Error "Modes cannot be recursive." case mode of Regex_Mode.Bounded _ _ _ -> Panic.throw <| - Mode_Error_Data "Bounded replacements are not well-formed." + Mode_Error.Error "Bounded replacements are not well-formed." _ -> do_replace_mode mode 0 (Text_Utils.char_length input) -# TODO Dubious constructor export -from project.Data.Text.Regex.Engine.Default.Match import all -from project.Data.Text.Regex.Engine.Default.Match export all - ## The default implementation of the `Data.Text.Regex.Engine.Match` interface. type Match @@ -602,11 +604,11 @@ type Match - region_start: The start of the region over which the match was made. - region_end: The end of the region over which the match was made. - input: The input text that was being matched. - Match_Data (internal_match : Java_Matcher) (region_start : Integer) (region_end : Integer) (input : Text) + Value (internal_match : Java_Matcher) (region_start : Integer) (region_end : Integer) (input : Text) ## Gets the text matched by the group with the provided identifier, or `Nothing` if the group did not participate in the match. If no such group - exists for the provided identifier, a `No_Such_Group_Error` is returned. + exists for the provided identifier, a `No_Such_Group` is returned. Arguments: - id: The index or name of that group. @@ -635,7 +637,7 @@ type Match example_group = match = Examples.match match.group "letters" - group : Integer | Text -> Text | Nothing ! Regex.No_Such_Group_Error + group : Integer | Text -> Text | Nothing ! No_Such_Group group self id = Panic.recover Any (self.internal_match.group id) . 
map_error (handle_error _ id) @@ -663,7 +665,7 @@ type Match example_groups = match = Examples.match match.groups default="UNMATCHED" - groups : (a : Any) -> Vector (Text | a) + groups : Any -> Vector (Text | Any) groups self default=Nothing = group_numbers = 0.up_to self.internal_match.groupCount+1 group_numbers.map n-> @@ -688,7 +690,7 @@ type Match example_groups = match = Examples.match matcg.named_groups default="UNMATCHED" - named_groups : (a : Any) -> Map Text (Text | a) + named_groups : Any -> Map Text (Text | Any) named_groups self default=Nothing = group_names = Vector.from_polyglot_array <| Regex_Utils.get_group_names self.internal_match.pattern @@ -721,7 +723,7 @@ type Match example_start = match = Examples.match match.start 0 - start : Integer | Text -> Integer | Nothing ! Regex.No_Such_Group_Error + start : Integer | Text -> Integer | Nothing ! No_Such_Group start self id = result = Panic.recover Any (self.internal_match.start id) no_errors = result.map_error (handle_error _ id) @@ -749,7 +751,7 @@ type Match example_end = match = Examples.match match.end 0 - end : Integer | Text -> Integer | Nothing ! Regex.No_Such_Group_Error + end : Integer | Text -> Integer | Nothing ! No_Such_Group end self id = result = Panic.recover Any (self.internal_match.end id) no_errors = result.map_error (handle_error _ id) @@ -775,10 +777,10 @@ type Match example_Span = match = Examples.match match.span 0 - span : Integer | Text -> Utf_16_Span | Nothing ! Regex.No_Such_Group_Error + span : Integer | Text -> Utf_16_Span | Nothing ! No_Such_Group span self id = case self.group id of Nothing -> Nothing - _ -> Utf_16_Span_Data ((self.start id).up_to (self.end id)) self.input + _ -> Utf_16_Span.Value ((self.start id).up_to (self.end id)) self.input ## Returns the start character index of the match's region. @@ -827,19 +829,15 @@ type Match - id: The group identifier with which the error is associated. handle_error : Any -> (Text | Integer) -> Any handle_error error id = case error of - Polyglot_Error_Data err -> + Polyglot_Error.Polyglot_Error_Data err -> is_ioob = err.is_a IndexOutOfBoundsException is_iae = err.is_a IllegalArgumentException maps_to_no_such_group = is_ioob || is_iae if maps_to_no_such_group.not then err else - Regex.No_Such_Group_Error_Data id + No_Such_Group.Error id other -> other -# TODO Dubious constructor export -from project.Data.Text.Regex.Engine.Default.Option import all -from project.Data.Text.Regex.Engine.Default.Option export all - ## Options specific to the `Default` regular expression engine. type Option @@ -876,78 +874,22 @@ type Option Arguments: - opts: The enso-side options to configure the regex. 
-from_enso_options : Vector (Option | Global_Option.Option) -> Integer +from_enso_options : Vector (Option | Regex_Option) -> Integer from_enso_options opts = java_flags = Panic.recover Any <| opts.flat_map case _ of - Literal_Pattern -> [Java_Pattern.LITERAL] - Unix_Lines -> [Java_Pattern.UNIX_LINES] - Global_Option.Case_Insensitive -> [Java_Pattern.CASE_INSENSITIVE] - Global_Option.Dot_Matches_Newline -> [Java_Pattern.DOTALL] - Global_Option.Multiline -> [Java_Pattern.MULTILINE] - Global_Option.Comments -> [Java_Pattern.COMMENTS] - Global_Option.Ascii_Matching -> [] - No_Anchoring_Bounds -> [] - Transparent_Bounds -> [] - other -> Panic.throw (Invalid_Option_Error_Data other) + Option.Literal_Pattern -> [Java_Pattern.LITERAL] + Option.Unix_Lines -> [Java_Pattern.UNIX_LINES] + Option.No_Anchoring_Bounds -> [] + Option.Transparent_Bounds -> [] + Regex_Option.Case_Insensitive -> [Java_Pattern.CASE_INSENSITIVE] + Regex_Option.Dot_Matches_Newline -> [Java_Pattern.DOTALL] + Regex_Option.Multiline -> [Java_Pattern.MULTILINE] + Regex_Option.Comments -> [Java_Pattern.COMMENTS] + Regex_Option.Ascii_Matching -> [] + other -> Panic.throw (Invalid_Option.Error other) options_bitmask = java_flags.fold 0 .bit_or - if opts.contains Global_Option.Ascii_Matching then options_bitmask else + if opts.contains Regex_Option.Ascii_Matching then options_bitmask else unicode = [Java_Pattern.CANON_EQ, Java_Pattern.UNICODE_CASE, Java_Pattern.UNICODE_CHARACTER_CLASS].fold 0 .bit_or options_bitmask.bit_or unicode - -# TODO Dubious constructor export -from project.Data.Text.Regex.Engine.Default.Invalid_Bounds_Error import all -from project.Data.Text.Regex.Engine.Default.Invalid_Bounds_Error export all - -## PRIVATE - - An error representing that the bounds for a match are invalid. -type Invalid_Bounds_Error - - ## PRIVATE - - Provides a human-readable representation of the invalid bounds error. - to_display_text : Text - to_display_text = - "The start bound cannot be greater than the end bound." - -# TODO Dubious constructor export -from project.Data.Text.Regex.Engine.Default.Mode_Error import all -from project.Data.Text.Regex.Engine.Default.Mode_Error export all - -## PRIVATE - - An error representing that there is something wrong with the mode for a regex - match. - - Arguments: - - message: The text of the message to display to users. -type Mode_Error - Mode_Error_Data (message : Text) - - ## PRIVATE - - Provides a human-readable representation of the mode error. - to_display_text : Text - to_display_text self = self.message.to_text - -# TODO Dubious constructor export -from project.Data.Text.Regex.Engine.Default.Invalid_Option_Error import all -from project.Data.Text.Regex.Engine.Default.Invalid_Option_Error export all - -## PRIVATE - - An error representing that one of the passed options was invalid. - - Arguments: - - opt: The option that was not valid for this regex engine. -type Invalid_Option_Error - Invalid_Option_Error_Data (opt : Any) - - ## PRIVATE - - Provides a human-readable representation of the invalid option error. - to_display_text : Text - to_display_text self = - "The option " + self.opt.to_text + " is not valid for the default regex engine." 
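As `from_enso_options` above shows, the engine folds both its own `Option` values and the shared `Regex_Option` values into a single bitmask for the underlying Java `Pattern`. A brief sketch of mixing the two when compiling directly against the default engine, with import paths assumed to mirror the `project.*` imports used above:

    import Standard.Base.Data.Text.Regex.Engine.Default as Default_Engine
    import Standard.Base.Data.Text.Regex.Regex_Option.Regex_Option

    # `Literal_Pattern` is engine-specific, `Case_Insensitive` is shared; both
    # end up in the same options bitmask passed to the Java regex pattern.
    example_options =
        engine = Default_Engine.new [Default_Engine.Option.Literal_Pattern]
        pattern = engine.compile "a.b" [Regex_Option.Case_Insensitive]
        pattern.matches "A.B"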
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Match.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Match.enso new file mode 100644 index 000000000000..9d7fb77347d6 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Match.enso @@ -0,0 +1,105 @@ +import project.Any.Any +import project.Data.Map.Map +import project.Data.Numbers.Integer +import project.Data.Text.Span.Span +import project.Data.Text.Text +import project.Data.Text.Regex.No_Such_Group +import project.Data.Vector.Vector +import project.Error.Common.Unimplemented +import project.Nothing.Nothing + +## The `Data.Text.Regex.Engine.Match` interface. +type Match + + ## PRIVATE + + Gets the text matched by the group with the provided identifier, or + `Nothing` if the group did not participate in the match. If no such group + exists for the provided identifier, a `No_Such_Group` is returned. + + Arguments: + - id: The index or name of that group. + + ? The Full Match + The group with index 0 is always the full match of the pattern. + + ? Named Groups by Index + If the regex contained named groups, these may also be accessed by + index based on their position in the pattern. + group : Integer | Text -> Text | Nothing ! No_Such_Group + group self _ = Unimplemented.throw "This is an interface only." + + ## PRIVATE + + Gets a vector containing the results of _all_ of the capturing groups in + the pattern, replacing the value of groups that did not participate in + the match with `default`. + + Arguments: + - default: The value to return for a given index when the group at that + index did not participate in the match. The default for this argument + should be `Nothing`. + + ? The Full Match + The group with index 0 is always the full match of the pattern. + + ? Named Groups by Index + If the regex contained named groups, these may also be accessed by + index based on their position in the pattern. + groups : Any -> Vector (Text | Any) + groups self _ = Unimplemented.throw "This is an interface only." + + ## PRIVATE + + Gets a map containing the named capturing groups for the pattern, + replacing the value for groups that did not participate in the match with + `default`. + + Arguments: + - default: The value to return for a given name when the group at that + index did not participate in the match. This should default to + `Nothing`. + named_groups : Any -> Map Text (Text | Any) + named_groups self _ = Unimplemented.throw "This is an interface only." + + ## PRIVATE + + Gets the index of the first character captured by the group with the + given identifier, or `Nothing` if the group did not participate in the + match. + + Arguments: + - id: The identifier for the group to fetch the start index for. + start : Integer | Text -> Integer | Nothing ! No_Such_Group + start self _ = Unimplemented.throw "This is an interface only." + + ## PRIVATE + + Gets the index of the first character after `start` that was not captured + by the group with the given identifier, or `Nothing` if the group did not + participate in the match. + + Arguments: + - id: The identifier for the group to fetch the end index for. + end : Integer | Text -> Integer | Nothing ! No_Such_Group + end self _ = Unimplemented.throw "This is an intercace only." + + ## PRIVATE + + Returns the span matched by the group with the provided identifier, or + `Nothing` if the group did not participate in the match. + + Arguments: + - id: The identifier for the group to fetch the end index for. 
+ span : Integer | Text -> Span | Nothing ! No_Such_Group + span self _ = Unimplemented.throw "This is an interface only." + + ## PRIVATE + + Returns the start character index of the match's region. + start_position : Integer + start_position self = Unimplemented.throw "This is an interface only." + + ## Returns the end character index of the match's region. + end_position : Integer + end_position self = Unimplemented.throw "This is an interface only." diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Pattern.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Pattern.enso new file mode 100644 index 000000000000..6a7ed097c49b --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Pattern.enso @@ -0,0 +1,88 @@ +import project.Data.Boolean.Boolean +import project.Data.Numbers.Integer +import project.Data.Text.Matching_Mode +import project.Data.Text.Regex.Regex_Mode.Regex_Mode +import project.Data.Text.Text +import project.Data.Vector.Vector +import project.Error.Common.Unimplemented +import project.Nothing.Nothing + +## The `Data.Text.Regex.Engine.Pattern` interface. +type Pattern + + ## PRIVATE + + Tries to match the provided `input` against the pattern `self`. + + Arguments: + - input: The text to match the pattern described by `self` against. + - mode: The matching mode to use. This must default to `Regex_Mode.All`. + + This method will _always_ return `Nothing` if it fails to match. + + ? Return Type + When asked to match in a mode that can only provide a single match, the + return type is either a single `Match` object. When asked to match in a + mode that permits multiple matches, it will always return a `Vector`, + even if only a single match is found. + match : Text -> (Regex_Mode | Matching_Mode.First | Matching_Mode.Last) -> Match | Vector Match | Nothing + match self _ _ = Unimplemented.throw "This is an interface only." + + ## PRIVATE + + Returns `True` if the input matches against the pattern described by + `self`, otherwise `False`. + + Arguments: + - input: The text to check for matching. + matches : Text -> Boolean + matches self _ = Unimplemented.throw "This is an interface only." + + ## PRIVATE + + Tries to find the text in the `input` that matches against the pattern + `self`. + + Arguments: + - input: The text to find matches in. + - mode: The matching mode to use. This must default to `Regex_Mode.All` + + This method will _always_ return `Nothing` if it fails to find any + matches. + + ? Return Type + When asked to match in a mode that can only provide a single match, the + return type is either a single `Match` object. When asked to match in a + mode that permits multiple matches, it will always return a `Vector`, + even if only a single match is found. + find : Text -> (Regex_Mode | Matching_Mode.First | Matching_Mode.Last) -> Text | Vector Text | Nothing + find self _ _ = Unimplemented.throw "This is an interface only." + + ## PRIVATE + + Splits the `input` text based on the pattern described by `self`. + + Arguments: + - input: The text to split based on the pattern described by `self`. + - mode: The splitting mode to use. This must default to `Regex_Mode.All`. + + This method will _always_ return a vector. If no splits take place, the + vector will contain a single element. + split : Text -> (Matching_Mode.First | Integer | Regex_Mode) -> Vector Text + split self _ _ = Unimplemented.throw "This is an interface only."
+ + ## PRIVATE + + Replace all occurrences of the pattern described by `self` in the `input` + with the specified `replacement`. + + Arguments: + - input: The text in which to perform the replacement(s). + - replacement: The literal text with which to replace any matches. + - mode: The matching mode to use for finding candidates to replace. This + must default to `Regex_Mode.All`. + + If this method performs no replacements it will return the `input` text + unchanged. + replace : Text -> Text -> Regex_Mode | Matching_Mode.First | Matching_Mode.Last | Integer -> Text + replace self _ _ _ = Unimplemented.throw "This is an interface only." diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Regex_Mode.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Regex_Mode.enso index 76849c1dbdbe..763419af6599 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Regex_Mode.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Regex_Mode.enso @@ -3,13 +3,8 @@ This lets you configure how you want to match, from the `First` match only, to matching on the `Full` content of the input text. -from Standard.Base import all -from Standard.Base.Data.Text.Matching_Mode import First, Last -from Standard.Base.Data.Text.Matching_Mode export First, Last - -# TODO Dubious constructor export -from project.Data.Text.Regex.Regex_Mode.Regex_Mode import all -from project.Data.Text.Regex.Regex_Mode.Regex_Mode export all +import project.Data.Numbers.Integer +import project.Data.Text.Matching_Mode type Regex_Mode ## The regex will make all possible matches. @@ -29,5 +24,5 @@ type Regex_Mode The `start` and `end` indices range over _characters_ in the text. The precise definition of `character` is, for the moment, defined by the regular expression engine itself. - Bounded (start : Integer) (end : Integer) (mode : (First | Integer | All | Full) = All) + Bounded (start : Integer) (end : Integer) (mode : (Matching_Mode.First | Matching_Mode.Last | Regex_Mode) = Regex_Mode.All) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Option.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Regex_Option.enso similarity index 80% rename from distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Option.enso rename to distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Regex_Option.enso index 4df9e2898ba7..e005fbbe18a4 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Option.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Regex_Option.enso @@ -3,14 +3,7 @@ In this file, Enso provides a set of standard options that must be supported by all regex engines integrated with Enso. -from Standard.Base import all - -# TODO Dubious constructor export -from project.Data.Text.Regex.Option.Option import all -from project.Data.Text.Regex.Option.Option export all -from project.Data.Text.Regex.Engine.Engine export all - -type Option +type Regex_Option ## Specifies that all predefined character classes and POSIX character classes will match _only_ on ASCII characters. @@ -30,10 +23,10 @@ type Option Comments mode has the following changes: - Whitespace within the pattern is ignored, except when within a - character class or when preceeded by an unescaped backslash, or within + character class or when preceded by an unescaped backslash, or within grouping constructs (e.g. `(?...)`). 
- When a line contains a `#`, that is not in a character class and is not - preceeded by an unescaped backslash, all characters from the leftmost + preceded by an unescaped backslash, all characters from the leftmost such `#` to the end of the line are ignored. That is to say, they act as _comments_ in the regex. Comments diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex_Matcher.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex_Matcher.enso index c2332ac5f1b3..23e4dba3619d 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex_Matcher.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex_Matcher.enso @@ -1,10 +1,13 @@ -from Standard.Base import all -import Standard.Base.Data.Text.Regex.Engine.Pattern -from Standard.Base.Data.Text.Matching import match_criteria_implementation +import project.Any.Any +import project.Data.Text.Case_Sensitivity.Case_Sensitivity +import project.Data.Text.Matching +import project.Data.Text.Regex +import project.Data.Text.Regex.Pattern.Pattern +import project.Data.Text.Text +import project.Data.Vector.Vector +import project.Error.Problem_Behavior.Problem_Behavior -# TODO Dubious constructor export -from project.Data.Text.Regex_Matcher.Regex_Matcher import all -from project.Data.Text.Regex_Matcher.Regex_Matcher export all +from project.Data.Boolean import Boolean, True, False ## Represents regex matching mode. type Regex_Matcher @@ -103,5 +106,5 @@ type Regex_Matcher ordering the result according to the order of criteria that matched them. Text_Matcher.match_criteria [Pair.new "foo" 42, Pair.new "bar" 33, Pair.new "baz" 10, Pair.new "foo" 0, Pair.new 10 10] ["bar", "foo"] reorder=True name_mapper=_.name == [Pair.new "bar" 33, Pair.new "foo" 42, Pair.new "foo" 0] - match_criteria : Vector Any -> Vector Text -> Boolean -> (Any -> Text) -> Problem_Behavior -> Vector Any ! No_Matches_Found - match_criteria self = match_criteria_implementation self + match_criteria : Vector Any -> Vector Text -> Boolean -> (Any -> Text) -> Problem_Behavior -> Vector Any ! Matching.No_Matches_Found + match_criteria self = Matching.match_criteria_implementation self diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Span.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Span.enso index ddf33295aa4b..fbb0f99b140a 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Span.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Span.enso @@ -7,19 +7,22 @@ example_span = text = "Hello!" - Span_Data 0 3 text + Span.Value 0 3 text -from Standard.Base import all +import project.Data.Numbers.Integer +import project.Data.Pair.Pair +import project.Data.Range.Extensions +import project.Data.Range.Range +import project.Data.Text.Text +import project.Error.Error +import project.Nothing.Nothing -from Standard.Base.Error.Common import Index_Out_Of_Bounds_Error +from project.Data.Boolean import Boolean, True, False +from project.Error.Common import Index_Out_Of_Bounds_Error, Index_Out_Of_Bounds_Error_Data, Illegal_State, Illegal_Argument polyglot java import org.enso.base.Text_Utils polyglot java import com.ibm.icu.text.BreakIterator -# TODO Dubious constructor export -from project.Data.Text.Span.Span import all -from project.Data.Text.Span.Span export all - type Span ## A representation of a span of characters in Enso's `Text` type. @@ -42,8 +45,8 @@ type Span example_span = text = "Hello!" 
range = 0.up_to 3 - Span.Span_Data range text - Span_Data (range : Range) (parent : Text) + Span.Value range text + Value (range : Range) (parent : Text) ## The index of the first character included in the span. @@ -84,17 +87,12 @@ type Span Find the span of code units corresponding to the span of extended grapheme clusters. text = 'ae\u{301}fz' - (Span_Data (Range 1 3) text).to_utf_16_span == (Utf_16_Span_Data (Range 1 4) text) + (Span.Value (Range 1 3) text).to_utf_16_span == (Utf_16_Span_Data (Range 1 4) text) to_utf_16_span : Utf_16_Span to_utf_16_span self = - Utf_16_Span_Data (range_to_char_indices self.parent self.range) self.parent - -# TODO Dubious constructor export -from project.Data.Text.Span.Utf_16_Span import all -from project.Data.Text.Span.Utf_16_Span export all + Utf_16_Span.Value (range_to_char_indices self.parent self.range) self.parent type Utf_16_Span - ## A representation of a span of UTF-16 code units in Enso's `Text` type. Arguments: @@ -110,7 +108,7 @@ type Utf_16_Span example_span = text = 'a\u{301}bc' Span.Utf_16_Span_Data (Range 0 3) text - Utf_16_Span_Data (range : Range) (parent : Text) + Value (range : Range) (parent : Text) ## The index of the first code unit included in the span. start : Integer @@ -144,15 +142,15 @@ type Utf_16_Span text = 'a\u{301}e\u{302}o\u{303}' span = Utf_16_Span_Data (Range 1 5) text # The span contains the units [\u{301}, e, \u{302}, o]. extended = span.to_grapheme_span - extended == Span_Data (Range 0 3) text # The span is extended to the whole string since it contained code units from every grapheme cluster. + extended == Span.Value (Range 0 3) text # The span is extended to the whole string since it contained code units from every grapheme cluster. extended.to_utf_16_span == Utf_16_Span_Data (Range 0 6) text to_grapheme_span : Span - to_grapheme_span self = if (self.start < 0) || (self.end > Text_Utils.char_length self.parent) then Error.throw (Illegal_State_Error_Data "Utf_16_Span indices are out of range of the associated text.") else - if self.end < self.start then Error.throw (Illegal_State_Error "Utf_16_Span invariant violation: start <= end") else + to_grapheme_span self = if (self.start < 0) || (self.end > Text_Utils.char_length self.parent) then Error.throw (Illegal_State.Error "Utf_16_Span indices are out of range of the associated text.") else + if self.end < self.start then Error.throw (Illegal_State.Error "Utf_16_Span invariant violation: start <= end") else case self.start == self.end of True -> grapheme_ix = Text_Utils.utf16_index_to_grapheme_index self.parent self.start - Span_Data (grapheme_ix.up_to grapheme_ix) self.parent + Span.Value (grapheme_ix.up_to grapheme_ix) self.parent False -> grapheme_ixes = Text_Utils.utf16_indices_to_grapheme_indices self.parent [self.start, self.end - 1].to_array grapheme_first = grapheme_ixes.at 0 @@ -162,13 +160,13 @@ type Utf_16_Span only a part of a grapheme were contained in our original span, the resulting span will be extended to contain this whole grapheme. grapheme_end = grapheme_last + 1 - Span_Data (grapheme_first.up_to grapheme_end) self.parent + Span.Value (grapheme_first.up_to grapheme_end) self.parent ## PRIVATE Utility function taking a range pointing at grapheme clusters and converting to a range on the underlying code units. -range_to_char_indices : Text -> Range -> Range ! 
(Index_Out_Of_Bounds_Error | Illegal_Argument_Error) -range_to_char_indices text range = if range.step != 1 then Error.throw (Illegal_Argument_Error_Data "Text indexing only supports ranges with step equal to 1.") else +range_to_char_indices : Text -> Range -> Range ! (Index_Out_Of_Bounds_Error | Illegal_Argument) +range_to_char_indices text range = if range.step != 1 then Error.throw (Illegal_Argument.Error "Text indexing only supports ranges with step equal to 1.") else len = text.length start = if range.start < 0 then range.start + len else range.start end = if range.end == Nothing then len else (if range.end < 0 then range.end + len else range.end) @@ -185,6 +183,3 @@ range_to_char_indices text range = if range.step != 1 then Error.throw (Illegal_ start_index = iterator.next start end_index = iterator.next (end - start) start_index.up_to end_index - -Span.from (that:Utf_16_Span) = that.to_grapheme_span -Utf_16_Span.from (that:Span) = that.to_utf_16_span diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Matcher.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Matcher.enso index 3ac47b19bee9..39730f3cda56 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Matcher.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Matcher.enso @@ -1,9 +1,10 @@ -from Standard.Base import all -from Standard.Base.Data.Text.Matching import match_criteria_implementation - -# TODO Dubious constructor export -from project.Data.Text.Text_Matcher.Text_Matcher import all -from project.Data.Text.Text_Matcher.Text_Matcher export all +import project.Any.Any +import project.Data.Boolean.Boolean +import project.Data.Locale.Locale +import project.Data.Text.Matching +import project.Data.Text.Text +import project.Data.Vector.Vector +import project.Error.Problem_Behavior.Problem_Behavior ## Represents exact text matching mode. type Text_Matcher @@ -27,8 +28,8 @@ type Text_Matcher Text_Matcher.match_single_criterion "Foobar" "foo" == False match_single_criterion : Text -> Text -> Boolean match_single_criterion self name criterion = case self of - Case_Sensitive -> name == criterion - Case_Insensitive locale -> name.equals_ignore_case criterion locale=locale + Text_Matcher.Case_Sensitive -> name == criterion + Text_Matcher.Case_Insensitive locale -> name.equals_ignore_case criterion locale=locale ## UNSTABLE Selects objects from an input list that match any of the provided criteria. @@ -68,5 +69,5 @@ type Text_Matcher ordering the result according to the order of criteria that matched them. Text_Matcher.match_criteria [Pair.new "foo" 42, Pair.new "bar" 33, Pair.new "baz" 10, Pair.new "foo" 0, Pair.new 10 10] ["bar", "foo"] reorder=True name_mapper=_.name == [Pair.new "bar" 33, Pair.new "foo" 42, Pair.new "foo" 0] - match_criteria : Vector Any -> Vector Text -> Boolean -> (Any -> Text) -> Problem_Behavior -> Vector Any ! No_Matches_Found - match_criteria self = match_criteria_implementation self + match_criteria : Vector Any -> Vector Text -> Boolean -> (Any -> Text) -> Problem_Behavior -> Vector Any ! 
Matching.No_Matches_Found + match_criteria self = Matching.match_criteria_implementation self diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Ordering.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Ordering.enso index ae3dcd677fb4..da10b1d249a8 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Ordering.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Ordering.enso @@ -1,8 +1,8 @@ -from Standard.Base import all +import project.Data.Locale.Locale +import project.Data.Text.Case_Sensitivity.Case_Sensitivity +import project.Nothing.Nothing -# TODO Dubious constructor export -from project.Data.Text.Text_Ordering.Text_Ordering import all -from project.Data.Text.Text_Ordering.Text_Ordering export all +from project.Data.Boolean import Boolean, True, False type Text_Ordering ## Specifies the ordering of text values. @@ -45,6 +45,6 @@ type Text_Ordering is to be determined by the backend. case_sensitivity : Case_Sensitivity case_sensitivity self = case self of - Default _ -> Nothing - Case_Sensitive _ -> Case_Sensitivity.Sensitive - Case_Insensitive locale _ -> Case_Sensitivity.Insensitive locale + Text_Ordering.Default _ -> Nothing + Text_Ordering.Case_Sensitive _ -> Case_Sensitivity.Sensitive + Text_Ordering.Case_Insensitive locale _ -> Case_Sensitivity.Insensitive locale diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso index b0d9440b0782..2f601bfe3c41 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso @@ -1,10 +1,20 @@ -from Standard.Base import all - -from project.Error.Common import Error, Index_Out_Of_Bounds_Error, Illegal_Argument_Error_Data -import project.Data.Index_Sub_Range as Index_Sub_Range_Module +import project.Data.Index_Sub_Range.Index_Sub_Range +import project.Data.Numbers.Integer +import project.Data.Pair.Pair +import project.Data.Range.Extensions +import project.Data.Range.Range +import project.Data.Text.Text +import project.Data.Vector.Vector +import project.Error.Error +import project.Nothing.Nothing +import project.Panic.Panic import project.Random -from Standard.Base.Data.Text.Span import range_to_char_indices +from project.Data.Boolean import Boolean, True, False +from project.Error.Common import Index_Out_Of_Bounds_Error, Illegal_Argument, Index_Out_Of_Bounds_Error_Data + +import project.Data.Index_Sub_Range as Index_Sub_Range_Module +import project.Data.Text.Span as Span_Module polyglot java import com.ibm.icu.text.BreakIterator polyglot java import org.enso.base.Text_Utils @@ -29,78 +39,6 @@ type Text_Sub_Range Select an empty string if the input does not contain `delimiter`. After_Last (delimiter : Text) -## PRIVATE - Finds code-point indices corresponding to the part of the input matching the - range specified by one of the types: `Text_Sub_Range`, `Index_Sub_Range`, - `Range`. - - This method may return either a single range instance or a vector of ranges. - - While the input ranges may have varying steps, they are processed and split - in such a way that the ranges returned by this method always have a step - equal to 1. 
-find_codepoint_ranges : Text -> (Text_Sub_Range | Index_Sub_Range | Range | Integer) -> (Range | Codepoint_Ranges) -find_codepoint_ranges text subrange = - case subrange of - Text_Sub_Range.Before delimiter -> - if delimiter.is_empty then (0.up_to 0) else - span = Text_Utils.span_of text delimiter - if span.is_nothing then (0.up_to (Text_Utils.char_length text)) else - (0.up_to span.codeunit_start) - Text_Sub_Range.Before_Last delimiter -> - if delimiter.is_empty then (0.up_to (Text_Utils.char_length text)) else - span = Text_Utils.last_span_of text delimiter - if span.is_nothing then (0.up_to (Text_Utils.char_length text)) else - (0.up_to span.codeunit_start) - Text_Sub_Range.After delimiter -> - if delimiter.is_empty then (0.up_to (Text_Utils.char_length text)) else - span = Text_Utils.span_of text delimiter - if span.is_nothing then (0.up_to 0) else - (span.codeunit_end.up_to (Text_Utils.char_length text)) - Text_Sub_Range.After_Last delimiter -> - if delimiter.is_empty then (0.up_to 0) else - span = Text_Utils.last_span_of text delimiter - if span.is_nothing then (0.up_to 0) else - (span.codeunit_end.up_to (Text_Utils.char_length text)) - Index_Sub_Range.First count -> - if count <= 0 then (0.up_to 0) else - iterator = BreakIterator.getCharacterInstance - iterator.setText text - start_index = iterator.next count - 0.up_to (if start_index == -1 then (Text_Utils.char_length text) else start_index) - Index_Sub_Range.Last count -> - if count <= 0 then (0.up_to 0) else - iterator = BreakIterator.getCharacterInstance - iterator.setText text - iterator.last - start_index = iterator.next -count - (if start_index == -1 then 0 else start_index).up_to (Text_Utils.char_length text) - Index_Sub_Range.While predicate -> - indices = find_sub_range_end text _-> start-> end-> - predicate (Text_Utils.substring text start end) . not - if indices.first.is_nothing then (0.up_to indices.second) else - 0.up_to indices.first - Index_Sub_Range.By_Index indices -> - case indices of - _ : Vector -> - if indices.length == 1 then resolve_index_or_range text indices.first else - batch_resolve_indices_or_ranges text indices - _ -> resolve_index_or_range text indices - Index_Sub_Range.Sample count seed -> - rng = Random.new seed - indices = Random.random_indices text.length count rng - find_codepoint_ranges text (Index_Sub_Range.By_Index indices) - Index_Sub_Range.Every step start -> - if step <= 0 then Error.throw (Illegal_Argument_Error_Data "Step within Every must be positive.") else - len = text.length - if start >= len then 0.up_to 0 else - range = start.up_to text.length . with_step step - find_codepoint_ranges text (Index_Sub_Range.By_Index range) - _ : Range -> - find_codepoint_ranges text (Index_Sub_Range.By_Index subrange) - _ : Integer -> - find_codepoint_ranges text (Index_Sub_Range.First subrange) - type Codepoint_Ranges ## PRIVATE A list of codepoint ranges corresponding to the matched parts of the @@ -121,6 +59,78 @@ type Codepoint_Ranges sorted_and_distinct_ranges self = if self.is_sorted_and_distinct then self.ranges else Index_Sub_Range_Module.sort_and_merge_ranges self.ranges + ## PRIVATE + Finds code-point indices corresponding to the part of the input matching the + range specified by one of the types: `Text_Sub_Range`, `Index_Sub_Range`, + `Range`. + + This method may return either a single range instance or a vector of ranges. + + While the input ranges may have varying steps, they are processed and split + in such a way that the ranges returned by this method always have a step + equal to 1. 
+ resolve : Text -> (Text_Sub_Range | Index_Sub_Range | Range | Integer) -> (Range | Codepoint_Ranges) + resolve text subrange = + case subrange of + Text_Sub_Range.Before delimiter -> + if delimiter.is_empty then (0.up_to 0) else + span = Text_Utils.span_of text delimiter + if span.is_nothing then (0.up_to (Text_Utils.char_length text)) else + (0.up_to span.codeunit_start) + Text_Sub_Range.Before_Last delimiter -> + if delimiter.is_empty then (0.up_to (Text_Utils.char_length text)) else + span = Text_Utils.last_span_of text delimiter + if span.is_nothing then (0.up_to (Text_Utils.char_length text)) else + (0.up_to span.codeunit_start) + Text_Sub_Range.After delimiter -> + if delimiter.is_empty then (0.up_to (Text_Utils.char_length text)) else + span = Text_Utils.span_of text delimiter + if span.is_nothing then (0.up_to 0) else + (span.codeunit_end.up_to (Text_Utils.char_length text)) + Text_Sub_Range.After_Last delimiter -> + if delimiter.is_empty then (0.up_to 0) else + span = Text_Utils.last_span_of text delimiter + if span.is_nothing then (0.up_to 0) else + (span.codeunit_end.up_to (Text_Utils.char_length text)) + Index_Sub_Range.First count -> + if count <= 0 then (0.up_to 0) else + iterator = BreakIterator.getCharacterInstance + iterator.setText text + start_index = iterator.next count + 0.up_to (if start_index == -1 then (Text_Utils.char_length text) else start_index) + Index_Sub_Range.Last count -> + if count <= 0 then (0.up_to 0) else + iterator = BreakIterator.getCharacterInstance + iterator.setText text + iterator.last + start_index = iterator.next -count + (if start_index == -1 then 0 else start_index).up_to (Text_Utils.char_length text) + Index_Sub_Range.While predicate -> + indices = find_sub_range_end text _-> start-> end-> + predicate (Text_Utils.substring text start end) . not + if indices.first.is_nothing then (0.up_to indices.second) else + 0.up_to indices.first + Index_Sub_Range.By_Index indices -> + case indices of + _ : Vector -> + if indices.length == 1 then resolve_index_or_range text indices.first else + batch_resolve_indices_or_ranges text indices + _ -> resolve_index_or_range text indices + Index_Sub_Range.Sample count seed -> + rng = Random.new seed + indices = Random.random_indices text.length count rng + Codepoint_Ranges.resolve text (Index_Sub_Range.By_Index indices) + Index_Sub_Range.Every step start -> + if step <= 0 then Error.throw (Illegal_Argument.Error "Step within Every must be positive.") else + len = text.length + if start >= len then 0.up_to 0 else + range = start.up_to text.length . with_step step + Codepoint_Ranges.resolve text (Index_Sub_Range.By_Index range) + _ : Range -> + Codepoint_Ranges.resolve text (Index_Sub_Range.By_Index subrange) + _ : Integer -> + Codepoint_Ranges.resolve text (Index_Sub_Range.First subrange) + ## PRIVATE Utility function to find char indices for Text_Sub_Range. 
Arguments: @@ -144,7 +154,7 @@ find_sub_range_end text predicate = loop 0 0 iterator.next ## PRIVATE -resolve_index_or_range text descriptor = Panic.recover [Index_Out_Of_Bounds_Error_Data, Illegal_Argument_Error_Data] <| +resolve_index_or_range text descriptor = Panic.recover [Index_Out_Of_Bounds_Error_Data, Illegal_Argument.Error] <| iterator = BreakIterator.getCharacterInstance iterator.setText text case descriptor of @@ -160,7 +170,7 @@ resolve_index_or_range text descriptor = Panic.recover [Index_Out_Of_Bounds_Erro true_range = normalize_range descriptor len if descriptor.is_empty then 0.up_to 0 else case true_range.step == 1 of - True -> range_to_char_indices text true_range + True -> Span_Module.range_to_char_indices text true_range False -> ranges = Vector.new_builder if true_range.step <= 0 then panic_on_non_positive_step @@ -192,7 +202,7 @@ character_ranges text = ranges.to_vector ## PRIVATE -batch_resolve_indices_or_ranges text descriptors = Panic.recover [Index_Out_Of_Bounds_Error_Data, Illegal_Argument_Error_Data] <| +batch_resolve_indices_or_ranges text descriptors = Panic.recover [Index_Out_Of_Bounds_Error_Data, Illegal_Argument.Error] <| ## This is pre-computing the ranges for all characters in the string, which may be much more than necessary, for example if all ranges reference only the beginning of the string. In the future we may want to replace this @@ -222,7 +232,7 @@ batch_resolve_indices_or_ranges text descriptors = Panic.recover [Index_Out_Of_B ## PRIVATE panic_on_non_positive_step = - Panic.throw (Illegal_Argument_Error_Data "Range step must be positive.") + Panic.throw (Illegal_Argument.Error "Range step must be positive.") ## PRIVATE Ensures that the range is valid and trims it to the length of the collection. @@ -230,7 +240,7 @@ normalize_range range length = if range.step <= 0 then panic_on_non_positive_step # We may add support for negative indices in the future. if (range.start < 0) || (range.end < 0) then - Panic.throw (Illegal_Argument_Error_Data "Ranges with negative indices are not supported for indexing.") + Panic.throw (Illegal_Argument.Error "Ranges with negative indices are not supported for indexing.") if (range.start >= length) then Panic.throw (Index_Out_Of_Bounds_Error_Data range.start length) if range.end >= length then range.start.up_to length . 
with_step range.step else diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso index e568f978220c..dc6cc6078b5b 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Json.Json import project.Data.Locale.Locale import project.Data.Numbers.Integer @@ -13,14 +13,16 @@ import project.Data.Time.Period.Period import project.Data.Time.Time_Of_Day.Time_Of_Day import project.Data.Time.Time_Zone.Time_Zone import project.Data.Vector.Vector +import project.Error.Error import project.Math import project.Meta import project.Nothing.Nothing +import project.Panic.Panic import project.Polyglot from project.Data.Boolean import Boolean, True, False from project.Data.Time.Date_Time import ensure_in_epoch -from project.Error.Common import Error, Panic, Type_Error_Data, Time_Error, Polyglot_Error_Data, Illegal_Argument_Error, Illegal_Argument_Error_Data, unimplemented +from project.Error.Common import Type_Error_Data, Time_Error, Polyglot_Error, Illegal_Argument polyglot java import org.enso.base.Time_Utils polyglot java import java.time.temporal.ChronoField @@ -197,7 +199,7 @@ type Date _ : Text -> parse_builtin text pattern _ -> Panic.throw (Time_Error.Error "An invalid pattern was provided.") result . map_error <| case _ of - Polyglot_Error_Data err -> Time_Error.Error err.getMessage + Polyglot_Error.Polyglot_Error_Data err -> Time_Error.Error err.getMessage ex -> ex ## Get the year field. @@ -270,7 +272,7 @@ type Date ## Returns the century of the date. century : Integer century self = if self.year > 0 then (self.year - 1).div 100 + 1 else - Error.throw (Illegal_Argument_Error_Data "Century can only be given for AD years.") + Error.throw (Illegal_Argument.Error "Century can only be given for AD years.") ## Returns the quarter of the year the date falls into. quarter : Integer @@ -377,7 +379,7 @@ type Date import Standard.Base.Data.Time.Duration example_add = Date.new 2020 + (Period.new months=6) - + : Period -> Date ! (Time_Error | Illegal_Argument_Error) + + : Period -> Date ! (Time_Error | Illegal_Argument) + self period = case period of _ : Period -> @@ -385,7 +387,7 @@ type Date _ : Duration -> Error.throw (Time_Error.Error "Date does not support adding/subtracting Duration. Use Period instead.") _ -> - Error.throw (Illegal_Argument_Error_Data "Illegal period argument") + Error.throw (Illegal_Argument.Error "Illegal period argument") ## Shift the date by the specified amount of business days. @@ -522,7 +524,7 @@ type Date import Standard.Base.Data.Time.Duration example_subtract = Date.new 2020 - (Period.new days=7) - - : Period -> Date ! (Time_Error | Illegal_Argument_Error) + - : Period -> Date ! (Time_Error | Illegal_Argument) - self period = case period of _ : Period -> @@ -531,7 +533,7 @@ type Date _ : Duration -> Error.throw (Time_Error.Error "Date does not support adding/subtracting Duration. Use Period instead.") _ -> - Error.throw (Illegal_Argument_Error_Data "Illegal period argument") + Error.throw (Illegal_Argument.Error "Illegal period argument") ## A Date to Json conversion. 
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso index 4f6ecb744f1d..c689bcb3a00e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Json.Json import project.Data.Locale.Locale import project.Data.Numbers.Integer @@ -14,12 +14,14 @@ import project.Data.Time.Time_Of_Day.Time_Of_Day import project.Data.Time.Time_Period.Time_Period import project.Data.Time.Time_Zone.Time_Zone import project.Data.Vector.Vector +import project.Error.Error import project.Meta import project.Nothing.Nothing +import project.Panic.Panic import project.Warning.Warning from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error, Panic, Time_Error, Type_Error_Data +from project.Error.Common import Time_Error, Type_Error_Data polyglot java import java.time.format.DateTimeFormatter polyglot java import java.time.temporal.ChronoField diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Day_Of_Week_From.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Day_Of_Week_From.enso index 6a231b1284d4..1be6b111e28e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Day_Of_Week_From.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Day_Of_Week_From.enso @@ -1,8 +1,9 @@ import project.Data.Numbers.Integer import project.Data.Time.Day_Of_Week.Day_Of_Week -from project.Data.Boolean import Boolean, True, False +import project.Error.Error -from project.Error.Common import Error, Illegal_Argument_Error_Data +from project.Data.Boolean import Boolean, True, False +from project.Error.Common import Illegal_Argument ## Convert from an integer to a Day_Of_Week @@ -17,7 +18,7 @@ Day_Of_Week.from (that : Integer) (first_day:Day_Of_Week=Day_Of_Week.Sunday) (st True -> valid_range = if start_at_zero then "0-6" else "1-7" message = "Invalid day of week (must be " + valid_range + ")." 
- Error.throw (Illegal_Argument_Error_Data message) + Error.throw (Illegal_Argument.Error message) False -> day_number = if first_day == Day_Of_Week.Sunday then shifted else (shifted + (first_day.to_integer start_at_zero=True)) % 7 diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Duration.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Duration.enso index e86a0d072a07..2005ddfd2b79 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Duration.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Duration.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Json.Json import project.Data.Numbers.Integer import project.Data.Ordering.Ordering @@ -6,12 +6,14 @@ import project.Data.Pair.Pair import project.Data.Time.Date_Time.Date_Time import project.Data.Time.Period.Period import project.Data.Vector.Vector +import project.Error.Error import project.Meta +import project.Panic.Panic import project.Runtime import project.System from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error, Panic, Type_Error_Data, Time_Error, Illegal_Argument_Error, Illegal_Argument_Error_Data, Illegal_State_Error, Illegal_State_Error_Data +from project.Error.Common import Type_Error_Data, Time_Error, Illegal_Argument, Illegal_State polyglot java import java.time.Duration as Java_Duration polyglot java import java.time.Period as Java_Period @@ -46,13 +48,13 @@ new_builtin : Integer -> Integer -> Integer -> Integer -> Integer -> Integer -> new_builtin hours minutes seconds milliseconds nanoseconds = @Builtin_Method "Duration.new_builtin" ## PRIVATE -ensure_duration : Any -> (Any -> Any) -> Any ! (Time_Error | Illegal_Argument_Error) +ensure_duration : Any -> (Any -> Any) -> Any ! (Time_Error | Illegal_Argument) ensure_duration object ~action = case object of _ : Duration -> action _ : Period -> Error.throw (Time_Error.Error "Cannot use Period as a parameter") x -> - Error.throw Illegal_Argument_Error_Data <| + Error.throw Illegal_Argument.Error <| "Expected Duration type, got: " + (Meta.get_qualified_type_name x) @Builtin_Type @@ -254,24 +256,24 @@ type Duration ## UNSTABLE Convert the duration to total milliseconds. - total_milliseconds : Integer ! Illegal_State_Error + total_milliseconds : Integer ! Illegal_State total_milliseconds self = Panic.catch ArithmeticException (self.total_milliseconds_builtin) _-> - Error.throw (Illegal_State_Error_Data "The duration is too large to convert it to milliseconds") + Error.throw (Illegal_State.Error "The duration is too large to convert it to milliseconds") ## UNSTABLE Convert the duration to total seconds. - total_seconds : Decimal ! Illegal_State_Error + total_seconds : Decimal ! Illegal_State total_seconds self = self.total_milliseconds / 1000.0 ## UNSTABLE Convert the duration to total minutes. - total_minutes : Decimal ! Illegal_State_Error + total_minutes : Decimal ! Illegal_State total_minutes self = self.total_seconds / 60.0 ## UNSTABLE Convert the duration to total minutes. - total_hours : Decimal ! Illegal_State_Error + total_hours : Decimal ! 
Illegal_State total_hours self = self.total_minutes / 60.0 ## Convert this duration to a Vector of hours, minutes, seconds, milliseconds diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso index 27facf2bb4ab..07bc713243eb 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso @@ -1,27 +1,29 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Numbers.Integer import project.Data.Time.Date.Date import project.Data.Time.Duration.Duration import project.Data.Text.Text +import project.Error.Error import project.Meta import project.Nothing.Nothing +import project.Panic.Panic from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error, Panic, Time_Error, Illegal_Argument_Error, Illegal_Argument_Error_Data, Incomparable_Values_Error +from project.Error.Common import Time_Error, Illegal_Argument, Incomparable_Values_Error polyglot java import java.time.Period as Java_Period polyglot java import java.time.DateTimeException polyglot java import java.lang.ArithmeticException ## PRIVATE -ensure_period : Any -> (Any -> Any) -> Text -> Any ! (Time_Error | Illegal_Argument_Error) +ensure_period : Any -> (Any -> Any) -> Text -> Any ! (Time_Error | Illegal_Argument) ensure_period object ~action error_msg="Cannot use Duration as a parameter" = case object of _ : Period -> action _ : Duration -> Error.throw (Time_Error.Error error_msg) x -> - Error.throw Illegal_Argument_Error_Data <| + Error.throw Illegal_Argument.Error <| "Expected Period type, got: " + (Meta.get_qualified_type_name x) ## A date-based amount of time in the ISO-8601 calendar system, such as @@ -96,13 +98,13 @@ type Period import Standard.Base.Data.Time.Period example_add = (Period.new months=1) + (Period.new days=1) - + : Period -> Period ! (Time_Error | Illegal_Argument_Error) + + : Period -> Period ! (Time_Error | Illegal_Argument) + self other_period = ensure_period other_period <| Panic.catch Any (Period.Value (self.internal_period.plus other_period.internal_period)) err-> case err of _ : DateTimeException -> Error.throw Time_Error.Error "Period addition failed:"+err.getMessage - _ : ArithmeticException -> Error.throw Illegal_Argument_Error_Data "Arithmetic error:"+err.getMessage cause=err + _ : ArithmeticException -> Error.throw Illegal_Argument.Error "Arithmetic error:"+err.getMessage cause=err ## Subtract a specified amount of time from this period. @@ -117,13 +119,13 @@ type Period example_subtract = (Period.new years=3) - (Period.new months=11) - - : Period -> Period ! (Time_Error | Illegal_Argument_Error) + - : Period -> Period ! (Time_Error | Illegal_Argument) - self other_period = ensure_period other_period <| Panic.catch Any (Period.Value (self.internal_period.minus other_period.internal_period)) err-> case err of DateTimeException -> Error.throw Time_Error.Error "Period subtraction failed" - ArithmeticException -> Error.throw Illegal_Argument_Error_Data "Arithmetic error" + ArithmeticException -> Error.throw Illegal_Argument.Error "Arithmetic error" ## Check two periods for equality. 
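Likewise, the `ensure_period` guard above turns mixed `Period`/`Duration` arithmetic into a dataflow `Time_Error` instead of a panic. A minimal sketch (assuming the public `Duration.new` constructor, which is not shown in this hunk):

    ok = (Period.new months=1) + (Period.new days=1)
    mixed = (Period.new months=1) + (Duration.new hours=1)
    mixed.is_error   # True; the payload is `Time_Error.Error "Cannot use Duration as a parameter"`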
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso index 14dafdb6b4bc..574e531a0c92 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Json.Json import project.Data.Locale.Locale import project.Data.Numbers.Integer @@ -10,11 +10,13 @@ import project.Data.Time.Duration.Duration import project.Data.Time.Period.Period import project.Data.Time.Time_Period.Time_Period import project.Data.Time.Time_Zone.Time_Zone +import project.Error.Error import project.Meta import project.Nothing.Nothing +import project.Panic.Panic from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error, Panic, Time_Error, Type_Error_Data +from project.Error.Common import Time_Error, Type_Error_Data polyglot java import java.time.format.DateTimeFormatter polyglot java import java.time.Instant diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso index fbb0032e6e6c..0b87601e64c8 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Zone.enso @@ -1,10 +1,12 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Json.Json import project.Data.Numbers.Integer import project.Data.Text.Text +import project.Error.Error +import project.Panic.Panic from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Panic, Error, Time_Error +from project.Error.Common import Time_Error polyglot java import java.time.ZoneId polyglot java import java.time.ZoneOffset diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso index e4aeed3c04db..91f64d24be3e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Array.Array import project.Data.Filter_Condition.Filter_Condition import project.Data.List.List @@ -7,21 +7,22 @@ import project.Data.Numbers.Integer import project.Data.Ordering.Ordering import project.Data.Ordering.Sort_Direction.Sort_Direction import project.Data.Pair.Pair +import project.Data.Range.Range +import project.Data.Range.Extensions import project.Data.Text.Text +import project.Error.Error import project.Function.Function -import project.Nothing.Nothing import project.Math +import project.Nothing.Nothing +import project.Panic.Panic import project.Random from project.Data.Boolean import Boolean, True, False from project.Data.Index_Sub_Range import Index_Sub_Range, take_helper, drop_helper -from project.Data.Json import all -from project.Data.Range import all -from project.Error.Common import Error, Panic, Index_Out_Of_Bounds_Error, Index_Out_Of_Bounds_Error_Data, No_Such_Method_Error, No_Such_Method_Error_Data, Illegal_Argument_Error_Data, Incomparable_Values_Error, Type_Error_Data, Unsupported_Argument_Types_Data +from project.Error.Common import Index_Out_Of_Bounds_Error, Index_Out_Of_Bounds_Error_Data, No_Such_Method_Error, No_Such_Method_Error_Data, Illegal_Argument, Incomparable_Values_Error, 
Type_Error_Data, Unsupported_Argument_Types_Data polyglot java import java.lang.IndexOutOfBoundsException polyglot java import org.enso.base.Array_Builder -polyglot java import java.lang.ClassCastException ## The basic, immutable, vector type. A vector allows to store an arbitrary number of elements, in linear memory. @@ -556,7 +557,7 @@ type Vector a (0.up_to 100).to_vector.short_display_text max_entries=2 == "[0, 1 and 98 more elements]" short_display_text : Integer -> Text short_display_text self max_entries=10 = - if max_entries < 1 then Error.throw <| Illegal_Argument_Error_Data "The `max_entries` parameter must be positive." else + if max_entries < 1 then Error.throw <| Illegal_Argument.Error "The `max_entries` parameter must be positive." else prefix = self.take (Index_Sub_Range.First max_entries) if prefix.length == self.length then self.to_text else remaining_count = self.length - prefix.length @@ -742,15 +743,6 @@ type Vector a if self.length >= n then self else self + (Vector.fill n-self.length elem) - ## Vector to JSON conversion. - - > Example - Convert a vector of numbers to JSON. - - [1, 2, 3].to_json - to_json : Json - to_json self = Json.Array (self.map .to_json) - ## Get the first element from the vector, or an `Empty_Error` if the vector is empty. @@ -878,7 +870,7 @@ type Vector a compare = if order == Sort_Direction.Ascending then comp_ascending else comp_descending - handle_incomparable_value <| + Incomparable_Values_Error.handle_errors <| new_vec_arr.sort compare Vector.from_polyglot_array new_vec_arr @@ -916,7 +908,7 @@ type Vector a More details on the HashCode / HashMap ticket https://www.pivotaltracker.com/story/show/181027272. - handle_incomparable_value <| + Incomparable_Values_Error.handle_errors <| builder = Vector.new_builder result = self.fold Map.empty existing-> item-> @@ -926,16 +918,6 @@ type Vector a existing.insert key True if result.is_error then result else builder.to_vector - - ## UNSTABLE - - Transform the vector into text for displaying as part of its default - visualization. - to_default_visualization_data : Text - to_default_visualization_data self = - json = self.take (Index_Sub_Range.First 100) . to_json - json.to_text - ## UNSTABLE Converts the vector to a list with the same elements. to_list : List @@ -998,6 +980,10 @@ type Builder not_empty : Boolean not_empty self = self.is_empty.not + ## Gets the current length of the builder. + length : Integer + length self = self.java_builder.getSize + ## Appends a new element into this builder and returns it, propagating any errors that the provided element could have contained. @@ -1107,17 +1093,6 @@ type Empty_Error to_display_text : Text to_display_text self = "The vector is empty." -## ADVANCED - Catches possible errors from comparing values and throws an - Incomparable_Values_Error if any occur. -handle_incomparable_value ~function = - handle t = Panic.catch t handler=(_-> Error.throw Incomparable_Values_Error) - handle ClassCastException <| - handle No_Such_Method_Error_Data <| - handle Type_Error_Data <| - handle Unsupported_Argument_Types_Data <| - function.catch Type_Error_Data handler=(_-> Error.throw Incomparable_Values_Error) - ## PRIVATE Creates a new vector where for each range, a corresponding section of the source vector is added to the result. 
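The `Builder.length` accessor added above lets code inspect a builder without materialising it first. A short usage sketch, using only names that already appear in this hunk:

    builder = Vector.new_builder
    builder.append 1
    builder.append 2
    builder.length       # 2
    builder.to_vector    # [1, 2]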
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Error.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Error.enso new file mode 100644 index 000000000000..1f3697ced55d --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Error.enso @@ -0,0 +1,142 @@ +import project.Any.Any +import project.Data.Text.Text +import project.Meta +import project.Panic.Panic +import project.Runtime.Stack_Trace_Element + +from project.Data.Boolean import Boolean, True, False + +## A type representing dataflow errors. + + A dataflow error in Enso is one that behaves like a standard value, and + hence represents erroneous states in a way that exists _within_ standard + control flow. + + ? Dataflow Errors or Panics + Whilst a Panic is useful for unrecoverable situations, most Enso APIs + are designed to use dataflow errors instead. As they exist within the + normal program control flow, they are able to be represented on the + Enso graph. +@Builtin_Type +type Error + ## Creates a new dataflow error containing the provided payload. + + Arguments: + - payload: The contents of the dataflow error to be created. + + > Example + Throw a dataflow error containing the text "Oops". + + Error.throw "Oops" + throw : Any -> Error + throw payload = @Builtin_Method "Error.throw" + + ## PRIVATE + + Executes the provided handler on a dataflow error, or executes as + identity on a non-error value. + + Arguments: + - handler: The function to call on this if it is an error value. + catch_primitive : (Error -> Any) -> Any + catch_primitive self handler = @Builtin_Method "Error.catch_primitive" + + ## PRIVATE + UNSTABLE + + Returns a textual representation of the stack trace attached to an error. + get_stack_trace_text : Text + get_stack_trace_text self = @Builtin_Method "Error.get_stack_trace_text" + + ## Converts an error to a corresponding textual representation. + + > Example + Converting a thrown error to text. + + Error.throw "foo" . to_text + to_text : Text + to_text self = @Builtin_Method "Error.to_text" + + ## UNSTABLE + + Returns a human-readable text representing this error. + to_display_text : Text + to_display_text self = "Error: " + (self.catch Any .to_display_text) + + ## Executes the provided handler on an error, or returns the value unchanged. + + Arguments: + - error_type: The type of error to handle. Defaults to `Any` to handle + all errors. + - handler: The function to call on this if it is an error value of a + matching type. By default this is identity. + + > Example + Catching an `Illegal_Argument` and returning its message. + + from Standard.Base import all + + example_catch = + error = Error.throw (Illegal_Argument.Error "My message") + error.catch Illegal_Argument.Error (err -> err.message) + + > Example + Catching any dataflow error and turning it into a regular value. + + from Standard.Base import all + + example_catch = + error = Error.throw 42 + error.catch == 42 + catch : Any -> (Error -> Any) -> Any + catch self (error_type = Any) (handler = x->x) = + self.catch_primitive error_value-> + case error_value.is_a error_type of + True -> handler error_value + False -> self + + ## Transforms an error. + + Arguments: + - f: The function used to transform the error. + + If `self` is a non-error value it is returned unchanged. However, if `self` + is an error, the error is transformed using the provided function + + > Example + Transforming an error value. + + import Standard.Examples + + example_map_error = + map = Examples.map + map.get 10 . 
map_error (_ -> "The element 10 was not found.") + map_error : (Error -> Error) -> Any + map_error self f = self.catch Any (x -> Error.throw (f x)) + + ## ADVANCED + UNSTABLE + + Returns the attached stack trace of the error. + + The ordering of the resulting vector is such that the top stack frame is the + first element. + stack_trace : Vector Stack_Trace_Element + stack_trace self = + Panic.get_attached_stack_trace self + + ## Checks if `self` is an error. + + > Example + Checking if the value 1 is an error. + + 1.is_error + is_error : Boolean + is_error self = True + + ## PRIVATE + TODO this is a kludge until we have proper eigentypes and statics. + Allows to check equality of the `Error` type with itself. + == self that = if Meta.is_error self then self else + if Meta.is_error that then that else + Meta.is_same_object self that diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Error/Common.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Error/Common.enso index c918ebaddeed..f5e163f79b53 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Error/Common.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Error/Common.enso @@ -1,181 +1,18 @@ -from Standard.Base import all -import Standard.Base.Runtime +import project.Data.Text.Text +import project.Error.Error +import project.Meta +import project.Nothing.Nothing +import project.Panic.Panic polyglot java import java.lang.IllegalArgumentException -polyglot java import java.lang.Throwable -## A type representing dataflow errors. - - A dataflow error in Enso is one that behaves like a standard value, and - hence represents erroneous states in a way that exists _within_ standard - control flow. - - ? Dataflow Errors or Panics - Whilst a Panic is useful for unrecoverable situations, most Enso APIs - are designed to use dataflow errors instead. As they exist within the - normal program control flow, they are able to be represented on the - Enso graph. -@Builtin_Type -type Error - ## Creates a new dataflow error containing the provided payload. - - Arguments: - - payload: The contents of the dataflow error to be created. - - > Example - Throw a dataflow error containing the text "Oops". - - Error.throw "Oops" - throw : Any -> Error - throw payload = @Builtin_Method "Error.throw" - - ## PRIVATE - - Executes the provided handler on a dataflow error, or executes as - identity on a non-error value. - - Arguments: - - handler: The function to call on this if it is an error value. - catch_primitive : (Error -> Any) -> Any - catch_primitive self handler = @Builtin_Method "Error.catch_primitive" - - ## PRIVATE - UNSTABLE - - Returns a textual representation of the stack trace attached to an error. - get_stack_trace_text : Text - get_stack_trace_text self = @Builtin_Method "Error.get_stack_trace_text" - - ## Converts an error to a corresponding textual representation. - - > Example - Converting a thrown error to text. - - Error.throw "foo" . to_text - to_text : Text - to_text self = @Builtin_Method "Error.to_text" - - ## UNSTABLE - - Returns a human-readable text representing this error. - to_display_text : Text - to_display_text self = "Error: " + (self.catch Any .to_display_text) - - ## Executes the provided handler on an error, or returns the value unchanged. - - Arguments: - - error_type: The type of error to handle. Defaults to `Any` to handle - all errors. - - handler: The function to call on this if it is an error value of a - matching type. By default this is identity. 
- - > Example - Catching an `Illegal_Argument_Error` and returning its message. - - from Standard.Base import all - - example_catch = - error = Error.throw (Illegal_Argument_Error_Data "My message") - error.catch Illegal_Argument_Error_Data (err -> err.message) - - > Example - Catching any dataflow error and turning it into a regular value. - - from Standard.Base import all - - example_catch = - error = Error.throw 42 - error.catch == 42 - catch : Any -> (Error -> Any) -> Any - catch self (error_type = Any) (handler = x->x) = - self.catch_primitive error_value-> - case error_value.is_a error_type of - True -> handler error_value - False -> self - - ## UNSTABLE - - Returns a display representation of the dataflow error on which it is called. - - > Example - Displaying a dataflow error. - - import Standard.Examples - - example_display = Examples.throw_error.to_default_visualization_data - to_default_visualization_data : Text - to_default_visualization_data self = self.catch Any .to_default_visualization_data - - ## UNSTABLE - - Returns a JSON representation of the dataflow error. - - > Example - Converting a dataflow error to JSON. - - import Standard.Examples - - example_to_json = Examples.throw_error.to_json - to_json : Json - to_json self = - error_type = ["type", "Error"] - caught = self.catch - error_content = ["content", caught.to_json] - error_message = ["message", caught.to_display_text] - Json.from_pairs [error_type, error_content, error_message] - - ## Transforms an error. - - Arguments: - - f: The function used to transform the error. - - If `self` is a non-error value it is returned unchanged. However, if `self` - is an error, the error is transformed using the provided function - - > Example - Transforming an error value. - - import Standard.Examples - - example_map_error = - map = Examples.map - map.get 10 . map_error (_ -> "The element 10 was not found.") - map_error : (Error -> Error) -> Any - map_error self f = self.catch Any (x -> Error.throw (f x)) - - ## ADVANCED - UNSTABLE - - Returns the attached stack trace of the error. - - The ordering of the resulting vector is such that the top stack frame is the - first element. - stack_trace : Vector Runtime.Stack_Trace_Element - stack_trace self = - Panic.get_attached_stack_trace self - - ## Checks if `self` is an error. - - > Example - Checking if the value 1 is an error. - - 1.is_error - is_error : Boolean - is_error self = True - - ## PRIVATE - TODO this is a kludge until we have proper eigentypes and statics. - Allows to check equality of the `Error` type with itself. - == self that = if Meta.is_error self then self else - if Meta.is_error that then that else - Meta.is_same_object self that - -# TODO Dubious constructor export -from project.Error.Common.Illegal_State_Error import all -from project.Error.Common.Illegal_State_Error export all - -type Illegal_State_Error +polyglot java import java.io.IOException +polyglot java import java.nio.file.AccessDeniedException +polyglot java import java.nio.file.NoSuchFileException +polyglot java import java.nio.file.FileAlreadyExistsException +polyglot java import java.lang.ClassCastException +type Illegal_State ## UNSTABLE A generic error that indicates that a given operation cannot be performed @@ -185,13 +22,15 @@ type Illegal_State_Error - message: the error message explaining why the operation cannot be performed. - cause: (optional) another error that is the cause of this one. 
- Illegal_State_Error_Data message cause=Nothing + Error message cause=Nothing -# TODO Dubious constructor export -from project.Error.Common.Illegal_Argument_Error import all -from project.Error.Common.Illegal_Argument_Error export all + ## PRIVATE -type Illegal_Argument_Error + Provides a human-readable representation of the encoding error. + to_display_text : Text + to_display_text self = "Illegal State: " + self.message + +type Illegal_Argument ## UNSTABLE @@ -201,12 +40,18 @@ type Illegal_Argument_Error Arguments: - message: the error message explaining why the argument is illegal. - cause: (optional) another error that is the cause of this one. - Illegal_Argument_Error_Data message cause=Nothing + Error message cause=Nothing + + ## PRIVATE + + Provides a human-readable representation of the encoding error. + to_display_text : Text + to_display_text self = "Illegal Argument: " + self.message ## PRIVATE Capture a Java IllegalArgumentException and rethrow handle_java_exception = - Panic.catch_java IllegalArgumentException handler=(cause-> Error.throw (Illegal_Argument_Error_Data cause.getMessage cause)) + Panic.catch_java IllegalArgumentException handler=(cause-> Error.throw (Illegal_Argument.Error cause.getMessage cause)) # TODO Dubious constructor export from project.Error.Common.Index_Out_Of_Bounds_Error import all @@ -230,270 +75,6 @@ type Index_Out_Of_Bounds_Error to_display_text self = "The index " + self.index.to_text + " is out of bounds in a collection of length " + self.length.to_text + "." -# TODO Dubious constructor export -from project.Error.Common.Wrapped_Dataflow_Error import all -from project.Error.Common.Wrapped_Dataflow_Error export all - -## PRIVATE - Wraps a dataflow error lifted to a panic, making possible to distinguish it - from other panics. -type Wrapped_Dataflow_Error - Wrapped_Dataflow_Error_Data payload - - ## PRIVATE - Throws the original error. - unwrap self = Error.throw self.payload - -# TODO Dubious constructor export -from project.Error.Common.Caught_Panic import all -from project.Error.Common.Caught_Panic export all - -@Builtin_Type -type Caught_Panic - ## A wrapper for a caught panic. - - Arguments: - - payload: the payload carried by the error. - - internal_original_exception (private): the original Java exception that is - the source of this panic. Only for internal use. To get the Java exception - from polyglot exceptions, match the `payload` on `Polyglot_Error` and - extract the Java object from there. - Caught_Panic_Data payload internal_original_exception - - ## Converts this caught panic into a dataflow error containing the same - payload and stack trace. - convert_to_dataflow_error : Error - convert_to_dataflow_error self = @Builtin_Method "Caught_Panic.convert_to_dataflow_error" - - ## Returns the stack trace of the caught panic. - stack_trace : Vector Runtime.Stack_Trace_Element - stack_trace self = - Panic.get_attached_stack_trace self - -## A panic is an error condition that is based _outside_ of the normal - program control flow. - - Panics "bubble up" through the program until they reach either an - invocation of Panic.recover Any or the program's main method. An unhandled - panic in main will terminate the program. - - ? Dataflow Errors or Panics - Panics are designed to be used for unrecoverable situations that need - to be handled through non-linear control flow mechanisms. -@Builtin_Type -type Panic - - ## Throws a new panic with the provided payload. - - Arguments: - - payload: The contents of the panic to be thrown. 
If the payload is a - `Caught_Panic` or a raw Java exception, instead of throwing a new panic - with it as a payload, the original exception is rethrown, preserving - its stacktrace. - - > Example - Throwing a panic containing the text "Oh no!". - - Panic.throw "Oh no!" - - > Example - Use together with `Panic.catch` to catch only specific types of errors - and rethrow any others, without affecting their stacktraces. - - Panic.catch Any (Panic.throw "foo") caught_panic-> case caught_panic.payload of - Illegal_Argument_Error_Data message _ -> "Illegal arguments were provided: "+message - other_panic -> Panic.throw other_panic - throw : Any -> Panic - throw payload = @Builtin_Method "Panic.throw" - - ## PRIVATE - Executes the provided action and if any panic was thrown, calls the - provided callback. - - If action executes successfully, the result of `Panic.catch Any` is the - result of that action. Otherwise, it is the result of the provided - handler callback, executed with the caught panic as its first argument. - - Arguments: - - action: The code to execute that potentially panics. - - handler: The callback to handle any panics. - catch_primitive : Any -> (Caught_Panic -> Any) -> Any - catch_primitive ~action handler = @Builtin_Method "Panic.catch_primitive" - - ## PRIVATE - - Returns a raw representation of the stack trace attached to the provided - throwable. It can be a dataflow error, a panic or a native Java exception. - You probably want `Panic.get_attached_stack_trace` instead. - primitive_get_attached_stack_trace : Throwable -> Array - primitive_get_attached_stack_trace throwable = @Builtin_Method "Panic.primitive_get_attached_stack_trace" - - ## ADVANCED - UNSTABLE - - Returns the attached stack trace of the given throwable. Can be used to get - an Enso friendly stack trace from native Java exceptions. - - The ordering of the resulting vector is such that the top stack frame is the - first element. - get_attached_stack_trace : Caught_Panic | Throwable -> Vector Runtime.Stack_Trace_Element - get_attached_stack_trace error = - throwable = case error of - Caught_Panic_Data _ internal_original_exception -> internal_original_exception - throwable -> throwable - prim_stack = Panic.primitive_get_attached_stack_trace throwable - stack_with_prims = Vector.from_polyglot_array prim_stack - stack_with_prims.map Runtime.wrap_primitive_stack_trace_element - - ## Takes any value, and if it is a dataflow error, throws it as a Panic, - otherwise, returns the original value unchanged. - - Arguments: - - value: The value to rethrow any errors on as a panic. - - > Example - Rethrowing a dataflow error as a panic. - - import Standard.Examples - - example_rethrow = Panic.rethrow Examples.throw_error - rethrow : (Any ! Any) -> Any - rethrow value = value.catch Any Panic.throw - - ## Executes the provided action and if a panic matching the provided type was - thrown, calls the provided callback. - - If action executes successfully, the result of `Panic.catch` is the result of - that action. Otherwise, if a matching panic is thrown from within the action, - the result is obtained by calling the provided handler callback. Any - non-matching panics are forwarded without changes. - - Arguments: - - panic_type: The expected panic type. It can either be an Enso type or a - Java class. If the Java class is provided, `Polyglot_Error` containing a - Java exception of this class will be matched. - - action: The code to execute that potentially panics. - - handler: The callback to handle the panics. 
The callback will be provided - with a `Caught_Panic` instance encapsulating the `payload` of the caught - panic and its stacktrace. - - > Example - Handling a specific type of panic. - - Panic.catch Illegal_Argument_Error_Data (Panic.throw (Illegal_Argument_Error_Data "Oh no!" Nothing)) error-> - "Caught an `Illegal_Argument_Error`: "+error.payload.message - - > Example - Handling any panic. - - Panic.catch Any (Panic.throw (Illegal_Argument_Error_Data "Oh no!" Nothing)) error-> - "Caught some panic!" - - > Example - Convert a string to an integer, catching the Java `NumberFormatException` - and converting it to a more Enso-friendly dataflow error. - - polyglot java import java.lang.Long - polyglot java import java.lang.NumberFormatException - parse str = - Panic.catch NumberFormatException (Long.parseLong str) caught_panic-> - Error.throw (Illegal_Argument_Error_Data "The provided string is not a valid number: "+caught_panic.payload.cause.getMessage) - catch : Any -> Any -> (Caught_Panic -> Any) -> Any - catch panic_type ~action handler = - Panic.catch_primitive action caught_panic-> - case Meta.get_polyglot_language panic_type == "java" of - False -> case caught_panic.payload.is_a panic_type of - True -> handler caught_panic - False -> Panic.throw caught_panic - True -> case caught_panic.payload of - Polyglot_Error_Data java_exception -> - case java_exception.is_a panic_type of - True -> handler caught_panic - False -> Panic.throw caught_panic - _ -> Panic.throw caught_panic - - ## Executes the provided action and if a Java exception matching the provided type was - thrown, calls the provided callback. - - Normally, Java exceptions are wrapped in a `Polyglot_Error` instance, so - using a `Panic.catch` requires unwrapping the error by calling - `caught_panic.payload.cause`. This helper function allows the handler to - work with the Java exception directly. The downside is that if the Java - exception is rethrown, it will be rethrown as a Java exception object - wrapped in an Enso panic. So if the handler needs to rethrow the original - exception preserving its shape and stacktrace, `Panic.catch` should still - be preferred.` - - > Example - Convert a string to an integer, catching the Java `NumberFormatException` - and converting it to a more Enso-friendly dataflow error. - - polyglot java import java.lang.Long - polyglot java import java.lang.NumberFormatException - parse str = - Panic.catch_java NumberFormatException (Long.parseLong str) java_exception-> - Error.throw (Illegal_Argument_Error_Data "The provided string is not a valid number: "+java_exception.getMessage) - catch_java : Any -> Any -> (Throwable -> Any) -> Any - catch_java panic_type ~action handler = - Panic.catch_primitive action caught_panic-> case caught_panic.payload of - Polyglot_Error_Data java_exception -> - case (panic_type == Any) || (java_exception.is_a panic_type) of - True -> handler java_exception - False -> Panic.throw caught_panic - _ -> Panic.throw caught_panic - - ## Executes the provided action and converts a possible panic matching any of - the provided types into a dataflow Error. - - If action executes successfully, the result of `Panic.recover` is the result - of that action. Otherwise, if it panicked with a type matching one of the - expected error types, that panic is returned as a dataflow error. Unexpected - panics are passed through as-is. it is the panic that was thrown after - conversion to a dataflow error. 
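A hedged sketch of the `recover` flow described above, adapted to the renamed constructors in this patch; it assumes `Panic.recover` now takes the `Illegal_Argument` type where the old example passed `Illegal_Argument_Error_Data`, and the `example_recover` name is illustrative:

    example_recover =
        Panic.recover Illegal_Argument (Panic.throw (Illegal_Argument.Error "Oh!" Nothing))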
- - Arguments: - - expected_types: The types of expected panics which should be recovered. - This can either be a Vector of types or a single type. - - action: The code to execute that potentially panics. - - > Example - Converting an expected panic to a dataflow error. - - Panic.recover Illegal_Argument_Error_Data (Panic.throw (Illegal_Argument_Error_Data "Oh!" Nothing)) - - > Example - Converting one of many expected panic types to a dataflow error. - - Panic.recover [Illegal_Argument_Error, Illegal_State_Error] (Panic.throw (Illegal_Argument_Error_Data "Oh!" Nothing)) - recover : (Vector Any | Any) -> Any -> Any - recover expected_types ~action = - types_to_check = case expected_types of - _ : Vector -> expected_types - _ -> [expected_types] - Panic.catch Any action caught_panic-> - is_matched = types_to_check.exists typ-> - caught_panic.payload.is_a typ - case is_matched of - True -> caught_panic.convert_to_dataflow_error - False -> Panic.throw caught_panic - - ## If a dataflow error had occurred, wrap it in a `Wrapped_Dataflow_Error` and promote to a Panic. - - Arguments: - - value: value to return if not an error, or rethrow as a Panic. - throw_wrapped_if_error : Any -> Any - throw_wrapped_if_error ~value = - if value.is_error then Panic.throw (Wrapped_Dataflow_Error_Data value.catch) else value - - ## Catch any `Wrapped_Dataflow_Error` Panic and rethrow it as a dataflow error. - - Arguments: - - action: The code to execute that potentially raised a Wrapped_Dataflow_Error. - handle_wrapped_dataflow_error : Any -> Any - handle_wrapped_dataflow_error ~action = - Panic.catch Wrapped_Dataflow_Error_Data action caught_panic-> - Error.throw caught_panic.payload.payload - # TODO Dubious constructor export from project.Error.Common.Syntax_Error import all from project.Error.Common.Syntax_Error export all @@ -599,10 +180,6 @@ type No_Such_Method_Error method_name self = Meta.meta self.symbol . name -# TODO Dubious constructor export -from project.Error.Common.Polyglot_Error import all -from project.Error.Common.Polyglot_Error export all - ## An error that occurred across a polyglot boundary. Arguments: @@ -709,18 +286,14 @@ from project.Error.Common.No_Such_Conversion_Error export all type No_Such_Conversion_Error No_Such_Conversion_Error_Data target that conversion -## TODO Dubious constructor export -from project.Error.Common.Unimplemented_Error import all -from project.Error.Common.Unimplemented_Error export all - ## UNSTABLE A type used to represent that something has not yet been implemented. Arguments: - message: The message describing what implementation is missing. -type Unimplemented_Error - Unimplemented_Error_Data message +type Unimplemented + Error message ## PRIVATE @@ -728,22 +301,22 @@ type Unimplemented_Error to_display_text : Text to_display_text self = "An implementation is missing: " + self.message -## ADVANCED + ## ADVANCED - A function that can be used to indicate that something hasn't been - implemented yet. + A function that can be used to indicate that something hasn't been + implemented yet. - Arguments: - - message: A description of what implementation is missing. + Arguments: + - message: A description of what implementation is missing. - > Example - Throwing an error to show that something is unimplemented. + > Example + Throwing an error to show that something is unimplemented. 
- import Standard.Base.Error.Common as Errors + import Standard.Base.Error.Common as Errors - example_unimplemented = Errors.unimplemented -unimplemented : Text -> Nothing -unimplemented message="" = Panic.throw (Unimplemented_Error_Data message) + example_unimplemented = Errors.Unimplemented.throw + throw : Text -> Nothing + throw message="" = Panic.throw (Unimplemented.Error message) type Time_Error @@ -775,6 +348,16 @@ type Unsupported_File_Type An error indicating that some elements are incomparable. type Incomparable_Values_Error + ## ADVANCED + Catches possible errors from comparing values and throws an + Incomparable_Values_Error if any occur. + handle_errors ~function = + handle t = Panic.catch t handler=(_-> Error.throw Incomparable_Values_Error) + handle ClassCastException <| + handle No_Such_Method_Error_Data <| + handle Type_Error_Data <| + handle Unsupported_Argument_Types_Data <| + function.catch Type_Error_Data handler=(_-> Error.throw Incomparable_Values_Error) ## Errors when reading or writing to a file. type File_Error @@ -804,3 +387,28 @@ type File_Error File_Error.IO_Error file msg -> msg.to_text + " (" + file.path + ")." File_Error.Already_Exists file -> "The file at "+file.path+" already exists." + ## PRIVATE + + Utility method for running an action with Java exceptions mapping. + handle_java_exceptions file ~action = + Panic.catch IOException action caught_panic-> + File_Error.wrap_io_exception file caught_panic.payload.cause + + ## PRIVATE + + Converts a Java `IOException` into its Enso counterpart. + wrap_io_exception file io_exception = + if io_exception.is_a NoSuchFileException then Error.throw (File_Error.Not_Found file) else + if io_exception.is_a FileAlreadyExistsException then Error.throw (File_Error.Already_Exists file) else + if io_exception.is_a AccessDeniedException then Error.throw (File_Error.IO_Error file "You do not have permission to access the file") else + Error.throw (File_Error.IO_Error file "An IO error has occurred: "+io_exception.to_text) + +## One or more byte sequences were not decodable using the Encoding. +type Encoding_Error + Error (message:Text) + + ## PRIVATE + + Provides a human-readable representation of the encoding error. + to_display_text : Text + to_display_text self = "Encoding_Error: " + self.message diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Error/Problem_Behavior.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Error/Problem_Behavior.enso index d980530d67ab..25912a256b48 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Error/Problem_Behavior.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Error/Problem_Behavior.enso @@ -1,11 +1,9 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Vector.Vector +import project.Error.Error import project.Warning.Warning -from project.Error.Common import Error - from project.Error.Problem_Behavior.Problem_Behavior import all -from project.Error.Problem_Behavior.Problem_Behavior export all ## Specifies how to handle problems. type Problem_Behavior diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Function.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Function.enso index 30ba1295c847..ed22a6612bc6 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Function.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Function.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Vector.Vector ## A function is any type that represents a not-yet evaluated computation. 
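To illustrate the `File_Error.handle_java_exceptions` helper added above: it runs a polyglot action and converts any Java `IOException` raised inside it into the matching `File_Error` value (for example, `NoSuchFileException` becomes `File_Error.Not_Found`, `AccessDeniedException` becomes an `IO_Error`). A minimal sketch; `guarded_read` and `perform_raw_read` are placeholders rather than functions from this patch:

    guarded_read file =
        File_Error.handle_java_exceptions file <|
            # Placeholder for a polyglot call that may throw java.io.IOException.
            perform_raw_read file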
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/IO.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/IO.enso index 5934d97e715d..82466041400f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/IO.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/IO.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Text.Text import project.Nothing.Nothing diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso index 5d44a766b741..716edd6b830a 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso @@ -1,14 +1,18 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Array.Array import project.Data.Boolean import project.Data.List.List import project.Data.Numbers import project.Data.Map.Map +import project.Data.Text.Text import project.Data.Vector.Vector +import project.Error.Error import project.Function import project.IO import project.Math +import project.Meta import project.Nothing.Nothing +import project.Panic.Panic import project.Polyglot import project.Polyglot.Java import project.Runtime @@ -26,14 +30,18 @@ import project.System.Process import project.System.Process.Exit_Code.Exit_Code import project.Warning.Warning -export project.Data.Any.Any +export project.Any.Any export project.Data.Array.Array export project.Data.List.List export project.Data.Map.Map +export project.Data.Text.Text export project.Data.Vector.Vector +export project.Error.Error export project.IO export project.Math +export project.Meta export project.Nothing.Nothing +export project.Panic.Panic export project.Polyglot export project.Polyglot.Java export project.Runtime @@ -60,7 +68,8 @@ import project.Data.Filter_Condition.Filter_Condition import project.Data.Index_Sub_Range.Index_Sub_Range import project.Data.Interval.Bound import project.Data.Interval.Interval -import project.Data.Json +import project.Data.Json.Json +import project.Data.Json.Extensions import project.Data.Locale.Locale import project.Data.Maybe.Maybe import project.Data.Noise @@ -68,11 +77,23 @@ import project.Data.Ordering.Natural_Order import project.Data.Ordering.Ordering import project.Data.Ordering.Sort_Direction.Sort_Direction import project.Data.Pair.Pair -import project.Data.Range +import project.Data.Range.Range +import project.Data.Range.Extensions import project.Data.Regression import project.Data.Statistics +import project.Data.Text.Case.Case import project.Data.Text.Case_Sensitivity.Case_Sensitivity +import project.Data.Text.Encoding.Encoding +import project.Data.Text.Extensions import project.Data.Text.Line_Ending_Style.Line_Ending_Style +import project.Data.Text.Location +import project.Data.Text.Matching_Mode +import project.Data.Text.Regex +import project.Data.Text.Regex.Regex_Mode.Regex_Mode +import project.Data.Text.Regex.Regex_Option.Regex_Option +import project.Data.Text.Regex_Matcher.Regex_Matcher +import project.Data.Text.Text_Matcher.Text_Matcher +import project.Data.Text.Text_Ordering.Text_Ordering import project.Data.Text.Text_Sub_Range.Text_Sub_Range import project.Data.Time.Date.Date import project.Data.Time.Date_Period.Date_Period @@ -81,10 +102,18 @@ import project.Data.Time.Day_Of_Week.Day_Of_Week import project.Data.Time.Day_Of_Week_From import project.Data.Time.Duration.Duration import project.Data.Time.Period.Period +import project.Data.Text.Span.Span +import project.Data.Text.Span.Utf_16_Span import 
project.Data.Time.Time_Of_Day.Time_Of_Day import project.Data.Time.Time_Period.Time_Period import project.Data.Time.Time_Zone.Time_Zone -import project.Error.Problem_Behavior +import project.Error.Problem_Behavior.Problem_Behavior +import project.Network.Extensions +import project.Network.HTTP.HTTP +import project.Network.HTTP.HTTP_Method.HTTP_Method +import project.Network.HTTP.HTTP_Status_Code.HTTP_Status_Code +import project.Network.HTTP.HTTP_Version.HTTP_Version +import project.Network.URI.URI import project.Random export project.Data @@ -92,15 +121,30 @@ export project.Data.Filter_Condition.Filter_Condition export project.Data.Index_Sub_Range.Index_Sub_Range export project.Data.Interval.Bound export project.Data.Interval.Interval +export project.Data.Json.Json +export project.Data.Json.Extensions export project.Data.Locale.Locale export project.Data.Maybe.Maybe export project.Data.Ordering.Natural_Order export project.Data.Ordering.Ordering export project.Data.Ordering.Sort_Direction.Sort_Direction export project.Data.Pair.Pair +export project.Data.Range.Range +export project.Data.Range.Extensions export project.Data.Regression +export project.Data.Text.Case.Case export project.Data.Text.Case_Sensitivity.Case_Sensitivity +export project.Data.Text.Encoding.Encoding +export project.Data.Text.Extensions export project.Data.Text.Line_Ending_Style.Line_Ending_Style +export project.Data.Text.Location +export project.Data.Text.Matching_Mode +export project.Data.Text.Regex +export project.Data.Text.Regex.Regex_Mode.Regex_Mode +export project.Data.Text.Regex.Regex_Option.Regex_Option +export project.Data.Text.Regex_Matcher.Regex_Matcher +export project.Data.Text.Text_Matcher.Text_Matcher +export project.Data.Text.Text_Ordering.Text_Ordering export project.Data.Text.Text_Sub_Range.Text_Sub_Range export project.Data.Time.Date.Date export project.Data.Time.Date_Period.Date_Period @@ -109,46 +153,31 @@ export project.Data.Time.Day_Of_Week.Day_Of_Week export project.Data.Time.Day_Of_Week_From export project.Data.Time.Duration.Duration export project.Data.Time.Period.Period +export project.Data.Text.Span.Span +export project.Data.Text.Span.Utf_16_Span export project.Data.Time.Time_Of_Day.Time_Of_Day export project.Data.Time.Time_Period.Time_Period export project.Data.Time.Time_Zone.Time_Zone +export project.Error.Problem_Behavior.Problem_Behavior +export project.Network.Extensions +export project.Network.HTTP.HTTP +export project.Network.HTTP.HTTP_Method.HTTP_Method +export project.Network.HTTP.HTTP_Status_Code.HTTP_Status_Code +export project.Network.HTTP.HTTP_Version.HTTP_Version +export project.Network.URI.URI export project.Random -from project.Data.Json export all hiding Json_Parse_Error, No_Such_Field, Marshalling_Error from project.Data.Noise export all hiding Noise, Generator, Deterministic_Random, Long, Random -from project.Data.Range export all hiding throw_zero_step_error from project.Data.Statistics export all hiding to_moment_statistic, wrap_java_call, calculate_correlation_statistics, calculate_spearman_rank, calculate_correlation_statistics_matrix, Moments, MomentStatistic, CountMinMax, CorrelationStatistics, Rank, ClassCastException, NullPointerException -from project.Error.Problem_Behavior export all from project.Data.Index_Sub_Range.Index_Sub_Range import First, Last +from project.Error.Problem_Behavior.Problem_Behavior import all +from project.System.File_Format.Plain_Text_Format import Plain_Text + from project.Data.Index_Sub_Range.Index_Sub_Range export First, Last +from 
project.Error.Problem_Behavior.Problem_Behavior export all +from project.System.File_Format.Plain_Text_Format export Plain_Text -# Not refactored modules below: -import project.Data.Text -import project.Data.Text.Case -import project.Data.Text.Encoding -import project.Data.Text.Extensions -import project.Data.Text.Matching -import project.Data.Text.Text_Matcher -import project.Data.Text.Regex_Matcher -import project.Data.Text.Text_Ordering -import project.Data.Text.Span -import project.Data.Text.Regex -import project.Data.Text.Regex.Regex_Mode +## ToDo: import project.Error.Common -import project.Meta - -export project.Data.Text.Case_Sensitivity -export project.Data.Text.Regex -export project.Data.Text.Regex.Regex_Mode -export project.Data.Text.Text_Ordering -export project.Data.Text.Text_Matcher -export project.Data.Text.Regex_Matcher -export project.Meta - -from project.Data.Text.Extensions export Text, Case, Location, Matching_Mode -from project.Data.Text.Matching export No_Matches_Found_Data -from project.Data.Text export all hiding Encoding, Span, Line_Ending_Style, Case_Sensitivity -from project.Data.Text.Encoding export Encoding, Encoding_Error -from project.Data.Text.Span export all from project.Error.Common export all diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Meta.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Meta.enso index 163879130036..87826dea096f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Meta.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Meta.enso @@ -1,9 +1,21 @@ -from Standard.Base import all hiding Java -import Standard.Base - -# TODO Dubious constructor export -from project.Meta.Atom import all -from project.Meta.Atom export all +import project.Any.Any +import project.Data.Array.Array +import project.Data.Numbers.Decimal +import project.Data.Numbers.Integer +import project.Data.Numbers.Number +import project.Data.Text.Text +import project.Data.Time.Date_Time.Date_Time +import project.Data.Time.Date.Date +import project.Data.Time.Duration.Duration +import project.Data.Time.Time_Of_Day.Time_Of_Day +import project.Data.Time.Time_Zone.Time_Zone +import project.Data.Vector.Vector +import project.Polyglot.Java + +import project.Error.Error as Base_Error +import project.Polyglot.Polyglot as Base_Polyglot + +from project.Data.Boolean import Boolean, True, False ## UNSTABLE ADVANCED @@ -13,11 +25,21 @@ from project.Meta.Atom export all Arguments: - value: The value of the atom in the meta representation. type Atom - Atom_Data value + Value value + + ## UNSTABLE + ADVANCED + + Returns a vector of field values of the given atom. + fields : Vector + fields self = Vector.from_polyglot_array (get_atom_fields self.value) + + ## UNSTABLE + ADVANCED -# TODO Dubious constructor export -from project.Meta.Constructor import all -from project.Meta.Constructor export all + Returns a constructor value of the given atom. + constructor : Constructor + constructor self = get_atom_constructor self.value ... ## UNSTABLE ADVANCED @@ -27,25 +49,42 @@ from project.Meta.Constructor export all Arguments: - value: The value of the constructor in the meta representation. type Constructor - Constructor_Data value + Value value -# TODO Dubious constructor export -from project.Meta.Primitive import all -from project.Meta.Primitive export all + ## UNSTABLE + ADVANCED + + Returns a vector of field names defined by a constructor. 
+ fields : Vector + fields self = Vector.from_polyglot_array (get_constructor_fields self.value) + + ## UNSTABLE + ADVANCED + + Returns the name of a constructor. + name : Text + name self = get_constructor_name self.value + + ## UNSTABLE + ADVANCED + + Creates a new atom of the given constructor. + + Arguments: + - fields: A vector of arguments to pass to the constructor when creating the + new atom. + new : Vector -> Any + new self fields = new_atom self.value fields.to_array ## UNSTABLE ADVANCED - A primitive value meta-prepresentation. + A primitive value meta-representation. Arguments: - value: The value of the primitive object in the meta representation. type Primitive - Primitive_Data value - -# TODO Dubious constructor export -from project.Meta.Unresolved_Symbol import all -from project.Meta.Unresolved_Symbol export all + Value value ## UNSTABLE ADVANCED @@ -55,7 +94,7 @@ from project.Meta.Unresolved_Symbol export all Arguments: - value: The value of the unresolved symbol in the meta representation. type Unresolved_Symbol - Unresolved_Symbol_Data value + Value value ## UNSTABLE ADVANCED @@ -83,10 +122,6 @@ type Unresolved_Symbol scope : Any scope self = get_unresolved_symbol_scope self.value -# TODO Dubious constructor export -from project.Meta.Error import all -from project.Meta.Error export all - ## UNSTABLE ADVANCED @@ -95,11 +130,7 @@ from project.Meta.Error export all Arguments: - value: The payload of the error. type Error - Error_Data value - -# TODO Dubious constructor export -from project.Meta.Polyglot import all -from project.Meta.Polyglot export all + Value value ## UNSTABLE ADVANCED @@ -109,10 +140,48 @@ from project.Meta.Polyglot export all Arguments: - value: The polyglot value contained in the meta representation. type Polyglot - Polyglot_Data value + Value value + + ## UNSTABLE + ADVANCED + + Returns the language with which a polyglot value is associated. + get_language : Language + get_language self = + lang_str = get_polyglot_language self.value + if lang_str == "java" then Language.Java else Language.Unknown +## UNSTABLE + ADVANCED + + Checks whether `self` represents the same underlying reference as `value`. + + Arguments: + - value_1: The first value. + - value_2: The second value. +Any.is_same_object_as : Any -> Boolean +Any.is_same_object_as self value = is_same_object self value + +## UNSTABLE + ADVANCED + + Checks if `self` is an instance of `typ`. + + Arguments: + - typ: The type to check `self` against. +Any.is_a : Any -> Boolean +Any.is_a self typ = is_a self typ + +## UNSTABLE + ADVANCED + + Checks if `self` is an instance of `typ`. + + Arguments: + - typ: The type to check `self` against. +Base_Error.is_a : Any -> Boolean +Base_Error.is_a self typ = typ==Any || typ==Base_Error -## Atom methods ## PRIVATE Gets the atom constructor instance for the provided atom. @@ -131,21 +200,6 @@ get_atom_constructor atom = @Builtin_Method "Meta.get_atom_constructor" get_atom_fields : Atom -> Array get_atom_fields atom = @Builtin_Method "Meta.get_atom_fields" -## UNSTABLE - ADVANCED - - Returns a vector of field values of the given atom. -Atom.fields : Vector -Atom.fields self = Vector.from_polyglot_array (get_atom_fields self.value) - -## UNSTABLE - ADVANCED - - Returns a constructor value of the given atom. -Atom.constructor : Constructor -Atom.constructor self = get_atom_constructor self.value ... - -# Polyglot methods ## PRIVATE Get a textual representation of the language from which an object comes. 
@@ -155,16 +209,6 @@ Atom.constructor self = get_atom_constructor self.value ... get_polyglot_language : Any -> Text get_polyglot_language value = @Builtin_Method "Meta.get_polyglot_language" -## UNSTABLE - ADVANCED - - Returns the language with which a polyglot value is associated. -Polyglot.get_language : Language -Polyglot.get_language self = - lang_str = get_polyglot_language self.value - if lang_str == "java" then Java else Unknown - -# UnresolvedSymbol methods ## PRIVATE Creates an unresolved symbol for the name name in the scope. @@ -193,7 +237,6 @@ get_unresolved_symbol_name symbol = @Builtin_Method "Meta.get_unresolved_symbol_ get_unresolved_symbol_scope : Unresolved_Symbol -> Any get_unresolved_symbol_scope symbol = @Builtin_Method "Meta.get_unresolved_symbol_scope" -# Constructor methods ## PRIVATE Get the fields of an atom constructor. @@ -222,32 +265,6 @@ get_constructor_name atom_constructor = @Builtin_Method "Meta.get_constructor_na new_atom : Constructor -> Array -> Atom new_atom constructor fields = @Builtin_Method "Meta.new_atom" -## UNSTABLE - ADVANCED - - Returns a vector of field names defined by a constructor. -Constructor.fields : Vector -Constructor.fields self = Vector.from_polyglot_array (get_constructor_fields self.value) - -## UNSTABLE - ADVANCED - - Returns the name of a constructor. -Constructor.name : Text -Constructor.name self = get_constructor_name self.value - -## UNSTABLE - ADVANCED - - Creates a new atom of the given constructor. - - Arguments: - - fields: A vector of arguments to pass to the constructor when creating the - new atom. -Constructor.new : Vector -> Any -Constructor.new self fields = new_atom self.value fields.to_array - - ## UNSTABLE ADVANCED @@ -255,13 +272,13 @@ Constructor.new self fields = new_atom self.value fields.to_array Arguments: - value: The runtime entity to get the meta representation of. -meta : Any -> Meta -meta value = if is_atom value then Atom_Data value else - if is_atom_constructor value then Constructor_Data value else - if is_polyglot value then Polyglot_Data value else - if is_unresolved_symbol value then Unresolved_Symbol_Data value else - if is_error value then Error_Data value.catch else - Primitive_Data value +meta : Any -> Atom | Constructor | Primitive | Polyglot | Unresolved_Symbol | Error +meta value = if is_atom value then Atom.Value value else + if is_atom_constructor value then Constructor.Value value else + if is_polyglot value then Polyglot.Value value else + if is_unresolved_symbol value then Unresolved_Symbol.Value value else + if is_error value then Error.Value value.catch else + Primitive.Value value ## UNSTABLE ADVANCED @@ -274,37 +291,6 @@ meta value = if is_atom value then Atom_Data value else is_same_object : Any -> Any -> Boolean is_same_object value_1 value_2 = @Builtin_Method "Meta.is_same_object" -## UNSTABLE - ADVANCED - - Checks whether `self` represents the same underlying reference as `value`. - - Arguments: - - value_1: The first value. - - value_2: The second value. -Any.is_same_object_as : Any -> Boolean -Any.is_same_object_as self value = is_same_object self value - -## UNSTABLE - ADVANCED - - Checks if `self` is an instance of `typ`. - - Arguments: - - typ: The type to check `self` against. -Any.is_a : Any -> Boolean -Any.is_a self typ = is_a self typ - -## UNSTABLE - ADVANCED - - Checks if `self` is an instance of `typ`. - - Arguments: - - typ: The type to check `self` against. 
-Base.Error.is_a : Any -> Boolean -Base.Error.is_a self typ = typ==Any || typ==Base.Error - ## UNSTABLE ADVANCED @@ -316,7 +302,7 @@ Base.Error.is_a self typ = typ==Any || typ==Base.Error is_a : Any -> Any -> Boolean is_a value typ = if is_same_object value typ then True else if typ == Any then True else - if is_error value then typ == Base.Error else + if is_error value then typ == Base_Error else case value of _ : Vector -> typ.is_same_object_as Vector _ : Array -> typ == Array @@ -330,28 +316,27 @@ is_a value typ = if is_same_object value typ then True else _ : Duration -> typ.is_same_object_as Duration _ : Time_Of_Day -> typ.is_same_object_as Time_Of_Day _ : Time_Zone -> typ.is_same_object_as Time_Zone - Base.Polyglot.Polyglot -> - typ==Base.Polyglot.Polyglot || java_instance_check value typ + Base_Polyglot -> typ==Base_Polyglot || java_instance_check value typ _ -> meta_val = meta value case meta_val of - Atom_Data _ -> if is_atom typ then typ == value else + _ : Atom -> if is_atom typ then typ == value else meta_val.constructor == typ - Constructor_Data _ -> + _ : Constructor -> meta_typ = meta typ case meta_typ of - Atom_Data _ -> meta_val == meta_typ.constructor - Constructor_Data _ -> meta_val == meta_typ + _ : Atom -> meta_val == meta_typ.constructor + _ : Constructor -> meta_val == meta_typ _ -> False - Error_Data _ -> typ == Error - Unresolved_Symbol_Data _ -> typ == Unresolved_Symbol + _ : Error -> typ == Error + _ : Unresolved_Symbol -> typ == Unresolved_Symbol _ -> False ## PRIVATE java_instance_check value typ = val_java = get_polyglot_language value == "java" typ_java = get_polyglot_language typ == "java" - val_java && typ_java && Base.Java.is_instance value typ + val_java && typ_java && Java.is_instance value typ ## UNSTABLE ADVANCED @@ -363,10 +348,6 @@ java_instance_check value typ = type_of : Any -> Any type_of value = @Builtin_Method "Meta.type_of" -# TODO Dubious constructor export -from project.Meta.Language import all -from project.Meta.Language export all - ## Represents a polyglot language. type Language diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Extensions.enso new file mode 100644 index 000000000000..165433df3401 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Extensions.enso @@ -0,0 +1,16 @@ +import project.Data.Text.Text +import project.Network.URI.URI +import project.Error.Common.Syntax_Error + +## Convert Text to a URI. + + Throws a `Syntax_Error` when `self` cannot be parsed as a URI. + + > Example + Parse URI text. + + import Standard.Base.Network.URI.Extensions + + example_parse = "http://example.com".to_uri +Text.to_uri : URI ! 
Syntax_Error +Text.to_uri self = URI.parse self diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso similarity index 52% rename from distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http.enso rename to distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso index 69b0d513bc49..c765396c9866 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso @@ -1,16 +1,28 @@ -from Standard.Base import all - -from Standard.Base.Error.Common import Time_Error - -import Standard.Base.Network.Http.Form -import Standard.Base.Network.Http.Header -import Standard.Base.Network.Http.Method -import Standard.Base.Network.Http.Request -import Standard.Base.Network.Http.Request.Body as Request_Body -import Standard.Base.Network.Http.Response -import Standard.Base.Network.Http.Version -import Standard.Base.Network.Proxy -import Standard.Base.Network.URI +import project.Any.Any +import project.Data.Json.Json +import project.Data.Pair.Pair +import project.Data.Text.Text +import project.Data.Time.Duration.Duration +import project.Data.Vector.Vector +import project.Error.Error +import project.Meta +import project.Nothing.Nothing +import project.Panic.Panic + +import project.Network.HTTP.Form.Form +import project.Network.HTTP.Form.Part_Value +import project.Network.HTTP.Request.Request +import project.Network.HTTP.Request_Body.Request_Body +import project.Network.HTTP.Response.Response + +import project.Network.HTTP.Header.Header +import project.Network.HTTP.HTTP_Method.HTTP_Method +import project.Network.HTTP.HTTP_Version.HTTP_Version +import project.Network.Proxy.Proxy +import project.Network.URI.URI + +from project.Data.Boolean import Boolean, True, False +from project.Error.Common import Time_Error polyglot java import java.net.http.HttpClient polyglot java import java.net.http.HttpRequest @@ -21,269 +33,76 @@ polyglot java import java.net.URI polyglot java import java.time.Duration as Java_Duration polyglot java import org.enso.base.Http_Utils -## Create a new instance of the HTTP client. - - Arguments: - - timeout: The length of time the client will wait for responses. - - follow_redirects: Whether or not the client should follow redirects. - - proxy: The proxy that the client should use, if any. - - version: The HTTP version supported by the client. - - > Example - Create an HTTP client with default settings. - Http.new - - > Example - Create an HTTP client with extended timeout. - Http.new timeout=(Duration.new seconds=30) - - > Example - Create an HTTP client with extended timeout and proxy settings. - - import Standard.Base.Data.Time.Duration.Duration - import Standard.Base.Network.Http - import Standard.Base.Network.Proxy - - example_new = - Http.new (timeout = (Duration.new seconds=30)) (proxy = Proxy.new "example.com" 8080) -new : Duration -> Boolean -> Proxy -> Http -new (timeout = (Duration.new seconds=10)) (follow_redirects = True) (proxy = Proxy.System) (version = Version.Http_1_1) = - Http_Data timeout follow_redirects proxy version - -## Send an Options request. - - Arguments: - - uri: The address to which the request will be sent. - - headers: Any headers for the options request. - - > Example - Send an Options request. NOTE: This example will make a network request. - - import Standard.Base.Network.Http - - example_options = Http.options "http://httpbin.org" -options : (Text | URI) -> Vector -> Response ! 
Request_Error -options uri (headers = []) = new.options uri headers - -## ALIAS GET Request - - Send a Get request. - - Arguments: - - uri: The address to which the request will be sent. - - headers: Any headers for the options request. - - > Example - Send a Get request. NOTE: This example will make a network request. - - import Standard.Base.Network.Http - - example_get = Http.get "http://httpbin.org/get" - - > Example - Send authenticated Get request (note the use of TLS). NOTE: This example - will make a network request. - - import Standard.Base.Network.Http - import Standard.Base.Network.Http.Header - - example_get = - headers = [Header.authorization_basic "user" "pass"] - Http.get "https://httpbin.org/basic-auth/user/pass" headers - - > Example - Download a file. NOTE: This example will make a network request. - - import Standard.Base.Network.Http - import Standard.Examples - - example_get = - out_file = Examples.scratch_file - res = Http.get "http://httpbin.org/bytes/1024" - res.body.to_file out_file -get : (Text | URI) -> Vector -> Response ! Request_Error -get uri (headers = []) = new.get uri headers - -## ALIAS Fetch Data - - Send the Get request and return the body. - - Arguments: - - uri: The address to which the request will be sent. - - headers: Any headers for the options request. - - > Example - Send a Get request and return the body. NOTE: This example will make a - network request. - - import Standard.Base.Network.Http - - example_fetch = Http.fetch "http://httpbin.org/get" - - > Example - Send authenticated Get request (note the use of TLS) and return the body. - NOTE: This example will make a network request. - - import Standard.Base.Network.Http - import Standard.Base.Network.Http.Header - - example_fetch = - headers = [Header.authorization_basic "user" "pass"] - Http.fetch "https://httpbin.org/basic-auth/user/pass" headers - - > Example - Download a file. NOTE: This example will make a network request. - - import Standard.Base.Network.Http - import Standard.Examples - - example_fetch = - out_file = Examples.scratch_file - res = Http.fetch "http://httpbin.org/bytes/1024" . to_file out_file -fetch : (Text | URI) -> Vector -> Response ! Request_Error -fetch uri (headers = []) = - new.get uri headers . body - -## Send a Head request. - - Arguments: - - uri: The address to which the request will be sent. - - headers: Any headers for the options request. - - > Example - Send a Head request. NOTE: This example will make a network request. - - import Standard.Base.Network.Http - - example_head = Http.head "http://httpbin.org" -head : (Text | URI) -> Vector -> Response ! Request_Error -head uri (headers = []) = new.options uri headers - -## ALIAS POST Request - - Send a Post request. - - Arguments: - - uri: The address to which the request will be sent. - - body: The contents of the post request. - - headers: Any headers for the options request. - - > Example - Send a Post request with binary data. NOTE: This example will make a - network request. - - import Standard.Base.Network.Http - import Standard.Base.Network.Http.Header - import Standard.Base.Network.Http.Request.Body - - example_post = - body = Body.Bytes "Hello".utf_8 - header_binary = Header.content_type "application/octet-stream" - Http.post "http://httpbin.org/post" body [header_binary] -post : (Text | URI) -> Request_Body -> Vector -> Response ! Request_Error -post uri body (headers = []) = new.post uri body headers +type HTTP + ## Create a new instance of the HTTP client. -## Send a Post request with the form. 
By default it will be encoded as - "application/x-www-form-urlencoded". To encode the form as - "multipart/form-data" add the appropriate header. - - Arguments: - - uri: The address to which the request will be sent. - - parts: A form, or a vector of parts for creating a form. - - headers: Any headers for the options request. - - > Example - Send a Post request with form. NOTE: This example will make a network - request. - - import Standard.Base.Network.Http - import Standard.Base.Network.Http.Form - - example_post_form = - form = [Form.text_field "name" "John Doe", Form.file_field "license.txt" (enso_project.root / "LICENSE")] - Http.post_form "http://httpbin.org/post" form - - > Example - Send a Post request with form encoded as "multipart/form-data". NOTE: This - example will make a network request. - - import Standard.Base.Network.Http - import Standard.Base.Network.Http.Form - import Standard.Base.Network.Http.Header - - example_post_form = - form = [Form.text_field "name" "John Doe", Form.file_field "license.txt" (enso_project.root / "LICENSE")] - Http.post_form "http://httpbin.org/post" form [Header.multipart_form_data] -post_form : (Text | URI) -> (Vector | Form) -> Vector -> Response ! Request_Error -post_form uri parts (headers = []) = new.post_form uri parts headers + Arguments: + - timeout: The length of time the client will wait for responses. + - follow_redirects: Whether or not the client should follow redirects. + - proxy: The proxy that the client should use, if any. + - version: The HTTP version supported by the client. -## Send a Post request with body with content-type "application/json". + > Example + Create an HTTP client with default settings. + HTTP.new - Arguments: - - uri: The address to which the request will be sent. - - body_json: The json for the body. - - headers: Any headers for the options request. + > Example + Create an HTTP client with extended timeout. + HTTP.new timeout=(Duration.new seconds=30) - > Example - Send a Post request with json data. NOTE: This example will make a network - request. + > Example + Create an HTTP client with extended timeout and proxy settings. - import Standard.Base.Network.Http + import Standard.Base.Data.Time.Duration.Duration + import Standard.Base.Network.HTTP.HTTP + import Standard.Base.Network.Proxy.Proxy - example_post_json = - json = Json.parse '{"key":"val"}' - Http.post_json "http://httpbin.org/post" json -post_json : (Text | URI) -> Json -> Vector -> Response ! Request_Error -post_json uri body_json (headers = []) = new.post_json uri body_json headers + example_new = + HTTP.new (timeout = (Duration.new seconds=30)) (proxy = Proxy.new "example.com" 8080) + new : Duration -> Boolean -> Proxy -> HTTP_Version -> HTTP + new (timeout = (Duration.new seconds=10)) (follow_redirects = True) (proxy = Proxy.System) (version = HTTP_Version.HTTP_1_1) = + HTTP.Value timeout follow_redirects proxy version -## ALIAS PUT Request - Send a Put request. + ## ALIAS Fetch Data - Arguments: - - uri: The address to which the request will be sent. - - body: The contents of the put request. - - headers: Any headers for the options request. + Send the Get request and return the body. - > Example - Send a Put request with binary data. NOTE: This example will make a - network request. + Arguments: + - uri: The address to which the request will be sent. + - headers: Any headers for the options request. 
- import Standard.Base.Network.Http - import Standard.Base.Network.Http.Header - import Standard.Base.Network.Http.Request.Body + > Example + Send a Get request and return the body. NOTE: This example will make a + network request. - example_post = - body = Body.Bytes "Hello".utf_8 - header_binary = Header.content_type "application/octet-stream" - Http.put "http://httpbin.org/put" body [header_binary] -put : (Text | URI) -> Request_Body -> Vector -> Response ! Request_Error -put uri body (headers = []) = new.put uri body headers + import Standard.Base.Network.HTTP.HTTP -## Send a Put request with body with content-type "application/json". + example_fetch = HTTP.fetch "http://httpbin.org/get" - Arguments: - - uri: The address to which the request will be sent. - - body_json: The JSON for the body of the put request. - - headers: Any headers for the options request. + > Example + Send authenticated Get request (note the use of TLS) and return the body. + NOTE: This example will make a network request. - > Example - Send a Put request with json data. NOTE: This example will make a - network request. + import Standard.Base.Network.HTTP.HTTP + import Standard.Base.Network.HTTP.Header.Header - import Standard.Base.Network.Http + example_fetch = + headers = [Header.authorization_basic "user" "pass"] + HTTP.fetch "https://httpbin.org/basic-auth/user/pass" headers - example_post_json = - json = Json.parse '{"key":"val"}' - HTTP.put_json "http://httpbin.org/put" json -put_json : (Text | URI) -> (Text | Json) -> Vector -> Response ! Request_Error -put_json uri body_json (headers = []) = new.put_json uri body_json headers + > Example + Download a file. NOTE: This example will make a network request. -## TODO Dubious constructor export -from project.Network.Http.Http import all -from project.Network.Http.Http export all + import Standard.Base.Network.HTTP.HTTP + import Standard.Examples -type Http + example_fetch = + out_file = Examples.scratch_file + res = HTTP.fetch "http://httpbin.org/bytes/1024" . to_file out_file + fetch : (Text | URI) -> Vector -> Response ! Request_Error + fetch uri (headers = []) = + HTTP.new.get uri headers . body ## PRIVATE @@ -294,7 +113,7 @@ type Http - follow_redirects: Whether or not the client should follow redirects. - proxy: The proxy that the client should use, if any. - version: The HTTP version supported by the client. - Http_Data timeout follow_redirects proxy version + Value timeout follow_redirects proxy version ## Send an Options request. @@ -306,8 +125,10 @@ type Http Send an Options request. NOTE: This example will make a network request. + import Standard.Base.Network.HTTP.HTTP import Standard.Examples + static_options = HTTP.new.options "http://httpbin.org" example_options = Examples.http_client.options "http://httpbin.org" options : (Text | URI) -> Vector -> Response ! Request_Error options self uri (headers = []) = @@ -331,7 +152,7 @@ type Http Send authenticated Get request (note the use of TLS). NOTE: This example will make a network request. - import Standard.Base.Network.Http.Header + import Standard.Base.Network.HTTP.Header.Header import Standard.Examples example_get = @@ -382,12 +203,12 @@ type Http Send a Post request with binary data. NOTE: This example will make a network request. 
- import Standard.Base.Network.Http.Header - import Standard.Base.Network.Http.Request.Body + import Standard.Base.Network.HTTP.Header.Header + import Standard.Base.Network.HTTP.Request_Body.Request_Body import Standard.Examples example_post = - body = Body.Bytes "Hello".utf_8 + body = Request_Body.Bytes "Hello".utf_8 Examples.http_client.post "http://httpbin.org/post" body [header_binary] post : (Text | URI) -> Request_Body -> Vector -> Response ! Request_Error post self uri body (headers = []) = @@ -408,7 +229,7 @@ type Http Send a Post request with form. NOTE: This example will make a network request. - import Standard.Base.Network.Http.Form + import Standard.Base.Network.HTTP.Form.Form import Standard.Examples example_post_form = @@ -419,8 +240,8 @@ type Http Send a Post request with form encoded as "multipart/form-data". NOTE: This example will make a network request. - import Standard.Base.Network.Http.Form - import Standard.Base.Network.Http.Header + import Standard.Base.Network.HTTP.Form.Form + import Standard.Base.Network.HTTP.Header.Header import Standard.Examples example_post_form = @@ -429,7 +250,7 @@ type Http post_form : (Text | URI) -> (Vector | Form) -> Vector -> Response ! Request_Error post_form self uri parts (headers = []) = new_headers = [Header.application_x_www_form_urlencoded] - req = Request.post uri (Request_Body.Form parts.to_form) new_headers . with_headers headers + req = Request.post uri (Request_Body.Form (Form.new parts)) new_headers . with_headers headers self.request req ## Send a Post request with body with content-type "application/json". @@ -465,12 +286,12 @@ type Http > Example Send a Put request with binary data. - import Standard.Base.Network.Http.Header - import Standard.Base.Network.Http.Request.Body + import Standard.Base.Network.HTTP.Header.Header + import Standard.Base.Network.HTTP.Request_Body.Request_Body import Standard.Examples example_put = - body = Body.Bytes "contents".utf_8 + body = Request_Body.Bytes "contents".utf_8 Examples.http_client.put "http://httpbin.org/post" body [header_binary] put : (Text | URI) -> Request_Body -> Vector -> Response ! Request_Error put self uri body (headers = []) = @@ -525,12 +346,12 @@ type Http Send a Get request with headers. NOTE: This example will send a network request. - import Standard.Base.Network.Http - import Standard.Base.Network.Http.Method - import Standard.Base.Network.Http.Request + import Standard.Base.Network.HTTP.HTTP + import Standard.Base.Network.HTTP.HTTP_Method.HTTP_Method + import Standard.Base.Network.HTTP.Request.Request example_request = - req = Request.new Method.Get "http://httpbin.org/get" . with_header "X-Trace-Id" "00000" + req = Request.new HTTP_Method.Get "http://httpbin.org/get" . with_header "X-Trace-Id" "00000" res = Examples.http_client.request req res.body @@ -538,9 +359,9 @@ type Http Open a connection and send a Post request with form. NOTE: This example will send a network request. - import Standard.Base.Network.Http.Form - import Standard.Base.Network.Http.Request - import Standard.Base.Network.Http.Request.Body + import Standard.Base.Network.HTTP.Form.Form + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.HTTP.Request_Body.Request_Body import Standard.Examples example_request = @@ -554,29 +375,29 @@ type Http Send a Post request with urlencoded form data. NOTE: This example will send a network request. 
- import Standard.Base.Network.Http.Form - import Standard.Base.Network.Http.Request - import Standard.Base.Network.Http.Request.Body + import Standard.Base.Network.HTTP.Form.Form + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.HTTP.Request_Body.Request_Body import Standard.Examples example_request = form = [Form.text_field "name" "John Doe", Form.file_field "license.txt" (enso_project.root / "LICENSE")] - req = Request.post "http://httpbin.org/post" Body.Empty . with_form form + req = Request.post "http://httpbin.org/post" Request_Body.Empty . with_form form Examples.http_client.request req > Example Send a Post request with form encoded as "multipart/form-data". NOTE: This example will send a network request. - import Standard.Base.Network.Http.Form - import Standard.Base.Network.Http.Header - import Standard.Base.Network.Http.Request - import Standard.Base.Network.Http.Request.Body + import Standard.Base.Network.HTTP.Form.Form + import Standard.Base.Network.HTTP.Header.Header + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.HTTP.Request_Body.Request_Body import Standard.Examples example_request = form = [Form.text_field "name" "John Doe", Form.file_field "license.txt" (enso_project.root / "LICENSE")] - req = Request.post "http://httpbin.org/post" Body.Empty + req = Request.post "http://httpbin.org/post" Request_Body.Empty with_form = req.with_form form with_headers = with_form.with_headers [Header.multipart_form_data] Examples.http_client.request with_headers @@ -586,20 +407,20 @@ type Http example will send a network request. import Standard.Base.Data.Time.Duration.Duration - import Standard.Base.Network.Http - import Standard.Base.Network.Http.Form - import Standard.Base.Network.Http.Method - import Standard.Base.Network.Http.Request + import Standard.Base.Network.HTTP.HTTP + import Standard.Base.Network.HTTP.Form.Form + import Standard.Base.Network.HTTP.HTTP_Method.HTTP_Method + import Standard.Base.Network.HTTP.Request.Request example_request = form = [Form.text_field "name" "John Doe"] - req = Request.new Method.Post "http://httpbin.org/post" . with_form form - http = Http.new (timeout = (Duration.new seconds=30)) + req = Request.new HTTP_Method.Post "http://httpbin.org/post" . with_form form + http = HTTP.new (timeout = (Duration.new seconds=30)) http.request req request : Request -> Response ! 
Request_Error request self req = handle_request_error = - Panic.catch_java Any handler=(err-> Error.throw (Request_Error_Data 'IllegalArgumentException' err.getMessage)) + Panic.catch_java Any handler=(err-> Error.throw (Request_Error.Error 'IllegalArgumentException' err.getMessage)) Panic.recover Any <| handle_request_error <| body_publishers = HttpRequest.BodyPublishers builder = HttpRequest.newBuilder @@ -620,15 +441,15 @@ type Http add_multipart form = body_builder = Http_Utils.multipart_body_builder form.parts.map part-> case part.value of - Form.Part_Text text -> body_builder.add_part_text part.key text - Form.Part_File file -> body_builder.add_part_file part.key file.path + Part_Value.Text text -> body_builder.add_part_text part.key text + Part_Value.File file -> body_builder.add_part_file part.key file.path boundary = body_builder.get_boundary Pair.new (req.with_headers [Header.multipart_form_data boundary]) body_builder.build add_urlencoded form = body_builder = Http_Utils.urlencoded_body_builder form.parts.map part-> case part.value of - Form.Part_Text text -> body_builder.add_part_text part.key text - Form.Part_File file -> body_builder.add_part_file part.key file.path + Part_Value.Text text -> body_builder.add_part_text part.key text + Part_Value.File file -> body_builder.add_part_file part.key file.path Pair.new req body_builder.build if req.headers.contains Header.multipart_form_data then add_multipart form else add_urlencoded form @@ -636,15 +457,7 @@ type Http builder.header Header.application_octet_stream.name Header.application_octet_stream.value Pair.new req (body_publishers.ofByteArray bytes.to_array) # method - req_http_method = case req.method of - Method.Options -> "OPTIONS" - Method.Get -> "GET" - Method.Head -> "HEAD" - Method.Post -> "POST" - Method.Put -> "PUT" - Method.Delete -> "DELETE" - Method.Trace -> "TRACE" - Method.Connect -> "CONNECT" + req_http_method = req.method.to_http_method_name case req_with_body of Pair.Value req body -> # set method and body @@ -653,7 +466,7 @@ type Http req.headers.map h-> builder.header h.name h.value http_request = builder.build body_handler = HttpResponse.BodyHandlers . ofByteArray - Response.Response_Data (self.internal_http_client.send http_request body_handler) + Response.Value (self.internal_http_client.send http_request body_handler) ## PRIVATE @@ -670,7 +483,7 @@ type Http builder.followRedirects redirect_policy # proxy case self.proxy of - Proxy.Proxy_Addr proxy_host proxy_port -> + Proxy.Address proxy_host proxy_port -> proxy_selector = ProxySelector.of (InetSocketAddress.new proxy_host proxy_port) builder.proxy proxy_selector Proxy.System -> @@ -680,26 +493,22 @@ type Http Nothing # version case self.version of - Version.Http_1_1 -> + HTTP_Version.HTTP_1_1 -> builder.version HttpClient.Version.HTTP_1_1 - Version.Http_2 -> + HTTP_Version.HTTP_2 -> builder.version HttpClient.Version.HTTP_2 # build http client builder.build -## TODO Dubious constructor export -from project.Network.Http.Request_Error import all -from project.Network.Http.Request_Error export all - ## UNSTABLE - An error when sending an Http request. + An error when sending an HTTP request. Arguments: - error_type: The type of the error. - message: The message for the error. 
type Request_Error - Request_Error_Data error_type message + Error error_type message ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Form.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Form.enso new file mode 100644 index 000000000000..8f34cb7a061b --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Form.enso @@ -0,0 +1,94 @@ +import project.Data.Text.Text +import project.Data.Vector.Vector + +# Helpers for creating different parts of the form. + +## The HTTP form containing a vector of parts. +type Form + ## Create a text field of a Form. + + Arguments: + - key: The key for the field in the form. + - val: The text for the textual field. + + > Example + Create a textual form field. + + import Standard.Base.Network.HTTP.Form.Form + + example_text_field = Form.text_field "Foo" "bar" + text_field : Text -> Text -> Part + text_field key val = Part.Value key (Part_Value.Text val) + + ## Create a file field of a Form. + + Arguments: + - key: The key for the field in the form. + - file: The textual file contents. + + > Example + Create a file form field. + + import Standard.Base.Network.HTTP.Form.Form + + example_file_field = Form.file_field "Foo" "My file contents" + file_field : Text -> Text -> Part + file_field key file = Part.Value key (Part_Value.File file) + + ## Create Form data from Parts. + + Arguments: + - parts: A vector of parts to make up the form. + + > Example + Create a new form. + + import Standard.Base.Network.HTTP.Form.Form + + example_form_new = Form.new [Form.text_field "foo" "bar"] + new : Vector -> Form + new parts = Form.Value parts + + ## PRIVATE + + A type representing form data. + + Arguments: + - parts: A vector of form segments. + Value parts + + ## Convert this to a Form. + + > Example + Convert to a form. + + import Standard.Base.Network.HTTP.Form.Form + import Standard.Base.Network.HTTP.Form.Part + import Standard.Base.Network.HTTP.Form.Part_Value + + example_to_form = Form.new [Part.Value "foo" (Part_Value.Text "bar")] . to_form + to_form : Form + to_form self = self + +## The key-value element of the form. +type Part + ## A form part. + + Arguments: + - key: The key for the form section. + - value: The value of the form section. + Value key value + +## The value of the form element. +type Part_Value + ## A textual value for a form part. + + Arguments: + - part_text: The text for the form part. + Text part_text + + ## A file value for a form part. + + Arguments: + - part_file: The file for the form part. + File part_file diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Method.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Method.enso new file mode 100644 index 000000000000..0eb48bef00c7 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Method.enso @@ -0,0 +1,38 @@ +import project.Data.Text.Text + +type HTTP_Method + ## The HTTP method "OPTIONS". + Options + + ## The HTTP method "GET". + Get + + ## The HTTP method "HEAD". + Head + + ## The HTTP method "POST". + Post + + ## The HTTP method "PUT". + Put + + ## The HTTP method "DELETE". + Delete + + ## The HTTP method "TRACE". + Trace + + ## The HTTP method "CONNECT". + Connect + + ## Convert to a Text of the HTTP method name.
+ to_http_method_name : Text + to_http_method_name self = case self of + HTTP_Method.Options -> "OPTIONS" + HTTP_Method.Get -> "GET" + HTTP_Method.Head -> "HEAD" + HTTP_Method.Post -> "POST" + HTTP_Method.Put -> "PUT" + HTTP_Method.Delete -> "DELETE" + HTTP_Method.Trace -> "TRACE" + HTTP_Method.Connect -> "CONNECT" diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Status_Code.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Status_Code.enso new file mode 100644 index 000000000000..93691dad4ca8 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Status_Code.enso @@ -0,0 +1,166 @@ +type HTTP_Status_Code + ## 100 Continue. + continue : HTTP_Status_Code + continue = HTTP_Status_Code.Value 100 + + ## 101 Switching Protocols. + switching_protocols : HTTP_Status_Code + switching_protocols = HTTP_Status_Code.Value 101 + + ## 200 OK. + ok : HTTP_Status_Code + ok = HTTP_Status_Code.Value 200 + + ## 201 Created. + created : HTTP_Status_Code + created = HTTP_Status_Code.Value 201 + + ## 202 Accepted. + accepted : HTTP_Status_Code + accepted = HTTP_Status_Code.Value 202 + + ## 203 Non-Authoritative Information. + non_authoritative_information : HTTP_Status_Code + non_authoritative_information = HTTP_Status_Code.Value 203 + + ## 204 No Content. + no_content : HTTP_Status_Code + no_content = HTTP_Status_Code.Value 204 + + ## 205 Reset Content. + reset_content : HTTP_Status_Code + reset_content = HTTP_Status_Code.Value 205 + + ## 206 Partial Content. + partial_content : HTTP_Status_Code + partial_content = HTTP_Status_Code.Value 206 + + ## 300 Multiple Choices. + multiple_choices : HTTP_Status_Code + multiple_choices = HTTP_Status_Code.Value 300 + + ## 301 Moved Permanently. + moved_permanently : HTTP_Status_Code + moved_permanently = HTTP_Status_Code.Value 301 + + ## 302 Found. + found : HTTP_Status_Code + found = HTTP_Status_Code.Value 302 + + ## 303 See Other. + see_other : HTTP_Status_Code + see_other = HTTP_Status_Code.Value 303 + + ## 304 Not Modified. + not_modified : HTTP_Status_Code + not_modified = HTTP_Status_Code.Value 304 + + ## 305 Use Proxy. + use_proxy : HTTP_Status_Code + use_proxy = HTTP_Status_Code.Value 305 + + ## 307 Temporary Redirect. + temporary_redirect : HTTP_Status_Code + temporary_redirect = HTTP_Status_Code.Value 307 + + ## 400 Bad Request. + bad_request : HTTP_Status_Code + bad_request = HTTP_Status_Code.Value 400 + + ## 401 Unauthorized. + unauthorized : HTTP_Status_Code + unauthorized = HTTP_Status_Code.Value 401 + + ## 402 Payment Required. + payment_required : HTTP_Status_Code + payment_required = HTTP_Status_Code.Value 402 + + ## 403 Forbidden. + forbidden : HTTP_Status_Code + forbidden = HTTP_Status_Code.Value 403 + + ## 404 Not Found. + not_found : HTTP_Status_Code + not_found = HTTP_Status_Code.Value 404 + + ## 405 Method Not Allowed. + method_not_allowed : HTTP_Status_Code + method_not_allowed = HTTP_Status_Code.Value 405 + + ## 406 Not Acceptable. + not_acceptable : HTTP_Status_Code + not_acceptable = HTTP_Status_Code.Value 406 + + ## 407 Proxy Authentication Required. + proxy_authentication_required : HTTP_Status_Code + proxy_authentication_required = HTTP_Status_Code.Value 407 + + ## 408 Request Timeout. + request_timeout : HTTP_Status_Code + request_timeout = HTTP_Status_Code.Value 408 + + ## 409 Conflict. + conflict : HTTP_Status_Code + conflict = HTTP_Status_Code.Value 409 + + ## 410 Gone. + gone : HTTP_Status_Code + gone = HTTP_Status_Code.Value 410 + + ## 411 Length Required. 
+ length_required : HTTP_Status_Code + length_required = HTTP_Status_Code.Value 411 + + ## 412 Precondition Failed. + precondition_failed : HTTP_Status_Code + precondition_failed = HTTP_Status_Code.Value 412 + + ## 413 Request Entity Too Large. + request_entity_too_large : HTTP_Status_Code + request_entity_too_large = HTTP_Status_Code.Value 413 + + ## 414 Request-URI Too Long. + request_uri_too_long : HTTP_Status_Code + request_uri_too_long = HTTP_Status_Code.Value 414 + + ## 415 Unsupported Media Type. + unsupported_media_type : HTTP_Status_Code + unsupported_media_type = HTTP_Status_Code.Value 415 + + ## 416 Requested Range Not Satisfiable. + requested_range_not_satisfiable : HTTP_Status_Code + requested_range_not_satisfiable = HTTP_Status_Code.Value 416 + + ## 417 Expectation Failed. + expectation_failed : HTTP_Status_Code + expectation_failed = HTTP_Status_Code.Value 417 + + ## 500 Internal Server Error. + internal_server_error : HTTP_Status_Code + internal_server_error = HTTP_Status_Code.Value 500 + + ## 501 Not Implemented. + not_implemented : HTTP_Status_Code + not_implemented = HTTP_Status_Code.Value 501 + + ## 502 Bad Gateway. + bad_gateway : HTTP_Status_Code + bad_gateway = HTTP_Status_Code.Value 502 + + ## 503 Service Unavailable. + service_unavailable : HTTP_Status_Code + service_unavailable = HTTP_Status_Code.Value 503 + + ## 504 Gateway Timeout + gateway_timeout : HTTP_Status_Code + gateway_timeout = HTTP_Status_Code.Value 504 + + ## 505 HTTP Version Not Supported. + http_version_not_supported : HTTP_Status_Code + http_version_not_supported = HTTP_Status_Code.Value 505 + + ## An HTTP status code. + + Arguments: + - code: The numeric representation of the code. + Value code diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Version.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Version.enso new file mode 100644 index 000000000000..d0925385d5bd --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/HTTP_Version.enso @@ -0,0 +1,6 @@ +type HTTP_Version + ## HTTP version 1.1. + HTTP_1_1 + + ## HTTP version 2. + HTTP_2 diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Header.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Header.enso new file mode 100644 index 000000000000..022decf4a894 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Header.enso @@ -0,0 +1,181 @@ +import project.Data.Boolean.Boolean +import project.Data.Text.Text + +polyglot java import org.enso.base.Http_Utils + +type Header + ## PRIVATE + + A type representing a header. + + Arguments: + - name: The header name. + - value: The header value. + Value name value + + ## Header equality. + + Arguments: + - that: The header to compare against. + + > Example + Compare two headers. + + import Standard.Base.Network.HTTP.Header.Header + + example_header_eq = + (Header.new "My_Header" "foo") == (Header.new "My_Header" "bar") + == : Header -> Boolean + == self that = (self.name.equals_ignore_case that.name) && self.value==that.value + + ## ALIAS Build a Header + + Create a new Header. + + Arguments: + - name: The name of the header. + - value: The value for the header. + + > Example + Create a new header called "My_Header". + + import Standard.Base.Network.HTTP.Header.Header + + example_new = Header.new "My_Header" "my header's value" + new : Text -> Text -> Header + new name value = Header.Value name value + + # Accept + + ## Create an "Accept" header. 
+ + Arguments: + - value: The value for the accept header. + + > Example + Create an accept header. + + import Standard.Base.Network.HTTP.Header.Header + + example_accept = Header.accept "my_field" + accept : Text -> Header + accept value = Header.Value "Accept" value + + ## Create a header that accepts all (`"*/*"`). + + > Example + Create an accept all header. + + import Standard.Base.Network.HTTP.Header.Header + + example_accept_all = Header.accept_all + accept_all : Header + accept_all = Header.accept "*/*" + + ## ALIAS Build an Auth Header + + Create an "Authorization" header. + + Arguments: + - value: The value for the authorization header. + + > Example + Create an auth header containing "foo". + + import Standard.Base.Network.HTTP.Header.Header + + example_auth = Header.authorization "foo" + authorization : Text -> Header + authorization value = Header.Value "Authorization" value + + ## Create HTTP basic auth header. + + Arguments: + - user: The username. + - pass: The password. + + > Example + Create basic auth header. + + import Standard.Base.Network.HTTP.Header.Header + + example_auth_basic = Header.authorization_basic "user" "pass" + authorization_basic : Text -> Text -> Header + authorization_basic user pass = + Header.authorization (Http_Utils.header_basic_auth user pass) + + # Content-Type + + ## Create "Content-Type" header. + + Arguments: + - value: The value for the content type header. + + > Example + Create a content type header containing "my_type". + + import Standard.Base.Network.HTTP.Header.Header + + example_content_type = Header.content_type "my_type" + content_type : Text -> Header + content_type value = Header.Value "Content-Type" value + + ## Header "Content-Type: application/json". + + > Example + Create a header with content type "application/json". + + import Standard.Base.Network.HTTP.Header.Header + + example_app_json = Header.application_json + application_json : Header + application_json = Header.content_type "application/json" + + ## Header "Content-Type: application/octet-stream". + + > Example + Create a header with content type "application/octet-stream". + + import Standard.Base.Network.HTTP.Header.Header + + example_app_octet = Header.application_octet_stream + application_octet_stream : Header + application_octet_stream = Header.content_type "application/octet-stream" + + ## Header "Content-Type: application/x-www-form-urlencoded". + + > Example + Create a header with content type "application/x-www-form-urlencoded". + + import Standard.Base.Network.HTTP.Header.Header + + example_app_x_www = Header.application_x_www_form_urlencoded + application_x_www_form_urlencoded : Header + application_x_www_form_urlencoded = Header.content_type "application/x-www-form-urlencoded" + + ## Header "Content-Type: multipart/form-data". + + Arguments: + - boundary: The text that delimits boundaries between the parts of the form. + + > Example + Create a header with content type "multipart/form-data". + + import Standard.Base.Network.HTTP.Header.Header + + example_multipart = Header.multipart_form_data + multipart_form_data : Text -> Header + multipart_form_data (boundary = "") = + if boundary == "" then Header.content_type "multipart/form-data" else + Header.content_type ("multipart/form-data; boundary=" + boundary) + + ## Header "Content-Type: text/plain". + + > Example + Create a header with the content type "text/plain". 
+ + import Standard.Base.Network.HTTP.Header.Header + + example_header_text_plain = Header.text_plain + text_plain : Header + text_plain = Header.content_type "text/plain" diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request.enso new file mode 100644 index 000000000000..cc44168f0902 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request.enso @@ -0,0 +1,240 @@ +import project.Any.Any +import project.Data.Json.Json +import project.Data.Pair.Pair +import project.Data.Text.Text +import project.Data.Vector.Vector +import project.Network.Extensions +import project.Network.HTTP.Form.Form +import project.Network.HTTP.Header.Header +import project.Network.HTTP.HTTP_Method.HTTP_Method +import project.Network.HTTP.Request_Body.Request_Body +import project.Network.URI.URI +import project.Panic.Panic + +from project.Data.Boolean import Boolean, True, False + +type Request + ## Create new HTTP request. + + Arguments: + - method: The HTTP method represented by the request. + - addr: The address for the request. + - headers: A vector containing headers for the request. + - body: The body of the request. + + > Example + Create a new post request with no headers and no body. + + import Standard.Base.Network.HTTP.HTTP_Method.HTTP_Method + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.URI.URI + + example_new = Request.new HTTP_Method.Post (URI.parse "http://example.com") + new : HTTP_Method -> (Text | URI) -> Vector -> Request_Body -> Request + new method addr (headers = []) (body = Request_Body.Empty) = + Panic.recover Any (Request.Value method (Panic.rethrow (addr.to_uri)) headers body) + + ## Create an Options request. + + Arguments: + - addr: The address for the request. + - headers: A vector containing headers for the request. + + > Example + Create a new options request. + + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.URI.URI + + example_options = Request.options (URI.parse "http://example.com") + options : (Text | URI) -> Vector -> Request + options addr (headers = []) = Request.new HTTP_Method.Options addr headers + + ## Create a Get request. + + Arguments: + - addr: The address for the request. + - headers: A vector containing headers for the request. + + > Example + Create a new get request. + + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.URI.URI + + example_get = Request.get (URI.parse "http://example.com") + get : (Text | URI) -> Vector -> Request + get addr (headers = []) = Request.new HTTP_Method.Get addr headers + + ## Create a Head request. + + Arguments: + - addr: The address for the request. + - headers: A vector containing headers for the request. + + > Example + Create a new head request. + + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.URI.URI + + example_head = Request.head (URI.parse "http://example.com") + head : (Text | URI) -> Vector -> Request + head addr (headers = []) = Request.new HTTP_Method.Head addr headers + + ## Create a Post request. + + Arguments: + - addr: The address for the request. + - body: The body for the request. + - headers: A vector containing headers for the request. + + > Example + Create a new post request.
+ + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.HTTP.Request_Body.Request_Body + import Standard.Base.Network.URI.URI + + example_post = Request.post (URI.parse "http://example.com") Request_Body.Empty + post : (Text | URI) -> Request_Body -> Vector -> Request + post addr body (headers = []) = Request.new HTTP_Method.Post addr headers body + + ## Create a Put request. + + Arguments: + - addr: The address for the request. + - body: The body for the request. + - headers: A vector containing headers for the request. + + > Example + Create a new put request. + + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.HTTP.Request_Body.Request_Body + import Standard.Base.Network.URI.URI + + example_put = Request.put (URI.parse "http://example.com") Request_Body.Empty + put : (Text | URI) -> Request_Body -> Vector -> Request + put addr body (headers = []) = Request.new HTTP_Method.Put addr headers body + + ## Create a Delete request. + + Arguments: + - addr: The address for the request. + - headers: A vector containing headers for the request. + + > Example + Create a new delete request. + + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.URI.URI + + example_delete = Request.delete (URI.parse "http://example.com") + delete : (Text | URI) -> Vector -> Request + delete addr (headers = []) = Request.new HTTP_Method.Delete addr headers + + ## PRIVATE + + A type representing an HTTP request. + + Arguments: + - method: The HTTP method represented by the request. + - uri: The URI for the request. + - headers: A vector containing headers for the request. + - body: The body of the request. + Value method uri headers body + + ## Sets the header for the request. + + Arguments: + - key: The name for the header in this request. + - val: The value for the header in this request. + + > Example + Create a request and add a new header to it. + + import Standard.Base.Network.HTTP.Request.Request + + example_with_header = Request.delete.with_header "Foo" "bar" + with_header : Text -> Text -> Request + with_header self key val = + new_header = Header.new key val + update_header p h = case p of + Pair.Value acc True -> Pair.new (acc + [h]) True + Pair.Value acc False -> + if h.name . equals_ignore_case key then Pair.new (acc + [new_header]) True else Pair.new (acc + [h]) False + new_headers = case self.headers.fold (Pair.new [] False) update_header of + Pair.Value acc True -> acc + Pair.Value acc False -> acc + [new_header] + Request.Value self.method self.uri new_headers self.body + + ## Sets the headers in the request. + + Arguments: + - new_headers: A vector of headers to put in the request. If `self` has + any headers they will be replaced with new_headers. + + > Example + Create a request and unset all the headers. + + import Standard.Base.Network.HTTP.Request.Request + + example_with_headers = Request.delete.with_headers [] + with_headers : Vector Header -> Request + with_headers self new_headers = + update_header req new_header = req.with_header new_header.name new_header.value + new_headers.fold self update_header + + ## Set the body for the request. + + Arguments: + - new_body: The body to insert into the request. + + > Example + Unsetting the body in a post request.
+ + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.HTTP.Request_Body.Request_Body + import Standard.Base.Network.URI.URI + + example_with_body = + Request.post (URI.parse "http://example.com") Request_Body.Empty |> _.with_body Request_Body.Empty + with_body : Request_Body -> Request + with_body self new_body = Request.Value self.method self.uri self.headers new_body + + ## Set the body text in the request encoded as "application/json". + + Arguments: + - json_body: The body to insert into the request. It must be textual + JSON. + + > Example + Setting the body in a post request to some JSON. + + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.HTTP.Request_Body.Request_Body + import Standard.Base.Network.URI.URI + + example_with_json = + Request.post (URI.parse "http://example.com") Request_Body.Empty |> _.with_json '{ "a": "b" }' + with_json : (Text | Json) -> Request + with_json self json_body = + new_body = Request_Body.Json json_body + Request.Value self.method self.uri self.headers new_body . with_headers [Header.application_json] + + ## Set body as vector of parts encoded as "application/x-www-form-urlencoded". + + Arguments: + - parts: The parts of the form, or a form itself. + + > Example + Create a delete request with an empty form. + + import Standard.Base.Network.HTTP.Request.Request + import Standard.Base.Network.URI.URI + + example_delete = + Request.delete (URI.parse "http://example.com") . with_form [] + with_form : (Vector | Form) -> Request + with_form self parts = + new_body = Request_Body.Form (Form.new parts) + Request.Value self.method self.uri self.headers new_body . with_headers [Header.application_x_www_form_urlencoded] diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Request/Body.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request_Body.enso similarity index 77% rename from distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Request/Body.enso rename to distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request_Body.enso index 31c7a26aaf4e..dceb2c40624b 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Request/Body.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request_Body.enso @@ -1,11 +1,5 @@ -from Standard.Base import all - -## TODO Dubious constructor export -from project.Network.Http.Request.Body.Body import all -from project.Network.Http.Request.Body.Body export all - ## The HTTP request body. -type Body +type Request_Body ## Empty request body. 
Empty diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Response.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso similarity index 70% rename from distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Response.enso rename to distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso index 2d809ef15929..537fbc6f211e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Response.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso @@ -1,25 +1,20 @@ -from Standard.Base import all - -import Standard.Base.Network.Http.Header -import Standard.Base.Network.Http.Response.Body as Response_Body -import Standard.Base.Network.Http.Status_Code +import project.Data.Json.Json +import project.Data.Vector.Vector +import project.Network.HTTP.Header.Header +import project.Network.HTTP.HTTP_Status_Code.HTTP_Status_Code +import project.Network.HTTP.Response_Body.Response_Body polyglot java import org.enso.base.Http_Utils -## TODO Dubious constructor export -from project.Network.Http.Response.Response import all -from project.Network.Http.Response.Response export all - type Response - ## PRIVATE A type representing an HTTP response. Arguments: - - internal_http_response: The internal represnetation of the HTTP + - internal_http_response: The internal representation of the HTTP response. - Response_Data internal_http_response + Value internal_http_response ## Get the response headers. @@ -45,7 +40,7 @@ type Response example_body = Examples.get_response.body body : Response_Body - body self = Response_Body.Body_Data (Vector.from_polyglot_array self.internal_http_response.body) + body self = Response_Body.Value (Vector.from_polyglot_array self.internal_http_response.body) ## Get the response status code. @@ -56,8 +51,8 @@ type Response import Standard.Examples example_code = Examples.get_response.code - code : Status_Code - code self = Status_Code.Status_Code_Data self.internal_http_response.statusCode + code : HTTP_Status_Code + code self = HTTP_Status_Code.Value self.internal_http_response.statusCode ## Convert the response to JSON. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Response/Body.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso similarity index 85% rename from distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Response/Body.enso rename to distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso index f8e3a2b903df..55f5783d3871 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Response/Body.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso @@ -1,16 +1,16 @@ -from Standard.Base import all +import project.Data.Json.Json +import project.Data.Text.Extensions +import project.Data.Text.Text +import project.System.File.File +import project.System.File.Write_Extensions -## TODO Dubious constructor export -from project.Network.Http.Response.Body.Body import all -from project.Network.Http.Response.Body.Body export all - -type Body +type Response_Body ## Response body Arguments: - bytes: The body of the response as binary data. - Body_Data bytes + Value bytes ## Convert response body to Text. 
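The renames above (`Response_Data` to `Response.Value`, `Status_Code` to `HTTP_Status_Code`, `Body` to `Response_Body`) combine as in the following usage sketch. It assumes the `Examples.get_response` helper already used by the doc examples in this file, and it compares the numeric `code` field of the status-code value rather than relying on any equality defined for `HTTP_Status_Code`:

    import Standard.Base.Network.HTTP.HTTP_Status_Code.HTTP_Status_Code
    import Standard.Examples

    example_check_status =
        response = Examples.get_response
        # `Response.code` now yields an `HTTP_Status_Code`; its numeric value is the `code` field.
        response.code.code == HTTP_Status_Code.ok.code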
@@ -52,4 +52,3 @@ type Body to_file self file = self.bytes.write_bytes file file - diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Form.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Form.enso deleted file mode 100644 index b51202121540..000000000000 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Form.enso +++ /dev/null @@ -1,119 +0,0 @@ -from Standard.Base import all - -## Create Form data from Parts. - - Arguments: - - parts: A vector of parts to make up the form. - - > Example - Create a new form - - import Standard.Base.Network.Http.Form - - example_form_new = Form.new (Form.text_field "foo" "bar") -new : Vector -> Form -new parts = Form_Data parts - -# Helpers for creating different parts of the form. - -## Create a text field of a Form. - - Arguments: - - key: The key for the field in the form. - - val: The text for the textual field. - - > Example - Create a textual form field. - - import Standard.Base.Network.Http.Form - - example_text_field = Form.text_field "Foo" "bar" -text_field : Text -> Text -> Part -text_field key val = Part_Data key (Part_Text val) - -## Create a file field of a Form. - - Arguments: - - key: The key for the field in the form. - - file: The textual file contents. - - > Example - Create a file form field. - - import Standard.Base.Network.Http.Form - - example_text_field = Form.file_field "Foo" "My file contents" -file_field : Text -> Text -> Part -file_field key file = Part_Data key (Part_File file) - -## TODO Dubious constructor export -from project.Network.Http.Form.Form import all -from project.Network.Http.Form.Form export all - -## The HTTP form containing a vector of parts. -type Form - - ## PRIVATE - - A type representing form data. - - Arguments: - - parts: A vector of form segments. - Form_Data parts - - ## Convert this to a Form. - - > Example - Convert to a form. - - import Standard.Base.Network.Http.Form - - example_to_form = Form.new [Part "foo" (Part_Text "bar")] . to_form - to_form : Form - to_form self = self - -## Convert Vector to a Form. - - > Example - Create a vector of form parts and convert it to a form. - - import Standard.Base.Network.Http.Form - - example_to_form = - part_1 = Form.text_field "Foo" "bar" - part_2 = Form.text_field "Baz" "quux" - [part_1, part_2].to_form -Vector.to_form self = Form_Data self - -## TODO Dubious constructor export -from project.Network.Http.Form.Part import all -from project.Network.Http.Form.Part export all - -## The key-value element of the form. -type Part - - ## A form part. - - Arguments: - - key: The key for the form section. - - value: The value of the form section. - Part_Data key value - -## TODO Dubious constructor export -from project.Network.Http.Form.Part_Value import all -from project.Network.Http.Form.Part_Value export all - -## The value of the form element. -type Part_Value - - ## A textual value for a form part. - - Arguments: - - part_text: The text for the form part. - Part_Text part_text - - ## A file value for a form part. - - Arguments: - - part_file: The file for the form part. 
- Part_File part_file diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Header.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Header.enso deleted file mode 100644 index b475818be392..000000000000 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Header.enso +++ /dev/null @@ -1,186 +0,0 @@ -from Standard.Base import all - -polyglot java import org.enso.base.Http_Utils - -## ALIAS Build a Header - - Create a new Header. - - Arguments: - - name: The name of the header. - - value: The value for the header. - - > Example - Create a new header called "My_Header". - - import Standard.Base.Network.Http.Header - - example_new = Header.new "My_Header" "my header's value" -new : Text -> Text -> Header -new name value = Header_Data name value - -# Accept - -## Create an "Accept" header. - - Arguments: - - value: The value for the accept header. - - > Example - Create an accept header. - - import Standard.Base.Network.Http.Header - - example_accept = Header.accept "my_field" -accept : Text -> Header -accept value = Header_Data "Accept" value - -## Create a header that accepts all (`"*/*"`). - - > Example - Create an accept all header. - - import Standard.Base.Network.Http.Header - - example_accept_all = Header.accept_all -accept_all : Header -accept_all = accept "*/*" - -## ALIAS Build an Auth Header - - Create an "Authorization" header. - - Arguments: - - value: The value for the authorization header. - - > Example - Create an auth header containing "foo". - - import Standard.Base.Network.Http.Header - - example_auth = Header.authorization "foo" -authorization : Text -> Header -authorization value = Header_Data "Authorization" value - -## Create HTTP basic auth header. - - Arguments: - - user: The username. - - pass: The password. - - > Example - Create basic auth header. - - import Standard.Base.Network.Http.Header - - example_auth_basic = Header.authorization_basic "user" "pass" -authorization_basic : Text -> Text -> Header -authorization_basic user pass = - authorization (Http_Utils.header_basic_auth user pass) - -# Content-Type - -## Create "Content-Type" header. - - Arguments: - - value: The value for the content type header. - - > Example - Create a content type header containing "my_type". - - import Standard.Base.Network.Http.Header - - example_content_type = Header.content_type "my_type" -content_type : Text -> Header -content_type value = Header_Data "Content-Type" value - -## Header "Content-Type: application/json". - - > Example - Create a header with content type "application/json". - - import Standard.Base.Network.Http.Header - - example_app_json = Header.application_json -application_json : Header -application_json = content_type "application/json" - -## Header "Content-Type: application/octet-stream". - - > Example - Create a header with content type "application/octet-stream". - - import Standard.Base.Network.Http.Header - - example_app_octet = Header.application_octet_stream -application_octet_stream : Header -application_octet_stream = content_type "application/octet-stream" - -## Header "Content-Type: application/x-www-form-urlencoded". - - > Example - Create a header with content type "application/x-www-form-urlencoded". - - import Standard.Base.Network.Http.Header - - example_app_x_www = Header.application_x_www_form_urlencoded -application_x_www_form_urlencoded : Header -application_x_www_form_urlencoded = content_type "application/x-www-form-urlencoded" - -## Header "Content-Type: multipart/form-data". 
- - Arguments: - - boundary: The text that delimits boundaries between the parts of the form. - - > Example - Create a header with content type "multipart/form-data". - - import Standard.Base.Network.Http.Header - - example_multipart = Header.multipart_form_data -multipart_form_data : Text -> Header -multipart_form_data (boundary = "") = - if boundary == "" then content_type "multipart/form-data" else - content_type ("multipart/form-data; boundary=" + boundary) - -## Header "Content-Type: text/plain". - - > Example - Create a header with the content type "text/plain". - - import Standard.Base.Network.Http.Header - - example_header_text_plain = Header.text_plain -text_plain : Header -text_plain = content_type "text/plain" - -## TODO Dubious constructor export -from project.Network.Http.Header.Header import all -from project.Network.Http.Header.Header export all - -type Header - - ## PRIVATE - - A type representing a header. - - Arguments: - - name: The header name. - - value: The header value. - Header_Data name value - - ## Header equality. - - Arguments: - - that: The header to compare against. - - > Example - Compare two headers. - - import Standard.Base.Network.Http.Header - - example_header_eq = - (Header.new "My_Header" "foo") == (Header.new "My_Header" "bar") - == : Header -> Boolean - == self that = (self.name.equals_ignore_case that.name) && self.value==that.value - diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Method.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Method.enso deleted file mode 100644 index 997de30501c1..000000000000 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Method.enso +++ /dev/null @@ -1,29 +0,0 @@ -## TODO Dubious constructor export -from project.Network.Http.Method.Method import all -from project.Network.Http.Method.Method export all - -type Method - - ## The HTTP method "OPTIONS". - Options - - ## The HTTP method "GET". - Get - - ## The HTTP method "HEAD". - Head - - ## The HTTP method "POST". - Post - - ## The HTTP method "PUT". - Put - - ## The HTTP method "DELETE". - Delete - - ## The HTTP method "TRACE". - Trace - - ## The HTTP method "CONNECT". - Connect diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Request.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Request.enso deleted file mode 100644 index 8d240030d487..000000000000 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Request.enso +++ /dev/null @@ -1,238 +0,0 @@ -from Standard.Base import all - -import Standard.Base.Network.Http.Form -import Standard.Base.Network.Http.Header -import Standard.Base.Network.Http.Method -import Standard.Base.Network.Http.Request.Body as Request_Body -import Standard.Base.Network.URI - -## Create new HTTP request. - - Arguments: - - method: The HTTP method represented by the request. - - addr: The address for the request. - - headers: A vector containing headers for the request. - - body: The body of the request. - - > Example - Create a new post request with no headers and no body. - - import Standard.Base.Network.Http.Method - import Standard.Base.Network.Http.Request - import Standard.Base.Network.URI - - example_new = Request.new Method.Post (URI.parse "http://example.com") -new : Method -> (Text | URI) -> Vector -> Request_Body -> Request -new method addr (headers = []) (body = Request_Body.Empty) = - Panic.recover Any (Request_Data method (Panic.rethrow (addr.to_uri)) headers body) - -## Create an Options request. 
- - Arguments: - - > Example - Create a new options request. - - import Standard.Base.Network.Http.Request - import Standard.Base.Network.URI - - example_options = Request.options (URI.parse "http://example.com") -options : (Text | URI) -> Vector -> Request -options addr (headers = []) = new Method.Options addr headers - -## Create a Get request. - - Arguments: - - addr: The address for the request. - - headers: A vector containing headers for the request. - - > Example - Create a new get request. - - import Standard.Base.Network.Http.Request - import Standard.Base.Network.URI - - example_get = Request.get (URI.parse "http://example.com") -get : (Text | URI) -> Vector -> Request -get addr (headers = []) = new Method.Get addr headers - -## Create a Head request. - - Arguments: - - addr: The address for the request. - - headers: A vector containing headers for the request. - - > Example - Create a new head request. - - import Standard.Base.Network.Http.Request - import Standard.Base.Network.URI - - example_head = Request.head (URI.parse "http://example.com") -head : (Text | URI) -> Vector -> Request -head addr (headers = []) = new Method.Head addr headers - -## Create a Post request. - - Arguments: - - addr: The address for the request. - - body: The body for the request. - - headers: A vector containing headers for the request. - - > Example - Create a new post request. - - import Standard.Base.Network.Http.Request - import Standard.Base.Network.Http.Request.Body as Request_Body - import Standard.Base.Network.URI - - example_post = Request.post (URI.parse "http://example.com") Request_Body.Empty -post : (Text | URI) -> Request_Body -> Vector -> Request -post addr body (headers = []) = new Method.Post addr headers body - -## Create a Put request. - - Arguments: - - addr: The address for the request. - - body: The body for the request. - - headers: A vector containing headers for the request. - - > Example - Create a new put request. - - import Standard.Base.Network.Http.Request - import Standard.Base.Network.Http.Request.Body as Request_Body - import Standard.Base.Network.URI - - example_put = Request.put (URI.parse "http://example.com") Request_Body.Empty -put : (Text | URI) -> Request_Body -> Vector -> Request -put addr body (headers = []) = new Method.Put addr headers body - -## Create a Delete request. - - Arguments: - - addr: The address for the request. - - headers: A vector containing headers for the request. - - > Example - Create a new delete request. - - import Standard.Base.Network.Http.Request - import Standard.Base.Network.URI - - example_delete = Request.delete (URI.parse "http://example.com") -delete : (Text | URI) -> Vector -> Request -delete addr (headers = []) = new Method.Delete addr headers - -## TODO Dubious constructor export -from project.Network.Http.Request.Request import all -from project.Network.Http.Request.Request export all - -type Request - - ## PRIVATE - - A type representing an HTTP request. - - Arguments: - - method: The HTTP method represented by the request. - - uri: The URI for the request. - - headers: A vector containing headers for the request. - - body: The body of the request. - Request_Data method uri headers body - - ## Sets the header for the request. - - Arguments: - - key: The name for the header in this request. - - val: The value for the header in this request. - - > Example - Create a request and add a new header to it. 
- - import Standard.Base.Network.Http.Request - - example_with_header = Request.delete.with_header "Foo" "bar" - with_header : Text -> Text -> Request - with_header self key val = - new_header = Header.new key val - update_header p h = case p of - Pair.Value acc True -> Pair.new (acc + [h]) True - Pair.Value acc False -> - if h.name . equals_ignore_case key then Pair.new (acc + [new_header]) True else Pair.new (acc + [h]) False - new_headers = case self.headers.fold (Pair.new [] False) update_header of - Pair.Value acc True -> acc - Pair.Value acc False -> acc + [new_header] - Request_Data self.method self.uri new_headers self.body - - ## Sets the headers in the request. - - Arguments: - - new_headers: A vector of headers to put in the request. If `self` has - any headers they will be replaced with new_headers. - - > Example - Create a request and unset all the headers. - - import Standard.Base.Network.Http.Request - - example_with_headers = Request.delete.with_headers [] - with_headers : [Header] -> Request - with_headers self new_headers = - update_header req new_header = req.with_header new_header.name new_header.value - new_headers.fold self update_header - - ## Set the body for the request. - - Arguments: - - new_body: The body to insert into the request. - - > Example - Unsetting the body in a post request. - - import Standard.Base.Network.Http.Request - import Standard.Base.Network.Http.Request.Body as Request_Body - import Standard.Base.Network.URI - - example_with_body = - Request.post (URI.parse "http://example.com") Request_Body.Empty |> _.with_body Request_Body.Empty - with_body : Request_Body -> Request - with_body self new_body = Request_Data self.method self.uri self.headers new_body - - ## Set the body text in the request encoded as "application/json". - - Arguments: - - json_body: The body to insert into the request. It must be textual - JSON. - - > Example - Setting the body in a post request to some JSON. - - import Standard.Base.Network.Http.Request - import Standard.Base.Network.Http.Request.Body as Request_Body - import Standard.Base.Network.URI - - example_with_json = - Request.post (URI.parse "http://example.com") Request_Body.Empty |> _.with_json '{ "a": "b" }' - with_json : (Text | Json) -> Request - with_json self json_body = - new_body = Request_Body.Json json_body - Request_Data self.method self.uri self.headers new_body . with_headers [Header.application_json] - - ## Set body as vector of parts encoded as "application/x-www-form-urlencoded". - - Arguments: - - parts: The parts of the form, or a form itself. - - > Example - Create a delete request with an empty form. - - import Standard.Base.Network.Http.Request - import Standard.Base.Network.URI - - example_delete = - Request.delete (URI.parse "http://example.com") . with_form [] - with_form : (Vector | Form) -> Request - with_form self parts = - new_body = Request_Body.Form parts.to_form - Request_Data self.method self.uri self.headers new_body . 
with_headers [Header.application_x_www_form_urlencoded] diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Status_Code.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Status_Code.enso deleted file mode 100644 index 543115452fd3..000000000000 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Status_Code.enso +++ /dev/null @@ -1,173 +0,0 @@ -from Standard.Base import all - -## TODO Dubious constructor export -from project.Network.Http.Status_Code.Status_Code import all -from project.Network.Http.Status_Code.Status_Code export all - -type Status_Code - - ## An HTTP status code. - - Arguments: - - code: The numeric representation of the code. - Status_Code_Data code - -## 100 Continue. -continue : Status_Code -continue = Status_Code_Data 100 - -## 101 Switching Protocols. -switching_protocols : Status_Code -switching_protocols = Status_Code_Data 101 - -## 200 OK. -ok : Status_Code -ok = Status_Code_Data 200 - -## 201 Created. -created : Status_Code -created = Status_Code_Data 201 - -## 202 Accepted. -accepted : Status_Code -accepted = Status_Code_Data 202 - -## 203 Non-Authoritative Information. -non_authoritative_information : Status_Code -non_authoritative_information = Status_Code_Data 203 - -## 204 No Content. -no_content : Status_Code -no_content = Status_Code_Data 204 - -## 205 Reset Content. -reset_content : Status_Code -reset_content = Status_Code_Data 205 - -## 206 Partial Content. -partial_content : Status_Code -partial_content = Status_Code_Data 206 - -## 300 Multiple Choices. -multiple_choices : Status_Code -multiple_choices = Status_Code_Data 300 - -## 301 Moved Permanently. -moved_permanently : Status_Code -moved_permanently = Status_Code_Data 301 - -## 302 Found. -found : Status_Code -found = Status_Code_Data 302 - -## 303 See Other. -see_other : Status_Code -see_other = Status_Code_Data 303 - -## 304 Not Modified. -not_modified : Status_Code -not_modified = Status_Code_Data 304 - -## 305 Use Proxy. -use_proxy : Status_Code -use_proxy = Status_Code_Data 305 - -## 307 Temporary Redirect. -temporary_redirect : Status_Code -temporary_redirect = Status_Code_Data 307 - -## 400 Bad Request. -bad_request : Status_Code -bad_request = Status_Code_Data 400 - -## 401 Unauthorized. -unauthorized : Status_Code -unauthorized = Status_Code_Data 401 - -## 402 Payment Required. -payment_required : Status_Code -payment_required = Status_Code_Data 402 - -## 403 Forbidden. -forbidden : Status_Code -forbidden = Status_Code_Data 403 - -## 404 Not Found. -not_found : Status_Code -not_found = Status_Code_Data 404 - -## 405 Method Not Allowed. -method_not_allowed : Status_Code -method_not_allowed = Status_Code_Data 405 - -## 406 Not Acceptable. -not_acceptable : Status_Code -not_acceptable = Status_Code_Data 406 - -## 407 Proxy Authentication Required. -proxy_authentication_required : Status_Code -proxy_authentication_required = Status_Code_Data 407 - -## 408 Request Timeout. -request_timeout : Status_Code -request_timeout = Status_Code_Data 408 - -## 409 Conflict. -conflict : Status_Code -conflict = Status_Code_Data 409 - -## 410 Gone. -gone : Status_Code -gone = Status_Code_Data 410 - -## 411 Length Required. -length_required : Status_Code -length_required = Status_Code_Data 411 - -## 412 Precondition Failed. -precondition_failed : Status_Code -precondition_failed = Status_Code_Data 412 - -## 413 Request Entity Too Large. -request_entity_too_large : Status_Code -request_entity_too_large = Status_Code_Data 413 - -## 414 Request-URI Too Long. 
-request_uri_too_long : Status_Code -request_uri_too_long = Status_Code_Data 414 - -## 415 Unsupported Media Type. -unsupported_media_type : Status_Code -unsupported_media_type = Status_Code_Data 415 - -## 416 Requested Range Not Satisfiable. -requested_range_not_satisfiable : Status_Code -requested_range_not_satisfiable = Status_Code_Data 416 - -## 417 Expectation Failed. -expectation_failed : Status_Code -expectation_failed = Status_Code_Data 417 - -## 500 Internal Server Error. -internal_server_error : Status_Code -internal_server_error = Status_Code_Data 500 - -## 501 Not Implemented. -not_implemented : Status_Code -not_implemented = Status_Code_Data 501 - -## 502 Bad Gateway. -bad_gateway : Status_Code -bad_gateway = Status_Code_Data 502 - -## 503 Service Unavailable. -service_unavailable : Status_Code -service_unavailable = Status_Code_Data 503 - -## 504 Gateway Timeout -gateway_timeout : Status_Code -gateway_timeout = Status_Code_Data 504 - -## 505 HTTP Version Not Supported. -http_version_not_supported : Status_Code -http_version_not_supported = Status_Code_Data 505 diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Version.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Version.enso deleted file mode 100644 index d3310c88dfca..000000000000 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Http/Version.enso +++ /dev/null @@ -1,11 +0,0 @@ -## TODO Dubious constructor export -from project.Network.Http.Version.Version import all -from project.Network.Http.Version.Version export all - -type Version - - ## HTTP version 1.1. - Http_1_1 - - ## HTTP version 2. - Http_2 diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Proxy.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Proxy.enso index a8786b81016d..3280478f3b44 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Proxy.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/Proxy.enso @@ -1,11 +1,22 @@ -from Standard.Base import all - -## TODO Dubious constructor export -from project.Network.Proxy.Proxy import all -from project.Network.Proxy.Proxy export all +import project.Data.Numbers.Integer +import project.Data.Text.Text ## Proxy settings. type Proxy + ## Create new proxy settings from a host and port. + + Arguments: + - host: The host address for the proxy. + - port: The port number for the proxy server on `host`. + + > Example + Create a new proxy running on localhost at port 80080. + + import Standard.Base.Network.Proxy.Proxy + + example_new = Proxy.new "localhost" 80800 + new : Text -> Integer -> Proxy + new host port=80 = Proxy.Address host port ## The proxy is disabled. None @@ -14,19 +25,4 @@ type Proxy System ## Use the provided proxy server. - Proxy_Addr proxy_host proxy_port - -## Create new proxy settings from a host and port. - - Arguments: - - host: The host address for the proxy. - - port: The port number for the proxy server on `host`. - - > Example - Create a new proxy running on localhost at port 80080. 
- - import Standard.Base.Network.Proxy - - example_new = Proxy.new "localhost" 80800 -new : Text -> Integer -> Proxy -new host port=80 = Proxy_Addr host port + Address proxy_host proxy_port diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI.enso index c21497f34f7a..9a9e912a9b1a 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI.enso @@ -1,48 +1,47 @@ -from Standard.Base import all +import project.Any.Any +import project.Data.Boolean.Boolean +import project.Data.Json.Json +import project.Data.Text.Text +import project.Error.Error +import project.Nothing.Nothing +import project.Panic.Panic -import Standard.Base.Network.URI.Internal +from project.Error.Common import Syntax_Error polyglot java import java.net.URI as Java_URI polyglot java import java.util.Optional -## ALIAS Get URI +## PRIVATE - Parse a URI from text. + Handle a nothing value. Arguments: - - text: The text to parse as a URI. + - value: The value that may possibly be nothing. +handle_nothing : Any -> Any ! Nothing +handle_nothing value = case value of + Nothing -> Error.throw Nothing + _ -> value - Throws a Syntax_Error when the text cannot be parsed as a URI. - - > Example - Parse URI text. - - import Standard.Base.Network.URI - - example_parse = URI.parse "http://example.com" -parse : Text -> URI ! Syntax_Error -parse text = - Panic.catch_java Any (URI_Data (Java_URI.create text)) java_exception-> - Error.throw (Syntax_Error_Data ("URI syntax error: " + java_exception.getMessage)) - -## Convert Text to a URI. +type URI + ## ALIAS Get URI - Throws a Syntax_Error when `self` cannot be parsed as a URI. + Parse a URI from text. - > Example - Parse URI text. + Arguments: + - text: The text to parse as a URI. - import Standard.Base.Network.URI + Throws a Syntax_Error when the text cannot be parsed as a URI. - example_parse = "http://example.com".to_uri -Text.to_uri : URI ! Syntax_Error -Text.to_uri self = parse self + > Example + Parse URI text. -## TODO Dubious constructor export -from project.Network.URI.URI import all -from project.Network.URI.URI export all + import Standard.Base.Network.URI.URI -type URI + example_parse = URI.parse "http://example.com" + parse : Text -> URI ! Syntax_Error + parse text = + Panic.catch_java Any (URI.Value (Java_URI.create text)) java_exception-> + Error.throw (Syntax_Error.Syntax_Error_Data ("URI syntax error: " + java_exception.getMessage)) ## PRIVATE @@ -50,7 +49,7 @@ type URI Arguments: - internal_uri: The internal representation of the URI. - URI_Data internal_uri + Value internal_uri ## Convert this to URI. @@ -72,7 +71,7 @@ type URI example_scheme = Examples.uri.scheme scheme : Text ! Nothing - scheme self = Internal.handle_nothing self.internal_uri.getScheme + scheme self = handle_nothing self.internal_uri.getScheme ## Get the user info part of this URI. @@ -83,7 +82,7 @@ type URI example_user_info = Examples.uri.user_info user_info : Text ! Nothing - user_info self = Internal.handle_nothing self.internal_uri.getUserInfo + user_info self = handle_nothing self.internal_uri.getUserInfo ## Get the host part of this URI. @@ -94,7 +93,7 @@ type URI example_host = Examples.uri.host host : Text ! Nothing - host self = Internal.handle_nothing self.internal_uri.getHost + host self = handle_nothing self.internal_uri.getHost ## Get the authority (user info and host) part of this URI. 
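As a small usage sketch of the consolidated `URI` module (the URI literal here is purely illustrative), the `parse` entry point and the accessors touched by these hunks fit together like this:

    import Standard.Base.Network.URI.URI

    example_uri_parts =
        # `parse` is now a static method on the `URI` type rather than a module-level function.
        uri = URI.parse "http://user@example.com:8080/docs?q=1"
        [uri.scheme, uri.user_info, uri.host, uri.port, uri.path, uri.query]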
@@ -105,7 +104,7 @@ type URI example_authority = Examples.uri.authority authority : Text ! Nothing - authority self = Internal.handle_nothing self.internal_uri.getAuthority + authority self = handle_nothing self.internal_uri.getAuthority ## Get the port part of this URI. @@ -118,7 +117,7 @@ type URI port : Text ! Nothing port self = port_number = self.internal_uri.getPort - Internal.handle_nothing <| + handle_nothing <| if port_number == -1 then Nothing else port_number.to_text ## Get the path part of this URI. @@ -130,7 +129,7 @@ type URI example_path = Examples.uri.path path : Text ! Nothing - path self = Internal.handle_nothing self.internal_uri.getPath + path self = handle_nothing self.internal_uri.getPath ## Get the query part of this URI. @@ -141,7 +140,7 @@ type URI example_query = Examples.uri.query query : Text ! Nothing - query self = Internal.handle_nothing self.internal_uri.getQuery + query self = handle_nothing self.internal_uri.getQuery ## Get the fragment part of this URI. @@ -152,37 +151,37 @@ type URI example_fragment = Examples.uri.fragment fragment : Text ! Nothing - fragment self = Internal.handle_nothing self.internal_uri.getFragment + fragment self = handle_nothing self.internal_uri.getFragment ## ADVANCED Get the unescaped user info part of this URI. raw_user_info : Text ! Nothing - raw_user_info self = Internal.handle_nothing self.internal_uri.getRawUserInfo + raw_user_info self = handle_nothing self.internal_uri.getRawUserInfo ## ADVANCED Get the unescaped authority part of this URI. raw_authority : Text ! Nothing - raw_authority self = Internal.handle_nothing self.internal_uri.getRawAuthority + raw_authority self = handle_nothing self.internal_uri.getRawAuthority ## ADVANCED Get the unescaped path part of this URI. raw_path : Text ! Nothing - raw_path self = Internal.handle_nothing self.internal_uri.getRawPath + raw_path self = handle_nothing self.internal_uri.getRawPath ## ADVANCED Get the unescaped query part of this URI. raw_query : Text ! Nothing - raw_query self = Internal.handle_nothing self.internal_uri.getRawQuery + raw_query self = handle_nothing self.internal_uri.getRawQuery ## ADVANCED Get the unescaped fragment part of this URI. raw_fragment : Text ! Nothing - raw_fragment self = Internal.handle_nothing self.internal_uri.getRawFragment + raw_fragment self = handle_nothing self.internal_uri.getRawFragment ## Convert this URI to text. @@ -200,7 +199,7 @@ type URI > Example Convert a URI to JSON. - import Standard.Base.Network.URI + import Standard.Base.Network.URI.URI import Standard.Examples example_to_json = Examples.uri.to_json @@ -212,7 +211,7 @@ type URI > Example Check if two URIs are equal. - import Standard.Base.Network.URI + import Standard.Base.Network.URI.URI example_eq = "https://example.com".to_uri == "http://example.org".to_uri == : URI -> Boolean diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI/Internal.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI/Internal.enso deleted file mode 100644 index 200521e26dc2..000000000000 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI/Internal.enso +++ /dev/null @@ -1,12 +0,0 @@ -from Standard.Base import all - -## PRIVATE - - Handle a nothing value. - - Arguments: - - value: The value that may possibly be nothing. -handle_nothing : Any -> Any ! 
Nothing -handle_nothing value = case value of - Nothing -> Error.throw Nothing - _ -> value diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Nothing.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Nothing.enso index 31dca9d814fa..272274973e38 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Nothing.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Nothing.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any from project.Data.Boolean import Boolean, True diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso new file mode 100644 index 000000000000..b6c878c6d363 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso @@ -0,0 +1,268 @@ +import project.Any.Any +import project.Data.Array.Array +import project.Data.Vector.Vector +import project.Error.Error +import project.Error.Common.Polyglot_Error +import project.Meta +import project.Runtime + +from project.Data.Boolean import Boolean, True, False + +polyglot java import java.lang.Throwable + +## A panic is an error condition that is based _outside_ of the normal + program control flow. + + Panics "bubble up" through the program until they reach either an + invocation of Panic.recover Any or the program's main method. An unhandled + panic in main will terminate the program. + + ? Dataflow Errors or Panics + Panics are designed to be used for unrecoverable situations that need + to be handled through non-linear control flow mechanisms. +@Builtin_Type +type Panic + + ## Throws a new panic with the provided payload. + + Arguments: + - payload: The contents of the panic to be thrown. If the payload is a + `Caught_Panic` or a raw Java exception, instead of throwing a new panic + with it as a payload, the original exception is rethrown, preserving + its stacktrace. + + > Example + Throwing a panic containing the text "Oh no!". + + Panic.throw "Oh no!" + + > Example + Use together with `Panic.catch` to catch only specific types of errors + and rethrow any others, without affecting their stacktraces. + + Panic.catch Any (Panic.throw "foo") caught_panic-> case caught_panic.payload of + Illegal_Argument.Error message _ -> "Illegal arguments were provided: "+message + other_panic -> Panic.throw other_panic + throw : Any -> Panic + throw payload = @Builtin_Method "Panic.throw" + + ## PRIVATE + Executes the provided action and if any panic was thrown, calls the + provided callback. + + If action executes successfully, the result of `Panic.catch Any` is the + result of that action. Otherwise, it is the result of the provided + handler callback, executed with the caught panic as its first argument. + + Arguments: + - action: The code to execute that potentially panics. + - handler: The callback to handle any panics. + catch_primitive : Any -> (Caught_Panic -> Any) -> Any + catch_primitive ~action handler = @Builtin_Method "Panic.catch_primitive" + + ## PRIVATE + + Returns a raw representation of the stack trace attached to the provided + throwable. It can be a dataflow error, a panic or a native Java exception. + You probably want `Panic.get_attached_stack_trace` instead. + primitive_get_attached_stack_trace : Throwable -> Array + primitive_get_attached_stack_trace throwable = @Builtin_Method "Panic.primitive_get_attached_stack_trace" + + ## ADVANCED + UNSTABLE + + Returns the attached stack trace of the given throwable. Can be used to get + an Enso friendly stack trace from native Java exceptions. 
+ + The ordering of the resulting vector is such that the top stack frame is the + first element. + get_attached_stack_trace : Caught_Panic | Throwable -> Vector Runtime.Stack_Trace_Element + get_attached_stack_trace error = + throwable = case error of + Caught_Panic.Caught_Panic_Data _ internal_original_exception -> internal_original_exception + throwable -> throwable + prim_stack = Panic.primitive_get_attached_stack_trace throwable + stack_with_prims = Vector.from_polyglot_array prim_stack + stack_with_prims.map Runtime.wrap_primitive_stack_trace_element + + ## Takes any value, and if it is a dataflow error, throws it as a Panic, + otherwise, returns the original value unchanged. + + Arguments: + - value: The value to rethrow any errors on as a panic. + + > Example + Rethrowing a dataflow error as a panic. + + import Standard.Examples + + example_rethrow = Panic.rethrow Examples.throw_error + rethrow : (Any ! Any) -> Any + rethrow value = value.catch Any Panic.throw + + ## Executes the provided action and if a panic matching the provided type was + thrown, calls the provided callback. + + If action executes successfully, the result of `Panic.catch` is the result of + that action. Otherwise, if a matching panic is thrown from within the action, + the result is obtained by calling the provided handler callback. Any + non-matching panics are forwarded without changes. + + Arguments: + - panic_type: The expected panic type. It can either be an Enso type or a + Java class. If the Java class is provided, `Polyglot_Error` containing a + Java exception of this class will be matched. + - action: The code to execute that potentially panics. + - handler: The callback to handle the panics. The callback will be provided + with a `Caught_Panic` instance encapsulating the `payload` of the caught + panic and its stacktrace. + + > Example + Handling a specific type of panic. + + Panic.catch Illegal_Argument.Error (Panic.throw (Illegal_Argument.Error "Oh no!" Nothing)) error-> + "Caught an `Illegal_Argument`: "+error.payload.message + + > Example + Handling any panic. + + Panic.catch Any (Panic.throw (Illegal_Argument.Error "Oh no!" Nothing)) error-> + "Caught some panic!" + + > Example + Convert a string to an integer, catching the Java `NumberFormatException` + and converting it to a more Enso-friendly dataflow error. + + polyglot java import java.lang.Long + polyglot java import java.lang.NumberFormatException + parse str = + Panic.catch NumberFormatException (Long.parseLong str) caught_panic-> + Error.throw (Illegal_Argument.Error "The provided string is not a valid number: "+caught_panic.payload.cause.getMessage) + catch : Any -> Any -> (Caught_Panic -> Any) -> Any + catch panic_type ~action handler = + Panic.catch_primitive action caught_panic-> + case Meta.get_polyglot_language panic_type == "java" of + False -> case caught_panic.payload.is_a panic_type of + True -> handler caught_panic + False -> Panic.throw caught_panic + True -> case caught_panic.payload of + Polyglot_Error.Polyglot_Error_Data java_exception -> + case java_exception.is_a panic_type of + True -> handler caught_panic + False -> Panic.throw caught_panic + _ -> Panic.throw caught_panic + + ## Executes the provided action and if a Java exception matching the provided type was + thrown, calls the provided callback. + + Normally, Java exceptions are wrapped in a `Polyglot_Error` instance, so + using a `Panic.catch` requires unwrapping the error by calling + `caught_panic.payload.cause`. 
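A hedged sketch of `get_attached_stack_trace` in use, with a handler that simply returns the caught panic so its trace can be inspected (the `"boom"` payload is illustrative):

    example_stack_trace =
        # The handler returns the Caught_Panic value itself.
        caught_panic = Panic.catch Any (Panic.throw "boom") p-> p
        Panic.get_attached_stack_trace caught_panic

The result is a `Vector` of `Runtime.Stack_Trace_Element` values, with the top frame first.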
This helper function allows the handler to + work with the Java exception directly. The downside is that if the Java + exception is rethrown, it will be rethrown as a Java exception object + wrapped in an Enso panic. So if the handler needs to rethrow the original + exception preserving its shape and stacktrace, `Panic.catch` should still + be preferred.` + + > Example + Convert a string to an integer, catching the Java `NumberFormatException` + and converting it to a more Enso-friendly dataflow error. + + polyglot java import java.lang.Long + polyglot java import java.lang.NumberFormatException + parse str = + Panic.catch_java NumberFormatException (Long.parseLong str) java_exception-> + Error.throw (Illegal_Argument.Error "The provided string is not a valid number: "+java_exception.getMessage) + catch_java : Any -> Any -> (Throwable -> Any) -> Any + catch_java panic_type ~action handler = + Panic.catch_primitive action caught_panic-> case caught_panic.payload of + Polyglot_Error.Polyglot_Error_Data java_exception -> + case (panic_type == Any) || (java_exception.is_a panic_type) of + True -> handler java_exception + False -> Panic.throw caught_panic + _ -> Panic.throw caught_panic + + ## Executes the provided action and converts a possible panic matching any of + the provided types into a dataflow Error. + + If action executes successfully, the result of `Panic.recover` is the result + of that action. Otherwise, if it panicked with a type matching one of the + expected error types, that panic is returned as a dataflow error. Unexpected + panics are passed through as-is. it is the panic that was thrown after + conversion to a dataflow error. + + Arguments: + - expected_types: The types of expected panics which should be recovered. + This can either be a Vector of types or a single type. + - action: The code to execute that potentially panics. + + > Example + Converting an expected panic to a dataflow error. + + Panic.recover Illegal_Argument.Error (Panic.throw (Illegal_Argument.Error "Oh!" Nothing)) + + > Example + Converting one of many expected panic types to a dataflow error. + + Panic.recover [Illegal_Argument.Error, Illegal_State.Error] (Panic.throw (Illegal_Argument.Error "Oh!" Nothing)) + recover : (Vector Any | Any) -> Any -> Any + recover expected_types ~action = + types_to_check = case expected_types of + _ : Vector -> expected_types + _ -> [expected_types] + Panic.catch Any action caught_panic-> + is_matched = types_to_check.exists typ-> + caught_panic.payload.is_a typ + case is_matched of + True -> caught_panic.convert_to_dataflow_error + False -> Panic.throw caught_panic + + ## If a dataflow error had occurred, wrap it in a `Wrapped_Dataflow_Error` and promote to a Panic. + + Arguments: + - value: value to return if not an error, or rethrow as a Panic. + throw_wrapped_if_error : Any -> Any + throw_wrapped_if_error ~value = + if value.is_error then Panic.throw (Wrapped_Dataflow_Error.Error value.catch) else value + + ## Catch any `Wrapped_Dataflow_Error` Panic and rethrow it as a dataflow error. + + Arguments: + - action: The code to execute that potentially raised a Wrapped_Dataflow_Error. + handle_wrapped_dataflow_error : Any -> Any + handle_wrapped_dataflow_error ~action = + Panic.catch Wrapped_Dataflow_Error.Error action caught_panic-> + Error.throw caught_panic.payload.payload + +@Builtin_Type +type Caught_Panic + ## A wrapper for a caught panic. + + Arguments: + - payload: the payload carried by the error. 
+ - internal_original_exception (private): the original Java exception that is + the source of this panic. Only for internal use. To get the Java exception + from polyglot exceptions, match the `payload` on `Polyglot_Error` and + extract the Java object from there. + Caught_Panic_Data payload internal_original_exception + + ## Converts this caught panic into a dataflow error containing the same + payload and stack trace. + convert_to_dataflow_error : Error + convert_to_dataflow_error self = @Builtin_Method "Caught_Panic.convert_to_dataflow_error" + + ## Returns the stack trace of the caught panic. + stack_trace : Vector Runtime.Stack_Trace_Element + stack_trace self = + Panic.get_attached_stack_trace self + +## PRIVATE + Wraps a dataflow error lifted to a panic, making possible to distinguish it + from other panics. +type Wrapped_Dataflow_Error + Error payload + + ## PRIVATE + Throws the original error. + unwrap self = Error.throw self.payload + diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Polyglot.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Polyglot.enso index 953fa3801373..a0c778888639 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Polyglot.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Polyglot.enso @@ -2,7 +2,7 @@ Polyglot is a term that refers to other languages (such as Java) that are running on the same JVM. -import project.Data.Any.Any +import project.Any.Any import project.Data.Array.Array import project.Data.Boolean.Boolean import project.Data.Numbers.Integer diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Polyglot/Java.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Polyglot/Java.enso index 933c7cce25c6..0f09d0385548 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Polyglot/Java.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Polyglot/Java.enso @@ -1,6 +1,6 @@ ## Utilities for working with Java polyglot objects. -import project.Data.Any.Any +import project.Any.Any import project.Data.Boolean.Boolean import project.Data.Text.Text import project.Nothing.Nothing diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso index e045d5f82727..9589ed4bc62f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Numbers.Integer import project.Data.Vector.Vector import project.System diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso index d8526dcb5ffc..03c6a7aec710 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Array.Array import project.Data.Text.Text import project.Data.Vector.Vector diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Debug.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Debug.enso index 4bcb4636da6a..9666a5cd9f9b 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Debug.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Debug.enso @@ -1,6 +1,6 @@ ## Debug utilities. 
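The two `Wrapped_Dataflow_Error` helpers above are meant to be used as a pair: `throw_wrapped_if_error` promotes a dataflow error to a panic, and `handle_wrapped_dataflow_error` catches that panic and rethrows its payload as a dataflow error. A minimal sketch of the round trip (the `"boom"` payload is illustrative):

    example_wrapped_roundtrip =
        result = Panic.handle_wrapped_dataflow_error <|
            Panic.throw_wrapped_if_error (Error.throw "boom")
        # The original "boom" payload, recovered from the resulting dataflow error.
        result.catch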
-import project.Data.Any.Any +import project.Any.Any import project.Data.Text.Text import project.Nothing.Nothing diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Managed_Resource.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Managed_Resource.enso index c0584758e012..04191c827508 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Managed_Resource.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Managed_Resource.enso @@ -1,6 +1,6 @@ ## An API for manual resource management. -import project.Data.Any.Any +import project.Any.Any import project.Nothing.Nothing ## Resource provides an API for manual management of computation resources. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Ref.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Ref.enso index 8ede809c4a94..c72bef84ef7f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Ref.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Ref.enso @@ -1,4 +1,4 @@ -import project.Data.Any.Any +import project.Any.Any ## A mutable reference type. @Builtin_Type diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/State.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/State.enso index e59fad8c3444..63b30f20b1d6 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/State.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/State.enso @@ -2,7 +2,7 @@ A container type for functionality for working with the runtime's integrated state functionality. -import project.Data.Any.Any +import project.Any.Any from project.Error.Common import Uninitialized_State diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Thread.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Thread.enso index d7fb9fc7807d..5c2d5ba38f04 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Thread.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Thread.enso @@ -1,7 +1,7 @@ ## Utilities for working with threads. Internal threading utilities used for working with threads. -import project.Data.Any.Any +import project.Any.Any ## ADVANCED diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Unsafe.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Unsafe.enso index a1e87a6ba3e1..6dbee4406a78 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Unsafe.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Unsafe.enso @@ -2,7 +2,7 @@ A container for unsafe operations that operate based on implementation details of the language. 
-import project.Data.Any.Any +import project.Any.Any import project.Data.Numbers.Integer import project.Meta.Atom diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso index dc00b45d753d..714e1c67c200 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso @@ -1,18 +1,19 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Array.Array import project.Data.Index_Sub_Range.Index_Sub_Range import project.Data.Json.Json import project.Data.Numbers.Integer import project.Data.Text.Encoding.Encoding -import project.Data.Text.Encoding.Encoding_Error import project.Data.Text.Extensions import project.Data.Text.Matching_Mode import project.Data.Text.Text import project.Data.Time.Time_Of_Day.Time_Of_Day import project.Data.Vector.Vector +import project.Error.Error import project.Error.Problem_Behavior.Problem_Behavior import project.Meta import project.Nothing.Nothing +import project.Panic.Panic import project.Runtime.Managed_Resource.Managed_Resource import project.System.File.File_Access.File_Access import project.System.File_Format.Auto_Detect @@ -20,15 +21,11 @@ import project.System.File_Format.File_Format import project.System.File.File_Permissions.File_Permissions from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error, Panic, Unsupported_File_Type, Illegal_Argument_Error_Data, File_Error +from project.Error.Common import Unsupported_File_Type, Illegal_Argument, File_Error, Encoding_Error polyglot java import org.enso.base.Encoding_Utils polyglot java import java.io.InputStream as Java_Input_Stream polyglot java import java.io.OutputStream as Java_Output_Stream -polyglot java import java.io.IOException -polyglot java import java.nio.file.AccessDeniedException -polyglot java import java.nio.file.NoSuchFileException -polyglot java import java.nio.file.FileAlreadyExistsException polyglot java import java.nio.file.FileSystems polyglot java import java.nio.file.Path polyglot java import java.nio.file.StandardCopyOption @@ -47,7 +44,7 @@ type File > Example Create a new file pointing to the `data.csv` file in the project directory. - import Standard.Base.System.File + import Standard.Base.System.File.File import Standard.Examples example_new = File.new Examples.csv_path @@ -56,7 +53,7 @@ type File case path of _ : Text -> get_file path _ : File -> path - _ -> Error.throw (Illegal_Argument_Error_Data "new file should be either a File or a Text") + _ -> Error.throw (Illegal_Argument.Error "new file should be either a File or a Text") ## ALIAS Current Directory @@ -65,7 +62,7 @@ type File > Example Get the program's current working directory. - import Standard.Base.System.File + import Standard.Base.System.File.File example_cwd = File.current_directory current_directory : File @@ -78,7 +75,7 @@ type File > Example Get the current user's home directory. - import Standard.Base.System.File + import Standard.Base.System.File.File example_home = File.home home : File @@ -275,7 +272,7 @@ type File example_exists = Examples.csv.creation_time creation_time : Time_Of_Day ! File_Error creation_time self = - handle_java_exceptions self <| self.creation_time_builtin + File_Error.handle_java_exceptions self <| self.creation_time_builtin ## Gets the last modified time of a file. 
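With the exception-mapping helper now living on `File_Error`, IO failures keep arriving as dataflow errors. A minimal sketch (the file name is illustrative):

    import Standard.Base.System.File.File

    example_missing_file =
        # Asking for metadata of a file that does not exist yields a dataflow error.
        time = File.new "no_such_file.txt" . creation_time
        time.is_error    # True – the failure surfaces as a `File_Error`, not a panic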
@@ -287,7 +284,7 @@ type File example_exists = Examples.csv.last_modified_time last_modified_time : Time_Of_Day ! File_Error last_modified_time self = - handle_java_exceptions self <| self.last_modified_time_builtin + File_Error.handle_java_exceptions self <| self.last_modified_time_builtin ## Gets the POSIX permissions associated with the file. @@ -452,7 +449,7 @@ type File file.delete delete : Nothing ! File_Error delete self = - handle_java_exceptions self self.delete_builtin + File_Error.handle_java_exceptions self self.delete_builtin ## Moves the file to the specified destination. @@ -462,7 +459,7 @@ type File destination file already exists. Defaults to `False`. copy_to : File -> Boolean -> Nothing ! File_Error copy_to self destination replace_existing=False = - handle_java_exceptions self <| case replace_existing of + File_Error.handle_java_exceptions self <| case replace_existing of True -> copy_options = [StandardCopyOption.REPLACE_EXISTING].to_array self.copy_builtin destination copy_options @@ -476,7 +473,7 @@ type File destination file already exists. Defaults to `False`. move_to : File -> Boolean -> Nothing ! File_Error move_to self destination replace_existing=False = - handle_java_exceptions self <| case replace_existing of + File_Error.handle_java_exceptions self <| case replace_existing of True -> copy_options = [StandardCopyOption.REPLACE_EXISTING].to_array self.move_builtin destination copy_options @@ -509,7 +506,7 @@ type File new_input_stream : Vector File_Access -> Input_Stream ! File_Error new_input_stream self open_options = opts = open_options . map (_.to_java) . to_array - stream = handle_java_exceptions self (self.input_stream opts) + stream = File_Error.handle_java_exceptions self (self.input_stream opts) resource = Managed_Resource.register stream close_stream Input_Stream.Value self resource @@ -526,7 +523,7 @@ type File new_output_stream : Vector File_Access -> Output_Stream ! File_Error new_output_stream self open_options = opts = open_options . map (_.to_java) . to_array - stream = handle_java_exceptions self <| + stream = File_Error.handle_java_exceptions self <| self.output_stream opts resource = Managed_Resource.register stream close_stream Output_Stream.Value self resource @@ -537,7 +534,7 @@ type File returns a vector of bytes. read_last_bytes : Integer -> Vector ! File_Error read_last_bytes self n = - handle_java_exceptions self <| + File_Error.handle_java_exceptions self <| Vector.from_polyglot_array (self.read_last_bytes_builtin n) ## Lists files contained in the directory denoted by this file. @@ -666,7 +663,7 @@ type Output_Stream out_stream.close write_bytes : Vector File_Access -> Nothing ! File_Error write_bytes self contents = self.stream_resource . with java_stream-> - handle_java_exceptions self.file <| + File_Error.handle_java_exceptions self.file <| java_stream.write contents.to_array java_stream.flush Nothing @@ -754,7 +751,7 @@ type Input_Stream bytes read_all_bytes : Vector ! File_Error read_all_bytes self = self.stream_resource . with java_stream-> - handle_java_exceptions self.file <| + File_Error.handle_java_exceptions self.file <| Vector.from_polyglot_array java_stream.readAllBytes ## ADVANCED @@ -784,7 +781,7 @@ type Input_Stream bytes read_n_bytes : Integer -> Vector ! File_Error read_n_bytes self n = self.stream_resource . 
with java_stream-> - handle_java_exceptions self.file <| + File_Error.handle_java_exceptions self.file <| bytes = java_stream.readNBytes n Vector.from_polyglot_array bytes @@ -809,7 +806,7 @@ type Input_Stream bytes read_byte : Integer ! File_Error read_byte self = self.stream_resource . with java_stream-> - handle_java_exceptions self.file <| + File_Error.handle_java_exceptions self.file <| java_stream.read ## ADVANCED @@ -855,22 +852,6 @@ type Input_Stream problems = Vector.from_polyglot_array results.problems . map Encoding_Error.Error on_problems.attach_problems_after results.result problems -## PRIVATE - - Utility method for running an action with Java exceptions mapping. -handle_java_exceptions file ~action = - Panic.catch IOException action caught_panic-> - wrap_io_exception file caught_panic.payload.cause - -## PRIVATE - - Converts a Java `IOException` into its Enso counterpart. -wrap_io_exception file io_exception = - if io_exception.is_a NoSuchFileException then Error.throw (File_Error.Not_Found file) else - if io_exception.is_a FileAlreadyExistsException then Error.throw (File_Error.Already_Exists file) else - if io_exception.is_a AccessDeniedException then Error.throw (File_Error.IO_Error file "You do not have permission to access the file") else - Error.throw (File_Error.IO_Error file "An IO error has occurred: "+io_exception.to_text) - ## PRIVATE Utility method for closing primitive Java streams. Provided to avoid diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Existing_File_Behavior.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Existing_File_Behavior.enso index 65098ad358cd..211be52fe072 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Existing_File_Behavior.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Existing_File_Behavior.enso @@ -1,11 +1,14 @@ -import project.Data.Any.Any +import project.Any.Any +import project.Error.Error +import project.Nothing.Nothing +import project.Panic.Panic +import project.Panic.Caught_Panic import project.System.File.File import project.System.File.File_Access.File_Access import project.System.File.Output_Stream -import project.Nothing.Nothing from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error, Panic, File_Error +from project.Error.Common import File_Error ## Specifies the behavior of a write operation when the destination file already exists. 
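The write extensions below select one of these behaviors through the `on_existing_file` argument. A short sketch, assuming the usual `Standard.Base` re-exports are in scope and the target path is writable (file name and text are illustrative):

    example_backup_write =
        # Keeps a backup of any pre-existing file before writing the new contents.
        "Hello, World!".write "scratch.txt" on_existing_file=Existing_File_Behavior.Backup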
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Write_Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Write_Extensions.enso index c31d0fa4ec15..9e4faa80b992 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Write_Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Write_Extensions.enso @@ -1,14 +1,15 @@ import project.Data.Text.Text import project.Data.Text.Encoding.Encoding -import project.Data.Text.Encoding.Encoding_Error import project.Data.Text.Extensions import project.Data.Vector.Vector +import project.Error.Error import project.Error.Problem_Behavior.Problem_Behavior import project.Nothing.Nothing +import project.Panic.Panic import project.System.File.Existing_File_Behavior.Existing_File_Behavior import project.System.File.File -from project.Error.Common import Panic, Error, Illegal_Argument_Error, Illegal_Argument_Error_Data, File_Error, Unsupported_Argument_Types_Data +from project.Error.Common import Illegal_Argument, File_Error, Unsupported_Argument_Types_Data, Encoding_Error polyglot java import org.enso.base.Array_Builder @@ -31,7 +32,7 @@ polyglot java import org.enso.base.Array_Builder If another error occurs, such as access denied, an `File_Error.IO_Error` is raised. Otherwise, the file is created with the encoded text written to it. -Text.write : (File|Text) -> Encoding -> Existing_File_Behavior -> Problem_Behavior -> Nothing ! Encoding_Error | Illegal_Argument_Error | File_Error +Text.write : (File|Text) -> Encoding -> Existing_File_Behavior -> Problem_Behavior -> Nothing ! Encoding_Error | Illegal_Argument | File_Error Text.write self path encoding=Encoding.utf_8 on_existing_file=Existing_File_Behavior.Backup on_problems=Problem_Behavior.Report_Warning = bytes = self.bytes encoding on_problems file = File.new path @@ -46,7 +47,7 @@ Text.write self path encoding=Encoding.utf_8 on_existing_file=Existing_File_Beha - on_existing_file: Specifies how to proceed if the file already exists. If the Vector contains any item which is not a `Byte`, an - `Illegal_Argument_Error` will be raised. Enso follows the Java convention, + `Illegal_Argument` will be raised. Enso follows the Java convention, that a `Byte` is between -128 and 127. If the path to the parent location cannot be found or the filename is invalid, a `File_Not_Found` is raised. @@ -66,9 +67,9 @@ Text.write self path encoding=Encoding.utf_8 on_existing_file=Existing_File_Beha import Standard.Examples [36, -62, -93, -62, -89, -30, -126, -84, -62, -94].write_bytes Examples.scratch_file.write_bytes Examples.scratch_file Existing_File_Behavior.Append -Vector.write_bytes : (File|Text) -> Existing_File_Behavior -> Nothing ! Illegal_Argument_Error | File_Error +Vector.write_bytes : (File|Text) -> Existing_File_Behavior -> Nothing ! Illegal_Argument | File_Error Vector.write_bytes self path on_existing_file=Existing_File_Behavior.Backup = - Panic.catch Unsupported_Argument_Types_Data handler=(_ -> Error.throw (Illegal_Argument_Error_Data "Only Vectors consisting of bytes (integers in the range from -128 to 127) are supported by the `write_bytes` method.")) <| + Panic.catch Unsupported_Argument_Types_Data handler=(_ -> Error.throw (Illegal_Argument.Error "Only Vectors consisting of bytes (integers in the range from -128 to 127) are supported by the `write_bytes` method.")) <| ## Convert to a byte array before writing - and fail early if there is any problem. 
byte_array = Array_Builder.ensureByteArray self.to_array diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso index 71cdfda94dea..f5daf3c22193 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso @@ -1,15 +1,13 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Text.Encoding.Encoding import project.Data.Vector.Vector +import project.Error.Error import project.Error.Problem_Behavior.Problem_Behavior import project.Nothing.Nothing import project.System.File.File from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error, Unsupported_File_Type, unimplemented - -from project.System.File_Format.Plain_Text_Format import Plain_Text -from project.System.File_Format.Plain_Text_Format export Plain_Text +from project.Error.Common import Unsupported_File_Type, Unimplemented polyglot java import org.enso.base.file_format.FileFormatSPI @@ -40,7 +38,7 @@ type File_Format ## PRIVATE Implements the `File.read` for this `File_Format` read : File -> Problem_Behavior -> Any - read _ _ = unimplemented "This is an interface only." + read _ _ = Unimplemented.throw "This is an interface only." type Plain_Text_Format Plain_Text (encoding:Encoding=Encoding.utf_8) @@ -49,8 +47,8 @@ type Plain_Text_Format for_file : File -> Plain_Text_Format | Nothing for_file file = case file.extension of - ".txt" -> Plain_Text - ".log" -> Plain_Text + ".txt" -> Plain_Text_Format.Plain_Text + ".log" -> Plain_Text_Format.Plain_Text _ -> Nothing ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/Process/Exit_Code.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/Process/Exit_Code.enso index 56da0dc64b7f..931511ced3ea 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/Process/Exit_Code.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/Process/Exit_Code.enso @@ -30,9 +30,9 @@ type Exit_Code > Example Convert a success code to a corresponding number. - import Standard.Base.System.Process.Exit_Code + import Standard.Base.System.Process.Exit_Code.Exit_Code - example_to_number = Exit_Code.Exit_Success.to_number + example_to_number = Exit_Code.Success.to_number to_number : Integer to_number self = case self of Exit_Code.Success -> 0 diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso index 7f9d649fce2b..00b2d2e917bc 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso @@ -1,10 +1,11 @@ -import project.Data.Any.Any +import project.Any.Any import project.Data.Array.Array import project.Data.Index_Sub_Range.Index_Sub_Range import project.Data.Maybe.Maybe import project.Data.Numbers.Integer import project.Data.Pair.Pair import project.Data.Vector.Vector +import project.Error.Error import project.Nothing.Nothing import project.Polyglot import project.Runtime @@ -12,7 +13,6 @@ import project.Runtime.Source_Location.Source_Location import project.Runtime.Stack_Trace_Element from project.Data.Boolean import Boolean, True, False -from project.Error.Common import Error ## A representation of a dataflow warning attached to a value. @Builtin_Type @@ -106,7 +106,7 @@ type Warning > Example Detach warnings of a specific type. 
- result = Warning.detach_selected_warnings value (_.is_a Illegal_State_Error) + result = Warning.detach_selected_warnings value (_.is_a Illegal_State.Error) result.first # `value` with the matched warnings removed result.second # the list of matched warnings detach_selected_warnings : Any -> (Any -> Boolean) -> Pair Any Vector diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso index 127cd98ca4c5..18db603033c3 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso @@ -6,13 +6,13 @@ import Standard.Table.Data.Table.Table as Materialized_Table import project.Data.SQL_Query.SQL_Query import project.Data.SQL_Statement.SQL_Statement import project.Data.SQL_Type.SQL_Type -import project.Data.Table as Database_Table -# TODO[DB] Fix composite types #183857386 -import project.Data.Table.Table as Database_Table_Type +import project.Data.Table.Table import project.Internal.IR.Context.Context import project.Internal.IR.SQL_Expression.SQL_Expression import project.Internal.IR.Query.Query +import project.Data.Table as Database_Table_Module + from project.Internal.Result_Set import read_column, result_set_to_table from project.Internal.JDBC_Connection import create_table_statement, handle_sql_errors from project.Errors import SQL_Error @@ -112,7 +112,7 @@ type Connection - query: name of the table or sql statement to query. If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. - alias: optionally specify a friendly alias for the query. - query : Text | SQL_Query -> Text -> Database_Table_Type + query : Text | SQL_Query -> Text -> Table query self query alias="" = handle_sql_errors <| case query of _ : Text -> self.query alias=alias <| @@ -122,11 +122,11 @@ type Connection columns = self.jdbc_connection.fetch_columns raw_sql name = if alias == "" then (UUID.randomUUID.to_text) else alias ctx = Context.for_query raw_sql name - Database_Table.make_table self name columns ctx + Database_Table_Module.make_table self name columns ctx SQL_Query.Table_Name name -> ctx = Context.for_table name (if alias == "" then name else alias) columns = self.jdbc_connection.fetch_columns (self.dialect.generate_sql (Query.Select_All ctx)) - Database_Table.make_table self name columns ctx + Database_Table_Module.make_table self name columns ctx ## Execute the query and load the results into memory as a Table. @@ -177,8 +177,8 @@ type Connection usually not be visible to other connections. - batch_size: Specifies how many rows should be uploaded in a single batch. 
- upload_table : Text -> Materialized_Table -> Boolean -> Integer -> Database_Table_Type - upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State_Error_Data <| + upload_table : Text -> Materialized_Table -> Boolean -> Integer -> Table + upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State.Error <| create_sql = create_table_statement name table temporary create_table = self.execute_update create_sql diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Options.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Options.enso index 51895cc9472a..ec1ce4b84b98 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Options.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Options.enso @@ -10,9 +10,6 @@ import project.Internal.Postgres.Pgpass polyglot java import org.postgresql.Driver -from project.Connection.Postgres_Options.Postgres_Options import Postgres -from project.Connection.Postgres_Options.Postgres_Options export Postgres - type Postgres_Options ## Connect to a PostgreSQL database. @@ -58,7 +55,7 @@ type Postgres_Options Pair.Value Nothing Nothing -> Pgpass.read self.host self.port self.database Pair.Value Nothing _ -> - Error.throw (Illegal_State_Error_Data "PGPASSWORD is set, but PGUSER is not.") + Error.throw (Illegal_State.Error "PGPASSWORD is set, but PGUSER is not.") Pair.Value username Nothing -> Pgpass.read self.host self.port self.database username Pair.Value username password -> diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Redshift_Options.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Redshift_Options.enso index b3d0f3f82491..17b2a774fb25 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Redshift_Options.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Redshift_Options.enso @@ -13,9 +13,6 @@ polyglot java import com.amazon.redshift.jdbc.Driver polyglot java import java.util.Properties polyglot java import org.enso.database.JDBCProxy -from project.Connection.Redshift_Options.Redshift_Options import Redshift -from project.Connection.Redshift_Options.Redshift_Options export Redshift - type Redshift_Options ## Connect to a AWS Redshift database. diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Options.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Options.enso index a16a64d7481d..17a14c213e0f 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Options.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Options.enso @@ -3,9 +3,6 @@ from Standard.Base import all import project.Connection.Connection_Options import project.Internal.SQLite.SQLite_Connection -from project.Connection.SQLite_Options.SQLite_Options import SQLite -from project.Connection.SQLite_Options.SQLite_Options export SQLite - type SQLite_Options ## Connect to a SQLite DB File or InMemory DB. 
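A sketch of the typical flow with these connection options, mirroring the example used later for `Table.write`: connect, query by table name, then materialize with `read`. Depending on how constructors are re-exported after this change, the constructor may need to be written as `SQLite_Options.SQLite`; the file and table names are illustrative:

    # Connect to an illustrative SQLite database file.
    connection = Database.connect (SQLite (File.new "db.sqlite"))
    # Query a table by name, then pull a bounded number of rows into memory.
    table = connection.query (SQL_Query.Table_Name "Table")
    in_memory = table.read max_rows=1000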
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso index edf627719474..2a55e7118dde 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso @@ -6,7 +6,6 @@ import Standard.Table.Data.Column.Column as Materialized_Column import project.Data.SQL_Statement.SQL_Statement import project.Data.SQL_Type.SQL_Type -import project.Data.Table.Integrity_Error import project.Internal.Helpers import project.Internal.IR.Context.Context import project.Internal.IR.SQL_Expression.SQL_Expression @@ -15,7 +14,7 @@ import project.Internal.IR.Query.Query from project.Data.Table import Table, freshen_columns -from project.Errors import Unsupported_Database_Operation_Error, Unsupported_Database_Operation_Error_Data +from project.Errors import Unsupported_Database_Operation, Integrity_Error type Column @@ -132,7 +131,7 @@ type Column prepare_operand operand operand_type = case operand of other_column : Column -> if Helpers.check_integrity self other_column then other_column.expression else - Error.throw <| Unsupported_Database_Operation_Error_Data "Cannot use columns coming from different contexts in one expression without a join." + Error.throw <| Unsupported_Database_Operation.Error "Cannot use columns coming from different contexts in one expression without a join." constant -> actual_operand_type = operand_type.if_nothing self.sql_type SQL_Expression.Constant actual_operand_type constant @@ -476,7 +475,7 @@ type Column common type. left_type = get_approximate_type when_true self.sql_type right_type = get_approximate_type when_false self.sql_type - if left_type != right_type then Error.throw (Illegal_Argument_Error_Data "when_true and when_false types do not match") else + if left_type != right_type then Error.throw (Illegal_Argument.Error "when_true and when_false types do not match") else self.make_op "IIF" [when_true, when_false] new_type=left_type ## Returns a column of first non-`Nothing` value on each row of `self` and @@ -494,7 +493,7 @@ type Column coalesce : (Any | Vector Any) -> Column coalesce self values = case values of _ : Vector -> - if values.any (v->(self.sql_type != get_approximate_type v self.sql_type)) then Error.throw (Illegal_Argument_Error_Data "self and values types do not all match") else + if values.any (v->(self.sql_type != get_approximate_type v self.sql_type)) then Error.throw (Illegal_Argument.Error "self and values types do not all match") else self.make_op "COALESCE" values new_type=self.sql_type _ : Array -> self.coalesce (Vector.from_polyglot_array values) _ -> self.coalesce [values] @@ -513,7 +512,7 @@ type Column min : (Any | Vector Any) -> Column min self values = case values of _ : Vector -> - if values.any (v->(self.sql_type != get_approximate_type v self.sql_type)) then Error.throw (Illegal_Argument_Error_Data "self and values types do not all match") else + if values.any (v->(self.sql_type != get_approximate_type v self.sql_type)) then Error.throw (Illegal_Argument.Error "self and values types do not all match") else self.make_op "ROW_MIN" values new_type=self.sql_type _ : Array -> self.min (Vector.from_polyglot_array values) _ -> self.min [values] @@ -532,7 +531,7 @@ type Column max : (Any | Vector Any) -> Column max self values = case values of _ : Vector -> - if values.any (v->(self.sql_type != get_approximate_type v self.sql_type)) then Error.throw (Illegal_Argument_Error_Data 
"self and values types do not all match") else + if values.any (v->(self.sql_type != get_approximate_type v self.sql_type)) then Error.throw (Illegal_Argument.Error "self and values types do not all match") else self.make_op "ROW_MAX" values new_type=self.sql_type _ : Array -> self.max (Vector.from_polyglot_array values) _ -> self.max [values] @@ -605,7 +604,7 @@ type Column True -> is_used_in_index = self.context.meta_index.exists i-> i.name == new_name case is_used_in_index of - True -> Error.throw <| Illegal_State_Error_Data "Cannot rename the column to "+new_name+", because it has an index with the same name." + True -> Error.throw <| Illegal_State.Error "Cannot rename the column to "+new_name+", because it has an index with the same name." False -> Column.Value new_name self.connection self.sql_type self.expression self.context @@ -641,7 +640,7 @@ type Column take self range=(First 1) = _ = range msg = "`Column.take` is not yet implemented." - Error.throw (Unsupported_Database_Operation_Error_Data msg) + Error.throw (Unsupported_Database_Operation.Error msg) ## UNSTABLE Creates a new Column from the input with the specified range of rows @@ -653,7 +652,7 @@ type Column drop self range=(First 1) = _ = range msg = "`Column.drop` is not yet implemented." - Error.throw (Unsupported_Database_Operation_Error_Data msg) + Error.throw (Unsupported_Database_Operation.Error msg) ## UNSTABLE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso index e39f40a01ef4..9a06591c4455 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso @@ -1,5 +1,4 @@ from Standard.Base import all -import Standard.Base.Error.Common as Errors from Standard.Table import Aggregate_Column @@ -21,13 +20,13 @@ type Dialect ## PRIVATE Name of the dialect. name : Text - name self = Errors.unimplemented "This is an interface only." + name self = Unimplemented.throw "This is an interface only." ## PRIVATE A function which generates SQL code from the internal representation according to the specific dialect. generate_sql : Query -> SQL_Statement - generate_sql self = Errors.unimplemented "This is an interface only." + generate_sql self = Unimplemented.throw "This is an interface only." ## PRIVATE Deduces the result type for an aggregation operation. @@ -35,7 +34,7 @@ type Dialect The provided aggregate is assumed to contain only already resolved columns. You may need to transform it with `resolve_aggregate` first. resolve_target_sql_type : Aggregate_Column -> SQL_Type - resolve_target_sql_type self = Errors.unimplemented "This is an interface only." + resolve_target_sql_type self = Unimplemented.throw "This is an interface only." ## PRIVATE Prepares an ordering descriptor. @@ -43,7 +42,7 @@ type Dialect One of the purposes of this method is to verify if the expected ordering settings are supported by the given database backend. prepare_order_descriptor : Internal_Column -> Sort_Direction -> Text_Ordering -> Order_Descriptor - prepare_order_descriptor self = Errors.unimplemented "This is an interface only." + prepare_order_descriptor self = Unimplemented.throw "This is an interface only." 
## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Type.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Type.enso index c27669c1f772..b8df1ae18d1f 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Type.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Type.enso @@ -78,7 +78,7 @@ type SQL_Type ## ADVANCED Given an Enso value gets the approximate SQL type. - approximate_type : Any -> SQL_Type ! Illegal_Argument_Error_Data + approximate_type : Any -> SQL_Type ! Illegal_Argument approximate_type value = case value of _ : Boolean -> SQL_Type.boolean _ : Integer -> SQL_Type.integer @@ -87,7 +87,7 @@ type SQL_Type _ : Date -> SQL_Type.date _ : Time_Of_Day -> SQL_Type.time_of_day _ : Date_Time -> SQL_Type.date_time - _ -> Error.throw (Illegal_Argument_Error_Data "Unsupported type.") + _ -> Error.throw (Illegal_Argument.Error "Unsupported type.") ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index 808a60998ef6..716ce471f77a 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -26,7 +26,7 @@ import project.Internal.IR.Internal_Column.Internal_Column import project.Internal.IR.Join_Kind.Join_Kind import project.Internal.IR.Query.Query -from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data +from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error import project.Connection.Connection.Connection polyglot java import java.sql.JDBCType @@ -401,7 +401,7 @@ type Table _ : Filter_Condition -> on_problems.handle_errors fallback=self.with_no_rows <| mask (make_filter_column column filter) - _ : Function -> Error.throw (Unsupported_Database_Operation_Error_Data "Filtering with a custom predicate is not supported in the database.") + _ : Function -> Error.throw (Unsupported_Database_Operation.Error "Filtering with a custom predicate is not supported in the database.") _ : Text -> table_at = self.at column if table_at.is_error.not then self.filter table_at filter on_problems else @@ -430,7 +430,7 @@ type Table take self range=(First 1) = _ = range msg = "`Table.take` is not yet implemented." - Error.throw (Unsupported_Database_Operation_Error_Data msg) + Error.throw (Unsupported_Database_Operation.Error msg) ## UNSTABLE Creates a new Table from the input with the specified range of rows @@ -443,7 +443,7 @@ type Table drop self range=(First 1) = _ = range msg = "`Table.drop` is not yet implemented." - Error.throw (Unsupported_Database_Operation_Error_Data msg) + Error.throw (Unsupported_Database_Operation.Error msg) ## UNSTABLE @@ -496,7 +496,7 @@ type Table True -> is_used_in_index = self.context.meta_index.exists i-> i.name == name case is_used_in_index of - True -> Error.throw <| Illegal_State_Error_Data "Cannot override column "+name+", because it is used as an index. Remove the index or use a different name." + True -> Error.throw <| Illegal_State.Error "Cannot override column "+name+", because it is used as an index. Remove the index or use a different name." False -> resolved = case column of _ : Text -> self.evaluate column @@ -629,7 +629,7 @@ type Table descending order. table.order_by (Sort_Column_Selector.By_Index [1, Sort_Column.Index -7 Sort_Direction.Descending]) - order_by : Vector Text | Sort_Column_Selector -> Text_Ordering -> Problem_Behavior -> Table ! 
Incomparable_Values_Error + order_by : Text | Vector Text | Sort_Column_Selector -> Text_Ordering -> Problem_Behavior -> Table ! Incomparable_Values_Error order_by self (columns = (Sort_Column_Selector.By_Name [(Sort_Column.Name (self.columns.at 0 . name))])) text_ordering=Text_Ordering.Default on_problems=Report_Warning = Panic.handle_wrapped_dataflow_error <| problem_builder = Problem_Builder.new columns_for_ordering = Table_Helpers.prepare_order_by self.columns columns problem_builder @@ -675,7 +675,7 @@ type Table distinct : Vector Text | Column_Selector -> Case_Sensitivity -> Problem_Behavior -> Table distinct self (columns = Column_Selector.By_Name (self.columns.map .name)) case_sensitivity=Case_Sensitivity.Sensitive on_problems=Report_Warning = _ = [columns, case_sensitivity, on_problems] - Error.throw (Unsupported_Database_Operation_Error_Data "`Table.distinct` is not yet implemented for the database backend.") + Error.throw (Unsupported_Database_Operation.Error "`Table.distinct` is not yet implemented for the database backend.") ## UNSTABLE @@ -704,7 +704,7 @@ type Table _ : Table -> Panic.recover Any <| Panic.rethrow (Helpers.ensure_name_is_sane left_suffix && Helpers.ensure_name_is_sane right_suffix) if left_suffix == right_suffix then - Panic.throw <| Illegal_State_Error_Data "left_suffix must be different from right_suffix" + Panic.throw <| Illegal_State.Error "left_suffix must be different from right_suffix" kind = if drop_unmatched then Join_Kind.Inner else Join_Kind.Left # Prepare the left and right pairs of indices along which the join will be performed. @@ -715,7 +715,7 @@ type Table (Helpers.unify_vector_singleton on).map (self.resolve >> .as_internal) right_join_index = other.context.meta_index if left_join_index.length != right_join_index.length then - Panic.throw <| Illegal_State_Error_Data "Cannot join with multi-indexes of different lengths." + Panic.throw <| Illegal_State.Error "Cannot join with multi-indexes of different lengths." # TODO [RW] We may be able to avoid creating subqueries if there are no groups, orders or wheres, # so it may be worth optimizing that here (#1515). @@ -851,7 +851,7 @@ type Table because we need to keep the API consistent with the in-memory table. _ = [id_fields, name_field, value_field, on_problems] msg = "Transposing columns is not supported in database tables, the table has to be materialized first with `read`." - Error.throw (Unsupported_Database_Operation_Error_Data msg) + Error.throw (Unsupported_Database_Operation.Error msg) ## Returns a new table using a chosen field as the column header and then aggregating the rows within each value as specified. Optionally, a set of @@ -888,7 +888,7 @@ type Table because we need to keep the API consistent with the in-memory table. _ = [group_by, name_column, values, on_problems] msg = "Cross tab of database tables is not supported, the table has to be materialized first with `read`." - Error.throw (Unsupported_Database_Operation_Error_Data msg) + Error.throw (Unsupported_Database_Operation.Error msg) ## Parsing values is not supported in database tables, the table has to be loaded into memory first with `read`. @@ -898,7 +898,7 @@ type Table because we need to keep the API consistent with the in-memory table. _ = [value_formatter, column_types, on_problems] msg = "Parsing values is not supported in database tables, the table has to be materialized first with `read`." 
- Error.throw (Unsupported_Database_Operation_Error_Data msg) + Error.throw (Unsupported_Database_Operation.Error msg) ## ALIAS dropna ALIAS drop_missing_rows @@ -941,7 +941,7 @@ type Table read : (Integer | Nothing) -> Materialized_Table read self max_rows=Nothing = case self.context.meta_index.length > 1 of - True -> Error.throw <| Illegal_State_Error_Data "Multi-indexes are not implemented in the dataframes, if you want to materialize such a Table, remove the index first using `set_index`." + True -> Error.throw <| Illegal_State.Error "Multi-indexes are not implemented in the dataframes, if you want to materialize such a Table, remove the index first using `set_index`." False -> preprocessed = self.reset_index.limit max_rows case preprocessed.internal_columns.is_empty of @@ -973,7 +973,7 @@ type Table to_sql self = cols = self.internal_columns.map (c -> [c.name, c.expression]) case cols.is_empty of - True -> Error.throw <| Unsupported_Database_Operation_Error_Data "Cannot generate SQL for a table with no columns." + True -> Error.throw <| Unsupported_Database_Operation.Error "Cannot generate SQL for a table with no columns." False -> query = Query.Select cols self.context self.connection.dialect.generate_sql query @@ -1071,15 +1071,15 @@ type Table insert : Vector Any -> Nothing insert self values = table_name = case self.context.from_spec of - From_Spec.From_Table name _ -> name - _ -> Error.throw <| Illegal_State_Error_Data "Inserting can only be performed on tables as returned by `query`, any further processing is not allowed." + From_Spec.Table name _ -> name + _ -> Error.throw <| Illegal_State.Error "Inserting can only be performed on tables as returned by `query`, any further processing is not allowed." # TODO [RW] before removing the PRIVATE tag, add a check that no bad stuff was done to the table as described above pairs = self.internal_columns.zip values col-> value-> [col.name, SQL_Expression.Constant col.sql_type value] query = self.connection.dialect.generate_sql <| Query.Insert table_name pairs affected_rows = self.connection.execute_update query case affected_rows == 1 of - False -> Error.throw <| Illegal_State_Error_Data "The update unexpectedly affected "+affected_rows.to_text+" rows." + False -> Error.throw <| Illegal_State.Error "The update unexpectedly affected "+affected_rows.to_text+" rows." True -> Nothing ## Provides a simplified text representation for display in the REPL and errors. @@ -1110,7 +1110,7 @@ type Table Returns: - If an unsupported `File_Format` is specified, an - `Illegal_Argument_Error` is raised. + `Illegal_Argument` is raised. - If the path to the parent location cannot be found or the filename is invalid, a `File_Error.Not_Found` is raised. - If another IO error occurs, such as access denied, an @@ -1127,7 +1127,7 @@ type Table - `Auto_Detect`: The file format is determined by the provided file. - `Bytes` and `Plain_Text`: The Table does not support these types in the `write` function. If passed as format, an - `Illegal_Argument_Error` is raised. To write out the table as plain + `Illegal_Argument` is raised. To write out the table as plain text, the user needs to call the `Text.from Table` method and then use the `Text.write` function. 
@@ -1141,28 +1141,11 @@ type Table connection = Database.connect (SQLite (File.new "db.sqlite")) table = connection.query (SQL_Query.Table_Name "Table") table.write (enso_project.data / "example_csv_output.csv") - write : File|Text -> File_Format -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> Nothing ! Column_Mismatch | Illegal_Argument_Error | File_Error + write : File|Text -> File_Format -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> Nothing ! Column_Mismatch | Illegal_Argument | File_Error write self path format=Auto_Detect on_existing_file=Existing_File_Behavior.Backup match_columns=Match_Columns.By_Name on_problems=Report_Warning = # TODO This should ideally be done in a streaming manner, or at least respect the row limits. self.read.write path format on_existing_file match_columns on_problems -type Integrity_Error - - ## UNSTABLE - - Signalizes that an operation tried using objects coming from different - contexts. - - To use columns from different tables, you must first join them. - Error object_description - - # Return a readable description of this error. - to_text : Text - to_text self = self.object_description + " comes from a different context." - - to_display_text : Text - to_display_text self = self.to_text - ## PRIVATE Creates a Table out of a connection, name and list of column names. @@ -1242,7 +1225,7 @@ combine_names left_names right_names left_suffix right_suffix = new_name = pair.second case new_name!=original_name && (new_names_count new_name > 1) of True -> - Panic.throw <| Illegal_State_Error_Data "Duplicate column "+original_name+" was about to be renamed to "+new_name+" to disambiguate column names, but a column with name "+new_name+" already exists too. Please rename the columns before joining to avoid ambiguity." + Panic.throw <| Illegal_State.Error "Duplicate column "+original_name+" was about to be renamed to "+new_name+" to disambiguate column names, but a column with name "+new_name+" already exists too. Please rename the columns before joining to avoid ambiguity." False -> Nothing catch_ambiguity left_pairs catch_ambiguity right_pairs diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso index 8078f8299038..c4889bcd1438 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso @@ -2,14 +2,10 @@ from Standard.Base import all polyglot java import java.sql.SQLException -# TODO Dubious constructor export -from project.Errors.Unsupported_Database_Operation_Error import all -from project.Errors.Unsupported_Database_Operation_Error export all - ## Indicates that a requested operation is not supported, for example because a particular database backend does not support it. -type Unsupported_Database_Operation_Error - Unsupported_Database_Operation_Error_Data message +type Unsupported_Database_Operation + Error message ## UNSTABLE @@ -25,10 +21,6 @@ type Unsupported_Database_Operation_Error to_display_text self = "Unsupported database operation: " + self.message -# TODO Dubious constructor export -from project.Errors.SQL_Error import all -from project.Errors.SQL_Error export all - type SQL_Error ## UNSTABLE @@ -39,7 +31,7 @@ type SQL_Error - java_exception: The underlying exception. - related_query (optional): A string representation of a query that this error is related to. 
- SQL_Error_Data java_exception related_query=Nothing + Error java_exception related_query=Nothing ## UNSTABLE @@ -60,13 +52,9 @@ type SQL_Error Throws an error as if a SQL Exception was thrown. throw_sql_error : Text -> SQL_Error throw_sql_error message = - Error.throw (SQL_Error_Data (SQLException.new message)) + Error.throw (SQL_Error.Error (SQLException.new message)) -# TODO Dubious constructor export -from project.Errors.SQL_Timeout_Error import all -from project.Errors.SQL_Timeout_Error export all - -type SQL_Timeout_Error +type SQL_Timeout ## UNSTABLE Indicates that an operation has timed out. @@ -75,7 +63,7 @@ type SQL_Timeout_Error - java_exception: The underlying exception. - related_query (optional): A string representation of a query that this error is related to. - SQL_Timeout_Error_Data java_exception related_query=Nothing + Error java_exception related_query=Nothing ## UNSTABLE @@ -90,3 +78,52 @@ type SQL_Timeout_Error Pretty print the timeout error. to_display_text : Text to_display_text self = self.to_text + + +## UNSTABLE + + Signals that a name for a column or table is not supported. + + Arguments: + - text: The name that is not supported. + + Currently the names can only include ASCII letters, numbers and the + underscore. This is a temporary limitation simplifying name handling. It will + be removed in a future version. +type Unsupported_Name + Error text + + ## PRIVATE + + Creates a human-readable representation of the unsupported name error. + to_text : Text + to_text self = + "The name " + self.text + " is not currently supported by the Database library." + + ## UNSTABLE + + Pretty print the unsupported name error. + to_display_text : Text + to_display_text self = self.to_text + + +type Integrity_Error + ## UNSTABLE + + Signalizes that an operation tried using objects coming from different + contexts. + + To use columns from different tables, you must first join them. + Error object_description + + ## PRIVATE + + Creates a human-readable representation of the integrity error. + to_text : Text + to_text self = self.object_description + " comes from a different context." + + ## UNSTABLE + + Pretty print the integrity error. + to_display_text : Text + to_display_text self = self.to_text diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso index ed828b864f67..347676d42521 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso @@ -9,7 +9,7 @@ import project.Data.Table.Table import project.Internal.IR.SQL_Expression.SQL_Expression import project.Internal.IR.Internal_Column.Internal_Column -from project.Errors import Unsupported_Database_Operation_Error_Data +from project.Errors import Unsupported_Database_Operation ## PRIVATE Creates an `Internal_Column` that computes the specified statistic. 
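The `Errors` module now uses the `Type.Error` constructor convention throughout (`SQL_Error.Error`, `SQL_Timeout.Error`, `Unsupported_Name.Error`) and drops the old `from ... export all` constructor re-exports. A small, hypothetical sketch of downstream code distinguishing the two SQL error types under this convention; the `describe_sql_failure` name and the `Standard.Database.Errors` import path are invented for illustration (inside the project the patch imports these via `from project.Errors import ...`):

    from Standard.Base import all
    from Standard.Database.Errors import SQL_Error, SQL_Timeout

    # Build a test error the same way `throw_sql_error` does above.
    failing_query = SQL_Error.throw_sql_error "Connection refused"

    # Handle a timeout separately from a generic SQL error; unmatched
    # errors pass through the first `catch` to the second.
    describe_sql_failure result =
        from_timeout = result . catch SQL_Timeout error-> "Timed out: " + error.to_display_text
        from_timeout . catch SQL_Error error-> "SQL failed: " + error.to_display_text

The two-step `catch` is only a sketch of the dataflow-error handling this rename implies; it assumes `to_display_text` is available on both error types, as shown for `SQL_Timeout` above.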
@@ -32,7 +32,7 @@ make_expression aggregate dialect = case aggregate of Group_By c _ -> c.expression Count _ -> SQL_Expression.Operation "COUNT_ROWS" [] - Count_Distinct columns _ ignore_nothing -> if columns.is_empty then Error.throw (Illegal_Argument_Error_Data "Count_Distinct must have at least one column.") else + Count_Distinct columns _ ignore_nothing -> if columns.is_empty then Error.throw (Illegal_Argument.Error "Count_Distinct must have at least one column.") else case ignore_nothing of True -> SQL_Expression.Operation "COUNT_DISTINCT" (columns.map .expression) False -> SQL_Expression.Operation "COUNT_DISTINCT_INCLUDE_NULL" (columns.map .expression) @@ -43,14 +43,14 @@ make_expression aggregate dialect = Percentile p c _ -> SQL_Expression.Operation "PERCENTILE" [SQL_Expression.Constant SQL_Type.double p, c.expression] Mode c _ -> SQL_Expression.Operation "MODE" [c.expression] First c _ ignore_nothing order_by -> case is_non_empty_selector order_by of - False -> Error.throw (Unsupported_Database_Operation_Error_Data "`First` aggregation requires at least one `order_by` column.") + False -> Error.throw (Unsupported_Database_Operation.Error "`First` aggregation requires at least one `order_by` column.") True -> order_bys = order_by.columns.map c-> dialect.prepare_order_descriptor c.column.as_internal c.direction Text_Ordering.Default case ignore_nothing of False -> SQL_Expression.Operation "FIRST" [c.expression]+order_bys True -> SQL_Expression.Operation "FIRST_NOT_NULL" [c.expression]+order_bys Last c _ ignore_nothing order_by -> case is_non_empty_selector order_by of - False -> Error.throw (Unsupported_Database_Operation_Error_Data "`Last` aggregation requires at least one `order_by` column.") + False -> Error.throw (Unsupported_Database_Operation.Error "`Last` aggregation requires at least one `order_by` column.") True -> order_bys = order_by.columns.map c-> dialect.prepare_order_descriptor c.column.as_internal c.direction Text_Ordering.Default case ignore_nothing of diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso index 486b200e2f01..15244ca578f4 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso @@ -11,7 +11,7 @@ import project.Internal.IR.Nulls_Order.Nulls_Order import project.Internal.IR.Query.Query from project.Data.SQL import code -from project.Errors import Unsupported_Database_Operation_Error_Data +from project.Errors import Unsupported_Database_Operation type Internal_Dialect @@ -55,7 +55,7 @@ make_binary_op name = op = " " + name + " " ((arguments.at 0) ++ op ++ (arguments.at 1)).paren False -> - Error.throw <| Illegal_State_Error_Data ("Invalid amount of arguments for operation " + name) + Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation " + name) ## PRIVATE @@ -70,7 +70,7 @@ make_unary_op name = True -> (code name+" ")++(arguments.at 0) . 
paren False -> - Error.throw <| Illegal_State_Error_Data ("Invalid amount of arguments for operation " + name) + Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation " + name) ## PRIVATE @@ -84,7 +84,7 @@ make_unary_op name = lift_unary_op : Text -> (Builder -> Builder) -> [Text, (Vector Builder -> Builder)] lift_unary_op name function = generator = arguments -> case arguments.length == 1 of - False -> Error.throw <| Illegal_State_Error_Data ("Invalid amount of arguments for operation " + name + ".") + False -> Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation " + name + ".") True -> function (arguments.at 0) [name, generator] @@ -100,7 +100,7 @@ lift_unary_op name function = lift_binary_op : Text -> (Builder -> Builder -> Builder) -> [Text, (Vector Builder -> Builder)] lift_binary_op name function = generator = arguments -> case arguments.length == 2 of - False -> Error.throw <| Illegal_State_Error_Data ("Invalid amount of arguments for operation " + name + ".") + False -> Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation " + name + ".") True -> function (arguments.at 0) (arguments.at 1) [name, generator] @@ -140,7 +140,7 @@ make_function name = make_constant : Text -> Vector Builder -> Builder make_constant sql_code = arguments -> - if arguments.not_empty then Error.throw <| Illegal_State_Error_Data "No arguments were expected" else + if arguments.not_empty then Error.throw <| Illegal_State.Error "No arguments were expected" else code sql_code ## PRIVATE @@ -196,7 +196,7 @@ make_iif arguments = case arguments.length of when_false = arguments.at 2 (code "CASE WHEN" ++ expr ++ " THEN " ++ when_true ++ " WHEN " ++ expr ++ " IS NULL THEN NULL ELSE " ++ when_false ++ " END").paren _ -> - Error.throw <| Illegal_State_Error_Data ("Invalid amount of arguments for operation IIF") + Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation IIF") ## PRIVATE make_between : Vector Builder -> Builder @@ -207,12 +207,12 @@ make_between arguments = case arguments.length of upper = arguments.at 2 (expr ++ " BETWEEN " ++ lower ++ " AND " ++ upper).paren _ -> - Error.throw <| Illegal_State_Error_Data ("Invalid amount of arguments for operation BETWEEN") + Error.throw <| Illegal_State.Error ("Invalid amount of arguments for operation BETWEEN") ## PRIVATE make_is_in : Vector Builder -> Builder make_is_in arguments = case arguments.length of - 0 -> Error.throw <| Illegal_State_Error_Data ("The operation IS_IN requires at least one argument.") + 0 -> Error.throw <| Illegal_State.Error ("The operation IS_IN requires at least one argument.") ## If only the self argument is provided, no value will ever be in the empty list, so we just short circuit to false. `IN ()` would be more meaningful, but it is a syntax error. 1 -> code 'FALSE' . 
paren @@ -234,7 +234,7 @@ make_is_in_column arguments = case arguments.length of is_in = code "COALESCE(" ++ expr ++ " IN (" ++ in_query ++ "), FALSE)" has_nulls = has_nulls_query.paren ++ " = TRUE" code "CASE WHEN " ++ expr ++ " IS NULL THEN " ++ has_nulls ++ " ELSE " ++ is_in ++ " END" - _ -> Error.throw <| Illegal_State_Error_Data ("The operation IS_IN_COLUMN requires at exactly 3 arguments: the expression, the IN subquery, the subquery checking for nulls.") + _ -> Error.throw <| Illegal_State.Error ("The operation IS_IN_COLUMN requires at exactly 3 arguments: the expression, the IN subquery, the subquery checking for nulls.") ## PRIVATE @@ -249,7 +249,7 @@ generate_expression dialect expr = case expr of dialect.wrap_identifier origin ++ '.' ++ dialect.wrap_identifier name SQL_Expression.Constant sql_type value -> SQL.interpolation sql_type value SQL_Expression.Operation kind arguments -> - op = dialect.operation_map.get_or_else kind (Error.throw <| Unsupported_Database_Operation_Error_Data kind) + op = dialect.operation_map.get_or_else kind (Error.throw <| Unsupported_Database_Operation.Error kind) parsed_args = arguments.map (generate_expression dialect) op parsed_args query : Query -> generate_query dialect query @@ -277,9 +277,9 @@ alias dialect name = - from_spec: A description of the FROM clause. generate_from_part : Internal_Dialect -> From_Spec -> Builder generate_from_part dialect from_spec = case from_spec of - From_Spec.From_Table name as_name -> + From_Spec.Table name as_name -> dialect.wrap_identifier name ++ alias dialect as_name - From_Spec.From_Query raw_sql as_name -> + From_Spec.Query raw_sql as_name -> code raw_sql . paren ++ alias dialect as_name From_Spec.Join kind left_spec right_spec on -> left = generate_from_part dialect left_spec @@ -373,7 +373,7 @@ generate_query dialect query = case query of code "SELECT * " ++ generate_select_context dialect ctx Query.Insert table_name pairs -> generate_insert_query dialect table_name pairs - _ -> Error.throw <| Unsupported_Database_Operation_Error_Data "Unsupported query type." + _ -> Error.throw <| Unsupported_Database_Operation.Error "Unsupported query type." ## PRIVATE Arguments: @@ -399,7 +399,7 @@ generate_query dialect query = case query of is escaped by doubling each occurrence. make_concat make_raw_concat_expr make_contains_expr has_quote args = expected_args = if has_quote then 5 else 4 - if args.length != expected_args then Error.throw (Illegal_State_Error_Data "Unexpected number of arguments for the concat operation.") else + if args.length != expected_args then Error.throw (Illegal_State.Error "Unexpected number of arguments for the concat operation.") else expr = args.at 0 separator = args.at 1 prefix = args.at 2 diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Helpers.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Helpers.enso index 7fac882aa76d..17a76d975593 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Helpers.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Helpers.enso @@ -3,6 +3,8 @@ from Standard.Base import all import project.Data.Table.Table import project.Data.Column.Column +from project.Errors import Unsupported_Name + polyglot java import java.util.regex.Pattern ## PRIVATE @@ -46,26 +48,6 @@ unify_vector_singleton x = case x of _ : Vector -> x _ -> [x] -## UNSTABLE - - Signals that a name for a column or table is not supported. - - Arguments: - - text: The name that is not supported. 
- - Currently the names can only include ASCII letters, numbers and the - underscore. This is a temporary limitation simplifying name handling. It will - be removed in a future version. -type Unsupported_Name - Error text - - ## PRIVATE - - Creates a human-readable representation of the unsupported name error. - to_display_text : Text - to_display_text self = - "The name " + self.text + " is not currently supported by the Database library." - ## PRIVATE This is used to check if the new name is safe for use in SQL queries. @@ -78,7 +60,7 @@ type Unsupported_Name SQL-safe names for columns. # TODO [RW] better name handling in Tables (#1513) -ensure_name_is_sane : Text -> Boolean ! Unsupported_Name_Error +ensure_name_is_sane : Text -> Boolean ! Unsupported_Name ensure_name_is_sane name = is_safe = Pattern.matches "[A-Za-z_0-9]+" name diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Context.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Context.enso index 76df63611669..a74578e038a1 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Context.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Context.enso @@ -19,7 +19,7 @@ type Context - alias: An alias name to use for table within the query. for_table : Text -> Text -> Context for_table table_name alias=table_name = - Context.Value (From_Spec.From_Table table_name alias) [] [] [] [] Nothing + Context.Value (From_Spec.Table table_name alias) [] [] [] [] Nothing ## PRIVATE @@ -30,7 +30,7 @@ type Context - alias: An alias name to use for table within the query. for_query : Text -> Text -> Context for_query raw_sql alias = - Context.Value (From_Spec.From_Query raw_sql alias) [] [] [] [] Nothing + Context.Value (From_Spec.Query raw_sql alias) [] [] [] [] Nothing ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/From_Spec.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/From_Spec.enso index 0825ec425428..fe4ba24a35df 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/From_Spec.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/From_Spec.enso @@ -19,7 +19,7 @@ type From_Spec parts of the query, this is especially useful for example in self-joins, allowing to differentiate between different instances of the same table. - From_Table (table_name : Text) (alias : Text) + Table (table_name : Text) (alias : Text) ## PRIVATE @@ -31,7 +31,7 @@ type From_Spec parts of the query, this is especially useful for example in self-joins, allowing to differentiate between different instances of the same table. 
- From_Query (raw_sql : Text) (alias : Text) + Query (raw_sql : Text) (alias : Text) ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso index 0f20373c4709..24d06e1108a2 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso @@ -11,7 +11,7 @@ import project.Internal.Base_Generator import project.Data.Table.Table as Database_Table -from project.Errors import SQL_Error, SQL_Error_Data, SQL_Timeout_Error, SQL_Timeout_Error_Data +from project.Errors import SQL_Error, SQL_Timeout polyglot java import java.util.Properties @@ -112,10 +112,10 @@ type JDBC_Connection columns = table.columns check_rows updates_array expected_size = updates = Vector.from_polyglot_array updates_array - if updates.length != expected_size then Panic.throw <| Illegal_State_Error "The batch update unexpectedly affected "+updates.length.to_text+" rows instead of "+expected_size.to_text+"." else + if updates.length != expected_size then Panic.throw <| Illegal_State.Error "The batch update unexpectedly affected "+updates.length.to_text+" rows instead of "+expected_size.to_text+"." else updates.each affected_rows-> if affected_rows != 1 then - Panic.throw <| Illegal_State_Error "A single update within the batch unexpectedly affected "+affected_rows.to_text+" rows." + Panic.throw <| Illegal_State.Error "A single update within the batch unexpectedly affected "+affected_rows.to_text+" rows." 0.up_to num_rows . each row_id-> values = columns.map col-> col.at row_id holes = values.zip db_types @@ -161,13 +161,13 @@ close_connection connection = Arguments: - action: The computation to execute. This computation may throw SQL errors. -handle_sql_errors : Any -> (Text | Nothing) -> Any ! (SQL_Error | SQL_Timeout_Error) +handle_sql_errors : Any -> (Text | Nothing) -> Any ! (SQL_Error | SQL_Timeout) handle_sql_errors ~action related_query=Nothing = Panic.catch SQLException action caught_panic-> exc = caught_panic.payload.cause case Java.is_instance exc SQLTimeoutException of - True -> Error.throw (SQL_Timeout_Error_Data exc related_query) - False -> Error.throw (SQL_Error_Data exc related_query) + True -> Error.throw (SQL_Timeout.Error exc related_query) + False -> Error.throw (SQL_Error.Error exc related_query) ## PRIVATE Sets values inside of a prepared statement. diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso index 5c00a3a5e06b..97a76a419f20 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso @@ -140,7 +140,7 @@ type Postgres_Connection - batch_size: Specifies how many rows should be uploaded in a single batch. 
upload_table : Text -> Materialized_Table -> Boolean -> Integer -> Database_Table - upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State_Error <| + upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State.Error <| self.connection.upload_table name table temporary batch_size ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso index c70e69d9d077..354413272957 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso @@ -15,7 +15,7 @@ import project.Internal.IR.Nulls_Order.Nulls_Order import project.Internal.IR.Query.Query from project.Data.SQL import code -from project.Errors import Unsupported_Database_Operation_Error_Data +from project.Errors import Unsupported_Database_Operation ## PRIVATE @@ -158,7 +158,7 @@ first_last_aggregators = [["FIRST", first], ["FIRST_NOT_NULL", first_not_null], ["LAST", last], ["LAST_NOT_NULL", last_not_null]] make_first_aggregator reverse ignore_null args = - if args.length < 2 then Error.throw (Illegal_State_Error_Data "Insufficient number of arguments for the operation.") else + if args.length < 2 then Error.throw (Illegal_State.Error "Insufficient number of arguments for the operation.") else result_expr = args.head order_bys = args.tail @@ -192,7 +192,7 @@ concat_ops = ## PRIVATE -agg_count_distinct args = if args.is_empty then (Error.throw (Illegal_Argument_Error_Data "COUNT_DISTINCT requires at least one argument.")) else +agg_count_distinct args = if args.is_empty then (Error.throw (Illegal_Argument.Error "COUNT_DISTINCT requires at least one argument.")) else case args.length == 1 of True -> ## A single null value will be skipped. @@ -236,7 +236,7 @@ make_order_descriptor internal_column sort_direction text_ordering = case internal_column.sql_type.is_likely_text of True -> ## In the future we can modify this error to suggest using a custom defined collation. - if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation_Error_Data "Natural ordering is currently not supported. You may need to materialize the Table to perform this operation.") else + if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "Natural ordering is currently not supported. You may need to materialize the Table to perform this operation.") else case text_ordering.case_sensitivity of Nothing -> Order_Descriptor.Value internal_column.expression sort_direction nulls_order=nulls collation=Nothing @@ -244,7 +244,7 @@ make_order_descriptor internal_column sort_direction text_ordering = Order_Descriptor.Value internal_column.expression sort_direction nulls_order=nulls collation="ucs_basic" Case_Sensitivity.Insensitive locale -> case locale == Locale.default of False -> - Error.throw (Unsupported_Database_Operation_Error_Data "Case insensitive ordering with custom locale is currently not supported. You may need to materialize the Table to perform this operation.") + Error.throw (Unsupported_Database_Operation.Error "Case insensitive ordering with custom locale is currently not supported. 
You may need to materialize the Table to perform this operation.") True -> upper = SQL_Expression.Operation "UPPER" [internal_column.expression] folded_expression = SQL_Expression.Operation "LOWER" [upper] diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso index 3e8349a3ca58..8cfb681b9de9 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso @@ -133,7 +133,7 @@ type SQLite_Connection - batch_size: Specifies how many rows should be uploaded in a single batch. upload_table : Text -> Materialized_Table -> Boolean -> Integer -> Database_Table - upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State_Error <| + upload_table self name table temporary=True batch_size=1000 = Panic.recover Illegal_State.Error <| self.connection.upload_table name table temporary batch_size ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso index 5796a670d507..f1e630f4c3e4 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso @@ -12,7 +12,7 @@ import project.Internal.IR.Order_Descriptor.Order_Descriptor import project.Internal.IR.Query.Query from project.Data.SQL import code -from project.Errors import Unsupported_Database_Operation_Error_Data +from project.Errors import Unsupported_Database_Operation ## PRIVATE @@ -58,7 +58,7 @@ type SQLite_Dialect prepare_order_descriptor : Internal_Column -> Sort_Direction -> Text_Ordering -> Order_Descriptor prepare_order_descriptor self internal_column sort_direction text_ordering = case internal_column.sql_type.is_likely_text of True -> - if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation_Error_Data "Natural ordering is not supported by the SQLite backend. You may need to materialize the Table to perform this operation.") else + if text_ordering.sort_digits_as_numbers then Error.throw (Unsupported_Database_Operation.Error "Natural ordering is not supported by the SQLite backend. You may need to materialize the Table to perform this operation.") else case text_ordering.case_sensitivity of Nothing -> Order_Descriptor.Value internal_column.expression sort_direction collation=Nothing @@ -66,7 +66,7 @@ type SQLite_Dialect Order_Descriptor.Value internal_column.expression sort_direction collation="BINARY" Case_Sensitivity.Insensitive locale -> case locale == Locale.default of False -> - Error.throw (Unsupported_Database_Operation_Error_Data "Case insensitive ordering with custom locale is not supported by the SQLite backend. You may need to materialize the Table to perform this operation.") + Error.throw (Unsupported_Database_Operation.Error "Case insensitive ordering with custom locale is not supported by the SQLite backend. 
You may need to materialize the Table to perform this operation.") True -> Order_Descriptor.Value internal_column.expression sort_direction collation="NOCASE" False -> @@ -110,7 +110,7 @@ resolve_target_sql_type aggregate = case aggregate of ## PRIVATE unsupported name = - Error.throw (Unsupported_Database_Operation_Error_Data name+" is not supported by SQLite backend. You may need to materialize the table and perform the operation in-memory.") + Error.throw (Unsupported_Database_Operation.Error name+" is not supported by SQLite backend. You may need to materialize the table and perform the operation in-memory.") ## PRIVATE agg_count_is_null = Base_Generator.lift_unary_op "COUNT_IS_NULL" arg-> @@ -154,7 +154,7 @@ first_last_aggregators = ## PRIVATE window_aggregate window_type ignore_null args = - if args.length < 2 then Error.throw (Illegal_State_Error_Data "Insufficient number of arguments for the operation.") else + if args.length < 2 then Error.throw (Illegal_State.Error "Insufficient number of arguments for the operation.") else result_expr = args.head order_exprs = args.tail @@ -174,7 +174,7 @@ concat_ops = ## PRIVATE agg_count_distinct args = case args.length == 1 of True -> code "COUNT(DISTINCT (" ++ args.first ++ "))" - False -> Error.throw (Illegal_Argument_Error_Data "COUNT_DISTINCT supports only single arguments in SQLite.") + False -> Error.throw (Illegal_Argument.Error "COUNT_DISTINCT supports only single arguments in SQLite.") ## PRIVATE agg_count_distinct_include_null args = case args.length == 1 of @@ -183,7 +183,7 @@ agg_count_distinct_include_null args = case args.length == 1 of count = code "COUNT(DISTINCT " ++ arg ++ ")" all_nulls_case = code "CASE WHEN COUNT(CASE WHEN " ++ arg ++ "IS NULL THEN 1 END) > 0 THEN 1 ELSE 0 END" count ++ " + " ++ all_nulls_case - False -> Error.throw (Illegal_Argument_Error_Data "COUNT_DISTINCT supports only single arguments in SQLite.") + False -> Error.throw (Illegal_Argument.Error "COUNT_DISTINCT supports only single arguments in SQLite.") ## PRIVATE starts_with = Base_Generator.lift_binary_op "starts_with" str-> sub-> diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso index 44cc0309fcc3..48942374eb74 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso @@ -1,24 +1,33 @@ -import project.Connection.Database -import project.Connection.Credentials.Credentials import project.Connection.Client_Certificate.Client_Certificate -import project.Connection.SSL_Mode.SSL_Mode import project.Connection.Connection_Options.Connection_Options - -import project.Connection.Postgres_Options -import project.Connection.SQLite_Options -import project.Connection.Redshift_Options +import project.Connection.Credentials.Credentials +import project.Connection.Database +import project.Connection.Postgres_Options.Postgres_Options +import project.Connection.Redshift_Options.Redshift_Options +import project.Connection.Redshift_Options.AWS_Credential +import project.Connection.SQLite_Options.SQLite_Options +import project.Connection.SQLite_Options.In_Memory +import project.Connection.SSL_Mode.SSL_Mode import project.Data.SQL_Query.SQL_Query -export project.Connection.Credentials.Credentials +from project.Connection.Postgres_Options.Postgres_Options import Postgres +from project.Connection.Redshift_Options.Redshift_Options import Redshift +from project.Connection.SQLite_Options.SQLite_Options import SQLite + 
-export project.Connection.SSL_Mode.SSL_Mode export project.Connection.Client_Certificate.Client_Certificate export project.Connection.Connection_Options.Connection_Options - +export project.Connection.Credentials.Credentials export project.Connection.Database +export project.Connection.Postgres_Options.Postgres_Options +export project.Connection.Redshift_Options.Redshift_Options +export project.Connection.Redshift_Options.AWS_Credential +export project.Connection.SQLite_Options.SQLite_Options +export project.Connection.SQLite_Options.In_Memory +export project.Connection.SSL_Mode.SSL_Mode export project.Data.SQL_Query.SQL_Query -from project.Connection.Postgres_Options export Postgres_Options, Postgres -from project.Connection.SQLite_Options export SQLite_Options, In_Memory, SQLite -from project.Connection.Redshift_Options export Redshift_Options, Redshift, AWS_Credential +from project.Connection.Postgres_Options.Postgres_Options export Postgres +from project.Connection.Redshift_Options.Redshift_Options export Redshift +from project.Connection.SQLite_Options.SQLite_Options export SQLite diff --git a/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso index 773df9e29f84..ffc5c2988188 100644 --- a/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso @@ -1,8 +1,8 @@ from Standard.Base import all -import Standard.Base.Network.Http -import Standard.Base.Network.Http.Response.Response as Http_Response -import Standard.Base.Network.URI +import Standard.Base.Network.HTTP.Response.Response +import Standard.Base.Network.HTTP.Response_Body.Response_Body + import Standard.Base.Data.Text.Regex.Engine.Default as Default_Engine import Standard.Base.Data.Text.Regex.Engine.Default.Match as Default_Engine_Match @@ -41,7 +41,7 @@ xls = url = "https://enso-data-samples.s3.us-west-1.amazonaws.com/spreadsheet.xls" file = enso_project.data / 'spreadsheet.xls' if file.exists.not then - Http.fetch url . to_file file + HTTP.fetch url . to_file file file ## An example XLSX file for experimenting with Table and its APIs. @@ -56,7 +56,7 @@ xlsx = url = "https://enso-data-samples.s3.us-west-1.amazonaws.com/spreadsheet.xlsx" file = enso_project.data / 'spreadsheet.xlsx' if file.exists.not then - Http.fetch url . to_file file + HTTP.fetch url . to_file file file ## A file that is used for writing temporary data as part of tests. @@ -142,20 +142,20 @@ geo_data_url = "https://enso-data-samples.s3.us-west-1.amazonaws.com/Bus_Stop_Be ! Makes a Network Request Calling this method will cause Enso to make a network request to a data endpoint. -get_response : Http_Response -get_response = Http.get geo_data_url +get_response : Response +get_response = HTTP.fetch geo_data_url ## Gets HTTP data from a network endpoint. ! Makes a Network Request Calling this method will cause Enso to make a network request to a data endpoint. -get_geo_data : Http.Response.Body -get_geo_data = Http.fetch geo_data_url +get_geo_data : Response_Body +get_geo_data = HTTP.fetch geo_data_url ## A simple HTTP client for examples. -http_client : Http -http_client = Http.new (timeout = Duration.new seconds=30) +http_client : HTTP +http_client = HTTP.new (timeout = Duration.new seconds=30) ## A basic URI for examples. 
uri : URI @@ -173,7 +173,7 @@ image_file = url = "https://upload.wikimedia.org/wikipedia/commons/thumb/e/e9/Hue_alpha_falloff.png/320px-Hue_alpha_falloff.png" file = enso_project.data / "image.png" if file.exists.not then - Http.fetch url . to_file file + HTTP.fetch url . to_file file file ## A PNG image. @@ -273,4 +273,4 @@ match : Default_Engine_Match match = engine = Default_Engine.new pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] - pattern.match "aa ab abc a bc bcd" mode=Regex_Mode.First + pattern.match "aa ab abc a bc bcd" mode=Matching_Mode.First diff --git a/distribution/lib/Standard/Searcher/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Searcher/0.0.0-dev/src/Main.enso index 1baf2fa27241..915c11415f62 100644 --- a/distribution/lib/Standard/Searcher/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Searcher/0.0.0-dev/src/Main.enso @@ -17,7 +17,7 @@ from Standard.Base import all Creating a node containing the text "Hello, Enso!". "Hello, Enso!" -text_input = Error.unimplemented "This function should not be called." +text_input = Unimplemented.throw "This function should not be called." ## ALIAS Number Input @@ -35,7 +35,7 @@ text_input = Error.unimplemented "This function should not be called." Creating a node containing the decimal 0.0. 0.0 -number_input = Error.unimplemented "This function should not be called." +number_input = Unimplemented.throw "This function should not be called." ## ALIAS Table Input @@ -52,5 +52,5 @@ number_input = Error.unimplemented "This function should not be called." column_1 = ["name", "Enso"] column_2 = ["stars", 5000] table = Table.new [column_1, column_2] -table_input = Error.unimplemented "This function should not be called." +table_input = Unimplemented.throw "This function should not be called." diff --git a/distribution/lib/Standard/Searcher/0.0.0-dev/src/Network.enso b/distribution/lib/Standard/Searcher/0.0.0-dev/src/Network.enso index f61926e2da0a..fd932951be3e 100644 --- a/distribution/lib/Standard/Searcher/0.0.0-dev/src/Network.enso +++ b/distribution/lib/Standard/Searcher/0.0.0-dev/src/Network.enso @@ -7,40 +7,40 @@ > Example Download a file. NOTE: This example will make a network request. - import Standard.Base.Network.Http + import Standard.Base.Network.HTTP.HTTP import Standard.Examples example_fetch = out_file = Examples.scratch_file - res = Http.fetch "http://httpbin.org/bytes/1024" . to_file out_file + res = HTTP.fetch "http://httpbin.org/bytes/1024" . to_file out_file > Example Send authenticated Get request (note the use of TLS). NOTE: This example will make a network request. - import Standard.Base.Network.Http - import Standard.Base.Network.Http.Header + import Standard.Base.Network.HTTP.HTTP + import Standard.Base.Network.HTTP.Header example_get = headers = [Header.authorization_basic "user" "pass"] - Http.get "https://httpbin.org/basic-auth/user/pass" headers + HTTP.fetch "https://httpbin.org/basic-auth/user/pass" headers > Example Send a Post request with binary data. NOTE: This example will make a network request. 
- import Standard.Base.Network.Http - import Standard.Base.Network.Http.Header - import Standard.Base.Network.Http.Request.Body + import Standard.Base.Network.HTTP.HTTP + import Standard.Base.Network.HTTP.Header.Header + import Standard.Base.Network.HTTP.Request_Body.Request_Body example_post = - body = Body.Bytes "Hello".utf_8 + body = Request_Body.Bytes "Hello".utf_8 header_binary = Header.content_type "application/octet-stream" - Http.post "http://httpbin.org/post" body [header_binary] + HTTP.new.post "http://httpbin.org/post" body [header_binary] > Example Parse URI text. - import Standard.Base.Network.URI + import Standard.Base.Network.URI.URI example_parse = URI.parse "http://example.com" diff --git a/distribution/lib/Standard/Searcher/0.0.0-dev/src/Network/Http.enso b/distribution/lib/Standard/Searcher/0.0.0-dev/src/Network/Http.enso index 45db8c121df4..b9a58181b118 100644 --- a/distribution/lib/Standard/Searcher/0.0.0-dev/src/Network/Http.enso +++ b/distribution/lib/Standard/Searcher/0.0.0-dev/src/Network/Http.enso @@ -8,40 +8,40 @@ > Example Download a file. NOTE: This example will make a network request. - import Standard.Base.Network.Http + import Standard.Base.Network.HTTP.HTTP import Standard.Examples example_fetch = out_file = Examples.scratch_file - res = Http.fetch "http://httpbin.org/bytes/1024" . to_file out_file + res = HTTP.fetch "http://httpbin.org/bytes/1024" . to_file out_file > Example Send authenticated Get request (note the use of TLS). NOTE: This example will make a network request. - import Standard.Base.Network.Http - import Standard.Base.Network.Http.Header + import Standard.Base.Network.HTTP.HTTP + import Standard.Base.Network.HTTP.Header example_get = headers = [Header.authorization_basic "user" "pass"] - Http.get "https://httpbin.org/basic-auth/user/pass" headers + HTTP.fetch "https://httpbin.org/basic-auth/user/pass" headers > Example Send a Post request with binary data. NOTE: This example will make a network request. - import Standard.Base.Network.Http - import Standard.Base.Network.Http.Header - import Standard.Base.Network.Http.Request.Body + import Standard.Base.Network.HTTP.HTTP + import Standard.Base.Network.HTTP.Header.Header + import Standard.Base.Network.HTTP.Request_Body.Request_Body example_post = - body = Body.Bytes "Hello".utf_8 + body = Request_Body.Bytes "Hello".utf_8 header_binary = Header.content_type "application/octet-stream" - Http.post "http://httpbin.org/post" body [header_binary] + HTTP.new.post "http://httpbin.org/post" body [header_binary] > Example Parse URI text. - import Standard.Base.Network.URI + import Standard.Base.Network.URI.URI example_parse = URI.parse "http://example.com" diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso index efa9d9dcadd3..e529d79a672d 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso @@ -541,7 +541,7 @@ type Column rs = s.iif true_val false_val Column.Column_Data (Java_Column.new "Result" ix rs) - _ -> Error.throw (Illegal_Argument_Error "`iif` can only be used with boolean columns.") + _ -> Error.throw (Illegal_Argument.Error "`iif` can only be used with boolean columns.") ## Returns a column of first non-`Nothing` value on each row of `self` and `values` list. @@ -779,7 +779,7 @@ type Column example_contains = Examples.text_column_1.like "F%." 
like : Column | Text -> Column like self pattern = - run_vectorized_binary_op self "like" (_ -> _ -> Error.throw (Illegal_State_Error "The `Like` operation should only be used on Text columns.")) pattern + run_vectorized_binary_op self "like" (_ -> _ -> Error.throw (Illegal_State.Error "The `Like` operation should only be used on Text columns.")) pattern ## Checks for each element of the column if it is contained within the provided vector or column. @@ -802,7 +802,7 @@ type Column case self.java_column.getStorage.isOpVectorized op_name of True -> fallback_fn _ _ = - Panic.throw (Illegal_State_Error_Data "Impossible: This is a bug in the Standard.Table library.") + Panic.throw (Illegal_State.Error "Impossible: This is a bug in the Standard.Table library.") true_vector = case vector of _ : Array -> Vector.from_polyglot_array vector _ : Vector -> vector @@ -1027,7 +1027,7 @@ type Column storage_type self = tp = self.java_column.getStorage.getType Storage.types.at tp . catch Index_Out_Of_Bounds_Error _-> - Panic.throw (Illegal_State_Error "Unknown storage type: "+tp.to_text) + Panic.throw (Illegal_State.Error "Unknown storage type: "+tp.to_text) ## UNSTABLE TODO this is a prototype that will be revisited later on value_type : Value_Type diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Data_Formatter.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Data_Formatter.enso index cdbee2fb2948..24c08c93392a 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Data_Formatter.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Data_Formatter.enso @@ -174,7 +174,7 @@ type Data_Formatter Date -> self.make_date_parser Date_Time -> self.make_date_time_parser Time_Of_Day -> self.make_time_of_day_parser - _ -> Error.throw (Illegal_Argument_Error_Data "Unsupported datatype: "+datatype.to_text) + _ -> Error.throw (Illegal_Argument.Error "Unsupported datatype: "+datatype.to_text) ## PRIVATE get_specific_type_parsers self = @@ -195,23 +195,23 @@ type Data_Formatter ## PRIVATE make_date_formatter self = - if self.date_formats.is_empty then Error.throw (Illegal_Argument_Error_Data "Formatting dates requires at least one entry in the `date_formats` parameter") else + if self.date_formats.is_empty then Error.throw (Illegal_Argument.Error "Formatting dates requires at least one entry in the `date_formats` parameter") else DateFormatter.new self.date_formats.first self.datetime_locale.java_locale ## PRIVATE make_time_of_day_formatter self = - if self.time_formats.is_empty then Error.throw (Illegal_Argument_Error_Data "Formatting times requires at least one entry in the `time_formats` parameter") else + if self.time_formats.is_empty then Error.throw (Illegal_Argument.Error "Formatting times requires at least one entry in the `time_formats` parameter") else TimeFormatter.new self.time_formats.first self.datetime_locale.java_locale ## PRIVATE make_date_time_formatter self = - if self.datetime_formats.is_empty then Error.throw (Illegal_Argument_Error_Data "Formatting date-times requires at least one entry in the `datetime_formats` parameter") else + if self.datetime_formats.is_empty then Error.throw (Illegal_Argument.Error "Formatting date-times requires at least one entry in the `datetime_formats` parameter") else DateTimeFormatter.new self.datetime_formats.first self.datetime_locale.java_locale ## PRIVATE make_boolean_formatter self = - if self.true_values.is_empty then Error.throw (Illegal_Argument_Error_Data "Formatting booleans requires at least one entry in the 
`true_values` parameter") else - if self.false_values.is_empty then Error.throw (Illegal_Argument_Error_Data "Formatting booleans requires at least one entry in the `false_values` parameter") else + if self.true_values.is_empty then Error.throw (Illegal_Argument.Error "Formatting booleans requires at least one entry in the `true_values` parameter") else + if self.false_values.is_empty then Error.throw (Illegal_Argument.Error "Formatting booleans requires at least one entry in the `false_values` parameter") else BooleanFormatter.new self.true_values.first self.false_values.first ## PRIVATE @@ -225,7 +225,7 @@ type Data_Formatter ## PRIVATE make_auto_formatter self = # TODO The panic rethrow+recover is a workaround for the vector error propagation bug. - formatters = Panic.recover Illegal_Argument_Error_Data (self.get_specific_type_formatters.map Panic.rethrow) + formatters = Panic.recover Illegal_Argument.Error (self.get_specific_type_formatters.map Panic.rethrow) AnyObjectFormatter.new formatters.to_array ## PRIVATE diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index 4ba9bf480e03..2e3e8eb17b2a 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -1,13 +1,9 @@ from Standard.Base import all import Standard.Base.Data.Index_Sub_Range as Index_Sub_Range_Module -from Standard.Base.Data.Vector import handle_incomparable_value -import Standard.Base.Error.Common as Errors import Standard.Base.Data.Array_Proxy.Array_Proxy import Standard.Base.Data.Ordering.Comparator -import Standard.Base.Data.Text.Case import project.Data.Column.Column -from project.Data.Column import get_item_string import project.Data.Column_Name_Mapping.Column_Name_Mapping import project.Data.Column_Selector.Column_Selector import project.Data.Data_Formatter.Data_Formatter @@ -30,15 +26,14 @@ import project.Internal.Problem_Builder.Problem_Builder import project.Internal.Unique_Name_Strategy.Unique_Name_Strategy import project.Data.Expression.Expression import project.Data.Expression.Expression_Error +import project.Delimited.Delimited_Format.Delimited_Format -from project.Data.Column import get_item_string from project.Data.Column_Type_Selection import Column_Type_Selection, Auto -from project.Delimited.Delimited_Format import Delimited -from project.Internal.Filter_Condition_Helpers import make_filter_column from project.Internal.Rows_View import Rows_View from project.Errors import Missing_Input_Columns, Column_Indexes_Out_Of_Range, Duplicate_Type_Selector, No_Index_Set_Error, No_Such_Column_Error, No_Such_Column_Error_Data, No_Input_Columns_Selected, No_Output_Columns, Invalid_Value_Type -import Standard.Visualization +from project.Data.Column import get_item_string +from project.Internal.Filter_Condition_Helpers import make_filter_column polyglot java import org.enso.table.data.table.Table as Java_Table polyglot java import org.enso.table.data.table.Column as Java_Column @@ -185,18 +180,6 @@ type Table Json.from_pairs [['name', name], ['data', items]] Json.from_pairs [row_count, ['columns', cols]] . to_text - ## UNSTABLE - ADVANCED - - Guides the visualization system to display the most suitable graphical - representation for this table. - default_visualization : Visualization.Id.Id - default_visualization self = - cols = self.columns.map .name . 
map name-> name.to_case Case.Lower - if cols.contains "latitude" && cols.contains "longitude" then Visualization.Id.geo_map else - if cols.contains "x" && cols.contains "y" then Visualization.Id.scatter_plot else - Visualization.Id.table - ## Returns the column with the given name. Arguments: @@ -520,7 +503,7 @@ type Table aggregate self columns (on_problems=Report_Warning) = validated = Aggregate_Column_Helper.prepare_aggregate_columns columns self - on_problems.attach_problems_before validated.problems <| Illegal_Argument_Error.handle_java_exception <| + on_problems.attach_problems_before validated.problems <| Illegal_Argument.handle_java_exception <| java_key_columns = validated.key_columns.map .java_column index = self.java_table.indexFromColumns java_key_columns.to_array Comparator.new @@ -621,7 +604,7 @@ type Table table = Examples.inventory_table table.order_by (Sort_Column_Selector.By_Name ["total_stock", Sort_Column.Name "sold_stock" Sort_Direction.Descending]) - order_by : Vector Text | Sort_Column_Selector -> Text_Ordering -> Problem_Behavior -> Table ! Incomparable_Values_Error + order_by : Text | Vector Text | Sort_Column_Selector -> Text_Ordering -> Problem_Behavior -> Table ! Incomparable_Values_Error order_by self (columns = (Sort_Column_Selector.By_Name [(Sort_Column.Name (self.columns.at 0 . name))])) text_ordering=Text_Ordering.Default on_problems=Report_Warning = problem_builder = Problem_Builder.new columns_for_ordering = Table_Helpers.prepare_order_by self.columns columns problem_builder @@ -629,7 +612,7 @@ type Table selected_columns = columns_for_ordering.map c->c.column.java_column ordering = columns_for_ordering.map c->c.associated_selector.direction.to_sign comparator = Comparator.for_text_ordering text_ordering - java_table = Illegal_Argument_Error.handle_java_exception <| handle_incomparable_value <| + java_table = Illegal_Argument.handle_java_exception <| Incomparable_Values_Error.handle_errors <| self.java_table.orderBy selected_columns.to_array ordering.to_array comparator Table.Table_Data java_table @@ -668,8 +651,8 @@ type Table key_columns = Warning.map_warnings_and_errors warning_mapper <| self.columns_helper.select_columns selector=columns reorder=True on_problems=on_problems java_columns = key_columns.map .java_column - text_folding_strategy = Case.folding_strategy case_sensitivity - java_table = Illegal_Argument_Error.handle_java_exception <| + text_folding_strategy = Case_Sensitivity.folding_strategy case_sensitivity + java_table = Illegal_Argument.handle_java_exception <| self.java_table.distinct java_columns.to_array text_folding_strategy on_problems.attach_problems_after (Table.Table_Data java_table) <| problems = java_table.getProblems @@ -793,7 +776,7 @@ type Table Replace texts in quotes with parentheses in column at index 1. 
table.replace_text 1 '"(.*?)"' '($1)' matcher=Regex_Matcher.Regex_Matcher_Data - replace_text : (Text | Integer | Column_Selector) -> Text -> Text -> Matching_Mode | Regex_Mode -> (Text_Matcher | Regex_Matcher) -> Problem_Behavior -> Table + replace_text : (Text | Integer | Column_Selector) -> Text -> Text -> Matching_Mode.First | Matching_Mode.Last | Regex_Mode -> (Text_Matcher | Regex_Matcher) -> Problem_Behavior -> Table replace_text self columns=(Column_Selector.By_Index [0]) term="" new_text="" mode=Regex_Mode.All matcher=Text_Matcher.Case_Sensitive on_problems=Problem_Behavior.Report_Warning = if term.is_empty then self else problem_builder = Problem_Builder.new @@ -1288,10 +1271,10 @@ type Table is_group_by c = case c of Aggregate_Column.Group_By _ _ -> True _ -> False - if resolved_values.any is_group_by then problem_builder.report_other_warning (Illegal_Argument_Error_Data "Cannot use group_by for a cross_tab value.") + if resolved_values.any is_group_by then problem_builder.report_other_warning (Illegal_Argument.Error "Cannot use group_by for a cross_tab value.") validated_values = resolved_values.filter c->((c!=Nothing) && (is_group_by c).not) - on_problems.attach_problems_before (problem_builder.build_problemset) <| Illegal_Argument_Error.handle_java_exception <| + on_problems.attach_problems_before (problem_builder.build_problemset) <| Illegal_Argument.handle_java_exception <| java_key_columns = grouping.map .java_column index = self.java_table.indexFromColumns java_key_columns.to_array Comparator.new @@ -1384,7 +1367,7 @@ type Table Returns: - If an unsupported `File_Format` is specified, an - `Illegal_Argument_Error` is raised. + `Illegal_Argument` is raised. - If the path to the parent location cannot be found or the filename is invalid, a `File_Error.Not_Found` is raised. - If another IO error occurs, such as access denied, an @@ -1401,7 +1384,7 @@ type Table - `Auto_Detect`: The file format is determined by the provided file. - `Bytes` and `Plain_Text`: The Table does not support these types in the `write` function. If passed as format, an - `Illegal_Argument_Error` is raised. To write out the table as plain + `Illegal_Argument` is raised. To write out the table as plain text, the user needs to call the `Text.from Table` method and then use the `Text.write` function. @@ -1420,22 +1403,22 @@ type Table from Standard.Table import Excel example_to_xlsx = Examples.inventory_table.write (enso_project.data / "example_xlsx_output.xlsx") Excel - write : File|Text -> File_Format -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> Nothing ! Column_Mismatch | Illegal_Argument_Error | File_Error + write : File|Text -> File_Format -> Existing_File_Behavior -> Match_Columns -> Problem_Behavior -> Nothing ! 
Column_Mismatch | Illegal_Argument | File_Error write self path format=Auto_Detect on_existing_file=Existing_File_Behavior.Backup match_columns=Match_Columns.By_Name on_problems=Report_Warning = file = File.new path case format of _ : Auto_Detect -> base_format = format.get_format file - if base_format == Nothing then Error.throw (Errors.Unsupported_File_Type.Error ("No File_Format supports '" + file.extension + "'")) else + if base_format == Nothing then Error.throw (Unsupported_File_Type.Error ("No File_Format supports '" + file.extension + "'")) else self.write file format=base_format on_existing_file match_columns on_problems _ -> - Panic.catch Errors.No_Such_Method_Error_Data (format.write_table file self on_existing_file match_columns on_problems) _-> + Panic.catch No_Such_Method_Error_Data (format.write_table file self on_existing_file match_columns on_problems) _-> name = Meta.get_constructor_name (Meta.meta format).constructor - Error.throw (Errors.Illegal_Argument_Error_Data ("Saving a Table as " + name + " is not supported.")) + Error.throw (Illegal_Argument.Error ("Saving a Table as " + name + " is not supported.")) ## Creates a text representation of the table using the CSV format. to_csv : Text - to_csv self = Text.from self (Delimited delimiter=",") + to_csv self = Text.from self (Delimited_Format.Delimited delimiter=",") ## PRIVATE columns_helper : Table_Column_Helper @@ -1516,7 +1499,7 @@ slice_ranges table ranges = make_join_helpers left_table right_table = make_equals left right = Java_Join_Equals.new left.java_column right.java_column make_equals_ignore_case _ _ = - unimplemented "Conditions other than Equals are not implemented yet." + Unimplemented.throw "Conditions other than Equals are not implemented yet." make_between _ _ _ = - unimplemented "Conditions other than Equals are not implemented yet." + Unimplemented.throw "Conditions other than Equals are not implemented yet." Join_Helpers.Join_Condition_Resolver.Value (left_table.at _) (right_table.at _) make_equals make_equals_ignore_case make_between diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table_Conversions.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table_Conversions.enso index 6eaf7707761c..10ea7603cabc 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table_Conversions.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table_Conversions.enso @@ -1,16 +1,14 @@ from Standard.Base import all import project.Data.Table.Table +import project.Delimited.Delimited_Format.Delimited_Format import project.Delimited.Delimited_Reader import project.Delimited.Delimited_Writer -from project.Delimited.Delimited_Format import Delimited_Format, Delimited -from project.Errors import unimplemented +Table.from (that : Text) (format:Delimited_Format = Delimited_Format.Delimited '\t') (on_problems:Problem_Behavior=Report_Warning) = + if format.is_a Delimited_Format.Delimited then Delimited_Reader.read_text that format on_problems else + Unimplemented.throw "Table.from for fixed-width files is not yet implemented." -Table.from (that : Text) (format:Delimited_Format = Delimited '\t') (on_problems:Problem_Behavior=Report_Warning) = - if format.is_a Delimited then Delimited_Reader.read_text that format on_problems else - unimplemented "Table.from for fixed-width files is not yet implemented." 
- -Text.from (that : Table) (format:Delimited_Format = Delimited '\t') = - if format.is_a Delimited then Delimited_Writer.write_text that format else - unimplemented "Text.from for fixed-width files is not yet implemented." +Text.from (that : Table) (format:Delimited_Format = Delimited_Format.Delimited '\t') = + if format.is_a Delimited_Format.Delimited then Delimited_Writer.write_text that format else + Unimplemented.throw "Text.from for fixed-width files is not yet implemented." diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso index d53090520880..062840bfa8d1 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso @@ -7,9 +7,6 @@ import project.Delimited.Delimited_Reader import project.Delimited.Delimited_Writer import project.Delimited.Quote_Style.Quote_Style -from project.Delimited.Delimited_Format.Delimited_Format import Delimited -from project.Delimited.Delimited_Format.Delimited_Format export Delimited - ## Read delimited files such as CSVs into a Table. type Delimited_Format ## Read delimited files such as CSVs into a Table. @@ -20,7 +17,7 @@ type Delimited_Format Arguments: - delimiter: The delimiter character to split the file into columns. An - `Illegal_Argument_Error` error is returned if this is an empty string. + `Illegal_Argument` error is returned if this is an empty string. - encoding: The encoding to use when reading the file. - skip_rows: The number of rows to skip from the top of the file. - row_limit: The maximum number of rows to read from the file. This count @@ -54,8 +51,8 @@ type Delimited_Format for_file : File -> Delimited_Format | Nothing for_file file = case file.extension of - ".csv" -> Delimited ',' - ".tsv" -> Delimited '\t' + ".csv" -> Delimited_Format.Delimited ',' + ".tsv" -> Delimited_Format.Delimited '\t' _ -> Nothing ## Implements the `File.read` for this `File_Format` @@ -73,7 +70,7 @@ type Delimited_Format Note: This function is internal until such time as Atom cloning with modification is built into Enso. clone : Text -> Text -> (Boolean|Infer) -> Data_Formatter -> Boolean -> (Text|Nothing) -> (Text|Nothing) -> Delimited_Format clone self (quote_style=self.quote_style) (headers=self.headers) (value_formatter=self.value_formatter) (keep_invalid_rows=self.keep_invalid_rows) (line_endings=self.line_endings) (comment_character=self.comment_character) = - Delimited self.delimiter self.encoding self.skip_rows self.row_limit quote_style headers value_formatter keep_invalid_rows line_endings comment_character + Delimited_Format.Delimited self.delimiter self.encoding self.skip_rows self.row_limit quote_style headers value_formatter keep_invalid_rows line_endings comment_character ## Create a clone of this with specified quoting settings. 
with_quotes : Text -> Text -> Boolean -> Delimited_Format diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Reader.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Reader.enso index 8372c5c8905c..e2aa47f7a1ac 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Reader.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Reader.enso @@ -1,7 +1,5 @@ from Standard.Base import all -import Standard.Base.Error.Common as Errors import Standard.Base.System.File.Input_Stream -from Standard.Base.System.File import wrap_io_exception import project.Data.Table.Table import project.Data.Data_Formatter.Data_Formatter @@ -90,7 +88,7 @@ read_stream format stream on_problems max_columns=default_max_columns related_fi integer. read_from_reader : Delimited_Format -> Reader -> Problem_Behavior -> Integer -> Any read_from_reader format java_reader on_problems max_columns=4096 = - Illegal_Argument_Error.handle_java_exception <| handle_parsing_failure <| handle_parsing_exception <| + Illegal_Argument.handle_java_exception <| handle_parsing_failure <| handle_parsing_exception <| reader = prepare_reader java_reader format max_columns on_problems result_with_problems = reader.read parsing_problems = Vector.from_polyglot_array (result_with_problems.problems) . map translate_reader_problem @@ -106,7 +104,7 @@ prepare_reader java_reader format max_columns on_problems newline_override=Nothi Nothing -> -1 _ : Integer -> format.row_limit _ -> - Error.throw (Illegal_Argument_Error_Data "`row_limit` should be Integer or Nothing.") + Error.throw (Illegal_Argument.Error "`row_limit` should be Integer or Nothing.") warnings_as_errors = on_problems == Problem_Behavior.Report_Error quote_characters = case format.quote_style of Quote_Style.No_Quotes -> Pair.new Nothing Nothing @@ -166,7 +164,7 @@ type Detected_File_Metadata detect_metadata : File -> Delimited_Format -> Detected_File_Metadata detect_metadata file format = on_problems = Problem_Behavior.Ignore - result = handle_io_exception file <| Illegal_Argument_Error.handle_java_exception <| handle_parsing_failure <| handle_parsing_exception <| + result = handle_io_exception file <| Illegal_Argument.handle_java_exception <| handle_parsing_failure <| handle_parsing_exception <| trailing_line_separator = newline_at_eof file format.encoding has_trailing_line_separator = trailing_line_separator.is_nothing.not file.with_input_stream [File_Access.Read] stream-> @@ -231,7 +229,7 @@ handle_parsing_exception = ## PRIVATE handle_io_exception related_file ~action = Panic.catch_java IOException action java_exception-> - Error.throw (wrap_io_exception related_file java_exception) + Error.throw (File_Error.wrap_io_exception related_file java_exception) ## PRIVATE default_max_columns = 4096 diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Writer.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Writer.enso index 419a0ba0f0ef..5d393de88ad2 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Writer.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Writer.enso @@ -1,5 +1,4 @@ from Standard.Base import all -import Standard.Base.Error.Common as Errors import Standard.Base.System.File.Output_Stream import project.Data.Table.Table @@ -8,8 +7,9 @@ import project.Data.Match_Columns.Match_Columns import project.Data.Storage.Storage import project.Delimited.Delimited_Format.Delimited_Format import 
project.Delimited.Quote_Style.Quote_Style +import project.Delimited.Delimited_Reader +import project.Delimited.Delimited_Reader.Detected_Headers -from project.Delimited.Delimited_Reader import Detected_Headers, detect_metadata from project.Errors import Duplicate_Output_Column_Names, Invalid_Output_Column_Names, Invalid_Row, Mismatched_Quote, Parser_Error, Additional_Invalid_Rows, Column_Count_Mismatch, Column_Name_Mismatch polyglot java import org.enso.table.write.DelimitedWriter @@ -48,9 +48,9 @@ write_file table format file on_existing_file match_columns on_problems = If the file does not exist or is empty, it acts like a regular overwrite. append_to_file : Table -> Delimited_Format -> File -> Match_Columns -> Problem_Behavior -> Any append_to_file table format file match_columns on_problems = - Column_Name_Mismatch.handle_java_exception <| Column_Count_Mismatch.handle_java_exception <| Panic.recover Illegal_Argument_Error_Data <| + Column_Name_Mismatch.handle_java_exception <| Column_Count_Mismatch.handle_java_exception <| Panic.recover Illegal_Argument.Error <| inferring_format = format.with_line_endings Infer - metadata = detect_metadata file inferring_format + metadata = Delimited_Reader.detect_metadata file inferring_format preexisting_headers = metadata.headers effective_line_separator = case format.line_endings of Infer -> metadata.line_separator.if_nothing default_line_separator_for_writing.to_text @@ -58,7 +58,7 @@ append_to_file table format file match_columns on_problems = selected_separator = other_ending_style.to_text existing_separator = metadata.line_separator if existing_separator.is_nothing.not && (selected_separator != existing_separator) then - Panic.throw <| Illegal_Argument_Error_Data <| + Panic.throw <| Illegal_Argument.Error <| # Ensure that these are properly escaped once `to_text` meaning is changed. "The explicitly provided line endings (" + selected_separator.to_text + ") do not match the line endings in the file (" + existing_separator.to_text + ")." other_ending_style.to_text @@ -73,7 +73,7 @@ append_to_file table format file match_columns on_problems = ColumnMapper.mapColumnsByPosition table.java_table column_count Detected_Headers.None column_count -> case match_columns of Match_Columns.By_Name -> - Error.throw (Illegal_Argument_Error_Data "Cannot append by name when headers are not present in the existing data.") + Error.throw (Illegal_Argument.Error "Cannot append by name when headers are not present in the existing data.") Match_Columns.By_Position -> ColumnMapper.mapColumnsByPosition table.java_table column_count reordered_table = Table.Table_Data reordered_java_table @@ -112,7 +112,7 @@ write_text table format = write_to_stream : Table -> Delimited_Format -> Output_Stream -> Problem_Behavior -> File | Nothing -> Text | Nothing -> Boolean -> Any write_to_stream table format stream on_problems related_file=Nothing separator_override=Nothing needs_leading_newline=False = handle_io_exception ~action = Panic.catch IOException action caught_panic-> - Error.throw (File.wrap_io_exception related_file caught_panic.payload.cause) + Error.throw (File_Error.wrap_io_exception related_file caught_panic.payload.cause) handle_io_exception <| stream.with_stream_encoder format.encoding on_problems reporting_stream_encoder-> @@ -132,11 +132,11 @@ write_to_stream table format stream on_problems related_file=Nothing separator_o instead of the one from `format`. 
write_to_writer : Table -> Delimited_Format -> Writer -> Text | Nothing -> Boolean -> Any write_to_writer table format java_writer separator_override=Nothing needs_leading_newline=False = - column_formatters = Panic.recover Illegal_Argument_Error_Data <| case format.value_formatter of + column_formatters = Panic.recover Illegal_Argument.Error <| case format.value_formatter of Nothing -> table.columns.map column-> case column.storage_type of Storage.Text -> TextFormatter.new _ -> - Panic.throw (Illegal_Argument_Error_Data "If the expected file format does not specify a valid `Data_Formatter`, only Text columns are allowed.") + Panic.throw (Illegal_Argument.Error "If the expected file format does not specify a valid `Data_Formatter`, only Text columns are allowed.") value_formatter -> table.columns.map column-> storage_type = column.storage_type value_formatter.make_formatter_for_column_type storage_type diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Format.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Format.enso index 8cae1365779a..8184695e0a81 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Format.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Format.enso @@ -3,13 +3,9 @@ from Standard.Base import all import project.Data.Table.Table import project.Data.Match_Columns.Match_Columns import project.Excel.Excel_Reader +import project.Excel.Excel_Section.Excel_Section import project.Excel.Excel_Writer -from project.Excel.Excel_Section import Excel_Section, Worksheet, Sheet_Names, Range_Names - -from project.Excel.Excel_Format.Excel_Format import Excel -from project.Excel.Excel_Format.Excel_Format export Excel - ## PRIVATE Resolve the xls_format setting to a boolean. should_treat_as_xls_format : (Boolean|Infer) -> File -> Boolean | Illegal_Argument @@ -20,7 +16,7 @@ should_treat_as_xls_format xls_format file = ".xlsm" -> False ".xls" -> True ".xlt" -> True - _ -> Error.throw (Illegal_Argument_Error_Data ("Unknown file extension for Excel file (" + file.extension + ")")) + _ -> Error.throw (Illegal_Argument.Error ("Unknown file extension for Excel file (" + file.extension + ")")) ## Read the file to a `Table` from an Excel file @@ -43,14 +39,14 @@ type Excel_Format If set to `True`, the file is read as an Excel 95-2003 format. If set to `False`, the file is read as an Excel 2007+ format. `Infer` will attempt to deduce this from the extension of the filename. - Excel (section:Excel_Section=Worksheet) (headers:(Boolean|Infer)=Infer) (xls_format:(Boolean|Infer)=Infer) + Excel (section:Excel_Section=Excel_Section.Worksheet) (headers:(Boolean|Infer)=Infer) (xls_format:(Boolean|Infer)=Infer) ## If the File_Format supports reading from the file, return a configured instance. 
for_file : File -> Excel_Format | Nothing for_file file = is_xls = should_treat_as_xls_format Infer file if is_xls.is_error then Nothing else - Excel xls_format=is_xls + Excel_Format.Excel xls_format=is_xls ## Implements the `File.read` for this `File_Format` read : File -> Problem_Behavior -> Any @@ -64,6 +60,6 @@ type Excel_Format format = should_treat_as_xls_format self.xls_format file case self.section of - Sheet_Names -> Error.throw (Illegal_Argument_Error_Data "Sheet_Names cannot be used for `write`.") - Range_Names -> Error.throw (Illegal_Argument_Error_Data "Range_Names cannot be used for `write`.") + Excel_Section.Sheet_Names -> Error.throw (Illegal_Argument.Error "Sheet_Names cannot be used for `write`.") + Excel_Section.Range_Names -> Error.throw (Illegal_Argument.Error "Range_Names cannot be used for `write`.") _ -> Excel_Writer.write_file file table on_existing_file self.section self.headers match_columns on_problems format diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Range.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Range.enso index 634cd28e4a9f..af2de996aee3 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Range.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Range.enso @@ -6,7 +6,7 @@ polyglot java import org.enso.table.excel.ExcelRange as Java_Range Wrapper for validation of a value prior to execution. validate : Boolean -> Text -> Any validate validation ~error_message ~wrapped = - if validation then wrapped else Error.throw (Illegal_Argument_Error_Data error_message) + if validation then wrapped else Error.throw (Illegal_Argument.Error error_message) ## PRIVATE excel_2007_column_limit = 16384 @@ -84,7 +84,7 @@ type Excel_Range ## Creates a Range from an address. from_address : Text -> Excel_Range from_address address = - Illegal_Argument_Error.handle_java_exception <| + Illegal_Argument.handle_java_exception <| Excel_Range.Value (Java_Range.new address) ## Create a Range for a single cell. 
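For orientation, a minimal usage sketch (not part of the patch) of the type-qualified constructors introduced above; the file name and sheet name are hypothetical, and it assumes the `File.read` extension that these formats document themselves as implementing:

from Standard.Base import all
from Standard.Table import Excel_Format, Excel_Section

read_first_sheet =
    # Hypothetical file and sheet names, for illustration only.
    file = File.new "report.xlsx"
    # Sections are now referenced through the Excel_Section type.
    section = Excel_Section.Worksheet "Sheet1"
    # File.read dispatches to the format's own read, as documented above.
    file.read (Excel_Format.Excel section=section headers=True)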
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Reader.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Reader.enso index fc884738a596..3c7c55c38f16 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Reader.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Reader.enso @@ -1,10 +1,9 @@ from Standard.Base import all -from Standard.Base.System.File import handle_java_exceptions import project.Data.Table.Table import project.Excel.Excel_Range.Excel_Range +import project.Excel.Excel_Section.Excel_Section -from project.Excel.Excel_Section import Excel_Section, Sheet_Names, Range_Names, Worksheet, Cell_Range from project.Errors import Invalid_Location_Data, Duplicate_Output_Column_Names_Data, Invalid_Output_Column_Names_Data polyglot java import org.enso.table.excel.ExcelHeaders @@ -44,7 +43,7 @@ handle_reader file reader = bad_argument caught_panic = Error.throw (Invalid_Location_Data caught_panic.payload.cause.getCause) handle_bad_argument = Panic.catch InvalidLocationException handler=bad_argument - handle_java_exceptions file <| handle_bad_argument <| handle_bad_format <| + File_Error.handle_java_exceptions file <| handle_bad_argument <| handle_bad_format <| file.with_input_stream [File_Access.Read] stream-> stream.with_java_stream reader @@ -63,13 +62,13 @@ handle_reader file reader = read_file : File -> Excel_Section -> (Boolean|Infer) -> Problem_Behavior -> Boolean -> (Table | Vector) read_file file section headers on_problems xls_format=False = reader stream = case section of - Sheet_Names -> Vector.from_polyglot_array (ExcelReader.readSheetNames stream xls_format) - Range_Names -> Vector.from_polyglot_array (ExcelReader.readRangeNames stream xls_format) - Worksheet sheet skip_rows row_limit -> + Excel_Section.Sheet_Names -> Vector.from_polyglot_array (ExcelReader.readSheetNames stream xls_format) + Excel_Section.Range_Names -> Vector.from_polyglot_array (ExcelReader.readRangeNames stream xls_format) + Excel_Section.Worksheet sheet skip_rows row_limit -> prepare_reader_table on_problems <| case sheet of _ : Integer -> ExcelReader.readSheetByIndex stream sheet (make_java_headers headers) skip_rows row_limit xls_format _ : Text -> ExcelReader.readSheetByName stream sheet (make_java_headers headers) skip_rows row_limit xls_format - Cell_Range address skip_rows row_limit -> + Excel_Section.Cell_Range address skip_rows row_limit -> prepare_reader_table on_problems <| case address of _ : Excel_Range -> ExcelReader.readRange stream address.java_range (make_java_headers headers) skip_rows row_limit xls_format _ : Text -> ExcelReader.readRangeByName stream address (make_java_headers headers) skip_rows row_limit xls_format diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Section.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Section.enso index 2bce587bcf84..8a584b12a244 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Section.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Section.enso @@ -2,9 +2,6 @@ from Standard.Base import all import project.Excel.Excel_Range.Excel_Range -from project.Excel.Excel_Section.Excel_Section import all -from project.Excel.Excel_Section.Excel_Section export all - type Excel_Section ## Gets a list of sheets within a workbook. 
Sheet_Names diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Writer.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Writer.enso index f7a197c0a013..b8ba49b9a951 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Writer.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Writer.enso @@ -3,9 +3,8 @@ from Standard.Base import all import project.Data.Table import project.Data.Match_Columns.Match_Columns import project.Excel.Excel_Range.Excel_Range - -from project.Excel.Excel_Reader import handle_reader, make_java_headers -from project.Excel.Excel_Section import Worksheet, Cell_Range +import project.Excel.Excel_Section.Excel_Section +import project.Excel.Excel_Reader from project.Errors import Invalid_Location_Data, Range_Exceeded_Data, Existing_Data_Data, Column_Count_Mismatch, Column_Name_Mismatch polyglot java import org.enso.table.read.ExcelReader @@ -31,20 +30,21 @@ make_java_existing_data_mode on_existing_file match_columns = case on_existing_f Writes a Table to an Excel file. Arguments: -write_file : File -> Table -> Existing_File_Behavior -> (Sheet | Cell_Range) -> (Boolean|Infer) -> Match_Columns -> Problem_Behavior -> Boolean +write_file : File -> Table -> Existing_File_Behavior -> Excel_Section -> (Boolean|Infer) -> Match_Columns -> Problem_Behavior -> Boolean write_file file table on_existing_file section headers match_columns _ xls_format=False = workbook = if file.exists.not then ExcelWriter.createWorkbook xls_format else - handle_reader file stream->(ExcelReader.getWorkbook stream xls_format) + Excel_Reader.handle_reader file stream->(ExcelReader.getWorkbook stream xls_format) existing_data_mode = make_java_existing_data_mode on_existing_file match_columns - java_headers = make_java_headers headers + java_headers = Excel_Reader.make_java_headers headers ExcelWriter.setEnsoToTextCallbackIfUnset (.to_text) result = handle_writer <| case section of - Worksheet sheet skip_rows row_limit -> + Excel_Section.Worksheet sheet skip_rows row_limit -> ExcelWriter.writeTableToSheet workbook sheet existing_data_mode skip_rows table.java_table row_limit java_headers - Cell_Range address skip_rows row_limit -> case address of + Excel_Section.Cell_Range address skip_rows row_limit -> case address of Excel_Range.Value java_range -> ExcelWriter.writeTableToRange workbook java_range existing_data_mode skip_rows table.java_table row_limit java_headers _ : Text -> ExcelWriter.writeTableToRange workbook address existing_data_mode skip_rows table.java_table row_limit java_headers + _ : Excel_Section -> Error.throw (Illegal_Argument.Error "Only a Worksheet or Cell_Range is allowed in write_file") if result.is_error then result else write_stream stream = stream.with_java_stream java_stream-> @@ -68,10 +68,10 @@ handle_writer ~writer = handle_existing_data = Panic.catch ExistingDataException handler=throw_existing_data ## Should be impossible - occurs if no fallback serializer is provided. 
- throw_illegal_state caught_panic = Panic.throw (Illegal_State_Error_Data caught_panic.payload.cause.getMessage) + throw_illegal_state caught_panic = Panic.throw (Illegal_State.Error caught_panic.payload.cause.getMessage) handle_illegal_state = Panic.catch IllegalStateException handler=throw_illegal_state handle_illegal_state <| Column_Name_Mismatch.handle_java_exception <| Column_Count_Mismatch.handle_java_exception <| handle_bad_location <| - Illegal_Argument_Error.handle_java_exception <| handle_range_exceeded <| handle_existing_data <| + Illegal_Argument.handle_java_exception <| handle_range_exceeded <| handle_existing_data <| writer diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Parse_Values_Helper.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Parse_Values_Helper.enso index 1a1cc6b36ef4..bdb07462c5eb 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Parse_Values_Helper.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Parse_Values_Helper.enso @@ -12,5 +12,5 @@ translate_parsing_problem expected_datatype problem = translations = [invalid_format, leading_zeros] found = translations.find t-> Java.is_instance problem t.first translation = found.catch Any _-> - Error.throw (Illegal_State_Error_Data "Reported an unknown problem type: "+problem.to_text) + Error.throw (Illegal_State.Error "Reported an unknown problem type: "+problem.to_text) translation.second problem diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso index d89a89a9d3f1..215ce06fbe41 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso @@ -1,7 +1,6 @@ from Standard.Base import all import Standard.Base.Data.Text.Matching import Standard.Base.Data.Ordering.Vector_Lexicographic_Order -import Standard.Base.Data.Text.Text_Ordering.Text_Ordering import project.Data.Column_Name_Mapping.Column_Name_Mapping import project.Data.Column_Selector.Column_Selector @@ -183,7 +182,7 @@ type Table_Column_Helper Nothing -> True 1 -> True 0 -> False - _ -> Panic.throw (Illegal_State_Error_Data "Unexpected result. Perhaps an implementation bug of Column_Selector.Blank_Columns.") + _ -> Panic.throw (Illegal_State.Error "Unexpected result. Perhaps an implementation bug of Column_Selector.Blank_Columns.") ## PRIVATE A helper function which selects a single column from the table. It is aligned @@ -194,7 +193,7 @@ type Table_Column_Helper _ : Text -> matched_columns = self.internal_columns.filter column->(column.name==selector) if matched_columns.length == 1 then matched_columns.first else - if matched_columns.length != 0 then Panic.throw (Illegal_State_Error_Data "A single exact match should never match more than one column. Perhaps the table breaks the invariant of unique column names?") else + if matched_columns.length != 0 then Panic.throw (Illegal_State.Error "A single exact match should never match more than one column. 
Perhaps the table breaks the invariant of unique column names?") else expression = (self.table.evaluate selector).catch Any _->Nothing if Nothing != expression then expression else problem_builder.report_missing_input_columns [selector] @@ -242,11 +241,11 @@ rename_columns internal_columns mapping on_problems = Nothing -> Nothing _ -> matched.add index - new_name = case Meta.type_of ms of - Regex_Matcher.Regex_Matcher -> + new_name = case ms of + _ : Regex_Matcher -> pattern = ms.compile ((good_names.at index).at 0) pattern.replace name ((good_names.at index).at 1) - Text_Matcher.Text_Matcher -> (good_names.at index).at 1 + _ : Text_Matcher -> (good_names.at index).at 1 unique.make_unique new_name new_names = 0.up_to col_count . map i->(mapper (internal_columns.at i).name) @@ -317,7 +316,7 @@ sort_columns internal_columns direction text_ordering = Converts the generic `No_Matches_Found` error to a more specific `Missing_Input_Columns`. Any other errors are returned as-is. promote_no_matches_to_missing_columns error = case error of - Matching.No_Matches_Found_Data criteria -> Maybe.Some <| Missing_Input_Columns_Data criteria + Matching.No_Matches_Found.Error criteria -> Maybe.Some <| Missing_Input_Columns_Data criteria _ -> Nothing ## PRIVATE @@ -406,9 +405,12 @@ type Column_Transform_Element Value column associated_selector ## PRIVATE -prepare_order_by : Vector -> Vector Text | Sort_Column_Selector -> Problem_Builder -> Vector Column_Transform_Element +prepare_order_by : Vector -> Text | Vector Text | Sort_Column_Selector -> Problem_Builder -> Vector Column_Transform_Element prepare_order_by internal_columns column_selectors problem_builder = selected_elements = case column_selectors of + _ : Text -> + unified_name_selectors = [Sort_Column.Name column_selectors] + select_columns_by_name internal_columns unified_name_selectors Text_Matcher.Case_Sensitive problem_builder name_extractor=(_.name) _ : Vector -> unified_name_selectors = column_selectors.map (Sort_Column.Name _) select_columns_by_name internal_columns unified_name_selectors Text_Matcher.Case_Sensitive problem_builder name_extractor=(_.name) diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso index e3b5a7e25c20..ce661a583a59 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso @@ -1,43 +1,50 @@ from Standard.Base import all -import project.Data.Table.Table +import project.Data.Aggregate_Column.Aggregate_Column import project.Data.Column.Column -import project.Data.Column_Selector.Column_Selector -import project.Data.Sort_Column.Sort_Column -import project.Data.Sort_Column_Selector.Sort_Column_Selector import project.Data.Column_Name_Mapping.Column_Name_Mapping +import project.Data.Column_Selector.Column_Selector import project.Data.Data_Formatter.Data_Formatter +import project.Data.Join_Condition.Join_Condition +import project.Data.Join_Kind.Join_Kind import project.Data.Match_Columns.Match_Columns import project.Data.Position.Position -import project.Data.Aggregate_Column.Aggregate_Column - -import project.Delimited.Quote_Style.Quote_Style -import project.Delimited.Delimited_Format +import project.Data.Sort_Column.Sort_Column +import project.Data.Sort_Column_Selector.Sort_Column_Selector +import project.Data.Table.Table import project.Data.Table_Conversions - -import project.Excel.Excel_Section +import project.Delimited.Delimited_Format.Delimited_Format +import
project.Delimited.Quote_Style.Quote_Style +import project.Excel.Excel_Format.Excel_Format import project.Excel.Excel_Range.Excel_Range -import project.Excel.Excel_Format +import project.Excel.Excel_Section.Excel_Section -export project.Data.Table.Table +export project.Data.Aggregate_Column.Aggregate_Column export project.Data.Column.Column -export project.Data.Column_Selector.Column_Selector -export project.Data.Sort_Column.Sort_Column -export project.Data.Sort_Column_Selector.Sort_Column_Selector export project.Data.Column_Name_Mapping.Column_Name_Mapping +export project.Data.Column_Selector.Column_Selector +export project.Data.Data_Formatter.Data_Formatter +export project.Data.Join_Condition.Join_Condition +export project.Data.Join_Kind.Join_Kind export project.Data.Match_Columns.Match_Columns export project.Data.Position.Position -export project.Data.Aggregate_Column.Aggregate_Column - -export project.Delimited.Quote_Style.Quote_Style -from project.Delimited.Delimited_Format export Delimited_Format, Delimited +export project.Data.Sort_Column.Sort_Column +export project.Data.Sort_Column_Selector.Sort_Column_Selector +export project.Data.Table.Table export project.Data.Table_Conversions - -from project.Excel.Excel_Format export Excel_Format, Excel -from project.Excel.Excel_Section export Excel_Section, Sheet_Names, Range_Names, Worksheet, Cell_Range +export project.Delimited.Delimited_Format.Delimited_Format +export project.Delimited.Quote_Style.Quote_Style +export project.Excel.Excel_Format.Excel_Format export project.Excel.Excel_Range.Excel_Range +export project.Excel.Excel_Section.Excel_Section + +from project.Delimited.Delimited_Format.Delimited_Format import Delimited +from project.Excel.Excel_Format.Excel_Format import Excel +from project.Excel.Excel_Section.Excel_Section import Sheet_Names, Range_Names, Worksheet, Cell_Range +from project.Delimited.Delimited_Format.Delimited_Format export Delimited +from project.Excel.Excel_Format.Excel_Format export Excel +from project.Excel.Excel_Section.Excel_Section export Sheet_Names, Range_Names, Worksheet, Cell_Range -export project.Data.Data_Formatter.Data_Formatter from Standard.Geo.Geo_Json import Object_Type import Standard.Geo.Geo_Json diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso index 9beff9bf7f0c..3999576c46f5 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso @@ -3,6 +3,9 @@ from Standard.Base import all from Standard.Table import Table, Column import Standard.Table.Data.Storage.Storage +import project.Id as Id_Module +import project.Id.Id + ## PRIVATE Returns the given value if this is not an error. Propagates error otherwise. @@ -108,6 +111,18 @@ Table.lookup_ignore_case self name = self.all_columns.find <| col-> col.name.equals_ignore_case name +## UNSTABLE + ADVANCED + + Guides the visualization system to display the most suitable graphical + representation for this table. +Table.default_visualization : Id +Table.default_visualization self = + cols = self.columns.map .name . map name-> name.to_case Case.Lower + if cols.contains "latitude" && cols.contains "longitude" then Id_Module.geo_map else + if cols.contains "x" && cols.contains "y" then Id_Module.scatter_plot else + Id_Module.table + ## PRIVATE Checks if the column stores numbers. 
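As a companion to the re-exported surface above, a minimal sketch (not part of the patch) of the import and constructor style after this tidy-up; the inline data is made up:

from Standard.Base import all
from Standard.Table import Table, Delimited_Format

tab_separated_round_trip =
    text = 'a\tb\n1\t2'
    # Formats are now constructed through their type, e.g. Delimited_Format.Delimited.
    table = Table.from text format=(Delimited_Format.Delimited '\t')
    # to_csv delegates to Text.from with a comma-delimited format.
    table.to_csv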
diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/SQL/Visualization.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/SQL/Visualization.enso index d4ac72f8ea6b..11caae82e534 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/SQL/Visualization.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/SQL/Visualization.enso @@ -42,8 +42,11 @@ prepare_visualization x = Helpers.recover_errors <| types it will return `Nothing`. find_expected_enso_type_for_sql : SQL_Type -> Text find_expected_enso_type_for_sql sql_type = - if sql_type.is_definitely_integer then "Standard.Base.Data.Numbers.Integer" else - if sql_type.is_definitely_double then "Standard.Base.Data.Numbers.Decimal" else - if sql_type.is_definitely_text then "Standard.Base.Data.Text.Text" else - if sql_type.is_definitely_boolean then "Standard.Base.Boolean.Boolean" else - Nothing + expected_type = if sql_type.is_definitely_integer then Integer else + if sql_type.is_definitely_double then Decimal else + if sql_type.is_definitely_text then Text else + if sql_type.is_definitely_boolean then Boolean else + Nothing + case expected_type of + Nothing -> Nothing + _ -> Meta.get_qualified_type_name expected_type diff --git a/engine/language-server/src/test/scala/org/enso/languageserver/search/Suggestions.scala b/engine/language-server/src/test/scala/org/enso/languageserver/search/Suggestions.scala index 608a2e24e94a..1ae79a108e5b 100644 --- a/engine/language-server/src/test/scala/org/enso/languageserver/search/Suggestions.scala +++ b/engine/language-server/src/test/scala/org/enso/languageserver/search/Suggestions.scala @@ -101,7 +101,7 @@ object Suggestions { val methodOnAny: Suggestion.Method = Suggestion.Method( externalId = Some(UUID.fromString("6cfe1538-5df7-42e4-bf55-64f8ac2ededa")), - module = "Standard.Base.Data.Any.Extensions", + module = "Standard.Base.Any.Extensions", name = "<<", arguments = Vector( Suggestion.Argument("this", "Any", false, false, None), diff --git a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/ReplTest.scala b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/ReplTest.scala index 4760790955f9..f46de3acd7e6 100644 --- a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/ReplTest.scala +++ b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/ReplTest.scala @@ -298,7 +298,7 @@ class ReplTest val code = """ |import Standard.Base.Runtime.Debug - |from Standard.Base.Error.Common import Panic + |import Standard.Base.Panic.Panic | |main = | Debug.breakpoint diff --git a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeErrorsTest.scala b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeErrorsTest.scala index 7a6ff5061e98..51a7c7f0e3c9 100644 --- a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeErrorsTest.scala +++ b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeErrorsTest.scala @@ -87,7 +87,7 @@ class RuntimeErrorsTest .asHostObject[org.enso.interpreter.runtime.EnsoContext] languageContext.getLanguage.getIdExecutionService.ifPresent( _.overrideTimer(new TestTimer) - ); + ) def writeMain(contents: String): File = Files.write(pkg.mainFile.toPath, contents.getBytes).toFile @@ -290,12 +290,12 @@ class RuntimeErrorsTest val metadata = new Metadata // foo body id 
metadata.addItem(79, 5) - val xId = metadata.addItem(93, 19) - val yId = metadata.addItem(121, 8) - val mainResId = metadata.addItem(134, 7) + val xId = metadata.addItem(83, 19) + val yId = metadata.addItem(111, 8) + val mainResId = metadata.addItem(124, 7) val code = - """from Standard.Base.Error.Common import all + """import Standard.Base.Error.Error | |type MyError | @@ -1327,13 +1327,13 @@ class RuntimeErrorsTest val requestId = UUID.randomUUID() val moduleName = "Enso_Test.Test.Main" val metadata = new Metadata - val xId = metadata.addItem(108, 3) - val yId = metadata.addItem(120, 5) - val mainResId = metadata.addItem(130, 12) + val xId = metadata.addItem(98, 3) + val yId = metadata.addItem(110, 5) + val mainResId = metadata.addItem(120, 12) val code = """import Standard.Base.IO - |from Standard.Base.Error.Common import all + |import Standard.Base.Error.Error | |foo = | Error.throw 9 diff --git a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeServerTest.scala b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeServerTest.scala index c42cafca2634..1781dce1d4da 100644 --- a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeServerTest.scala +++ b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeServerTest.scala @@ -88,7 +88,7 @@ class RuntimeServerTest languageContext.getEnvironment.getPublicLanguages.get(LanguageInfo.ID) languageContext.getLanguage.getIdExecutionService.ifPresent( _.overrideTimer(new TestTimer) - ); + ) def writeMain(contents: String): File = Files.write(pkg.mainFile.toPath, contents.getBytes).toFile @@ -3365,8 +3365,8 @@ class RuntimeServerTest val code = """import Standard.Base.IO - |from Standard.Base.Error.Common import all - |from Standard.Base.Data.Any import all + |import Standard.Base.Panic.Panic + |import Standard.Base.Any.Any | |main = | x = Panic.catch_primitive () .convert_to_dataflow_error diff --git a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeVisualizationsTest.scala b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeVisualizationsTest.scala index ec1a7cf743a8..cad471f50568 100644 --- a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeVisualizationsTest.scala +++ b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeVisualizationsTest.scala @@ -1957,12 +1957,12 @@ class RuntimeVisualizationsTest val moduleName = "Enso_Test.Test.Main" val metadata = new Metadata - val idMain = metadata.addItem(116, 28) + val idMain = metadata.addItem(106, 28) val code = """import Standard.Base.Data.List |import Standard.Visualization - |from Standard.Base.Error.Common import all + |import Standard.Base.Error.Error | |main = | Error.throw List.Empty_Error diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/Error.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/Error.java index 09515d82812b..04a82954bd64 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/Error.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/Error.java @@ -2,7 +2,7 @@ import org.enso.interpreter.dsl.BuiltinType; -@BuiltinType(name = "Standard.Base.Error.Common.Error") +@BuiltinType(name = "Standard.Base.Error.Error") public class Error 
extends Builtin { @Override protected Class getSuperType() { diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/Panic.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/Panic.java index 6b4ac0462c5b..4f4ba3a5a2c6 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/Panic.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/Panic.java @@ -3,5 +3,5 @@ import org.enso.interpreter.dsl.BuiltinType; import org.enso.interpreter.node.expression.builtin.Builtin; -@BuiltinType(name = "Standard.Base.Error.Common.Panic") +@BuiltinType(name = "Standard.Base.Error.Panic") public class Panic extends Builtin {} diff --git a/engine/runtime/src/test/java/org/enso/compiler/EnsoCompilerTest.java b/engine/runtime/src/test/java/org/enso/compiler/EnsoCompilerTest.java index 823802c31230..abaed1350234 100644 --- a/engine/runtime/src/test/java/org/enso/compiler/EnsoCompilerTest.java +++ b/engine/runtime/src/test/java/org/enso/compiler/EnsoCompilerTest.java @@ -136,7 +136,7 @@ public void testTypeMethodWithSignature() throws Exception { @Test public void testImport() throws Exception { parseTest(""" - from Standard.Base.Data.Any import all + import Standard.Base.Any.Any import project.IO import Standard.Base as Enso_List from Standard.Base import all hiding Number, Boolean, Decimal, Any diff --git a/engine/runtime/src/test/java/org/enso/compiler/ParseStdLibTest.java b/engine/runtime/src/test/java/org/enso/compiler/ParseStdLibTest.java index abeb54d02fc3..b22ca7c5c894 100644 --- a/engine/runtime/src/test/java/org/enso/compiler/ParseStdLibTest.java +++ b/engine/runtime/src/test/java/org/enso/compiler/ParseStdLibTest.java @@ -159,7 +159,7 @@ public void runBare() throws Throwable { Arrays.asList( // Files containing type expressions not supported by old parser. 
"Data/Index_Sub_Range.enso", - "Data/Text/Regex/Engine/Default.enso", + "Data/Text/Regex/Regex_Mode.enso", "Internal/Base_Generator.enso", "Data/Sort_Column_Selector.enso", "Data/Value_Type.enso")); diff --git a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/CompileDiagnosticsTest.scala b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/CompileDiagnosticsTest.scala index 3dd8643908f7..67bb65c215d3 100644 --- a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/CompileDiagnosticsTest.scala +++ b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/CompileDiagnosticsTest.scala @@ -10,7 +10,8 @@ class CompileDiagnosticsTest extends InterpreterTest { ): Unit = { "surface ast-processing errors in the language" in { val code = - """from Standard.Base.Error.Common import all + """from Standard.Base.Error.Common import Syntax_Error_Data + |import Standard.Base.Panic.Panic | |main = | x = Panic.catch_primitive () .convert_to_dataflow_error @@ -26,6 +27,7 @@ class CompileDiagnosticsTest extends InterpreterTest { "surface parsing errors in the language" in { val code = """from Standard.Base.Error.Common import all + |import Standard.Base.Panic.Panic | |main = | x = Panic.catch_primitive @ caught_panic-> caught_panic.payload @@ -37,6 +39,7 @@ class CompileDiagnosticsTest extends InterpreterTest { "surface redefinition errors in the language" in { val code = """from Standard.Base.Error.Common import all + |import Standard.Base.Panic.Panic | |foo = | x = 1 @@ -52,6 +55,7 @@ class CompileDiagnosticsTest extends InterpreterTest { "surface non-existent variable errors in the language" in { val code = """from Standard.Base.Error.Common import all + |import Standard.Base.Panic.Panic | |foo = | my_var = 10 diff --git a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/DataflowErrorsTest.scala b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/DataflowErrorsTest.scala index 3ea8a2a22c65..60de65f3f93d 100644 --- a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/DataflowErrorsTest.scala +++ b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/DataflowErrorsTest.scala @@ -12,7 +12,7 @@ class DataflowErrorsTest extends InterpreterTest { "propagate through pattern matches" in { val code = """import Standard.Base.Nothing - |from Standard.Base.Error.Common import all + |import Standard.Base.Error.Error |import Standard.Base.IO | |type MyError @@ -32,7 +32,7 @@ class DataflowErrorsTest extends InterpreterTest { "propagate through specialized pattern matches" in { val code = """import Standard.Base.Nothing - |from Standard.Base.Error.Common import all + |import Standard.Base.Error.Error |import Standard.Base.IO | |type MyError @@ -52,7 +52,7 @@ class DataflowErrorsTest extends InterpreterTest { "be catchable by a user-provided special handling function" in { val code = - """from Standard.Base.Error.Common import all + """import Standard.Base.Error.Error | |main = | intError = Error.throw 1 @@ -64,7 +64,7 @@ class DataflowErrorsTest extends InterpreterTest { "accept a constructor handler in catch function" in { val code = """import Standard.Base.Nothing - |from Standard.Base.Error.Common import all + |import Standard.Base.Error.Error |import Standard.Base.IO | |type My_Cons @@ -80,7 +80,7 @@ class DataflowErrorsTest extends InterpreterTest { "accept a method handle in catch function" in { val code = - """from Standard.Base.Error.Common import all + """import Standard.Base.Error.Error |import 
Standard.Base.IO | |type My_Recovered @@ -106,7 +106,7 @@ class DataflowErrorsTest extends InterpreterTest { "propagate through atom construction" in { val code = - """from Standard.Base.Error.Common import all + """import Standard.Base.Error.Error |import Standard.Base.IO | |type My_Atom @@ -126,7 +126,7 @@ class DataflowErrorsTest extends InterpreterTest { "propagate through method resolution" in { val code = """import Standard.Base.IO - |from Standard.Base.Error.Common import all + |import Standard.Base.Error.Error | |type My_Atom |type My_Error @@ -146,7 +146,7 @@ class DataflowErrorsTest extends InterpreterTest { "propagate through function calls" in { val code = """import Standard.Base.IO - |from Standard.Base.Error.Common import all + |import Standard.Base.Error.Error | |type My_Error | @@ -163,7 +163,7 @@ class DataflowErrorsTest extends InterpreterTest { "propagate through builtin methods" in { val code = """import Standard.Base.IO - |from Standard.Base.Error.Common import all + |import Standard.Base.Error.Error | |type My_Error | @@ -179,7 +179,7 @@ class DataflowErrorsTest extends InterpreterTest { "not propagate when explicitly accepted by type and by annotation" in { val code = """import Standard.Base.IO - |from Standard.Base.Error.Common import all + |import Standard.Base.Error.Error | |type My_Error | diff --git a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/InteropTest.scala b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/InteropTest.scala index b1feb2c25b01..f1fd8e87d30f 100644 --- a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/InteropTest.scala +++ b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/InteropTest.scala @@ -44,7 +44,7 @@ class InteropTest extends InterpreterTest { "work with oversaturated calls on unresolved methods returned from functions" in { val code = - """from Standard.Base.Data.Any import all + """import Standard.Base.Any.Any | |Any.method self = self | diff --git a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/MethodsTest.scala b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/MethodsTest.scala index 7241eef7cde4..39ca6e5511ef 100644 --- a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/MethodsTest.scala +++ b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/MethodsTest.scala @@ -88,7 +88,7 @@ class MethodsTest extends InterpreterTest { "be definable as blocks without arguments" in { val code = - """from Standard.Base.Data.Any import all + """import Standard.Base.Any.Any | |Any.method self = | x = self * self @@ -112,7 +112,7 @@ class MethodsTest extends InterpreterTest { "be callable for any type when defined on Any" in { val code = - """from Standard.Base.Data.Any import all + """import Standard.Base.Any.Any |import Standard.Base.IO |import Standard.Base.Nothing | @@ -141,7 +141,7 @@ class MethodsTest extends InterpreterTest { "be callable for any type when defined on Any (resolved as a type name)" in { import annotation.unused @unused val code = - """from Standard.Base.Data.Any import all + """import Standard.Base.Any.Any | |Any.method self = 1 | diff --git a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/TextTest.scala b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/TextTest.scala index 9431efae71d8..7918d9514ffd 100644 --- a/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/TextTest.scala +++ 
b/engine/runtime/src/test/scala/org/enso/interpreter/test/semantic/TextTest.scala @@ -108,8 +108,10 @@ class TextTest extends InterpreterTest { """ |import Standard.Base.Data.List.List |from Standard.Base.Error.Common import all + |import Standard.Base.Panic.Panic |import Standard.Base.IO |import Standard.Base.Nothing + |import Standard.Base.Data.Json.Extensions | |main = | IO.println (List.Cons Nothing Nothing).to_display_text diff --git a/std-bits/base/src/main/java/org/enso/base/Array_Builder.java b/std-bits/base/src/main/java/org/enso/base/Array_Builder.java index d14f65c87098..fe59277c41bd 100644 --- a/std-bits/base/src/main/java/org/enso/base/Array_Builder.java +++ b/std-bits/base/src/main/java/org/enso/base/Array_Builder.java @@ -127,4 +127,8 @@ public Object toArray() { return EMPTY_ARRAY; } } + + public int getSize() { + return size; + } } diff --git a/test/Benchmarks/src/Equality.enso b/test/Benchmarks/src/Equality.enso index b697957ba155..37d117f7152a 100644 --- a/test/Benchmarks/src/Equality.enso +++ b/test/Benchmarks/src/Equality.enso @@ -26,7 +26,7 @@ count_entries vector element expected_count=1 = if count != expected_count then msg = "Expected " + expected_count.to_text + " entries of " + element.to_text + ", but got " + count.to_text IO.println msg - Panic.throw (Illegal_State_Error msg) + Panic.throw (Illegal_State.Error msg) ## Alternative implementation delegating to equals, for comparing polyglot performance. count_entries_polyglot vector element expected_count=1 = @@ -37,7 +37,7 @@ count_entries_polyglot vector element expected_count=1 = if count != expected_count then msg = "Expected " + expected_count.to_text + " entries of " + element.to_text + ", but got " + count.to_text IO.println msg - Panic.throw (Illegal_State_Error msg) + Panic.throw (Illegal_State.Error msg) bench = n = 100000 diff --git a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso index 07fce98670ad..9fae4eee7a8b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso @@ -4,7 +4,7 @@ from Standard.Table import Table, Sort_Column, Sort_Column_Selector, Column_Sele from Standard.Table.Data.Column_Selector.Column_Selector import By_Name from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all from Standard.Table.Errors import Missing_Input_Columns_Data, Column_Indexes_Out_Of_Range_Data, No_Output_Columns, Duplicate_Output_Column_Names_Data, Invalid_Output_Column_Names_Data, Invalid_Aggregation_Data, Floating_Point_Grouping_Data, Unquoted_Delimiter_Data, Additional_Warnings_Data -from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data +from Standard.Database.Errors import Unsupported_Database_Operation from Standard.Test import Test, Test_Suite, Problems import Standard.Test.Extensions @@ -1170,7 +1170,7 @@ spec setup = materialized.column_count . should_equal 1 materialized.columns.first.name . should_equal "Sum A" materialized.columns.first.to_vector . 
should_equal [6] - problems = [Unsupported_Database_Operation_Error_Data "`First` aggregation requires at least one `order_by` column.", Unsupported_Database_Operation_Error_Data "`Last` aggregation requires at least one `order_by` column."] + problems = [Unsupported_Database_Operation.Error "`First` aggregation requires at least one `order_by` column.", Unsupported_Database_Operation.Error "`Last` aggregation requires at least one `order_by` column."] Problems.test_problem_handling action problems tester Test.group prefix+"Table.aggregate should raise warnings when there are issues" pending=(resolve_pending test_selection.problem_handling) <| @@ -1333,7 +1333,7 @@ spec setup = warnings = Warning.get_all result . map .value warnings.length . should_equal error_count warnings.each warning-> - warning.should_be_an Unsupported_Database_Operation_Error_Data + warning.should_be_an Unsupported_Database_Operation.Error if test_selection.first_last_row_order.not then Test.specify "with First and Last in row order" <| diff --git a/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso index 1d1e22abb756..59bbae9c3974 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Expression_Spec.enso @@ -3,8 +3,6 @@ from Standard.Base import all from Standard.Table import Table, Column, Sort_Column, Column_Selector, Sort_Column_Selector, Aggregate_Column import Standard.Table.Data.Expression.Expression_Error -import Standard.Visualization - from Standard.Test import Test, Test_Suite, Problems import Standard.Test.Extensions diff --git a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso index 7bfedddd74f8..ddb97300637f 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Filter_Spec.enso @@ -3,7 +3,7 @@ from Standard.Base import all import Standard.Table.Data.Value_Type.Value_Type from Standard.Table.Errors import all -from Standard.Database.Errors import SQL_Error_Data +from Standard.Database.Errors import SQL_Error from Standard.Test import Test, Problems import Standard.Test.Extensions @@ -45,7 +45,7 @@ spec setup = v = t.filter "X" (Filter_Condition.Equal to="SOME TEXT :)") . at "X" . to_vector case test_selection.allows_mixed_type_comparisons of True -> v.should_equal [] - False -> v.should_fail_with SQL_Error_Data + False -> v.should_fail_with SQL_Error.Error t.filter "X" (Filter_Condition.Equal to=(t.at "Y")) . at "X" . to_vector . should_equal [100] t.filter "X" (Filter_Condition.Less than=(t.at "Y")) . at "X" . to_vector . should_equal [3] @@ -67,7 +67,7 @@ spec setup = v = t.filter "X" (Filter_Condition.Equal to=52) . at "X" . to_vector case test_selection.allows_mixed_type_comparisons of True -> v.should_equal [] - False -> v.should_fail_with SQL_Error_Data + False -> v.should_fail_with SQL_Error.Error t.filter "X" (Filter_Condition.Greater than=(t.at "Y")) . at "X" . to_vector . should_equal ["abb", "baca"] t.filter "X" (Filter_Condition.Equal_Or_Greater than=(t.at "Y")) . at "X" . to_vector . should_equal ["abb", "baca", "b", "c"] @@ -202,11 +202,11 @@ spec setup = v1 = t.filter "X" (Filter_Condition.Is_In ["c", "f", "b", "b", "b", 15, Nothing]) on_problems=Report_Error . at "X" . 
to_vector case test_selection.allows_mixed_type_comparisons of True -> v1.should_equal ["b", "f"] - False -> v1.should_fail_with SQL_Error_Data + False -> v1.should_fail_with SQL_Error.Error v2 = t.filter "ix" (Filter_Condition.Is_In ["c", 3, 2, "a"]) on_problems=Report_Error . at "ix" . to_vector case test_selection.allows_mixed_type_comparisons of True -> v2.should_equal [2, 3] - False -> v2.should_fail_with SQL_Error_Data + False -> v2.should_fail_with SQL_Error.Error t2 = table_builder [["A", [True, False, True]], ["B", [False, False, False]], ["C", [True, False, Nothing]]] t2.filter "A" (Filter_Condition.Is_In (t1.at "bool")) . at "A" . to_vector . should_equal [True, True] diff --git a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso index 39c29835530c..e965032b0043 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso @@ -4,7 +4,7 @@ from Standard.Table import Column_Selector from Standard.Table.Data.Aggregate_Column.Aggregate_Column import Count_Distinct from Standard.Table.Errors import all -from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data +from Standard.Database.Errors import Unsupported_Database_Operation from Standard.Test import Test import Standard.Test.Extensions @@ -118,7 +118,7 @@ spec setup = if test_selection.is_nan_and_nothing_distinct.not then Test.specify "this backend treats NaN as Nothing" <| t3.at "X" . to_vector . should_equal [2.0, 1.5, Nothing, Nothing] - t3.at "X" . is_nan . to_vector . should_fail_with Unsupported_Database_Operation_Error_Data + t3.at "X" . is_nan . to_vector . should_fail_with Unsupported_Database_Operation.Error Test.specify "Blank_Columns selector should work for all kinds of methods accepting Column_Selector" <| t = table_builder [["X", [1, 2, 3, 4]], ["Y", [Nothing, "", Nothing, Nothing]], ["Z", [Nothing, True, False, Nothing]]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso index 8b98a6fcbac4..ceaf53eaae10 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso @@ -111,8 +111,8 @@ spec setup = table.take (Every 2 first=100) . at "alpha" . to_vector . should_equal [] table.take (Every 200) . at "alpha" . to_vector . should_equal [1] empty.take (Every 2) . should_equal empty - table.take (Every 0) . should_fail_with Illegal_Argument_Error_Data - empty.take (Every 0) . should_fail_with Illegal_Argument_Error_Data + table.take (Every 0) . should_fail_with Illegal_Argument.Error + empty.take (Every 0) . should_fail_with Illegal_Argument.Error table.drop (Every 1) . should_equal empty table.drop (Every 3) . at "alpha" . to_vector . should_equal [2, 3, 5, 6, 8] @@ -121,8 +121,8 @@ spec setup = table.drop (Every 2 first=100) . should_equal table table.drop (Every 200) . at "beta" . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"] empty.drop (Every 2) . should_equal empty - table.drop (Every 0) . should_fail_with Illegal_Argument_Error_Data - empty.drop (Every 0) . should_fail_with Illegal_Argument_Error_Data + table.drop (Every 0) . should_fail_with Illegal_Argument.Error + empty.drop (Every 0) . 
should_fail_with Illegal_Argument.Error Test.specify "should allow sampling rows" <| empty = table_builder [["X", []]] @@ -251,8 +251,8 @@ spec setup = alpha.take (Every 2 first=100) . to_vector . should_equal [] alpha.take (Every 200) . to_vector . should_equal [1] empty_beta.take (Every 2) . should_equal empty_beta - beta.take (Every 0) . should_fail_with Illegal_Argument_Error_Data - empty_beta.take (Every 0) . should_fail_with Illegal_Argument_Error_Data + beta.take (Every 0) . should_fail_with Illegal_Argument.Error + empty_beta.take (Every 0) . should_fail_with Illegal_Argument.Error alpha.drop (Every 1) . should_equal empty_alpha alpha.drop (Every 3) . to_vector . should_equal [2, 3, 5, 6, 8] @@ -261,8 +261,8 @@ spec setup = alpha.drop (Every 2 first=100) . should_equal alpha beta.drop (Every 200) . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"] empty_beta.drop (Every 2) . should_equal empty_beta - beta.drop (Every 0) . should_fail_with Illegal_Argument_Error_Data - empty_beta.drop (Every 0) . should_fail_with Illegal_Argument_Error_Data + beta.drop (Every 0) . should_fail_with Illegal_Argument.Error + empty_beta.drop (Every 0) . should_fail_with Illegal_Argument.Error Test.specify "should allow sampling rows" <| three = table_builder [["X", ["a", "a", "a"]]] . at "X" diff --git a/test/Table_Tests/src/Database/Codegen_Spec.enso b/test/Table_Tests/src/Database/Codegen_Spec.enso index e7acc092ec38..b787197aca9f 100644 --- a/test/Table_Tests/src/Database/Codegen_Spec.enso +++ b/test/Table_Tests/src/Database/Codegen_Spec.enso @@ -8,7 +8,7 @@ from Standard.Database import all import Standard.Database.Data.Dialect import Standard.Database.Data.SQL_Type.SQL_Type from Standard.Database.Data.Table import combine_names, fresh_names -from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data +from Standard.Database.Errors import Unsupported_Database_Operation from Standard.Test import Test, Test_Suite, Problems import Standard.Test.Extensions @@ -63,7 +63,7 @@ spec = json = Json.from_pairs [["query", Nothing], ["message", "The table has no columns so a query cannot be generated."]] empty.to_json . should_equal json empty.column_count . should_equal 0 - empty.to_sql . should_fail_with Unsupported_Database_Operation_Error_Data + empty.to_sql . should_fail_with Unsupported_Database_Operation.Error Test.group "[Codegen] Building Expressions" <| Test.specify "should allow building expressions from columns and constants" <| @@ -149,11 +149,11 @@ spec = Fake_Test_Connection.make Dialect.sqlite tables t1 = connection.query (SQL_Query.Table_Name "T1") t2 = connection.query (SQL_Query.Table_Name "T2") - (t1.set_index "X").join (t2.set_index "X") . should_fail_with Illegal_State_Error_Data + (t1.set_index "X").join (t2.set_index "X") . should_fail_with Illegal_State.Error Test.specify "should ensure that name suffixes are distinct" <| err = (t1.set_index 'A').join (t2.set_index 'D') left_suffix='foo' right_suffix='foo' - err . should_fail_with Illegal_State_Error_Data + err . should_fail_with Illegal_State.Error Test.specify "should correctly handle self-joins" <| r1 = t1.join (t1.set_index 'A') on='B' @@ -202,7 +202,7 @@ spec = combined.first . should_equal ["A_1", "B"] combined.second . 
should_equal ["A_2", "C"] - Test.expect_panic_with (combine_names ["A", "A_1"] ["A"] "_1" "_2") Illegal_State_Error_Data + Test.expect_panic_with (combine_names ["A", "A_1"] ["A"] "_1" "_2") Illegal_State.Error Test.specify "fresh_names should provide fresh names" <| used_names = ["A", "A_1"] preferred_names = ["A", "A", "B"] diff --git a/test/Table_Tests/src/Database/Common_Spec.enso b/test/Table_Tests/src/Database/Common_Spec.enso index 83d102db7449..32e951e61b58 100644 --- a/test/Table_Tests/src/Database/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common_Spec.enso @@ -5,7 +5,7 @@ from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all from Standard.Table.Errors import No_Input_Columns_Selected, Missing_Input_Columns_Data from Standard.Database import all -from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data +from Standard.Database.Errors import Unsupported_Database_Operation from Standard.Test import Test, Problems import Standard.Test.Extensions @@ -321,7 +321,7 @@ spec prefix connection = t2.at "avg price" . to_vector . should_equal [(110.2 / 11)] Test.group prefix+"Table.filter" <| Test.specify "report error when trying to filter by a custom predicate" <| - t1.filter "a" (x -> x % 2 == 0) . should_fail_with Unsupported_Database_Operation_Error_Data + t1.filter "a" (x -> x % 2 == 0) . should_fail_with Unsupported_Database_Operation.Error clean_table name = Panic.recover Any <| sql = 'DROP TABLE "' + name + '"' diff --git a/test/Table_Tests/src/Database/Helpers/Fake_Test_Connection.enso b/test/Table_Tests/src/Database/Helpers/Fake_Test_Connection.enso index 702e155ae80d..b0202e19b06b 100644 --- a/test/Table_Tests/src/Database/Helpers/Fake_Test_Connection.enso +++ b/test/Table_Tests/src/Database/Helpers/Fake_Test_Connection.enso @@ -21,7 +21,7 @@ type Fake_Test_Connection query self query alias="" = case query of Text -> self.query (SQL_Query.Table_Name query) alias SQL_Query.Raw_SQL _ -> - Error.throw (Illegal_Argument_Error_Data "Cannot query a fake connection with raw SQL") + Error.throw (Illegal_Argument.Error "Cannot query a fake connection with raw SQL") SQL_Query.Table_Name name -> columns = self.tables.get name Database_Table.make_table self name columns (Context.for_table name) diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso index fca4f5fe26ce..15be31c1914d 100644 --- a/test/Table_Tests/src/Database/Postgres_Spec.enso +++ b/test/Table_Tests/src/Database/Postgres_Spec.enso @@ -319,7 +319,7 @@ connection_setup_spec = Test.group "[PostgreSQL] Connection setup" <| c1.jdbc_properties . should_equal <| add_ssl [] Test_Environment.unsafe_with_environment_override "PGPASSWORD" "somepassword" <| - c1.jdbc_properties . should_fail_with Illegal_State_Error_Data + c1.jdbc_properties . should_fail_with Illegal_State.Error c1.jdbc_properties.catch.message . should_equal "PGPASSWORD is set, but PGUSER is not." 
Test_Environment.unsafe_with_environment_override "PGUSER" "someuser" <| diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index dcb253d302bb..61f2c7b81577 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -4,7 +4,7 @@ import Standard.Base.Runtime.Ref.Ref from Standard.Table import Table from Standard.Database import Database, SQLite, In_Memory, SQL_Query -from Standard.Database.Errors import SQL_Error_Data +from Standard.Database.Errors import SQL_Error from Standard.Test import Test, Test_Suite import Standard.Test.Extensions @@ -26,8 +26,8 @@ sqlite_specific_spec connection = Meta.is_same_object connection (connection.set_schema Nothing) . should_be_true Test.specify "does not allow changing schema or database" <| - connection.set_schema "foo" . should_fail_with SQL_Error_Data - connection.set_database "foo" . should_fail_with SQL_Error_Data + connection.set_schema "foo" . should_fail_with SQL_Error.Error + connection.set_database "foo" . should_fail_with SQL_Error.Error Test.group "[SQLite] Tables and Table Types" <| tinfo = Name_Generator.random_name "TestTable" @@ -66,11 +66,11 @@ sqlite_specific_spec connection = Test.group "[SQLite] Error Handling" <| Test.specify "should wrap errors" <| - connection.read (SQL_Query.Raw_SQL "foobar") . should_fail_with SQL_Error_Data - connection.execute_update "foobar" . should_fail_with SQL_Error_Data + connection.read (SQL_Query.Raw_SQL "foobar") . should_fail_with SQL_Error.Error + connection.execute_update "foobar" . should_fail_with SQL_Error.Error action = connection.read (SQL_Query.Raw_SQL "SELECT A FROM undefined_table") - action . should_fail_with SQL_Error_Data + action . should_fail_with SQL_Error.Error action.catch.to_text . should_equal "There was an SQL error: '[SQLITE_ERROR] SQL error or missing database (no such table: undefined_table)'. [Query was: SELECT A FROM undefined_table]" Test.group "[SQLite] Metadata" <| diff --git a/test/Table_Tests/src/Formatting/Data_Formatter_Spec.enso b/test/Table_Tests/src/Formatting/Data_Formatter_Spec.enso index 17e5d4171b7f..264c15c2096d 100644 --- a/test/Table_Tests/src/Formatting/Data_Formatter_Spec.enso +++ b/test/Table_Tests/src/Formatting/Data_Formatter_Spec.enso @@ -17,11 +17,11 @@ type Custom_Type_With_To_Text type Custom_Type_With_Error to_text : Text - to_text self = Error.throw (Illegal_State_Error_Data "foo_error") + to_text self = Error.throw (Illegal_State.Error "foo_error") type Custom_Type_With_Panic to_text : Text - to_text self = Panic.throw (Illegal_State_Error_Data "foo_panic") + to_text self = Panic.throw (Illegal_State.Error "foo_panic") spec = Test.group "DataFormatter.parse" <| @@ -155,7 +155,7 @@ spec = formatter = Data_Formatter.Value true_values=["YES", "1", "true"] false_values=["NO", "0", "false"] formatter.format True . should_equal "YES" formatter.format False . should_equal "NO" - (Data_Formatter.Value true_values=[] false_values=[]).format True . should_fail_with Illegal_Argument_Error_Data + (Data_Formatter.Value true_values=[] false_values=[]).format True . 
should_fail_with Illegal_Argument.Error Test.specify "should format dates" <| formatter = Data_Formatter.Value @@ -188,8 +188,8 @@ spec = Test.specify "should correctly pass through errors from custom type's `.to_text` method" pending="TODO: figure out the desired behavior, see: https://www.pivotaltracker.com/story/show/182522644" <| formatter = Data_Formatter.Value - formatter.format Custom_Type_With_Error . should_fail_with Illegal_State_Error_Data - Test.expect_panic_with (formatter.format Custom_Type_With_Panic) Illegal_State_Error_Data + formatter.format Custom_Type_With_Error . should_fail_with Illegal_State.Error + Test.expect_panic_with (formatter.format Custom_Type_With_Panic) Illegal_State.Error Test.group "DataFormatter builders" <| # We create a formatter with all non-default values to ensure that the builders keep the existing values of other properties instead of switching to the constructor's defaults. diff --git a/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso b/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso index e2fa4a7ac7e3..2fc1da57b6ab 100644 --- a/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso +++ b/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso @@ -5,8 +5,6 @@ from Standard.Table.Data.Table import Empty_Error from Standard.Table.Data.Column_Type_Selection import Column_Type_Selection, Auto from Standard.Table.Errors import Invalid_Format, Leading_Zeros, Missing_Input_Columns, Column_Indexes_Out_Of_Range, Duplicate_Type_Selector -import Standard.Visualization - from Standard.Test import Test, Test_Suite, Problems import Standard.Test.Extensions diff --git a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso index 9c1cc2f9c9fa..fd15b5c540aa 100644 --- a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso @@ -309,10 +309,10 @@ spec = Test.specify "should check arguments" <| path = (enso_project.data / "simple_empty.csv") pb = Problem_Behavior.Report_Error - path.read (Delimited "," headers=False . with_quotes quote='abc') pb . should_fail_with Illegal_Argument_Error_Data - path.read (Delimited "," headers=False . with_quotes quote='🚧') pb . should_fail_with Illegal_Argument_Error_Data - path.read (Delimited "," headers=False . with_quotes quote_escape='//') pb . should_fail_with Illegal_Argument_Error_Data - path.read (Delimited 'a\u{301}' headers=False) pb . should_fail_with Illegal_Argument_Error_Data + path.read (Delimited "," headers=False . with_quotes quote='abc') pb . should_fail_with Illegal_Argument.Error + path.read (Delimited "," headers=False . with_quotes quote='🚧') pb . should_fail_with Illegal_Argument.Error + path.read (Delimited "," headers=False . with_quotes quote_escape='//') pb . should_fail_with Illegal_Argument.Error + path.read (Delimited 'a\u{301}' headers=False) pb . should_fail_with Illegal_Argument.Error Test.specify "should correctly guess column types" <| t = (enso_project.data / "data_small.csv") . read (Delimited "," headers=True) diff --git a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso index b6cbb92029a7..5be5e1c053f4 100644 --- a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso @@ -221,7 +221,7 @@ spec = file_2 = (enso_project.data / "transient" / "non-text_but_no_formatter.csv") file_2.delete_if_exists result_2 = table_2.write file_2 format - result_2 . 
should_fail_with Illegal_Argument_Error_Data + result_2 . should_fail_with Illegal_Argument.Error text_2 = Data.read_text_file file_2 text_2.should_equal "" @@ -290,7 +290,7 @@ spec = file.delete_if_exists no_header_format = Delimited "," . without_headers existing_table.write file no_header_format on_existing_file=Existing_File_Behavior.Overwrite - appending_table.write file on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument_Error_Data + appending_table.write file on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument.Error file.delete Test.specify "should fail when appending and matching columns by name but headers are disabled (headers=False)" <| @@ -300,7 +300,7 @@ spec = file.delete_if_exists no_header_format = Delimited "," . without_headers existing_table.write file on_existing_file=Existing_File_Behavior.Overwrite - appending_table.write file no_header_format on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument_Error_Data + appending_table.write file no_header_format on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument.Error file.delete Test.specify "should fail on column mismatch when appending to a file by name" <| @@ -533,7 +533,7 @@ spec = file.delete_if_exists initial_table.write file (Delimited ',' line_endings=Line_Ending_Style.Mac_Legacy) result = table_to_append.write file (Delimited ',' line_endings=Line_Ending_Style.Unix) on_existing_file=Existing_File_Behavior.Append match_columns=Match_Columns.By_Position - result . should_fail_with Illegal_Argument_Error_Data + result . should_fail_with Illegal_Argument.Error result.catch.message . should_equal "The explicitly provided line endings ('\n') do not match the line endings in the file ('\r')." file.delete diff --git a/test/Table_Tests/src/IO/Excel_Spec.enso b/test/Table_Tests/src/IO/Excel_Spec.enso index b3100104a6d3..7f5000f7891d 100644 --- a/test/Table_Tests/src/IO/Excel_Spec.enso +++ b/test/Table_Tests/src/IO/Excel_Spec.enso @@ -320,8 +320,8 @@ spec_write suffix test_sheet_name = (enso_project.data / test_sheet_name) . copy_to out lmd = out.last_modified_time extra_another = Table.new [['CC',[True, False]], ['BB',[4, 5]], ['AA', ['d', 'e']], ['DD', ['2022-01-20', '2022-01-21']], ['EE', ['2022-01-20', '2022-01-21']]] - extra_another.write out (Excel (Worksheet "NoHeaders")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument_Error_Data - extra_another.write out (Excel (Worksheet "Another") False) on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument_Error_Data + extra_another.write out (Excel (Worksheet "NoHeaders")) on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument.Error + extra_another.write out (Excel (Worksheet "Another") False) on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument.Error out.last_modified_time.should_equal lmd out.delete_if_exists @@ -394,66 +394,66 @@ spec = check_range (Excel_Range.from_address "Test!R1C1:R5C3") 'Test' [1, 1, 5, 3] Test.specify 'should fail gracefully for invalid patterns' <| - Excel_Range.from_address "Test!$$QA1" . should_fail_with Illegal_Argument_Error_Data - Excel_Range.from_address "Test!BADADDRESS" . should_fail_with Illegal_Argument_Error_Data + Excel_Range.from_address "Test!$$QA1" . should_fail_with Illegal_Argument.Error + Excel_Range.from_address "Test!BADADDRESS" . 
should_fail_with Illegal_Argument.Error Test.specify 'should allow Range creation for a cell' <| check_range (Excel_Range.for_cell "Hello World" 123 14) 'Hello World' [14, 123, 14, 123] True check_range (Excel_Range.for_cell "Hello World" "DS" 14) 'Hello World' [14, 123, 14, 123] True Excel_Range.for_cell "Test" 123 14 . address . should_equal "Test!DS14" Excel_Range.for_cell "Hello World" 123 14 . address . should_equal "'Hello World'!DS14" - Excel_Range.for_cell "Test" 20000 1 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_cell "Test" "ZZZ" 1 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_cell "Test" 0 1 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_cell "Test" 1 10000000 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_cell "Test" 1 0 . should_fail_with Illegal_Argument_Error_Data + Excel_Range.for_cell "Test" 20000 1 . should_fail_with Illegal_Argument.Error + Excel_Range.for_cell "Test" "ZZZ" 1 . should_fail_with Illegal_Argument.Error + Excel_Range.for_cell "Test" 0 1 . should_fail_with Illegal_Argument.Error + Excel_Range.for_cell "Test" 1 10000000 . should_fail_with Illegal_Argument.Error + Excel_Range.for_cell "Test" 1 0 . should_fail_with Illegal_Argument.Error Test.specify 'should allow Range creation for a range' <| check_range (Excel_Range.for_range "Hello World" 55 120 123 14) 'Hello World' [14, 55, 120, 123] check_range (Excel_Range.for_range "Hello World" "BC" 120 "DS" 14) 'Hello World' [14, 55, 120, 123] Excel_Range.for_range "Test" 55 120 123 14 . address . should_equal "Test!BC14:DS120" Excel_Range.for_range "Hello World" 55 120 123 14 . address . should_equal "'Hello World'!BC14:DS120" - Excel_Range.for_range "Test" 20000 1 123 14 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_range "Test" "ZZZ" 1 123 14 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_range "Test" 0 1 123 14 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_range "Test" 5 1 20000 14 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_range "Test" 5 1 0 14 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_range "Test" 5 0 123 14 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_range "Test" 5 10000000 123 14 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_range "Test" 5 1 123 0 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_range "Test" 5 1 123 10000000 . should_fail_with Illegal_Argument_Error_Data + Excel_Range.for_range "Test" 20000 1 123 14 . should_fail_with Illegal_Argument.Error + Excel_Range.for_range "Test" "ZZZ" 1 123 14 . should_fail_with Illegal_Argument.Error + Excel_Range.for_range "Test" 0 1 123 14 . should_fail_with Illegal_Argument.Error + Excel_Range.for_range "Test" 5 1 20000 14 . should_fail_with Illegal_Argument.Error + Excel_Range.for_range "Test" 5 1 0 14 . should_fail_with Illegal_Argument.Error + Excel_Range.for_range "Test" 5 0 123 14 . should_fail_with Illegal_Argument.Error + Excel_Range.for_range "Test" 5 10000000 123 14 . should_fail_with Illegal_Argument.Error + Excel_Range.for_range "Test" 5 1 123 0 . should_fail_with Illegal_Argument.Error + Excel_Range.for_range "Test" 5 1 123 10000000 . 
should_fail_with Illegal_Argument.Error Test.specify 'should allow Range creation for a column' <| check_range (Excel_Range.for_columns "Hello World" 123) 'Hello World' [Nothing, 123, Nothing, 123] check_range (Excel_Range.for_columns "Hello World" "DS") 'Hello World' [Nothing, 123, Nothing, 123] Excel_Range.for_columns "Test" 123 . address . should_equal "Test!DS" Excel_Range.for_columns "Hello World" 123 . address . should_equal "'Hello World'!DS" - Excel_Range.for_columns "Test" 20000 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_columns "Test" "ZZZ" . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_columns "Test" 0 . should_fail_with Illegal_Argument_Error_Data + Excel_Range.for_columns "Test" 20000 . should_fail_with Illegal_Argument.Error + Excel_Range.for_columns "Test" "ZZZ" . should_fail_with Illegal_Argument.Error + Excel_Range.for_columns "Test" 0 . should_fail_with Illegal_Argument.Error Test.specify 'should allow Range creation for columns' <| check_range (Excel_Range.for_columns "Hello World" "BC" 123) 'Hello World' [Nothing, 55, Nothing, 123] check_range (Excel_Range.for_columns "Hello World" 55 "DS") 'Hello World' [Nothing, 55, Nothing, 123] Excel_Range.for_columns "Test" 55 123 . address . should_equal "Test!BC:DS" Excel_Range.for_columns "Hello World" "BC" "DS" . address . should_equal "'Hello World'!BC:DS" - Excel_Range.for_columns "Test" 55 20000 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_columns "Test" 55 "ZZZ" . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_columns "Test" 55 0 . should_fail_with Illegal_Argument_Error_Data + Excel_Range.for_columns "Test" 55 20000 . should_fail_with Illegal_Argument.Error + Excel_Range.for_columns "Test" 55 "ZZZ" . should_fail_with Illegal_Argument.Error + Excel_Range.for_columns "Test" 55 0 . should_fail_with Illegal_Argument.Error Test.specify 'should allow Range creation for a row' <| check_range (Excel_Range.for_rows "Hello World" 123) 'Hello World' [123, Nothing, 123, Nothing] Excel_Range.for_rows "Test" 123 . address . should_equal "Test!123" Excel_Range.for_rows "Hello World" 123 . address . should_equal "'Hello World'!123" - Excel_Range.for_rows "Test" 20000000 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_rows "Test" 0 . should_fail_with Illegal_Argument_Error_Data + Excel_Range.for_rows "Test" 20000000 . should_fail_with Illegal_Argument.Error + Excel_Range.for_rows "Test" 0 . should_fail_with Illegal_Argument.Error Test.specify 'should allow Range creation for rows' <| check_range (Excel_Range.for_rows "Hello World" 55 123) 'Hello World' [55, Nothing, 123, Nothing] Excel_Range.for_rows "Test" 55 123 . address . should_equal "Test!55:123" Excel_Range.for_rows "Hello World" 55 123 . address . should_equal "'Hello World'!55:123" - Excel_Range.for_rows "Test" 55 20000000 . should_fail_with Illegal_Argument_Error_Data - Excel_Range.for_rows "Test" 55 0 . should_fail_with Illegal_Argument_Error_Data + Excel_Range.for_rows "Test" 55 20000000 . should_fail_with Illegal_Argument.Error + Excel_Range.for_rows "Test" 55 0 . 
should_fail_with Illegal_Argument.Error xlsx_sheet = enso_project.data / "TestSheet.xlsx" xlsx_path = xlsx_sheet.path diff --git a/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso b/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso index 78059ea2050c..1a0beb40b6d2 100644 --- a/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso @@ -31,7 +31,7 @@ spec = Test.group "Aggregate Columns" <| result = acc = Aggregate_Column_Helper.java_aggregator "Name" resolved indexes = Vector.new table.row_count v->v - Illegal_Argument_Error.handle_java_exception <| + Illegal_Argument.handle_java_exception <| acc.aggregate indexes.to_array if epsilon != False then ((result - expected_result).abs < epsilon).should_be_true else diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index 5b0540174e83..2130f773a09e 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -648,24 +648,6 @@ spec = t_2.at 'col2' . to_vector . should_equal (expected.at 'col2' . to_vector) t_2.at 'col3' . to_vector . should_equal (expected.at 'col3' . to_vector) - Test.group "Visualizing tables" <| - Test.specify "should specify default visualizations correctly" <| - c_1_1 = ['x', [1, 2, 3]] - c_1_2 = ['Y', [5.3, 56.2, 6.3]] - t_1 = Table.new [c_1_1, c_1_2] - t_1.default_visualization.should_equal Visualization.Id.scatter_plot - - c_2_1 = ['LaTituDe', [5,3,2]] - c_2_2 = ['longitude', [6,7,8]] - t_2 = Table.new [c_2_1, c_2_2] - t_2.default_visualization.should_equal Visualization.Id.geo_map - - c_3_1 = ['latitude', [5,2,5]] - c_3_2 = ['Y', [2,4,2]] - c_3_3 = ['name', ["foo", "bar", "baz"]] - t_3 = Table.new [c_3_1, c_3_2, c_3_3] - t_3.default_visualization.should_equal Visualization.Id.table - Test.group "Use First Row As Names" <| expect_column_names names table = table.columns . map .name . should_equal names frames_to_skip=2 @@ -884,7 +866,7 @@ spec = Test.specify "until hashing is supported, should throw an error when trying to aggregate a custom object" <| t = Table.new [["X", [My.Data 1 2, My.Data 3 4, My.Data 1 2]]] - t.distinct . should_fail_with Illegal_Argument_Error_Data + t.distinct . should_fail_with Illegal_Argument.Error Test.specify "should group by all columns by default" <| a = ["A", ["a", "b", "a", "b", "a", "b"]] diff --git a/test/Tests/src/Data/Array_Spec.enso b/test/Tests/src/Data/Array_Spec.enso index 7e1c2b6cd139..c2bc76304b37 100644 --- a/test/Tests/src/Data/Array_Spec.enso +++ b/test/Tests/src/Data/Array_Spec.enso @@ -43,9 +43,9 @@ spec = arr.method . should_equal 0 Test.specify "should propagate dataflow errors" <| - err = Error.throw (Illegal_State_Error_Data "Foo") + err = Error.throw (Illegal_State.Error "Foo") res = Array.new err - res . should_fail_with Illegal_State_Error_Data + res . 
should_fail_with Illegal_State.Error Test.specify "should be able to be converted to a visualization rep" <| arr = make_enso_array (Vector.fill 1000 0) diff --git a/test/Tests/src/Data/Noise/Generator_Spec.enso b/test/Tests/src/Data/Noise/Generator_Spec.enso index ef3015934496..21cd4812604a 100644 --- a/test/Tests/src/Data/Noise/Generator_Spec.enso +++ b/test/Tests/src/Data/Noise/Generator_Spec.enso @@ -12,7 +12,7 @@ spec = gen = Generator Test.specify "should not be invokable" <| interval = Interval.inclusive 0 1 - Test.expect_panic_with (gen.step 1 interval) Common.Unimplemented_Error_Data + Test.expect_panic_with (gen.step 1 interval) Unimplemented.Error Test.group "Deterministic Random Noise Generator" <| gen = Deterministic_Random Test.specify "should always return the same output for the same input" <| diff --git a/test/Tests/src/Data/Range_Spec.enso b/test/Tests/src/Data/Range_Spec.enso index 451aa35842e3..bb219a2e15a9 100644 --- a/test/Tests/src/Data/Range_Spec.enso +++ b/test/Tests/src/Data/Range_Spec.enso @@ -38,10 +38,10 @@ spec = Test.group "Range" <| 10.down_to 0 . with_step 2 . should_equal (Range.Value 10 0 -2) 10.down_to 0 . with_step 2 . to_vector . should_equal [10, 8, 6, 4, 2] - 1.up_to 2 . with_step 0.5 . should_fail_with Illegal_Argument_Error_Data - 1.up_to 2 . with_step -1 . should_fail_with Illegal_Argument_Error_Data - 0.up_to 2.0 . should_fail_with Illegal_Argument_Error_Data - 0.down_to 2.0 . should_fail_with Illegal_Argument_Error_Data + 1.up_to 2 . with_step 0.5 . should_fail_with Illegal_Argument.Error + 1.up_to 2 . with_step -1 . should_fail_with Illegal_Argument.Error + 0.up_to 2.0 . should_fail_with Illegal_Argument.Error + 0.down_to 2.0 . should_fail_with Illegal_Argument.Error Test.expect_panic_with (0.0.up_to 2) No_Such_Method_Error_Data Test.expect_panic_with (0.0.down_to 2) No_Such_Method_Error_Data @@ -146,11 +146,11 @@ spec = Test.group "Range" <| 3.up_to 5 . contains 2 . should_be_false 0.up_to 10 . contains -3 . should_be_false - 0.up_to 10 . contains 2.5 . should_fail_with Illegal_Argument_Error_Data - 0.up_to 10 . contains 3.0 . should_fail_with Illegal_Argument_Error_Data + 0.up_to 10 . contains 2.5 . should_fail_with Illegal_Argument.Error + 0.up_to 10 . contains 3.0 . should_fail_with Illegal_Argument.Error - 5.down_to 0 . contains 2.5 . should_fail_with Illegal_Argument_Error_Data - 5.down_to 0 . contains 3.0 . should_fail_with Illegal_Argument_Error_Data + 5.down_to 0 . contains 2.5 . should_fail_with Illegal_Argument.Error + 5.down_to 0 . contains 3.0 . should_fail_with Illegal_Argument.Error verify_contains range expected unexpected = @@ -370,22 +370,22 @@ spec = Test.group "Range" <| verify_contains r4 [3] [-3, -2, -1, 0, 1, 2, 4, 5, 6, 7, 10] Test.specify "should report errors if trying to set step to 0" <| - 0.up_to 0 . with_step 0 . should_fail_with Illegal_State_Error_Data + 0.up_to 0 . with_step 0 . should_fail_with Illegal_State.Error invalid_range = Range.Value 0 0 0 - invalid_range . length . should_fail_with Illegal_State_Error_Data - invalid_range . is_empty . should_fail_with Illegal_State_Error_Data - invalid_range . not_empty . should_fail_with Illegal_State_Error_Data - invalid_range . each x->x . should_fail_with Illegal_State_Error_Data - invalid_range . fold 0 (+) . should_fail_with Illegal_State_Error_Data + invalid_range . length . should_fail_with Illegal_State.Error + invalid_range . is_empty . should_fail_with Illegal_State.Error + invalid_range . not_empty . should_fail_with Illegal_State.Error + invalid_range . 
each x->x . should_fail_with Illegal_State.Error + invalid_range . fold 0 (+) . should_fail_with Illegal_State.Error ## FIXME [RW] These tests are disabled because they fail in an unexpected way due to a codegen issue (noted below). They should be enabled once that is resolved. See: https://www.pivotaltracker.com/story/show/181652841 - #invalid_range . map x->x . should_fail_with Illegal_State_Error - #invalid_range . to_vector . should_fail_with Illegal_State_Error - invalid_range . any _->True . should_fail_with Illegal_State_Error_Data - invalid_range . all _->True . should_fail_with Illegal_State_Error_Data - invalid_range . find _->True . should_fail_with Illegal_State_Error_Data - invalid_range . contains 0 . should_fail_with Illegal_State_Error_Data + #invalid_range . map x->x . should_fail_with Illegal_State.Error + #invalid_range . to_vector . should_fail_with Illegal_State.Error + invalid_range . any _->True . should_fail_with Illegal_State.Error + invalid_range . all _->True . should_fail_with Illegal_State.Error + invalid_range . find _->True . should_fail_with Illegal_State.Error + invalid_range . contains 0 . should_fail_with Illegal_State.Error main = Test_Suite.run_main spec diff --git a/test/Tests/src/Data/Regression_Spec.enso b/test/Tests/src/Data/Regression_Spec.enso index 725ebc47f683..78bd4be29dbd 100644 --- a/test/Tests/src/Data/Regression_Spec.enso +++ b/test/Tests/src/Data/Regression_Spec.enso @@ -1,4 +1,4 @@ -from Standard.Base import Nothing, Vector, Number, Decimal, True, Illegal_Argument_Error_Data, False, Regression +from Standard.Base import Nothing, Vector, Number, Decimal, True, Illegal_Argument, False, Regression from Standard.Test import Test, Test_Suite import Standard.Test.Extensions @@ -19,7 +19,7 @@ spec = Test.specify "return an error if the vector lengths do not match" <| known_xs = [2, 3, 5, 7, 9] known_ys = [4, 5, 7, 10] - Regression.fit_least_squares known_xs known_ys . should_fail_with Illegal_Argument_Error_Data + Regression.fit_least_squares known_xs known_ys . should_fail_with Illegal_Argument.Error Test.specify "return an error if the X values are all the same" <| known_xs = [2, 2, 2, 2] diff --git a/test/Tests/src/Data/Statistics_Spec.enso b/test/Tests/src/Data/Statistics_Spec.enso index d465f91de762..74ca6311a768 100644 --- a/test/Tests/src/Data/Statistics_Spec.enso +++ b/test/Tests/src/Data/Statistics_Spec.enso @@ -137,12 +137,12 @@ spec = ord_set = [Ord.Value 10, Ord.Value 2, Nothing, Ord.Value 9] no_ord_set = [No_Ord.Value 10, No_Ord.Value 2, Nothing, No_Ord.Value 9] - Test.specify "should fail with Illegal_Argument_Error on number based statistics for text Vector" <| - text_set.compute Statistic.Sum . should_fail_with Illegal_Argument_Error_Data - text_set.compute Statistic.Mean . should_fail_with Illegal_Argument_Error_Data - text_set.compute Statistic.Variance . should_fail_with Illegal_Argument_Error_Data - text_set.compute Statistic.Skew . should_fail_with Illegal_Argument_Error_Data - text_set.compute Statistic.Kurtosis . should_fail_with Illegal_Argument_Error_Data + Test.specify "should fail with Illegal_Argument on number based statistics for text Vector" <| + text_set.compute Statistic.Sum . should_fail_with Illegal_Argument.Error + text_set.compute Statistic.Mean . should_fail_with Illegal_Argument.Error + text_set.compute Statistic.Variance . should_fail_with Illegal_Argument.Error + text_set.compute Statistic.Skew . should_fail_with Illegal_Argument.Error + text_set.compute Statistic.Kurtosis . 
should_fail_with Illegal_Argument.Error Test.specify "should be able to do Count, Minimum and Maximum on custom type with compare_to" <| ord_set.compute . should_equal 3 @@ -185,8 +185,8 @@ spec = Test.specify "should fail with Incomparable_Values_Error on mixed Vectors" <| Statistic.rank_data [1, "A"] . should_fail_with Incomparable_Values_Error - Test.specify "should fail with Illegal_Argument_Error on Vectors with Nothing" <| - Statistic.rank_data [1, Nothing, 4] . should_fail_with Illegal_Argument_Error_Data + Test.specify "should fail with Illegal_Argument on Vectors with Nothing" <| + Statistic.rank_data [1, Nothing, 4] . should_fail_with Illegal_Argument.Error Test.group "Correlation Statistics" <| series_a = [0.22345,0.258315,0.74663,Nothing,0.686843,0.692246,Nothing,0.401859,0.725442,Nothing,0.963527,0.520363,0.633053,0.397123,Nothing,0.458942,0.036499,0.368194,0.598939,0.296476,0.093746,0.609329] @@ -221,30 +221,30 @@ spec = vector_compare (matrix.at 1) [-0.09313725, 1, 0] vector_compare (matrix.at 2) [-0.43382353, 0, 1] - Test.specify "should fail with Illegal_Argument_Error if different lengths" <| + Test.specify "should fail with Illegal_Argument if different lengths" <| data = [[1,2,3,4],[10,20,30]] - data.first.compute (Statistic.Covariance data.second) . should_fail_with Illegal_Argument_Error_Data - data.first.compute (Statistic.Pearson data.second) . should_fail_with Illegal_Argument_Error_Data - data.first.compute (Statistic.Spearman data.second) . should_fail_with Illegal_Argument_Error_Data - data.first.compute (Statistic.R_Squared data.second) . should_fail_with Illegal_Argument_Error_Data - Statistic.covariance_matrix data . should_fail_with Illegal_Argument_Error_Data - Statistic.pearson_correlation data . should_fail_with Illegal_Argument_Error_Data - Statistic.spearman_correlation data . should_fail_with Illegal_Argument_Error_Data - - Test.specify "should fail with Illegal_Argument_Error if not number based" <| + data.first.compute (Statistic.Covariance data.second) . should_fail_with Illegal_Argument.Error + data.first.compute (Statistic.Pearson data.second) . should_fail_with Illegal_Argument.Error + data.first.compute (Statistic.Spearman data.second) . should_fail_with Illegal_Argument.Error + data.first.compute (Statistic.R_Squared data.second) . should_fail_with Illegal_Argument.Error + Statistic.covariance_matrix data . should_fail_with Illegal_Argument.Error + Statistic.pearson_correlation data . should_fail_with Illegal_Argument.Error + Statistic.spearman_correlation data . should_fail_with Illegal_Argument.Error + + Test.specify "should fail with Illegal_Argument if not number based" <| text = [["A","BC","CD"], ["0", "1", "2"], ["H", "I", "J"]] - text.first.compute (Statistic.Covariance text.second) . should_fail_with Illegal_Argument_Error_Data - text.first.compute (Statistic.Pearson text.second) . should_fail_with Illegal_Argument_Error_Data - text.first.compute (Statistic.Spearman text.second) . should_fail_with Illegal_Argument_Error_Data - text.first.compute (Statistic.R_Squared text.second) . should_fail_with Illegal_Argument_Error_Data - Statistic.covariance_matrix text . should_fail_with Illegal_Argument_Error_Data - Statistic.pearson_correlation text . should_fail_with Illegal_Argument_Error_Data - Statistic.spearman_correlation text . should_fail_with Illegal_Argument_Error_Data + text.first.compute (Statistic.Covariance text.second) . should_fail_with Illegal_Argument.Error + text.first.compute (Statistic.Pearson text.second) . 
should_fail_with Illegal_Argument.Error + text.first.compute (Statistic.Spearman text.second) . should_fail_with Illegal_Argument.Error + text.first.compute (Statistic.R_Squared text.second) . should_fail_with Illegal_Argument.Error + Statistic.covariance_matrix text . should_fail_with Illegal_Argument.Error + Statistic.pearson_correlation text . should_fail_with Illegal_Argument.Error + Statistic.spearman_correlation text . should_fail_with Illegal_Argument.Error Test.group "Statistics - invalid input" <| - Test.specify "should fail with Illegal_Argument_Error on number based statistics for text Vector" <| + Test.specify "should fail with Illegal_Argument on number based statistics for text Vector" <| series = [["A", "B", Nothing, "D"], ["A", "B", Nothing, "D"]] - Statistic.covariance_matrix series . should_fail_with Illegal_Argument_Error_Data - Statistic.pearson_correlation series . should_fail_with Illegal_Argument_Error_Data + Statistic.covariance_matrix series . should_fail_with Illegal_Argument.Error + Statistic.pearson_correlation series . should_fail_with Illegal_Argument.Error main = Test_Suite.run_main spec diff --git a/test/Tests/src/Data/Text/Default_Regex_Engine_Spec.enso b/test/Tests/src/Data/Text/Default_Regex_Engine_Spec.enso index 8e3b22085580..0181d174c824 100644 --- a/test/Tests/src/Data/Text/Default_Regex_Engine_Spec.enso +++ b/test/Tests/src/Data/Text/Default_Regex_Engine_Spec.enso @@ -1,8 +1,8 @@ from Standard.Base import all -from Standard.Base.Data.Text.Regex import No_Such_Group_Error_Data +from Standard.Base.Data.Text.Regex import No_Such_Group, Invalid_Option import Standard.Base.Data.Text.Regex.Engine.Default as Default_Engine -import Standard.Base.Data.Text.Regex.Option as Global_Option +import Standard.Base.Data.Text.Regex.Regex_Option.Regex_Option polyglot java import java.util.regex.Pattern as Java_Pattern @@ -15,7 +15,7 @@ spec = Test.group "The default regex engine's options handling" <| Test.specify "should convert options to Java" <| - options = [Global_Option.Comments, Global_Option.Multiline, Default_Engine.Unix_Lines] + options = [Regex_Option.Comments, Regex_Option.Multiline, Default_Engine.Option.Unix_Lines] expected_mask = Java_Pattern.UNIX_LINES.bit_or Java_Pattern.COMMENTS . bit_or Java_Pattern.MULTILINE . bit_or default_mask actual_mask = Default_Engine.from_enso_options options @@ -27,12 +27,12 @@ spec = actual_mask . should_equal default_mask Test.specify "should handle ascii matching by disabling unicode" <| - actual_mask = Default_Engine.from_enso_options [Global_Option.Ascii_Matching] + actual_mask = Default_Engine.from_enso_options [Regex_Option.Ascii_Matching] actual_mask . should_equal 0 Test.specify "should result in an error when an option is invalid" <| - Default_Engine.from_enso_options [""] . should_fail_with Default_Engine.Invalid_Option_Error_Data - Default_Engine.from_enso_options ["", Global_Option.Ascii_Matching] . should_fail_with Default_Engine.Invalid_Option_Error_Data + Default_Engine.from_enso_options [""] . should_fail_with Invalid_Option.Error + Default_Engine.from_enso_options ["", Regex_Option.Ascii_Matching] . should_fail_with Invalid_Option.Error Test.group "The default regex engine (Default_Engine)" <| @@ -45,24 +45,24 @@ spec = Test.specify "should be able to compile patterns with global options" <| engine = Default_Engine.new - pattern = engine.compile "^a$" [Global_Option.Multiline] + pattern = engine.compile "^a$" [Regex_Option.Multiline] pattern.engine . should_equal engine - pattern.options . 
should_equal [Global_Option.Multiline] + pattern.options . should_equal [Regex_Option.Multiline] pattern.internal_pattern.flags . should_equal (default_mask.bit_or Java_Pattern.MULTILINE) Test.specify "should be able to compile patterns with engine-specific options" <| - engine = Default_Engine.new [Default_Engine.Literal_Pattern] + engine = Default_Engine.new [Default_Engine.Option.Literal_Pattern] pattern = engine.compile "^a$" [] pattern.engine . should_equal engine - pattern.options . should_equal [Default_Engine.Literal_Pattern] + pattern.options . should_equal [Default_Engine.Option.Literal_Pattern] pattern.internal_pattern.flags . should_equal (default_mask.bit_or Java_Pattern.LITERAL) Test.specify "should be able to compile patterns with combined options" <| - engine = Default_Engine.new [Default_Engine.Literal_Pattern] - pattern = engine.compile "^a$" [Global_Option.Comments] + engine = Default_Engine.new [Default_Engine.Option.Literal_Pattern] + pattern = engine.compile "^a$" [Regex_Option.Comments] pattern.engine . should_equal engine - pattern.options.contains Default_Engine.Literal_Pattern . should_be_true - pattern.options.contains Global_Option.Comments . should_be_true + pattern.options.contains Default_Engine.Option.Literal_Pattern . should_be_true + pattern.options.contains Regex_Option.Comments . should_be_true pattern.internal_pattern.flags . should_equal (default_mask . bit_or Java_Pattern.LITERAL . bit_or Java_Pattern.COMMENTS) Test.specify "should return a syntax error of the regex syntax is invalid" <| @@ -71,7 +71,7 @@ spec = Test.specify "should throw an invalid options error if an option is invalid" <| engine = Default_Engine.new - engine.compile "^a$" ["invalid"] . should_fail_with Default_Engine.Invalid_Option_Error_Data + engine.compile "^a$" ["invalid"] . should_fail_with Invalid_Option.Error Test.specify "should escape an expression for use as a literal" <| pattern = "http://example.com" @@ -102,14 +102,14 @@ spec = Test.specify "should be able to `match` the first instance of the pattern in the input" <| pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "aa ab abc a bc bcd" - match = pattern.match input mode=Regex_Mode.First - match . should_be_a Default_Engine.Match_Data + match = pattern.match input mode=Matching_Mode.First + match . should_be_a Default_Engine.Match.Value match.group 0 . should_equal input Test.specify "should return `Nothing` if there are no matches in first mode" <| pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "abc" - match = pattern.match input mode=Regex_Mode.First + match = pattern.match input mode=Matching_Mode.First match . should_equal Nothing Test.specify "should be able to `match` at most N instances of the pattern in the input" <| @@ -157,7 +157,7 @@ spec = pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "aa ab abc a bc bcd" match = pattern.match input mode=Regex_Mode.Full - match . should_be_a Default_Engine.Match_Data + match . should_be_a Default_Engine.Match.Value match.group 0 . should_equal input Test.specify "should return `Nothing` if a full match does not match the entire input" <| @@ -165,8 +165,8 @@ spec = input = "aa ab" full_match = pattern.match input mode=Regex_Mode.Full full_match . should_equal Nothing - match = pattern.match input mode=Regex_Mode.First - match . should_be_a Default_Engine.Match_Data + match = pattern.match input mode=Matching_Mode.First + match . 
should_be_a Default_Engine.Match.Value Test.specify "should be able to `match` the pattern against bounded input" <| pattern = engine.compile "(..)" [] @@ -199,14 +199,14 @@ spec = Test.specify "should be able to `find` the first instance of the pattern in the input" <| pattern = engine.compile "(..)" [] input = "abcdefghij" - match = pattern.find input mode=Regex_Mode.First + match = pattern.find input mode=Matching_Mode.First match . should_be_a Text match . should_equal "ab" Test.specify "should return `Nothing` if there are no matches in first mode" <| pattern = engine.compile "(aa)" [] input = "abcdefghij" - match = pattern.find input mode=Regex_Mode.First + match = pattern.find input mode=Matching_Mode.First match . should_equal Nothing Test.specify "should be able to `find` at most N instances of the pattern in the input" <| @@ -299,7 +299,7 @@ spec = Test.specify "should be able to `split` on the first instance of the pattern" <| pattern = engine.compile "cd" [] input = "abcdefghij" - match = pattern.split input mode=Regex_Mode.First + match = pattern.split input mode=Matching_Mode.First match.length . should_equal 2 match.at 0 . should_equal "ab" match.at 1 . should_equal "efghij" @@ -307,7 +307,7 @@ spec = Test.specify "should return the original text if there are no matches in first mode" <| pattern = engine.compile "(aa)" [] input = "abcdefghij" - match = pattern.split input mode=Regex_Mode.First + match = pattern.split input mode=Matching_Mode.First match . should_equal ["abcdefghij"] Test.specify "should be able to `split` on at most N instances of the pattern in the input" <| @@ -348,14 +348,14 @@ spec = Test.specify "should be able to `replace` the first instance of the pattern in the input" <| pattern = engine.compile "abc" [] input = "aa ab abc a bc abc" - match = pattern.replace input "REPLACED" mode=Regex_Mode.First + match = pattern.replace input "REPLACED" mode=Matching_Mode.First match . should_be_a Text match . should_equal "aa ab REPLACED a bc abc" Test.specify "should return the string unchanged if there are no matches to replace in first mode" <| pattern = engine.compile "xyz" [] input = "aa ab ac ad" - match = pattern.replace input "REPLACED" mode=Regex_Mode.First + match = pattern.replace input "REPLACED" mode=Matching_Mode.First match . should_equal input Test.specify "should be able to replace at most N instances of the pattern in the input" <| @@ -425,7 +425,7 @@ spec = pattern.replace "foo bar, baz" "[$1]" mode=2 . should_equal "[foo] [bar], baz" pattern.replace "foo bar, baz" "[$1]" mode=3 . should_equal "[foo] [bar], [baz]" pattern.replace "foo bar, baz" "[$1]" mode=4 . should_equal "[foo] [bar], [baz]" - pattern.replace "foo bar, baz" "[$1]" mode=Regex_Mode.First . should_equal "[foo] bar, baz" + pattern.replace "foo bar, baz" "[$1]" mode=Matching_Mode.First . should_equal "[foo] bar, baz" pattern.replace "foo bar, baz" "[$1]" mode=Matching_Mode.Last . should_equal "foo bar, [baz]" pattern.replace "foo bar, baz" "[${capture}]" mode=Regex_Mode.All . should_equal "[foo] [bar], [baz]" @@ -434,7 +434,7 @@ spec = pattern.replace "foo bar, baz" "[${capture}]" mode=2 . should_equal "[foo] [bar], baz" pattern.replace "foo bar, baz" "[${capture}]" mode=3 . should_equal "[foo] [bar], [baz]" pattern.replace "foo bar, baz" "[${capture}]" mode=4 . should_equal "[foo] [bar], [baz]" - pattern.replace "foo bar, baz" "[${capture}]" mode=Regex_Mode.First . should_equal "[foo] bar, baz" + pattern.replace "foo bar, baz" "[${capture}]" mode=Matching_Mode.First . 
should_equal "[foo] bar, baz" pattern.replace "foo bar, baz" "[${capture}]" mode=Matching_Mode.Last . should_equal "foo bar, [baz]" Test.specify "should handle capture groups in replacement in All mode" <| @@ -450,8 +450,8 @@ spec = engine = Default_Engine.new pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "aa ab abc a bc bcd" - match = pattern.match input mode=Regex_Mode.First - match . should_be_a Default_Engine.Match_Data + match = pattern.match input mode=Matching_Mode.First + match . should_be_a Default_Engine.Match.Value Test.specify "should return the full match with index 0" <| match.group 0 . should_equal "aa ab abc a bc bcd" @@ -466,8 +466,8 @@ spec = match.group 3 . should_equal Nothing Test.specify "should fail with No_Such_Group_Error if the group did not exist" <| - match.group "fail" . should_fail_with No_Such_Group_Error_Data - match.group 5 . should_fail_with No_Such_Group_Error_Data + match.group "fail" . should_fail_with No_Such_Group.Error + match.group 5 . should_fail_with No_Such_Group.Error Test.specify "should make named groups accessible by index" <| match.group 2 . should_equal (match.group "letters") @@ -476,8 +476,8 @@ spec = engine = Default_Engine.new pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "aa ab abc a bc bcd" - match = pattern.match input mode=Regex_Mode.First - match . should_be_a Default_Engine.Match_Data + match = pattern.match input mode=Matching_Mode.First + match . should_be_a Default_Engine.Match.Value Test.specify "should return the results of all groups" <| groups = match.groups @@ -493,8 +493,8 @@ spec = engine = Default_Engine.new pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "aa ab abc a bc bcd" - match = pattern.match input mode=Regex_Mode.First - match . should_be_a Default_Engine.Match_Data + match = pattern.match input mode=Matching_Mode.First + match . should_be_a Default_Engine.Match.Value Test.specify "should return the results of all named groups" <| groups = match.named_groups @@ -512,8 +512,8 @@ spec = engine = Default_Engine.new pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "aa ab abc a bc bcd" - match = pattern.match input mode=Regex_Mode.First - match . should_be_a Default_Engine.Match_Data + match = pattern.match input mode=Matching_Mode.First + match . should_be_a Default_Engine.Match.Value Test.specify "should return the start of a group by index" <| match.start 1 . should_equal 0 @@ -526,15 +526,15 @@ spec = match.start "empty" . should_equal Nothing Test.specify "should return No_Such_Group_Error if the group doesn't exist" <| - match.start 5 . should_fail_with No_Such_Group_Error_Data - match.start "nonexistent" . should_fail_with No_Such_Group_Error_Data + match.start 5 . should_fail_with No_Such_Group.Error + match.start "nonexistent" . should_fail_with No_Such_Group.Error Test.group "Match.end" <| engine = Default_Engine.new pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "aa ab abc a bc bcd" - match = pattern.match input mode=Regex_Mode.First - match . should_be_a Default_Engine.Match_Data + match = pattern.match input mode=Matching_Mode.First + match . should_be_a Default_Engine.Match.Value Test.specify "should return the end of a group by index" <| match.end 1 . should_equal 6 @@ -547,36 +547,36 @@ spec = match.end "empty" . should_equal Nothing Test.specify "should return No_Such_Group_Error if the group doesn't exist" <| - match.end 5 . should_fail_with No_Such_Group_Error_Data - match.end "nonexistent" . 
should_fail_with No_Such_Group_Error_Data + match.end 5 . should_fail_with No_Such_Group.Error + match.end "nonexistent" . should_fail_with No_Such_Group.Error Test.group "Match.span" <| engine = Default_Engine.new pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "aa ab abc a bc bcd" - match = pattern.match input mode=Regex_Mode.First - match . should_be_a Default_Engine.Match_Data + match = pattern.match input mode=Matching_Mode.First + match . should_be_a Default_Engine.Match.Value Test.specify "should get the span of a group by index" <| - match.span 1 . should_equal (Utf_16_Span_Data (0.up_to 6) input) + match.span 1 . should_equal (Utf_16_Span.Value (0.up_to 6) input) Test.specify "should get the span of a group by name" <| - match.span "letters" . should_equal (Utf_16_Span_Data (6.up_to 18) input) + match.span "letters" . should_equal (Utf_16_Span.Value (6.up_to 18) input) Test.specify "should return Nothing if the group didn't match" <| match.span 3 . should_equal Nothing match.span "empty" . should_equal Nothing Test.specify "should fail with a No_Such_Group_Error if the group doesn't exist" <| - match.span 5 . should_fail_with No_Such_Group_Error_Data - match.span "nonexistent" . should_fail_with No_Such_Group_Error_Data + match.span 5 . should_fail_with No_Such_Group.Error + match.span "nonexistent" . should_fail_with No_Such_Group.Error Test.group "Match.start_position" <| engine = Default_Engine.new pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "aa ab abc a bc bcd" - match = pattern.match input mode=Regex_Mode.First - match . should_be_a Default_Engine.Match_Data + match = pattern.match input mode=Matching_Mode.First + match . should_be_a Default_Engine.Match.Value Test.specify "should return the region start over which self match was performed" <| match.start_position . should_equal 0 @@ -585,8 +585,8 @@ spec = engine = Default_Engine.new pattern = engine.compile "(.. .. )(?.+)()??(?)??" [] input = "aa ab abc a bc bcd" - match = pattern.match input mode=Regex_Mode.First - match . should_be_a Default_Engine.Match_Data + match = pattern.match input mode=Matching_Mode.First + match . should_be_a Default_Engine.Match.Value Test.specify "should return the region end over which self match was performed" <| match.end_position . should_equal 18 diff --git a/test/Tests/src/Data/Text/Encoding_Spec.enso b/test/Tests/src/Data/Text/Encoding_Spec.enso index f7b8fbdb890b..1bf37240fcb2 100644 --- a/test/Tests/src/Data/Text/Encoding_Spec.enso +++ b/test/Tests/src/Data/Text/Encoding_Spec.enso @@ -1,7 +1,5 @@ from Standard.Base import all -from Standard.Base.Data.Text.Encoding import all_character_sets, all_encodings, Encoding - from Standard.Test import Test, Test_Suite, Problems import Standard.Test.Extensions @@ -16,15 +14,15 @@ spec = Test.specify "Catches invalid character sets" <| invalid = Encoding.Value "NotAValidCharacterSet" - invalid.to_java_charset . should_fail_with Illegal_Argument_Error_Data + invalid.to_java_charset . should_fail_with Illegal_Argument.Error Test.specify "Can get full set of character sets" <| - character_sets = all_character_sets + character_sets = Encoding.all_character_sets character_sets.length . should_not_equal 0 character_sets.contains "UTF-8" . should_equal True Test.specify "Can get full set of encoding objects" <| - encodings = all_encodings + encodings = Encoding.all_encodings encodings.length . should_not_equal 0 encodings.contains Encoding.utf_8 . 
should_equal True diff --git a/test/Tests/src/Data/Text/Matching_Spec.enso b/test/Tests/src/Data/Text/Matching_Spec.enso index 121039328e86..b45552d9695b 100644 --- a/test/Tests/src/Data/Text/Matching_Spec.enso +++ b/test/Tests/src/Data/Text/Matching_Spec.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Data.Text.Matching from Standard.Test import Test, Test_Suite, Problems import Standard.Test.Extensions @@ -50,11 +51,11 @@ spec = Test.group 'Matching Helper' <| Test.specify 'should correctly handle criteria which did not match anything' <| action = Text_Matcher.Case_Sensitive.match_criteria ["foo", "bar", "baz"] ["baz", "unknown_column"] reorder=True on_problems=_ tester = _.should_equal ["baz"] - problems = [No_Matches_Found_Data ["unknown_column"]] + problems = [Matching.No_Matches_Found.Error ["unknown_column"]] Problems.test_problem_handling action problems tester action_2 = Text_Matcher.Case_Sensitive.match_criteria ["foo", "bar", "baz"] ["baz", "unknown_column_1", "unknown_column_2"] reorder=False on_problems=_ - problems_2 = [No_Matches_Found_Data ["unknown_column_1", "unknown_column_2"]] + problems_2 = [Matching.No_Matches_Found.Error ["unknown_column_1", "unknown_column_2"]] Problems.test_problem_handling action_2 problems_2 tester Test.specify 'should correctly work with complex object using a function extracting their names' <| diff --git a/test/Tests/src/Data/Text/Regex_Spec.enso b/test/Tests/src/Data/Text/Regex_Spec.enso index f51d8f0f3aa7..9df40c71cb0f 100644 --- a/test/Tests/src/Data/Text/Regex_Spec.enso +++ b/test/Tests/src/Data/Text/Regex_Spec.enso @@ -1,6 +1,5 @@ from Standard.Base import all -import Standard.Base.Data.Text.Regex.Option import Standard.Base.Data.Text.Regex.Engine.Default as Default_Engine from Standard.Test import Test, Test_Suite @@ -10,17 +9,17 @@ spec = Test.group "Regex options handling" <| Test.specify "should work properly with flag options" <| flags = Regex.from_flags match_ascii=True case_insensitive=Nothing dot_matches_newline=True multiline=False comments=True extra_opts=[] - flags . should_equal [Option.Ascii_Matching, Option.Dot_Matches_Newline, Option.Comments] + flags . should_equal [Regex_Option.Ascii_Matching, Regex_Option.Dot_Matches_Newline, Regex_Option.Comments] Test.specify "should properly override vector options" <| - flags = Regex.from_flags match_ascii=True case_insensitive=Nothing dot_matches_newline=True multiline=False comments=True extra_opts=[Option.Multiline, Option.Case_Insensitive] - flags . should_equal [Option.Ascii_Matching, Option.Case_Insensitive, Option.Dot_Matches_Newline, Option.Comments] + flags = Regex.from_flags match_ascii=True case_insensitive=Nothing dot_matches_newline=True multiline=False comments=True extra_opts=[Regex_Option.Multiline, Regex_Option.Case_Insensitive] + flags . should_equal [Regex_Option.Ascii_Matching, Regex_Option.Case_Insensitive, Regex_Option.Dot_Matches_Newline, Regex_Option.Comments] Test.group "Regexes" <| Test.specify "should be able to be compiled" <| pattern = Regex.compile "(?..)" case_insensitive=True - pattern . should_be_a Default_Engine.Pattern_Data - pattern.options . should_equal [Option.Case_Insensitive] + pattern . should_be_a Default_Engine.Pattern.Value + pattern.options . 
should_equal [Regex_Option.Case_Insensitive] Test.specify "should be able to be escaped" <| pattern = "http://example.com" diff --git a/test/Tests/src/Data/Text/Span_Spec.enso b/test/Tests/src/Data/Text/Span_Spec.enso index 6b835b20f1a7..4c17d3377ff6 100644 --- a/test/Tests/src/Data/Text/Span_Spec.enso +++ b/test/Tests/src/Data/Text/Span_Spec.enso @@ -8,7 +8,7 @@ spec = Test.group "Text.Span" <| Test.specify "should be able to be created over a text" <| text = "Hello!" - span = Span_Data (0.up_to 3) text + span = Span.Value (0.up_to 3) text span.start . should_equal 0 span.end . should_equal 3 span.parent . should_equal text @@ -16,30 +16,25 @@ spec = Test.group "Text.Span" <| Test.specify "should be able to be converted to code units" <| text = 'ae\u{301}fz' - span = Span_Data (1.up_to 3) text - span.to_utf_16_span . should_equal (Utf_16_Span_Data (1.up_to 4) text) + span = Span.Value (1.up_to 3) text + span.to_utf_16_span . should_equal (Utf_16_Span.Value (1.up_to 4) text) span.text . should_equal 'e\u{301}f' Test.specify "should expand to the associated grapheme clusters" <| text = 'a\u{301}e\u{302}o\u{303}' - span = Utf_16_Span_Data (1.up_to 5) text + span = Utf_16_Span.Value (1.up_to 5) text extended = span.to_grapheme_span - extended . should_equal (Span_Data (0.up_to 3) text) - extended.to_utf_16_span . should_equal (Utf_16_Span_Data (0.up_to 6) text) - - Utf_16_Span_Data (0.up_to 2) text . to_grapheme_span . should_equal (Span_Data (0.up_to 1) text) - Utf_16_Span_Data (0.up_to 1) text . to_grapheme_span . should_equal (Span_Data (0.up_to 1) text) - Utf_16_Span_Data (0.up_to 0) text . to_grapheme_span . should_equal (Span_Data (0.up_to 0) text) - Utf_16_Span_Data (1.up_to 1) text . to_grapheme_span . should_equal (Span_Data (0.up_to 0) text) - Utf_16_Span_Data (2.up_to 2) text . to_grapheme_span . should_equal (Span_Data (1.up_to 1) text) - - Utf_16_Span_Data (0.up_to 4) text . to_grapheme_span . should_equal (Span_Data (0.up_to 2) text) - Utf_16_Span_Data (0.up_to 3) text . to_grapheme_span . should_equal (Span_Data (0.up_to 2) text) - Utf_16_Span_Data (0.up_to 2) text . to_grapheme_span . should_equal (Span_Data (0.up_to 1) text) - - Test.specify "should be able to use the conversions" <| - text = 'ae\u{301}fz' - Utf_16_Span.from (Span_Data (1.up_to 3) text) . should_equal (Utf_16_Span_Data (1.up_to 4) text) - Span.from (Utf_16_Span_Data (2.up_to 4) text) . should_equal (Span_Data (1.up_to 3) text) + extended . should_equal (Span.Value (0.up_to 3) text) + extended.to_utf_16_span . should_equal (Utf_16_Span.Value (0.up_to 6) text) + + Utf_16_Span.Value (0.up_to 2) text . to_grapheme_span . should_equal (Span.Value (0.up_to 1) text) + Utf_16_Span.Value (0.up_to 1) text . to_grapheme_span . should_equal (Span.Value (0.up_to 1) text) + Utf_16_Span.Value (0.up_to 0) text . to_grapheme_span . should_equal (Span.Value (0.up_to 0) text) + Utf_16_Span.Value (1.up_to 1) text . to_grapheme_span . should_equal (Span.Value (0.up_to 0) text) + Utf_16_Span.Value (2.up_to 2) text . to_grapheme_span . should_equal (Span.Value (1.up_to 1) text) + + Utf_16_Span.Value (0.up_to 4) text . to_grapheme_span . should_equal (Span.Value (0.up_to 2) text) + Utf_16_Span.Value (0.up_to 3) text . to_grapheme_span . should_equal (Span.Value (0.up_to 2) text) + Utf_16_Span.Value (0.up_to 2) text . to_grapheme_span . 
should_equal (Span.Value (0.up_to 1) text) main = Test_Suite.run_main spec diff --git a/test/Tests/src/Data/Text/Utils_Spec.enso b/test/Tests/src/Data/Text/Utils_Spec.enso index 89d1a539dd5a..2204b367cc06 100644 --- a/test/Tests/src/Data/Text/Utils_Spec.enso +++ b/test/Tests/src/Data/Text/Utils_Spec.enso @@ -56,8 +56,8 @@ spec = folded.findGrapheme ix . index grapheme_ixes . should_equal [0, 0, 1, 2, 3, 3, 4, 4, 4, 5, 6] - Test.expect_panic_with (folded.findGrapheme -1) Polyglot_Error_Data - Test.expect_panic_with (folded.findGrapheme folded.getFoldedString.char_vector.length+1) Polyglot_Error_Data + Test.expect_panic_with (folded.findGrapheme -1) Polyglot_Error.Polyglot_Error_Data + Test.expect_panic_with (folded.findGrapheme folded.getFoldedString.char_vector.length+1) Polyglot_Error.Polyglot_Error_Data Test.specify "should correctly take prefix and suffix of a string" <| txt = 's\u0301ąśc\u0301' diff --git a/test/Tests/src/Data/Text_Spec.enso b/test/Tests/src/Data/Text_Spec.enso index 29f8f7bbc7bb..f7b773f465a1 100644 --- a/test/Tests/src/Data/Text_Spec.enso +++ b/test/Tests/src/Data/Text_Spec.enso @@ -212,7 +212,7 @@ spec = "".split "." . should_equal [""] "abc[a-z]def".split "[a-z]" . should_equal ["abc", "def"] 'aśbs\u{301}c'.split 'ś' . should_equal ['a', 'b', 'c'] - 'abc'.split '' . should_fail_with Illegal_Argument_Error_Data + 'abc'.split '' . should_fail_with Illegal_Argument.Error Test.specify "should be able to split the text on arbitrary text sequence, case-insensitively" <| matcher = Text_Matcher.Case_Insensitive @@ -222,7 +222,7 @@ spec = "baB".split "b" matcher . should_equal ["", "a", ""] "".split "a" matcher . should_equal [""] 'aŚbS\u{301}c'.split 'ś' matcher . should_equal ['a', 'b', 'c'] - 'abc'.split '' matcher . should_fail_with Illegal_Argument_Error_Data + 'abc'.split '' matcher . should_fail_with Illegal_Argument.Error Test.specify "should be able to split the text on Regex patterns" <| "cababdabe" . split "ab" (Regex_Matcher.Regex_Matcher_Data case_sensitivity=Case_Sensitivity.Sensitive) . should_equal ["c", "", "d", "e"] @@ -234,7 +234,7 @@ spec = ".a.".split "\." (Regex_Matcher.Regex_Matcher_Data case_sensitivity=Case_Sensitivity.Sensitive) . should_equal ["", "a", ""] "".split "a" (Regex_Matcher.Regex_Matcher_Data case_sensitivity=Case_Sensitivity.Sensitive) . should_equal [""] 'aśbs\u{301}c'.split 'ś' (Regex_Matcher.Regex_Matcher_Data case_sensitivity=Case_Sensitivity.Sensitive) . should_equal ['a', 'b', 'c'] - 'abc'.split '' (Regex_Matcher.Regex_Matcher_Data case_sensitivity=Case_Sensitivity.Sensitive) . should_fail_with Illegal_Argument_Error_Data + 'abc'.split '' (Regex_Matcher.Regex_Matcher_Data case_sensitivity=Case_Sensitivity.Sensitive) . should_fail_with Illegal_Argument.Error Test.specify "should be able to split the text on UTF-8 whitespace" <| utf_8_whitespace.split "\s+" (Regex_Matcher.Regex_Matcher_Data case_sensitivity=Case_Sensitivity.Sensitive) . should_equal utf_8_whitespace_split @@ -495,7 +495,7 @@ spec = "ABC".take (By_Index -1) . should_equal "C" "ABC".take (By_Index [-1, -1, -1, -3, 2]) . should_equal "CCCAC" "ABC".take (By_Index []) . should_equal "" - "ABC".take (By_Index ((-2).up_to -1)) . should_fail_with Illegal_Argument_Error_Data + "ABC".take (By_Index ((-2).up_to -1)) . should_fail_with Illegal_Argument.Error "".take (Every 2) . should_equal "" "".take (Every 2 first=1) . should_equal "" "ABC".take (Every 5) . should_equal "A" @@ -1019,9 +1019,9 @@ spec = "HELLO".pad 8 "AB" . 
should_equal "HELLOABA" "HELLO".pad 8 "AB" Location.Start . should_equal "BABHELLO" "".pad 4 . should_equal " " - "A".pad 3 "" . should_fail_with Illegal_Argument_Error_Data - "ABCDE".pad 3 "" . should_fail_with Illegal_Argument_Error_Data - "".pad 0 "" . should_fail_with Illegal_Argument_Error_Data + "A".pad 3 "" . should_fail_with Illegal_Argument.Error + "ABCDE".pad 3 "" . should_fail_with Illegal_Argument.Error + "".pad 0 "" . should_fail_with Illegal_Argument.Error "".pad 0 . should_equal "" "ABC".pad 3 . should_equal "ABC" @@ -1114,8 +1114,8 @@ spec = Test.specify "locate should work as shown in examples" <| example_1 = "Hello World!".locate "J" == Nothing - "Hello World!".locate "o" == Span_Data (4.up_to 5) "Hello World!" - "Hello World!".locate "o" mode=Matching_Mode.Last == Span_Data (4.up_to 5) "Hello World!" + "Hello World!".locate "o" == Span.Value (4.up_to 5) "Hello World!" + "Hello World!".locate "o" mode=Matching_Mode.Last == Span.Value (4.up_to 5) "Hello World!" example_2 = term = "straße" @@ -1169,105 +1169,105 @@ spec = "Hello World!".locate_all "o" . map .start . should_equal [4, 7] accents = 'a\u{301}e\u{301}o\u{301}' - accents.locate accent_1 . should_equal (Span_Data (1.up_to 2) accents) + accents.locate accent_1 . should_equal (Span.Value (1.up_to 2) accents) "".locate "foo" . should_equal Nothing "".locate "foo" mode=Matching_Mode.Last . should_equal Nothing "".locate_all "foo" . should_equal [] - "".locate "" . should_equal (Span_Data (0.up_to 0) "") - "".locate "" mode=Matching_Mode.Last . should_equal (Span_Data (0.up_to 0) "") - "".locate_all "" . should_equal [Span_Data (0.up_to 0) ""] + "".locate "" . should_equal (Span.Value (0.up_to 0) "") + "".locate "" mode=Matching_Mode.Last . should_equal (Span.Value (0.up_to 0) "") + "".locate_all "" . should_equal [Span.Value (0.up_to 0) ""] abc = 'A\u{301}ßC' - abc.locate "" . should_equal (Span_Data (0.up_to 0) abc) - abc.locate "" mode=Matching_Mode.Last . should_equal (Span_Data (3.up_to 3) abc) - abc.locate_all "" . should_equal [Span_Data (0.up_to 0) abc, Span_Data (1.up_to 1) abc, Span_Data (2.up_to 2) abc, Span_Data (3.up_to 3) abc] + abc.locate "" . should_equal (Span.Value (0.up_to 0) abc) + abc.locate "" mode=Matching_Mode.Last . should_equal (Span.Value (3.up_to 3) abc) + abc.locate_all "" . should_equal [Span.Value (0.up_to 0) abc, Span.Value (1.up_to 1) abc, Span.Value (2.up_to 2) abc, Span.Value (3.up_to 3) abc] Test.specify "should allow case-insensitive matching in locate" <| hello = "Hello WORLD!" case_insensitive = Text_Matcher.Case_Insensitive hello.locate "world" . should_equal Nothing - hello.locate "world" matcher=case_insensitive . should_equal (Span_Data (6.up_to 11) hello) + hello.locate "world" matcher=case_insensitive . should_equal (Span.Value (6.up_to 11) hello) - hello.locate "o" mode=Regex_Mode.First matcher=case_insensitive . should_equal (Span_Data (4.up_to 5) hello) - hello.locate "o" mode=Matching_Mode.Last matcher=case_insensitive . should_equal (Span_Data (7.up_to 8) hello) + hello.locate "o" mode=Matching_Mode.First matcher=case_insensitive . should_equal (Span.Value (4.up_to 5) hello) + hello.locate "o" mode=Matching_Mode.Last matcher=case_insensitive . should_equal (Span.Value (7.up_to 8) hello) accents = 'A\u{301}E\u{301}O\u{301}' - accents.locate accent_1 matcher=case_insensitive . should_equal (Span_Data (1.up_to 2) accents) + accents.locate accent_1 matcher=case_insensitive . should_equal (Span.Value (1.up_to 2) accents) - "Strasse".locate "ß" matcher=case_insensitive . 
should_equal (Span_Data (4.up_to 6) "Strasse") - "Monumentenstraße 42".locate "STRASSE" matcher=case_insensitive . should_equal (Span_Data (10.up_to 16) "Monumentenstraße 42") + "Strasse".locate "ß" matcher=case_insensitive . should_equal (Span.Value (4.up_to 6) "Strasse") + "Monumentenstraße 42".locate "STRASSE" matcher=case_insensitive . should_equal (Span.Value (10.up_to 16) "Monumentenstraße 42") - '\u0390'.locate '\u03B9\u0308\u0301' matcher=case_insensitive . should_equal (Span_Data (0.up_to 1) '\u0390') + '\u0390'.locate '\u03B9\u0308\u0301' matcher=case_insensitive . should_equal (Span.Value (0.up_to 1) '\u0390') 'ԵՒ'.locate 'և' . should_equal Nothing - 'ԵՒ'.locate 'և' matcher=case_insensitive . should_equal (Span_Data (0.up_to 2) 'ԵՒ') - 'և'.locate 'ԵՒ' matcher=case_insensitive . should_equal (Span_Data (0.up_to 1) 'և') + 'ԵՒ'.locate 'և' matcher=case_insensitive . should_equal (Span.Value (0.up_to 2) 'ԵՒ') + 'և'.locate 'ԵՒ' matcher=case_insensitive . should_equal (Span.Value (0.up_to 1) 'և') ligatures = 'ffafffiflffifflſtstZ' - ligatures.locate 'FFI' matcher=case_insensitive . should_equal (Span_Data (3.up_to 5) ligatures) - ligatures.locate 'FF' matcher=case_insensitive . should_equal (Span_Data (0.up_to 2) ligatures) - ligatures.locate 'ff' matcher=case_insensitive mode=Matching_Mode.Last . should_equal (Span_Data (7.up_to 8) ligatures) - ligatures.locate_all 'ff' . should_equal [Span_Data (0.up_to 2) ligatures] - ligatures.locate_all 'FF' matcher=case_insensitive . should_equal [Span_Data (0.up_to 2) ligatures, Span_Data (3.up_to 4) ligatures, Span_Data (6.up_to 7) ligatures, Span_Data (7.up_to 8) ligatures] - ligatures.locate_all 'ffi' matcher=case_insensitive . should_equal [Span_Data (3.up_to 5) ligatures, Span_Data (6.up_to 7) ligatures] - 'fffi'.locate_all 'ff' matcher=case_insensitive . should_equal [Span_Data (0.up_to 2) 'fffi'] + ligatures.locate 'FFI' matcher=case_insensitive . should_equal (Span.Value (3.up_to 5) ligatures) + ligatures.locate 'FF' matcher=case_insensitive . should_equal (Span.Value (0.up_to 2) ligatures) + ligatures.locate 'ff' matcher=case_insensitive mode=Matching_Mode.Last . should_equal (Span.Value (7.up_to 8) ligatures) + ligatures.locate_all 'ff' . should_equal [Span.Value (0.up_to 2) ligatures] + ligatures.locate_all 'FF' matcher=case_insensitive . should_equal [Span.Value (0.up_to 2) ligatures, Span.Value (3.up_to 4) ligatures, Span.Value (6.up_to 7) ligatures, Span.Value (7.up_to 8) ligatures] + ligatures.locate_all 'ffi' matcher=case_insensitive . should_equal [Span.Value (3.up_to 5) ligatures, Span.Value (6.up_to 7) ligatures] + 'fffi'.locate_all 'ff' matcher=case_insensitive . should_equal [Span.Value (0.up_to 2) 'fffi'] 'fffi'.locate_all 'ffi' . should_equal [] - 'fffi'.locate_all 'ffi' matcher=case_insensitive . should_equal [Span_Data (1.up_to 4) 'fffi'] - 'FFFI'.locate 'ffi' matcher=case_insensitive . should_equal (Span_Data (1.up_to 4) 'FFFI') + 'fffi'.locate_all 'ffi' matcher=case_insensitive . should_equal [Span.Value (1.up_to 4) 'fffi'] + 'FFFI'.locate 'ffi' matcher=case_insensitive . should_equal (Span.Value (1.up_to 4) 'FFFI') - 'ffiffl'.locate 'IF' matcher=case_insensitive . should_equal (Span_Data (0.up_to 2) 'ffiffl') - 'ffiffl'.locate 'F' Matching_Mode.Last matcher=case_insensitive . should_equal (Span_Data (1.up_to 2) 'ffiffl') - 'ffiffl'.locate_all 'F' matcher=case_insensitive . 
should_equal [Span_Data (0.up_to 1) 'ffiffl', Span_Data (0.up_to 1) 'ffiffl', Span_Data (1.up_to 2) 'ffiffl', Span_Data (1.up_to 2) 'ffiffl'] - 'aaffibb'.locate_all 'af' matcher=case_insensitive . should_equal [Span_Data (1.up_to 3) 'aaffibb'] - 'aaffibb'.locate_all 'affi' matcher=case_insensitive . should_equal [Span_Data (1.up_to 3) 'aaffibb'] - 'aaffibb'.locate_all 'ib' matcher=case_insensitive . should_equal [Span_Data (2.up_to 4) 'aaffibb'] - 'aaffibb'.locate_all 'ffib' matcher=case_insensitive . should_equal [Span_Data (2.up_to 4) 'aaffibb'] + 'ffiffl'.locate 'IF' matcher=case_insensitive . should_equal (Span.Value (0.up_to 2) 'ffiffl') + 'ffiffl'.locate 'F' Matching_Mode.Last matcher=case_insensitive . should_equal (Span.Value (1.up_to 2) 'ffiffl') + 'ffiffl'.locate_all 'F' matcher=case_insensitive . should_equal [Span.Value (0.up_to 1) 'ffiffl', Span.Value (0.up_to 1) 'ffiffl', Span.Value (1.up_to 2) 'ffiffl', Span.Value (1.up_to 2) 'ffiffl'] + 'aaffibb'.locate_all 'af' matcher=case_insensitive . should_equal [Span.Value (1.up_to 3) 'aaffibb'] + 'aaffibb'.locate_all 'affi' matcher=case_insensitive . should_equal [Span.Value (1.up_to 3) 'aaffibb'] + 'aaffibb'.locate_all 'ib' matcher=case_insensitive . should_equal [Span.Value (2.up_to 4) 'aaffibb'] + 'aaffibb'.locate_all 'ffib' matcher=case_insensitive . should_equal [Span.Value (2.up_to 4) 'aaffibb'] "".locate "foo" matcher=case_insensitive . should_equal Nothing "".locate "foo" matcher=case_insensitive mode=Matching_Mode.Last . should_equal Nothing "".locate_all "foo" matcher=case_insensitive . should_equal [] - "".locate "" matcher=case_insensitive . should_equal (Span_Data (0.up_to 0) "") - "".locate "" matcher=case_insensitive mode=Matching_Mode.Last . should_equal (Span_Data (0.up_to 0) "") - "".locate_all "" matcher=case_insensitive . should_equal [Span_Data (0.up_to 0) ""] + "".locate "" matcher=case_insensitive . should_equal (Span.Value (0.up_to 0) "") + "".locate "" matcher=case_insensitive mode=Matching_Mode.Last . should_equal (Span.Value (0.up_to 0) "") + "".locate_all "" matcher=case_insensitive . should_equal [Span.Value (0.up_to 0) ""] abc = 'A\u{301}ßC' - abc.locate "" matcher=case_insensitive . should_equal (Span_Data (0.up_to 0) abc) - abc.locate "" matcher=case_insensitive mode=Matching_Mode.Last . should_equal (Span_Data (3.up_to 3) abc) - abc.locate_all "" matcher=case_insensitive . should_equal [Span_Data (0.up_to 0) abc, Span_Data (1.up_to 1) abc, Span_Data (2.up_to 2) abc, Span_Data (3.up_to 3) abc] + abc.locate "" matcher=case_insensitive . should_equal (Span.Value (0.up_to 0) abc) + abc.locate "" matcher=case_insensitive mode=Matching_Mode.Last . should_equal (Span.Value (3.up_to 3) abc) + abc.locate_all "" matcher=case_insensitive . should_equal [Span.Value (0.up_to 0) abc, Span.Value (1.up_to 1) abc, Span.Value (2.up_to 2) abc, Span.Value (3.up_to 3) abc] Test.specify "should allow regexes in locate" <| hello = "Hello World!" regex = Regex_Matcher.Regex_Matcher_Data regex_insensitive = Regex_Matcher.Regex_Matcher_Data case_sensitivity=Case_Sensitivity.Insensitive - hello.locate ".o" Matching_Mode.First matcher=regex . should_equal (Span_Data (3.up_to 5) hello) - hello.locate ".o" Matching_Mode.Last matcher=regex . should_equal (Span_Data (6.up_to 8) hello) + hello.locate ".o" Matching_Mode.First matcher=regex . should_equal (Span.Value (3.up_to 5) hello) + hello.locate ".o" Matching_Mode.Last matcher=regex . should_equal (Span.Value (6.up_to 8) hello) hello.locate_all ".o" matcher=regex . map .start . 
should_equal [3, 6] - "foobar".locate "BAR" Regex_Mode.First matcher=regex_insensitive . should_equal (Span_Data (3.up_to 6) "foobar") + "foobar".locate "BAR" Matching_Mode.First matcher=regex_insensitive . should_equal (Span.Value (3.up_to 6) "foobar") ## Regex matching does not do case folding - "Strasse".locate "ß" Regex_Mode.First matcher=regex_insensitive . should_equal Nothing + "Strasse".locate "ß" Matching_Mode.First matcher=regex_insensitive . should_equal Nothing ## But it should handle the Unicode normalization accents = 'a\u{301}e\u{301}o\u{301}' - accents.locate accent_1 Regex_Mode.First matcher=regex . should_equal (Span_Data (1.up_to 2) accents) + accents.locate accent_1 Matching_Mode.First matcher=regex . should_equal (Span.Value (1.up_to 2) accents) Test.specify "should correctly handle regex edge cases in locate" pending="Figure out how to make Regex correctly handle empty patterns." <| regex = Regex_Matcher.Regex_Matcher_Data "".locate "foo" matcher=regex . should_equal Nothing "".locate "foo" matcher=regex mode=Matching_Mode.Last . should_equal Nothing "".locate_all "foo" matcher=regex . should_equal [] - "".locate "" matcher=regex . should_equal (Span_Data (0.up_to 0) "") - "".locate_all "" matcher=regex . should_equal [Span_Data (0.up_to 0) ""] - "".locate "" matcher=regex mode=Matching_Mode.Last . should_equal (Span_Data (0.up_to 0) "") + "".locate "" matcher=regex . should_equal (Span.Value (0.up_to 0) "") + "".locate_all "" matcher=regex . should_equal [Span.Value (0.up_to 0) ""] + "".locate "" matcher=regex mode=Matching_Mode.Last . should_equal (Span.Value (0.up_to 0) "") abc = 'A\u{301}ßC' - abc.locate "" matcher=regex . should_equal (Span_Data (0.up_to 0) abc) - abc.locate_all "" matcher=regex . should_equal [Span_Data (0.up_to 0) abc, Span_Data (0.up_to 0) abc, Span_Data (1.up_to 1) abc, Span_Data (2.up_to 2) abc, Span_Data (3.up_to 3) abc] - abc.locate "" matcher=regex mode=Matching_Mode.Last . should_equal (Span_Data (3.up_to 3) abc) + abc.locate "" matcher=regex . should_equal (Span.Value (0.up_to 0) abc) + abc.locate_all "" matcher=regex . should_equal [Span.Value (0.up_to 0) abc, Span.Value (0.up_to 0) abc, Span.Value (1.up_to 1) abc, Span.Value (2.up_to 2) abc, Span.Value (3.up_to 3) abc] + abc.locate "" matcher=regex mode=Matching_Mode.Last . should_equal (Span.Value (3.up_to 3) abc) Test.specify "should handle overlapping matches as shown in the examples" <| - "aaa".locate "aa" mode=Matching_Mode.Last matcher=Text_Matcher.Case_Sensitive . should_equal (Span_Data (1.up_to 3) "aaa") - "aaa".locate "aa" mode=Matching_Mode.Last matcher=Regex_Matcher.Regex_Matcher_Data . should_equal (Span_Data (0.up_to 2) "aaa") + "aaa".locate "aa" mode=Matching_Mode.Last matcher=Text_Matcher.Case_Sensitive . should_equal (Span.Value (1.up_to 3) "aaa") + "aaa".locate "aa" mode=Matching_Mode.Last matcher=Regex_Matcher.Regex_Matcher_Data . should_equal (Span.Value (0.up_to 2) "aaa") - "aaa aaa".locate "aa" mode=Matching_Mode.Last matcher=Text_Matcher.Case_Sensitive . should_equal (Span_Data (5.up_to 7) "aaa aaa") - "aaa aaa".locate "aa" mode=Matching_Mode.Last matcher=Regex_Matcher.Regex_Matcher_Data . should_equal (Span_Data (4.up_to 6) "aaa aaa") + "aaa aaa".locate "aa" mode=Matching_Mode.Last matcher=Text_Matcher.Case_Sensitive . should_equal (Span.Value (5.up_to 7) "aaa aaa") + "aaa aaa".locate "aa" mode=Matching_Mode.Last matcher=Regex_Matcher.Regex_Matcher_Data . 
should_equal (Span.Value (4.up_to 6) "aaa aaa") Test.specify "should allow to match one or more occurrences of a pattern in the text" <| "abacadae".match_all "a[bc]" . should_equal ["ab", "ac"] @@ -1293,12 +1293,12 @@ spec = Test.specify "should default to exact matching for locate but regex for match" <| txt = "aba[bc]adacae" - "ab".locate "ab" . should_equal (Span_Data (0.up_to 2) "ab") + "ab".locate "ab" . should_equal (Span.Value (0.up_to 2) "ab") "ab".locate "a[bc]" . should_equal Nothing "ab".locate_all "a[bc]" . should_equal [] - txt.locate "a[bc]" . should_equal (Span_Data (2.up_to 7) txt) - txt.locate_all "a[bc]" . should_equal [Span_Data (2.up_to 7) txt] + txt.locate "a[bc]" . should_equal (Span.Value (2.up_to 7) txt) + txt.locate_all "a[bc]" . should_equal [Span.Value (2.up_to 7) txt] "ab".match "a[bc]" . should_equal "ab" "a[bc]".match "a[bc]" . should_equal Nothing diff --git a/test/Tests/src/Data/Time/Date_Spec.enso b/test/Tests/src/Data/Time/Date_Spec.enso index 192c063ae274..fbf55747c6e2 100644 --- a/test/Tests/src/Data/Time/Date_Spec.enso +++ b/test/Tests/src/Data/Time/Date_Spec.enso @@ -434,7 +434,7 @@ js_array_date year month=1 day=1 = js_set_zone arr.at(0) java_parse date_text pattern=Nothing = - Panic.catch Polyglot_Error_Data handler=(err -> Error.throw (Time_Error.Error err.payload.cause.getMessage)) <| + Panic.catch Polyglot_Error.Polyglot_Error_Data handler=(err -> Error.throw (Time_Error.Error err.payload.cause.getMessage)) <| if pattern.is_nothing then LocalDate.parse date_text else formatter = DateTimeFormatter.ofPattern pattern LocalDate.parse date_text formatter diff --git a/test/Tests/src/Data/Time/Date_Time_Spec.enso b/test/Tests/src/Data/Time/Date_Time_Spec.enso index 9dc8170666c6..bb176b0fa49b 100644 --- a/test/Tests/src/Data/Time/Date_Time_Spec.enso +++ b/test/Tests/src/Data/Time/Date_Time_Spec.enso @@ -638,7 +638,7 @@ maybe_parse_java_zoned text pattern=Nothing = ZonedDateTime.parse text pattern parse_java_local original_error text pattern=Nothing = - Panic.catch Polyglot_Error_Data handler=(_ -> Error.throw (Time_Error.Error original_error.payload.cause.getMessage)) <| + Panic.catch Polyglot_Error.Polyglot_Error_Data handler=(_ -> Error.throw (Time_Error.Error original_error.payload.cause.getMessage)) <| if pattern.is_nothing then LocalDateTime.parse text else formatter = DateTimeFormatter.ofPattern pattern LocalDateTime.parse text (formatter.withLocale Locale.default.java_locale) @@ -646,8 +646,8 @@ parse_java_local original_error text pattern=Nothing = java_parse date_text_raw pattern=Nothing = utc_replaced = date_text_raw.replace "[UTC]" "Z" date_text = if utc_replaced.ends_with "ZZ" then date_text_raw else utc_replaced - if pattern == Nothing then Panic.catch Polyglot_Error_Data (maybe_parse_java_zoned date_text) (err -> parse_java_local err date_text pattern) else + if pattern == Nothing then Panic.catch Polyglot_Error.Polyglot_Error_Data (maybe_parse_java_zoned date_text) (err -> parse_java_local err date_text pattern) else formatter = DateTimeFormatter.ofPattern(pattern) - Panic.catch Polyglot_Error_Data (maybe_parse_java_zoned date_text formatter) (err -> parse_java_local err date_text pattern) + Panic.catch Polyglot_Error.Polyglot_Error_Data (maybe_parse_java_zoned date_text formatter) (err -> parse_java_local err date_text pattern) main = Test_Suite.run_main spec diff --git a/test/Tests/src/Data/Time/Time_Of_Day_Spec.enso b/test/Tests/src/Data/Time/Time_Of_Day_Spec.enso index 7cc176b5f179..6dbab92270fd 100644 --- 
a/test/Tests/src/Data/Time/Time_Of_Day_Spec.enso +++ b/test/Tests/src/Data/Time/Time_Of_Day_Spec.enso @@ -164,7 +164,7 @@ java_time hour minute=0 second=0 nanoOfSecond=0 = Panic.catch Any (LocalTime.of hour minute second nanoOfSecond) (err -> Error.throw (Time_Error.Error <| err.payload.to_display_text.drop (First 16))) java_parse time_text pattern=Nothing = - Panic.catch Polyglot_Error_Data handler=(err -> Error.throw (Time_Error.Error err.payload.cause.getMessage)) <| + Panic.catch Polyglot_Error.Polyglot_Error_Data handler=(err -> Error.throw (Time_Error.Error err.payload.cause.getMessage)) <| if pattern.is_nothing then LocalTime.parse time_text else formatter = DateTimeFormatter.ofPattern pattern LocalTime.parse time_text (formatter.withLocale Locale.default.java_locale) diff --git a/test/Tests/src/Data/Vector_Spec.enso b/test/Tests/src/Data/Vector_Spec.enso index 7992afed87c5..1c7c7bc32dfb 100644 --- a/test/Tests/src/Data/Vector_Spec.enso +++ b/test/Tests/src/Data/Vector_Spec.enso @@ -310,7 +310,7 @@ spec = Test.group "Vectors" <| [1, 2, 3, 4, 5, 6].short_display_text max_entries=3 . should_equal "[1, 2, 3 and 3 more elements]" (0.up_to 100).to_vector.short_display_text max_entries=2 . should_equal "[0, 1 and 98 more elements]" - [].short_display_text max_entries=0 . should_fail_with Illegal_Argument_Error_Data + [].short_display_text max_entries=0 . should_fail_with Illegal_Argument.Error Test.specify "should define equality" <| [1,2,3]==[1,2] . should_be_false @@ -398,8 +398,8 @@ spec = Test.group "Vectors" <| vec.take (Every 2 first=100) . should_equal [] vec.take (Every 200) . should_equal [1] [].take (Every 2) . should_equal [] - vec.take (Every 0) . should_fail_with Illegal_Argument_Error_Data - [].take (Every 0) . should_fail_with Illegal_Argument_Error_Data + vec.take (Every 0) . should_fail_with Illegal_Argument.Error + [].take (Every 0) . should_fail_with Illegal_Argument.Error vec.drop (Every 1) . should_equal [] vec.drop (Every 3) . should_equal [2, 3, 5, 6] @@ -408,8 +408,8 @@ spec = Test.group "Vectors" <| vec.drop (Every 2 first=100) . should_equal vec vec.drop (Every 200) . should_equal [2, 3, 4, 5, 6] [].drop (Every 2) . should_equal [] - vec.drop (Every 0) . should_fail_with Illegal_Argument_Error_Data - [].drop (Every 0) . should_fail_with Illegal_Argument_Error_Data + vec.drop (Every 0) . should_fail_with Illegal_Argument.Error + [].drop (Every 0) . should_fail_with Illegal_Argument.Error vec.take (By_Index 0) . should_equal [1] [].take (By_Index 0) . should_fail_with Index_Out_Of_Bounds_Error_Data diff --git a/test/Tests/src/Network/Http/Header_Spec.enso b/test/Tests/src/Network/Http/Header_Spec.enso index f66b20bfd68f..4907810341b3 100644 --- a/test/Tests/src/Network/Http/Header_Spec.enso +++ b/test/Tests/src/Network/Http/Header_Spec.enso @@ -1,8 +1,8 @@ from Standard.Base import all -import Standard.Base.Network.Http.Header +import Standard.Base.Network.HTTP.Header.Header -from Standard.Test import Test +from Standard.Test import Test, Test_Suite import Standard.Test.Extensions spec = @@ -12,3 +12,5 @@ spec = Header.new "A" "B" . should_equal (Header.new "a" "B") (Header.new "A" "B" == Header.new "A" "b") . should_equal False (Header.new "A" "B" == Header.new "a" "b") . 
should_equal False + +main = Test_Suite.run_main spec diff --git a/test/Tests/src/Network/Http/Request_Spec.enso b/test/Tests/src/Network/Http/Request_Spec.enso index 544cd27f100c..9f5100b0e56b 100644 --- a/test/Tests/src/Network/Http/Request_Spec.enso +++ b/test/Tests/src/Network/Http/Request_Spec.enso @@ -1,13 +1,11 @@ from Standard.Base import all -import Standard.Base.Network.Http.Form -import Standard.Base.Network.Http.Header -import Standard.Base.Network.Http.Method -import Standard.Base.Network.Http.Request -import Standard.Base.Network.Http.Request.Body as Request_Body -import Standard.Base.Network.URI +import Standard.Base.Network.HTTP.Form.Form +import Standard.Base.Network.HTTP.Header.Header +import Standard.Base.Network.HTTP.Request.Request +import Standard.Base.Network.HTTP.Request_Body.Request_Body -from Standard.Test import Test +from Standard.Test import Test, Test_Suite import Standard.Test.Extensions spec = @@ -15,10 +13,10 @@ spec = test_headers = [Header.application_json, Header.new "X-Foo-Id" "0123456789"] Test.group "Request" <| Test.specify "should return error when creating request from invalid URI" <| - Request.new Method.Post "invalid uri" . should_fail_with Syntax_Error_Data + Request.new HTTP_Method.Post "invalid uri" . should_fail_with Syntax_Error_Data Test.specify "should get method" <| - req = Request.new Method.Post test_uri - req.method.should_equal Method.Post + req = Request.new HTTP_Method.Post test_uri + req.method.should_equal HTTP_Method.Post Test.specify "should get uri" <| req = Request.get test_uri req.uri.should_equal test_uri @@ -47,5 +45,7 @@ spec = Test.specify "should set form body" <| body_form = [Form.text_field "key" "val"] req = Request.get test_uri . with_form body_form - req.body.should_equal (Request_Body.Form body_form.to_form) + req.body.should_equal (Request_Body.Form (Form.new body_form)) req.headers.should_equal [Header.application_x_www_form_urlencoded] + +main = Test_Suite.run_main spec diff --git a/test/Tests/src/Network/Http_Spec.enso b/test/Tests/src/Network/Http_Spec.enso index 46c1fb2253ef..75a6fb8b3701 100644 --- a/test/Tests/src/Network/Http_Spec.enso +++ b/test/Tests/src/Network/Http_Spec.enso @@ -1,15 +1,11 @@ from Standard.Base import all -import Standard.Base.Network.Http -import Standard.Base.Network.Http.Form -import Standard.Base.Network.Http.Header -import Standard.Base.Network.Http.Method -import Standard.Base.Network.Http.Request -import Standard.Base.Network.Http.Request.Body as Request_Body -import Standard.Base.Network.Http.Status_Code -import Standard.Base.Network.Http.Version -import Standard.Base.Network.Proxy -import Standard.Base.Network.URI +import Standard.Base.Network.HTTP.Form.Form +import Standard.Base.Network.HTTP.Header.Header +import Standard.Base.Network.HTTP.Request.Request +import Standard.Base.Network.HTTP.Request_Body.Request_Body +import Standard.Base.Network.HTTP.Request_Error +import Standard.Base.Network.Proxy.Proxy from Standard.Test import Test, Test_Suite import Standard.Test.Extensions @@ -21,31 +17,31 @@ spec = pending = if base_url != Nothing then Nothing else "The HTTP tests only run when the `ENSO_HTTP_TEST_HTTPBIN_URL` environment variable is set to URL of the httpbin server" - Test.group "Http" pending=pending <| + Test.group "HTTP" pending=pending <| # httpbin is picky about slashes in URL path. We need exactly one at the # beginning of path. Otherwise, POST request with double slash would - # fail with error 405. + # fail with error 405. 
base_url_with_slash = if base_url.ends_with "/" then base_url else base_url + "/" url_get = base_url_with_slash + "get" url_post = base_url_with_slash + "post" Test.specify "should create HTTP client with timeout setting" <| - http = Http.new (timeout = (Duration.new seconds=30)) + http = HTTP.new (timeout = (Duration.new seconds=30)) http.timeout.should_equal (Duration.new seconds=30) Test.specify "should create HTTP client with follow_redirects setting" <| - http = Http.new (follow_redirects = False) + http = HTTP.new (follow_redirects = False) http.follow_redirects.should_equal False Test.specify "should create HTTP client with proxy setting" <| - proxy_setting = Proxy.Proxy_Addr "example.com" 80 - http = Http.new (proxy = proxy_setting) + proxy_setting = Proxy.Address "example.com" 80 + http = HTTP.new (proxy = proxy_setting) http.proxy.should_equal proxy_setting Test.specify "should create HTTP client with version setting" <| - version_setting = Version.Http_2 - http = Http.new (version = version_setting) + version_setting = HTTP_Version.HTTP_2 + http = HTTP.new (version = version_setting) http.version.should_equal version_setting Test.specify "should throw error when requesting invalid URI" <| - Http.new.get "not a uri" . should_fail_with Syntax_Error_Data + HTTP.new.get "not a uri" . should_fail_with Syntax_Error_Data - Test.specify "should send Get request" <| + Test.specify "should send Get request using fetch" <| expected_response = Json.parse <| ''' { "headers": { @@ -56,8 +52,8 @@ spec = "url": "", "args": {} } - res = Http.new.get url_get - res.code.should_equal Status_Code.ok + res = HTTP.new.get url_get + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should send Get request using module method" <| expected_response = Json.parse <| ''' @@ -70,8 +66,8 @@ spec = "url": "", "args": {} } - res = Http.get url_get - res.code.should_equal Status_Code.ok + res = HTTP.new.get url_get + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should fetch the body of a Get request" <| @@ -85,14 +81,14 @@ spec = "url": "", "args": {} } - res = Http.fetch url_get + res = HTTP.fetch url_get res.to_json.should_equal expected_response Test.specify "should return error if the fetch method fails" <| - Http.fetch "http://undefined_host" . should_fail_with Http.Request_Error_Data + HTTP.fetch "http://undefined_host" . 
should_fail_with Request_Error.Error Test.specify "should send Head request" <| - res = Http.new.head url_get - res.code.should_equal Status_Code.ok + res = HTTP.new.head url_get + res.code.should_equal HTTP_Status_Code.ok res.body.to_text.should_equal '' Test.specify "should Post empty body" <| @@ -111,8 +107,8 @@ spec = "json": null } body_empty = Request_Body.Empty - res = Http.new.post url_post body_empty - res.code.should_equal Status_Code.ok + res = HTTP.new.post url_post body_empty + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should Post empty body using module method" <| expected_response = Json.parse <| ''' @@ -130,8 +126,8 @@ spec = "json": null } body_empty = Request_Body.Empty - res = Http.post url_post body_empty - res.code.should_equal Status_Code.ok + res = HTTP.new.post url_post body_empty + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should Post text body" <| expected_response = Json.parse <| ''' @@ -150,8 +146,8 @@ spec = "json": null } body_text = Request_Body.Text "Hello World!" - res = Http.new.post url_post body_text - res.code.should_equal Status_Code.ok + res = HTTP.new.post url_post body_text + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should Post form text" <| expected_response = Json.parse <| ''' @@ -170,8 +166,8 @@ spec = "json": null } form_parts = [Form.text_field "key" "val"] - res = Http.new.post_form url_post form_parts - res.code.should_equal Status_Code.ok + res = HTTP.new.post_form url_post form_parts + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should Post form text using module method" <| expected_response = Json.parse <| ''' @@ -190,19 +186,19 @@ spec = "json": null } form_parts = [Form.text_field "key" "val"] - res = Http.post_form url_post form_parts - res.code.should_equal Status_Code.ok + res = HTTP.new.post_form url_post form_parts + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should Post form file" <| test_file = enso_project.data / "sample.txt" form_parts = [Form.text_field "key" "val", Form.file_field "sample" test_file] - res = Http.new.post_form url_post form_parts - res.code.should_equal Status_Code.ok + res = HTTP.new.post_form url_post form_parts + res.code.should_equal HTTP_Status_Code.ok Test.specify "should Post form multipart" <| test_file = enso_project.data / "sample.txt" form_parts = [Form.text_field "key" "val", Form.file_field "sample" test_file] - res = Http.new.post_form url_post form_parts [Header.multipart_form_data] - res.code.should_equal Status_Code.ok + res = HTTP.new.post_form url_post form_parts [Header.multipart_form_data] + res.code.should_equal HTTP_Status_Code.ok Test.specify "should Post Json" <| expected_response = Json.parse <| ''' { @@ -223,8 +219,8 @@ spec = } json = Json.parse <| ''' {"key":"val"} - res = Http.new.post_json url_post json - res.code.should_equal Status_Code.ok + res = HTTP.new.post_json url_post json + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should Post Json using module method" <| expected_response = Json.parse <| ''' @@ -246,8 +242,8 @@ spec = } json = Json.parse <| ''' {"key":"val"} - res = Http.post_json url_post json - res.code.should_equal Status_Code.ok + res = HTTP.new.post_json url_post json + res.code.should_equal 
HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should Post binary" <| expected_response = Json.parse <| ''' @@ -266,8 +262,8 @@ spec = "json": null } body_bytes = Request_Body.Bytes "Hello World!".utf_8 - res = Http.new.post url_post body_bytes - res.code.should_equal Status_Code.ok + res = HTTP.new.post url_post body_bytes + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should create and send Get request" <| @@ -281,9 +277,9 @@ spec = "url": "", "args": {} } - req = Request.new Method.Get url_get - res = Http.new.request req - res.code.should_equal Status_Code.ok + req = Request.new HTTP_Method.Get url_get + res = HTTP.new.request req + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should create and send Post request with json body" <| expected_response = Json.parse <| ''' @@ -305,10 +301,10 @@ spec = } json_body = Json.parse <| ''' { "key": "val" } - req = Request.new Method.Post url_post + req = Request.new HTTP_Method.Post url_post req_with_body = req.with_json json_body - res = Http.new.request req_with_body - res.code.should_equal Status_Code.ok + res = HTTP.new.request req_with_body + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response Test.specify "should create and send Post request with json text" <| expected_response = Json.parse <| ''' @@ -330,10 +326,10 @@ spec = } json_text = ''' { "key": "val" } - req = Request.new Method.Post url_post + req = Request.new HTTP_Method.Post url_post req_with_body = req.with_json json_text - res = Http.new.request req_with_body - res.code.should_equal Status_Code.ok + res = HTTP.new.request req_with_body + res.code.should_equal HTTP_Status_Code.ok res.body.to_json.should_equal expected_response main = Test_Suite.run_main spec diff --git a/test/Tests/src/Network/URI_Spec.enso b/test/Tests/src/Network/URI_Spec.enso index efef232d2b47..19baf594338a 100644 --- a/test/Tests/src/Network/URI_Spec.enso +++ b/test/Tests/src/Network/URI_Spec.enso @@ -1,8 +1,6 @@ from Standard.Base import all -import Standard.Base.Network.URI - -from Standard.Test import Test +from Standard.Test import Test, Test_Suite import Standard.Test.Extensions spec = @@ -31,3 +29,5 @@ spec = addr.raw_fragment.should_fail_with Nothing Test.specify "should return Syntax_Error when parsing invalid URI" <| URI.parse "a b c" . should_fail_with Syntax_Error_Data + +main = Test_Suite.run_main spec diff --git a/test/Tests/src/Runtime/Managed_Resource_Spec.enso b/test/Tests/src/Runtime/Managed_Resource_Spec.enso index c429ae43640c..f610de876ea1 100644 --- a/test/Tests/src/Runtime/Managed_Resource_Spec.enso +++ b/test/Tests/src/Runtime/Managed_Resource_Spec.enso @@ -16,33 +16,33 @@ spec = Test.group "Managed_Resource" <| log_2 = Vector.new_builder r_2 = Panic.recover Any <| Managed_Resource.bracket 42 log_2.append x-> log_2.append x+1 - Panic.throw (Illegal_State_Error_Data "foo") + Panic.throw (Illegal_State.Error "foo") log_2.append x+2 - r_2.catch . should_equal (Illegal_State_Error_Data "foo") + r_2.catch . should_equal (Illegal_State.Error "foo") log_2.to_vector . should_equal [43, 42] log_3 = Vector.new_builder r_3 = Managed_Resource.bracket 42 log_3.append x-> log_3.append x+1 - r = Error.throw (Illegal_State_Error_Data "foo") + r = Error.throw (Illegal_State.Error "foo") log_3.append x+2 r - r_3.catch . should_equal (Illegal_State_Error_Data "foo") + r_3.catch . 
should_equal (Illegal_State.Error "foo") log_3.to_vector . should_equal [43, 44, 42] Test.specify "should not proceed further if initialization fails" <| log_1 = Vector.new_builder - r_1 = Panic.recover Any <| Managed_Resource.bracket (Panic.throw (Illegal_State_Error_Data "foo")) (_ -> log_1.append "destructor") _-> + r_1 = Panic.recover Any <| Managed_Resource.bracket (Panic.throw (Illegal_State.Error "foo")) (_ -> log_1.append "destructor") _-> log_1.append "action" 42 - r_1.catch . should_equal (Illegal_State_Error_Data "foo") + r_1.catch . should_equal (Illegal_State.Error "foo") log_1.to_vector . should_equal [] log_2 = Vector.new_builder - r_2 = Managed_Resource.bracket (Error.throw (Illegal_State_Error_Data "foo")) (_ -> log_2.append "destructor") _-> + r_2 = Managed_Resource.bracket (Error.throw (Illegal_State.Error "foo")) (_ -> log_2.append "destructor") _-> log_2.append "action" 42 - r_2.catch . should_equal (Illegal_State_Error_Data "foo") + r_2.catch . should_equal (Illegal_State.Error "foo") log_2.to_vector . should_equal [] Test.specify "should forward panics thrown in initializer and destructor" <| diff --git a/test/Tests/src/Semantic/Conversion_Spec.enso b/test/Tests/src/Semantic/Conversion_Spec.enso index 66d5453184ad..b41d1c82e38e 100644 --- a/test/Tests/src/Semantic/Conversion_Spec.enso +++ b/test/Tests/src/Semantic/Conversion_Spec.enso @@ -91,7 +91,7 @@ spec = Test.specify "should support the meta functions" <| meta_from = Meta.meta .from is_symbol = case meta_from of - Meta.Unresolved_Symbol_Data _ -> True + _ : Meta.Unresolved_Symbol -> True _ -> False is_symbol.should_be_true diff --git a/test/Tests/src/Semantic/Error_Spec.enso b/test/Tests/src/Semantic/Error_Spec.enso index 091fc4bd7c16..333b00f78136 100644 --- a/test/Tests/src/Semantic/Error_Spec.enso +++ b/test/Tests/src/Semantic/Error_Spec.enso @@ -42,10 +42,10 @@ spec = Test.specify "should allow recovery of only a specific error-type" <| recover_illegal_argument ~action = - action . catch Illegal_Argument_Error_Data err-> + action . catch Illegal_Argument.Error err-> "recovered error: "+err.message - (recover_illegal_argument (Error.throw (Illegal_Argument_Error_Data "foo"))) . should_equal "recovered error: foo" - (recover_illegal_argument (Error.throw (Illegal_State_Error_Data "bar"))) . should_fail_with Illegal_State_Error_Data + (recover_illegal_argument (Error.throw (Illegal_Argument.Error "foo"))) . should_equal "recovered error: foo" + (recover_illegal_argument (Error.throw (Illegal_State.Error "bar"))) . should_fail_with Illegal_State.Error Test.specify "should be able to be shown in the default visualization" <| json = (Error.throw <| My_Type.Value "aaa").to_default_visualization_data @@ -150,20 +150,20 @@ spec = Test.specify "should work as in the examples" <| fun ~act = Panic.catch Any act caught_panic-> case caught_panic.payload of - Illegal_Argument_Error_Data message _ -> "Illegal arguments were provided: "+message + Illegal_Argument.Error message _ -> "Illegal arguments were provided: "+message other_panic -> Panic.throw other_panic Panic.recover Any (fun "bar") . should_equal "bar" Panic.recover Any (fun (Panic.throw "foo")) . catch . should_equal "foo" - Panic.recover Any (fun (Panic.throw (Illegal_Argument_Error_Data "msg" Nothing))) . should_equal "Illegal arguments were provided: msg" + Panic.recover Any (fun (Panic.throw (Illegal_Argument.Error "msg" Nothing))) . 
should_equal "Illegal arguments were provided: msg" Test.specify "should allow catching Java exceptions easily" <| parse str = Panic.catch NumberFormatException (Long.parseLong str) caught_panic-> - Error.throw (Illegal_Argument_Error_Data "The provided string is not a valid number: "+caught_panic.payload.cause.getMessage) + Error.throw (Illegal_Argument.Error "The provided string is not a valid number: "+caught_panic.payload.cause.getMessage) parse "42" . should_equal 42 dataflow_error = parse "foo" - dataflow_error.catch . should_equal (Illegal_Argument_Error_Data 'The provided string is not a valid number: For input string: "foo"') + dataflow_error.catch . should_equal (Illegal_Argument.Error 'The provided string is not a valid number: For input string: "foo"') Test.expect_panic_with (parse 0.0) Unsupported_Argument_Types_Data Test.specify "should allow to throw raw Java exceptions" <| @@ -173,7 +173,7 @@ spec = caught_panic = Panic.catch Any (throw_raw_java "foo") x->x caught_panic.stack_trace.second.name . should_equal "Error_Spec.throw_raw_java" - caught_panic.payload . should_be_a Polyglot_Error_Data + caught_panic.payload . should_be_a Polyglot_Error.Polyglot_Error_Data Test.specify "should allow to re-throw raw Java exceptions" <| message_1 = Ref.new "" @@ -182,7 +182,7 @@ spec = message_1 . put caught_panic.payload.cause.getMessage Panic.throw caught_panic.payload.cause message_1.get . should_equal 'For input string: "foo"' - caught_1.catch . should_be_a Polyglot_Error_Data + caught_1.catch . should_be_a Polyglot_Error.Polyglot_Error_Data caught_1.stack_trace.at 2 . name . should_equal "Error_Spec.do_a_parse" message_2 = Ref.new "" @@ -191,23 +191,23 @@ spec = message_2.put caught_panic.payload.cause.getMessage Panic.throw caught_panic.payload.cause message_2.get . should_equal "foo" - caught_2.catch . should_be_a Polyglot_Error_Data + caught_2.catch . should_be_a Polyglot_Error.Polyglot_Error_Data caught_2.stack_trace.second.name . should_equal "Error_Spec.throw_raw_java" Test.specify "should allow to catch a specific panic type easily" <| - message_1 = Panic.catch Illegal_Argument_Error_Data (Panic.throw (Illegal_Argument_Error_Data "msg" Nothing)) caught_panic-> + message_1 = Panic.catch Illegal_Argument.Error (Panic.throw (Illegal_Argument.Error "msg" Nothing)) caught_panic-> caught_panic.payload.message message_1 . should_equal "msg" - error = Panic.recover Any <| Panic.catch Illegal_Argument_Error_Data (Panic.throw (Illegal_State_Error_Data "foo" Nothing)) caught_panic-> + error = Panic.recover Any <| Panic.catch Illegal_Argument.Error (Panic.throw (Illegal_State.Error "foo" Nothing)) caught_panic-> caught_panic.payload.message - error.catch . should_be_an Illegal_State_Error_Data + error.catch . should_be_an Illegal_State.Error - message_2 = Panic.catch Any (Panic.throw (Illegal_Argument_Error_Data "msg" Nothing)) _-> + message_2 = Panic.catch Any (Panic.throw (Illegal_Argument.Error "msg" Nothing)) _-> "caught" message_2 . should_equal "caught" - message_3 = Panic.catch Polyglot_Error_Data (Long.parseLong "foo") _-> + message_3 = Panic.catch Polyglot_Error.Polyglot_Error_Data (Long.parseLong "foo") _-> "polyglot" message_3 . should_equal "polyglot" message_4 = Panic.catch Any (Long.parseLong "foo") _-> @@ -217,20 +217,20 @@ spec = "uat" message_5 . 
should_equal "uat" - Test.expect_panic_with (Panic.catch Illegal_Argument_Error_Data (Long.parseLong "foo") (_->"polyglot3")) Polyglot_Error_Data + Test.expect_panic_with (Panic.catch Illegal_Argument.Error (Long.parseLong "foo") (_->"polyglot3")) Polyglot_Error.Polyglot_Error_Data Test.expect_panic_with (Panic.catch Nothing (Long.parseLong 0) (_->"polyglot4")) Unsupported_Argument_Types_Data Test.specify "should be able to be recovered selectively" <| - Panic.recover Illegal_Argument_Error_Data (Panic.throw (Illegal_Argument_Error_Data "msg" Nothing)) . catch . should_be_an Illegal_Argument_Error_Data - Panic.recover Any (Panic.throw (Illegal_Argument_Error_Data "msg" Nothing)) . catch . should_be_an Illegal_Argument_Error_Data - Panic.recover [Illegal_Argument_Error_Data] (Panic.throw (Illegal_Argument_Error_Data "msg" Nothing)) . catch . should_be_an Illegal_Argument_Error_Data - Panic.recover [Illegal_State_Error_Data, Illegal_Argument_Error_Data] (Panic.throw (Illegal_Argument_Error_Data "msg" Nothing)) . catch . should_be_an Illegal_Argument_Error_Data + Panic.recover Illegal_Argument.Error (Panic.throw (Illegal_Argument.Error "msg" Nothing)) . catch . should_be_an Illegal_Argument.Error + Panic.recover Any (Panic.throw (Illegal_Argument.Error "msg" Nothing)) . catch . should_be_an Illegal_Argument.Error + Panic.recover [Illegal_Argument.Error] (Panic.throw (Illegal_Argument.Error "msg" Nothing)) . catch . should_be_an Illegal_Argument.Error + Panic.recover [Illegal_State.Error, Illegal_Argument.Error] (Panic.throw (Illegal_Argument.Error "msg" Nothing)) . catch . should_be_an Illegal_Argument.Error - Test.expect_panic_with <| Panic.recover Illegal_State_Error_Data (Panic.throw (Illegal_Argument_Error_Data "msg" Nothing)) . catch - Test.expect_panic_with <| Panic.recover [Illegal_State_Error_Data, Polyglot_Error_Data] (Panic.throw (Illegal_Argument_Error_Data "msg" Nothing)) . catch - Test.expect_panic_with <| Panic.recover [] (Panic.throw (Illegal_Argument_Error_Data "msg" Nothing)) . catch + Test.expect_panic_with <| Panic.recover Illegal_State.Error (Panic.throw (Illegal_Argument.Error "msg" Nothing)) . catch + Test.expect_panic_with <| Panic.recover [Illegal_State.Error, Polyglot_Error.Polyglot_Error_Data] (Panic.throw (Illegal_Argument.Error "msg" Nothing)) . catch + Test.expect_panic_with <| Panic.recover [] (Panic.throw (Illegal_Argument.Error "msg" Nothing)) . catch - Panic.recover [Polyglot_Error_Data] (do_a_parse "foo") . catch . should_be_a Polyglot_Error_Data + Panic.recover [Polyglot_Error.Polyglot_Error_Data] (do_a_parse "foo") . catch . should_be_a Polyglot_Error.Polyglot_Error_Data Panic.recover Any throw_a_bar_panicking . catch . should_equal "bar" Panic.recover Text throw_a_bar_panicking . stack_trace . second . name . should_equal "Error_Spec.throw_a_bar_panicking" diff --git a/test/Tests/src/Semantic/Meta_Spec.enso b/test/Tests/src/Semantic/Meta_Spec.enso index d265192f2140..65c8027193c8 100644 --- a/test/Tests/src/Semantic/Meta_Spec.enso +++ b/test/Tests/src/Semantic/Meta_Spec.enso @@ -40,12 +40,12 @@ spec = Test.group "Meta-Value Manipulation" <| atom_2 = Meta.new_atom My_Type.Value [1,"foo", Nothing].to_array (Meta.meta atom_2).constructor . should_equal My_Type.Value Test.specify "should correctly return representations of different classes of objects" <| - Meta.meta 1 . should_equal (Meta.Primitive_Data 1) - Meta.meta "foo" . should_equal (Meta.Primitive_Data "foo") + Meta.meta 1 . should_equal (Meta.Primitive.Value 1) + Meta.meta "foo" . 
should_equal (Meta.Primitive.Value "foo") Test.specify "should allow manipulation of error values" <| err = Error.throw "My Error" meta_err = Meta.meta err - meta_err.is_a Meta.Error_Data . should_be_true + meta_err.is_a Meta.Error.Value . should_be_true meta_err.value . should_equal "My Error" Test.specify "should allow checking if a value is of a certain type" <| 1.is_a Any . should_be_true @@ -148,8 +148,8 @@ spec = Test.group "Meta-Value Manipulation" <| Test.specify "should correctly handle Java values" <| java_meta = Meta.meta Random.new - java_meta . should_be_a Meta.Polyglot_Data - java_meta . get_language . should_equal Meta.Java + java_meta . should_be_a Meta.Polyglot.Value + java_meta . get_language . should_equal Meta.Language.Java Test.specify "should correctly handle equality of Java values" <| a = JavaLocale.new "en" diff --git a/test/Tests/src/System/File_Spec.enso b/test/Tests/src/System/File_Spec.enso index 3fc101dda400..34e6cfcda5bc 100644 --- a/test/Tests/src/System/File_Spec.enso +++ b/test/Tests/src/System/File_Spec.enso @@ -190,12 +190,12 @@ spec = f.read_bytes.should_equal (data + data_2) f.delete_if_exists - Test.specify "should fail with Illegal_Argument_Error when trying to write invalid byte vector" <| + Test.specify "should fail with Illegal_Argument when trying to write invalid byte vector" <| f = transient / "work.txt" f.delete_if_exists f.exists.should_be_false - [0, 1, 256].write_bytes f . should_fail_with Illegal_Argument_Error_Data - [0, 1, Nothing].write_bytes f . should_fail_with Illegal_Argument_Error_Data + [0, 1, 256].write_bytes f . should_fail_with Illegal_Argument.Error + [0, 1, Nothing].write_bytes f . should_fail_with Illegal_Argument.Error Test.specify "should not change the file when trying to write an invalid byte vector" <| f = transient / "work.txt" @@ -203,12 +203,12 @@ spec = f_bak = transient / "work.txt.bak" f_bak.delete_if_exists data.write_bytes f - [0, 1, 256].write_bytes f . should_fail_with Illegal_Argument_Error_Data + [0, 1, 256].write_bytes f . should_fail_with Illegal_Argument.Error f.read_bytes.should_equal data f_bak.exists.should_be_false - [0, 1, 256].write_bytes f on_existing_file=Existing_File_Behavior.Overwrite . should_fail_with Illegal_Argument_Error_Data + [0, 1, 256].write_bytes f on_existing_file=Existing_File_Behavior.Overwrite . should_fail_with Illegal_Argument.Error f.read_bytes.should_equal data - [0, 1, 256].write_bytes f on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument_Error_Data + [0, 1, 256].write_bytes f on_existing_file=Existing_File_Behavior.Append . should_fail_with Illegal_Argument.Error f.read_bytes.should_equal data f.delete_if_exists @@ -293,10 +293,10 @@ spec = new_file = transient / "work.txt.new" [bak_file, new_file].each .delete_if_exists - result = Panic.catch Illegal_State_Error_Data handler=(caught_panic-> caught_panic.payload.message) <| + result = Panic.catch Illegal_State.Error handler=(caught_panic-> caught_panic.payload.message) <| Existing_File_Behavior.Backup.write f output_stream-> output_stream.write_bytes "foo".utf_8 - Panic.throw (Illegal_State_Error_Data "baz") + Panic.throw (Illegal_State.Error "baz") output_stream.write_bytes "bar".utf_8 Test.fail "Control flow should never get here, because the panic should have been propagated and handled." result.should_equal "baz" @@ -307,10 +307,10 @@ spec = Test.fail "The temporary file should have been cleaned up." 
f.delete - result2 = Panic.catch Illegal_State_Error_Data handler=(caught_panic-> caught_panic.payload.message) <| + result2 = Panic.catch Illegal_State.Error handler=(caught_panic-> caught_panic.payload.message) <| Existing_File_Behavior.Backup.write f output_stream-> output_stream.write_bytes "foo".utf_8 - Panic.throw (Illegal_State_Error_Data "baz") + Panic.throw (Illegal_State.Error "baz") output_stream.write_bytes "bar".utf_8 Test.fail "Control flow should never get here, because the panic should have been propagated and handled." result2.should_equal "baz" @@ -326,8 +326,8 @@ spec = "OLD".write f on_existing_file=Existing_File_Behavior.Overwrite result3 = Existing_File_Behavior.Backup.write f output_stream-> output_stream.write_bytes "foo".utf_8 - Error.throw (Illegal_State_Error_Data "HMM...") - result3.should_fail_with Illegal_State_Error_Data + Error.throw (Illegal_State.Error "HMM...") + result3.should_fail_with Illegal_State.Error result3.catch.message . should_equal "HMM..." f.read_text . should_equal "OLD" if bak_file.exists then @@ -338,8 +338,8 @@ spec = result4 = Existing_File_Behavior.Backup.write f output_stream-> output_stream.write_bytes "foo".utf_8 - Error.throw (Illegal_State_Error_Data "HMM...") - result4.should_fail_with Illegal_State_Error_Data + Error.throw (Illegal_State.Error "HMM...") + result4.should_fail_with Illegal_State.Error result4.catch.message . should_equal "HMM..." if f.exists.not then Test.fail "Since we were writing to the original destination, the partially written file should have been preserved even upon failure." diff --git a/test/Visualization_Tests/src/Table_Spec.enso b/test/Visualization_Tests/src/Table_Spec.enso index 0014ddd6e7ac..7d22a94d5195 100644 --- a/test/Visualization_Tests/src/Table_Spec.enso +++ b/test/Visualization_Tests/src/Table_Spec.enso @@ -8,6 +8,9 @@ import Standard.Database.Data.Table.Table as Database_Table import Standard.Visualization.Table.Visualization +import Standard.Visualization.Helpers +import Standard.Visualization.Id + from Standard.Test import Test import Standard.Test.Extensions @@ -94,6 +97,24 @@ visualization_spec connection = json = Json.from_pairs [["json", (Foo.Value 42)]] vis . 
should_equal json.to_text + Test.specify "should specify default visualizations correctly" <| + c_1_1 = ['x', [1, 2, 3]] + c_1_2 = ['Y', [5.3, 56.2, 6.3]] + t_1 = Table.new [c_1_1, c_1_2] + t_1.default_visualization.should_equal Id.scatter_plot + + c_2_1 = ['LaTituDe', [5,3,2]] + c_2_2 = ['longitude', [6,7,8]] + t_2 = Table.new [c_2_1, c_2_2] + t_2.default_visualization.should_equal Id.geo_map + + c_3_1 = ['latitude', [5,2,5]] + c_3_2 = ['Y', [2,4,2]] + c_3_3 = ['name', ["foo", "bar", "baz"]] + t_3 = Table.new [c_3_1, c_3_2, c_3_3] + t_3.default_visualization.should_equal Id.table + + spec = enso_project.data.create_directory file = enso_project.data / "sqlite_test.db" diff --git a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Data/Any.enso b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso similarity index 100% rename from test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Data/Any.enso rename to test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso diff --git a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Error.enso b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Error.enso new file mode 100644 index 000000000000..eed31dc0665f --- /dev/null +++ b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Error.enso @@ -0,0 +1,9 @@ +import project.Any.Any + +@Builtin_Type +type Error + throw payload = @Builtin_Method "Error.throw" + catch_primitive self handler = @Builtin_Method "Error.catch_primitive" + catch self (error_type = Any) (handler = x->x) = + _ = error_type + self.catch_primitive handler diff --git a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Error/Common.enso b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Error/Common.enso index fac9b745fb96..3c14bfeaabc5 100644 --- a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Error/Common.enso +++ b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Error/Common.enso @@ -1,10 +1,3 @@ -from project.Data.Any import Any - -@Builtin_Type -type Panic - throw payload = @Builtin_Method "Panic.throw" - catch_primitive ~action handler = @Builtin_Method "Panic.catch_primitive" - # TODO Dubious constructor export from project.Error.Common.Syntax_Error import all from project.Error.Common.Syntax_Error export all @@ -53,11 +46,3 @@ from project.Error.Common.Arity_Error export all @Builtin_Type type Arity_Error Arity_Error_Data expected_min expected_max actual - -@Builtin_Type -type Error - throw payload = @Builtin_Method "Error.throw" - catch_primitive self handler = @Builtin_Method "Error.catch_primitive" - catch self (error_type = Any) (handler = x->x) = - _ = error_type - self.catch_primitive handler diff --git a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso index 3c5ed00b1db6..030f87288b43 100644 --- a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso +++ b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso @@ -1,11 +1,17 @@ import project.IO export project.IO +import project.Panic.Panic +export project.Panic.Panic + +import project.Error.Error +export project.Error.Error + import project.Error.Common from project.Error.Common export all -import project.Data.Any.Any -export project.Data.Any.Any +import project.Any.Any +export project.Any.Any import project.Data.Array.Array export project.Data.Array.Array diff --git a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso 
b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso new file mode 100644 index 000000000000..e8e1e86d4859 --- /dev/null +++ b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Panic.enso @@ -0,0 +1,4 @@ +@Builtin_Type +type Panic + throw payload = @Builtin_Method "Panic.throw" + catch_primitive ~action handler = @Builtin_Method "Panic.catch_primitive" diff --git a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Resource.enso b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Resource.enso index c9ff91e85446..3cfe8ff98f29 100644 --- a/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Resource.enso +++ b/test/micro-distribution/lib/Standard/Base/0.0.0-dev/src/Runtime/Resource.enso @@ -1,3 +1,6 @@ +import project.Any.Any +import project.Nothing.Nothing + bracket : Any -> (Any -> Nothing) -> (Any -> Any) -> Any bracket ~constructor ~destructor ~action = @Builtin_Method "Resource.bracket"