From bc6b9bcf5429f4b002834befa999139fd98f10c9 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Wed, 31 May 2023 10:51:55 +0200 Subject: [PATCH 01/39] Search for IR.Function deeper in the IR tree to find instance methods (#6902) --- .../compiler/pass/resolve/TypeNames.scala | 17 ++-- .../pass/resolve/TypeSignatures.scala | 5 +- .../enso/interpreter/test/SignatureTest.java | 80 +++++++++++++++++++ 3 files changed, 91 insertions(+), 11 deletions(-) diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeNames.scala b/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeNames.scala index 55b776feda08..81f8e784e341 100644 --- a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeNames.scala +++ b/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeNames.scala @@ -66,15 +66,16 @@ case object TypeNames extends IRPass { ir: IR.Expression ): IR.Expression = { def go(ir: IR.Expression): IR.Expression = { - doResolveType(Nil, bindingsMap, ir.mapExpressions(go)) + val processedIr = ir match { + case fn: IR.Function.Lambda => + fn.copy(arguments = + fn.arguments.map(doResolveType(Nil, bindingsMap, _)) + ) + case x => x + } + doResolveType(Nil, bindingsMap, processedIr.mapExpressions(go)) } - go(ir match { - case fn: IR.Function.Lambda => - fn.copy(arguments = - fn.arguments.map(doResolveType(Nil, bindingsMap, _)) - ) - case x => x - }) + go(ir) } private def doResolveType[T <: IR]( diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeSignatures.scala b/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeSignatures.scala index 39d6f127cd8f..8092608276cf 100644 --- a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeSignatures.scala +++ b/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeSignatures.scala @@ -86,11 +86,10 @@ case object TypeSignatures extends IRPass { res case meth: IR.Module.Scope.Definition.Method => val newMethod = 
meth.mapExpressions(resolveExpression) - val arr = newMethod.body match { + newMethod.body.preorder.foreach { case fn: IR.Function => verifyAscribedArguments(fn.arguments) - case _ => newMethod + case _ => } - arr.getClass() val res = lastSignature match { case Some(asc @ IR.Type.Ascription(typed, sig, _, _, _)) => val methodRef = meth.methodReference diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/SignatureTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/SignatureTest.java index 3e40d128e6d4..62fb54733523 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/SignatureTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/SignatureTest.java @@ -95,6 +95,86 @@ public void runtimeCheckOfAscribedFunctionSignature() throws Exception { assertTrue("Yields Error value", yieldsError.isException()); } + @Test + public void runtimeCheckOfAscribedInstanceMethodSignature() throws Exception { + final URI uri = new URI("memory://twice_instance.enso"); + final Source src = Source.newBuilder("enso", """ + from Standard.Base import Integer + type Neg + Singleton + + twice self (a : Integer) = a + a + """, uri.getHost()) + .uri(uri) + .buildLiteral(); + + var module = ctx.eval(src); + var neg = module.invokeMember("eval_expression", "Neg.Singleton.twice"); + + var ten = neg.execute(5); + assertEquals("Ten", 10, ten.asInt()); + + try { + var res = neg.execute("Hi"); + fail("Expecting an exception, not: " + res); + } catch (PolyglotException e) { + assertTypeError("`a`", "Integer", "Text", e.getMessage()); + } + } + + + @Test + public void runtimeCheckOfAscribedStaticMethodSignature() throws Exception { + final URI uri = new URI("memory://twice_static.enso"); + final Source src = Source.newBuilder("enso", """ + from Standard.Base import Integer + type Neg + twice (a : Integer) = a + a + """, uri.getHost()) + .uri(uri) + .buildLiteral(); + + var module = ctx.eval(src); + var neg = module.invokeMember("eval_expression", 
"Neg.twice"); + + var ten = neg.execute(5); + assertEquals("Ten", 10, ten.asInt()); + + try { + var res = neg.execute("Hi"); + fail("Expecting an exception, not: " + res); + } catch (PolyglotException e) { + assertTypeError("`a`", "Integer", "Text", e.getMessage()); + } + } + + @Test + public void runtimeCheckOfAscribedLocalMethodSignature() throws Exception { + final URI uri = new URI("memory://twice_local.enso"); + final Source src = Source.newBuilder("enso", """ + from Standard.Base import Integer + + call_twice x = + twice (a : Integer) = a + a + twice x + """, uri.getHost()) + .uri(uri) + .buildLiteral(); + + var module = ctx.eval(src); + var neg = module.invokeMember("eval_expression", "call_twice"); + + var ten = neg.execute(5); + assertEquals("Ten", 10, ten.asInt()); + + try { + var res = neg.execute("Hi"); + fail("Expecting an exception, not: " + res); + } catch (PolyglotException e) { + assertTypeError("`a`", "Integer", "Text", e.getMessage()); + } + } + @Test public void wrongAscribedInConstructor() throws Exception { final URI uri = new URI("memory://constructor.enso"); From 7e53cd9af1e29822c07c177dc802ff9d3833fa0c Mon Sep 17 00:00:00 2001 From: GregoryTravis Date: Wed, 31 May 2023 08:43:20 -0400 Subject: [PATCH 02/39] Add drop down for Locale like Encoding (#6654) Add dropdowns for locale parameters for format and parse methods. 
--- .../Base/0.0.0-dev/src/Data/Locale.enso | 26 +++++++++++++++++++ .../Base/0.0.0-dev/src/Data/Time/Date.enso | 1 + .../0.0.0-dev/src/Data/Time/Date_Time.enso | 2 ++ .../0.0.0-dev/src/Data/Time/Time_Of_Day.enso | 2 ++ .../Table/0.0.0-dev/src/Data/Column.enso | 1 + test/Tests/src/Data/Locale_Spec.enso | 12 ++++++--- 6 files changed, 41 insertions(+), 3 deletions(-) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso index fb62a605659e..be1d739a8726 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso @@ -1,9 +1,16 @@ import project.Any.Any +import project.Data.Filter_Condition.Filter_Condition +import project.Data.Text.Case.Case +import project.Data.Text.Extensions import project.Data.Text.Text import project.Data.Vector.Vector +import project.Metadata.Display import project.Nothing.Nothing +import project.Meta from project.Data.Boolean import Boolean, False +from project.Metadata.Widget import Single_Choice +from project.Metadata.Choice import Option polyglot java import java.util.Locale as JavaLocale @@ -421,3 +428,22 @@ type Locale Convert Locale to a friendly string. to_display_text : Text to_display_text self = "Locale(" + self.to_text + ")" + + ## PRIVATE + Gets the default drop down option for this encoding. + default_widget : Single_Choice + default_widget = Single_Choice values=Locale.widget_options display=Display.When_Modified + + ## PRIVATE + predefined_locale_fields : Vector Text + predefined_locale_fields = + locale_meta = Meta.meta Locale + remove_us = locale_meta.methods + ["Value", "new", "default", "from_language_tag", "from_java", "predefined_locale_fields", "default_widget", "widget_options"] + Meta.Type.Value (Meta.type_of locale_meta.value) . methods . filter (Filter_Condition.Not_In remove_us) . 
sort + + ## PRIVATE + widget_options : Vector Option + widget_options = Locale.predefined_locale_fields.map field_name-> + display_string = field_name.replace '_' ' ' . to_case (if field_name.length == 2 then Case.Upper else Case.Title) + code_string = "Locale." + field_name + Option display_string code_string diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso index ee7212ba0549..6fec6c39cbf4 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso @@ -705,6 +705,7 @@ type Date Format "2020-06-21" with French locale as "21. juin 2020" example_format = Date.new 2020 6 21 . format "d. MMMM yyyy" (Locale.new "fr") + @locale Locale.default_widget format : Text -> Locale -> Text format self pattern locale=Locale.default = case locale of Nothing -> Time_Utils.local_date_format self pattern diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso index e2bd0ad1eb7d..9cd24652241b 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso @@ -244,6 +244,7 @@ type Date_Time example_parse = Date_Time.parse "06 of May 2020 at 04:30AM" "dd 'of' MMMM yyyy 'at' hh:mma" + @locale Locale.default_widget parse : Text -> Text | Nothing -> Locale -> Date_Time ! Time_Error parse text pattern=Nothing locale=Locale.default = Panic.catch JException handler=(cause -> Error.throw (Time_Error.Error cause.payload.getMessage)) <| @@ -656,6 +657,7 @@ type Date_Time example_format = Date_Time.parse "2020-06-21T16:41:13+03:00" . format "d. 
MMMM yyyy" (Locale.new "fr") + @locale Locale.default_widget format : Text -> Locale -> Text format self pattern locale=Locale.default = case locale of Nothing -> Time_Utils.date_time_format self pattern diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso index 1086b067191b..e6b8b0d035a6 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso @@ -167,6 +167,7 @@ type Time_Of_Day from Standard.Base import Time_Of_Day example_parse = Time_Of_Day.parse "4:30AM" "h:mma" + @locale Locale.default_widget parse : Text -> Text | Nothing -> Locale -> Time_Of_Day ! Time_Error parse text pattern=Nothing locale=Locale.default = Panic.catch JException handler=(cause -> Error.throw (Time_Error.Error cause.payload.getMessage)) <| @@ -354,6 +355,7 @@ type Time_Of_Day from Standard.Base import Time_Of_Day example_format = Time_Of_Day.new 16 21 10 . format "'hour:'h" + @locale Locale.default_widget format : Text -> Locale -> Text format self pattern locale=Locale.default = case locale of Nothing -> Time_Utils.time_of_day_format self pattern diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso index 3be889442ffc..d526de4d2aec 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso @@ -1271,6 +1271,7 @@ type Column input = Column.from_vector "values" ["100000000", "2222", "3"] . parse numeric_type input.format "#,##0.00" locale=(Locale.new "fr") # ==> ["100 000 000,00", "2 222,00", "3,00"] + @locale Locale.default_widget format : Text | Column -> Locale -> Column ! 
Illegal_Argument format self format=Nothing locale=Locale.default = create_formatter = make_value_formatter_for_value_type self.value_type locale diff --git a/test/Tests/src/Data/Locale_Spec.enso b/test/Tests/src/Data/Locale_Spec.enso index 443e9f1d8e7d..2338116a68fd 100644 --- a/test/Tests/src/Data/Locale_Spec.enso +++ b/test/Tests/src/Data/Locale_Spec.enso @@ -1,10 +1,11 @@ from Standard.Base import all -polyglot java import java.util.Locale as JavaLocale - +from Standard.Base.Metadata.Choice import Option from Standard.Test import Test, Test_Suite import Standard.Test.Extensions +polyglot java import java.util.Locale as JavaLocale + with_locale locale ~test = default_locale = JavaLocale.getDefault JavaLocale.setDefault locale.java_locale @@ -14,7 +15,8 @@ with_locale locale ~test = JavaLocale.setDefault default_locale result -spec = Test.group "Locale" <| +spec = + Test.group "Locale" <| en_gb = Locale.new "en" "GB" Test.specify "allow constructing a locale with optional parts" <| loc = Locale.new "en" @@ -74,4 +76,8 @@ spec = Test.group "Locale" <| Locale.uk . should_equal Locale.uk Locale.uk . should_not_equal Locale.us + Test.specify "Should correctly auto-discover static Locales" <| + locale_names = Locale.widget_options.map x-> case x of Option locale_name _ _ _ -> locale_name + locale_names . 
should_equal ['Bangladesh', 'Brazil', 'Canada English', 'Canada French', 'China', 'France', 'Germany', 'India English', 'India Hindi', 'Indonesia', 'Italy', 'Japan', 'Mexico', 'Nigeria', 'Pakistan English', 'Pakistan Urdu', 'Poland', 'Russia', 'South Korea', 'UK', 'US'] + main = Test_Suite.run_main spec From 50aefd6746c3b76099444e2323335921b52b1c5c Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Thu, 1 Jun 2023 00:14:02 +1000 Subject: [PATCH 03/39] Fix #6892 (#6898) --- .../lib/dashboard/src/authentication/src/dashboard/dateTime.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/dateTime.ts b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/dateTime.ts index 7d931f3e4cec..aa40100a6254 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/dateTime.ts +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/dateTime.ts @@ -11,7 +11,7 @@ export type Rfc3339DateTime = newtype.Newtype /** Formats date time into the preferred format: `YYYY-MM-DD, hh:mm`. */ export function formatDateTime(date: Date) { const year = date.getFullYear() - const month = date.getMonth().toString().padStart(2, '0') + const month = (date.getMonth() + 1).toString().padStart(2, '0') const dayOfMonth = date.getDate().toString().padStart(2, '0') const hour = date.getHours().toString().padStart(2, '0') const minute = date.getMinutes().toString().padStart(2, '0') From 65958cba92d132347e3d9c76be9d61c8d225f999 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Wed, 31 May 2023 17:14:57 +0200 Subject: [PATCH 04/39] IGV Enso support recognizes lib/scala/pkg as project (#6904) Addition to #4098 - make sure IGV Enso support can recognize `lib/scala/pkg` as a project. 
--- build.sbt | 1 + lib/scala/pkg/src/main/java/org/enso/pkg/package-info.java | 1 + 2 files changed, 2 insertions(+) create mode 100644 lib/scala/pkg/src/main/java/org/enso/pkg/package-info.java diff --git a/build.sbt b/build.sbt index 70029a4a7025..74461909aa9e 100644 --- a/build.sbt +++ b/build.sbt @@ -666,6 +666,7 @@ lazy val graph = (project in file("lib/scala/graph/")) lazy val pkg = (project in file("lib/scala/pkg")) .settings( Compile / run / mainClass := Some("org.enso.pkg.Main"), + frgaalJavaCompilerSetting, version := "0.1", libraryDependencies ++= circe ++ Seq( "org.scalatest" %% "scalatest" % scalatestVersion % Test, diff --git a/lib/scala/pkg/src/main/java/org/enso/pkg/package-info.java b/lib/scala/pkg/src/main/java/org/enso/pkg/package-info.java new file mode 100644 index 000000000000..7b7b52911a51 --- /dev/null +++ b/lib/scala/pkg/src/main/java/org/enso/pkg/package-info.java @@ -0,0 +1 @@ +package org.enso.pkg; From ed3f9b306e14bc25691a04f5948a5a0ae3842d54 Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Wed, 31 May 2023 16:47:48 +0100 Subject: [PATCH 05/39] Consistent self types (#6867) close #6800 Update the `executionContext/expressionUpdates` notification and send the list of not applied arguments in addition to the method pointer. # Important Notes IDE is updated to support the new API. 
--- .../src/language_server/tests.rs | 2 +- .../src/language_server/types.rs | 20 +- app/gui/src/model/execution_context.rs | 2 +- app/gui/src/model/project/synchronized.rs | 5 +- .../protocol-language-server.md | 19 +- .../runtime/ContextEventsListener.scala | 16 +- .../runtime/ContextRegistryProtocol.scala | 4 +- .../languageserver/runtime/MethodCall.scala | 32 + .../runtime/MethodPointer.scala | 16 - .../runtime/ContextEventsListenerSpec.scala | 11 +- .../org/enso/polyglot/runtime/Runtime.scala | 28 +- .../instrument/IdExecutionInstrument.java | 3 +- .../test/NodeCountingTestInstrument.java | 8 +- .../instrument/IncrementalUpdatesTest.java | 8 +- .../test/instrument/RuntimeErrorsTest.scala | 20 +- .../RuntimeExecutionEnvironmentTest.scala | 25 +- .../instrument/RuntimeInstrumentTest.scala | 6 +- .../test/instrument/RuntimeServerTest.scala | 1116 +++++++++++++++-- .../RuntimeVisualizationsTest.scala | 34 +- .../instrument/IdExecutionService.java | 69 +- .../interpreter/service/ExecutionService.java | 22 +- .../enso/compiler/codegen/IrToTruffle.scala | 6 +- .../job/ProgramExecutionSupport.scala | 29 +- .../test/instrument/TestMessages.scala | 68 +- 24 files changed, 1281 insertions(+), 288 deletions(-) create mode 100644 engine/language-server/src/main/scala/org/enso/languageserver/runtime/MethodCall.scala delete mode 100644 engine/language-server/src/main/scala/org/enso/languageserver/runtime/MethodPointer.scala diff --git a/app/gui/controller/engine-protocol/src/language_server/tests.rs b/app/gui/controller/engine-protocol/src/language_server/tests.rs index 336dfca8348a..b4ddc7a7d199 100644 --- a/app/gui/controller/engine-protocol/src/language_server/tests.rs +++ b/app/gui/controller/engine-protocol/src/language_server/tests.rs @@ -348,7 +348,7 @@ fn test_computed_value_update() { let update = &expression_updates.updates.first().unwrap(); assert_eq!(update.expression_id, id); assert_eq!(update.typename.as_deref(), Some(typename)); - 
assert!(update.method_pointer.is_none()); + assert!(update.method_call.is_none()); assert!(update.from_cache); assert!(matches!(update.payload, ExpressionUpdatePayload::Value { warnings: None })) } diff --git a/app/gui/controller/engine-protocol/src/language_server/types.rs b/app/gui/controller/engine-protocol/src/language_server/types.rs index eaec9b5772a5..ed6c3ccb6d59 100644 --- a/app/gui/controller/engine-protocol/src/language_server/types.rs +++ b/app/gui/controller/engine-protocol/src/language_server/types.rs @@ -216,7 +216,7 @@ pub struct ExpressionUpdate { pub expression_id: ExpressionId, #[serde(rename = "type")] // To avoid collision with the `type` keyword. pub typename: Option, - pub method_pointer: Option, + pub method_call: Option, pub profiling_info: Vec, pub from_cache: bool, pub payload: ExpressionUpdatePayload, @@ -740,6 +740,16 @@ pub struct MethodPointer { pub name: String, } +/// A representation of a method call. +#[derive(Hash, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct MethodCall { + /// The method pointer of a call. + pub method_pointer: MethodPointer, + /// Indexes of arguments that have not been applied to this method. + pub not_applied_arguments: Vec, +} + /// Used for entering a method. The first item on the execution context stack should always be /// an `ExplicitCall`. 
#[derive(Hash, Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] @@ -1226,7 +1236,7 @@ pub mod test { ExpressionUpdate { expression_id: id, typename: Some(typename.into()), - method_pointer: None, + method_call: None, profiling_info: default(), from_cache: false, payload: ExpressionUpdatePayload::Value { warnings: None }, @@ -1242,7 +1252,7 @@ pub mod test { ExpressionUpdate { expression_id: id, typename: None, - method_pointer: Some(method_pointer), + method_call: Some(MethodCall { method_pointer, not_applied_arguments: vec![] }), profiling_info: default(), from_cache: false, payload: ExpressionUpdatePayload::Value { warnings: None }, @@ -1256,7 +1266,7 @@ pub mod test { ExpressionUpdate { expression_id: id, typename: None, - method_pointer: None, + method_call: None, profiling_info: default(), from_cache: false, payload: ExpressionUpdatePayload::DataflowError { trace }, @@ -1274,7 +1284,7 @@ pub mod test { ExpressionUpdate { expression_id: id, typename: None, - method_pointer: None, + method_call: None, profiling_info: default(), from_cache: false, payload: ExpressionUpdatePayload::Panic { trace, message }, diff --git a/app/gui/src/model/execution_context.rs b/app/gui/src/model/execution_context.rs index 8316c13f7cac..bb106931fc62 100644 --- a/app/gui/src/model/execution_context.rs +++ b/app/gui/src/model/execution_context.rs @@ -64,7 +64,7 @@ impl From for ComputedValueInfo { fn from(update: ExpressionUpdate) -> Self { ComputedValueInfo { typename: update.typename.map(ImString::new), - method_call: update.method_pointer, + method_call: update.method_call.map(|mc| mc.method_pointer), payload: update.payload, } } diff --git a/app/gui/src/model/project/synchronized.rs b/app/gui/src/model/project/synchronized.rs index d6e950dde8a5..2607674abc91 100644 --- a/app/gui/src/model/project/synchronized.rs +++ b/app/gui/src/model/project/synchronized.rs @@ -950,7 +950,10 @@ mod test { // Context now has the information about type. 
let value_info = value_registry.get(&expression_id).unwrap(); assert_eq!(value_info.typename, value_update.typename.clone().map(ImString::new)); - assert_eq!(value_info.method_call, value_update.method_pointer); + assert_eq!( + value_info.method_call, + value_update.method_call.clone().map(|mc| mc.method_pointer) + ); } diff --git a/docs/language-server/protocol-language-server.md b/docs/language-server/protocol-language-server.md index bb31c4083814..e12c9124cbd6 100644 --- a/docs/language-server/protocol-language-server.md +++ b/docs/language-server/protocol-language-server.md @@ -22,6 +22,7 @@ transport formats, please look [here](./protocol-architecture). - [`ExpressionId`](#expressionid) - [`ContextId`](#contextid) - [`StackItem`](#stackitem) + - [`MethodCall`](#methodcall) - [`MethodPointer`](#methodpointer) - [`ProfilingInfo`](#profilinginfo) - [`ExpressionUpdate`](#expressionupdate) @@ -271,6 +272,20 @@ interface LocalCall { } ``` +### `MethodCall` + +A representation of a method call. + +```typescript +interface MethodCall { + /** The method pointer of a call. */ + methodPointer: MethodPointer; + + /** Indexes of arguments that have not been applied to this method. */ + notAppliedArguments: number[]; +} +``` + ### `MethodPointer` Points to a method definition. @@ -331,9 +346,9 @@ interface ExpressionUpdate { type?: String; /** - * The updated pointer to the method call. + * The updated method call info. */ - methodPointer?: MethodPointer; + methodCall?: MethodCall; /** * Profiling information about the expression. 
diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextEventsListener.scala b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextEventsListener.scala index b5e58a04dc82..a386707b2e16 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextEventsListener.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextEventsListener.scala @@ -186,7 +186,7 @@ final class ContextEventsListener( ContextRegistryProtocol.ExpressionUpdate( update.expressionId, update.expressionType, - update.methodCall.map(toProtocolMethodPointer), + update.methodCall.map(toProtocolMethodCall), update.profilingInfo.map(toProtocolProfilingInfo), update.fromCache, toProtocolPayload(update.payload) @@ -248,8 +248,20 @@ final class ContextEventsListener( ProfilingInfo.ExecutionTime(t) } + /** Convert the runtime method call to the context registry protocol + * representation. + * + * @param methodCall the method call + * @return the registry protocol representation of the method call + */ + private def toProtocolMethodCall(methodCall: Api.MethodCall): MethodCall = + MethodCall( + toProtocolMethodPointer(methodCall.methodPointer), + methodCall.notAppliedArguments + ) + /** Convert the runtime method pointer to the context registry protocol - * representation + * representation. 
* * @param methodPointer the method pointer * @return the registry protocol representation of the method pointer diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextRegistryProtocol.scala b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextRegistryProtocol.scala index 6ee85baf4438..b79d86dc1a4c 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextRegistryProtocol.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/ContextRegistryProtocol.scala @@ -176,7 +176,7 @@ object ContextRegistryProtocol { * * @param expressionId the id of updated expression * @param `type` the updated type of expression - * @param methodPointer the updated method pointer + * @param methodCall the updated method call * @param profilingInfo profiling information about the expression * @param fromCache whether or not the expression's value came from the cache * @param payload an extra information about the computed value @@ -184,7 +184,7 @@ object ContextRegistryProtocol { case class ExpressionUpdate( expressionId: UUID, `type`: Option[String], - methodPointer: Option[MethodPointer], + methodCall: Option[MethodCall], profilingInfo: Vector[ProfilingInfo], fromCache: Boolean, payload: ExpressionUpdate.Payload diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/MethodCall.scala b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/MethodCall.scala new file mode 100644 index 000000000000..2191d2411c65 --- /dev/null +++ b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/MethodCall.scala @@ -0,0 +1,32 @@ +package org.enso.languageserver.runtime + +import org.enso.polyglot.runtime.Runtime.Api + +/** A representation of a method call. 
+ * + * @param methodPointer the method pointer of a call + * @param notAppliedArguments indexes of arguments that have not been applied + * to this method + */ +case class MethodCall( + methodPointer: MethodPointer, + notAppliedArguments: Vector[Int] +) { + + /** Convert this method call to the corresponding [[Api]] message. */ + def toApi: Api.MethodCall = + Api.MethodCall(methodPointer.toApi, notAppliedArguments) +} + +/** An object pointing to a method definition. + * + * @param module the module of the method file + * @param definedOnType method type + * @param name method name + */ +case class MethodPointer(module: String, definedOnType: String, name: String) { + + /** Convert this method pointer to the corresponding [[Api]] message. */ + def toApi: Api.MethodPointer = + Api.MethodPointer(module, definedOnType, name) +} diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/MethodPointer.scala b/engine/language-server/src/main/scala/org/enso/languageserver/runtime/MethodPointer.scala deleted file mode 100644 index 1a47c411125b..000000000000 --- a/engine/language-server/src/main/scala/org/enso/languageserver/runtime/MethodPointer.scala +++ /dev/null @@ -1,16 +0,0 @@ -package org.enso.languageserver.runtime - -import org.enso.polyglot.runtime.Runtime.Api - -/** An object pointing to a method definition. - * - * @param module the module of the method file - * @param definedOnType method type - * @param name method name - */ -case class MethodPointer(module: String, definedOnType: String, name: String) { - - /** Convert to corresponding [[Api]] message. 
*/ - def toApi: Api.MethodPointer = - Api.MethodPointer(module, definedOnType, name) -} diff --git a/engine/language-server/src/test/scala/org/enso/languageserver/runtime/ContextEventsListenerSpec.scala b/engine/language-server/src/test/scala/org/enso/languageserver/runtime/ContextEventsListenerSpec.scala index 4b53c84d8b2c..98659a6ac00f 100644 --- a/engine/language-server/src/test/scala/org/enso/languageserver/runtime/ContextEventsListenerSpec.scala +++ b/engine/language-server/src/test/scala/org/enso/languageserver/runtime/ContextEventsListenerSpec.scala @@ -55,13 +55,14 @@ class ContextEventsListenerSpec Suggestions.method.selfType, Suggestions.method.name ) + val methodCall = Api.MethodCall(methodPointer) listener ! Api.ExpressionUpdates( contextId, Set( Api.ExpressionUpdate( Suggestions.method.externalId.get, Some(Suggestions.method.returnType), - Some(methodPointer), + Some(methodCall), Vector(), false, true, @@ -79,7 +80,7 @@ class ContextEventsListenerSpec ContextRegistryProtocol.ExpressionUpdate( Suggestions.method.externalId.get, Some(Suggestions.method.returnType), - Some(toProtocolMethodPointer(methodPointer)), + Some(toProtocolMethodCall(methodCall)), Vector(), false, ContextRegistryProtocol.ExpressionUpdate.Payload.Value(None) @@ -477,6 +478,12 @@ class ContextEventsListenerSpec } } + def toProtocolMethodCall(methodCall: Api.MethodCall): MethodCall = + MethodCall( + toProtocolMethodPointer(methodCall.methodPointer), + methodCall.notAppliedArguments + ) + def toProtocolMethodPointer(methodPointer: Api.MethodPointer): MethodPointer = MethodPointer( methodPointer.module, diff --git a/engine/polyglot-api/src/main/scala/org/enso/polyglot/runtime/Runtime.scala b/engine/polyglot-api/src/main/scala/org/enso/polyglot/runtime/Runtime.scala index 0936d61dcfbb..c68a7396902c 100644 --- a/engine/polyglot-api/src/main/scala/org/enso/polyglot/runtime/Runtime.scala +++ b/engine/polyglot-api/src/main/scala/org/enso/polyglot/runtime/Runtime.scala @@ -307,14 +307,34 @@ 
object Runtime { */ sealed trait Error extends ApiResponse - /** A representation of a pointer to a method definition. - */ + /** A representation of a pointer to a method definition. */ case class MethodPointer( module: String, definedOnType: String, name: String ) + /** A representation of a method call. + * + * @param methodPointer the method pointer of a call + * @param notAppliedArguments indexes of arguments that have not been applied + * to this method + */ + case class MethodCall( + methodPointer: MethodPointer, + notAppliedArguments: Vector[Int] + ) + object MethodCall { + + /** Create a method call with all the arguments applied. + * + * @param methodPointer the method pointer of a call + * @return a new [[MethodCall]]. + */ + def apply(methodPointer: MethodPointer): MethodCall = + MethodCall(methodPointer, Vector()) + } + /** A representation of an executable position in code. */ @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type") @@ -362,7 +382,7 @@ object Runtime { * * @param expressionId the expression id * @param expressionType the type of expression - * @param methodCall the pointer to a method definition + * @param methodCall the underlying method call of this expression * @param profilingInfo profiling information about the execution of this * expression * @param fromCache whether or not the value for this expression came @@ -374,7 +394,7 @@ object Runtime { case class ExpressionUpdate( expressionId: ExpressionId, expressionType: Option[String], - methodCall: Option[MethodPointer], + methodCall: Option[MethodCall], profilingInfo: Vector[ProfilingInfo], fromCache: Boolean, typeChanged: Boolean, diff --git a/engine/runtime-instrument-id-execution/src/main/java/org/enso/interpreter/instrument/IdExecutionInstrument.java b/engine/runtime-instrument-id-execution/src/main/java/org/enso/interpreter/instrument/IdExecutionInstrument.java index 0b14a5f0c21d..6635661346f7 100644 --- 
a/engine/runtime-instrument-id-execution/src/main/java/org/enso/interpreter/instrument/IdExecutionInstrument.java +++ b/engine/runtime-instrument-id-execution/src/main/java/org/enso/interpreter/instrument/IdExecutionInstrument.java @@ -308,8 +308,7 @@ private FunctionCallInfo functionCallInfoById(UUID nodeId) { @CompilerDirectives.TruffleBoundary private void onFunctionReturn(UUID nodeId, FunctionCallInstrumentationNode.FunctionCall result, EventContext context) throws ThreadDeath { - calls.put( - nodeId, new FunctionCallInfo(result)); + calls.put(nodeId, new FunctionCallInfo(result)); functionCallCallback.accept(new ExpressionCall(nodeId, result)); // Return cached value after capturing the enterable function call in `functionCallCallback` Object cachedResult = cache.get(nodeId); diff --git a/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/NodeCountingTestInstrument.java b/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/NodeCountingTestInstrument.java index cd33a79446d3..1f786b1c07af 100644 --- a/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/NodeCountingTestInstrument.java +++ b/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/NodeCountingTestInstrument.java @@ -28,10 +28,10 @@ services = NodeCountingTestInstrument.class) public class NodeCountingTestInstrument extends TruffleInstrument { public static final String INSTRUMENT_ID = "node-count-test"; - private Map all = new ConcurrentHashMap<>(); + private final Map all = new ConcurrentHashMap<>(); private Map> counter = new ConcurrentHashMap<>(); - private Map calls = new ConcurrentHashMap<>(); + private final Map calls = new ConcurrentHashMap<>(); private Env env; @Override @@ -107,7 +107,7 @@ public ExecutionEventNode create(EventContext context) { } } - private class NodeWrapper extends ExecutionEventNode { + private static class NodeWrapper extends ExecutionEventNode { private final EventContext context; @@ -134,7 
+134,7 @@ private void onFunctionReturn(FunctionCallInstrumentationNode node, FunctionCall } - public class FunctionCallInfo { + public static class FunctionCallInfo { private final QualifiedName moduleName; private final QualifiedName typeName; diff --git a/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/instrument/IncrementalUpdatesTest.java b/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/instrument/IncrementalUpdatesTest.java index e921a994fd34..0a3eeabd52cf 100644 --- a/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/instrument/IncrementalUpdatesTest.java +++ b/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/instrument/IncrementalUpdatesTest.java @@ -19,6 +19,7 @@ import org.enso.polyglot.runtime.Runtime$Api$ExecutionFailed; import org.enso.polyglot.runtime.Runtime$Api$ExpressionUpdates; import org.enso.polyglot.runtime.Runtime$Api$InitializedNotification; +import org.enso.polyglot.runtime.Runtime$Api$MethodCall; import org.enso.polyglot.runtime.Runtime$Api$MethodPointer; import org.enso.polyglot.runtime.Runtime$Api$PushContextRequest; import org.enso.polyglot.runtime.Runtime$Api$PushContextResponse; @@ -39,6 +40,7 @@ import scala.collection.immutable.Seq; import scala.collection.immutable.Set; import scala.collection.immutable.Set$; +import scala.collection.immutable.Vector$; import scala.collection.immutable.Vector1; public class IncrementalUpdatesTest { @@ -217,14 +219,14 @@ private static String extractPositions(String code, String chars, Map { + moduleName = methodNode.getModuleScope().getModule().getName(); + typeName = methodNode.getType().getQualifiedName(); + functionName = methodNode.getMethodName(); + } + case QualifiedAccessorNode qualifiedAccessor -> { + AtomConstructor atomConstructor = qualifiedAccessor.getAtomConstructor(); + moduleName = atomConstructor.getDefinitionScope().getModule().getName(); + typeName = 
atomConstructor.getType().getQualifiedName(); + functionName = atomConstructor.getDisplayName(); + } + case EnsoRootNode ensoRootNode -> { + moduleName = ensoRootNode.getModuleScope().getModule().getName(); + typeName = null; + functionName = rootNode.getName(); + } + case default -> { + moduleName = null; + typeName = null; + functionName = rootNode.getName(); + } } + + notAppliedArguments = collectNotAppliedArguments(call); } @Override @@ -268,5 +281,27 @@ public QualifiedName getTypeName() { public String getFunctionName() { return functionName; } + + /** @return the arguments of this function that have not yet been applied. */ + public int[] getNotAppliedArguments() { + return notAppliedArguments; + } + + private static int[] collectNotAppliedArguments(FunctionCallInstrumentationNode.FunctionCall call) { + Object[] arguments = call.getArguments(); + int[] notAppliedArgs = new int[arguments.length]; + int notAppliedArgsSize = 0; + boolean isStatic = arguments[0] instanceof Type; + int selfTypePosition = isStatic ? 
-1 : 0; + + for (int i = 0; i < arguments.length; i++) { + if (arguments[i] == null) { + notAppliedArgs[notAppliedArgsSize] = i + selfTypePosition; + notAppliedArgsSize += 1; + } + } + + return Arrays.copyOf(notAppliedArgs, notAppliedArgsSize); + } } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/service/ExecutionService.java b/engine/runtime/src/main/java/org/enso/interpreter/service/ExecutionService.java index 8f092e447f4e..f6866ddd2ae9 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/service/ExecutionService.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/service/ExecutionService.java @@ -377,16 +377,18 @@ public void modifyModuleSources( throw new SourceNotFoundException(module.getName(), e); } - JavaEditorAdapter.applyEdits(module.getLiteralSource(), edits) - .fold( - failure -> { - throw new FailedToApplyEditsException( - module.getName(), edits, failure, module.getLiteralSource()); - }, - rope -> { - module.setLiteralSource(rope, simpleUpdate); - return new Object(); - }); + if (edits.nonEmpty() || simpleUpdate != null) { + JavaEditorAdapter.applyEdits(module.getLiteralSource(), edits) + .fold( + failure -> { + throw new FailedToApplyEditsException( + module.getName(), edits, failure, module.getLiteralSource()); + }, + rope -> { + module.setLiteralSource(rope, simpleUpdate); + return new Object(); + }); + } } /** diff --git a/engine/runtime/src/main/scala/org/enso/compiler/codegen/IrToTruffle.scala b/engine/runtime/src/main/scala/org/enso/compiler/codegen/IrToTruffle.scala index e60f21659e45..07e842286503 100644 --- a/engine/runtime/src/main/scala/org/enso/compiler/codegen/IrToTruffle.scala +++ b/engine/runtime/src/main/scala/org/enso/compiler/codegen/IrToTruffle.scala @@ -961,7 +961,7 @@ class IrToTruffle( * @param block the block to generate code for * @return the truffle nodes corresponding to `block` */ - def processBlock(block: IR.Expression.Block): RuntimeExpression = { + private def processBlock(block: 
IR.Expression.Block): RuntimeExpression = { if (block.suspended) { val scopeInfo = block .unsafeGetMetadata( @@ -1383,7 +1383,9 @@ class IrToTruffle( * @param binding the binding to generate code for * @return the truffle nodes corresponding to `binding` */ - def processBinding(binding: IR.Expression.Binding): RuntimeExpression = { + private def processBinding( + binding: IR.Expression.Binding + ): RuntimeExpression = { val occInfo = binding .unsafeGetMetadata( AliasAnalysis, diff --git a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ProgramExecutionSupport.scala b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ProgramExecutionSupport.scala index d6a8cf24e82c..9627d95a03b9 100644 --- a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ProgramExecutionSupport.scala +++ b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ProgramExecutionSupport.scala @@ -326,7 +326,7 @@ object ProgramExecutionSupport { value: ExpressionValue )(implicit ctx: RuntimeContext): Unit = { val expressionId = value.getExpressionId - val methodPointer = toMethodPointer(value) + val methodPointer = toMethodCall(value) if ( !syncState.isExpressionSync(expressionId) || ( @@ -391,7 +391,7 @@ object ProgramExecutionSupport { Api.ProfilingInfo.ExecutionTime(e.getNanoTimeElapsed) }.toVector, value.wasCached(), - value.isTypeChanged() || value.isFunctionCallChanged(), + value.isTypeChanged || value.isFunctionCallChanged, payload ) ) @@ -535,23 +535,26 @@ object ProgramExecutionSupport { ) } - /** Extract method pointer information from the expression value. + /** Extract the method call information from the provided expression value. * * @param value the expression value. 
- * @return the method pointer info + * @return the method call info */ - private def toMethodPointer( - value: ExpressionValue - ): Option[Api.MethodPointer] = + private def toMethodCall(value: ExpressionValue): Option[Api.MethodCall] = for { call <- Option(value.getCallInfo).orElse(Option(value.getCachedCallInfo)) moduleName <- Option(call.getModuleName) typeName <- Option(call.getTypeName) - } yield Api.MethodPointer( - moduleName.toString, - typeName.toString, - call.getFunctionName - ) + } yield { + Api.MethodCall( + methodPointer = Api.MethodPointer( + moduleName.toString, + typeName.toString.stripSuffix(TypeSuffix), + call.getFunctionName + ), + notAppliedArguments = call.getNotAppliedArguments.toVector + ) + } /** Find source file path by the module name. * @@ -590,4 +593,6 @@ object ProgramExecutionSupport { cache: RuntimeCache, syncState: UpdatesSynchronizationState ) + + private val TypeSuffix = ".type" } diff --git a/engine/runtime/src/test/scala/org/enso/interpreter/test/instrument/TestMessages.scala b/engine/runtime/src/test/scala/org/enso/interpreter/test/instrument/TestMessages.scala index f72cdf2589a9..6e8fd9fbb653 100644 --- a/engine/runtime/src/test/scala/org/enso/interpreter/test/instrument/TestMessages.scala +++ b/engine/runtime/src/test/scala/org/enso/interpreter/test/instrument/TestMessages.scala @@ -71,7 +71,7 @@ object TestMessages { * @param expressionType a type of the expression * @param fromCache whether or not the value for this expression came from cache * @param typeChanged a flag indicating whether the the type of expression has changed - * @param methodPointer method pointer + * @param methodCall the method call * @param payload the update payload * @return the expression update response */ @@ -79,10 +79,10 @@ object TestMessages { contextId: UUID, expressionId: UUID, expressionType: String, - fromCache: Boolean = false, - typeChanged: Boolean = true, - methodPointer: Option[Api.MethodPointer] = None, - payload: 
Api.ExpressionUpdate.Payload = Api.ExpressionUpdate.Payload.Value() + fromCache: Boolean = false, + typeChanged: Boolean = true, + methodCall: Option[Api.MethodCall] = None, + payload: Api.ExpressionUpdate.Payload = Api.ExpressionUpdate.Payload.Value() ): Api.Response = Api.Response( Api.ExpressionUpdates( @@ -91,7 +91,7 @@ object TestMessages { Api.ExpressionUpdate( expressionId, Some(expressionType), - methodPointer, + methodCall, Vector(Api.ProfilingInfo.ExecutionTime(0)), fromCache, typeChanged, @@ -106,23 +106,23 @@ object TestMessages { * @param contextId an identifier of the context * @param expressionId an identifier of the expression * @param expressionType a type of the expression - * @param methodPointer a pointer to the method definition + * @param methodCall a pointer to the method definition * @return the expression update response */ def update( contextId: UUID, expressionId: UUID, expressionType: String, - methodPointer: Api.MethodPointer + methodCall: Api.MethodCall ): Api.Response = - update(contextId, expressionId, expressionType, methodPointer, false, true) + update(contextId, expressionId, expressionType, methodCall, false, true) /** Create an update response. 
* * @param contextId an identifier of the context * @param expressionId an identifier of the expression * @param expressionType a type of the expression - * @param methodPointer a pointer to the method definition + * @param methodCall a pointer to the method definition * @param fromCache whether or not the value for this expression came * from the cache * @param typeChanged a flag indicating whether the the type of expression has changed @@ -132,7 +132,7 @@ object TestMessages { contextId: UUID, expressionId: UUID, expressionType: String, - methodPointer: Api.MethodPointer, + methodCall: Api.MethodCall, fromCache: Boolean, typeChanged: Boolean ): Api.Response = @@ -143,7 +143,7 @@ object TestMessages { Api.ExpressionUpdate( expressionId, Some(expressionType), - Some(methodPointer), + Some(methodCall), Vector(Api.ProfilingInfo.ExecutionTime(0)), fromCache, typeChanged, @@ -178,20 +178,20 @@ object TestMessages { * * @param contextId an identifier of the context * @param expressionId an identifier of the expression - * @param methodPointer a pointer to the method definition + * @param methodCall a pointer to the method definition * @param payload the error payload * @return the expression update response */ def error( contextId: UUID, expressionId: UUID, - methodPointer: Api.MethodPointer, + methodCall: Api.MethodCall, payload: Api.ExpressionUpdate.Payload ): Api.Response = error( contextId, expressionId, - methodPointer, + methodCall, false, true, payload @@ -201,7 +201,7 @@ object TestMessages { * * @param contextId an identifier of the context * @param expressionId an identifier of the expression - * @param methodPointer a pointer to the method definition + * @param methodCall a pointer to the method definition * @param fromCache whether or not the value for this expression came * from the cache * @param typeChanged a flag indicating whether the the type of expression has changed @@ -211,7 +211,7 @@ object TestMessages { def error( contextId: UUID, expressionId: 
UUID, - methodPointer: Api.MethodPointer, + methodCall: Api.MethodCall, fromCache: Boolean, typeChanged: Boolean, payload: Api.ExpressionUpdate.Payload @@ -219,7 +219,7 @@ object TestMessages { errorBuilder( contextId, expressionId, - Some(methodPointer), + Some(methodCall), fromCache, typeChanged, payload @@ -229,7 +229,7 @@ object TestMessages { * * @param contextId an identifier of the context * @param expressionId an identifier of the expression - * @param methodPointerOpt a pointer to the method definition + * @param methodCallOpt a pointer to the method definition * @param fromCache whether or not the value for this expression came * from the cache * @param typeChanged a flag indicating whether the the type of expression has changed @@ -239,7 +239,7 @@ object TestMessages { private def errorBuilder( contextId: UUID, expressionId: UUID, - methodPointerOpt: Option[Api.MethodPointer], + methodCallOpt: Option[Api.MethodCall], fromCache: Boolean, typeChanged: Boolean, payload: Api.ExpressionUpdate.Payload @@ -251,7 +251,7 @@ object TestMessages { Api.ExpressionUpdate( expressionId, Some(ConstantsGen.ERROR), - methodPointerOpt, + methodCallOpt, Vector(Api.ProfilingInfo.ExecutionTime(0)), fromCache, typeChanged, @@ -304,7 +304,7 @@ object TestMessages { * * @param contextId an identifier of the context * @param expressionId an identifier of the expression - * @param methodPointer a pointer to the method definition + * @param methodCall a pointer to the method definition * @param payload the error payload * @param builtin a flag indicating what is the type of Panic (a builtin Panic type or stdlib Panic) * @return the expression update response @@ -312,14 +312,14 @@ object TestMessages { def panic( contextId: UUID, expressionId: UUID, - methodPointer: Api.MethodPointer, + methodCall: Api.MethodCall, payload: Api.ExpressionUpdate.Payload, builtin: Boolean ): Api.Response = panicBuilder( contextId, expressionId, - Some(methodPointer), + Some(methodCall), payload, 
builtin, true @@ -329,7 +329,7 @@ object TestMessages { * * @param contextId an identifier of the context * @param expressionId an identifier of the expression - * @param methodPointer a pointer to the method definition + * @param methodCall a pointer to the method definition * @param payload the error payload * @param builtin the type to use * @return the expression update response @@ -337,14 +337,14 @@ object TestMessages { def panic( contextId: UUID, expressionId: UUID, - methodPointer: Api.MethodPointer, + methodCall: Api.MethodCall, payload: Api.ExpressionUpdate.Payload, builtin: Option[String] ): Api.Response = panicBuilder( contextId, expressionId, - Some(methodPointer), + Some(methodCall), payload, builtin, true @@ -353,7 +353,7 @@ object TestMessages { def panic( contextId: UUID, expressionId: UUID, - methodPointer: Api.MethodPointer, + methodCall: Api.MethodCall, payload: Api.ExpressionUpdate.Payload, builtin: Option[String], typeChanged: Boolean @@ -361,7 +361,7 @@ object TestMessages { panicBuilder( contextId, expressionId, - Some(methodPointer), + Some(methodCall), payload, builtin, typeChanged @@ -371,7 +371,7 @@ object TestMessages { * * @param contextId an identifier of the context * @param expressionId an identifier of the expression - * @param methodPointer a pointer to the method definition + * @param methodCall a pointer to the method definition * @param payload the error payload * @param builtin a flag indicating what is the type of Panic (a builtin Panic type or stdlib Panic) * @param typeChanged a flag indicating whether the the type of expression has changed @@ -380,14 +380,14 @@ object TestMessages { private def panicBuilder( contextId: UUID, expressionId: UUID, - methodPointer: Option[Api.MethodPointer], + methodCall: Option[Api.MethodCall], payload: Api.ExpressionUpdate.Payload, builtin: Boolean, typeChanged: Boolean ): Api.Response = panicBuilder( contextId, expressionId, - methodPointer, + methodCall, payload, Some( if (builtin) 
ConstantsGen.PANIC_BUILTIN else ConstantsGen.PANIC @@ -398,7 +398,7 @@ object TestMessages { private def panicBuilder( contextId: UUID, expressionId: UUID, - methodPointer: Option[Api.MethodPointer], + methodCall: Option[Api.MethodCall], payload: Api.ExpressionUpdate.Payload, builtin: Option[String], typeChanged: Boolean @@ -410,7 +410,7 @@ object TestMessages { Api.ExpressionUpdate( expressionId, builtin, - methodPointer, + methodCall, Vector(Api.ProfilingInfo.ExecutionTime(0)), false, typeChanged, From 2e39d7595fc30cd4aa52359b7d9776197ae176fe Mon Sep 17 00:00:00 2001 From: Adam Obuchowicz Date: Thu, 1 Jun 2023 09:55:13 +0200 Subject: [PATCH 06/39] Fix CB discarding changes if pressed Ctrl + Enter too quickly (#6875) Now, quick typing in component browser and pressing "enter" should not cut off the last part typed. Fixes #6733 https://github.com/enso-org/enso/assets/3919101/3979ed5a-ba4e-4e25-93e6-672e731b7bd8 On this occasion, also fixed "go-to-dashboard" button and "Unsupported engine version" being over the full-screen visualization. Fixes #6722 # Important Notes I did a significant refactoring of Project View: 1. The huge `frp::extend` block was split into multiple `init` methods. 2. Remaining of the "Old searcher" were removed. 3. The "Edited" event from node's input is emitted only when in edit mode (it's consistent with other API terminology, and makes FRP for showing CB much simpler. The code was _mostly_ moved around, but the check is advised anyway, as there were small changes here and there. 
--- app/gui/src/controller/searcher/action.rs | 10 +- app/gui/src/controller/searcher/component.rs | 28 +- .../src/component/node/input/area.rs | 10 +- app/gui/view/src/project.rs | 392 +++++++++--------- app/gui/view/src/root.rs | 10 +- .../src/size_capped_vec_deque.rs | 1 + 6 files changed, 232 insertions(+), 219 deletions(-) diff --git a/app/gui/src/controller/searcher/action.rs b/app/gui/src/controller/searcher/action.rs index bd24a47471d2..15badb559d32 100644 --- a/app/gui/src/controller/searcher/action.rs +++ b/app/gui/src/controller/searcher/action.rs @@ -135,8 +135,8 @@ pub enum MatchKind { /// The entry's label to be displayed in the component browser was matched. #[default] Label, - /// The code to be generated by the entry was matched. - Code, + /// The entry's name from the code was matched. + Name, /// An alias of the entry was matched, contains the specific alias that was matched. Alias(ImString), } @@ -204,7 +204,7 @@ impl ListEntry { fuzzly::find_best_subsequence(self.action.to_string(), pattern, metric) }); self.match_info = match subsequence { - Some(subsequence) => MatchInfo::Matches { subsequence, kind: MatchKind::Code }, + Some(subsequence) => MatchInfo::Matches { subsequence, kind: MatchKind::Label }, None => MatchInfo::DoesNotMatch, }; } @@ -442,7 +442,7 @@ impl<'a> CategoryBuilder<'a> { let category = self.category_id; built_list.entries.borrow_mut().extend(iter.into_iter().map(|action| { let match_info = - MatchInfo::Matches { subsequence: default(), kind: MatchKind::Code }; + MatchInfo::Matches { subsequence: default(), kind: MatchKind::Label }; ListEntry { category, match_info, action } })); } @@ -497,7 +497,7 @@ impl ListWithSearchResultBuilder { .iter() .map(|entry| { let match_info = - MatchInfo::Matches { subsequence: default(), kind: MatchKind::Code }; + MatchInfo::Matches { subsequence: default(), kind: MatchKind::Label }; let action = entry.action.clone_ref(); let category = self.search_result_category; ListEntry { category, 
match_info, action } diff --git a/app/gui/src/controller/searcher/component.rs b/app/gui/src/controller/searcher/component.rs index 142f8282cf50..41cfd7f1d5e5 100644 --- a/app/gui/src/controller/searcher/component.rs +++ b/app/gui/src/controller/searcher/component.rs @@ -176,36 +176,32 @@ impl Component { pub fn update_matching_info(&self, filter: Filter) { // Match the input pattern to the component label. let label = self.to_string(); - let label_matches = fuzzly::matches(&label, filter.pattern.clone_ref()); + let label_matches = fuzzly::matches(&label, filter.pattern.as_str()); let label_subsequence = label_matches.and_option_from(|| { let metric = fuzzly::metric::default(); - fuzzly::find_best_subsequence(label, filter.pattern.clone_ref(), metric) + fuzzly::find_best_subsequence(label, filter.pattern.as_str(), metric) }); let label_match_info = label_subsequence .map(|subsequence| MatchInfo::Matches { subsequence, kind: MatchKind::Label }); - // Match the input pattern to the code to be inserted. - let in_module = QualifiedName::as_ref(&filter.module_name); - let code = match &self.data { - Data::FromDatabase { entry, .. } => entry.code_to_insert(true, in_module).to_string(), - Data::Virtual { snippet } => snippet.code.to_string(), - }; - let code_matches = fuzzly::matches(&code, filter.pattern.clone_ref()); - let code_subsequence = code_matches.and_option_from(|| { + // Match the input pattern to the component name. 
+ let name = self.name(); + let name_matches = fuzzly::matches(name, filter.pattern.as_str()); + let name_subsequence = name_matches.and_option_from(|| { let metric = fuzzly::metric::default(); - fuzzly::find_best_subsequence(code, filter.pattern.clone_ref(), metric) + fuzzly::find_best_subsequence(name, filter.pattern.as_str(), metric) }); - let code_match_info = code_subsequence.map(|subsequence| { + let name_match_info = name_subsequence.map(|subsequence| { let subsequence = fuzzly::Subsequence { indices: Vec::new(), ..subsequence }; - MatchInfo::Matches { subsequence, kind: MatchKind::Code } + MatchInfo::Matches { subsequence, kind: MatchKind::Name } }); // Match the input pattern to an entry's aliases and select the best alias match. let alias_matches = self.aliases().filter_map(|alias| { - if fuzzly::matches(alias, filter.pattern.clone_ref()) { + if fuzzly::matches(alias, filter.pattern.as_str()) { let metric = fuzzly::metric::default(); let subsequence = - fuzzly::find_best_subsequence(alias, filter.pattern.clone_ref(), metric); + fuzzly::find_best_subsequence(alias, filter.pattern.as_str(), metric); subsequence.map(|subsequence| (subsequence, alias)) } else { None @@ -221,7 +217,7 @@ impl Component { }); // Select the best match of the available label-, code- and alias matches. 
- let match_info_iter = [alias_match_info, code_match_info, label_match_info].into_iter(); + let match_info_iter = [alias_match_info, name_match_info, label_match_info].into_iter(); let best_match_info = match_info_iter.flatten().max_by(|lhs, rhs| lhs.cmp(rhs)); *self.match_info.borrow_mut() = best_match_info.unwrap_or(MatchInfo::DoesNotMatch); diff --git a/app/gui/view/graph-editor/src/component/node/input/area.rs b/app/gui/view/graph-editor/src/component/node/input/area.rs index 9e3c5c0daab2..7a0c73de8897 100644 --- a/app/gui/view/graph-editor/src/component/node/input/area.rs +++ b/app/gui/view/graph-editor/src/component/node/input/area.rs @@ -357,6 +357,7 @@ ensogl::define_endpoints! { Output { pointer_style (cursor::Style), width (f32), + /// Changes done when nodes is in edit mode. expression_edit (ImString, Vec>), editing (bool), @@ -477,9 +478,10 @@ impl Area { legit_edit <- frp.input.edit_expression.gate(&set_editing); model.edit_mode_label.select <+ legit_edit.map(|(range, _)| (range.start.into(), range.end.into())); model.edit_mode_label.insert <+ legit_edit._1(); - expression_changed_by_user <- model.edit_mode_label.content.gate(&set_editing); - frp.output.source.expression_edit <+ model.edit_mode_label.selections.map2( - &expression_changed_by_user, + expression_edited <- model.edit_mode_label.content.gate(&set_editing); + selections_edited <- model.edit_mode_label.selections.gate(&set_editing); + frp.output.source.expression_edit <+ selections_edited.gate(&set_editing).map2( + &model.edit_mode_label.content, f!([model](selection, full_content) { let full_content = full_content.into(); let to_byte = |loc| text::Byte::from_in_context_snapped(&model.edit_mode_label, loc); @@ -487,7 +489,7 @@ impl Area { (full_content, selections) }) ); - frp.output.source.on_port_code_update <+ expression_changed_by_user.map(|e| { + frp.output.source.on_port_code_update <+ expression_edited.map(|e| { // Treat edit mode update as a code modification at the span tree root. 
(default(), e.into()) }); diff --git a/app/gui/view/src/project.rs b/app/gui/view/src/project.rs index 185fa7f967d7..00cb7cafd592 100644 --- a/app/gui/view/src/project.rs +++ b/app/gui/view/src/project.rs @@ -15,7 +15,6 @@ use crate::graph_editor::GraphEditor; use crate::graph_editor::NodeId; use crate::popup; use crate::project_list::ProjectList; -use crate::searcher; use enso_config::ARGS; use enso_frp as frp; @@ -23,8 +22,9 @@ use ensogl::application; use ensogl::application::shortcut; use ensogl::application::Application; use ensogl::display; +use ensogl::display::Scene; use ensogl::system::web; -use ensogl::DEPRECATED_Animation; +use ensogl::Animation; use ensogl_component::text; use ensogl_component::text::selection::Selection; use ensogl_hardcoded_theme::Theme; @@ -85,10 +85,6 @@ ensogl::define_endpoints! { hide_project_list(), /// Close the searcher without taking any actions close_searcher(), - /// Show the graph editor. - show_graph_editor(), - /// Hide the graph editor. - hide_graph_editor(), /// Simulates a style toggle press event. toggle_style(), /// Toggles the visibility of private components in the component browser. @@ -126,7 +122,6 @@ ensogl::define_endpoints! { adding_new_node (bool), old_expression_of_edited_node (Expression), editing_aborted (NodeId), - editing_committed_old_searcher (NodeId, Option), editing_committed (NodeId, Option), project_list_shown (bool), code_editor_shown (bool), @@ -226,31 +221,13 @@ impl Model { } } - /// Update Searcher View - its visibility and position - when edited node changed. - fn update_searcher_view( - &self, - searcher_parameters: Option, - is_searcher_empty: bool, - searcher_left_top_position: &DEPRECATED_Animation>, - ) { - match searcher_parameters { - Some(SearcherParams { input, .. 
}) if !is_searcher_empty => { - self.searcher.show(); - let new_position = self.searcher_anchor_next_to_node(input); - searcher_left_top_position.set_target_value(new_position); - } - _ => { - self.searcher.hide(); - } - } - } - fn show_fullscreen_visualization(&self, node_id: NodeId) { let node = self.graph_editor.nodes().get_cloned_ref(&node_id); if let Some(node) = node { let visualization = node.view.model().visualization.fullscreen_visualization().clone_ref(); self.display_object.remove_child(&*self.graph_editor); + self.display_object.remove_child(&self.project_view_top_bar); self.display_object.add_child(&visualization); *self.fullscreen_vis.borrow_mut() = Some(visualization); } @@ -260,6 +237,7 @@ impl Model { if let Some(visualization) = std::mem::take(&mut *self.fullscreen_vis.borrow_mut()) { self.display_object.remove_child(&visualization); self.display_object.add_child(&*self.graph_editor); + self.display_object.add_child(&self.project_view_top_bar); } } @@ -292,14 +270,6 @@ impl Model { fn hide_project_list(&self) { self.display_object.remove_child(&*self.project_list); } - - fn show_graph_editor(&self) { - self.display_object.add_child(&*self.graph_editor); - } - - fn hide_graph_editor(&self) { - self.display_object.remove_child(&*self.graph_editor); - } } @@ -368,32 +338,36 @@ impl View { _ => Theme::Light, }; - let scene = app.display.default_scene.clone_ref(); + let scene = &app.display.default_scene; scene.begin_shader_initialization(); let model = Model::new(app); let frp = Frp::new(); - let network = &frp.network; - let searcher = &model.searcher.frp(); - let project_view_top_bar = &model.project_view_top_bar; - let graph = &model.graph_editor.frp; - let code_editor = &model.code_editor; - let project_list = &model.project_list; - let searcher_anchor = DEPRECATED_Animation::>::new(network); // FIXME[WD]: Think how to refactor it, as it needs to be done before model, as we do not // want shader recompilation. Model uses styles already. 
model.set_style(theme); - let input_change_delay = frp::io::timer::Timeout::new(network); + Self { model, frp } + .init_top_bar_frp(scene) + .init_graph_editor_frp() + .init_code_editor_frp() + .init_searcher_position_frp(scene) + .init_searcher_input_changes_frp() + .init_opening_searcher_frp() + .init_closing_searcher_frp() + .init_open_projects_dialog_frp(scene) + .init_style_toggle_frp() + .init_fullscreen_visualization_frp() + .init_debug_mode_frp() + } + + fn init_top_bar_frp(self, scene: &Scene) -> Self { + let frp = &self.frp; + let network = &frp.network; + let model = &self.model; + let project_view_top_bar = &model.project_view_top_bar; frp::extend! { network init <- source_(); - - eval_ frp.show_graph_editor(model.show_graph_editor()); - eval_ frp.hide_graph_editor(model.hide_graph_editor()); - - - // === Project View Top Bar === - let window_control_buttons = &project_view_top_bar.window_control_buttons; eval_ window_control_buttons.close (model.on_close_clicked()); eval_ window_control_buttons.fullscreen (model.on_fullscreen_clicked()); @@ -412,20 +386,56 @@ impl View { ); project_view_top_bar_width <- project_view_top_bar_display_object.on_resized.map(|new_size| new_size.x); - graph.graph_editor_top_bar_offset_x <+ project_view_top_bar_width; + self.model.graph_editor.graph_editor_top_bar_offset_x <+ project_view_top_bar_width; + } + init.emit(()); + self + } + fn init_graph_editor_frp(self) -> Self { + let frp = &self.frp; + let network = &frp.network; + let model = &self.model; + let graph = &model.graph_editor; + let searcher = &model.searcher; + let documentation = &searcher.model().documentation; - // === Read-only mode === + frp::extend! { network + // We block graph navigator if it interferes with other panels (searcher, documentation, + // etc.) 
+ searcher_active <- searcher.is_hovered || documentation.frp.is_selected; + disable_navigation <- searcher_active || frp.project_list_shown; + graph.set_navigator_disabled <+ disable_navigation; + model.popup.set_label <+ graph.model.breadcrumbs.project_name_error; graph.set_read_only <+ frp.set_read_only; - code_editor.set_read_only <+ frp.set_read_only; + graph.set_debug_mode <+ frp.source.debug_mode; + + frp.source.fullscreen_visualization_shown <+ + graph.output.visualization_fullscreen.is_some(); + } + self + } + fn init_code_editor_frp(self) -> Self { + let _network = &self.frp.network; + frp::extend! { _network + self.model.code_editor.set_read_only <+ self.frp.set_read_only; + } + self + } - // === Searcher Position and Size === + fn init_searcher_position_frp(self, scene: &Scene) -> Self { + let frp = &self.frp; + let network = &frp.network; + let model = &self.model; + let main_cam = scene.layers.main.camera(); + let searcher_cam = scene.layers.node_searcher.camera(); + let main_cam_frp = &main_cam.frp(); + let searcher = &self.model.searcher; + let anchor = Animation::>::new(network); - let main_cam = app.display.default_scene.layers.main.camera(); - let searcher_cam = app.display.default_scene.layers.node_searcher.camera(); - let main_cam_frp = &main_cam.frp(); + frp::extend! { network // We want to: // 1. Preserve the zoom factor of the searcher. // 2. Keep it directly below edited node at all times. @@ -442,8 +452,10 @@ impl View { // x = 100 * 0.1 = 10 in searcher_cam-space. To compensate for that, we need to move // searcher (or rather searcher_cam) by 90 units, so that the node is at x = 100 both // in searcher_cam- and in main_cam-space. 
- searcher_cam_pos <- all_with3 - (&main_cam_frp.position, &main_cam_frp.zoom, &searcher_anchor.value, + searcher_cam_pos <- all_with3( + &main_cam_frp.position, + &main_cam_frp.zoom, + &anchor.value, |&main_cam_pos, &zoom, &searcher_pos| { let preserve_zoom = (main_cam_pos * zoom).xy(); let move_to_edited_node = searcher_pos * (1.0 - zoom); @@ -451,148 +463,151 @@ impl View { }); eval searcher_cam_pos ((pos) searcher_cam.set_xy(*pos)); + // Compute positions. It should be done _before_ showing searcher (or we display it at + // wrong position). + input <- frp.searcher.filter_map(|s| Some(s.as_ref()?.input)); + let node_position_set = model.graph_editor.output.node_position_set.clone_ref(); + is_input_position_update <- + node_position_set.map2(&input, |&(node_id, _), &input_id| node_id == input_id); + input_position_changed <- is_input_position_update.on_true(); + set_anchor_to_node <- all(input, input_position_changed)._0(); + anchor.target <+ set_anchor_to_node.map(f!((&input) model.searcher_anchor_next_to_node(input))); + anchor.skip <+ set_anchor_to_node.gate_not(&searcher.is_visible).constant(()); + let searcher_offset = &model.searcher.expression_input_position; + position <- all_with(&anchor.value, searcher_offset, |anchor, pos| anchor - pos); + eval position ((pos) model.searcher.set_xy(*pos)); + + // Showing searcher. 
+ searcher.show <+ frp.searcher.is_some().on_true().constant(()); + searcher.hide <+ frp.searcher.is_none().on_true().constant(()); eval searcher.is_visible ([model](is_visible) { let is_attached = model.searcher.has_parent(); - if !is_attached && *is_visible { - model.display_object.add_child(&model.searcher); - } else if is_attached && !is_visible { - model.display_object.remove_child(&model.searcher); + match (is_attached, is_visible) { + (false, true) => model.display_object.add_child(&model.searcher), + (true, false) => model.display_object.remove_child(&model.searcher), + _ => () } }); + } + self + } + fn init_opening_searcher_frp(self) -> Self { + let frp = &self.frp; + let network = &frp.network; + let graph = &self.model.graph_editor; - // === Closing Searcher + frp::extend! { network + node_added_by_user <- graph.node_added.filter(|(_, _, should_edit)| *should_edit); + searcher_for_adding <- node_added_by_user.map( + |&(node, src, _)| SearcherParams::new_for_new_node(node, src) + ); + frp.source.adding_new_node <+ searcher_for_adding.to_true(); + new_node_edited <- graph.node_editing_started.gate(&frp.adding_new_node); + frp.source.searcher <+ searcher_for_adding.sample(&new_node_edited).some(); + edit_which_opens_searcher <- + graph.node_expression_edited.gate_not(&frp.is_searcher_opened).debounce(); + frp.source.searcher <+ edit_which_opens_searcher.map(|(node_id, _, selections)| { + let cursor_position = selections.last().map(|sel| sel.end).unwrap_or_default(); + Some(SearcherParams::new_for_edited_node(*node_id, cursor_position)) + }); frp.source.is_searcher_opened <+ frp.searcher.map(|s| s.is_some()); - last_searcher <- frp.searcher.filter_map(|&s| s); + } + self + } - finished_with_searcher <- graph.node_editing_finished.gate(&frp.is_searcher_opened); - frp.source.searcher <+ frp.close_searcher.constant(None); - frp.source.searcher <+ searcher.editing_committed.constant(None); - frp.source.searcher <+ finished_with_searcher.constant(None); + fn 
init_closing_searcher_frp(self) -> Self { + let frp = &self.frp; + let network = &frp.network; + let grid = &self.model.searcher.model().list.model().grid; + let graph = &self.model.graph_editor; + frp::extend! { network + last_searcher <- frp.searcher.filter_map(|&s| s); + + node_editing_finished <- graph.node_editing_finished.gate(&frp.is_searcher_opened); + committed_in_searcher <- + grid.expression_accepted.map2(&last_searcher, |&entry, &s| (s.input, entry)); aborted_in_searcher <- frp.close_searcher.map2(&last_searcher, |(), &s| s.input); + frp.source.editing_committed <+ committed_in_searcher; + frp.source.editing_committed <+ node_editing_finished.map(|id| (*id,None)); frp.source.editing_aborted <+ aborted_in_searcher; - } - let grid = &model.searcher.model().list.model().grid; - frp::extend! { network - committed_in_browser <- grid.expression_accepted.map2(&last_searcher, |&entry, &s| (s.input, entry)); - frp.source.editing_committed <+ committed_in_browser; - frp.source.editing_committed <+ finished_with_searcher.map(|id| (*id,None)); - } + // Should be done before we update `searcher` and `adding_new_node` outputs. + adding_committed <- committed_in_searcher.gate(&frp.adding_new_node); + graph.deselect_all_nodes <+ adding_committed.constant(()); + graph.select_node <+ adding_committed._0(); - let anchor = &searcher_anchor.value; - frp::extend! 
{ network - committed_in_searcher_event <- searcher.editing_committed.constant(()); + node_editing_finished_event <- node_editing_finished.constant(()); + committed_in_searcher_event <- committed_in_searcher.constant(()); aborted_in_searcher_event <- aborted_in_searcher.constant(()); + searcher_should_close <- any( + node_editing_finished_event, + committed_in_searcher_event, + aborted_in_searcher_event + ); graph.stop_editing <+ any(&committed_in_searcher_event, &aborted_in_searcher_event); + frp.source.searcher <+ searcher_should_close.constant(None); + frp.source.adding_new_node <+ searcher_should_close.constant(false); + } + self + } + fn init_searcher_input_changes_frp(self) -> Self { + let frp = &self.frp; + let network = &frp.network; + let graph = &self.model.graph_editor; + let input_change_delay = frp::io::timer::Timeout::new(network); - // === Editing === - - node_edited_by_user <- graph.node_being_edited.gate_not(&frp.adding_new_node); - existing_node_edited <- graph.node_expression_edited.gate_not(&frp.is_searcher_opened); - open_searcher <- existing_node_edited.map2(&node_edited_by_user, - |(id, _, _), edited| edited.map_or(false, |edited| *id == edited) - ).on_true().debounce(); - cursor_position <- existing_node_edited.map2( - &node_edited_by_user, - |(node_id, _, selections), edited| { - edited.map_or(None, |edited| { - let position = || selections.last().map(|sel| sel.end).unwrap_or_default(); - (*node_id == edited).then(position) - }) - } - ).filter_map(|pos| *pos); - edited_node <- node_edited_by_user.filter_map(|node| *node); - position_and_edited_node <- cursor_position.map2(&edited_node, |pos, id| (*pos, *id)); - prepare_params <- position_and_edited_node.sample(&open_searcher); - frp.source.searcher <+ prepare_params.map(|(pos, node_id)| { - Some(SearcherParams::new_for_edited_node(*node_id, *pos)) - }); + frp::extend! 
{ network searcher_input_change_opt <- graph.node_expression_edited.map2(&frp.searcher, |(node_id, expr, selections), searcher| { let input_change = || (*node_id, expr.clone_ref(), selections.clone()); (searcher.as_ref()?.input == *node_id).then(input_change) } ); - searcher_input_change <- searcher_input_change_opt.unwrap(); - input_change_delay.restart <+ searcher_input_change.constant(INPUT_CHANGE_DELAY_MS); - update_searcher_input_on_commit <- frp.output.editing_committed.constant(()); - input_change_delay.cancel <+ update_searcher_input_on_commit; - update_searcher_input <- any(&input_change_delay.on_expired, &update_searcher_input_on_commit); - input_change_and_searcher <- all_with(&searcher_input_change, &frp.searcher, - |c, s| (c.clone(), *s) - ); - updated_input <- input_change_and_searcher.sample(&update_searcher_input); + input_change <- searcher_input_change_opt.unwrap(); + input_change_delay.restart <+ input_change.constant(INPUT_CHANGE_DELAY_MS); + update_input_on_commit <- frp.output.editing_committed.constant(()); + input_change_delay.cancel <+ update_input_on_commit; + update_input <- any(&input_change_delay.on_expired, &update_input_on_commit); + input_change_and_searcher <- + all_with(&input_change, &frp.searcher, |c, s| (c.clone(), *s)); + updated_input <- input_change_and_searcher.sample(&update_input); input_changed <- updated_input.filter_map(|((node_id, expr, selections), searcher)| { let input_change = || (expr.clone_ref(), selections.clone()); (searcher.as_ref()?.input == *node_id).then(input_change) }); frp.source.searcher_input_changed <+ input_changed; + } + self + } - // === Adding Node === - - node_added_by_user <- graph.node_added.filter(|(_, _, should_edit)| *should_edit); - searcher_for_adding <- node_added_by_user.map( - |&(node, src, _)| SearcherParams::new_for_new_node(node, src) - ); - frp.source.adding_new_node <+ searcher_for_adding.to_true(); - new_node_edited <- graph.node_editing_started.gate(&frp.adding_new_node); - 
frp.source.searcher <+ searcher_for_adding.sample(&new_node_edited).map(|&s| Some(s)); - - adding_committed_new_searcher <- frp.editing_committed.map(|(id,_)| *id); - adding_committed_old_searcher <- frp.editing_committed_old_searcher.map(|(id,_)| *id); - adding_committed <- any(&adding_committed_new_searcher,&adding_committed_old_searcher).gate(&frp.adding_new_node); - adding_aborted <- frp.editing_aborted.gate(&frp.adding_new_node); - adding_finished <- any(adding_committed,adding_aborted); - frp.source.adding_new_node <+ adding_finished.constant(false); - frp.source.searcher <+ adding_finished.constant(None); - - eval adding_committed ([graph](node) { - graph.deselect_all_nodes(); - graph.select_node(node); - }); - - - // === Searcher Position and Visibility === - - visibility_conditions <- all(&frp.searcher,&searcher.is_empty); - _eval <- visibility_conditions.map2(&searcher.is_visible, - f!([model,searcher_anchor]((searcher,is_searcher_empty),is_visible) { - model.update_searcher_view(*searcher,*is_searcher_empty,&searcher_anchor); - if !is_visible { - // Do not animate - searcher_anchor.skip(); - } - }) - ); - - _eval <- graph.output.node_position_set.map2(&frp.searcher, - f!([searcher_anchor](&(node_id, position), &searcher) { - if searcher.map_or(false, |s| s.input == node_id) { - searcher_anchor.set_target_value(position); - } - }) - ); - - cb_position <- all_with(anchor, &model.searcher.expression_input_position, |anchor, pos| anchor - pos); - eval cb_position ((pos) model.searcher.set_xy(*pos)); - - // === Project Dialog === - + fn init_open_projects_dialog_frp(self, scene: &Scene) -> Self { + let frp = &self.frp; + let network = &frp.network; + let model = &self.model; + let project_list = &model.project_list; + frp::extend! 
{ network eval_ frp.show_project_list (model.show_project_list()); project_chosen <- project_list.frp.selected_project.constant(()); - mouse_down <- scene.mouse.frp_deprecated.down.constant(()); - clicked_on_bg <- mouse_down.filter(f_!(scene.mouse.target.get().is_background())); + mouse_down <- scene.mouse.frp_deprecated.down.constant(()); + clicked_on_bg <- mouse_down.filter(f_!(scene.mouse.target.get().is_background())); should_be_closed <- any(frp.hide_project_list,project_chosen,clicked_on_bg); eval_ should_be_closed (model.hide_project_list()); frp.source.project_list_shown <+ bool(&should_be_closed,&frp.show_project_list); + frp.source.drop_files_enabled <+ frp.project_list_shown.map(|v| !v); + } + frp.source.drop_files_enabled.emit(true); + self + } - - // === Style toggle === - + fn init_style_toggle_frp(self) -> Self { + let frp = &self.frp; + let network = &frp.network; + let model = &self.model; + frp::extend! {network let style_toggle_ev = frp.toggle_style.clone_ref(); style_pressed <- style_toggle_ev.toggle() ; style_was_pressed <- style_pressed.previous(); @@ -603,10 +618,15 @@ impl View { }); frp.source.style <+ style_press_on_off; eval frp.style ((style) model.set_style(style.clone())); + } + self + } - - // === Fullscreen Visualization === - + fn init_fullscreen_visualization_frp(self) -> Self { + let network = &self.frp.network; + let model = &self.model; + let graph = &self.model.graph_editor; + frp::extend! { network // TODO[ao]: All DOM elements in visualizations are displayed below canvas, because // The mouse cursor must be displayed over them. But fullscreen visualization should // be displayed "above" nodes. 
The workaround is to hide whole graph editor except @@ -621,35 +641,21 @@ impl View { model.hide_fullscreen_visualization() } }); + } + self + } - // === Disabling Navigation === - - let documentation = &model.searcher.model().documentation; - searcher_active <- searcher.is_hovered || documentation.frp.is_selected; - disable_navigation <- searcher_active || frp.project_list_shown; - graph.set_navigator_disabled <+ disable_navigation; - - // === Disabling Dropping === - - frp.source.drop_files_enabled <+ init.constant(true); - frp.source.drop_files_enabled <+ frp.project_list_shown.map(|v| !v); - - // === Debug Mode === - + fn init_debug_mode_frp(self) -> Self { + let frp = &self.frp; + let network = &frp.network; + let popup = &self.model.debug_mode_popup; + frp::extend! { network frp.source.debug_mode <+ bool(&frp.disable_debug_mode, &frp.enable_debug_mode); - graph.set_debug_mode <+ frp.source.debug_mode; - - model.debug_mode_popup.enabled <+ frp.enable_debug_mode; - model.debug_mode_popup.disabled <+ frp.disable_debug_mode; - // === Error Pop-up === - - model.popup.set_label <+ model.graph_editor.model.breadcrumbs.project_name_error; + popup.enabled <+ frp.enable_debug_mode; + popup.disabled <+ frp.disable_debug_mode; } - - init.emit(()); - - Self { model, frp } + self } /// Graph Editor View. diff --git a/app/gui/view/src/root.rs b/app/gui/view/src/root.rs index 11984b2f081e..f48b82a4cb1b 100644 --- a/app/gui/view/src/root.rs +++ b/app/gui/view/src/root.rs @@ -84,9 +84,17 @@ impl Model { fn init_project_view(&self) { if self.project_view.get().is_none() { + let network = &self.frp.network; let view = self.app.new_view::(); let project_list_frp = &view.project_list().frp; - frp::extend! { network + let status_bar = &self.status_bar; + let display_object = &self.display_object; + frp::new_bridge_network! 
{ [network, view.network] project_bridge + fs_vis_shown <- view.fullscreen_visualization_shown.on_true(); + fs_vis_hidden <- view.fullscreen_visualization_shown.on_false(); + eval fs_vis_shown ((_) status_bar.unset_parent()); + eval fs_vis_hidden ([display_object, status_bar](_) display_object.add_child(&status_bar)); + self.frp.source.selected_project <+ project_list_frp.selected_project; } self.project_view.set(Some(view)); diff --git a/lib/rust/data-structures/src/size_capped_vec_deque.rs b/lib/rust/data-structures/src/size_capped_vec_deque.rs index 97ac8a043c28..ad6ac0f0d1fe 100644 --- a/lib/rust/data-structures/src/size_capped_vec_deque.rs +++ b/lib/rust/data-structures/src/size_capped_vec_deque.rs @@ -1,5 +1,6 @@ //! A vector with a cap for its size. If the vector is full, adding a new element will remove //! an old one. + use std::collections::VecDeque; From da0c88b89ff488c150a459790a355c5349e9414a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wojciech=20Dani=C5=82o?= Date: Thu, 1 Jun 2023 13:02:48 +0200 Subject: [PATCH 07/39] Fix GPU performance tool panic (#6916) * Fix * Adding docs --- lib/rust/ensogl/core/src/display/world.rs | 37 +++++++++++++---------- 1 file changed, 21 insertions(+), 16 deletions(-) diff --git a/lib/rust/ensogl/core/src/display/world.rs b/lib/rust/ensogl/core/src/display/world.rs index 17ce3a6ca837..cc9f33fa8a83 100644 --- a/lib/rust/ensogl/core/src/display/world.rs +++ b/lib/rust/ensogl/core/src/display/world.rs @@ -560,22 +560,27 @@ impl WorldData { { if let Some(gpu_perf_results) = &gpu_perf_results { for result in gpu_perf_results { - // The monitor is not updated yet, so the last sample is from the previous - // frame. 
- let frame_offset = result.frame_offset - 1; - if frame_offset == 0 { - let stats_data = &mut self.stats.borrow_mut().stats_data; - stats_data.gpu_time = Some(result.total); - stats_data.cpu_and_idle_time = Some(stats_data.frame_time - result.total); - } else { - // The last sampler stored in monitor is from 2 frames ago, as the last - // frame stats are not submitted yet. - let sampler_offset = result.frame_offset - 2; - self.stats_monitor.with_last_nth_sample(sampler_offset, |sample| { - sample.gpu_time = Some(result.total); - sample.cpu_and_idle_time = Some(sample.frame_time - result.total); - }); - self.stats_monitor.redraw_historical_data(sampler_offset); + // If run in the first frame, the results can be reported with frame offset + // being 0. In such a case, we are ignoring it. + if result.frame_offset > 0 { + // The monitor is not updated yet, so the last sample is from the previous + // frame. + let frame_offset = result.frame_offset - 1; + if frame_offset == 0 { + let stats_data = &mut self.stats.borrow_mut().stats_data; + stats_data.gpu_time = Some(result.total); + stats_data.cpu_and_idle_time = + Some(stats_data.frame_time - result.total); + } else { + // The last sampler stored in monitor is from 2 frames ago, as the last + // frame stats are not submitted yet. 
+ let sampler_offset = result.frame_offset - 2; + self.stats_monitor.with_last_nth_sample(sampler_offset, |sample| { + sample.gpu_time = Some(result.total); + sample.cpu_and_idle_time = Some(sample.frame_time - result.total); + }); + self.stats_monitor.redraw_historical_data(sampler_offset); + } } } } From d5116027871bdfdcf77bde7d7f1bb511194f69c6 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Thu, 1 Jun 2023 14:26:36 +0200 Subject: [PATCH 08/39] Don't swallow exceptions in RuntimeServerTest (#6914) --- .../enso/interpreter/test/instrument/RuntimeServerTest.scala | 5 +++-- .../instrument/execution/JobExecutionEngine.scala | 3 +++ 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeServerTest.scala b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeServerTest.scala index 66b2b00907ed..7b78ad0d6c8f 100644 --- a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeServerTest.scala +++ b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeServerTest.scala @@ -19,6 +19,7 @@ import org.scalatest.matchers.should.Matchers import java.io.{ByteArrayOutputStream, File} import java.nio.file.{Files, Path, Paths} import java.util.UUID +import org.apache.commons.io.output.TeeOutputStream @scala.annotation.nowarn("msg=multiarg infix syntax") class RuntimeServerTest @@ -67,8 +68,8 @@ class RuntimeServerTest .getAbsolutePath ) .option(RuntimeOptions.EDITION_OVERRIDE, "0.0.0-dev") - .logHandler(logOut) - .out(out) + .logHandler(new TeeOutputStream(logOut, System.err)) + .out(new TeeOutputStream(out, System.err)) .serverTransport(runtimeServerEmulator.makeServerTransport) .build() ) diff --git a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/execution/JobExecutionEngine.scala 
b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/execution/JobExecutionEngine.scala index 97344df3ba10..5977a76d1c61 100644 --- a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/execution/JobExecutionEngine.scala +++ b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/execution/JobExecutionEngine.scala @@ -116,6 +116,9 @@ final class JobExecutionEngine( case NonFatal(ex) => logger.log(Level.SEVERE, s"Error executing $job", ex) promise.failure(ex) + case err: Throwable => + logger.log(Level.SEVERE, s"Error executing $job", err) + throw err } finally { runningJobsRef.updateAndGet(_.filterNot(_.id == jobId)) } From 03371803849aa44d267856f3c6b828f97e602e7a Mon Sep 17 00:00:00 2001 From: GregoryTravis Date: Thu, 1 Jun 2023 16:06:23 -0400 Subject: [PATCH 09/39] Add rounding functions to the Column type (#6817) --- CHANGELOG.md | 2 + .../Base/0.0.0-dev/src/Data/Numbers.enso | 33 +-- .../Base/0.0.0-dev/src/Data/Range.enso | 16 ++ .../Database/0.0.0-dev/src/Data/Column.enso | 21 ++ .../Table/0.0.0-dev/src/Data/Column.enso | 119 ++++++++- .../0.0.0-dev/src/Internal/Column_Format.enso | 56 ----- .../0.0.0-dev/src/Internal/Column_Ops.enso | 51 ++++ .../0.0.0-dev/src/Internal/Java_Exports.enso | 25 ++ .../column/builder/object/DateBuilder.java | 4 + .../src/In_Memory/Column_Spec.enso | 229 +++++++++++------- test/Tests/src/Data/Numbers_Spec.enso | 25 +- test/Tests/src/Data/Range_Spec.enso | 7 + 12 files changed, 410 insertions(+), 178 deletions(-) create mode 100644 distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Ops.enso diff --git a/CHANGELOG.md b/CHANGELOG.md index 5c6bbe6dc01b..862d1a96c44a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -470,6 +470,7 @@ - [Added `Date_Range`.][6621] - [Implemented the `cast` operation for `Table` and `Column`.][6711] - [Added `.round` and `.int` to `Integer` and `Decimal`.][6743] +- [Added `.round`, `.truncate`, `.ceil`, and `.floor` to `Column`.][6817] [debug-shortcuts]: 
https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug @@ -682,6 +683,7 @@ [6621]: https://github.com/enso-org/enso/pull/6621 [6711]: https://github.com/enso-org/enso/pull/6711 [6743]: https://github.com/enso-org/enso/pull/6743 +[6817]: https://github.com/enso-org/enso/pull/6817 #### Enso Compiler diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso index dd91fa1987f1..f9e60b11a194 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso @@ -601,14 +601,14 @@ type Decimal Use Banker's Rounding. 2.5 . round use_bankers=True == 2 - round : Integer -> Integer | Decimal ! Illegal_Argument + round : Integer -> Boolean -> Integer | Decimal ! Illegal_Argument round self decimal_places=0 use_bankers=False = check_decimal_places decimal_places <| case self.is_nan || self.is_infinite of True -> msg = "round cannot accept " + self.to_text Error.throw (Arithmetic_Error.Error msg) - False -> + False -> check_round_input self <| decimal_result = case use_bankers of False -> scale = 10 ^ decimal_places @@ -923,18 +923,13 @@ type Integer Round to the nearest hundred, using Banker's Rounding. 12250 . round -2 use_bankers=True == 12200 - round : Integer -> Integer ! Illegal_Argument + round : Integer -> Boolean -> Integer ! Illegal_Argument round self decimal_places=0 use_bankers=False = - check_decimal_places decimal_places <| - case self < round_min_long || self > round_max_long of - True -> - msg = "Error: Integer.round can only accept values between " + round_min_long.to_text + " and " + round_max_long.to_text + "(inclusive), but was " + self.to_text - Error.throw (Illegal_Argument.Error msg) - False -> - ## It's already an integer so unless decimal_places is - negative, the value is unchanged. 
- if decimal_places >= 0 then self else - self.to_decimal.round decimal_places use_bankers . truncate + check_decimal_places decimal_places <| check_round_input self <| + ## It's already an integer so unless decimal_places is + negative, the value is unchanged. + if decimal_places >= 0 then self else + self.to_decimal.round decimal_places use_bankers . truncate ## Compute the negation of this. @@ -1132,8 +1127,18 @@ round_max_long = 99999999999999 round_min_long : Integer round_min_long = -99999999999999 +## PRIVATE + Restrict rounding decimal_places parameter. check_decimal_places : Integer -> Function check_decimal_places decimal_places ~action = if decimal_places >= round_min_decimal_places && decimal_places <= round_max_decimal_places then action else - msg = "round: decimal_places must be between " + round_min_decimal_places.to_text + " and " + round_max_decimal_places.to_text + "(inclusive), but was " + decimal_places.to_text + msg = "round: decimal_places must be between " + round_min_decimal_places.to_text + " and " + round_max_decimal_places.to_text + " (inclusive), but was " + decimal_places.to_text + Error.throw (Illegal_Argument.Error msg) + +## PRIVATE + Restrict allowed range of input to rounding methods. 
+check_round_input : Number -> Function +check_round_input n ~action = + if n >= round_min_long && n <= round_max_long then action else + msg = "Error: `round` can only accept values between " + round_min_long.to_text + " and " + round_max_long.to_text + " (inclusive), but was " + n.to_text Error.throw (Illegal_Argument.Error msg) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso index df9b105f93b3..1a7374d37885 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Range.enso @@ -200,6 +200,22 @@ type Range @Tail_Call go current+self.step go self.start + ## PRIVATE + ADVANCED + + Executes a function for each element in the range. Exits early if the body + produces an `Error`. + each_propagate : (Integer -> Nothing) -> Nothing ! Error + each_propagate self function = + if self.step == 0 then throw_zero_step_error else + end_condition = if self.step > 0 then (>=) else (<=) + go current = + if end_condition current self.end then Nothing else + result = function current + result.if_not_error <| + @Tail_Call go current+self.step + go self.start + ## PRIVATE ADVANCED Applies a function to each element of the range. diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso index 3f261cb6d125..789fc8bb9576 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso @@ -557,6 +557,27 @@ type Column op_result = self.make_op "IIF" [when_true, when_false] new_name adapt_unified_column op_result common_type + ## Rounding values is not supported in database columns. 
+ round : Integer -> Boolean -> Problem_Behavior -> Column | Illegal_Argument | Invalid_Value_Type + round self decimal_places=0 use_bankers=False on_problems=Report_Warning = + _ = [decimal_places, use_bankers, on_problems] + Error.throw <| Unsupported_Database_Operation.Error "`Column.round` is not implemented yet for the Database backends." + + ## Truncating values is not supported in database columns. + truncate : Column ! Invalid_Value_Type + truncate self = + Error.throw <| Unsupported_Database_Operation.Error "`Column.truncate` is not implemented yet for the Database backends." + + ## Taking the ceiling of values is not supported in database columns. + ceil : Column ! Invalid_Value_Type + ceil self = + Error.throw <| Unsupported_Database_Operation.Error "`Column.ceil` is not implemented yet for the Database backends." + + ## Taking the floor of values is not supported in database columns. + floor : Column ! Invalid_Value_Type + floor self = + Error.throw <| Unsupported_Database_Operation.Error "`Column.floor` is not implemented yet for the Database backends." + ## Returns a column of first non-`Nothing` value on each row of `self` and `values` list. 
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso index d526de4d2aec..a214e821e733 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso @@ -13,6 +13,7 @@ import project.Data.Type.Storage import project.Data.Type.Value_Type_Helpers import project.Data.Table.Table import project.Internal.Cast_Helpers +import project.Internal.Column_Ops import project.Internal.Java_Problems import project.Internal.Naming_Helpers.Naming_Helpers import project.Internal.Parse_Values_Helper @@ -23,8 +24,9 @@ from project.Internal.Column_Format import all from project.Data.Table import print_table from project.Data.Type.Value_Type import Value_Type, Auto from project.Errors import No_Index_Set_Error, Floating_Point_Equality, Invalid_Value_Type, Inexact_Type_Coercion, Conversion_Failure -from project.Internal.Java_Exports import make_string_builder +from project.Internal.Java_Exports import make_string_builder, make_double_builder, make_long_builder, make_date_builder_adapter +polyglot java import org.enso.table.data.column.builder.object.DateBuilder polyglot java import org.enso.table.data.column.operation.map.MapOperationProblemBuilder polyglot java import org.enso.table.data.column.storage.Storage as Java_Storage polyglot java import org.enso.table.data.table.Column as Java_Column @@ -656,6 +658,113 @@ type Column rs = s.iif true_val false_val storage_type Column.Value (Java_Column.new new_name rs) + ## Round the values in a numeric column to a specified number of decimal + places. + + For integers, rounding to 0 or more decimal places simply returns the + argument. For negative decimal places, see below. + + By default, rounding uses "asymmetric round-half-up", also known as + "round towards positive infinity." If use_bankers=True, then it uses + "round-half-even", also known as "banker's rounding". 
+ + If the column is of type `Float` and `decimal_places` > 0, `round` + returns a column of `Float`; otherwise, it returns a column of + `Integer`. + + Arguments: + - decimal_places: The number of decimal places to round to. Can be + negative, which results in rounding to positive integer powers of 10. + Must be between -15 and 15 (inclusive). + - use_bankers: Rounds mid-point to nearest even number. + - on_problems: Specifies how to handle if a problem occurs, raising as a + warning by default. + + ! Error Conditions + Reports `Illegal_Argument` if the number is 15 or more decimal places. + Above 14 digits, it is possible that the underlying long, converted to + double in the rounding process, would lose precision in the least + significant bits. + (See https://en.wikipedia.org/wiki/Double-precision_floating-point_format.) + + If `decimal_places` is outside the range -15..15 (inclusive), an + `Illegal_Argument` error is thrown. + + ? Negative decimal place counts + Rounding to `n` digits can be thought of as "rounding to the nearest + multiple of 10^(-n)". For negative decimal counts, this results in + rounding to the nearest positive integer power of 10. + + > Example + Round a column of `Decimal` values`. + + Column.from_vector "foo" [1.2, 2.3, 3.6] . round == (Column.from_vector "foo" [1, 2, 4]) + round : Integer -> Boolean -> Problem_Behavior -> Column | Illegal_Argument | Invalid_Value_Type + round self decimal_places=0 use_bankers=False on_problems=Report_Warning = Value_Type.expect_numeric self <| + # If it's an integer column and decimal_places >=0 then it's a no-op. 
+ if self.value_type.is_integer && decimal_places >= 0 then self else + returns_double = decimal_places > 0 + builder = if returns_double then make_double_builder else make_long_builder + fun = _.round decimal_places use_bankers + Column_Ops.map_over_storage self fun builder skip_nothing=True on_problems=on_problems + + ## ALIAS int + + If the column is numeric, truncate the floating-point values to an + integer by dropping the fractional part. This is equivalent to + "round-toward-zero". If the column is of type `Date_Time`, truncates the + values to `Date`. + + > Example + Truncate a column of `Decimal` values. + + Column.from_vector "foo" [1.25, 2.33, 3.57] . truncate == (Column.from_vector "foo" [1, 2, 3]) + + > Example + Truncate a column of `Date_Time` values. + date_times = Column.from_vector "foo" [Date_Time.new 2020 10 24 1 2 3, Date_Time.new 2020 10 24 1 2 3] + dates = Column.from_vector "foo" [Date.new 2020 10 24, Date.new 2020 10 24] + col.truncate == dates + truncate : Column ! Invalid_Value_Type + truncate self = + case self.value_type.is_numeric of + True -> + fun = _.truncate + Column_Ops.map_over_storage self fun make_long_builder skip_nothing=True + False -> case self.value_type == Value_Type.Date_Time of + True -> + fun = _.date + Column_Ops.map_over_storage self fun make_date_builder_adapter skip_nothing=True + False -> Error.throw <| Invalid_Value_Type.Column "Numeric or Date_Time" self.value_type + + ## Computes the nearest integer above this number for values in a numeric + column. + + Returns a column of `Integer`. + + > Example + Take the ceiling of a column of `Decimal` values. + + Column.from_vector "foo" [1.25, 2.33, 3.57] . ceil == (Column.from_vector "foo" [2, 3, 4]) + ceil : Column ! Invalid_Value_Type + ceil self = Value_Type.expect_numeric self <| + fun = _.ceil + Column_Ops.map_over_storage self fun make_long_builder skip_nothing=True + + ## Computes the nearest integer below this number for values in a numeric + column. 
+ + Returns a column of `Integer`. + + > Example + Take the floor of a column of `Decimal` values. + + Column.from_vector "foo" [1.25, 2.33, 3.57] . floor == (Column.from_vector "foo" [1, 2, 3]) + floor : Column ! Invalid_Value_Type + floor self = Value_Type.expect_numeric self <| + fun = _.floor + Column_Ops.map_over_storage self fun make_long_builder skip_nothing=True + ## Returns a column of first non-`Nothing` value on each row of `self` and `values` list. @@ -1279,18 +1388,18 @@ type Column new_column = case format of "" -> formatter = .to_text - map_over_storage self formatter make_string_builder + Column_Ops.map_over_storage self formatter make_string_builder on_problems=Problem_Behavior.Report_Error Nothing -> formatter = .to_text - map_over_storage self formatter make_string_builder + Column_Ops.map_over_storage self formatter make_string_builder on_problems=Problem_Behavior.Report_Error _ : Text -> formatter = create_formatter formatter.if_not_error <| - map_over_storage self (formatter format=format) make_string_builder + Column_Ops.map_over_storage self (formatter format=format) make_string_builder on_problems=Problem_Behavior.Report_Error format_column : Column -> Value_Type.expect_text format_column <| formatter = create_formatter formatter.if_not_error <| - map_2_over_storage self format_column formatter make_string_builder + Column_Ops.map_2_over_storage self format_column formatter make_string_builder _ -> Error.throw <| Illegal_Argument.Error <| "Unsupported format type: " + format.to_text new_column diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Format.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Format.enso index 48625a8dcc8c..08a5be12231f 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Format.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Format.enso @@ -56,59 +56,3 @@ handle_illegal_argument_exception format_string ~action = Error.throw 
(Illegal_Argument.Error msg) Panic.catch IllegalArgumentException handler=handler <| Panic.catch UnsupportedTemporalTypeException handler=handler action - -## PRIVATE - Iterate over a range, exiting early if the body produces an `Error`. -each_propagate : Range -> (Number -> Any) -> Nothing ! Error -each_propagate range function = - if range.step == 0 then Error.throw (Illegal_State.Error "A range with step = 0 is ill-formed.") else - end_condition = if range.step > 0 then (>=) else (<=) - go current = - if end_condition current range.end then Nothing else - result = function current - result.if_not_error <| - @Tail_Call go current+range.step - go range.start - -## PRIVATE - Map a text-returning function over the column values, using Storage directly. - The output column has the same name as the input. -map_over_storage : Column -> (Any -> Text) -> (Integer -> Any) -> Boolean -> Column -map_over_storage input_column function builder skip_nothing=True = - input_storage = input_column.java_column.getStorage - num_input_rows = input_storage.size - output_storage_builder = builder num_input_rows - ok = each_propagate (0.up_to num_input_rows) i-> - input_value = input_storage.getItemBoxed i - if skip_nothing && input_value.is_nothing then output_storage_builder.append Nothing else - output_value = function input_value - output_value.if_not_error - output_storage_builder.append output_value - ok.if_not_error <| - output_storage = output_storage_builder.seal - Column.from_storage input_column.name output_storage - -## PRIVATE - Map a text-returning function over the values of two columns, using Storage - directly. The output column has the same name as the first input column. - `skip_nothing` applies to the first input to the function, not both inputs. 
-map_2_over_storage : Column -> Column -> (Any -> Any -> Text) -> (Integer -> Any) -> Boolean -> Column -map_2_over_storage input_column_0 input_column_1 function builder skip_nothing=True = - input_storage_0 = input_column_0.java_column.getStorage - input_storage_1 = input_column_1.java_column.getStorage - case input_storage_0.size != input_storage_1.size of - True -> - msg = "Column lengths differ: " + input_storage_0.size.to_text + " != " + input_storage_1.size.to_text - Error.throw (Illegal_Argument.Error msg) - False -> - num_input_rows = input_storage_0.size - output_storage_builder = builder num_input_rows - ok = each_propagate (0.up_to num_input_rows) i-> - input_value_0 = input_storage_0.getItemBoxed i - input_value_1 = input_storage_1.getItemBoxed i - if skip_nothing && input_value_0.is_nothing then output_storage_builder.append Nothing else - output_value = function input_value_0 input_value_1 - output_storage_builder.append output_value - ok.if_not_error <| - output_storage = output_storage_builder.seal - Column.from_storage input_column_0.name output_storage diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Ops.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Ops.enso new file mode 100644 index 000000000000..304b2e9c3b33 --- /dev/null +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Ops.enso @@ -0,0 +1,51 @@ +from Standard.Base import all + +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +import project.Data.Column.Column +import project.Internal.Problem_Builder.Problem_Builder + +## PRIVATE + Map a text-returning function over the column values, using Storage directly. + The output column has the same name as the input. 
+map_over_storage : Column -> (Any -> Text) -> (Integer -> Any) -> Boolean -> Problem_Behavior -> Column +map_over_storage input_column function builder skip_nothing=True on_problems=Report_Warning = + problem_builder = Problem_Builder.new + input_storage = input_column.java_column.getStorage + num_input_rows = input_storage.size + output_storage_builder = builder num_input_rows + 0.up_to num_input_rows . each i-> + input_value = input_storage.getItemBoxed i + if skip_nothing && input_value.is_nothing then output_storage_builder.append Nothing else + output_value = function input_value . catch Any err-> + problem_builder.report_other_warning err + Nothing + output_storage_builder.append output_value + output_storage = output_storage_builder.seal + new_column = Column.from_storage input_column.name output_storage + problem_builder.attach_problems_after on_problems new_column + +## PRIVATE + Map a text-returning function over the values of two columns, using Storage + directly. The output column has the same name as the first input column. + `skip_nothing` applies to the first input to the function, not both inputs. +map_2_over_storage : Column -> Column -> (Any -> Any -> Text) -> (Integer -> Any) -> Boolean -> Column +map_2_over_storage input_column_0 input_column_1 function builder skip_nothing=True = + input_storage_0 = input_column_0.java_column.getStorage + input_storage_1 = input_column_1.java_column.getStorage + case input_storage_0.size != input_storage_1.size of + True -> + msg = "Column lengths differ: " + input_storage_0.size.to_text + " != " + input_storage_1.size.to_text + Error.throw (Illegal_Argument.Error msg) + False -> + num_input_rows = input_storage_0.size + output_storage_builder = builder num_input_rows + ok = 0.up_to num_input_rows . 
each_propagate i-> + input_value_0 = input_storage_0.getItemBoxed i + input_value_1 = input_storage_1.getItemBoxed i + if skip_nothing && input_value_0.is_nothing then output_storage_builder.append Nothing else + output_value = function input_value_0 input_value_1 + output_storage_builder.append output_value + ok.if_not_error <| + output_storage = output_storage_builder.seal + Column.from_storage input_column_0.name output_storage diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Exports.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Exports.enso index 76808aeb9b6f..11ee667008f8 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Exports.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Exports.enso @@ -9,11 +9,13 @@ polyglot java import org.enso.table.data.table.Table as Java_Table polyglot java import org.enso.table.data.index.DefaultIndex polyglot java import org.enso.table.data.column.storage.Storage polyglot java import org.enso.table.data.column.builder.object.BoolBuilder +polyglot java import org.enso.table.data.column.builder.object.DateBuilder polyglot java import org.enso.table.data.column.builder.object.DateTimeBuilder polyglot java import org.enso.table.data.column.builder.object.InferredBuilder polyglot java import org.enso.table.data.column.builder.object.NumericBuilder polyglot java import org.enso.table.data.column.builder.object.StringBuilder polyglot java import org.enso.table.data.column.builder.object.TimeOfDayBuilder +polyglot java import org.enso.table.data.column.storage.Storage as Java_Storage ## PRIVATE make_bool_builder : BoolBuilder @@ -39,6 +41,10 @@ make_time_of_day_builder initial_size = TimeOfDayBuilder.new initial_size make_date_time_builder : Integer -> DateTimeBuilder make_date_time_builder initial_size = DateTimeBuilder.new initial_size +## PRIVATE +make_date_builder : Integer -> DateBuilder +make_date_builder initial_size = DateBuilder.new 
initial_size + ## PRIVATE make_inferred_builder : Integer -> InferredBuilder make_inferred_builder initial_size = InferredBuilder.new initial_size @@ -46,3 +52,22 @@ make_inferred_builder initial_size = InferredBuilder.new initial_size ## PRIVATE make_column : Text -> Storage -> Column make_column name storage = Column.Value (Java_Column.new name storage) + +## PRIVATE + Wrapper around a DateBuilder that uses DateBuilder.appendDate() to append a + value (instead of builder.append()) +type DateBuilderAdapter + Value (date_builder : DateBuilder) + + append : Date -> Nothing + append self date = + self.date_builder.appendDate date + + seal : Java_Storage + seal self = self.date_builder.seal + +## PRIVATE + DateBuilderAdapter constructor that matches the interface of the other + make_*_builder functions. +make_date_builder_adapter : Integer -> DateBuilderAdapter +make_date_builder_adapter n = DateBuilderAdapter.Value (make_date_builder n) diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateBuilder.java b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateBuilder.java index 3c939d908f13..3a83f3d47de8 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateBuilder.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateBuilder.java @@ -28,6 +28,10 @@ public void appendNoGrow(Object o) { data[currentSize++] = (LocalDate) o; } + public void appendDate(LocalDate date) { + append(date); + } + @Override public boolean accepts(Object o) { return o instanceof LocalDate; diff --git a/test/Table_Tests/src/In_Memory/Column_Spec.enso b/test/Table_Tests/src/In_Memory/Column_Spec.enso index 6318ad66d212..66624dda10ca 100644 --- a/test/Table_Tests/src/In_Memory/Column_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Column_Spec.enso @@ -1,94 +1,151 @@ from Standard.Base import all -import Standard.Base.Errors.Common.Index_Out_Of_Bounds -import 
Standard.Base.Errors.Illegal_Argument.Illegal_Argument -from Standard.Table import Column, Value_Type +import project.Util +import Standard.Base.Errors.Common.Index_Out_Of_Bounds +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Examples - -from Standard.Test import Test, Test_Suite import Standard.Test.Extensions +from Standard.Table import Column, Value_Type +from Standard.Test import Test, Test_Suite, Problems + main = Test_Suite.run_main spec -spec = Test.group "Columns" <| - test_column = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] - empty_column = Column.from_vector "Test" [] - - Test.specify "should allow getting specific elements" <| - test_column.at 0 . should_equal 1 - test_column.at 2 . should_equal 5 - test_column.at 5 . should_equal 6 - test_column.at 6 . should_fail_with Index_Out_Of_Bounds - empty_column.at 0 . should_fail_with Index_Out_Of_Bounds - - Test.specify "should be able to take the first n elements" <| - expected_1 = Column.from_vector "Test" [1, 3, 5] - expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] - expected_3 = Column.from_vector "Test" [] - test_column.take (First 3) . to_vector . should_equal expected_1.to_vector - test_column.take (First 7) . to_vector . should_equal expected_2.to_vector - test_column.take (First 0) . to_vector . should_equal expected_3.to_vector - - Test.specify "should be able to take the first n elements by Integer" <| - expected_1 = Column.from_vector "Test" [1, 3, 5] - expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] - expected_3 = Column.from_vector "Test" [] - test_column.take 3 . to_vector . should_equal expected_1.to_vector - test_column.take 7 . to_vector . should_equal expected_2.to_vector - test_column.take 0 . to_vector . 
should_equal expected_3.to_vector - - Test.specify "should be able to take the last n elements" <| - expected_1 = Column.from_vector "Test" [2, 4, 6] - expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] - expected_3 = Column.from_vector "Test" [] - test_column.take (Last 3) . to_vector . should_equal expected_1.to_vector - test_column.take (Last 7) . to_vector . should_equal expected_2.to_vector - test_column.take (Last 0) . to_vector . should_equal expected_3.to_vector - - Test.specify "should be able to get the first element" <| - test_column.first . should_equal 1 - empty_column.first.should_fail_with Index_Out_Of_Bounds - - Test.specify "should be able to get the last element" <| - test_column.last . should_equal 6 - empty_column.last.should_fail_with Index_Out_Of_Bounds - - Test.specify "should be able to be reversed" <| - expected_1 = Column.from_vector "Test" [6, 4, 2, 5, 3, 1] - test_column.reverse.to_vector . should_equal expected_1.to_vector - empty_column.reverse.to_vector . should_equal empty_column.to_vector - - Test.specify "should allow to fill missing values from another column" <| - nulled = Column.from_vector "col" [0, Nothing, 4, 5, Nothing, Nothing] - defaults = Column.from_vector "def" [1, 2, 10, 20, Nothing, 30] - r = nulled.fill_nothing defaults - r.to_vector . should_equal [0, 2, 4, 5, Nothing, 30] - - Test.specify "should allow to count duplicate value occurences" <| - c_1 = Column.from_vector "c 1" [0, 1, 2, 2, 1, 0, 2] - c_1.duplicate_count.to_vector.should_equal [0, 0, 0, 1, 1, 1, 2] - - c_2 = Column.from_vector "c 2" ["foo", "bar", "foo", "baz", "bar"] - c_2.duplicate_count.to_vector.should_equal [0, 0, 1, 0, 1] - - Test.specify "should result in correct Storage if operation allows it" <| - another = Column.from_vector "Test" [10, 20, 30, 40, 50, 60] - (test_column + 1).value_type . should_equal Value_Type.Integer - (test_column - 1).value_type . should_equal Value_Type.Integer - (test_column * 2).value_type . 
should_equal Value_Type.Integer - (test_column * 1.5).value_type . should_equal Value_Type.Float - (test_column + another).value_type . should_equal Value_Type.Integer - - Test.specify "should not allow invalid column names" <| - c1 = Column.from_vector "" [1, 2, 3] - c1.should_fail_with Illegal_Argument - - c2 = Column.from_vector Nothing [1, 2, 3] - c2.should_fail_with Illegal_Argument - - c3 = Column.from_vector '\0' [1, 2, 3] - c3.should_fail_with Illegal_Argument - - c4 = Column.from_vector 'foo\0bar' [1, 2, 3] - c4.should_fail_with Illegal_Argument +spec = + Test.group "Columns" <| + test_column = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] + empty_column = Column.from_vector "Test" [] + + Test.specify "should allow getting specific elements" <| + test_column.at 0 . should_equal 1 + test_column.at 2 . should_equal 5 + test_column.at 5 . should_equal 6 + test_column.at 6 . should_fail_with Index_Out_Of_Bounds + empty_column.at 0 . should_fail_with Index_Out_Of_Bounds + + Test.specify "should be able to take the first n elements" <| + expected_1 = Column.from_vector "Test" [1, 3, 5] + expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] + expected_3 = Column.from_vector "Test" [] + test_column.take (First 3) . to_vector . should_equal expected_1.to_vector + test_column.take (First 7) . to_vector . should_equal expected_2.to_vector + test_column.take (First 0) . to_vector . should_equal expected_3.to_vector + + Test.specify "should be able to take the first n elements by Integer" <| + expected_1 = Column.from_vector "Test" [1, 3, 5] + expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] + expected_3 = Column.from_vector "Test" [] + test_column.take 3 . to_vector . should_equal expected_1.to_vector + test_column.take 7 . to_vector . should_equal expected_2.to_vector + test_column.take 0 . to_vector . 
should_equal expected_3.to_vector + + Test.specify "should be able to take the last n elements" <| + expected_1 = Column.from_vector "Test" [2, 4, 6] + expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] + expected_3 = Column.from_vector "Test" [] + test_column.take (Last 3) . to_vector . should_equal expected_1.to_vector + test_column.take (Last 7) . to_vector . should_equal expected_2.to_vector + test_column.take (Last 0) . to_vector . should_equal expected_3.to_vector + + Test.specify "should be able to get the first element" <| + test_column.first . should_equal 1 + empty_column.first.should_fail_with Index_Out_Of_Bounds + + Test.specify "should be able to get the last element" <| + test_column.last . should_equal 6 + empty_column.last.should_fail_with Index_Out_Of_Bounds + + Test.specify "should be able to be reversed" <| + expected_1 = Column.from_vector "Test" [6, 4, 2, 5, 3, 1] + test_column.reverse.to_vector . should_equal expected_1.to_vector + empty_column.reverse.to_vector . should_equal empty_column.to_vector + + Test.specify "should allow to fill missing values from another column" <| + nulled = Column.from_vector "col" [0, Nothing, 4, 5, Nothing, Nothing] + defaults = Column.from_vector "def" [1, 2, 10, 20, Nothing, 30] + r = nulled.fill_nothing defaults + r.to_vector . should_equal [0, 2, 4, 5, Nothing, 30] + + Test.specify "should allow to count duplicate value occurences" <| + c_1 = Column.from_vector "c 1" [0, 1, 2, 2, 1, 0, 2] + c_1.duplicate_count.to_vector.should_equal [0, 0, 0, 1, 1, 1, 2] + + c_2 = Column.from_vector "c 2" ["foo", "bar", "foo", "baz", "bar"] + c_2.duplicate_count.to_vector.should_equal [0, 0, 1, 0, 1] + + Test.specify "should result in correct Storage if operation allows it" <| + another = Column.from_vector "Test" [10, 20, 30, 40, 50, 60] + (test_column + 1).value_type . should_equal Value_Type.Integer + (test_column - 1).value_type . should_equal Value_Type.Integer + (test_column * 2).value_type . 
should_equal Value_Type.Integer + (test_column * 1.5).value_type . should_equal Value_Type.Float + (test_column + another).value_type . should_equal Value_Type.Integer + + Test.specify "should not allow invalid column names" <| + c1 = Column.from_vector "" [1, 2, 3] + c1.should_fail_with Illegal_Argument + + c2 = Column.from_vector Nothing [1, 2, 3] + c2.should_fail_with Illegal_Argument + + c3 = Column.from_vector '\0' [1, 2, 3] + c3.should_fail_with Illegal_Argument + + c4 = Column.from_vector 'foo\0bar' [1, 2, 3] + c4.should_fail_with Illegal_Argument + + Test.group "Rounding" <| + Test.specify "should be able to round a column of decimals" <| + Column.from_vector "foo" [1.2, 2.3, 3.6] . round . should_equal (Column.from_vector "foo" [1, 2, 4]) + Column.from_vector "foo" [1.25, 2.33, 3.57] . round 1 . should_equal <| Column.from_vector "foo" [1.3, 2.3, 3.6] + Column.from_vector "foo" [12.0, 24.0, 25.0, 29.0] . round -1 . should_equal <| Column.from_vector "foo" [10, 20, 30, 30] + Column.from_vector "foo" [1.5, 2.5, 3.5] . round use_bankers=True . should_equal <| Column.from_vector "foo" [2, 2, 4] + + Test.specify "decimal rounding should return the correct column type" <| + Column.from_vector "foo" [1.2, 2.3, 3.6] . round . value_type . should_equal Value_Type.Integer + Column.from_vector "foo" [1.2, 2.3, 3.6] . round 1 . value_type . should_equal Value_Type.Float + + Test.specify "should be able to round a column of integers" <| + Column.from_vector "foo" [12, 24, 25, 29] . round . should_equal <| Column.from_vector "foo" [12, 24, 25, 29] + Column.from_vector "foo" [12, 24, 25, 29] . round -1 . should_equal <| Column.from_vector "foo" [10, 20, 30, 30] + Column.from_vector "foo" [15, 25, 35] . round -1 use_bankers=True . should_equal <| Column.from_vector "foo" [20, 20, 40] + + Test.specify "integer rounding should return the correct column type" <| + Column.from_vector "foo" [12, 24, 25, 29] . round -1 . value_type . 
should_equal Value_Type.Integer + + Test.specify "should report out-of-range values as problems" <| + col = Column.from_vector "foo" [12, 23, 99999999999999999] + expected = Column.from_vector "foo" [10, 20, Nothing] + action = col.round -1 on_problems=_ + problems = [Illegal_Argument.Error "Error: `round` can only accept values between -99999999999999 and 99999999999999 (inclusive), but was 99999999999999999"] + tester = _.should_equal expected + Problems.test_problem_handling action problems tester + + Test.specify "should throw an error on decimal places out of range" <| + col = Column.from_vector "foo" [12, 23, 99999999999999999] + expected = Column.from_vector "foo" [Nothing, Nothing, Nothing] + action = col.round decimal_places=-1200 on_problems=_ + problems = [Illegal_Argument.Error "round: decimal_places must be between -15 and 15 (inclusive), but was -1200"] + tester = _.should_equal expected + Problems.test_problem_handling action problems tester + + Test.group "truncate" <| + Test.specify "should be able to truncate a column of decimals" <| + Column.from_vector "foo" [1.25, 2.33, 3.57] . truncate . should_equal <| Column.from_vector "foo" [1, 2, 3] + Column.from_vector "foo" [1.25, 2.33, 3.57] . truncate . value_type . should_equal Value_Type.Integer + + Test.group "ceil" <| + Test.specify "should be able to take the ceil of a column of decimals" <| + Column.from_vector "foo" [1.25, 2.33, 3.57] . ceil . should_equal <| Column.from_vector "foo" [2, 3, 4] + Column.from_vector "foo" [1.25, 2.33, 3.57] . ceil . value_type . should_equal Value_Type.Integer + + Test.group "floor" <| + Test.specify "should be able to take the floor of a column of decimals" <| + Column.from_vector "foo" [1.25, 2.33, 3.57] . floor . should_equal <| Column.from_vector "foo" [1, 2, 3] + Column.from_vector "foo" [1.25, 2.33, 3.57] . floor . value_type . 
should_equal Value_Type.Integer + + Test.group "Date_Time truncate" <| + Test.specify "should be able to truncate a column of Date_Times" <| + Column.from_vector "foo" [Date_Time.new 2020 10 24 1 2 3, Date_Time.new 2020 10 24 1 2 3] . truncate . should_equal <| Column.from_vector "foo" [Date.new 2020 10 24, Date.new 2020 10 24] + Column.from_vector "foo" [Date_Time.new 2020 10 24 1 2 3, Date_Time.new 2020 10 24 1 2 3] . truncate . value_type . should_equal Value_Type.Date diff --git a/test/Tests/src/Data/Numbers_Spec.enso b/test/Tests/src/Data/Numbers_Spec.enso index c03cd64f83bd..09eac34a8d7e 100644 --- a/test/Tests/src/Data/Numbers_Spec.enso +++ b/test/Tests/src/Data/Numbers_Spec.enso @@ -585,22 +585,13 @@ spec = 231.2 . round . should_be_a Integer 231.2 . round -1 . should_be_a Integer - Test.specify "Edge cases" <| - max_double = 179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368.0 - max_double . 
should_equal max_double - max_double_minus_point_five = 179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858367.5 - max_double_minus_point_six = 179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858367.4 - max_double_minus_one = 179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858367.0 - max_double_minus_point_five . round . should_equal max_double - max_double_minus_point_six . round . should_equal max_double_minus_one - - neg_max_double = -179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858368.0 - neg_max_double . 
should_equal neg_max_double - neg_max_double_minus_point_four = -179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858367.6 - neg_max_double_minus_point_five = -179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858367.5 - neg_max_double_minus_one = -179769313486231570814527423731704356798070567525844996598917476803157260780028538760589558632766878171540458953514382464234321326889464182768467546703537516986049910576551282076245490090389328944075868508455133942304583236903222948165808559332123348274797826204144723168738177180919299881250404026184124858367.0 - neg_max_double_minus_point_four . round . should_equal neg_max_double - neg_max_double_minus_point_five . round . should_equal neg_max_double_minus_one + Test.specify "Input out of range" <| + 100000000000000.0 . round . should_fail_with Illegal_Argument + -100000000000000.0 . round . should_fail_with Illegal_Argument + 100000000000000.0 . round -2 . should_fail_with Illegal_Argument + -100000000000000.0 . round -2 . should_fail_with Illegal_Argument + 99999999999999.0 . round -2 . should_equal 100000000000000 + -99999999999999.0 . round -2 . should_equal -100000000000000 Test.specify "Decimal places out of range" <| 3.1 . round 16 . should_fail_with Illegal_Argument @@ -685,7 +676,7 @@ spec = 3 . round 16 . should_fail_with Illegal_Argument 3 . round -16 . should_fail_with Illegal_Argument - Test.specify "Number out of range" <| + Test.specify "Input out of range" <| 100000000000000 . round . 
should_fail_with Illegal_Argument -100000000000000 . round . should_fail_with Illegal_Argument 100000000000000 . round -2 . should_fail_with Illegal_Argument diff --git a/test/Tests/src/Data/Range_Spec.enso b/test/Tests/src/Data/Range_Spec.enso index 64766a5052f3..a875a2700f88 100644 --- a/test/Tests/src/Data/Range_Spec.enso +++ b/test/Tests/src/Data/Range_Spec.enso @@ -164,6 +164,13 @@ spec = Test.group "Range" <| vec_mut = Vector.new_builder 1.up_to 6 . each (i -> vec_mut.append i) vec_mut.to_vector . should_equal [1, 2, 3, 4, 5] + Test.specify "should allow iteration, with error propagation and early exit" <| + vec_mut = Vector.new_builder + result = 1.up_to 6 . each_propagate i-> + if i >= 3 then Error.throw (Illegal_Argument.Error "dummy") else + vec_mut.append i + result . should_fail_with Illegal_Argument + vec_mut.to_vector . should_equal [1, 2] Test.specify "should allow efficient iteration" <| cell = Ref.new 0 n = 100000000 From 343b5fb085b02583d054f33255d204090ce2b754 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Thu, 1 Jun 2023 23:10:03 +0100 Subject: [PATCH 10/39] Execution control for Table.write and various widget tweaks... (#6835) - Adds execution control to `Table.write`. - Refactored the `Text.write` to make part reusable. - Tidied up some legacy mess in tests. - Add easier flow to go from `Text` to an `URI` to fetching data. - Add decode functions to `Response` and `Response_Body`. - Fix issue with 0 length regex matches (using same as Python and .Net approach). - Add various ALIAS entries to make function discovery easier. - Sort a lot of drop down and vector editors out (including switch to fully qualified names). 
--- CHANGELOG.md | 2 + .../lib/Standard/Base/0.0.0-dev/src/Any.enso | 10 +- .../lib/Standard/Base/0.0.0-dev/src/Data.enso | 30 ++++-- .../Base/0.0.0-dev/src/Data/Array.enso | 3 +- .../Base/0.0.0-dev/src/Data/Boolean.enso | 6 +- .../0.0.0-dev/src/Data/Filter_Condition.enso | 9 -- .../Base/0.0.0-dev/src/Data/Locale.enso | 3 +- .../Base/0.0.0-dev/src/Data/Numbers.enso | 73 +++++++++------ .../Base/0.0.0-dev/src/Data/Text.enso | 3 +- .../0.0.0-dev/src/Data/Text/Encoding.enso | 4 +- .../src/Data/Text/Regex/Pattern.enso | 11 ++- .../Base/0.0.0-dev/src/Data/Time/Date.enso | 8 +- .../0.0.0-dev/src/Data/Time/Date_Time.enso | 6 +- .../0.0.0-dev/src/Data/Time/Duration.enso | 6 +- .../Base/0.0.0-dev/src/Data/Time/Period.enso | 9 +- .../0.0.0-dev/src/Data/Time/Time_Of_Day.enso | 6 +- .../Base/0.0.0-dev/src/Data/Vector.enso | 3 +- .../0.0.0-dev/src/Network/HTTP/Response.enso | 91 +++++++++++++++++++ .../src/Network/HTTP/Response_Body.enso | 35 ++++--- .../Base/0.0.0-dev/src/System/File.enso | 6 +- .../System/File/Existing_File_Behavior.enso | 14 ++- .../src/System/File/Write_Extensions.enso | 31 ++----- .../0.0.0-dev/src/System/File_Format.enso | 17 ++-- .../0.0.0-dev/src/Connection/Connection.enso | 9 +- .../Database/0.0.0-dev/src/Data/Column.enso | 40 ++++---- .../Database/0.0.0-dev/src/Data/Table.enso | 22 +++-- .../Postgres/Postgres_Connection.enso | 2 +- .../Internal/SQLite/SQLite_Connection.enso | 2 +- .../Table/0.0.0-dev/src/Data/Column.enso | 41 +++++---- .../0.0.0-dev/src/Data/Join_Condition.enso | 36 +++----- .../Table/0.0.0-dev/src/Data/Table.enso | 34 ++++--- .../0.0.0-dev/src/Excel/Excel_Workbook.enso | 12 ++- .../src/Internal/Widget_Helpers.enso | 77 +++++++++++----- .../Table_Tests/src/Database/SQLite_Spec.enso | 1 - test/Table_Tests/src/IO/Csv_Spec.enso | 1 - .../src/IO/Delimited_Read_Spec.enso | 1 - .../src/IO/Delimited_Write_Spec.enso | 1 - test/Table_Tests/src/IO/Excel_Spec.enso | 1 - test/Table_Tests/src/IO/Formats_Spec.enso | 36 ++++++++ 
test/Tests/src/Data/Text/Regex_Spec.enso | 9 ++ test/Tests/src/Data/Text_Spec.enso | 6 ++ test/Tests/src/Network/Http_Spec.enso | 30 +++--- .../System/Reporting_Stream_Decoder_Spec.enso | 1 - .../System/Reporting_Stream_Encoder_Spec.enso | 1 - test/Visualization_Tests/src/Table_Spec.enso | 1 - 45 files changed, 485 insertions(+), 265 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 862d1a96c44a..9293ecfbf24d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -471,6 +471,7 @@ - [Implemented the `cast` operation for `Table` and `Column`.][6711] - [Added `.round` and `.int` to `Integer` and `Decimal`.][6743] - [Added `.round`, `.truncate`, `.ceil`, and `.floor` to `Column`.][6817] +- [Added execution control to `Table.write` and various bug fixes.][6835] [debug-shortcuts]: https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug @@ -684,6 +685,7 @@ [6711]: https://github.com/enso-org/enso/pull/6711 [6743]: https://github.com/enso-org/enso/pull/6743 [6817]: https://github.com/enso-org/enso/pull/6817 +[6835]: https://github.com/enso-org/enso/pull/6835 #### Enso Compiler diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso index 96d826ef9cf1..d3f91886b890 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso @@ -62,8 +62,7 @@ type Any to_display_text : Text to_display_text self = @Builtin_Method "Any.to_display_text" - ## ALIAS Equality - + ## ALIAS Equals Checks if `self` is equal to `that`. Arguments: @@ -110,8 +109,7 @@ type Any == : Any -> Boolean == self that = @Builtin_Method "Any.==" - ## ALIAS Inequality - + ## ALIAS Not Equals Checks if `self` is not equal to `that`. Arguments: @@ -134,7 +132,6 @@ type Any != self that = (self == that).not ## ALIAS Greater Than - Checks if `self` is greater than `that`. 
Arguments: @@ -162,7 +159,6 @@ type Any _ -> False ## ALIAS Greater Than or Equal - Checks if `self` is greater than or equal to `that`. Arguments: @@ -192,7 +188,6 @@ type Any _ -> False ## ALIAS Less Than - Checks if `self` is less than `that`. Arguments: @@ -220,7 +215,6 @@ type Any _ -> False ## ALIAS Less Than or Equal - Checks if `self` is less than or equal to `that`. Arguments: diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso index a4569f34a935..37dcf1a32e24 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso @@ -140,11 +140,13 @@ list_directory : (File | Text) -> Text -> Boolean -> Vector File list_directory directory name_filter=Nothing recursive=False = File.new directory . list name_filter=name_filter recursive=recursive -## Fetches from the provided URL and returns the response body. - Will error if the status code does not represent a successful response. +## ALIAS Download, HTTP Get + Fetches from the provided URI and returns the response, parsing the body if + the content-type is recognised. Returns an error if the status code does not + represent a successful response. Arguments: - - url: The URL to fetch. + - uri: The URI to fetch. - method: The HTTP method to use. Defaults to `GET`. - headers: The headers to send with the request. Defaults to an empty vector. - parse: If successful should the body be parsed to an Enso native object. @@ -159,10 +161,18 @@ fetch uri method=HTTP_Method.Get headers=[] parse=True = request = Request.new method uri parsed_headers response = HTTP.new.request request - if response.code.is_success.not then Error.throw (Request_Error.Error "Status Code" ("Request failed with status code: " + response.code.to_text + ". 
" + response.body.to_text)) else - response_headers = response.headers - content_type = response_headers.find if_missing=Nothing h-> "Content-Type".equals_ignore_case h.name - if (parse == False) || (content_type == Nothing) then response else - format = Auto_Detect.get_web_parser content_type.value uri - if format == Nothing then response else - format.read_web response + if response.code.is_success.not then Error.throw (Request_Error.Error "Status Code" ("Request failed with status code: " + response.code.to_text + ". " + response.body.decode_as_text)) else + if parse then response.decode if_unsupported=response else response + +## ALIAS Download, HTTP Get + Fetches from the URI and returns the response, parsing the body if the + content-type is recognised. Returns an error if the status code does not + represent a successful response. + + Arguments: + - method: The HTTP method to use. Defaults to `GET`. + - headers: The headers to send with the request. Defaults to an empty vector. + - parse: If successful should the body be parsed to an Enso native object. +URI.fetch : HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any +URI.fetch self method=HTTP_Method.Get headers=[] parse=True = + Data.fetch self method headers parse diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso index 726f68e4137d..7cb8176be8df 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso @@ -784,7 +784,8 @@ type Array each_with_index : (Integer -> Any -> Any) -> Nothing each_with_index self f = Vector.each_with_index self f - ## Concatenates two arrays, resulting in a new `Vector`, containing all the + ## ALIAS Concatenate + Concatenates two arrays, resulting in a new `Vector`, containing all the elements of `self`, followed by all the elements of `that`. 
Arguments: diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Boolean.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Boolean.enso index 512a0081bd20..b4105667ead8 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Boolean.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Boolean.enso @@ -15,7 +15,8 @@ type Boolean True False - ## Computes the logical and (conjunction) of two booleans. + ## ALIAS And + Computes the logical and (conjunction) of two booleans. Arguments: - that: The boolean to compute the conjunction of this with. @@ -31,7 +32,8 @@ type Boolean && : Boolean -> Boolean && self ~that = @Builtin_Method "Boolean.&&" - ## Computes the logical or (disjunction) of two booleans. + ## ALIAS Or + Computes the logical or (disjunction) of two booleans. Arguments: - that: The boolean to compute the disjunction of this with. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso index 75a0b7d48ce7..f81ffa50b9af 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso @@ -231,15 +231,6 @@ type Filter_Condition Not_In values -> "is not in " + values.to_display_text "Filter Condition: " + condition - ## PRIVATE - Gets a widget set up for a Filter_Condition. 
- default_widget : Widget - default_widget = - names = ["Equal", "Not Equal", "Is In", "Not In", "Is True", "Is False", "Is Nothing", "Not Nothing", "Is Empty", "Not Empty", "Less", "Equal Or Less", "Greater", "Equal Or Greater", "Between", "Starts With", "Ends With", "Contains", "Not Contains", "Like", "Not Like"] - values = ["(Filter_Condition.Equal)", "(Filter_Condition.Not_Equal)", "(Filter_Condition.Is_In)", "(Filter_Condition.Not_In)", "Filter_Condition.Is_True", "Filter_Condition.Is_False", "Filter_Condition.Is_Nothing", "Filter_Condition.Not_Nothing", "Filter_Condition.Is_Empty", "Filter_Condition.Not_Empty", "(Filter_Condition.Less)", "(Filter_Condition.Equal_Or_Less)", "(Filter_Condition.Greater)", "(Filter_Condition.Equal_Or_Greater)", "(Filter_Condition.Between)", "(Filter_Condition.Starts_With)", "(Filter_Condition.Ends_With)", "(Filter_Condition.Contains)", "(Filter_Condition.Not_Contains)", "(Filter_Condition.Like)", "(Filter_Condition.Not_Like)"] - options = names.zip values . map p-> Option p.first p.second - Single_Choice values=options display=Display.Always - ## PRIVATE sql_like_to_regex sql_pattern = regex_pattern = Regex_Utils.sql_like_pattern_to_regex sql_pattern diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso index be1d739a8726..a16b9317bb27 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Locale.enso @@ -445,5 +445,6 @@ type Locale widget_options : Vector Option widget_options = Locale.predefined_locale_fields.map field_name-> display_string = field_name.replace '_' ' ' . to_case (if field_name.length == 2 then Case.Upper else Case.Title) - code_string = "Locale." + field_name + fqn = Meta.get_qualified_type_name Locale + code_string = fqn + "." 
+ field_name Option display_string code_string diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso index f9e60b11a194..b7b824c3280e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso @@ -25,8 +25,7 @@ polyglot java import java.text.ParseException an Integer in its place. @Builtin_Type type Number - ## ALIAS Add - + ## ALIAS Add, Plus Adds two arbitrary numbers. Arguments: @@ -42,8 +41,7 @@ type Number + : Number -> Number + self that = @Builtin_Method "Integer.+" - ## ALIAS Subtract - + ## ALIAS Subtract, Minus Subtract an arbitrary number from this. Arguments: @@ -56,8 +54,7 @@ type Number - : Number -> Number - self that = @Builtin_Method "Integer.-" - ## ALIAS Multiply - + ## ALIAS Multiply, Times, Product Multiply two arbitrary numbers. Arguments: @@ -74,7 +71,6 @@ type Number * self that = @Builtin_Method "Integer.*" ## ALIAS Divide - Divides an this by an arbitrary number. Arguments: @@ -91,7 +87,6 @@ type Number / self that = @Builtin_Method "Integer./" ## ALIAS Power - Compute the result of raising this to the power that. Arguments: @@ -375,8 +370,8 @@ type Number floating point numbers. @Builtin_Type type Decimal - - ## Adds a deceimal and an arbitrary number. + ## ALIAS Add, Plus + Adds a decimal and an arbitrary number. Arguments: - that: The number to add to this. @@ -391,7 +386,8 @@ type Decimal + : Number -> Number + self that = @Builtin_Method "Decimal.+" - ## Subtract an arbitrary number from this. + ## ALIAS Subtract, Minus + Subtract an arbitrary number from this. Arguments: - that: The number to subtract from this. @@ -403,7 +399,8 @@ type Decimal - : Number -> Number - self that = @Builtin_Method "Decimal.-" - ## Multiply a decimal by an arbitrary number. + ## ALIAS Multiply, Times, Product + Multiply a decimal by an arbitrary number. 
Arguments: - that: The number to multiply this by. @@ -418,7 +415,8 @@ type Decimal * : Number -> Number * self that = @Builtin_Method "Decimal.*" - ## Divides a decimal by an arbitrary number. + ## ALIAS Divide + Divides a decimal by an arbitrary number. Arguments: - that: The number to divide this by. @@ -433,7 +431,8 @@ type Decimal / : Number -> Number / self that = @Builtin_Method "Decimal./" - ## Computes the remainder when dividing this by that. + ## ALIAS Modulus, Modulo + Computes the remainder when dividing this by that. Arguments: - that: The number to divide this by. @@ -453,7 +452,8 @@ type Decimal % : Number -> Number ! Arithmetic_Error % self that = @Builtin_Method "Decimal.%" - ## Compute the result of raising this to the power that. + ## ALIAS Power + Compute the result of raising this to the power that. Arguments: - that: The exponent. @@ -465,7 +465,8 @@ type Decimal ^ : Number -> Number ^ self that = @Builtin_Method "Decimal.^" - ## Checks if this is greater than that. + ## ALIAS Greater Than + Checks if this is greater than that. Arguments: - that: The number to compare this against. @@ -477,7 +478,8 @@ type Decimal > : Number -> Boolean > self that = @Builtin_Method "Decimal.>" - ## Checks if this is greater than or equal to thatthat. + ## ALIAS Greater Than or Equal + Checks if this is greater than or equal to that. Arguments: - that: The number to compare this against. @@ -489,7 +491,8 @@ type Decimal >= : Number -> Boolean >= self that = @Builtin_Method "Decimal.>=" - ## Checks if this is less than that. + ## ALIAS Less Than + Checks if this is less than that. Arguments: - that: The number to compare this against. @@ -501,7 +504,8 @@ type Decimal < : Number -> Boolean < self that = @Builtin_Method "Decimal.<" - ## Checks if this is less than or equal to thatthat. + ## ALIAS Less Than Or Equal + Checks if this is less than or equal to that. Arguments: - that: The number to compare this against. @@ -685,8 +689,8 @@ type Decimal degrade. 
@Builtin_Type type Integer - - ## Adds an integer and an arbitrary number. + ## ALIAS Add, Plus + Adds an integer and an arbitrary number. Arguments: - that: The number to add to this. @@ -701,7 +705,8 @@ type Integer + : Number -> Number + self that = @Builtin_Method "Integer.+" - ## Subtract an arbitrary number from this. + ## ALIAS Subtract, Minus + Subtract an arbitrary number from this. Arguments: - that: The number to subtract from this. @@ -713,7 +718,8 @@ type Integer - : Number -> Number - self that = @Builtin_Method "Integer.-" - ## Multiply an integer by an arbitrary number. + ## ALIAS Multiply, Times, Product + Multiply an integer by an arbitrary number. Arguments: - that: The number to multiply this by. @@ -728,7 +734,8 @@ type Integer * : Number -> Number * self that = @Builtin_Method "Integer.*" - ## Divides an integer by an arbitrary number. + ## ALIAS Divide + Divides an integer by an arbitrary number. Arguments: - that: The number to divide this by. @@ -743,7 +750,8 @@ type Integer / : Number -> Number / self that = @Builtin_Method "Integer./" - ## Computes the remainder when dividing this by that. + ## ALIAS Modulus, Modulo + Computes the remainder when dividing this by that. Arguments: - that: The number to divide this by. @@ -760,7 +768,8 @@ type Integer % : Number -> Number ! Arithmetic_Error % self that = @Builtin_Method "Integer.%" - ## Compute the result of raising this to the power that. + ## ALIAS Power + Compute the result of raising this to the power that. Arguments: - that: The exponent. @@ -772,7 +781,8 @@ type Integer ^ : Number -> Number ^ self that = @Builtin_Method "Integer.^" - ## Checks if this is greater than that. + ## ALIAS Greater Than + Checks if this is greater than that. Arguments: - that: The number to compare this against. @@ -784,7 +794,8 @@ type Integer > : Number -> Boolean > self that = @Builtin_Method "Integer.>" - ## Checks if this is greater than or equal to thatthat. 
+ ## ALIAS Greater Than or Equal + Checks if this is greater than or equal to that. Arguments: - that: The number to compare this against. @@ -796,7 +807,8 @@ type Integer >= : Number -> Boolean >= self that = @Builtin_Method "Integer.>=" - ## Checks if this is less than that. + ## ALIAS Less Than + Checks if this is less than that. Arguments: - that: The number to compare this against. @@ -808,7 +820,8 @@ type Integer < : Number -> Boolean < self that = @Builtin_Method "Integer.<" - ## Checks if this is less than or equal to thatthat. + ## ALIAS Less Than Or Equal + Checks if this is less than or equal to that. Arguments: - that: The number to compare this against. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso index 1ef1c3f51af2..6f3cc9b32bc1 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso @@ -20,7 +20,8 @@ polyglot java import org.enso.base.Text_Utils @Builtin_Type type Text - ## Concatenates the text that to the right side of this. + ## ALIAS Concatenate + Concatenates the text that to the right side of this. Arguments: - that: The text to concatenate to this. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso index 17da232ec287..027d44acef90 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Encoding.enso @@ -1,6 +1,7 @@ import project.Data.Text.Text import project.Data.Vector.Vector import project.Error.Error +import project.Meta import project.Panic.Panic import project.Errors.Encoding_Error.Encoding_Error import project.Errors.Illegal_Argument.Illegal_Argument @@ -20,7 +21,8 @@ type Encoding Gets the default drop down option for this encoding. 
default_widget : Widget default_widget = - values = [Option "UTF-8" "Encoding.utf_8", Option "ASCII" "Encoding.ascii", Option "UTF-16LE" "Encoding.utf_16_le", Option "UTF-16BE" "Encoding.utf_16_be", Option "UTF-32LE" "Encoding.utf_32_le", Option "UTF-32BE" "Encoding.utf_32_be", Option "Windows-1250" "Encoding.windows_1250", Option "Windows-1251" "Encoding.windows_1251", Option "Windows-1252" "Encoding.windows_1252", Option "Windows-1253" "Encoding.windows_1253", Option "Windows-1254" "Encoding.windows_1254", Option "Windows-1255" "Encoding.windows_1255", Option "Windows-1256" "Encoding.windows_1256", Option "Windows-1257" "Encoding.windows_1257", Option "Windows-1258" "Encoding.windows_1258"] + fqn = Meta.get_qualified_type_name Encoding + values = [Option "UTF-8" fqn+".utf_8", Option "ASCII" fqn+".ascii", Option "UTF-16LE" fqn+".utf_16_le", Option "UTF-16BE" fqn+".utf_16_be", Option "UTF-32LE" fqn+".utf_32_le", Option "UTF-32BE" fqn+".utf_32_be", Option "Windows-1250" fqn+".windows_1250", Option "Windows-1251" fqn+".windows_1251", Option "Windows-1252" fqn+".windows_1252", Option "Windows-1253" fqn+".windows_1253", Option "Windows-1254" fqn+".windows_1254", Option "Windows-1255" fqn+".windows_1255", Option "Windows-1256" fqn+".windows_1256", Option "Windows-1257" fqn+".windows_1257", Option "Windows-1258" fqn+".windows_1258"] Single_Choice values=values display=Display.When_Modified ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Pattern.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Pattern.enso index b4d95df7ea49..cb688590a7f3 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Pattern.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Pattern.enso @@ -4,6 +4,7 @@ import project.Data.Map.Map import project.Data.Numbers.Integer import project.Data.Range.Extensions import project.Data.Range.Range +import project.Data.Text.Extensions import project.Data.Text.Helpers 
import project.Data.Text.Span.Span import project.Data.Text.Span.Utf_16_Span @@ -15,6 +16,7 @@ import project.Data.Vector.Vector import project.Errors.Common.Type_Error import project.Error.Error import project.Errors.Illegal_Argument.Illegal_Argument +import project.Math import project.Meta import project.Nothing.Nothing import project.Polyglot.Polyglot @@ -97,7 +99,7 @@ type Pattern Arguments: - input: The text to match the pattern described by `self` against. - find_all : Text -> Vector Text | Type_Error + find_all : Text -> Vector Text ! Type_Error find_all self input = Helpers.expect_text input <| self.match_all input . map match_to_group_maybe @@ -352,8 +354,8 @@ type Match_Iterator Also returns the next iterator, if there was a match. next : Match_Iterator_Value next self = - regex_result = self.pattern.internal_regex_object.exec self.input self.cursor - case regex_result.isMatch of + regex_result = if self.cursor > self.input.char_vector.length then Nothing else self.pattern.internal_regex_object.exec self.input self.cursor + case regex_result.is_nothing.not && regex_result.isMatch of False -> filler_range = Range.new self.cursor (Text_Utils.char_length self.input) filler_span = (Utf_16_Span.Value filler_range self.input) @@ -363,7 +365,8 @@ type Match_Iterator filler_range = Range.new self.cursor match_start filler_span = (Utf_16_Span.Value filler_range self.input) match = Match.Value self.pattern regex_result self.input - next_cursor = match.utf_16_end 0 + ## Handle edge case where match is 0 length + next_cursor = Math.max (self.cursor + 1) (match.utf_16_end 0) next_iterator = Match_Iterator.Value self.pattern self.input next_cursor Match_Iterator_Value.Next filler_span match next_iterator diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso index 6fec6c39cbf4..033ef1acdb4a 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso +++ 
b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso @@ -425,7 +425,8 @@ type Date to_date_time : Time_Of_Day -> Time_Zone -> Date_Time to_date_time self (time_of_day=Time_Of_Day.new) (zone=Time_Zone.system) = self.to_time_builtin time_of_day zone - ## Add the specified amount of time to this instant to get another date. + ## ALIAS Add Period + Add the specified amount of time to this instant to get another date. Arguments: - amount: The time duration to add to this instant. @@ -447,7 +448,6 @@ type Date Error.throw (Illegal_Argument.Error "Illegal period argument") ## ALIAS Date Range - Creates an increasing range of dates from `self` to `end`. Arguments: @@ -472,7 +472,6 @@ type Date _ -> Error.throw (Type_Error.Error Date end "end") ## ALIAS Date Range - Creates a decreasing range of dates from `self` to `end`. Arguments: @@ -618,7 +617,8 @@ type Date if holidays.contains end_date || is_weekend end_date then @Tail_Call go (end_date - (Period.new days=1)) else end_date go end - ## Subtract the specified amount of time from this instant to get another + ## ALIAS Subtract Period + Subtract the specified amount of time from this instant to get another date. Arguments: diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso index 9cd24652241b..6a498cd65965 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Time.enso @@ -501,7 +501,8 @@ type Date_Time at_zone : Time_Zone -> Date_Time at_zone self zone = @Builtin_Method "Date_Time.at_zone" - ## Add the specified amount of time to this instant to produce a new instant. + ## ALIAS Add Period, Add Duration + Add the specified amount of time to this instant to produce a new instant. 
Arguments: - amount: The amount of time to add to this instant, either `Duration` for @@ -564,7 +565,8 @@ type Date_Time ensure_in_epoch self <| self.date.add_work_days days holidays . to_date_time self.time_of_day self.zone - ## Subtract the specified amount of time from this instant to get a new + ## ALIAS Subtract Duration, Subtract Period + Subtract the specified amount of time from this instant to get a new instant. Produces a warning if the resulting date time is before an Enso epoch. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Duration.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Duration.enso index b71cf1d4bc68..062e81cc3842 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Duration.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Duration.enso @@ -128,7 +128,8 @@ type Duration duration = Duration.new nanoseconds=(end - start) Pair.new duration result - ## Add the specified amount of time to this duration. + ## ALIAS Add Duration + Add the specified amount of time to this duration. Arguments: - that: The duration to add to `self`. @@ -152,7 +153,8 @@ type Duration Panic.catch ArithmeticException (self.plus_builtin that) err-> Error.throw (Time_Error.Error err.payload.getMessage) - ## Subtract the specified amount of time from this duration. + ## ALIAS Subtract Duration + Subtract the specified amount of time from this duration. Arguments: - that: The duration to subtract from `self`. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso index 6e2c81dc4f98..2d4c3dd8fc27 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso @@ -97,7 +97,8 @@ type Period days : Integer days self = self.internal_period.getDays - ## Add the specified amount of time to this period. 
+ ## ALIAS Add Period + Add the specified amount of time to this period. Arguments: - other_period: The period to add to `self`. Note that this cannot be a @@ -115,7 +116,8 @@ type Period catch_java_exceptions "Period.+" <| Period.Value (self.internal_period.plus other_period.internal_period) - ## Subtract a specified amount of time from this period. + ## ALIAS Subtract Period + Subtract a specified amount of time from this period. Arguments: - other_period: Other Period to add to this Period. Note that this @@ -134,7 +136,8 @@ type Period catch_java_exceptions "Period.-" <| Period.Value (self.internal_period.minus other_period.internal_period) - ## Multiply the amount of time in this period by the specified scalar. + ## ALIAS Multiply, Times + Multiply the amount of time in this period by the specified scalar. Arguments: - factor: The scalar to multiply by. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso index e6b8b0d035a6..662e2817071f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Time_Of_Day.enso @@ -253,7 +253,8 @@ type Time_Of_Day to_date_time : Date -> Time_Zone -> Date_Time to_date_time self date (zone=Time_Zone.system) = self.to_date_time_builtin date zone - ## Add the specified amount of time to this instant to get a new instant. + ## ALIAS Add Duration + Add the specified amount of time to this instant to get a new instant. Arguments: - amount: The amount of time to add to this instant. 
Can be only @@ -270,7 +271,8 @@ type Time_Of_Day duration : Duration -> self.plus_builtin duration _ : Period -> Error.throw (Time_Error.Error "Time_Of_Day does not support date intervals (periods)") - ## Subtract the specified amount of time from this instant to get a new + ## ALIAS Subtract Duration + Subtract the specified amount of time from this instant to get a new instant. Arguments: diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso index 46f95679045e..84dbf1d5446e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso @@ -651,7 +651,8 @@ type Vector a "and " + remaining_count.to_text + " more elements" prefix.map .to_text . join ", " "[" " "+remaining_text+"]" - ## Concatenates two vectors, resulting in a new `Vector`, containing all the + ## ALIAS Concatenate + Concatenates two vectors, resulting in a new `Vector`, containing all the elements of `self`, followed by all the elements of `that`. 
Arguments: diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso index 24865df1f9b6..f33dbdbfb700 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso @@ -1,9 +1,26 @@ +import project.Any.Any +import project.Data.Boolean.Boolean import project.Data.Json.JS_Object +import project.Data.Numbers.Number +import project.Data.Text.Encoding.Encoding +import project.Data.Text.Extensions +import project.Data.Text.Text import project.Data.Vector.Vector +import project.Error.Error +import project.Errors.Illegal_Argument.Illegal_Argument +import project.Meta +import project.Nothing.Nothing import project.Network.HTTP.Header.Header import project.Network.HTTP.HTTP_Status_Code.HTTP_Status_Code import project.Network.HTTP.Response_Body.Response_Body +from project.System.File_Format import Auto_Detect, File_Format, format_types + +import project.Metadata.Widget +from project.Metadata.Widget import Single_Choice +from project.Metadata.Choice import Option +import project.Metadata.Display + polyglot java import org.enso.base.Http_Utils type Response @@ -30,6 +47,12 @@ type Response header_entries = Vector.from_polyglot_array (Http_Utils.get_headers self.internal_http_response.headers) header_entries.map e-> Header.new e.getKey e.getValue + ## Get the response content type. + content_type : Text | Nothing + content_type self = + content_type_optional = self.internal_http_response.headers.firstValue "Content-Type" + if content_type_optional.isPresent then content_type_optional.get else Nothing + ## Get the response body. > Example @@ -54,6 +77,46 @@ type Response code : HTTP_Status_Code code self = HTTP_Status_Code.Value self.internal_http_response.statusCode + ## ALIAS Parse + Uses the format to decode the body. 
+ If using `Auto_Detect`, the content-type will be used to determine the + format. + @format decode_format_selector + decode : File_Format -> Any -> Any + decode self format=Auto_Detect ~if_unsupported=(Error.throw (Unsupported_Content_Type.Error self.content_type)) = + case format of + Auto_Detect -> + content_type = self.content_type + format = if content_type.is_nothing then Nothing else + Auto_Detect.get_web_parser content_type (self.internal_http_response.uri.toString) + if format.is_nothing then if_unsupported else + format.read_web self + _ -> + type_obj = Meta.type_of format + if can_decode type_obj then format.read_web self else + Error.throw (Illegal_Argument.Error type_obj.to_text+" cannot be used to decode from the web.") + + ## ALIAS Parse as Text + Decodes the body to a Text value. + @encoding Encoding.default_widget + decode_as_text : Encoding -> Text + decode_as_text self encoding=Encoding.utf_8 = + self.body.decode_as_text encoding + + ## ALIAS Parse as JSON, Parse JSON + Decodes the body as JSON. + + > Example + Convert a response from JSON. + + import Standard.Examples + + example_to_text = Examples.get_geo_data.decode_as_json + @encoding Encoding.default_widget + decode_as_json : Encoding -> JS_Object | Boolean | Number | Nothing | Text | Vector + decode_as_json self encoding=Encoding.utf_8 = + self.decode_as_text encoding . parse_json + ## PRIVATE Convert to a JavaScript Object representing this Response. @@ -69,3 +132,31 @@ type Response type_pair = ["type", "Response"] cons_pair = ["constructor", "Value"] JS_Object.from_pairs [type_pair, cons_pair, ["headers", self.headers], ["body", self.body], ["code", self.code]] + +## PRIVATE +type Unsupported_Content_Type + ## PRIVATE + A type representing an unsupported content type. + + Arguments: + - content_type: The content type that is unsupported. + Error (content_type : Text | Nothing) + + ## PRIVATE + Convert the error to a human readable string. 
+ to_display_text : Text + to_display_text self = case self.content_type of + Nothing -> "The response did not contain a content type." + _ : Text -> "The content type '" + self.content_type +"' cannot be automatically decoded." + +## PRIVATE +can_decode : File_Format -> Boolean +can_decode type = Meta.meta type . methods . contains "read_web" + +## PRIVATE + Selector for decoding from the web. +decode_format_selector : Widget +decode_format_selector = + all_types = [Auto_Detect] + (format_types.filter can_decode) + make_name type_obj = type_obj.to_text.replace "_Format" "" . replace "_" " " + Single_Choice display=Display.Always values=(all_types.map n->(Option (make_name n) (File_Format.constructor_code n))) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso index fa0b9281f164..2059040fbc87 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response_Body.enso @@ -2,6 +2,7 @@ import project.Data.Boolean.Boolean import project.Data.Json.Json import project.Data.Json.JS_Object import project.Data.Numbers.Number +import project.Data.Text.Encoding.Encoding import project.Data.Text.Extensions import project.Data.Text.Text import project.Data.Vector.Vector @@ -18,10 +19,31 @@ type Response_Body - bytes: The body of the response as binary data. Value bytes + ## ALIAS Parse as Text + Decodes the body to a Text value. + @encoding Encoding.default_widget + decode_as_text : Encoding -> Text + decode_as_text self encoding=Encoding.utf_8 = + Text.from_bytes self.bytes encoding + + ## ALIAS Parse as JSON, Parse JSON + Decodes the body as JSON. + + > Example + Convert a response from JSON. 
+ + import Standard.Examples + + example_to_text = Examples.get_geo_data.decode_as_json + @encoding Encoding.default_widget + decode_as_json : Encoding -> JS_Object | Boolean | Number | Nothing | Text | Vector + decode_as_json self encoding=Encoding.utf_8 = + self.decode_as_text encoding . parse_json + ## PRIVATE Convert response body to Text. to_text : Text - to_text self = Text.from_utf_8 self.bytes + to_text self = "Response_Body [" + self.bytes.length.to_text + " bytes]" ## Write response body to a File. @@ -41,14 +63,3 @@ type Response_Body to_file self file = self.bytes.write_bytes file file - - ## Convert response body from JSON to the Enso data types. - - > Example - Convert a response from JSON. NOTE: This example makes a network request. - - import Standard.Examples - - example_to_text = Examples.get_geo_data.parse_json - parse_json : JS_Object | Boolean | Number | Nothing | Text | Vector - parse_json self = Json.parse self.to_text diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso index 31372b8f74ba..8855ad50bc78 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso @@ -11,6 +11,7 @@ import project.Data.Text.Text import project.Data.Time.Time_Of_Day.Time_Of_Day import project.Data.Vector.Vector import project.Error.Error +import project.Errors.Common.Dry_Run_Operation import project.Errors.Common.Forbidden_Operation import project.Errors.Encoding_Error.Encoding_Error import project.Errors.File_Error.File_Error @@ -88,7 +89,10 @@ type File if self.exists && copy_original then Context.Output.with_enabled <| self.copy_to temp replace_existing=True - temp + + ## Attach a warning to the file that it is a dry run + warning = Dry_Run_Operation.Warning "Only a dry run has occurred, with data written to a temporary file." 
+ Warning.attach warning temp ## ALIAS Current Directory diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Existing_File_Behavior.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Existing_File_Behavior.enso index 5d9386786209..0567934321ec 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Existing_File_Behavior.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Existing_File_Behavior.enso @@ -4,6 +4,7 @@ import project.Errors.File_Error.File_Error import project.Nothing.Nothing import project.Panic.Panic import project.Panic.Caught_Panic +import project.Runtime.Context import project.System.File.File import project.System.File.File_Access.File_Access import project.System.File.Output_Stream @@ -33,6 +34,17 @@ type Existing_File_Behavior raised. Error + ## PRIVATE + Adjust the Existing_File_Behavior to take account of Context enablement. + get_effective_behavior : File -> Boolean -> Existing_File_Behavior ! File_Error + get_effective_behavior self file is_enabled=Context.Output.is_enabled = + if is_enabled then self else + case self of + Existing_File_Behavior.Backup -> Existing_File_Behavior.Overwrite + Existing_File_Behavior.Error -> + if file.exists then Error.throw (File_Error.Already_Exists file) else Existing_File_Behavior.Overwrite + _ -> self + ## PRIVATE Runs the `action` which is given a file output stream and should write the required contents to it. @@ -58,7 +70,7 @@ type Existing_File_Behavior handle_file_already_exists = catch_already_exists handle_existing_file handle_internal_dataflow = Panic.catch Internal_Write_Operation_Errored handler=handle_write_failure_dataflow ## We first attempt to write the file to the original - destination, but if that files due to the file already + destination, but if that fails due to the file already existing, we will run the alternative algorithm which uses a temporary file and creates a backup. 
handle_file_already_exists <| handle_internal_dataflow <| diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Write_Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Write_Extensions.enso index f44213fd67a1..14b09fbcd359 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Write_Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File/Write_Extensions.enso @@ -3,7 +3,6 @@ import project.Data.Text.Encoding.Encoding import project.Data.Text.Extensions import project.Data.Vector.Vector import project.Error.Error -import project.Errors.Common.Dry_Run_Operation import project.Errors.Common.Unsupported_Argument_Types import project.Errors.Encoding_Error.Encoding_Error import project.Errors.File_Error.File_Error @@ -53,27 +52,17 @@ polyglot java import org.enso.base.Array_Builder Text.write : (File|Text) -> Encoding -> Existing_File_Behavior -> Problem_Behavior -> File ! Encoding_Error | Illegal_Argument | File_Error Text.write self path encoding=Encoding.utf_8 on_existing_file=Existing_File_Behavior.Backup on_problems=Problem_Behavior.Report_Warning = bytes = self.bytes encoding on_problems - - actual = File.new path - - is_enabled = Context.Output.is_enabled - - effective_existing_behaviour = if is_enabled then on_existing_file else - case on_existing_file of - Existing_File_Behavior.Backup -> Existing_File_Behavior.Overwrite - Existing_File_Behavior.Error -> if actual.exists then Error.throw (File_Error.Already_Exists actual) else Existing_File_Behavior.Overwrite - _ -> on_existing_file - - file = if is_enabled then actual else actual.create_dry_run_file copy_original=on_existing_file==Existing_File_Behavior.Append - - Context.Output.with_enabled <| - r = effective_existing_behaviour.write file stream-> - bytes.if_not_error <| + bytes.if_not_error <| + actual = File.new path + effective_existing_behaviour = on_existing_file.get_effective_behavior actual + file = if 
Context.Output.is_enabled then actual else + should_copy_file = on_existing_file==Existing_File_Behavior.Append + actual.create_dry_run_file copy_original=should_copy_file + + Context.Output.with_enabled <| + r = effective_existing_behaviour.write file stream-> stream.write_bytes bytes - r.if_not_error <| - if is_enabled then file else - warning = Dry_Run_Operation.Warning "Only a dry run has occurred, with data written to a temporary file." - Warning.attach warning file + r.if_not_error file ## Writes (or appends) the Vector of bytes into the specified file. The behavior specified in the `existing_file` parameter will be used if the file exists. diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso index 79f4f656eed0..5cccfab7acb2 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso @@ -90,11 +90,16 @@ type File_Format Create the constructor code for a File_Format type. constructor_code : Any -> Text constructor_code type_obj = - type_name = Meta.get_simple_type_name type_obj - ctors = Meta.meta type_obj . constructors - is_singleton_type = ctors.length == 0 - if is_singleton_type then type_name else - "(" + type_name + "." + ctors.first.name + ")" + ## Workaround for JSON and Auto_Detect + case type_obj of + JSON_Format -> "JSON_Format" + Auto_Detect -> "Auto_Detect" + _ -> + type_name = Meta.get_qualified_type_name type_obj + ctors = Meta.meta type_obj . constructors + is_singleton_type = ctors.length == 0 + if is_singleton_type then type_name else + type_name + "." 
+ ctors.first.name ## PRIVATE default_widget : Widget @@ -208,7 +213,7 @@ type JSON_Format Implements the `Data.parse` for this `File_Format` read_web : Response -> Any read_web self response = - response.body.parse_json + response.body.decode_as_json ## A setting to infer the default behaviour of some option. type Infer diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso index 7cac233bc99b..371acdce05bf 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso @@ -120,12 +120,15 @@ type Connection - all_fields: Return all the fields in the metadata table. @types make_table_types_selector @schema make_schema_selector - tables : Text -> Text -> Text -> Vector -> Boolean -> Materialized_Table + tables : Text -> Text -> Text -> Vector Text | Text | Nothing -> Boolean -> Materialized_Table tables self name_like=Nothing database=self.database schema=Nothing types=self.dialect.default_table_types all_fields=False = - types_array = if types.is_nothing then Nothing else types.to_array + types_vector = case types of + Nothing -> Nothing + _ : Vector -> types + _ -> [types] name_map = Map.from_vector [["TABLE_CAT", "Database"], ["TABLE_SCHEM", "Schema"], ["TABLE_NAME", "Name"], ["TABLE_TYPE", "Type"], ["REMARKS", "Description"], ["TYPE_CAT", "Type Database"], ["TYPE_SCHEM", "Type Schema"], ["TYPE_NAME", "Type Name"]] self.jdbc_connection.with_metadata metadata-> - table = Managed_Resource.bracket (metadata.getTables database schema name_like types_array) .close result_set-> + table = Managed_Resource.bracket (metadata.getTables database schema name_like types_vector) .close result_set-> result_set_to_table result_set self.dialect.make_column_fetcher_for_type renamed = table.rename_columns name_map if all_fields then renamed else diff --git 
a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso index 789fc8bb9576..2e72f3bd95b7 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso @@ -190,7 +190,8 @@ type Column count self = self.to_table.filter 0 Filter_Condition.Not_Nothing . row_count - ## Element-wise equality comparison. + ## ALIAS Equals + Element-wise equality comparison. Arguments: - other: The value to compare `self` against. If `other` is a column, the @@ -240,7 +241,8 @@ type Column new_name = self.naming_helpers.function_name "equals_ignore_case" [self, other] self.make_binary_op "equals_ignore_case" other new_name - ## Element-wise non-equality comparison. + ## ALIAS Not Equals + Element-wise non-equality comparison. Arguments: - other: The value to compare `self` against. If `other` is a column, the @@ -273,8 +275,7 @@ type Column != self other = make_equality_check_with_floating_point_handling self other "!=" - ## UNSTABLE - + ## ALIAS Greater Than Or Equal Element-wise order comparison. Arguments: @@ -287,8 +288,7 @@ type Column >= self other = Value_Type.expect_comparable self other <| self.make_binary_op ">=" other - ## UNSTABLE - + ## ALIAS Less Than Or Equal Element-wise order comparison. Arguments: @@ -301,8 +301,7 @@ type Column <= self other = Value_Type.expect_comparable self other <| self.make_binary_op "<=" other - ## UNSTABLE - + ## ALIAS Greater Than Element-wise order comparison. Arguments: @@ -315,8 +314,7 @@ type Column > self other = Value_Type.expect_comparable self other <| self.make_binary_op ">" other - ## UNSTABLE - + ## ALIAS Less Than Element-wise order comparison. 
Arguments: @@ -348,8 +346,7 @@ type Column new_name = self.naming_helpers.to_expression_text self + " between " + self.naming_helpers.to_expression_text lower + " and " + self.naming_helpers.to_expression_text upper self.make_op "BETWEEN" [lower, upper] new_name - ## UNSTABLE - + ## ALIAS Add, Plus, Concatenate Element-wise addition. Arguments: @@ -365,8 +362,7 @@ type Column new_name = self.naming_helpers.binary_operation_name "+" self other self.make_binary_op op other new_name - ## UNSTABLE - + ## ALIAS Subtract, Minus Element-wise subtraction. Arguments: @@ -380,8 +376,7 @@ type Column Value_Type_Helpers.check_binary_numeric_op self other <| self.make_binary_op "-" other - ## UNSTABLE - + ## ALIAS Multiply, Times, Product Element-wise multiplication. Arguments: @@ -395,8 +390,7 @@ type Column Value_Type_Helpers.check_binary_numeric_op self other <| self.make_binary_op "*" other - ## ALIAS Divide Columns - + ## ALIAS Divide Element-wise division. Arguments: @@ -430,7 +424,8 @@ type Column Value_Type_Helpers.check_binary_numeric_op self other <| self.make_binary_op "/" other - ## Element-wise modulus. + ## ALIAS Modulus, Modulo + Element-wise modulus. Arguments: - other: The value to modulo `self` against. If `other` is a column, the @@ -468,7 +463,6 @@ type Column self.make_binary_op op other new_name ## ALIAS Power - Element-wise raising to the power. Arguments: @@ -497,8 +491,7 @@ type Column Value_Type_Helpers.check_binary_numeric_op self other <| self.make_binary_op '^' other - ## UNSTABLE - + ## ALIAS And Element-wise boolean conjunction. Arguments: @@ -514,8 +507,7 @@ type Column new_name = self.naming_helpers.binary_operation_name "&&" self other self.make_binary_op "AND" other new_name - ## UNSTABLE - + ## ALIAS Or Element-wise boolean disjunction. 
Arguments: diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index 0186a5199728..bae38f880388 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -8,6 +8,7 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State import Standard.Base.Errors.Unimplemented.Unimplemented +from Standard.Base.Metadata import make_single_choice from Standard.Base.Widget_Helpers import make_delimiter_selector from Standard.Table import Aggregate_Column, Data_Formatter, Column_Selector, Sort_Column, Match_Columns, Position, Set_Mode, Auto, Value_Type @@ -190,13 +191,14 @@ type Table table.select_columns [-1, 0, 1] reorder=True Icon: select_column - @columns (Widget_Helpers.make_column_name_vector_selector include_blanks=True) + @columns Widget_Helpers.make_column_name_vector_selector select_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range select_columns self (columns = [self.columns.first.name]) (reorder = False) (error_on_missing_columns = True) (on_problems = Report_Warning) = new_columns = self.columns_helper.select_columns selectors=columns reorder=reorder error_on_missing_columns=error_on_missing_columns on_problems=on_problems self.updated_columns new_columns - ## Returns a new table with the chosen set of columns, as specified by the + ## ALIAS drop_columns + Returns a new table with the chosen set of columns, as specified by the `columns`, removed from the input table. Any unmatched input columns will be kept in the output. Columns are returned in the same order as in the input. @@ -243,7 +245,7 @@ type Table Remove the first two columns and the last column. 
table.remove_columns [-1, 0, 1] - @columns (Widget_Helpers.make_column_name_vector_selector include_blanks=True) + @columns Widget_Helpers.make_column_name_vector_selector remove_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range remove_columns self (columns = [self.columns.first.name]) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.remove_columns selectors=columns error_on_missing_columns=error_on_missing_columns on_problems=on_problems @@ -299,7 +301,7 @@ type Table Move the first column to back. table.reorder_columns [0] position=Position.After_Other_Columns - @columns (Widget_Helpers.make_column_name_vector_selector include_blanks=True) + @columns Widget_Helpers.make_column_name_vector_selector reorder_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Position -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range reorder_columns self (columns = [self.columns.first.name]) (position = Position.Before_Other_Columns) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.reorder_columns selectors=columns position=position error_on_missing_columns on_problems=on_problems @@ -580,9 +582,7 @@ type Table new_ctx = self.context.set_limit max_rows self.updated_context new_ctx - ## UNSTABLE - ALIAS Add Column, Update Column - + ## ALIAS Add Column, Update Column, New Column Sets the column value at the given name. Arguments: @@ -809,7 +809,7 @@ type Table table.order_by [(Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive)] @columns Widget_Helpers.make_order_by_selector - order_by : Vector (Text | Sort_Column) | Text | Sort_Column -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! 
Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns | Column_Indexes_Out_Of_Range + order_by : Vector (Text | Sort_Column) | Text -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns | Column_Indexes_Out_Of_Range order_by self (columns = ([(Sort_Column.Name (self.columns.at 0 . name))])) text_ordering=Text_Ordering.Default error_on_missing_columns=True on_problems=Problem_Behavior.Report_Warning = Panic.handle_wrapped_dataflow_error <| problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns types_to_always_throw=[No_Input_Columns_Selected] columns_for_ordering = Table_Helpers.prepare_order_by self.columns columns problem_builder @@ -927,8 +927,8 @@ type Table allows to join the two tables on equality of corresponding columns with the same name. So `table.join other on=["A", "B"]` is a shorthand for: table.join other on=[Join_Condition.Equals "A" "A", Join_Condition.Equals "B" "B"] - @on Widget_Helpers.make_column_name_selector - join : Table -> Join_Kind -> Join_Condition | Text | Vector (Join_Condition | Text) -> Text -> Problem_Behavior -> Table + @on Widget_Helpers.make_join_condition_selector + join : Table -> Join_Kind -> Vector (Join_Condition | Text) | Text -> Text -> Problem_Behavior -> Table join self right join_kind=Join_Kind.Left_Outer on=[Join_Condition.Equals self.column_names.first] right_prefix="Right " on_problems=Report_Warning = can_proceed = if Table_Helpers.is_table right . not then Error.throw (Type_Error.Error Table right "right") else same_backend = case right of @@ -1062,6 +1062,7 @@ type Table defined nor any ordering was specified explicitly by the user, the order of columns is undefined and the operation will fail, reporting a `Undefined_Column_Order` problem and returning an empty table. 
+ @keep_unmatched (make_single_choice [["True", "Boolean.True"], ["False", "Boolean.False"], ["Report", Meta.get_qualified_type_name Report_Unmatched]]) zip : Table -> Boolean | Report_Unmatched -> Text -> Problem_Behavior -> Table zip self right keep_unmatched=Report_Unmatched right_prefix="Right " on_problems=Report_Warning = _ = [right, keep_unmatched, right_prefix, on_problems] @@ -1276,6 +1277,7 @@ type Table Group by the Key column, count the rows table.aggregate [Aggregate_Column.Group_By "Key", Aggregate_Column.Count] + @columns Widget_Helpers.make_aggregate_column_vector_selector aggregate : Vector Aggregate_Column -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Invalid_Aggregate_Column | Invalid_Output_Column_Names | Duplicate_Output_Column_Names | Floating_Point_Equality | Invalid_Aggregation | Unquoted_Delimiter | Additional_Warnings aggregate self columns (error_on_missing_columns=False) (on_problems=Report_Warning) = validated = Aggregate_Column_Helper.prepare_aggregate_columns columns self error_on_missing_columns=error_on_missing_columns diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso index 3c21c341e277..a91855346232 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso @@ -72,7 +72,7 @@ type Postgres_Connection Arguments: - schema: The name of the schema to connect to. - @schema (self-> Single_Choice display=Display.Always values=(self.schemas . map s-> Option s s.pretty)) + @schema make_schema_selector set_schema : Text -> Connection ! 
SQL_Error set_schema self schema = if schema == self.schema then self else diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso index c10f17d79939..7f638d34c260 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso @@ -66,7 +66,7 @@ type SQLite_Connection Arguments: - schema: The name of the schema to connect to. - @schema (Single_Choice display=Display.Always values=[Option 'Nothing']) + @schema make_schema_selector set_schema : Text -> Connection ! SQL_Error set_schema self schema = if schema == self.schema then self else diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso index a214e821e733..d6e14a8adf3d 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso @@ -131,7 +131,8 @@ type Column IO.println (self.display show_rows format_terminal=True) IO.println '' - ## Element-wise equality comparison. + ## ALIAS Equals + Element-wise equality comparison. Arguments: - other: The value to compare `self` against. If `other` is a column, the @@ -193,7 +194,8 @@ type Column new_name = Naming_Helpers.function_name "equals_ignore_case" [self, other] run_vectorized_binary_op self "equals_ignore_case" fallback other expected_result_type=Value_Type.Boolean new_name - ## Element-wise non-equality comparison. + ## ALIAS Not Equals + Element-wise non-equality comparison. Arguments: - other: The value to compare `self` against. If `other` is a column, the @@ -227,7 +229,8 @@ type Column new_name = Naming_Helpers.binary_operation_name "!=" self other (self == other).not . rename new_name - ## Element-wise order comparison. 
+ ## ALIAS Greater Than Or Equal + Element-wise order comparison. Arguments: - other: The value to compare `self` against. If `other` is a column, the @@ -254,7 +257,8 @@ type Column >= self other = Value_Type.expect_comparable self other <| run_vectorized_binary_op self ">=" (>=) other expected_result_type=Value_Type.Boolean - ## Element-wise order comparison. + ## ALIAS Less Than Or Equal + Element-wise order comparison. Arguments: - other: The value to compare `self` against. If `other` is a column, the @@ -281,7 +285,8 @@ type Column <= self other = Value_Type.expect_comparable self other <| run_vectorized_binary_op self "<=" (<=) other expected_result_type=Value_Type.Boolean - ## Element-wise order comparison. + ## ALIAS Greater Than + Element-wise order comparison. Arguments: - other: The value to compare `self` against. If `other` is a column, the @@ -308,7 +313,8 @@ type Column > self other = Value_Type.expect_comparable self other <| run_vectorized_binary_op self ">" (>) other expected_result_type=Value_Type.Boolean - ## Element-wise order comparison. + ## ALIAS Less Than + Element-wise order comparison. Arguments: - other: The value to compare `self` against. If `other` is a column, the @@ -355,8 +361,7 @@ type Column result = (self >= lower) && (self <= upper) result.rename new_name - ## ALIAS Add Columns - + ## ALIAS Add, Plus, Concatenate Element-wise addition. Arguments: @@ -385,8 +390,7 @@ type Column Value_Type_Helpers.resolve_addition_kind self other . if_not_error <| run_vectorized_binary_op self '+' fallback_fn=Nothing other - ## ALIAS Subtract Columns - + ## ALIAS Subtract, Minus Element-wise subtraction. Arguments: @@ -415,8 +419,7 @@ type Column Value_Type_Helpers.check_binary_numeric_op self other <| run_vectorized_binary_op self '-' fallback_fn=Nothing other - ## ALIAS Multiply Columns - + ## ALIAS Multiply, Times, Product Element-wise multiplication. 
Arguments: @@ -445,8 +448,7 @@ type Column Value_Type_Helpers.check_binary_numeric_op self other <| run_vectorized_binary_op self '*' fallback_fn=Nothing other - ## ALIAS Divide Columns - + ## ALIAS Divide Element-wise division. Arguments: @@ -481,7 +483,8 @@ type Column new_name = Naming_Helpers.binary_operation_name "/" self other run_vectorized_binary_op_with_problem_handling self "/" fallback_fn=Nothing other new_name - ## Element-wise modulus. + ## ALIAS Modulus, Modulo + Element-wise modulus. Arguments: - other: The value to modulo `self` against. If `other` is a column, the @@ -515,8 +518,7 @@ type Column new_name = Naming_Helpers.binary_operation_name "%" self other run_vectorized_binary_op_with_problem_handling self "%" fallback_fn=Nothing other new_name - ## ALIAS Power Columns - + ## ALIAS Power Element-wise raising to the power. Arguments: @@ -545,8 +547,7 @@ type Column Value_Type_Helpers.check_binary_numeric_op self other <| run_vectorized_binary_op self '^' fallback_fn=Nothing other - ## ALIAS AND Columns - + ## ALIAS And Element-wise boolean conjunction. Arguments: @@ -576,7 +577,7 @@ type Column Value_Type_Helpers.check_binary_boolean_op self other <| run_vectorized_binary_op self "&&" fallback_fn=Nothing other - ## ALIAS OR Columns + ## ALIAS Or Element-wise boolean disjunction. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Join_Condition.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Join_Condition.enso index b5741978cab2..c7855e4a30b9 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Join_Condition.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Join_Condition.enso @@ -1,44 +1,36 @@ from Standard.Base import all type Join_Condition - ## Specifies a join condition that correlates rows from the two tables if - the element from the `left` of the left table is equal to the element - from the `right` of the right table. 
+ ## Correlates rows from the two tables if the `left` element is equal to the + `right` element. - Missing values are treated as equal to each other. + `Nothing` (or null in database) is considered equal to itself. Arguments: - left: A name or index of a column in the left table. - right: A name or index of a column in the right table. Equals (left : Text | Integer) (right : Text | Integer = left) - ## Specifies a join condition that correlates rows from the two tables if - the element from the `left` column of the left table is equal to the - element from the `right` column of the right table, ignoring case. + ## Correlates rows from the two tables if the `left` element is equal to the + `right` element, ignoring case. This is only supported for text columns. - Missing values are treated as equal to each other. + Case insensitive comparisons may mean a database has to perform a row + scan, which can cause a performance degradation. - This is only supported for text columns. - - Case insensitive comparisons may make it impossible for a database - operation to re-use an existing index, which can possibly lead to - performance degradation. + `Nothing` (or null in database) is considered equal to itself. Arguments: - left: A name or index of a column in the left table. - - right: A name or index of a column in the right table. Defaults to the - same column selector as provided for `left`. + - right: A name or index of a column in the right table. - locale: The locale to use for case insensitive comparisons. Equals_Ignore_Case (left : Text | Integer) (right : Text | Integer = left) (locale : Locale = Locale.default) - ## Specifies a join condition that correlates rows from the two tables if - the element from the `left` column of the left table fits between the - corresponding elements from `right_lower` and `right_upper` columns of - the right table. The comparison is inclusive. 
+ ## Correlates rows from the two tables if the `left` element fits between + the `right_lower` and `right_upper` elements. The comparison is inclusive + for both lower and upper bounds. - Pairs of rows in which any of `left`, `right_lower`, or `right_upper` is - missing are ignored, as the comparison is assumed to be not well-defined - for missing values. + If any of the values on row are `Nothing` (or null in database) then the + rows won't be joined. Arguments: - left: A name or index of a column in the left table. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index 09fb7261f956..b2f11bdf8e56 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -7,7 +7,9 @@ import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Unimplemented.Unimplemented +import Standard.Base.Runtime.Context +from Standard.Base.Metadata import make_single_choice from Standard.Base.Widget_Helpers import make_delimiter_selector import project.Data.Aggregate_Column.Aggregate_Column @@ -312,13 +314,14 @@ type Table table.select_columns [-1, 0, 1] reorder=True Icon: select_column - @columns (Widget_Helpers.make_column_name_vector_selector include_blanks=True) + @columns Widget_Helpers.make_column_name_vector_selector select_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Boolean -> Problem_Behavior -> Table ! 
No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range select_columns self columns=[self.columns.first.name] (reorder = False) (error_on_missing_columns = True) (on_problems = Report_Warning) = new_columns = self.columns_helper.select_columns selectors=columns reorder=reorder error_on_missing_columns=error_on_missing_columns on_problems=on_problems Table.new new_columns - ## Returns a new table with the chosen set of columns, as specified by thez + ## ALIAS drop_columns + Returns a new table with the chosen set of columns, as specified by thez `columns`, removed from the input table. Any unmatched input columns will be kept in the output. Columns are returned in the same order as in the input. @@ -365,7 +368,7 @@ type Table Remove the first two columns and the last column. table.remove_columns [-1, 0, 1] - @columns (Widget_Helpers.make_column_name_vector_selector include_blanks=True) + @columns Widget_Helpers.make_column_name_vector_selector remove_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range remove_columns self (columns=[self.columns.first.name]) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.remove_columns selectors=columns error_on_missing_columns=error_on_missing_columns on_problems=on_problems @@ -421,7 +424,7 @@ type Table Move the first column to back. table.reorder_columns [0] position=Position.After_Other_Columns - @columns (Widget_Helpers.make_column_name_vector_selector include_blanks=True) + @columns Widget_Helpers.make_column_name_vector_selector reorder_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Position -> Boolean -> Problem_Behavior -> Table ! 
Missing_Input_Columns | Column_Indexes_Out_Of_Range reorder_columns self (columns = [self.columns.first.name]) (position = Position.Before_Other_Columns) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.reorder_columns selectors=columns position=position error_on_missing_columns=error_on_missing_columns on_problems=on_problems @@ -601,6 +604,7 @@ type Table Group by the Key column, count the rows table.aggregate [Aggregate_Column.Group_By "Key", Aggregate_Column.Count] + @columns Widget_Helpers.make_aggregate_column_vector_selector aggregate : Vector Aggregate_Column -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Invalid_Aggregate_Column | Invalid_Output_Column_Names | Duplicate_Output_Column_Names | Floating_Point_Equality | Invalid_Aggregation | Unquoted_Delimiter | Additional_Warnings aggregate self columns (error_on_missing_columns=False) (on_problems=Report_Warning) = validated = Aggregate_Column_Helper.prepare_aggregate_columns columns self error_on_missing_columns=error_on_missing_columns @@ -685,7 +689,7 @@ type Table table.order_by [(Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive)] @columns Widget_Helpers.make_order_by_selector - order_by : Vector (Text | Sort_Column) | Text | Sort_Column -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns | Column_Indexes_Out_Of_Range + order_by : Vector (Text | Sort_Column) | Text -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! 
Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns | Column_Indexes_Out_Of_Range order_by self (columns = [self.columns.first.name]) text_ordering=Text_Ordering.Default error_on_missing_columns=True on_problems=Problem_Behavior.Report_Warning = problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns types_to_always_throw=[No_Input_Columns_Selected] columns_for_ordering = Table_Helpers.prepare_order_by self.columns columns problem_builder @@ -1188,9 +1192,7 @@ type Table drop self range=(First 1) = Index_Sub_Range_Module.drop_helper self.row_count self.rows.at self.slice (slice_ranges self) range - ## UNSTABLE - ALIAS Add Column, Update Column - + ## ALIAS Add Column, Update Column, New Column Sets the column value at the given name. Arguments: @@ -1409,8 +1411,8 @@ type Table allows to join the two tables on equality of corresponding columns with the same name. So `table.join other on=["A", "B"]` is a shorthand for: table.join other on=[Join_Condition.Equals "A" "A", Join_Condition.Equals "B" "B"] - @on Widget_Helpers.make_column_name_selector - join : Table -> Join_Kind -> Join_Condition | Text | Vector (Join_Condition | Text) -> Text -> Problem_Behavior -> Table + @on Widget_Helpers.make_join_condition_selector + join : Table -> Join_Kind -> Vector (Join_Condition | Text) | Text -> Text -> Problem_Behavior -> Table join self right join_kind=Join_Kind.Left_Outer on=[Join_Condition.Equals self.column_names.first] right_prefix="Right " on_problems=Report_Warning = if check_table "right" right then # [left_unmatched, matched, right_unmatched] @@ -1527,6 +1529,7 @@ type Table defined nor any ordering was specified explicitly by the user, the order of columns is undefined and the operation will fail, reporting a `Undefined_Column_Order` problem and returning an empty table. 
+ @keep_unmatched (make_single_choice [["True", "Boolean.True"], ["False", "Boolean.False"], ["Report", Meta.get_qualified_type_name Report_Unmatched]]) zip : Table -> Boolean | Report_Unmatched -> Text -> Problem_Behavior -> Table zip self right keep_unmatched=Report_Unmatched right_prefix="Right " on_problems=Report_Warning = if check_table "right" right then @@ -1953,8 +1956,15 @@ type Table self.write file format=base_format on_existing_file match_columns on_problems _ -> methods = if format == JSON_Format then ["write_table"] else Meta.meta (Meta.type_of format) . methods - if methods.contains "write_table" then format.write_table file self on_existing_file match_columns on_problems else - Error.throw (File_Error.Unsupported_Output_Type format Table) + if methods.contains "write_table" . not then Error.throw (File_Error.Unsupported_Output_Type format Table) else + effective_existing_behaviour = on_existing_file.get_effective_behavior file + tgt_file = if Context.Output.is_enabled then file else + should_copy_file = on_existing_file==Existing_File_Behavior.Append + file.create_dry_run_file copy_original=should_copy_file + + to_write = if Context.Output.is_enabled then self else self.take 1000 + Context.Output.with_enabled <| + format.write_table tgt_file to_write effective_existing_behaviour match_columns on_problems ## Creates a text representation of the table using the CSV format. 
to_csv : Text diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Workbook.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Workbook.enso index ef67deb61682..38fc2eeba590 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Workbook.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Excel/Excel_Workbook.enso @@ -103,10 +103,15 @@ type Excel_Workbook @types (self-> Single_Choice values=(self.table_types.map t-> Option t t.pretty)) tables : Text -> Text -> Text -> Vector -> Boolean -> Table tables self name_like=Nothing database=self.database schema=self.schema types=Nothing all_fields=False = + types_vector = case types of + Nothing -> Nothing + _ : Vector -> types + _ -> [types] + _ = [all_fields] rows = if schema != Nothing then [] else - sheets = if types==Nothing || types.contains "Worksheet" then self.sheet_names.map s-> [s, 'Worksheet', database, Nothing] else [] - ranges = if types==Nothing || types.contains "Named Range" then self.named_ranges.map r-> [r, 'Named Range', database, Nothing] else [] + sheets = if types_vector.is_nothing || types_vector.contains "Worksheet" then self.sheet_names.map s-> [s, 'Worksheet', database, Nothing] else [] + ranges = if types_vector.is_nothing || types_vector.contains "Named Range" then self.named_ranges.map r-> [r, 'Named Range', database, Nothing] else [] sheets + ranges filtered = if name_like == Nothing then rows else @@ -126,7 +131,8 @@ type Excel_Workbook _ = [alias] self.read query - ## Execute the query and load the results into memory as a Table. + ## ALIAS sheet, worksheet, range + Execute the query and load the results into memory as a Table. Arguments: - query: sheet name, range name or address to read from the workbook. 
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Widget_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Widget_Helpers.enso index 108055366c12..8d08f566a48e 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Widget_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Widget_Helpers.enso @@ -6,50 +6,61 @@ from Standard.Base.Metadata.Widget import Single_Choice, Vector_Editor from Standard.Base.Metadata.Choice import Option import Standard.Base.Metadata.Display -import project.Data.Table.Table import project.Data.Aggregate_Column.Aggregate_Column -import project.Internal.Parse_Values_Helper +import project.Data.Join_Condition.Join_Condition +import project.Data.Table.Table +import project.Data.Type.Value_Type.Auto +import project.Data.Type.Value_Type.Value_Type import project.Data.Table_Conversions +import project.Internal.Parse_Values_Helper ## PRIVATE Make an aggregate column selector. make_aggregate_column_selector : Table -> Display -> Boolean -> Widget make_aggregate_column_selector table display=Display.Always include_group_by=True = col_names_selector = make_column_name_selector table display=Display.Always - column_widget = Pair.new "column" col_names_selector + column_widget = ["column", col_names_selector] col_list_selector = make_column_name_vector_selector table display=Display.Always - group_by = if include_group_by then [Option "Group By" "(Aggregate_Column.Group_By)" [column_widget]] else [] - count = Option "Count" "Aggregate_Column.Count" - count_distinct = Option "Count Distinct" "(Aggregate_Column.Count_Distinct)" [Pair.new "columns" (col_list_selector)] - first = Option "First" "(Aggregate_Column.First)" [column_widget, Pair.new "order_by" (col_list_selector)] - last = Option "Last" "(Aggregate_Column.Last)" [column_widget, Pair.new "order_by" (col_list_selector)] + fqn = Meta.get_qualified_type_name Aggregate_Column + group_by = if include_group_by then [Option "Group 
By" fqn+".Group_By" [column_widget]] else [] + count = Option "Count" fqn+".Count" + count_distinct = Option "Count Distinct" fqn+".Count_Distinct" [["columns", col_list_selector]] + first = Option "First" fqn+".First" [column_widget, ["order_by" , col_list_selector]] + last = Option "Last" fqn+".Last" [column_widget, ["order_by" , col_list_selector]] - count_not_nothing = Option "Count Not Nothing" "(Aggregate_Column.Count_Not_Nothing)" [column_widget] - count_nothing = Option "Count Nothing" "(Aggregate_Column.Count_Nothing)" [column_widget] + count_not_nothing = Option "Count Not Nothing" fqn+".Count_Not_Nothing" [column_widget] + count_nothing = Option "Count Nothing" fqn+".Count_Nothing" [column_widget] ## Should be a list of Text columns only - count_not_empty = Option "Count Not Empty" "(Aggregate_Column.Count_Not_Empty)" [column_widget] - count_empty = Option "Count Empty" "(Aggregate_Column.Count_Empty)" [column_widget] - concatenate = Option "Concatenate" "(Aggregate_Column.Concatenate)" [column_widget] - shortest = Option "Shortest" "(Aggregate_Column.Shortest)" [column_widget] - longest = Option "Longest" "(Aggregate_Column.Longest)" [column_widget] + count_not_empty = Option "Count Not Empty" fqn+".Count_Not_Empty" [column_widget] + count_empty = Option "Count Empty" fqn+".Count_Empty" [column_widget] + concatenate = Option "Concatenate" fqn+".Concatenate" [column_widget] + shortest = Option "Shortest" fqn+".Shortest" [column_widget] + longest = Option "Longest" fqn+".Longest" [column_widget] ## Should be a list of Numeric columns only - sum = Option "Sum" "(Aggregate_Column.Sum)" [column_widget] - average = Option "Average" "(Aggregate_Column.Average)" [column_widget] - median = Option "Median" "(Aggregate_Column.Median)" [column_widget] - percentile = Option "Percentile" "(Aggregate_Column.Percentile)" [column_widget] - mode = Option "Mode" "(Aggregate_Column.Mode)" [column_widget] - standard_deviation = Option "Standard Deviation" 
"(Aggregate_Column.Standard_Deviation)" [column_widget] + sum = Option "Sum" fqn+".Sum" [column_widget] + average = Option "Average" fqn+".Average" [column_widget] + median = Option "Median" fqn+".Median" [column_widget] + percentile = Option "Percentile" fqn+".Percentile" [column_widget] + mode = Option "Mode" fqn+".Mode" [column_widget] + standard_deviation = Option "Standard Deviation" fqn+".Standard_Deviation" [column_widget] # Should be a list of comparable columns only - maximum = Option "Maximum" "(Aggregate_Column.Maximum)" [column_widget] - minimum = Option "Minimum" "(Aggregate_Column.Minimum)" [column_widget] + maximum = Option "Maximum" fqn+".Maximum" [column_widget] + minimum = Option "Minimum" fqn+".Minimum" [column_widget] Single_Choice display=display values=(group_by+[count, count_distinct, first, last, count_not_nothing, count_nothing, count_not_empty, count_empty, concatenate, shortest, longest, sum, average, median, percentile, mode, standard_deviation, maximum, minimum]) +## PRIVATE + Make an Aggregate_Column list editor +make_aggregate_column_vector_selector : Table -> Display -> Widget +make_aggregate_column_vector_selector table display=Display.Always = + item_editor = make_aggregate_column_selector table display=Display.Always + Vector_Editor item_editor=item_editor item_default=item_editor.values.first.value display=display + ## PRIVATE Make a column name selector. make_column_name_selector : Table -> Display -> Widget @@ -65,6 +76,21 @@ make_column_name_vector_selector table display=Display.Always = item_editor = make_column_name_selector table display=Display.Always Vector_Editor item_editor=item_editor item_default=item_editor.values.first.value display=display +## PRIVATE + Make a join condition selector. 
+make_join_condition_selector : Table -> Display -> Widget +make_join_condition_selector table display=Display.Always = + col_names_selector = make_column_name_selector table display=Display.Always + + fqn = "Join_Condition" + equals = Option "Equals" fqn+".Equals" [["left", col_names_selector]] + equals_ci = Option "Equals (Ignore Case)" fqn+".Equals_Ignore_Case" [["left", col_names_selector]] + between = Option "Between" fqn+".Between" [["left", col_names_selector]] + names=[equals, equals_ci, between] + + item_editor = Single_Choice display=display values=names + Vector_Editor item_editor=item_editor item_default="("+item_editor.values.first.value+")" display=display + ## PRIVATE Make a column name selector. make_order_by_selector : Table -> Display -> Widget @@ -91,8 +117,11 @@ parse_type_selector include_auto=True = valid_parse_targets = Parse_Values_Helper.valid_parse_targets prefix = if include_auto then ['Auto'] else [] - choice = prefix + (valid_parse_targets.map t-> 'Value_Type.'+t) names = prefix + valid_parse_targets + + fqn = Meta.get_qualified_type_name Value_Type + choice = names.map n-> if n=='Auto' then (Meta.get_qualified_type_name Auto) else fqn+'.'+n + options = names.zip choice . 
map pair-> Option pair.first pair.second Single_Choice display=Display.Always values=options diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 615f9f4deac0..4621dcce289f 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -1,6 +1,5 @@ from Standard.Base import all import Standard.Base.Runtime.Ref.Ref -import Standard.Base.Runtime.Context import Standard.Base.Errors.File_Error.File_Error import Standard.Table.Data.Type.Value_Type.Bits diff --git a/test/Table_Tests/src/IO/Csv_Spec.enso b/test/Table_Tests/src/IO/Csv_Spec.enso index 56b8f0377d14..78bc0aab320f 100644 --- a/test/Table_Tests/src/IO/Csv_Spec.enso +++ b/test/Table_Tests/src/IO/Csv_Spec.enso @@ -1,5 +1,4 @@ from Standard.Base import all -import Standard.Base.Runtime.Context from Standard.Table import Table, Column, Delimited import Standard.Table.Main as Table_Module diff --git a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso index 6237f164c444..a32002b94598 100644 --- a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso @@ -2,7 +2,6 @@ from Standard.Base import all import Standard.Base.Errors.Encoding_Error.Encoding_Error import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument -import Standard.Base.Runtime.Context from Standard.Table import Table, Column, Data_Formatter, Quote_Style, Delimited import Standard.Table.Data.Table_Conversions diff --git a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso index 3f57f964d4ba..a732b73da59e 100644 --- a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso @@ -2,7 +2,6 @@ from Standard.Base import all import Standard.Base.Errors.Encoding_Error.Encoding_Error import 
Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument -import Standard.Base.Runtime.Context from Standard.Table import Table, Column, Data_Formatter, Quote_Style, Match_Columns, Delimited from Standard.Table.Errors import all diff --git a/test/Table_Tests/src/IO/Excel_Spec.enso b/test/Table_Tests/src/IO/Excel_Spec.enso index 239b449363c6..5e209a763cf0 100644 --- a/test/Table_Tests/src/IO/Excel_Spec.enso +++ b/test/Table_Tests/src/IO/Excel_Spec.enso @@ -1,7 +1,6 @@ from Standard.Base import all import Standard.Base.Errors.File_Error.File_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument -import Standard.Base.Runtime.Context from Standard.Table import Table, Match_Columns, Excel, Excel_Range, Data_Formatter, Sheet_Names, Range_Names, Worksheet, Cell_Range, Delimited, Excel_Workbook diff --git a/test/Table_Tests/src/IO/Formats_Spec.enso b/test/Table_Tests/src/IO/Formats_Spec.enso index 8164031e31de..d539d3590ddb 100644 --- a/test/Table_Tests/src/IO/Formats_Spec.enso +++ b/test/Table_Tests/src/IO/Formats_Spec.enso @@ -79,4 +79,40 @@ spec = Test.group 'Various File Format support on Table' <| r2.catch.should_be_a File_Error.Unsupported_Output_Type r2.catch.format . should_equal my_format + write_test extension = + f = transient / ("big." + extension) + f.delete_if_exists + f_bak = transient / ("big." + extension + ".bak") + f_bak.delete_if_exists + + big_table = Table.new [["a", 1.up_to 2000 . to_vector]] + big_table.write f + + new_table = Table.new [["a", 2000.up_to 4000 . to_vector]] + r = Context.Output.with_disabled <| + s = new_table.write f + s.exists.should_be_true + + r_data = s.read + row_count = if r_data . is_a Table then r_data.row_count else r_data.length + row_count . should_equal 1000 + s + + f_bak.exists.should_be_false + + f.exists.should_be_true + f_data = f.read + f_row_count = if f_data . is_a Table then f_data.row_count else f_data.length + f_row_count . 
should_equal 1999 + + f.delete_if_exists + f_bak.delete_if_exists + r.delete_if_exists + + Test.specify "should write to a temporary CSV file part of the data if context disabled" <| + write_test "csv" + + Test.specify "should write to a temporary JSON file part of the data if context disabled" <| + write_test "json" + main = Test_Suite.run_main spec diff --git a/test/Tests/src/Data/Text/Regex_Spec.enso b/test/Tests/src/Data/Text/Regex_Spec.enso index 8cf282610e38..53f9af3e79d9 100644 --- a/test/Tests/src/Data/Text/Regex_Spec.enso +++ b/test/Tests/src/Data/Text/Regex_Spec.enso @@ -141,6 +141,15 @@ spec = match = pattern.find_all input match . should_equal [] + Test.specify "should handle matching empty matches" <| + pattern = Regex.compile ".*" + pattern.find_all "Hello World" . should_equal ["Hello World", ""] + pattern.find_all "" . should_equal [""] + + pattern_2 = Regex.compile ".*(?=.)" + pattern_2.find_all "Hello World" . should_equal ["Hello Worl", ""] + pattern_2.find_all "" . should_equal [] + Test.specify "should correctly handle edge cases where one-letter matches happen at the end of the word" <| Regex.compile "(a+|1+)" . find_all "a1a1" . should_equal ["a", "1", "a", "1"] Regex.compile "([a]+|[1]+)" . find_all "a1a1" . should_equal ["a", "1", "a", "1"] diff --git a/test/Tests/src/Data/Text_Spec.enso b/test/Tests/src/Data/Text_Spec.enso index f07b7c751c6b..caeb708eec36 100644 --- a/test/Tests/src/Data/Text_Spec.enso +++ b/test/Tests/src/Data/Text_Spec.enso @@ -1279,6 +1279,12 @@ spec = "Hello World!".find_all ".o" . map (match-> match.span 0) . should_equal [Span.Value (3.up_to 5) "Hello World!", Span.Value (6.up_to 8) "Hello World!"] "foobar".find "BAR" Case_Sensitivity.Insensitive . span 0 . should_equal (Span.Value (3.up_to 6) "foobar") + Test.specify "find_all should handle 0 length matches" <| + "Hello World".find_all ".*" . map (_.text) . should_equal ["Hello World", ""] + "".find_all ".*" . map (_.text) . 
should_equal [""] + "Hello World".find_all ".*(?=.)" . map (_.text) . should_equal ["Hello Worl", ""] + "".find_all ".*(?=.)" . map (_.text) . should_equal [] + Test.specify "should handle accents and other multi-point graphemes" <| accents = 'a\u{301}e\u{301}o\u{301}he\u{301}h' diff --git a/test/Tests/src/Network/Http_Spec.enso b/test/Tests/src/Network/Http_Spec.enso index 9ecd745275a1..fd9d30300a42 100644 --- a/test/Tests/src/Network/Http_Spec.enso +++ b/test/Tests/src/Network/Http_Spec.enso @@ -55,7 +55,7 @@ spec = } res = HTTP.new.get url_get res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should send Get request using module method" <| expected_response = Json.parse <| ''' { @@ -69,7 +69,7 @@ spec = } res = HTTP.new.get url_get res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should fetch the body of a Get request" <| expected_response = Json.parse <| ''' @@ -83,14 +83,14 @@ spec = "args": {} } res = HTTP.fetch url_get - res.parse_json.should_equal expected_response + res.decode_as_json.should_equal expected_response Test.specify "should return error if the fetch method fails" <| HTTP.fetch "http://undefined_host" . 
should_fail_with Request_Error Test.specify "should send Head request" <| res = HTTP.new.head url_get res.code.should_equal HTTP_Status_Code.ok - res.body.to_text.should_equal '' + res.body.decode_as_text.should_equal '' Test.specify "should Post empty body" <| expected_response = Json.parse <| ''' @@ -110,7 +110,7 @@ spec = body_empty = Request_Body.Empty res = HTTP.new.post url_post body_empty res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should Post empty body using module method" <| expected_response = Json.parse <| ''' { @@ -129,7 +129,7 @@ spec = body_empty = Request_Body.Empty res = HTTP.new.post url_post body_empty res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should Post text body" <| expected_response = Json.parse <| ''' { @@ -149,7 +149,7 @@ spec = body_text = Request_Body.Text "Hello World!" 
res = HTTP.new.post url_post body_text res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should Post form text" <| expected_response = Json.parse <| ''' { @@ -169,7 +169,7 @@ spec = form_parts = [Form.text_field "key" "val"] res = HTTP.new.post_form url_post form_parts res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should Post form text using module method" <| expected_response = Json.parse <| ''' { @@ -189,7 +189,7 @@ spec = form_parts = [Form.text_field "key" "val"] res = HTTP.new.post_form url_post form_parts res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should Post form file" <| test_file = enso_project.data / "sample.txt" form_parts = [Form.text_field "key" "val", Form.file_field "sample" test_file] @@ -221,7 +221,7 @@ spec = json = '{"key":"val"}' res = HTTP.new.post_json url_post json res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should Post Json using module method" <| expected_response = Json.parse <| ''' { @@ -243,7 +243,7 @@ spec = json = '{"key":"val"}' res = HTTP.new.post_json url_post json res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should Post binary" <| expected_response = Json.parse <| ''' { @@ -263,7 +263,7 @@ spec = body_bytes = Request_Body.Bytes "Hello World!".utf_8 res = HTTP.new.post url_post body_bytes res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response 
Test.specify "should create and send Get request" <| expected_response = Json.parse <| ''' @@ -279,7 +279,7 @@ spec = req = Request.new HTTP_Method.Get url_get res = HTTP.new.request req res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should create and send Post request with json body" <| expected_response = Json.parse <| ''' { @@ -303,7 +303,7 @@ spec = req_with_body = req.with_json json_body res = HTTP.new.request req_with_body res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response Test.specify "should create and send Post request with json text" <| expected_response = Json.parse <| ''' { @@ -328,6 +328,6 @@ spec = req_with_body = req.with_json json_text res = HTTP.new.request req_with_body res.code.should_equal HTTP_Status_Code.ok - res.body.parse_json.should_equal expected_response + res.body.decode_as_json.should_equal expected_response main = Test_Suite.run_main spec diff --git a/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso b/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso index a9a0e832ff41..c5b45c8f8454 100644 --- a/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso +++ b/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso @@ -1,6 +1,5 @@ from Standard.Base import all import Standard.Base.Errors.Encoding_Error.Encoding_Error -import Standard.Base.Runtime.Context polyglot java import java.nio.CharBuffer diff --git a/test/Tests/src/System/Reporting_Stream_Encoder_Spec.enso b/test/Tests/src/System/Reporting_Stream_Encoder_Spec.enso index 948e4e6607d2..42cee53b6e34 100644 --- a/test/Tests/src/System/Reporting_Stream_Encoder_Spec.enso +++ b/test/Tests/src/System/Reporting_Stream_Encoder_Spec.enso @@ -1,7 +1,6 @@ from Standard.Base import all import Standard.Base.Errors.Encoding_Error.Encoding_Error import 
Standard.Base.Errors.Illegal_State.Illegal_State -import Standard.Base.Runtime.Context polyglot java import org.enso.base.Encoding_Utils polyglot java import java.nio.CharBuffer diff --git a/test/Visualization_Tests/src/Table_Spec.enso b/test/Visualization_Tests/src/Table_Spec.enso index eb74669c2a84..46960772b92c 100644 --- a/test/Visualization_Tests/src/Table_Spec.enso +++ b/test/Visualization_Tests/src/Table_Spec.enso @@ -1,5 +1,4 @@ from Standard.Base import all -import Standard.Base.Runtime.Context from Standard.Table import Table, Aggregate_Column, Value_Type From bd3ba26fc12183dfcd476431dcc2b1da8ba4629e Mon Sep 17 00:00:00 2001 From: Ilya Bogdanov Date: Fri, 2 Jun 2023 10:14:50 +0300 Subject: [PATCH 11/39] Split CB documentation to smaller pages (#6893) Now documentation for types, constructors and methods is displayed separately, with a links between pages. It drastically improves the speed of documentation panel update (50-100x on my machine), and also provides more readable documentation. https://github.com/enso-org/enso/assets/6566674/05c77560-162b-4396-bfa0-1e79eb6dcc5f Before: Screenshot 2023-05-31 at 01 02 47 After: Screenshot 2023-05-31 at 00 54 53 --- CHANGELOG.md | 3 + .../src/documentation_ir.rs | 328 ++++++++++++------ app/gui/view/documentation/assets/input.css | 4 + app/gui/view/documentation/src/html.rs | 182 +++++----- app/gui/view/documentation/src/lib.rs | 54 ++- lib/rust/ensogl/core/src/application.rs | 3 +- 6 files changed, 368 insertions(+), 206 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9293ecfbf24d..cbe45acd11c8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -179,6 +179,8 @@ - [A loading animation is now shown when opening and creating projects][6827], as the previous behaviour of showing a blank screen while the project was being loaded was potentially confusing to users. +- [Performance and readability of documentation panel was improved][6893]. 
The + documentation is now split into separate pages, which are much smaller. [6279]: https://github.com/enso-org/enso/pull/6279 [6421]: https://github.com/enso-org/enso/pull/6421 @@ -191,6 +193,7 @@ [6474]: https://github.com/enso-org/enso/pull/6474 [6844]: https://github.com/enso-org/enso/pull/6844 [6827]: https://github.com/enso-org/enso/pull/6827 +[6893]: https://github.com/enso-org/enso/pull/6893 #### EnsoGL (rendering engine) diff --git a/app/gui/suggestion-database/src/documentation_ir.rs b/app/gui/suggestion-database/src/documentation_ir.rs index 51ecbc89eb94..9a92d3d8559a 100644 --- a/app/gui/suggestion-database/src/documentation_ir.rs +++ b/app/gui/suggestion-database/src/documentation_ir.rs @@ -1,9 +1,21 @@ //! The intermediate representation of the entry's documentation. //! //! [`EntryDocumentation`] contains all the necessary information to generate HTML -//! documentation of the specific entry. [`EntryDocumentation`] is created by aggregating -//! documentation of the entry, and also its children entries, such as methods of the type or types -//! defined in the module. +//! documentation of the specific entry. +//! +//! When displaying the documentation for the user, we render the information contained in +//! [`EntryDocumentation`], and include hyperlinks to other related documentation pages. For +//! example, the type's documentation has a link to its parent module's documentation, and +//! to every method or constructor it has. These links are created by the +//! [`EntryDocumentation::linked_doc_pages`] method. +//! +//! We don't link modules to each other, but a type's documentation does link to its module. Since +//! we don't have a documentation registry, we must create a whole module's documentation for each +//! method entry, following the `method -> type -> module` link path. We can't create module +//! documentation on demand as the link handler doesn't have access to the suggestion database, and +//! 
we can't share module documentation between entries because it needs mutable state. We store the +//! suggestion database in memory, so this process is quick, but we might need to improve it in the +//! future. use crate::prelude::*; @@ -21,6 +33,17 @@ use std::cmp::Ordering; +// ============== +// === Errors === +// ============== + +#[allow(missing_docs)] +#[derive(Debug, Clone, Eq, Fail, PartialEq)] +#[fail(display = "Can't find parent module for entry {}.", _0)] +pub struct NoParentModule(String); + + + // ========================== // === EntryDocumentation === // ========================== @@ -42,28 +65,35 @@ impl Default for EntryDocumentation { } } +/// A link to the other documentation entry. It is used to connect documentation pages (for +/// example, the module documentation with every type's documentation). +#[derive(Debug)] +pub struct LinkedDocPage { + /// The name of the liked entry. It is used to produce a unique ID for the link. + pub name: Rc, + /// The intermediate reprentation of the linked entry's documentation. + pub page: EntryDocumentation, +} + impl EntryDocumentation { /// Constructor. 
- pub fn new(db: &SuggestionDatabase, id: &entry::Id) -> Result { + pub fn new(db: &SuggestionDatabase, id: &entry::Id) -> FallibleResult { let entry = db.lookup(*id); let result = match entry { Ok(entry) => match entry.kind { - Kind::Type => { - let type_docs = TypeDocumentation::new(*id, &entry, db)?.into(); - Documentation::Type(type_docs).into() - } + Kind::Type => Self::type_docs(db, &entry, *id)?, Kind::Module => { - let module_docs = ModuleDocumentation::new(*id, &entry, db)?.into(); + let module_docs = ModuleDocumentation::new(*id, &entry, db)?; Documentation::Module(module_docs).into() } Kind::Constructor => Self::constructor_docs(db, &entry)?, Kind::Method => Self::method_docs(db, &entry)?, Kind::Function => { - let function_docs = FunctionDocumentation::from_entry(&entry).into(); + let function_docs = Function::from_entry(&entry); Documentation::Function(function_docs).into() } Kind::Local => { - let local_docs = LocalDocumentation::from_entry(&entry).into(); + let local_docs = LocalDocumentation::from_entry(&entry); Documentation::Local(local_docs).into() } }, @@ -75,24 +105,129 @@ impl EntryDocumentation { Ok(result) } - /// Qualified name of the function-like entry. See [`Documentation::function_name`]. - pub fn function_name(&self) -> Option<&QualifiedName> { + /// Create documentation for a hard-coded builtin entry. + pub fn builtin(sections: impl IntoIterator) -> Self { + let sections = BuiltinDocumentation::from_doc_sections(sections.into_iter()); + Self::Docs(Documentation::Builtin(sections)) + } + + /// The list of links displayed on the documentation page. + pub fn linked_doc_pages(&self) -> Vec { match self { - EntryDocumentation::Docs(docs) => docs.function_name(), - _ => None, + EntryDocumentation::Docs(docs) => match docs { + // Module documentation contains links to all methods and types defined in this + // module. 
+ Documentation::Module(docs) => { + let methods = docs.methods.iter().map(|method| LinkedDocPage { + name: method.name.clone_ref(), + page: Documentation::ModuleMethod { + docs: method.clone_ref(), + module_docs: docs.clone_ref(), + } + .into(), + }); + let types = docs.types.iter().map(|type_| LinkedDocPage { + name: type_.name.clone_ref(), + page: Documentation::Type { + docs: type_.clone_ref(), + module_docs: docs.clone_ref(), + } + .into(), + }); + methods.chain(types).collect() + } + // Type documentation contains links to all constructors and methods of this type, + // and also a link to the parent module. + Documentation::Type { docs, module_docs } => { + let methods = docs.methods.iter().map(|method| LinkedDocPage { + name: method.name.clone_ref(), + page: Documentation::Method { + docs: method.clone_ref(), + type_docs: docs.clone_ref(), + module_docs: module_docs.clone_ref(), + } + .into(), + }); + let constructors = docs.constructors.iter().map(|constructor| LinkedDocPage { + name: constructor.name.clone_ref(), + page: Documentation::Constructor { + docs: constructor.clone_ref(), + type_docs: docs.clone_ref(), + module_docs: module_docs.clone_ref(), + } + .into(), + }); + let parent_module = LinkedDocPage { + name: module_docs.name.clone_ref(), + page: Documentation::Module(module_docs.clone_ref()).into(), + }; + methods.chain(constructors).chain(iter::once(parent_module)).collect() + } + // Constructor documentation contains a link to the type. We also need to provide a + // module documentation here, because the type documentation has a link to the + // module documentation. + Documentation::Constructor { type_docs, module_docs, .. } => vec![LinkedDocPage { + name: type_docs.name.clone_ref(), + page: Documentation::Type { + docs: type_docs.clone_ref(), + module_docs: module_docs.clone_ref(), + } + .into(), + }], + // Method documentation contains a link to the type. 
We also need to provide a + // module documentation here, because the type documentation has a link to the + // module documentation. + Documentation::Method { type_docs, module_docs, .. } => vec![LinkedDocPage { + name: type_docs.name.clone_ref(), + page: Documentation::Type { + docs: type_docs.clone_ref(), + module_docs: module_docs.clone_ref(), + } + .into(), + }], + // Module method documentation contains a link to the module. + Documentation::ModuleMethod { module_docs, .. } => vec![LinkedDocPage { + name: module_docs.name.clone_ref(), + page: Documentation::Module(module_docs.clone_ref()).into(), + }], + Documentation::Function(_) => default(), + Documentation::Local(_) => default(), + Documentation::Builtin(_) => default(), + }, + EntryDocumentation::Placeholder(_) => default(), } } - /// Create documentation for a hard-coded builtin entry. - pub fn builtin(sections: impl IntoIterator) -> Self { - let sections = Rc::new(BuiltinDocumentation::from_doc_sections(sections.into_iter())); - Self::Docs(Documentation::Builtin(sections)) + fn parent_module( + db: &SuggestionDatabase, + entry: &Entry, + ) -> Result { + let defined_in = &entry.defined_in; + let parent_module = db.lookup_by_qualified_name(defined_in); + match parent_module { + Some((id, parent)) => match parent.kind { + Kind::Module => Ok(ModuleDocumentation::new(id, &parent, db) + .map_err(|_| NoParentModule(entry.qualified_name().to_string()))?), + _ => Err(NoParentModule(entry.qualified_name().to_string())), + }, + None => { + error!("Parent module for entry {} not found.", entry.qualified_name()); + Err(NoParentModule(entry.qualified_name().to_string())) + } + } } - fn method_docs( + fn type_docs( db: &SuggestionDatabase, entry: &Entry, - ) -> Result { + entry_id: entry::Id, + ) -> FallibleResult { + let module_docs = Self::parent_module(db, entry)?; + let type_docs = TypeDocumentation::new(entry_id, entry, db)?; + Ok(Documentation::Type { docs: type_docs, module_docs }.into()) + } + + fn 
method_docs(db: &SuggestionDatabase, entry: &Entry) -> FallibleResult { let self_type = match &entry.self_type { Some(self_type) => self_type, None => { @@ -100,26 +235,25 @@ impl EntryDocumentation { return Ok(Placeholder::NoDocumentation.into()); } }; - let return_type = db.lookup_by_qualified_name(self_type); - match return_type { - Some((id, parent)) => { - let name = entry.qualified_name().into(); - match parent.kind { - Kind::Type => { - let type_docs = TypeDocumentation::new(id, &parent, db)?.into(); - Ok(Documentation::Method { name, type_docs }.into()) - } - Kind::Module => { - let module_docs = ModuleDocumentation::new(id, &parent, db)?; - let module_docs = module_docs.into(); - Ok(Documentation::ModuleMethod { name, module_docs }.into()) - } - _ => { - error!("Unexpected parent kind for method {}.", entry.qualified_name()); - Ok(Placeholder::NoDocumentation.into()) - } + let self_type = db.lookup_by_qualified_name(self_type); + match self_type { + Some((id, parent)) => match parent.kind { + Kind::Type => { + let docs = Function::from_entry(entry); + let type_docs = TypeDocumentation::new(id, &parent, db)?; + let module_docs = Self::parent_module(db, &parent)?; + Ok(Documentation::Method { docs, type_docs, module_docs }.into()) } - } + Kind::Module => { + let docs = Function::from_entry(entry); + let module_docs = ModuleDocumentation::new(id, &parent, db)?; + Ok(Documentation::ModuleMethod { docs, module_docs }.into()) + } + _ => { + error!("Unexpected parent kind for method {}.", entry.qualified_name()); + Ok(Placeholder::NoDocumentation.into()) + } + }, None => { error!("Parent entry for method {} not found.", entry.qualified_name()); Ok(Self::Placeholder(Placeholder::NoDocumentation)) @@ -130,15 +264,16 @@ impl EntryDocumentation { fn constructor_docs( db: &SuggestionDatabase, entry: &Entry, - ) -> Result { + ) -> FallibleResult { let return_type = &entry.return_type; let return_type = db.lookup_by_qualified_name(return_type); match return_type { 
Some((id, parent)) => { - let name = entry.qualified_name().into(); - let type_docs = TypeDocumentation::new(id, &parent, db)?.into(); - Ok(Documentation::Constructor { name, type_docs }.into()) + let docs = Function::from_entry(entry); + let type_docs = TypeDocumentation::new(id, &parent, db)?; + let module_docs = Self::parent_module(db, &parent)?; + Ok(Documentation::Constructor { docs, type_docs, module_docs }.into()) } None => { error!("No return type found for constructor {}.", entry.qualified_name()); @@ -166,31 +301,32 @@ pub enum Placeholder { #[derive(Debug, Clone, CloneRef, PartialEq)] #[allow(missing_docs)] pub enum Documentation { - Module(Rc), - Type(Rc), - Constructor { name: Rc, type_docs: Rc }, - Method { name: Rc, type_docs: Rc }, - ModuleMethod { name: Rc, module_docs: Rc }, - Function(Rc), - Local(Rc), - Builtin(Rc), -} - -impl Documentation { - /// Qualified name of the documented function. Functions are part of the documentation for - /// the larger entity, e.g., constructor documentation is embedded into the type - /// documentation. The returned qualified name is used to scroll to the corresponding section in - /// a larger documentation page. - pub fn function_name(&self) -> Option<&QualifiedName> { - match self { - Documentation::Constructor { name, .. } => Some(name), - Documentation::Method { name, .. } => Some(name), - Documentation::ModuleMethod { name, .. 
} => Some(name), - _ => None, - } - } + Module(ModuleDocumentation), + Type { + docs: TypeDocumentation, + module_docs: ModuleDocumentation, + }, + Constructor { + docs: Function, + type_docs: TypeDocumentation, + module_docs: ModuleDocumentation, + }, + Method { + docs: Function, + type_docs: TypeDocumentation, + module_docs: ModuleDocumentation, + }, + ModuleMethod { + docs: Function, + module_docs: ModuleDocumentation, + }, + Function(Function), + Local(LocalDocumentation), + Builtin(BuiltinDocumentation), } + + // ========================= // === TypeDocumentation === // ========================= @@ -242,7 +378,7 @@ impl TypeDocumentation { // =========================== /// Documentation of the [`EntryKind::Module`] entries. -#[derive(Debug, Clone, CloneRef, PartialEq)] +#[derive(Debug, Clone, CloneRef, PartialEq, Eq)] #[allow(missing_docs)] pub struct ModuleDocumentation { pub name: Rc, @@ -271,38 +407,6 @@ impl ModuleDocumentation { -// ============================= -// === FunctionDocumentation === -// ============================= - -/// Documentation of the [`EntryKind::Function`] entries. -#[derive(Debug, Clone, CloneRef, PartialEq)] -#[allow(missing_docs)] -pub struct FunctionDocumentation { - pub name: Rc, - pub arguments: Rc>, - pub tags: Tags, - pub synopsis: Synopsis, - pub examples: Examples, -} - -impl FunctionDocumentation { - /// Constructor. - pub fn from_entry(entry: &Entry) -> Self { - let FilteredDocSections { tags, synopsis, examples } = - FilteredDocSections::new(entry.documentation.iter()); - Self { - name: entry.qualified_name().into(), - arguments: entry.arguments.clone().into(), - tags, - synopsis, - examples, - } - } -} - - - // ========================== // === LocalDocumentation === // ========================== @@ -422,7 +526,7 @@ impl Synopsis { // ============= /// A list of types defined in the module. 
-#[derive(Debug, Clone, CloneRef, PartialEq, Default, Deref)] +#[derive(Debug, Clone, CloneRef, PartialEq, Eq, Default, Deref)] pub struct Types { list: SortedVec, } @@ -663,8 +767,9 @@ mod tests { fn test_documentation_of_constructor() { let db = mock_db(); let name = Rc::new(QualifiedName::from_text("Standard.Base.A.Foo").unwrap()); - let type_docs = a_type().into(); - let expected = Documentation::Constructor { name: name.clone(), type_docs }; + let type_docs = a_type(); + let docs = a_foo_constructor(); + let expected = Documentation::Constructor { docs, type_docs, module_docs: module_docs() }; assert_docs(&db, name, expected); } @@ -676,23 +781,26 @@ mod tests { // === Type method === let name = Rc::new(QualifiedName::from_text("Standard.Base.A.baz").unwrap()); - let type_docs = a_type().into(); - let expected = Documentation::Method { name: name.clone(), type_docs }; + let type_docs = a_type(); + let docs = a_baz_method(); + let expected = + Documentation::Method { docs, type_docs, module_docs: module_docs().clone_ref() }; assert_docs(&db, name, expected); // === Module method === let name = Rc::new(QualifiedName::from_text("Standard.Base.module_method").unwrap()); - let module_docs = module_docs().into(); - let expected = Documentation::ModuleMethod { name: name.clone(), module_docs }; + let module_docs = module_docs(); + let docs = module_method_function(); + let expected = Documentation::ModuleMethod { docs, module_docs }; assert_docs(&db, name, expected); } #[test] fn test_documentation_of_module() { let db = mock_db(); - let expected = Documentation::Module(Rc::new(module_docs())); + let expected = Documentation::Module(module_docs()); let name = Rc::new(QualifiedName::from_text("Standard.Base").unwrap()); assert_docs(&db, name, expected); } @@ -703,7 +811,7 @@ mod tests { // === Type Standard.Base.A === - let expected = Documentation::Type(Rc::new(a_type())); + let expected = Documentation::Type { docs: a_type(), module_docs: module_docs() }; let name = 
QualifiedName::from_text("Standard.Base.A").unwrap(); let (entry_id, _) = db.lookup_by_qualified_name(&name).unwrap(); let docs = EntryDocumentation::new(&db, &entry_id).unwrap(); @@ -711,7 +819,7 @@ mod tests { // === Type Standard.Base.B === - let expected = Documentation::Type(Rc::new(b_type())); + let expected = Documentation::Type { docs: b_type(), module_docs: module_docs() }; let name = Rc::new(QualifiedName::from_text("Standard.Base.B").unwrap()); assert_docs(&db, name, expected); } diff --git a/app/gui/view/documentation/assets/input.css b/app/gui/view/documentation/assets/input.css index 12f860034456..0162fe7b8e3e 100644 --- a/app/gui/view/documentation/assets/input.css +++ b/app/gui/view/documentation/assets/input.css @@ -17,3 +17,7 @@ ul { list-style-type: disc; list-style-position: inside; } + +svg { + pointer-events: none; +} diff --git a/app/gui/view/documentation/src/html.rs b/app/gui/view/documentation/src/html.rs index 802a7c3a1416..0efb7b1c2733 100644 --- a/app/gui/view/documentation/src/html.rs +++ b/app/gui/view/documentation/src/html.rs @@ -14,7 +14,6 @@ use enso_suggestion_database::documentation_ir::Documentation; use enso_suggestion_database::documentation_ir::EntryDocumentation; use enso_suggestion_database::documentation_ir::Examples; use enso_suggestion_database::documentation_ir::Function; -use enso_suggestion_database::documentation_ir::FunctionDocumentation; use enso_suggestion_database::documentation_ir::LocalDocumentation; use enso_suggestion_database::documentation_ir::ModuleDocumentation; use enso_suggestion_database::documentation_ir::Placeholder; @@ -62,14 +61,14 @@ fn svg_icon(content: &'static str) -> impl Render { /// Render entry documentation to HTML code with Tailwind CSS styles. 
#[profile(Detail)] -pub fn render(docs: EntryDocumentation) -> String { +pub fn render(docs: &EntryDocumentation) -> String { let html = match docs { EntryDocumentation::Placeholder(placeholder) => match placeholder { Placeholder::NoDocumentation => String::from("No documentation available."), Placeholder::VirtualComponentGroup { name } => - render_virtual_component_group_docs(name), + render_virtual_component_group_docs(name.clone_ref()), }, - EntryDocumentation::Docs(docs) => render_documentation(docs), + EntryDocumentation::Docs(docs) => render_documentation(docs.clone_ref()), }; match validate_utf8(&html) { Ok(_) => html, @@ -87,17 +86,33 @@ fn validate_utf8(s: &str) -> Result<&str, std::str::Utf8Error> { } fn render_documentation(docs: Documentation) -> String { + let back_link = match &docs { + Documentation::Constructor { type_docs, .. } => Some(BackLink { + displayed: type_docs.name.name().to_owned(), + id: anchor_name(&type_docs.name), + }), + Documentation::Method { type_docs, .. } => Some(BackLink { + displayed: type_docs.name.name().to_owned(), + id: anchor_name(&type_docs.name), + }), + Documentation::ModuleMethod { module_docs, .. } => Some(BackLink { + displayed: module_docs.name.name().to_owned(), + id: anchor_name(&module_docs.name), + }), + Documentation::Type { module_docs, .. } => Some(BackLink { + displayed: module_docs.name.name().to_owned(), + id: anchor_name(&module_docs.name), + }), + _ => None, + }; match docs { - Documentation::Module(module_docs) => render_module_documentation(&module_docs, None), - Documentation::Type(type_docs) => render_type_documentation(&type_docs, None), - Documentation::Function(docs) => render_function_documentation(&docs), + Documentation::Module(module_docs) => render_module_documentation(&module_docs), + Documentation::Type { docs, .. 
} => render_type_documentation(&docs, back_link), + Documentation::Function(docs) => render_function_documentation(&docs, back_link), Documentation::Local(docs) => render_local_documentation(&docs), - Documentation::Constructor { type_docs, name } => - render_type_documentation(&type_docs, Some(&name)), - Documentation::Method { type_docs, name } => - render_type_documentation(&type_docs, Some(&name)), - Documentation::ModuleMethod { module_docs, name } => - render_module_documentation(&module_docs, Some(&name)), + Documentation::Constructor { docs, .. } => render_function_documentation(&docs, back_link), + Documentation::Method { docs, .. } => render_function_documentation(&docs, back_link), + Documentation::ModuleMethod { docs, .. } => render_function_documentation(&docs, back_link), Documentation::Builtin(builtin_docs) => render_builtin_documentation(&builtin_docs), } } @@ -112,6 +127,17 @@ fn render_virtual_component_group_docs(name: ImString) -> String { docs_content(content).into_string().unwrap() } +/// An optional link to the parent entry (module or type), that is displayed in the documentation +/// header. Pressing this link will switch the documentation to the parent entry, allowing +/// bidirectional navigation. +#[derive(Debug, Clone)] +struct BackLink { + /// Displayed text. + displayed: String, + /// The unique ID of the link. + id: String, +} + // === Types === @@ -122,23 +148,20 @@ fn render_virtual_component_group_docs(name: ImString) -> String { /// - Synopsis and a list of constructors. /// - Methods. /// - Examples. 
-fn render_type_documentation( - docs: &TypeDocumentation, - function_name: Option<&QualifiedName>, -) -> String { +fn render_type_documentation(docs: &TypeDocumentation, back_link: Option) -> String { let methods_exist = !docs.methods.is_empty(); let examples_exist = !docs.examples.is_empty(); let name = &docs.name; let arguments = &docs.arguments; let synopsis = &docs.synopsis; let constructors = &docs.constructors; - let synopsis = section_content(type_synopsis(synopsis, constructors, function_name)); - let methods = section_content(list_of_functions(&docs.methods, function_name)); + let synopsis = section_content(type_synopsis(synopsis, constructors)); + let methods = section_content(list_of_functions(&docs.methods)); let examples = section_content(list_of_examples(&docs.examples)); let tags = section_content(list_of_tags(&docs.tags)); let content = owned_html! { - : header(ICON_TYPE, type_header(name.name(), arguments_list(arguments))); + : header(ICON_TYPE, type_header(name.name(), arguments_list(arguments), back_link.as_ref())); : &tags; : &synopsis; @ if methods_exist { @@ -154,8 +177,18 @@ fn render_type_documentation( } /// A header for the type documentation. -fn type_header<'a>(name: &'a str, arguments: impl Render + 'a) -> Box { +fn type_header<'a>( + name: &'a str, + arguments: impl Render + 'a, + back_link: Option<&'a BackLink>, +) -> Box { box_html! { + @ if let Some(BackLink { id, displayed }) = &back_link { + a(id=id, class="text-2xl font-bold text-typeName hover:underline cursor-pointer") { + : displayed; + } + : " :: "; + } span(class="text-2xl font-bold text-typeName") { span { : name } span(class="opacity-34") { : &arguments } @@ -176,7 +209,6 @@ fn methods_header() -> impl Render { fn type_synopsis<'a>( synopsis: &'a Synopsis, constructors: &'a Constructors, - function_name: Option<&'a QualifiedName>, ) -> Box { box_html! 
{ @ for p in synopsis.iter() { @@ -189,86 +221,65 @@ fn type_synopsis<'a>( } ul(class="list-disc list-outside marker:text-typeName") { @ for method in constructors.iter() { - : single_constructor(method, function_name); + : single_constructor(method); } } } } /// A documentation for a single constructor in the list. -fn single_constructor<'a>( - method: &'a Function, - function_name: Option<&'a QualifiedName>, -) -> Box { - let highlight = function_name.map(|n| n == &*method.name).unwrap_or(false); +/// If the first [`DocSection`] is of type [`DocSection::Paragraph`], it is rendered on the first +/// line, after the list of arguments. +fn single_constructor<'a>(constructor: &'a Function) -> Box { + let first = match &constructor.synopsis.as_ref()[..] { + [DocSection::Paragraph { body }, ..] => Some(body), + _ => None, + }; box_html! { - li(id=anchor_name(&method.name)) { - span(class=labels!("text-typeName", "font-bold", "bg-yellow-100" => highlight)) { + li(id=anchor_name(&constructor.name), class="hover:underline cursor-pointer") { + span(class=labels!("text-typeName", "font-bold")) { span(class="opacity-85") { - : method.name.name(); + : constructor.name.name(); } - span(class="opacity-34") { : arguments_list(&method.arguments); } + span(class="opacity-34") { : arguments_list(&constructor.arguments); } + } + @ if let Some(first) = first { + span { : ", "; : Raw(first); } } - : function_docs(method); } } } /// A list of methods defined for the type. -fn list_of_functions<'a>( - functions: &'a [Function], - function_name: Option<&'a QualifiedName>, -) -> Box { +fn list_of_functions<'a>(functions: &'a [Function]) -> Box { box_html! { ul(class="list-disc list-inside") { @ for f in functions.iter() { - : single_function(f, function_name); + : single_function(f); } } } } /// A documentation for a single method in the list. 
-fn single_function<'a>( - function: &'a Function, - function_name: Option<&'a QualifiedName>, -) -> Box { - let highlight = function_name.map(|n| n == &*function.name).unwrap_or(false); +/// If the first [`DocSection`] is of type [`DocSection::Paragraph`], it is rendered on the first +/// line, after the list of arguments. +fn single_function<'a>(function: &'a Function) -> Box { + let first = match &function.synopsis.as_ref()[..] { + [DocSection::Paragraph { body }, ..] => Some(body), + _ => None, + }; box_html! { - li(id=anchor_name(&function.name)) { - span(class=labels!("text-methodName", "font-semibold", "bg-yellow-100" => highlight)) { + li(id=anchor_name(&function.name), class="hover:underline cursor-pointer") { + span(class=labels!("text-methodName", "font-semibold")) { span(class="opacity-85") { : function.name.name(); } span(class="opacity-34") { : arguments_list(&function.arguments); } } - : function_docs(function); - } - } -} - -/// Synopsis of a function. If the first [`DocSection`] is of type -/// [`DocSection::Paragraph`], it is rendered on the first line, after the list of arguments. All -/// other sections are rendered as separate paragraphs below. Examples for the function are rendered -/// below the main part of the documentation in a separate subsection. -fn function_docs<'a>(constructor: &'a Function) -> Box { - let (first, rest) = match &constructor.synopsis.as_ref()[..] { - [DocSection::Paragraph { body }, rest @ ..] => (Some(body), rest), - [_, rest @ ..] => (None, rest), - [] => (None, default()), - }; - let tags = list_of_tags(&constructor.tags); - box_html! 
{ - @ if let Some(first) = first { - span { : ", "; : Raw(first); } - } - : &tags; - @ for p in rest { - : paragraph(p); - } - @ if !constructor.examples.is_empty() { - h2(class="font-semibold") { : "Examples" } - : list_of_examples(&constructor.examples); + @ if let Some(first) = first { + span { : ", "; : Raw(first); } + } } } } @@ -284,17 +295,14 @@ fn function_docs<'a>(constructor: &'a Function) -> Box { /// - Types. /// - Functions. /// - Examples. -fn render_module_documentation( - docs: &ModuleDocumentation, - function_name: Option<&QualifiedName>, -) -> String { +fn render_module_documentation(docs: &ModuleDocumentation) -> String { let types_exist = !docs.types.is_empty(); let methods_exist = !docs.methods.is_empty(); let examples_exist = !docs.examples.is_empty(); let name = &docs.name; let synopsis = section_content(module_synopsis(&docs.synopsis)); let types = section_content(list_of_types(&docs.types)); - let methods = section_content(list_of_functions(&docs.methods, function_name)); + let methods = section_content(list_of_functions(&docs.methods)); let examples = section_content(list_of_examples(&docs.examples)); let tags = section_content(list_of_tags(&docs.tags)); let content = owned_html! { @@ -331,7 +339,7 @@ fn list_of_types<'a>(types: &'a Types) -> Box { /// A single type in the list. fn single_type<'a>(type_: &'a TypeDocumentation) -> Box { box_html! { - li(id=anchor_name(&type_.name), class="text-typeName font-semibold") { + li(id=anchor_name(&type_.name), class="text-typeName font-semibold hover:underline cursor-pointer") { span(class="opacity-85") { : type_.name.name(); } @@ -402,15 +410,15 @@ fn module_synopsis<'a>(synopsis: &'a Synopsis) -> Box { // === Functions === /// Render documentation of a function. -fn render_function_documentation(docs: &FunctionDocumentation) -> String { - let FunctionDocumentation { name, arguments, synopsis, tags, .. 
} = docs; +fn render_function_documentation(docs: &Function, back_link: Option) -> String { + let Function { name, arguments, synopsis, tags, .. } = docs; let examples_exist = !docs.examples.is_empty(); let synopsis = section_content(function_synopsis(synopsis)); let tags = section_content(list_of_tags(tags)); let examples = section_content(list_of_examples(&docs.examples)); let content = owned_html! { - : header(ICON_TYPE, function_header(name.name(), arguments_list(arguments))); + : header(ICON_TYPE, function_header(name.name(), arguments_list(arguments), back_link.as_ref())); : &tags; : &synopsis; @ if examples_exist { @@ -422,8 +430,18 @@ fn render_function_documentation(docs: &FunctionDocumentation) -> String { } /// A header for the function documentation. -fn function_header<'a>(name: &'a str, arguments: impl Render + 'a) -> Box { +fn function_header<'a>( + name: &'a str, + arguments: impl Render + 'a, + back_link: Option<&'a BackLink>, +) -> Box { box_html! { + @ if let Some(BackLink { id, displayed }) = &back_link { + a(id=id, class="text-2xl font-bold text-typeName hover:underline cursor-pointer") { + : displayed; + } + : " :: "; + } span(class="text-2xl font-bold text-typeName") { span { : name } span(class="opacity-34") { : &arguments } diff --git a/app/gui/view/documentation/src/lib.rs b/app/gui/view/documentation/src/lib.rs index 33799f27acc2..75a44d8c84ac 100644 --- a/app/gui/view/documentation/src/lib.rs +++ b/app/gui/view/documentation/src/lib.rs @@ -40,6 +40,7 @@ use ensogl::system::web::traits::*; use enso_frp as frp; use enso_suggestion_database::documentation_ir::EntryDocumentation; +use enso_suggestion_database::documentation_ir::LinkedDocPage; use ensogl::animation::physics::inertia::Spring; use ensogl::application::Application; use ensogl::data::color; @@ -55,8 +56,6 @@ use ensogl_derive_theme::FromTheme; use ensogl_hardcoded_theme::application::component_browser::documentation as theme; use graph_editor::component::visualization; use 
ide_view_graph_editor as graph_editor; -use web::HtmlElement; -use web::JsCast; // ============== @@ -110,6 +109,7 @@ pub struct Model { /// to EnsoGL shapes, and pass them to the DOM instead. overlay: overlay::View, display_object: display::object::Instance, + event_handlers: Rc>>, } impl Model { @@ -150,7 +150,15 @@ impl Model { scene.dom.layers.node_searcher.manage(&inner_dom); scene.dom.layers.node_searcher.manage(&caption_dom); - Model { outer_dom, inner_dom, caption_dom, overlay, display_object }.init() + Model { + outer_dom, + inner_dom, + caption_dom, + overlay, + display_object, + event_handlers: default(), + } + .init() } fn init(self) -> Self { @@ -173,18 +181,34 @@ impl Model { } /// Display the documentation and scroll to the qualified name if needed. - fn display_doc(&self, docs: EntryDocumentation) { - let anchor = docs.function_name().map(html::anchor_name); - let html = html::render(docs); + fn display_doc(&self, docs: EntryDocumentation, display_doc: &frp::Source) { + let linked_pages = docs.linked_doc_pages(); + let html = html::render(&docs); self.inner_dom.dom().set_inner_html(&html); - if let Some(anchor) = anchor { + self.set_link_handlers(linked_pages, display_doc); + // Scroll to the top of the page. + self.inner_dom.dom().set_scroll_top(0); + } + + /// Setup event handlers for links on the documentation page. 
+ fn set_link_handlers( + &self, + linked_pages: Vec, + display_doc: &frp::Source, + ) { + let new_handlers = linked_pages.into_iter().filter_map(|page| { + let content = page.page.clone_ref(); + let anchor = html::anchor_name(&page.name); if let Some(element) = web::document.get_element_by_id(&anchor) { - let offset = element.dyn_ref::().map(|e| e.offset_top()).unwrap_or(0); - self.inner_dom.dom().set_scroll_top(offset); + let closure: web::JsEventHandler = web::Closure::new(f_!([display_doc, content] { + display_doc.emit(content.clone_ref()); + })); + Some(web::add_event_listener(&element, "click", closure)) + } else { + None } - } else { - self.inner_dom.dom().set_scroll_top(0); - } + }); + let _ = self.event_handlers.replace(new_handlers.collect()); } /// Load an HTML file into the documentation view when user is waiting for data to be received. @@ -300,7 +324,11 @@ impl View { docs <+ frp.display_documentation; display_delay.restart <+ frp.display_documentation.constant(DISPLAY_DELAY_MS); display_docs <- display_delay.on_expired.map2(&docs,|_,docs| docs.clone_ref()); - eval display_docs((docs) model.display_doc(docs.clone_ref())); + display_docs_callback <- source(); + display_docs <- any(&display_docs, &display_docs_callback); + eval display_docs([model, display_docs_callback] + (docs) model.display_doc(docs.clone_ref(), &display_docs_callback) + ); // === Hovered item preview caption === diff --git a/lib/rust/ensogl/core/src/application.rs b/lib/rust/ensogl/core/src/application.rs index 0e1d3b5758fd..54cae9475224 100644 --- a/lib/rust/ensogl/core/src/application.rs +++ b/lib/rust/ensogl/core/src/application.rs @@ -106,7 +106,8 @@ impl Application { let data = &self.inner; let network = self.frp.network(); enso_frp::extend! 
{ network - eval self.display.default_scene.frp.focused ((t) data.show_system_cursor(!t)); + app_focused <- self.display.default_scene.frp.focused.on_change(); + eval app_focused((t) data.show_system_cursor(!t)); frp.private.output.tooltip <+ frp.private.input.set_tooltip; eval_ frp.private.input.show_system_cursor(data.show_system_cursor(true)); eval_ frp.private.input.hide_system_cursor(data.show_system_cursor(false)); From 4e9f02258ed33a7d1b98a0a5e3805bd5514c22f7 Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Fri, 2 Jun 2023 08:26:01 +0100 Subject: [PATCH 12/39] Suggestion should contain a list of annotations (#6924) close #6847 Add annotation names to suggestions. --- .../protocol-language-server.md | 6 + .../search/SearchProtocol.scala | 1 + .../languageserver/search/Suggestions.scala | 15 +- .../search/SuggestionsHandlerSpec.scala | 9 +- .../json/SuggestionsHandlerEventsTest.scala | 6 +- .../scala/org/enso/polyglot/Suggestion.scala | 19 + .../RuntimeSuggestionUpdatesTest.scala | 51 ++- .../compiler/context/SuggestionBuilder.scala | 43 +- .../test/context/SuggestionBuilderTest.scala | 430 ++++++++++++++---- .../enso/searcher/sql/SuggestionRandom.scala | 48 +- .../searcher/sql/SqlSuggestionsRepo.scala | 4 + .../searcher/sql/SuggestionsRepoTest.scala | 9 +- 12 files changed, 476 insertions(+), 165 deletions(-) diff --git a/docs/language-server/protocol-language-server.md b/docs/language-server/protocol-language-server.md index e12c9124cbd6..e09e1281c28c 100644 --- a/docs/language-server/protocol-language-server.md +++ b/docs/language-server/protocol-language-server.md @@ -575,6 +575,9 @@ interface Constructor { /** The documentation string. */ documentation?: string; + + /** The list of annotations. */ + annotations: string[]; } interface Method { @@ -604,6 +607,9 @@ interface Method { /** The documentation string. */ documentation?: string; + + /** The list of annotations. 
*/ + annotations: string[]; } interface Function { diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/search/SearchProtocol.scala b/engine/language-server/src/main/scala/org/enso/languageserver/search/SearchProtocol.scala index a879f07fc1e7..952fc013cd94 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/search/SearchProtocol.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/search/SearchProtocol.scala @@ -135,6 +135,7 @@ object SearchProtocol { conversion.returnType, isStatic = false, conversion.documentation, + Seq(), conversion.reexport ) } diff --git a/engine/language-server/src/test/scala/org/enso/languageserver/search/Suggestions.scala b/engine/language-server/src/test/scala/org/enso/languageserver/search/Suggestions.scala index 7d3261a12ab5..e29c662496fa 100644 --- a/engine/language-server/src/test/scala/org/enso/languageserver/search/Suggestions.scala +++ b/engine/language-server/src/test/scala/org/enso/languageserver/search/Suggestions.scala @@ -45,7 +45,8 @@ object Suggestions { name = "MyType", arguments = Vector(Suggestion.Argument("a", "Any", false, false, None)), returnType = "MyAtom", - documentation = Some(comment.atom) + documentation = Some(comment.atom), + annotations = Seq("a") ) val method: Suggestion.Method = Suggestion.Method( @@ -59,7 +60,8 @@ object Suggestions { selfType = "MyType", returnType = "Number", isStatic = false, - documentation = Some("Lovely") + documentation = Some("Lovely"), + annotations = Seq("foo") ) val function: Suggestion.Function = Suggestion.Function( @@ -98,7 +100,8 @@ object Suggestions { selfType = "Any", returnType = "Any", isStatic = false, - documentation = Some("Lovely") + documentation = Some("Lovely"), + annotations = Seq() ) val methodOnNumber: Suggestion.Method = Suggestion.Method( @@ -111,7 +114,8 @@ object Suggestions { selfType = "Number", returnType = "Number", isStatic = false, - documentation = None + documentation = None, + 
annotations = Seq() ) val methodOnInteger: Suggestion.Method = Suggestion.Method( @@ -124,7 +128,8 @@ object Suggestions { selfType = "Integer", returnType = "Number", isStatic = false, - documentation = Some("Blah, blah") + documentation = Some("Blah, blah"), + annotations = Seq() ) val all = Seq( diff --git a/engine/language-server/src/test/scala/org/enso/languageserver/search/SuggestionsHandlerSpec.scala b/engine/language-server/src/test/scala/org/enso/languageserver/search/SuggestionsHandlerSpec.scala index 1c010a62c5dc..d3fd7c6601ef 100644 --- a/engine/language-server/src/test/scala/org/enso/languageserver/search/SuggestionsHandlerSpec.scala +++ b/engine/language-server/src/test/scala/org/enso/languageserver/search/SuggestionsHandlerSpec.scala @@ -357,7 +357,8 @@ class SuggestionsHandlerSpec name = "Foo", arguments = Vector(), returnType = moduleName, - documentation = None + documentation = None, + annotations = Seq() ) val module = Suggestion.Module( module = moduleName, @@ -1083,7 +1084,8 @@ class SuggestionsHandlerSpec Suggestion.Argument("b", "Any", false, false, None) ), returnType = "Pair", - documentation = Some("Awesome") + documentation = Some("Awesome"), + annotations = Seq() ) val method: Suggestion.Method = @@ -1095,7 +1097,8 @@ class SuggestionsHandlerSpec selfType = "Test.Main", returnType = "IO", isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ) } diff --git a/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/SuggestionsHandlerEventsTest.scala b/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/SuggestionsHandlerEventsTest.scala index f197a4dd36fc..0c71dbc34e04 100644 --- a/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/SuggestionsHandlerEventsTest.scala +++ b/engine/language-server/src/test/scala/org/enso/languageserver/websocket/json/SuggestionsHandlerEventsTest.scala @@ -112,7 +112,8 @@ class SuggestionsHandlerEventsTest 
extends BaseServerTest with FlakySpec { } ], "returnType" : "MyAtom", - "documentation" : " PRIVATE\n\n A key-value store. This type assumes all keys are pairwise comparable,\n using the `<`, `>` and `==` operators.\n\n Arguments:\n - one: The first.\n - two_three: The *second*.\n\n ? Info\n Here is a thing." + "documentation" : " PRIVATE\n\n A key-value store. This type assumes all keys are pairwise comparable,\n using the `<`, `>` and `==` operators.\n\n Arguments:\n - one: The first.\n - two_three: The *second*.\n\n ? Info\n Here is a thing.", + "annotations" : ["a"] } } ], @@ -182,7 +183,8 @@ class SuggestionsHandlerEventsTest extends BaseServerTest with FlakySpec { "selfType" : "MyType", "returnType" : "Number", "isStatic" : false, - "documentation" : "Lovely" + "documentation" : "Lovely", + "annotations" : ["foo"] } } ], diff --git a/engine/polyglot-api/src/main/scala/org/enso/polyglot/Suggestion.scala b/engine/polyglot-api/src/main/scala/org/enso/polyglot/Suggestion.scala index 72d89ce0deb8..ffc078016412 100644 --- a/engine/polyglot-api/src/main/scala/org/enso/polyglot/Suggestion.scala +++ b/engine/polyglot-api/src/main/scala/org/enso/polyglot/Suggestion.scala @@ -137,6 +137,21 @@ object Suggestion { } } + /** Annotations extractor. */ + object Annotations { + + def apply(suggestion: Suggestion): Seq[String] = + suggestion match { + case _: Module => Seq() + case _: Type => Seq() + case constructor: Constructor => constructor.annotations + case method: Method => method.annotations + case _: Conversion => Seq() + case _: Function => Seq() + case _: Local => Seq() + } + } + /** An argument of an atom or a function. 
* * @param name the argument name @@ -283,6 +298,7 @@ object Suggestion { * @param arguments the list of arguments * @param returnType the type of an atom * @param documentation the documentation string + * @param annotations the list of annotations * @param reexport the module re-exporting this atom */ case class Constructor( @@ -292,6 +308,7 @@ object Suggestion { arguments: Seq[Argument], returnType: String, documentation: Option[String], + annotations: Seq[String], reexport: Option[String] = None ) extends Suggestion with ToLogString { @@ -323,6 +340,7 @@ object Suggestion { * @param returnType the return type of a method * @param isStatic the flag indicating whether a method is static or instance * @param documentation the documentation string + * @param annotations the list of annotations * @param reexport the module re-exporting this method */ case class Method( @@ -334,6 +352,7 @@ object Suggestion { returnType: String, isStatic: Boolean, documentation: Option[String], + annotations: Seq[String], reexport: Option[String] = None ) extends Suggestion with ToLogString { diff --git a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeSuggestionUpdatesTest.scala b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeSuggestionUpdatesTest.scala index 38b5d8884469..9369abdadc5a 100644 --- a/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeSuggestionUpdatesTest.scala +++ b/engine/runtime-with-instruments/src/test/scala/org/enso/interpreter/test/instrument/RuntimeSuggestionUpdatesTest.scala @@ -169,7 +169,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -218,7 +219,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Modify() ), @@ -289,7 +291,8 @@ class 
RuntimeSuggestionUpdatesTest "Enso_Test.Test.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Modify() ), @@ -380,7 +383,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Modify() ), @@ -479,7 +483,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Modify() ), @@ -555,7 +560,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -615,7 +621,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Modify( None, @@ -716,7 +723,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -773,7 +781,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Foo.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -870,7 +879,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -914,7 +924,8 @@ class RuntimeSuggestionUpdatesTest ConstantsGen.TEXT, ConstantsGen.ANY, false, - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -940,7 +951,8 @@ class RuntimeSuggestionUpdatesTest ConstantsGen.NUMBER, ConstantsGen.ANY, false, - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -1059,7 +1071,8 @@ class RuntimeSuggestionUpdatesTest .Argument("a", ConstantsGen.ANY, false, false, None) ), "Enso_Test.Test.A.MyType", - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -1084,7 +1097,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.A.MyType", ConstantsGen.ANY, false, - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -1108,7 +1122,8 @@ class RuntimeSuggestionUpdatesTest 
ConstantsGen.INTEGER, ConstantsGen.ANY, false, - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -1132,7 +1147,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.A", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Add() ), @@ -1181,7 +1197,8 @@ class RuntimeSuggestionUpdatesTest "Enso_Test.Test.Main", ConstantsGen.ANY, true, - None + None, + Seq() ), Api.SuggestionAction.Add() ), diff --git a/engine/runtime/src/main/scala/org/enso/compiler/context/SuggestionBuilder.scala b/engine/runtime/src/main/scala/org/enso/compiler/context/SuggestionBuilder.scala index cc8d43819f7d..cc21a97e5af5 100644 --- a/engine/runtime/src/main/scala/org/enso/compiler/context/SuggestionBuilder.scala +++ b/engine/runtime/src/main/scala/org/enso/compiler/context/SuggestionBuilder.scala @@ -5,6 +5,7 @@ import org.enso.compiler.core.IR import org.enso.compiler.data.BindingsMap import org.enso.compiler.pass.resolve.{ DocumentationComments, + GenericAnnotations, MethodDefinitions, TypeNames, TypeSignatures @@ -77,7 +78,7 @@ final class SuggestionBuilder[A: IndexedSource]( case data @ IR.Module.Scope.Definition.Data( name, arguments, - _, + annotations, _, _, _ @@ -87,6 +88,7 @@ final class SuggestionBuilder[A: IndexedSource]( tpName.name, name.name, arguments, + annotations, data.getMetadata(DocumentationComments).map(_.documentation) ) } @@ -108,6 +110,7 @@ final class SuggestionBuilder[A: IndexedSource]( _ ) if !m.isStaticWrapperForInstanceMethod => val typeSignature = ir.getMetadata(TypeSignatures) + val annotations = ir.getMetadata(GenericAnnotations) val (selfTypeOpt, isStatic) = typePtr match { case Some(typePtr) => val selfType = typePtr @@ -126,7 +129,8 @@ final class SuggestionBuilder[A: IndexedSource]( isStatic, args, doc, - typeSignature + typeSignature, + annotations ) } val subforest = go( @@ -224,11 +228,14 @@ final class SuggestionBuilder[A: IndexedSource]( isStatic: Boolean, args: Seq[IR.DefinitionArgument], doc: Option[String], - 
typeSignature: Option[TypeSignatures.Metadata] + typeSignature: Option[TypeSignatures.Metadata], + genericAnnotations: Option[GenericAnnotations.Metadata] ): Suggestion.Method = { val typeSig = buildTypeSignatureFromMetadata(typeSignature) val (methodArgs, returnTypeDef) = buildMethodArguments(args, typeSig, selfType) + val annotations = + genericAnnotations.map(buildAnnotationsFromMetadata).getOrElse(Seq()) Suggestion.Method( externalId = externalId, module = module.toString, @@ -237,7 +244,8 @@ final class SuggestionBuilder[A: IndexedSource]( selfType = selfType.toString, returnType = buildReturnType(returnTypeDef), isStatic = isStatic, - documentation = doc + documentation = doc, + annotations = annotations ) } @@ -345,6 +353,7 @@ final class SuggestionBuilder[A: IndexedSource]( tp: String, name: String, arguments: Seq[IR.DefinitionArgument], + genericAnnotations: Seq[IR.Name.GenericAnnotation], doc: Option[String] ): Suggestion.Constructor = Suggestion.Constructor( @@ -353,7 +362,8 @@ final class SuggestionBuilder[A: IndexedSource]( name = name, arguments = arguments.map(buildArgument), returnType = module.createChild(tp).toString, - documentation = doc + documentation = doc, + annotations = genericAnnotations.map(_.name) ) /** Build getter methods from atom arguments. 
*/ @@ -371,14 +381,15 @@ final class SuggestionBuilder[A: IndexedSource]( location = None ) buildMethod( - externalId = None, - module = module, - name = getterName, - selfType = module.createChild(typeName), - isStatic = false, - args = Seq(thisArg), - doc = None, - typeSignature = argument.name.getMetadata(TypeSignatures) + externalId = None, + module = module, + name = getterName, + selfType = module.createChild(typeName), + isStatic = false, + args = Seq(thisArg), + doc = None, + typeSignature = argument.name.getMetadata(TypeSignatures), + genericAnnotations = None ) } @@ -420,6 +431,12 @@ final class SuggestionBuilder[A: IndexedSource]( TypeArg.Value(resolvedName.qualifiedName) } + /** Build annotations from metadata. */ + private def buildAnnotationsFromMetadata( + genericAnnotations: GenericAnnotations.Metadata + ): Seq[String] = + genericAnnotations.annotations.map(_.name) + /** Build type signature from the ir metadata. * * @param typeSignature the type signature metadata diff --git a/engine/runtime/src/test/scala/org/enso/compiler/test/context/SuggestionBuilderTest.scala b/engine/runtime/src/test/scala/org/enso/compiler/test/context/SuggestionBuilderTest.scala index e44387817c2b..5bcb62cea2d6 100644 --- a/engine/runtime/src/test/scala/org/enso/compiler/test/context/SuggestionBuilderTest.scala +++ b/engine/runtime/src/test/scala/org/enso/compiler/test/context/SuggestionBuilderTest.scala @@ -79,7 +79,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -110,7 +111,43 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = Some(" The foo") + documentation = Some(" The foo"), + annotations = Seq() + ), + Vector() + ) + ) + ) + } + + "build method with annotations" 
in { + + val code = + """@a foo + |@b bar + |foo a b = a + b""".stripMargin + val module = code.preprocessModule + + build(code, module) shouldEqual Tree.Root( + Vector( + ModuleNode, + Tree.Node( + Suggestion.Method( + externalId = None, + module = "Unnamed.Test", + name = "foo", + arguments = Seq( + Suggestion.Argument("self", "Unnamed.Test", false, false, None), + Suggestion + .Argument("a", SuggestionBuilder.Any, false, false, None), + Suggestion + .Argument("b", SuggestionBuilder.Any, false, false, None) + ), + selfType = "Unnamed.Test", + returnType = SuggestionBuilder.Any, + isStatic = true, + documentation = None, + annotations = Seq("a", "b") ), Vector() ) @@ -142,7 +179,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = "Number", isStatic = true, - documentation = Some(" The foo") + documentation = Some(" The foo"), + annotations = Seq() ), Vector() ) @@ -172,7 +210,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = "Foo.Bar", isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -211,7 +250,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = "Standard.Base.Data.Numbers.Number", isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -248,7 +288,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = "Number", isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -279,7 +320,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = "Foo.Bar Baz", isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -315,7 +357,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with 
Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -375,7 +418,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -423,7 +467,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.MyType", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -483,7 +528,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.MyAtom", returnType = "Number", isStatic = false, - documentation = Some(" My bar") + documentation = Some(" My bar"), + annotations = Seq() ), Vector() ) @@ -529,7 +575,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.MyAtom", returnType = "Number", isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -573,7 +620,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "Variant_1", arguments = Seq(), returnType = "Unnamed.Test.My_Atom", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -584,7 +632,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "Variant_2", arguments = Seq(), returnType = "Unnamed.Test.My_Atom", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -633,7 +682,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.Other_Atom", returnType = "Number", isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -680,7 +730,8 @@ class SuggestionBuilderTest 
extends AnyWordSpecLike with Matchers { name = "A", arguments = Seq(), returnType = "Unnamed.Test.Value", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -691,7 +742,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "B", arguments = Seq(), returnType = "Unnamed.Test.Value", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -734,7 +786,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = "Any", isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -764,7 +817,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -815,7 +869,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = "Unnamed.Test.A", isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -865,7 +920,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.MyType", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -929,7 +985,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { .Argument("a", SuggestionBuilder.Any, false, false, None) ), returnType = "Unnamed.Test.MyMaybe", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -945,7 +1002,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.MyMaybe", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -956,7 +1014,8 @@ class 
SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "None", arguments = Seq(), returnType = "Unnamed.Test.None", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -982,7 +1041,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { .Argument("x", SuggestionBuilder.Any, false, false, None) ), returnType = "Unnamed.Test.New", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -998,7 +1058,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.New", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -1041,7 +1102,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -1091,7 +1153,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -1155,7 +1218,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -1217,7 +1281,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -1273,7 +1338,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, 
+ annotations = Seq() ), Vector( Tree.Node( @@ -1327,7 +1393,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -1373,7 +1440,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -1433,7 +1501,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -1492,7 +1561,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -1537,7 +1607,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -1594,7 +1665,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { .Argument("b", SuggestionBuilder.Any, false, false, None) ), returnType = "Unnamed.Test.MyType", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -1610,7 +1682,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.MyType", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -1626,7 +1699,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = 
"Unnamed.Test.MyType", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -1672,7 +1746,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { .Argument("b", SuggestionBuilder.Any, false, false, None) ), returnType = "Unnamed.Test.Mtp", - documentation = Some(" My sweet atom") + documentation = Some(" My sweet atom"), + annotations = Seq() ), Vector() ), @@ -1688,7 +1763,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.Mtp", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -1704,7 +1780,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.Mtp", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -1742,7 +1819,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "Nothing", arguments = Seq(), returnType = "Unnamed.Test.Maybe", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -1756,7 +1834,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { .Argument("a", SuggestionBuilder.Any, false, false, None) ), returnType = "Unnamed.Test.Maybe", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -1772,7 +1851,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.Maybe", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -1815,7 +1895,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "Nothing", arguments = Seq(), returnType = "Unnamed.Test.Maybe", - documentation = Some(" Nothing here") + documentation = Some(" Nothing here"), 
+ annotations = Seq() ), Vector() ), @@ -1829,7 +1910,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { .Argument("a", SuggestionBuilder.Any, false, false, None) ), returnType = "Unnamed.Test.Maybe", - documentation = Some(" Something there") + documentation = Some(" Something there"), + annotations = Seq() ), Vector() ), @@ -1845,7 +1927,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.Maybe", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -1889,7 +1972,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "Cons", arguments = Seq(), returnType = "Unnamed.Test.List", - documentation = Some(" And more") + documentation = Some(" And more"), + annotations = Seq() ), Vector() ), @@ -1900,7 +1984,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "Nil", arguments = Seq(), returnType = "Unnamed.Test.List", - documentation = Some(" End") + documentation = Some(" End"), + annotations = Seq() ), Vector() ), @@ -1916,7 +2001,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.List", returnType = "Unnamed.Test.List", isStatic = true, - documentation = Some(" a method") + documentation = Some(" a method"), + annotations = Seq() ), Vector() ) @@ -1957,7 +2043,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "Nothing", arguments = Seq(), returnType = "Unnamed.Test.Maybe", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -1971,7 +2058,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { .Argument("a", SuggestionBuilder.Any, false, false, None) ), returnType = "Unnamed.Test.Maybe", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -1987,7 +2075,8 @@ class SuggestionBuilderTest extends 
AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.Maybe", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2005,7 +2094,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.Maybe", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -2047,7 +2137,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "X", arguments = Seq(), returnType = "Unnamed.Test.S", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2058,7 +2149,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "Y", arguments = Seq(), returnType = "Unnamed.Test.S", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2090,7 +2182,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { ) ), returnType = "Unnamed.Test.T", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2106,9 +2199,123 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.T", returnType = "Unnamed.Test.S", isStatic = false, + documentation = None, + annotations = Seq() + ), + Vector() + ) + ) + ) + } + + "build type with constructors with annotations" in { + + val code = + """type S + | @a foo + | @b bar + | X a b + | Y c + |""".stripMargin + val module = code.preprocessModule + + build(code, module) shouldEqual Tree.Root( + Vector( + ModuleNode, + Tree.Node( + Suggestion.Type( + externalId = None, + module = "Unnamed.Test", + name = "S", + params = Seq(), + returnType = "Unnamed.Test.S", + parentType = Some(SuggestionBuilder.Any), documentation = None ), Vector() + ), + Tree.Node( + Suggestion.Constructor( + externalId = None, + module = "Unnamed.Test", + name = "X", + arguments = Seq( + 
Suggestion + .Argument("a", SuggestionBuilder.Any, false, false, None), + Suggestion + .Argument("b", SuggestionBuilder.Any, false, false, None) + ), + returnType = "Unnamed.Test.S", + documentation = None, + annotations = Seq("a", "b") + ), + Vector() + ), + Tree.Node( + Suggestion.Constructor( + externalId = None, + module = "Unnamed.Test", + name = "Y", + arguments = Seq( + Suggestion + .Argument("c", SuggestionBuilder.Any, false, false, None) + ), + returnType = "Unnamed.Test.S", + documentation = None, + annotations = Seq() + ), + Vector() + ), + Tree.Node( + Suggestion.Method( + externalId = None, + module = "Unnamed.Test", + name = "a", + arguments = Seq( + Suggestion + .Argument("self", "Unnamed.Test.S", false, false, None) + ), + selfType = "Unnamed.Test.S", + returnType = SuggestionBuilder.Any, + isStatic = false, + documentation = None, + annotations = Seq() + ), + Vector() + ), + Tree.Node( + Suggestion.Method( + externalId = None, + module = "Unnamed.Test", + name = "b", + arguments = Seq( + Suggestion + .Argument("self", "Unnamed.Test.S", false, false, None) + ), + selfType = "Unnamed.Test.S", + returnType = SuggestionBuilder.Any, + isStatic = false, + documentation = None, + annotations = Seq() + ), + Vector() + ), + Tree.Node( + Suggestion.Method( + externalId = None, + module = "Unnamed.Test", + name = "c", + arguments = Seq( + Suggestion + .Argument("self", "Unnamed.Test.S", false, false, None) + ), + selfType = "Unnamed.Test.S", + returnType = SuggestionBuilder.Any, + isStatic = false, + documentation = None, + annotations = Seq() + ), + Vector() ) ) ) @@ -2155,7 +2362,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { ) ), returnType = "Unnamed.Test.T", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2171,7 +2379,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.T", returnType = "Standard.Base.Data.Numbers.Number", isStatic = false, - 
documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -2218,7 +2427,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { Suggestion.Argument("x", "a", false, false, None) ), returnType = "Unnamed.Test.E", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2231,7 +2441,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { Suggestion.Argument("y", "b", false, false, None) ), returnType = "Unnamed.Test.E", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2247,7 +2458,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.E", returnType = "a", isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2263,7 +2475,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.E", returnType = "b", isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -2370,7 +2583,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { .Argument("b", SuggestionBuilder.Any, false, false, None) ), returnType = "Unnamed.Test.MyType", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2386,7 +2600,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.MyType", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2402,7 +2617,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.MyType", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2415,7 +2631,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = 
SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -2456,7 +2673,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { .Argument("a", SuggestionBuilder.Any, false, false, None) ), returnType = "Unnamed.Test.Test", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2472,7 +2690,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.Test", returnType = SuggestionBuilder.Any, isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2485,7 +2704,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -2532,16 +2752,17 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "Mk_A", arguments = List(), returnType = "Unnamed.Test.A", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), Tree.Node( Suggestion.Constructor( - None, - "Unnamed.Test", - "Mk_A_Plus", - List( + externalId = None, + module = "Unnamed.Test", + name = "Mk_A_Plus", + arguments = List( Suggestion.Argument( "a", "Standard.Base.Any.Any", @@ -2551,26 +2772,26 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { None ) ), - "Unnamed.Test.A", - None, - None + returnType = "Unnamed.Test.A", + documentation = None, + annotations = Seq() ), Vector() ), Tree.Node( Suggestion.Method( - None, - "Unnamed.Test", - "a", - Vector( + externalId = None, + module = "Unnamed.Test", + name = "a", + arguments = Vector( Suggestion .Argument("self", "Unnamed.Test.A", false, false, None, None) ), - "Unnamed.Test.A", - "Standard.Base.Any.Any", - false, - None, - None + selfType = "Unnamed.Test.A", + returnType = "Standard.Base.Any.Any", + isStatic = false, + 
documentation = None, + annotations = Seq() ), Vector() ), @@ -2594,7 +2815,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.A", returnType = "Unnamed.Test.A", isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2617,7 +2839,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = "Unnamed.Test.A", isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2630,7 +2853,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -2676,7 +2900,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { name = "Mk_A", arguments = List(), returnType = "Unnamed.Test.A", - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2700,7 +2925,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test.A", returnType = "Unnamed.Test.A", isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2723,7 +2949,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = "Unnamed.Test.A", isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ), @@ -2736,7 +2963,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector() ) @@ -2768,7 +2996,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None 
+ documentation = None, + annotations = Seq() ), Vector() ) @@ -2801,7 +3030,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -2855,7 +3085,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ), Vector( Tree.Node( @@ -2909,7 +3140,8 @@ class SuggestionBuilderTest extends AnyWordSpecLike with Matchers { selfType = "Unnamed.Test", returnType = SuggestionBuilder.Any, isStatic = true, - documentation = Some(" The foo") + documentation = Some(" The foo"), + annotations = Seq() ), Vector() ) diff --git a/lib/scala/searcher/src/bench/scala/org/enso/searcher/sql/SuggestionRandom.scala b/lib/scala/searcher/src/bench/scala/org/enso/searcher/sql/SuggestionRandom.scala index 64834002aa4f..cf88e6d78efa 100644 --- a/lib/scala/searcher/src/bench/scala/org/enso/searcher/sql/SuggestionRandom.scala +++ b/lib/scala/searcher/src/bench/scala/org/enso/searcher/sql/SuggestionRandom.scala @@ -28,41 +28,43 @@ object SuggestionRandom { def nextSuggestionModule(): Suggestion.Module = Suggestion.Module( - module = nextString(), - documentation = optional(nextString()) + module = nextString(), + documentation = optional(nextString()) ) def nextSuggestionType(): Suggestion.Type = Suggestion.Type( - externalId = optional(UUID.randomUUID()), - module = "Test.Main", - name = nextString(), - params = Seq(), - returnType = nextString(), - parentType = optional(nextString()), - documentation = optional(nextString()) + externalId = optional(UUID.randomUUID()), + module = "Test.Main", + name = nextString(), + params = Seq(), + returnType = nextString(), + parentType = optional(nextString()), + documentation = optional(nextString()) ) def 
nextSuggestionConstructor(): Suggestion.Constructor = Suggestion.Constructor( - externalId = optional(UUID.randomUUID()), - module = "Test.Main", - name = nextString(), - arguments = Seq(), - returnType = nextString(), - documentation = optional(nextString()) + externalId = optional(UUID.randomUUID()), + module = "Test.Main", + name = nextString(), + arguments = Seq(), + returnType = nextString(), + documentation = optional(nextString()), + annotations = Seq() ) def nextSuggestionMethod(): Suggestion.Method = Suggestion.Method( - externalId = optional(UUID.randomUUID()), - module = "Test.Main", - name = nextString(), - arguments = Seq(), - selfType = nextString(), - returnType = nextString(), - isStatic = Random.nextBoolean(), - documentation = optional(nextString()) + externalId = optional(UUID.randomUUID()), + module = "Test.Main", + name = nextString(), + arguments = Seq(), + selfType = nextString(), + returnType = nextString(), + isStatic = Random.nextBoolean(), + documentation = optional(nextString()), + annotations = Seq() ) def nextSuggestionFunction(): Suggestion.Function = diff --git a/lib/scala/searcher/src/main/scala/org/enso/searcher/sql/SqlSuggestionsRepo.scala b/lib/scala/searcher/src/main/scala/org/enso/searcher/sql/SqlSuggestionsRepo.scala index 843e2235ef4f..f177b43c99f1 100644 --- a/lib/scala/searcher/src/main/scala/org/enso/searcher/sql/SqlSuggestionsRepo.scala +++ b/lib/scala/searcher/src/main/scala/org/enso/searcher/sql/SqlSuggestionsRepo.scala @@ -823,6 +823,7 @@ final class SqlSuggestionsRepo(val db: SqlDatabase)(implicit _, returnType, doc, + _, reexport ) => SuggestionRow( @@ -852,6 +853,7 @@ final class SqlSuggestionsRepo(val db: SqlDatabase)(implicit returnType, isStatic, doc, + _, reexport ) => SuggestionRow( @@ -981,6 +983,7 @@ final class SqlSuggestionsRepo(val db: SqlDatabase)(implicit arguments = Seq(), returnType = suggestion.returnType, documentation = suggestion.documentation, + annotations = Seq(), reexport = suggestion.reexport 
) case SuggestionKind.METHOD => @@ -994,6 +997,7 @@ final class SqlSuggestionsRepo(val db: SqlDatabase)(implicit returnType = suggestion.returnType, isStatic = suggestion.isStatic, documentation = suggestion.documentation, + annotations = Seq(), reexport = suggestion.reexport ) case SuggestionKind.CONVERSION => diff --git a/lib/scala/searcher/src/test/scala/org/enso/searcher/sql/SuggestionsRepoTest.scala b/lib/scala/searcher/src/test/scala/org/enso/searcher/sql/SuggestionsRepoTest.scala index cc0c7c27ce7c..ecf60887edfa 100644 --- a/lib/scala/searcher/src/test/scala/org/enso/searcher/sql/SuggestionsRepoTest.scala +++ b/lib/scala/searcher/src/test/scala/org/enso/searcher/sql/SuggestionsRepoTest.scala @@ -1490,7 +1490,8 @@ class SuggestionsRepoTest Suggestion.Argument("b", "Any", false, false, None) ), returnType = "Standard.Builtins.Pair", - documentation = Some("Awesome") + documentation = Some("Awesome"), + annotations = Seq() ) val method: Suggestion.Method = @@ -1502,7 +1503,8 @@ class SuggestionsRepoTest selfType = "local.Test.Main", returnType = "Standard.Builtins.IO", isStatic = true, - documentation = None + documentation = None, + annotations = Seq() ) val instanceMethod: Suggestion.Method = @@ -1514,7 +1516,8 @@ class SuggestionsRepoTest selfType = "local.Test.Main.A", returnType = "Standard.Builtins.Nothing", isStatic = false, - documentation = None + documentation = None, + annotations = Seq() ) val conversion: Suggestion.Conversion = From cd7cb0bda582e8a43c43ccc03463eafe58b0e557 Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Fri, 2 Jun 2023 19:05:37 +1000 Subject: [PATCH 13/39] Offline mode (#6870) * Dashboard and authentication changes * wip * Replace `useNavigate` with a wrapper * Fixes * Fix flipped boolean * QoL improvement for request blocking * Add service worker to cache dependencies --- app/ide-desktop/eslint.config.js | 8 + app/ide-desktop/lib/content/esbuild-config.ts | 1 + .../lib/content/src/devServiceWorker.ts | 59 ++++++ 
app/ide-desktop/lib/content/src/index.ts | 12 +- .../lib/content/src/project_manager.ts | 186 ------------------ .../lib/content/src/serviceWorker.ts | 31 +-- .../lib/content/src/serviceWorkerConstants.js | 61 ++++++ app/ide-desktop/lib/content/watch.ts | 10 +- .../components/confirmRegistration.tsx | 3 +- .../src/authentication/providers/auth.tsx | 108 ++++++++-- .../src/authentication/src/components/app.tsx | 3 +- .../src/authentication/src/config.ts | 7 +- .../src/dashboard/components/dashboard.tsx | 93 ++++++--- .../src/dashboard/components/userMenu.tsx | 55 ++++-- .../src/authentication/src/hooks.tsx | 31 +++ 15 files changed, 390 insertions(+), 278 deletions(-) create mode 100644 app/ide-desktop/lib/content/src/devServiceWorker.ts delete mode 100644 app/ide-desktop/lib/content/src/project_manager.ts create mode 100644 app/ide-desktop/lib/content/src/serviceWorkerConstants.js diff --git a/app/ide-desktop/eslint.config.js b/app/ide-desktop/eslint.config.js index 9b7bcf76da35..42172a05dbc8 100644 --- a/app/ide-desktop/eslint.config.js +++ b/app/ide-desktop/eslint.config.js @@ -274,6 +274,14 @@ export default [ }, ], 'sort-imports': ['error', { allowSeparatedGroups: true }], + 'no-restricted-properties': [ + 'error', + { + object: 'router', + property: 'useNavigate', + message: 'Use `hooks.useNavigate` instead.', + }, + ], 'no-restricted-syntax': ['error', ...RESTRICTED_SYNTAXES], 'prefer-arrow-callback': 'error', // Prefer `interface` over `type`. 
diff --git a/app/ide-desktop/lib/content/esbuild-config.ts b/app/ide-desktop/lib/content/esbuild-config.ts index 222194bee60b..dfbccd1b7da7 100644 --- a/app/ide-desktop/lib/content/esbuild-config.ts +++ b/app/ide-desktop/lib/content/esbuild-config.ts @@ -116,6 +116,7 @@ export function bundlerOptions(args: Arguments) { pathModule.resolve(THIS_PATH, 'src', 'run.js'), pathModule.resolve(THIS_PATH, 'src', 'style.css'), pathModule.resolve(THIS_PATH, 'src', 'docsStyle.css'), + pathModule.resolve(THIS_PATH, 'src', 'serviceWorker.ts'), ...wasmArtifacts.split(pathModule.delimiter), ...fsSync .readdirSync(assetsPath) diff --git a/app/ide-desktop/lib/content/src/devServiceWorker.ts b/app/ide-desktop/lib/content/src/devServiceWorker.ts new file mode 100644 index 000000000000..8d2089a79c7f --- /dev/null +++ b/app/ide-desktop/lib/content/src/devServiceWorker.ts @@ -0,0 +1,59 @@ +/** @file A service worker that redirects paths without extensions to `/index.html`. + * This is required for paths like `/login`, which are handled by client-side routing, + * to work when developing locally on `localhost:8080`. */ +// Bring globals and interfaces specific to Web Workers into scope. +/// +import * as common from 'enso-common' + +import * as constants from './serviceWorkerConstants' + +// ===================== +// === Fetch handler === +// ===================== + +// We `declare` a variable here because Service Workers have a different global scope. 
+// eslint-disable-next-line no-restricted-syntax +declare const self: ServiceWorkerGlobalScope + +self.addEventListener('install', event => { + event.waitUntil( + caches.open(constants.CACHE_NAME).then(cache => { + void cache.addAll(constants.DEPENDENCIES) + return + }) + ) +}) + +self.addEventListener('fetch', event => { + const url = new URL(event.request.url) + if (url.hostname === 'localhost' && url.pathname === '/esbuild') { + return false + } else if (url.hostname === 'localhost') { + const responsePromise = caches + .open(constants.CACHE_NAME) + .then(cache => cache.match(event.request)) + .then(response => + response ?? /\/[^.]+$/.test(url.pathname) + ? fetch('/index.html') + : fetch(event.request.url) + ) + event.respondWith( + responsePromise.then(response => { + const clonedResponse = new Response(response.body, response) + for (const [header, value] of common.COOP_COEP_CORP_HEADERS) { + clonedResponse.headers.set(header, value) + } + return clonedResponse + }) + ) + return + } else { + event.respondWith( + caches + .open(constants.CACHE_NAME) + .then(cache => cache.match(event.request)) + .then(response => response ?? fetch(event.request)) + ) + return + } +}) diff --git a/app/ide-desktop/lib/content/src/index.ts b/app/ide-desktop/lib/content/src/index.ts index 746e3ce4dae8..b974e66e1229 100644 --- a/app/ide-desktop/lib/content/src/index.ts +++ b/app/ide-desktop/lib/content/src/index.ts @@ -23,8 +23,10 @@ const INITIAL_URL_KEY = `${common.PRODUCT_NAME.toLowerCase()}-initial-url` const ESBUILD_PATH = '/esbuild' /** SSE event indicating a build has finished. */ const ESBUILD_EVENT_NAME = 'change' -/** Path to the service worker that resolves all extensionless paths to `/index.html`. - * This service worker is required for client-side routing to work when doing `./run gui watch`. */ +/** Path to the serice worker that caches assets for offline usage. + * In development, it also resolves all extensionless paths to `/index.html`. 
+ * This is required for client-side routing to work when doing `./run gui watch`. + */ const SERVICE_WORKER_PATH = '/serviceWorker.js' /** One second in milliseconds. */ const SECOND = 1000 @@ -41,12 +43,8 @@ if (IS_DEV_MODE) { // The `toString()` is to bypass a lint without using a comment. location.href = location.href.toString() }) - void navigator.serviceWorker.register(SERVICE_WORKER_PATH) -} else { - void navigator.serviceWorker - .getRegistration() - .then(serviceWorker => serviceWorker?.unregister()) } +void navigator.serviceWorker.register(SERVICE_WORKER_PATH) // ============= // === Fetch === diff --git a/app/ide-desktop/lib/content/src/project_manager.ts b/app/ide-desktop/lib/content/src/project_manager.ts deleted file mode 100644 index 324f4ea79fe1..000000000000 --- a/app/ide-desktop/lib/content/src/project_manager.ts +++ /dev/null @@ -1,186 +0,0 @@ -/** @file This module defines the Project Manager endpoint. */ -import * as newtype from './newtype' - -const PROJECT_MANAGER_ENDPOINT = 'ws://127.0.0.1:30535' - -// ============= -// === Types === -// ============= - -/** Possible actions to take when a component is missing. */ -export enum MissingComponentAction { - fail = 'Fail', - install = 'Install', - forceInstallBroken = 'ForceInstallBroken', -} - -/** The return value of a JSON-RPC call. */ -interface Result { - result: T -} - -// This intentionally has the same brand as in the cloud backend API. -/** An ID of a project. */ -export type ProjectId = newtype.Newtype -/** A name of a project. */ -export type ProjectName = newtype.Newtype -/** A UTC value containing a date and a time. */ -export type UTCDateTime = newtype.Newtype - -/** Details for a project. */ -interface ProjectMetadata { - name: ProjectName - namespace: string - id: ProjectId - engineVersion: string | null - lastOpened: UTCDateTime | null -} - -/** A value specifying a socket's hostname and port. 
*/ -interface IpWithSocket { - host: string - port: number -} - -/** The return value of the "list projects" endpoint. */ -interface ProjectList { - projects: ProjectMetadata[] -} - -/** The return value of the "create project" endpoint. */ -interface CreateProject { - projectId: ProjectId -} - -/** The return value of the "open project" endpoint. */ -interface OpenProject { - engineVersion: string - languageServerJsonAddress: IpWithSocket - languageServerBinaryAddress: IpWithSocket - projectName: ProjectName - projectNamespace: string -} - -// ================================ -// === Parameters for endpoints === -// ================================ - -/** Parameters for the "open project" endpoint. */ -export interface OpenProjectParams { - projectId: ProjectId - missingComponentAction: MissingComponentAction -} - -/** Parameters for the "close project" endpoint. */ -export interface CloseProjectParams { - projectId: ProjectId -} - -/** Parameters for the "list projects" endpoint. */ -export interface ListProjectsParams { - numberOfProjects?: number -} - -/** Parameters for the "create project" endpoint. */ -export interface CreateProjectParams { - name: ProjectName - projectTemplate?: string - version?: string - missingComponentAction?: MissingComponentAction -} - -/** Parameters for the "list samples" endpoint. */ -export interface RenameProjectParams { - projectId: ProjectId - name: ProjectName -} - -/** Parameters for the "delete project" endpoint. */ -export interface DeleteProjectParams { - projectId: ProjectId -} - -/** Parameters for the "list samples" endpoint. */ -export interface ListSamplesParams { - projectId: ProjectId -} - -// ======================= -// === Project Manager === -// ======================= - -/** A WebSocket endpoint to the Project Manager. */ -export class ProjectManager { - /** Creates a {@link ProjectManager}. 
*/ - constructor(protected readonly connectionUrl: string) {} - - /** The returns the singleton instance of the {@link ProjectManager}. */ - static default() { - return new ProjectManager(PROJECT_MANAGER_ENDPOINT) - } - - /** Sends a JSON-RPC request to the WebSocket endpoint. */ - public async sendRequest(method: string, params: unknown): Promise> { - const req = { - jsonrpc: '2.0', - id: 0, - method, - params, - } - - const ws = new WebSocket(this.connectionUrl) - return new Promise>((resolve, reject) => { - ws.onopen = () => { - ws.send(JSON.stringify(req)) - } - ws.onmessage = event => { - // There is no way to avoid this; `JSON.parse` returns `any`. - // eslint-disable-next-line @typescript-eslint/no-unsafe-argument - resolve(JSON.parse(event.data)) - } - ws.onerror = error => { - reject(error) - } - }).finally(() => { - ws.close() - }) - } - - /** * Open an existing project. */ - public async openProject(params: OpenProjectParams): Promise> { - return this.sendRequest('project/open', params) - } - - /** * Close an open project. */ - public async closeProject(params: CloseProjectParams): Promise> { - return this.sendRequest('project/close', params) - } - - /** * Get the projects list, sorted by open time. */ - public async listProjects(params: ListProjectsParams): Promise> { - return this.sendRequest('project/list', params) - } - - /** * Create a new project. */ - public async createProject(params: CreateProjectParams): Promise> { - return this.sendRequest('project/create', { - missingComponentAction: MissingComponentAction.install, - ...params, - }) - } - - /** * Rename a project. */ - public async renameProject(params: RenameProjectParams): Promise> { - return this.sendRequest('project/rename', params) - } - - /** * Delete a project. */ - public async deleteProject(params: DeleteProjectParams): Promise> { - return this.sendRequest('project/delete', params) - } - - /** * Get the list of sample projects that are available to the user. 
*/ - public async listSamples(params: ListSamplesParams): Promise> { - return this.sendRequest('project/listSample', params) - } -} diff --git a/app/ide-desktop/lib/content/src/serviceWorker.ts b/app/ide-desktop/lib/content/src/serviceWorker.ts index e51a59ef3e95..9066999b86d5 100644 --- a/app/ide-desktop/lib/content/src/serviceWorker.ts +++ b/app/ide-desktop/lib/content/src/serviceWorker.ts @@ -3,7 +3,7 @@ * to work when developing locally on `localhost:8080`. */ // Bring globals and interfaces specific to Web Workers into scope. /// -import * as common from 'enso-common' +import * as constants from './serviceWorkerConstants' // ===================== // === Fetch handler === @@ -13,23 +13,26 @@ import * as common from 'enso-common' // eslint-disable-next-line no-restricted-syntax declare const self: ServiceWorkerGlobalScope +self.addEventListener('install', event => { + event.waitUntil( + caches.open(constants.CACHE_NAME).then(cache => { + void cache.addAll(constants.DEPENDENCIES) + return + }) + ) +}) + self.addEventListener('fetch', event => { const url = new URL(event.request.url) - if (url.hostname === 'localhost' && url.pathname !== '/esbuild') { - const responsePromise = /\/[^.]+$/.test(new URL(event.request.url).pathname) - ? fetch('/index.html') - : fetch(event.request.url) + if (url.hostname === 'localhost') { + return false + } else { event.respondWith( - responsePromise.then(response => { - const clonedResponse = new Response(response.body, response) - for (const [header, value] of common.COOP_COEP_CORP_HEADERS) { - clonedResponse.headers.set(header, value) - } - return clonedResponse - }) + caches + .open(constants.CACHE_NAME) + .then(cache => cache.match(event.request)) + .then(response => response ?? 
fetch(event.request)) ) return - } else { - return false } }) diff --git a/app/ide-desktop/lib/content/src/serviceWorkerConstants.js b/app/ide-desktop/lib/content/src/serviceWorkerConstants.js new file mode 100644 index 000000000000..b3904f5c976e --- /dev/null +++ b/app/ide-desktop/lib/content/src/serviceWorkerConstants.js @@ -0,0 +1,61 @@ +/** @file Constants shared between all service workers (development and production). */ +import * as common from 'enso-common' + +/** The name of the cache under which offline assets are stored. */ +export const CACHE_NAME = common.PRODUCT_NAME.toLowerCase() + +/** The numbers after each font loaded by the "M PLUS 1" font. */ +const M_PLUS_1_SECTIONS = [ + /* eslint-disable @typescript-eslint/no-magic-numbers */ + 0, 1, 3, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, + 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 53, + 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, + 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, + 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, + /* eslint-enable @typescript-eslint/no-magic-numbers */ +] + +/** The complete list of assets to cache for offline use. 
*/ +export const DEPENDENCIES = [ + // app/gui/view/graph-editor/src/builtin/visualization/java_script/heatmap.js + // app/gui/view/graph-editor/src/builtin/visualization/java_script/histogram.js + // app/gui/view/graph-editor/src/builtin/visualization/java_script/scatterPlot.js + 'https://d3js.org/d3.v4.min.js', + 'https://fonts.cdnfonts.com/css/dejavu-sans-mono', + // Loaded by https://fonts.cdnfonts.com/css/dejavu-sans-mono + 'https://fonts.cdnfonts.com/s/108/DejaVuSansMono.woff', + 'https://fonts.cdnfonts.com/s/108/DejaVuSansMono-Oblique.woff', + 'https://fonts.cdnfonts.com/s/108/DejaVuSansMono-Bold.woff', + 'https://fonts.cdnfonts.com/s/108/DejaVuSansMono-BoldOblique.woff', + // app/gui/view/graph-editor/src/builtin/visualization/java_script/geoMap.js + 'https://unpkg.com/deck.gl@8.4/dist.min.js', + 'https://api.mapbox.com/mapbox-gl-js/v2.1.1/mapbox-gl.js', + 'https://api.mapbox.com/mapbox-gl-js/v2.1.1/mapbox-gl.css', + // Loaded by https://api.mapbox.com/mapbox-gl-js/v2.1.1/mapbox-gl.js + 'https://api.mapbox.com/styles/v1/mapbox/light-v9?access_token=pk.' + + 'eyJ1IjoiZW5zby1vcmciLCJhIjoiY2tmNnh5MXh2MGlyOTJ5cWdubnFxbXo4ZSJ9.3KdAcCiiXJcSM18nwk09-Q', + 'https://api.mapbox.com/styles/v1/mapbox/light-v9/sprite.json?access_token=pk.' + + 'eyJ1IjoiZW5zby1vcmciLCJhIjoiY2tmNnh5MXh2MGlyOTJ5cWdubnFxbXo4ZSJ9.3KdAcCiiXJcSM18nwk09-Q', + 'https://api.mapbox.com/styles/v1/mapbox/light-v9/sprite.png?access_token=pk.' 
+ + 'eyJ1IjoiZW5zby1vcmciLCJhIjoiY2tmNnh5MXh2MGlyOTJ5cWdubnFxbXo4ZSJ9.3KdAcCiiXJcSM18nwk09-Q', + // app/gui/view/graph-editor/src/builtin/visualization/java_script/sql.js + 'https://cdnjs.cloudflare.com/ajax/libs/sql-formatter/4.0.2/sql-formatter.min.js', + // app/gui/view/graph-editor/src/builtin/visualization/java_script/table.js + 'https://cdn.jsdelivr.net/npm/ag-grid-community/dist/ag-grid-community.min.js', + 'https://cdn.jsdelivr.net/npm/ag-grid-community/styles/ag-grid.css', + 'https://cdn.jsdelivr.net/npm/ag-grid-community/styles/ag-theme-alpine.css', + // app/ide-desktop/lib/content/src/docsStyle.css + 'https://fonts.gstatic.com/s/sourcecodepro/v14/HI_XiYsKILxRpg3hIP6sJ7fM7PqtlsnztA.ttf', + 'https://fonts.gstatic.com/s/sourcecodepro/v14/HI_SiYsKILxRpg3hIP6sJ7fM7PqVOg.ttf', + 'https://fonts.gstatic.com/s/sourcecodepro/v14/HI_XiYsKILxRpg3hIP6sJ7fM7PqtzsjztA.ttf', + 'https://fonts.gstatic.com/s/sourcecodepro/v14/HI_XiYsKILxRpg3hIP6sJ7fM7Pqt4s_ztA.ttf', + 'https://fonts.gstatic.com/s/sourcecodepro/v14/HI_XiYsKILxRpg3hIP6sJ7fM7Pqths7ztA.ttf', + // app/ide-desktop/lib/dashboard/src/tailwind.css + 'https://fonts.googleapis.com/css2?family=M+PLUS+1:wght@500;700&display=swap', + // Loaded by https://fonts.googleapis.com/css2?family=M+PLUS+1:wght@500;700&display=swap + ...M_PLUS_1_SECTIONS.map( + number => + `https://fonts.gstatic.com/s/mplus1/v6/` + + `R70ZjygA28ymD4HgBVu92j6eR1mYP_TX-Bb-rTg93gHfHe9F4Q.${number}.woff2` + ), +] diff --git a/app/ide-desktop/lib/content/watch.ts b/app/ide-desktop/lib/content/watch.ts index 091b48a87d70..8d951c4ea4b9 100644 --- a/app/ide-desktop/lib/content/watch.ts +++ b/app/ide-desktop/lib/content/watch.ts @@ -39,10 +39,12 @@ async function watch() { }) ) opts.define.REDIRECT_OVERRIDE = JSON.stringify('http://localhost:8080') - opts.entryPoints.push({ - in: path.resolve(THIS_PATH, 'src', 'serviceWorker.ts'), - out: 'serviceWorker', - }) + // This is safe as this entry point is statically known. 
+ // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const serviceWorkerEntryPoint = opts.entryPoints.find( + entryPoint => entryPoint.out === 'serviceWorker' + )! + serviceWorkerEntryPoint.in = path.resolve(THIS_PATH, 'src', 'devServiceWorker.ts') const builder = await esbuild.context(opts) await builder.watch() await builder.serve({ diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/confirmRegistration.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/confirmRegistration.tsx index 5fdff64ffe70..157d368c0efe 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/confirmRegistration.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/confirmRegistration.tsx @@ -6,6 +6,7 @@ import toast from 'react-hot-toast' import * as app from '../../components/app' import * as auth from '../providers/auth' +import * as hooks from '../../hooks' import * as loggerProvider from '../../providers/logger' // ================= @@ -26,7 +27,7 @@ function ConfirmRegistration() { const logger = loggerProvider.useLogger() const { confirmSignUp } = auth.useAuth() const { search } = router.useLocation() - const navigate = router.useNavigate() + const navigate = hooks.useNavigate() const { verificationCode, email } = parseUrlSearchParams(search) diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/providers/auth.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/providers/auth.tsx index 5c1f0cfe5515..bc4e64115d83 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/providers/auth.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/providers/auth.tsx @@ -13,6 +13,7 @@ import * as backendModule from '../../dashboard/backend' import * as backendProvider from '../../providers/backend' import * as errorModule from '../../error' 
import * as http from '../../http' +import * as localBackend from '../../dashboard/localBackend' import * as loggerProvider from '../../providers/logger' import * as newtype from '../../newtype' import * as remoteBackend from '../../dashboard/remoteBackend' @@ -22,6 +23,8 @@ import * as sessionProvider from './session' // === Constants === // ================= +/** The minimum delay between two requests. */ +const REQUEST_DELAY_MS = 200 const MESSAGES = { signUpSuccess: 'We have sent you an email with further instructions!', confirmSignUpSuccess: 'Your account has been confirmed! Please log in.', @@ -44,6 +47,7 @@ const MESSAGES = { /** Possible types of {@link BaseUserSession}. */ export enum UserSessionType { + offline = 'offline', partial = 'partial', full = 'full', } @@ -58,10 +62,20 @@ interface BaseUserSession { email: string } -/** Object containing the currently signed-in user's session data. */ -export interface FullUserSession extends BaseUserSession { - /** User's organization information. */ - organization: backendModule.UserOrOrganization +// Extends `BaseUserSession` in order to inherit the documentation. +/** Empty object of an offline user session. + * Contains some fields from {@link FullUserSession} to allow destructuring. */ +export interface OfflineUserSession extends Pick, 'type'> { + accessToken: null + organization: null +} + +/** The singleton instance of {@link OfflineUserSession}. + * Minimizes React re-renders. */ +const OFFLINE_USER_SESSION: OfflineUserSession = { + type: UserSessionType.offline, + accessToken: null, + organization: null, } /** Object containing the currently signed-in user's session data, if the user has not yet set their @@ -72,9 +86,15 @@ export interface FullUserSession extends BaseUserSession { * used by the `SetUsername` component. */ export interface PartialUserSession extends BaseUserSession {} +/** Object containing the currently signed-in user's session data. 
*/ +export interface FullUserSession extends BaseUserSession { + /** User's organization information. */ + organization: backendModule.UserOrOrganization +} + /** A user session for a user that may be either fully registered, * or in the process of registering. */ -export type UserSession = FullUserSession | PartialUserSession +export type UserSession = FullUserSession | OfflineUserSession | PartialUserSession // =================== // === AuthContext === @@ -88,6 +108,7 @@ export type UserSession = FullUserSession | PartialUserSession * * See {@link Cognito} for details on each of the authentication functions. */ interface AuthContextType { + goOffline: () => Promise signUp: (email: string, password: string) => Promise confirmSignUp: (email: string, code: string) => Promise setUsername: ( @@ -154,10 +175,21 @@ export function AuthProvider(props: AuthProviderProps) { const { session, deinitializeSession } = sessionProvider.useSession() const { setBackend } = backendProvider.useSetBackend() const logger = loggerProvider.useLogger() + // This must not be `hooks.useNavigate` as `goOffline` would be inaccessible, + // and the function call would error. + // eslint-disable-next-line no-restricted-properties const navigate = router.useNavigate() const [initialized, setInitialized] = react.useState(false) const [userSession, setUserSession] = react.useState(null) + // This is identical to `hooks.useOnlineCheck`, however it is inline here to avoid any possible + // circular dependency. + react.useEffect(() => { + if (!navigator.onLine) { + void goOffline() + } + }, [navigator.onLine]) + /** Fetch the JWT access token from the session via the AWS Amplify library. * * When invoked, retrieves the access token (if available) from the storage method chosen when @@ -165,7 +197,9 @@ export function AuthProvider(props: AuthProviderProps) { * If the token has expired, automatically refreshes the token and returns the new token. 
*/ react.useEffect(() => { const fetchSession = async () => { - if (session.none) { + if (!navigator.onLine) { + goOfflineInternal() + } else if (session.none) { setInitialized(true) setUserSession(null) } else { @@ -179,7 +213,25 @@ export function AuthProvider(props: AuthProviderProps) { if (!initialized || userSession == null) { setBackend(backend) } - const organization = await backend.usersMe().catch(() => null) + let organization + // eslint-disable-next-line no-restricted-syntax + while (organization === undefined) { + try { + organization = await backend.usersMe() + } catch { + // The value may have changed after the `await`. + // eslint-disable-next-line @typescript-eslint/no-unnecessary-condition + if (!navigator.onLine) { + goOfflineInternal() + // eslint-disable-next-line no-restricted-syntax + return + } + // This prevents a busy loop when request blocking is enabled in DevTools. + // The UI will be blank indefinitely. This is intentional, since for real + // network outages, `navigator.onLine` will be false. 
+ await new Promise(resolve => setTimeout(resolve, REQUEST_DELAY_MS)) + } + } let newUserSession: UserSession const sharedSessionData = { email, accessToken } if (!organization) { @@ -231,6 +283,19 @@ export function AuthProvider(props: AuthProviderProps) { return result } + const goOfflineInternal = () => { + setInitialized(true) + setUserSession(OFFLINE_USER_SESSION) + setBackend(new localBackend.LocalBackend()) + } + + const goOffline = () => { + toast.error('You are offline, switching to offline mode.') + goOfflineInternal() + navigate(app.DASHBOARD_PATH) + return Promise.resolve(true) + } + const signUp = async (username: string, password: string) => { const result = await cognito.signUp(username, password) if (result.ok) { @@ -345,19 +410,20 @@ export function AuthProvider(props: AuthProviderProps) { } const value = { + goOffline: goOffline, signUp: withLoadingToast(signUp), confirmSignUp: withLoadingToast(confirmSignUp), setUsername, signInWithGoogle: () => - cognito - .signInWithGoogle() - .then(() => true) - .catch(() => false), + cognito.signInWithGoogle().then( + () => true, + () => false + ), signInWithGitHub: () => - cognito - .signInWithGitHub() - .then(() => true) - .catch(() => false), + cognito.signInWithGitHub().then( + () => true, + () => false + ), signInWithPassword: withLoadingToast(signInWithPassword), forgotPassword: withLoadingToast(forgotPassword), resetPassword: withLoadingToast(resetPassword), @@ -474,11 +540,11 @@ export function usePartialUserSession() { return router.useOutletContext() } -// ========================== -// === useFullUserSession === -// ========================== +// ================================ +// === useNonPartialUserSession === +// ================================ -/** A React context hook returning the user session for a user that has completed registration. 
*/ -export function useFullUserSession() { - return router.useOutletContext() +/** A React context hook returning the user session for a user that can perform actions. */ +export function useNonPartialUserSession() { + return router.useOutletContext>() } diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx index 6dc553eca089..f5a6925764cd 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx @@ -40,6 +40,7 @@ import * as toast from 'react-hot-toast' import * as authService from '../authentication/service' import * as detect from '../detect' +import * as hooks from '../hooks' import * as authProvider from '../authentication/providers/auth' import * as backendProvider from '../providers/backend' @@ -124,7 +125,7 @@ function App(props: AppProps) { * component as the component that defines the provider. */ function AppRouter(props: AppProps) { const { logger, showDashboard, onAuthenticated } = props - const navigate = router.useNavigate() + const navigate = hooks.useNavigate() // FIXME[sb]: After platform detection for Electron is merged in, `IS_DEV_MODE` should be // set to true on `ide watch`. if (IS_DEV_MODE) { diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/config.ts b/app/ide-desktop/lib/dashboard/src/authentication/src/config.ts index 106a70990645..eb677a3ed15d 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/config.ts +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/config.ts @@ -7,6 +7,9 @@ import * as newtype from './newtype' // === Constants === // ================= +/** The web domain of the cloud website. */ +export const CLOUD_DOMAIN = 'https://cloud.enso.org' + /** The current environment that we're running in. 
*/ export const ENVIRONMENT: Environment = 'production' @@ -17,9 +20,7 @@ const CLOUD_REDIRECTS = { * when it is created. In the native app, the port is unpredictable, but this is not a problem * because the native app does not use port-based redirects, but deep links. */ development: newtype.asNewtype('http://localhost:8080'), - production: newtype.asNewtype( - REDIRECT_OVERRIDE ?? 'https://cloud.enso.org' - ), + production: newtype.asNewtype(REDIRECT_OVERRIDE ?? CLOUD_DOMAIN), } /** All possible API URLs, sorted by environment. */ diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx index a80df57b0bd6..1d35a58e0ec9 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx @@ -234,7 +234,7 @@ function Dashboard(props: DashboardProps) { const { supportsLocalBackend, appRunner } = props const logger = loggerProvider.useLogger() - const { accessToken, organization } = auth.useFullUserSession() + const session = auth.useNonPartialUserSession() const { backend } = backendProvider.useBackend() const { setBackend } = backendProvider.useSetBackend() const { modal } = modalProvider.useModal() @@ -244,7 +244,9 @@ function Dashboard(props: DashboardProps) { const [query, setQuery] = react.useState('') const [loadingProjectManagerDidFail, setLoadingProjectManagerDidFail] = react.useState(false) - const [directoryId, setDirectoryId] = react.useState(rootDirectoryId(organization.id)) + const [directoryId, setDirectoryId] = react.useState( + session.organization != null ? 
rootDirectoryId(session.organization.id) : null + ) const [directoryStack, setDirectoryStack] = react.useState< backendModule.Asset[] >([]) @@ -281,10 +283,13 @@ function Dashboard(props: DashboardProps) { backendModule.Asset[] >([]) - const listingLocalDirectoryAndWillFail = + const isListingLocalDirectoryAndWillFail = backend.type === backendModule.BackendType.local && loadingProjectManagerDidFail - const listingRemoteDirectoryAndWillFail = - backend.type === backendModule.BackendType.remote && !organization.isEnabled + const isListingRemoteDirectoryAndWillFail = + backend.type === backendModule.BackendType.remote && !session.organization?.isEnabled + const isListingRemoteDirectoryWhileOffline = + session.type === auth.UserSessionType.offline && + backend.type === backendModule.BackendType.remote const directory = directoryStack[directoryStack.length - 1] const parentDirectory = directoryStack[directoryStack.length - 2] @@ -366,7 +371,10 @@ function Dashboard(props: DashboardProps) { } const exitDirectory = () => { - setDirectoryId(parentDirectory?.id ?? rootDirectoryId(organization.id)) + setDirectoryId( + parentDirectory?.id ?? + (session.organization != null ? 
rootDirectoryId(session.organization.id) : null) + ) setDirectoryStack( // eslint-disable-next-line @typescript-eslint/no-magic-numbers directoryStack.slice(0, -1) @@ -396,7 +404,10 @@ function Dashboard(props: DashboardProps) { }, []) react.useEffect(() => { - if (directoryId === rootDirectoryId(organization.id)) { + if ( + session.organization == null || + directoryId === rootDirectoryId(session.organization.id) + ) { localStorage.removeItem(DIRECTORY_STACK_KEY) } else { localStorage.setItem(DIRECTORY_STACK_KEY, JSON.stringify(directoryStack)) @@ -594,7 +605,10 @@ function Dashboard(props: DashboardProps) { )) @@ -636,19 +650,36 @@ function Dashboard(props: DashboardProps) { hooks.useAsyncEffect( null, async signal => { - if (listingLocalDirectoryAndWillFail) { - // Do not `setIsLoadingAssets(false)` - } else if (!listingRemoteDirectoryAndWillFail) { - const assets = await backend.listDirectory({ parentId: directoryId }) - if (!signal.aborted) { - setIsLoadingAssets(false) - setAssets(assets) + switch (backend.type) { + case backendModule.BackendType.local: { + if (!isListingLocalDirectoryAndWillFail) { + const assets = await backend.listDirectory() + if (!signal.aborted) { + setIsLoadingAssets(false) + setAssets(assets) + } + } + return + } + case backendModule.BackendType.remote: { + if ( + !isListingRemoteDirectoryAndWillFail && + !isListingRemoteDirectoryWhileOffline && + directoryId != null + ) { + const assets = await backend.listDirectory({ parentId: directoryId }) + if (!signal.aborted) { + setIsLoadingAssets(false) + setAssets(assets) + } + } else { + setIsLoadingAssets(false) + } + return } - } else { - setIsLoadingAssets(false) } }, - [accessToken, directoryId, refresh, backend] + [session.accessToken, directoryId, refresh, backend] ) react.useEffect(() => { @@ -746,7 +777,11 @@ function Dashboard(props: DashboardProps) { break case backendModule.BackendType.remote: { const headers = new Headers() - headers.append('Authorization', `Bearer 
${accessToken}`) + // If `accessToken` is null, then there is no internet connection. + headers.append( + 'Authorization', + `Bearer ${session.accessToken ?? ''}` + ) const client = new http.Client(headers) setBackend(new remoteBackendModule.RemoteBackend(client, logger)) break @@ -757,14 +792,21 @@ function Dashboard(props: DashboardProps) { query={query} setQuery={setQuery} /> - {listingLocalDirectoryAndWillFail ? ( + {isListingRemoteDirectoryWhileOffline ? ( +
+
+ You are offline. Please connect to the internet and refresh to access the + cloud backend. +
+
+ ) : isListingLocalDirectoryAndWillFail ? (
Could not connect to the Project Manager. Please try restarting{' '} {common.PRODUCT_NAME}, or manually launching the Project Manager.
- ) : listingRemoteDirectoryAndWillFail ? ( + ) : isListingRemoteDirectoryAndWillFail ? (
We will review your user details and enable the cloud experience for you @@ -808,7 +850,10 @@ function Dashboard(props: DashboardProps) { event.stopPropagation() setModal(() => ( )) @@ -1195,7 +1240,9 @@ function Dashboard(props: DashboardProps) { ))(backend)} - {isFileBeingDragged && backend.type === backendModule.BackendType.remote ? ( + {isFileBeingDragged && + directoryId != null && + backend.type === backendModule.BackendType.remote ? (
{ diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/userMenu.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/userMenu.tsx index 93507033bcef..48a54cf8e03f 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/userMenu.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/userMenu.tsx @@ -1,7 +1,9 @@ /** @file The UserMenu component provides a dropdown menu of user actions and settings. */ import * as react from 'react' +import * as app from '../../components/app' import * as auth from '../../authentication/providers/auth' +import * as hooks from '../../hooks' import * as modalProvider from '../../providers/modal' import ChangePasswordModal from './changePasswordModal' @@ -35,7 +37,8 @@ function UserMenuItem(props: react.PropsWithChildren) { /** Handling the UserMenuItem click event logic and displaying its content. */ function UserMenu() { const { signOut } = auth.useAuth() - const { accessToken, organization } = auth.useFullUserSession() + const { accessToken, organization } = auth.useNonPartialUserSession() + const navigate = hooks.useNavigate() const { setModal } = modalProvider.useSetModal() @@ -43,10 +46,16 @@ function UserMenu() { // TODO: Implement this when the backend endpoints are implemented. } - // We know the shape of the JWT payload. - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment, @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-non-null-assertion - const username: string = JSON.parse(atob(accessToken.split('.')[1]!)).username - const canChangePassword = !/^Github_|^Google_/.test(username) + const goToLoginPage = () => { + navigate(app.LOGIN_PATH) + } + + // The shape of the JWT payload is statically known. 
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + const username: string | null = + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access, @typescript-eslint/no-non-null-assertion + accessToken != null ? JSON.parse(atob(accessToken.split('.')[1]!)).username : null + const canChangePassword = username != null ? !/^Github_|^Google_/.test(username) : null return (
- - Signed in as {organization.name} - - Your profile - {canChangePassword && ( - { - setModal(() => ) - }} - > - Change your password - + {organization != null ? ( + <> + {' '} + + Signed in as {organization.name} + + Your profile + {canChangePassword && ( + { + setModal(() => ) + }} + > + Change your password + + )} + Sign out + + ) : ( + <> + You are offline. + Login + )} - Sign out
) } diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/hooks.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/hooks.tsx index ea635c2c53c0..1400b684d4f3 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/hooks.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/hooks.tsx @@ -1,6 +1,9 @@ /** @file Module containing common custom React hooks used throughout out Dashboard. */ import * as react from 'react' +import * as router from 'react-router' +import * as app from './components/app' +import * as auth from './authentication/providers/auth' import * as loggerProvider from './providers/logger' // ================== @@ -68,3 +71,31 @@ export function useAsyncEffect( return value } + +// =================== +// === useNavigate === +// =================== + +/** A wrapper around {@link router.useNavigate} that goes into offline mode when + * offline. */ +export function useNavigate() { + const { goOffline } = auth.useAuth() + // This function is a wrapper around `router.useNavigate`. It should be the only place where + // `router.useNavigate` is used. + // eslint-disable-next-line no-restricted-properties + const originalNavigate = router.useNavigate() + + const navigate: router.NavigateFunction = (...args: [unknown, unknown?]) => { + const isOnline = navigator.onLine + if (!isOnline) { + void goOffline() + originalNavigate(app.DASHBOARD_PATH) + } else { + // This is safe, because the arguments are being passed through transparently. + // eslint-disable-next-line no-restricted-syntax + originalNavigate(...(args as [never, never?])) + } + } + + return navigate +} From d44b1250b705f846846a476636b16ffaf308fb52 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Wa=C5=9Bko?= Date: Fri, 2 Jun 2023 12:13:13 +0200 Subject: [PATCH 14/39] Implement `Table.add_row_number` (#6890) Closes #5227 # Important Notes - This lays first steps towards #6292 - we get pure Enso variants of MultiValueKey.
- Another part refactors `LongStorage` into `AbstractLongStorage` allowing it to provide alternative implementations of the underlying storage, in our case `LongRangeStorage` generating the values ad-hoc and `LongConstantStorage` - currently unused but in the future it can be adapted to support constant columns (once we implement similar facilities for other types). --- CHANGELOG.md | 2 + .../Database/0.0.0-dev/src/Data/Table.enso | 38 +++ .../src/Internal/Column_Fetcher.enso | 2 +- .../Table/0.0.0-dev/src/Data/Table.enso | 42 ++- .../src/Internal/Add_Row_Number.enso | 101 +++++++ .../0.0.0-dev/src/Internal/Java_Exports.enso | 4 - .../src/Internal/Multi_Value_Key.enso | 92 +++++++ .../0.0.0-dev/src/Internal/Table_Helpers.enso | 17 +- .../java/org/enso/base/arrays/LongArray.java | 21 ++ .../column/builder/object/DateBuilder.java | 2 +- .../builder/object/DateTimeBuilder.java | 2 +- .../column/builder/object/DoubleBuilder.java | 4 +- .../column/builder/object/LongBuilder.java | 2 +- .../builder/object/TimeOfDayBuilder.java | 2 +- .../cast/ToDateStorageConverter.java | 4 +- .../cast/ToDateTimeStorageConverter.java | 4 +- .../cast/ToFloatStorageConverter.java | 4 +- .../cast/ToIntegerStorageConverter.java | 4 +- .../cast/ToTextStorageConverter.java | 5 + .../cast/ToTimeOfDayStorageConverter.java | 4 +- .../column/operation/map/UnaryIntegerOp.java | 4 +- .../map/numeric/DoubleBooleanOp.java | 4 +- .../operation/map/numeric/DoubleIsInOp.java | 3 +- .../map/numeric/DoubleNumericOp.java | 4 +- .../operation/map/numeric/LongBooleanOp.java | 15 +- .../operation/map/numeric/LongIsInOp.java | 4 +- .../operation/map/numeric/LongNumericOp.java | 23 +- .../data/column/storage/ObjectStorage.java | 2 +- .../table/data/column/storage/Storage.java | 1 + .../storage/{ => datetime}/DateStorage.java | 4 +- .../{ => datetime}/DateTimeStorage.java | 4 +- .../{ => datetime}/TimeOfDayStorage.java | 4 +- .../AbstractLongStorage.java} | 253 +++--------------- 
.../storage/numeric/ComputedLongStorage.java | 138 ++++++++++ .../storage/{ => numeric}/DoubleStorage.java | 4 +- .../storage/numeric/LongConstantStorage.java | 15 ++ .../storage/numeric/LongRangeStorage.java | 21 ++ .../column/storage/numeric/LongStorage.java | 218 +++++++++++++++ .../storage/{ => numeric}/NumericStorage.java | 4 +- .../org/enso/table/write/ExcelWriter.java | 4 +- .../Add_Row_Number_Spec.enso | 127 +++++++++ .../src/In_Memory/Builders_Spec.enso | 3 +- 42 files changed, 930 insertions(+), 285 deletions(-) create mode 100644 distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Row_Number.enso create mode 100644 distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Multi_Value_Key.enso create mode 100644 std-bits/base/src/main/java/org/enso/base/arrays/LongArray.java rename std-bits/table/src/main/java/org/enso/table/data/column/storage/{ => datetime}/DateStorage.java (92%) rename std-bits/table/src/main/java/org/enso/table/data/column/storage/{ => datetime}/DateTimeStorage.java (92%) rename std-bits/table/src/main/java/org/enso/table/data/column/storage/{ => datetime}/TimeOfDayStorage.java (89%) rename std-bits/table/src/main/java/org/enso/table/data/column/storage/{LongStorage.java => numeric/AbstractLongStorage.java} (51%) create mode 100644 std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/ComputedLongStorage.java rename std-bits/table/src/main/java/org/enso/table/data/column/storage/{ => numeric}/DoubleStorage.java (98%) create mode 100644 std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongConstantStorage.java create mode 100644 std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongRangeStorage.java create mode 100644 std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongStorage.java rename std-bits/table/src/main/java/org/enso/table/data/column/storage/{ => numeric}/NumericStorage.java (80%) create mode 100644 
test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso diff --git a/CHANGELOG.md b/CHANGELOG.md index cbe45acd11c8..381646c6cb8f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -475,6 +475,7 @@ - [Added `.round` and `.int` to `Integer` and `Decimal`.][6743] - [Added `.round`, `.truncate`, `.ceil`, and `.floor` to `Column`.][6817] - [Added execution control to `Table.write` and various bug fixes.][6835] +- [Implemented `Table.add_row_number`.][6890] [debug-shortcuts]: https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug @@ -689,6 +690,7 @@ [6743]: https://github.com/enso-org/enso/pull/6743 [6817]: https://github.com/enso-org/enso/pull/6817 [6835]: https://github.com/enso-org/enso/pull/6835 +[6890]: https://github.com/enso-org/enso/pull/6890 #### Enso Compiler diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index bae38f880388..720ebb587dca 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -547,6 +547,44 @@ type Table msg = "`Table.drop` is not yet implemented." Error.throw (Unsupported_Database_Operation.Error msg) + ## Adds a new column to the table enumerating the rows. + + Arguments: + - name: The name of the new column. Defaults to "Row". + - from: The starting value for the enumeration. Defaults to 1. + - step: The amount to increment the enumeration by. Defaults to 1. + - group_by: Specifies the columns to group by. The row numbers are + counted separately for each group. By default, all rows are treated as + a single group. + - order_by: Specifies the columns to order by. Defaults to the order of + the rows in the table. The row numbers are assigned according to the + specified ordering. + + ? Ordering of rows + + Note that the ordering of rows from the original table is preserved in + all cases. 
The grouping and ordering settings affect how the row + numbers are assigned to each row, but the order of the rows itself is + not changed by this operation. + + ! Error Conditions + + - If the columns specified in `group_by` or `order_by` are not present + in the table, a `Missing_Input_Columns` or + `Column_Indexes_Out_Of_Range` error is raised. + - If the column with the same name as provided `name` already exists, + a `Duplicate_Output_Column_Names` problem is reported and the + existing column is renamed to avoid the clash. + - If grouping on floating point numbers, a `Floating_Point_Equality` + problem is reported. + @group_by Widget_Helpers.make_column_name_vector_selector + @order_by Widget_Helpers.make_order_by_selector + add_row_number : Text -> Integer -> Integer -> Vector (Text | Integer | Column_Selector) | Text | Integer | Column_Selector -> Vector (Text | Sort_Column) | Text | Sort_Column -> Problem_Behavior -> Table + add_row_number self name="Row" from=1 step=1 group_by=[] order_by=[] on_problems=Problem_Behavior.Report_Warning = + _ = [name, from, step, group_by, order_by, on_problems] + msg = "`Table.add_row_number` is not yet implemented in the Database backend." 
+ Error.throw (Unsupported_Database_Operation.Error msg) + ## UNSTABLE Returns a new Table that will include at most `max_rows` rows from the diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Column_Fetcher.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Column_Fetcher.enso index a61eaee70421..5cf1bf238452 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Column_Fetcher.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Column_Fetcher.enso @@ -151,7 +151,7 @@ default_fetcher_for_value_type value_type = ## PRIVATE seal_java_builder java_builder column_name = storage = java_builder.seal - Java_Exports.make_column column_name storage + Materialized_Column.from_storage column_name storage ## PRIVATE make_builder_from_java_object_builder java_builder = diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index b2f11bdf8e56..4e22bcd1d295 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -30,6 +30,7 @@ import project.Data.Set_Mode.Set_Mode import project.Data.Sort_Column.Sort_Column import project.Data.Table_Conversions import project.Delimited.Delimited_Format.Delimited_Format +import project.Internal.Add_Row_Number import project.Internal.Aggregate_Column_Helper import project.Internal.Java_Problems import project.Internal.Join_Helpers @@ -1192,6 +1193,42 @@ type Table drop self range=(First 1) = Index_Sub_Range_Module.drop_helper self.row_count self.rows.at self.slice (slice_ranges self) range + ## Adds a new column to the table enumerating the rows. + + Arguments: + - name: The name of the new column. Defaults to "Row". + - from: The starting value for the enumeration. Defaults to 1. + - step: The amount to increment the enumeration by. Defaults to 1. + - group_by: Specifies the columns to group by. 
The row numbers are + counted separately for each group. By default, all rows are treated as + a single group. + - order_by: Specifies the columns to order by. Defaults to the order of + the rows in the table. The row numbers are assigned according to the + specified ordering. + + ? Ordering of rows + + Note that the ordering of rows from the original table is preserved in + all cases. The grouping and ordering settings affect how the row + numbers are assigned to each row, but the order of the rows itself is + not changed by this operation. + + ! Error Conditions + + - If the columns specified in `group_by` or `order_by` are not present + in the table, a `Missing_Input_Columns` or + `Column_Indexes_Out_Of_Range` error is raised. + - If the column with the same name as provided `name` already exists, + a `Duplicate_Output_Column_Names` problem is reported and the + existing column is renamed to avoid the clash. + - If grouping on floating point numbers, a `Floating_Point_Equality` + problem is reported. + @group_by Widget_Helpers.make_column_name_vector_selector + @order_by Widget_Helpers.make_order_by_selector + add_row_number : Text -> Integer -> Integer -> Vector (Text | Integer | Column_Selector) | Text | Integer | Column_Selector -> Vector (Text | Sort_Column) | Text | Sort_Column -> Problem_Behavior -> Table + add_row_number self name="Row" from=1 step=1 group_by=[] order_by=[] on_problems=Problem_Behavior.Report_Warning = + Add_Row_Number.add_row_number self name from step group_by order_by on_problems + ## ALIAS Add Column, Update Column, New Column Sets the column value at the given name. @@ -1249,14 +1286,15 @@ type Table renamed = case new_name of Nothing -> resolved _ : Text -> resolved.rename new_name - to_add = case set_mode of + check_add_mode = case set_mode of Set_Mode.Add_Or_Update -> True Set_Mode.Add -> if self.java_table.getColumnByName renamed.name . 
is_nothing then True else Error.throw (Existing_Column.Error renamed.name) Set_Mode.Update -> if self.java_table.getColumnByName renamed.name . is_nothing . not then True else Error.throw (Missing_Column.Error renamed.name) - if to_add then Table.Value (self.java_table.addOrReplaceColumn renamed.java_column) else to_add + check_add_mode.if_not_error <| + Table.Value (self.java_table.addOrReplaceColumn renamed.java_column) ## Given an expression, create a derived column where each value is the result of evaluating the expression for the row. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Row_Number.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Row_Number.enso new file mode 100644 index 000000000000..f52bde87e869 --- /dev/null +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Add_Row_Number.enso @@ -0,0 +1,101 @@ +from Standard.Base import all + +import project.Data.Column.Column +import project.Data.Column_Selector.Column_Selector +import project.Data.Sort_Column.Sort_Column +import project.Data.Set_Mode.Set_Mode +import project.Data.Table.Table +import project.Internal.Multi_Value_Key.Unordered_Multi_Value_Key +import project.Internal.Multi_Value_Key.Ordered_Multi_Value_Key +import project.Internal.Problem_Builder.Problem_Builder +import project.Internal.Table_Helpers +import project.Internal.Unique_Name_Strategy.Unique_Name_Strategy + +from project.Errors import Duplicate_Output_Column_Names +from project.Internal.Java_Exports import make_long_builder + +polyglot java import org.enso.base.arrays.LongArray +polyglot java import org.enso.table.data.column.storage.numeric.LongStorage +polyglot java import org.enso.table.data.column.storage.numeric.LongRangeStorage + +## PRIVATE +add_row_number : Table -> Text -> Integer -> Integer -> (Column_Selector | Vector Text) -> Vector (Text | Sort_Column) | Text | Sort_Column -> Problem_Behavior -> Table +add_row_number table name from step group_by order_by on_problems = 
+ problem_builder = Problem_Builder.new error_on_missing_columns=True + grouping_columns = table.columns_helper.select_columns_helper group_by True problem_builder + Unordered_Multi_Value_Key.validate_grouping_columns grouping_columns problem_builder + ordering = Table_Helpers.resolve_order_by table.columns order_by problem_builder + problem_builder.attach_problems_before on_problems <| + new_column = case ordering.is_empty of + True -> + case grouping_columns.is_empty of + True -> make_range_column name from step table.row_count + False -> make_grouped_enumeration name grouping_columns from step + False -> make_grouped_ordered_enumeration name grouping_columns ordering from step + + column_names = table.column_names + renamed_table = if column_names.contains name . not then table else + problems = [Duplicate_Output_Column_Names.Error [name]] + on_problems.attach_problems_before problems <| + unique_name_strategy = Unique_Name_Strategy.new + unique_name_strategy.mark_used column_names + new_name = unique_name_strategy.make_unique name + new_columns = table.columns.map column-> + if column.name == name then column.rename new_name else column + Table.new new_columns + renamed_table.set new_column name set_mode=Set_Mode.Add + +## PRIVATE +nth_index start step n = + start + n*step + +## PRIVATE +make_range_column name start step length = + storage = LongRangeStorage.new start step length + Column.from_storage name storage + +## PRIVATE +make_grouped_enumeration name grouping_columns start step = + n = grouping_columns.at 0 . length + column_builder = make_long_builder n + 0.up_to n . 
fold Map.empty grouping_counters-> ix-> + key = Unordered_Multi_Value_Key.from_row grouping_columns ix + enum_index = grouping_counters.get key 0 + column_builder.appendLong (nth_index start step enum_index) + new_counters = grouping_counters.insert key (enum_index + 1) + new_counters + storage = column_builder.seal + Column.from_storage name storage + +## PRIVATE + If the `grouping_columns` are empty, all rows are considered to be in the same group. +make_grouped_ordered_enumeration name grouping_columns ordering from step = + ordering_columns = ordering.map .column + ordering_flip_directions = ordering.map descriptor-> case descriptor.associated_selector.direction of + Sort_Direction.Ascending -> False + Sort_Direction.Descending -> True + n = ordering_columns.at 0 . length + grouped_rows = (0.up_to n).fold Map.empty grouped_rows-> ix-> + key = Unordered_Multi_Value_Key.from_row grouping_columns ix + new_grouped_rows = case grouped_rows.get key of + Nothing -> + builder = Vector.new_builder + builder.append ix + grouped_rows.insert key builder + existing_builder -> + existing_builder.append ix + grouped_rows + new_grouped_rows + + long_array = LongArray.new n + + grouped_rows.each row_group_builder-> + row_group = row_group_builder.to_vector + sorted_group = row_group.sort on=ix-> + Ordered_Multi_Value_Key.from_row ordering_columns ordering_flip_directions ix + sorted_group.each_with_index enum_ix-> row_ix-> + enum_value = nth_index from step enum_ix + long_array.set row_ix enum_value + + storage = LongStorage.fromArray long_array.to_array + Column.from_storage name storage diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Exports.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Exports.enso index 11ee667008f8..c4022d607d34 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Exports.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Java_Exports.enso @@ -49,10 +49,6 @@ make_date_builder 
initial_size = DateBuilder.new initial_size make_inferred_builder : Integer -> InferredBuilder make_inferred_builder initial_size = InferredBuilder.new initial_size -## PRIVATE -make_column : Text -> Storage -> Column -make_column name storage = Column.Value (Java_Column.new name storage) - ## PRIVATE Wrapper around a DateBuilder that uses DateBuilder.appendDate() to append a value (instead of builder.append()) diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Multi_Value_Key.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Multi_Value_Key.enso new file mode 100644 index 000000000000..55b8e84ac062 --- /dev/null +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Multi_Value_Key.enso @@ -0,0 +1,92 @@ +from Standard.Base import all +import Standard.Base.Data.Array_Proxy.Array_Proxy +import Standard.Base.Errors.Illegal_State.Illegal_State + +import project.Data.Column.Column +import project.Data.Type.Value_Type.Value_Type +from project.Errors import Floating_Point_Equality + +## PRIVATE + An Enso implementation mirroring `UnorderedMultiValueKey` from the Java + helpers. +type Unordered_Multi_Value_Key + ## PRIVATE + Key hash_code:Integer columns:(Vector Column) row_index:Integer + + ## PRIVATE + from_row columns row_index = + # TODO floating point grouping warning? + arr = Array_Proxy.new columns.length column_ix-> + columns . at column_ix . at row_index + vector = Vector.from_polyglot_array arr + hash_code = Comparable.from vector . hash vector + Unordered_Multi_Value_Key.Key hash_code columns row_index + + ## PRIVATE + Checks which column may cause problems in the grouping due to imprecise + floating-point values. 
+ validate_grouping_columns columns problem_builder = + validate_column column = + value_type = column.value_type + is_float x = x.is_a Decimal + has_floats = value_type.is_floating_point || ((value_type == Value_Type.Mixed) && column.to_vector.any is_float) + if has_floats then + problem_builder.report_other_warning (Floating_Point_Equality.Error column.name) + columns.each validate_column + +## PRIVATE +type Unordered_Multi_Value_Key_Comparator + ## PRIVATE + compare x y = + if x.hash_code != y.hash_code then Nothing else + n = x.columns.length + go ix = + if ix >= n then Ordering.Equal else + vx = x.columns.at ix . at x.row_index + vy = y.columns.at ix . at y.row_index + if vx != vy then Nothing else + @Tail_Call go (ix + 1) + go 0 + + ## PRIVATE + hash x = x.hash_code + +Comparable.from (_:Unordered_Multi_Value_Key) = Unordered_Multi_Value_Key_Comparator + +## PRIVATE + An Enso implementation mirroring `OrderedMultiValueKey` from the Java + helpers. +type Ordered_Multi_Value_Key + Key columns:(Vector Column) flip_direction:(Vector Boolean) row_index:Integer + + ## PRIVATE + from_row columns flip_directions row_index = + Ordered_Multi_Value_Key.Key columns flip_directions row_index + +## PRIVATE +type Ordered_Multi_Value_Key_Comparator + ## PRIVATE + compare x y = + n = x.columns.length + adapt_direction ix cmp = + needs_flip = x.flip_direction.at ix + case needs_flip of + False -> cmp + True -> case cmp of + Ordering.Less -> Ordering.Greater + Ordering.Greater -> Ordering.Less + + if n != y.columns.length then Panic.throw (Illegal_State.Error "Multi_Value_Key used with different number of columns: " + x.columns.to_text + " vs " + y.columns.to_text) else + go ix = + if ix >= n then Ordering.Equal else + vx = x.columns.at ix . at x.row_index + vy = y.columns.at ix . 
at y.row_index + cmp = (Comparable.from vx).compare vx vy + if cmp != Ordering.Equal then adapt_direction ix cmp else + @Tail_Call go (ix + 1) + go 0 + + ## PRIVATE + hash _ = Error.throw (Illegal_State.new "Ordered_Multi_Value_Key is not intended for usage in unordered collections.") + +Comparable.from (_:Ordered_Multi_Value_Key) = Ordered_Multi_Value_Key_Comparator diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso index 59be059e8b20..9c648a2f5dde 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso @@ -355,8 +355,20 @@ type Column_Transform_Element Value column associated_selector ## PRIVATE + Resolves the `Sort_Column` selectors and ensures that at least one column was + selected. prepare_order_by : Vector -> Text | Sort_Column | Vector (Text | Sort_Column) -> Problem_Builder -> Vector Column_Transform_Element prepare_order_by internal_columns column_selectors problem_builder = + selected_elements = resolve_order_by internal_columns column_selectors problem_builder + if selected_elements.is_empty then + problem_builder.report_other_warning No_Input_Columns_Selected + selected_elements + +## PRIVATE + Resolves the `Sort_Column` selectors and returns a list of elements + containing the original selector and the selected column. 
+resolve_order_by : Vector -> Text | Sort_Column | Vector (Text | Sort_Column) -> Problem_Builder -> Vector Column_Transform_Element +resolve_order_by internal_columns column_selectors problem_builder = resolve_selector selector = case selector of name : Text -> resolve_selector (Sort_Column.Name name) ix : Integer -> resolve_selector (Sort_Column.Index ix) @@ -373,10 +385,7 @@ prepare_order_by internal_columns column_selectors problem_builder = selectors_vec = case column_selectors of _ : Vector -> column_selectors _ -> [column_selectors] - selected_elements = selectors_vec.flat_map resolve_selector - if selected_elements.is_empty then - problem_builder.report_other_warning No_Input_Columns_Selected - selected_elements + selectors_vec.flat_map resolve_selector ## PRIVATE A helper method gathering the common logic for constructing expressions that diff --git a/std-bits/base/src/main/java/org/enso/base/arrays/LongArray.java b/std-bits/base/src/main/java/org/enso/base/arrays/LongArray.java new file mode 100644 index 000000000000..f58406fbf5e7 --- /dev/null +++ b/std-bits/base/src/main/java/org/enso/base/arrays/LongArray.java @@ -0,0 +1,21 @@ +package org.enso.base.arrays; + +/** + * A wrapper for a long array, used to expose a random-access mutable array to Enso, needed for some + * efficient algorithms. 
+ */ +public class LongArray { + private final long[] storage; + + public LongArray(int size) { + this.storage = new long[size]; + } + + public void set(int ix, long value) { + storage[ix] = value; + } + + public long[] to_array() { + return storage; + } +} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateBuilder.java b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateBuilder.java index 3a83f3d47de8..00f6fc5109f1 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateBuilder.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateBuilder.java @@ -1,6 +1,6 @@ package org.enso.table.data.column.builder.object; -import org.enso.table.data.column.storage.DateStorage; +import org.enso.table.data.column.storage.datetime.DateStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.data.column.storage.type.DateType; import org.enso.table.data.column.storage.type.StorageType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateTimeBuilder.java b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateTimeBuilder.java index 11a5414c3fee..1d2c7e4f4ba5 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateTimeBuilder.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DateTimeBuilder.java @@ -1,6 +1,6 @@ package org.enso.table.data.column.builder.object; -import org.enso.table.data.column.storage.DateTimeStorage; +import org.enso.table.data.column.storage.datetime.DateTimeStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.data.column.storage.type.DateTimeType; import org.enso.table.data.column.storage.type.StorageType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DoubleBuilder.java 
b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DoubleBuilder.java index 9b189aba37bc..9a6752c03cbf 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DoubleBuilder.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/DoubleBuilder.java @@ -3,8 +3,8 @@ import org.enso.base.polyglot.NumericConverter; import org.enso.table.data.column.operation.cast.ToFloatStorageConverter; import org.enso.table.data.column.storage.BoolStorage; -import org.enso.table.data.column.storage.DoubleStorage; -import org.enso.table.data.column.storage.LongStorage; +import org.enso.table.data.column.storage.numeric.DoubleStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.data.column.storage.type.BooleanType; import org.enso.table.data.column.storage.type.FloatType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/LongBuilder.java b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/LongBuilder.java index 2d2e959aa93a..73726389947b 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/LongBuilder.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/LongBuilder.java @@ -3,7 +3,7 @@ import org.enso.base.polyglot.NumericConverter; import org.enso.table.data.column.operation.cast.ToIntegerStorageConverter; import org.enso.table.data.column.storage.BoolStorage; -import org.enso.table.data.column.storage.LongStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.data.column.storage.type.BooleanType; import org.enso.table.data.column.storage.type.FloatType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/TimeOfDayBuilder.java 
b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/TimeOfDayBuilder.java index 48d3b7cccd0a..e287820cd0a5 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/TimeOfDayBuilder.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/builder/object/TimeOfDayBuilder.java @@ -1,7 +1,7 @@ package org.enso.table.data.column.builder.object; import org.enso.table.data.column.storage.Storage; -import org.enso.table.data.column.storage.TimeOfDayStorage; +import org.enso.table.data.column.storage.datetime.TimeOfDayStorage; import org.enso.table.data.column.storage.type.StorageType; import org.enso.table.data.column.storage.type.TimeOfDayType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToDateStorageConverter.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToDateStorageConverter.java index 3cdb4224fdb4..32a9204fedd3 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToDateStorageConverter.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToDateStorageConverter.java @@ -1,8 +1,8 @@ package org.enso.table.data.column.operation.cast; import org.enso.table.data.column.builder.object.DateBuilder; -import org.enso.table.data.column.storage.DateStorage; -import org.enso.table.data.column.storage.DateTimeStorage; +import org.enso.table.data.column.storage.datetime.DateStorage; +import org.enso.table.data.column.storage.datetime.DateTimeStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.data.column.storage.type.AnyObjectType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToDateTimeStorageConverter.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToDateTimeStorageConverter.java index 54da574c9360..4a9ab6cb28c2 100644 --- 
a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToDateTimeStorageConverter.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToDateTimeStorageConverter.java @@ -1,8 +1,8 @@ package org.enso.table.data.column.operation.cast; import org.enso.table.data.column.builder.object.DateTimeBuilder; -import org.enso.table.data.column.storage.DateStorage; -import org.enso.table.data.column.storage.DateTimeStorage; +import org.enso.table.data.column.storage.datetime.DateStorage; +import org.enso.table.data.column.storage.datetime.DateTimeStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.data.column.storage.type.AnyObjectType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToFloatStorageConverter.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToFloatStorageConverter.java index 07d9516e61d4..b62245ffedff 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToFloatStorageConverter.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToFloatStorageConverter.java @@ -4,8 +4,8 @@ import org.enso.table.data.column.builder.object.DoubleBuilder; import org.enso.table.data.column.builder.object.NumericBuilder; import org.enso.table.data.column.storage.BoolStorage; -import org.enso.table.data.column.storage.DoubleStorage; -import org.enso.table.data.column.storage.LongStorage; +import org.enso.table.data.column.storage.numeric.DoubleStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.data.column.storage.type.AnyObjectType; import org.enso.table.data.column.storage.type.Bits; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToIntegerStorageConverter.java 
b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToIntegerStorageConverter.java index 5910986338c9..302d0c4e2873 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToIntegerStorageConverter.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToIntegerStorageConverter.java @@ -4,8 +4,8 @@ import org.enso.table.data.column.builder.object.LongBuilder; import org.enso.table.data.column.builder.object.NumericBuilder; import org.enso.table.data.column.storage.BoolStorage; -import org.enso.table.data.column.storage.DoubleStorage; -import org.enso.table.data.column.storage.LongStorage; +import org.enso.table.data.column.storage.numeric.DoubleStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.data.column.storage.type.AnyObjectType; import org.enso.table.data.column.storage.type.Bits; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToTextStorageConverter.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToTextStorageConverter.java index da8f18b56078..3653115caee7 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToTextStorageConverter.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToTextStorageConverter.java @@ -4,6 +4,11 @@ import org.enso.polyglot.common_utils.Core_Date_Utils; import org.enso.table.data.column.builder.object.StringBuilder; import org.enso.table.data.column.storage.*; +import org.enso.table.data.column.storage.datetime.DateStorage; +import org.enso.table.data.column.storage.datetime.DateTimeStorage; +import org.enso.table.data.column.storage.datetime.TimeOfDayStorage; +import org.enso.table.data.column.storage.numeric.DoubleStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; import 
org.enso.table.data.column.storage.type.AnyObjectType; import org.enso.table.data.column.storage.type.TextType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToTimeOfDayStorageConverter.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToTimeOfDayStorageConverter.java index e3603cb2714a..2ecff25ed2c5 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToTimeOfDayStorageConverter.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/cast/ToTimeOfDayStorageConverter.java @@ -1,9 +1,9 @@ package org.enso.table.data.column.operation.cast; import org.enso.table.data.column.builder.object.TimeOfDayBuilder; -import org.enso.table.data.column.storage.DateTimeStorage; +import org.enso.table.data.column.storage.datetime.DateTimeStorage; import org.enso.table.data.column.storage.Storage; -import org.enso.table.data.column.storage.TimeOfDayStorage; +import org.enso.table.data.column.storage.datetime.TimeOfDayStorage; import org.enso.table.data.column.storage.type.AnyObjectType; import java.time.LocalTime; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/UnaryIntegerOp.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/UnaryIntegerOp.java index 28e4559ad182..a7862c742b5f 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/UnaryIntegerOp.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/UnaryIntegerOp.java @@ -1,11 +1,9 @@ package org.enso.table.data.column.operation.map; -import org.enso.table.data.column.storage.LongStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; import org.enso.table.data.column.storage.Storage; -import org.enso.table.util.BitSets; import java.util.BitSet; -import java.util.function.Function; /** An operation that takes a single argument of some type and returns an integer. 
*/ public abstract class UnaryIntegerOp> extends UnaryMapOperation { diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleBooleanOp.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleBooleanOp.java index 33bb747d3c8a..314b2f6c9b84 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleBooleanOp.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleBooleanOp.java @@ -3,8 +3,8 @@ import org.enso.table.data.column.operation.map.MapOperation; import org.enso.table.data.column.operation.map.MapOperationProblemBuilder; import org.enso.table.data.column.storage.BoolStorage; -import org.enso.table.data.column.storage.DoubleStorage; -import org.enso.table.data.column.storage.LongStorage; +import org.enso.table.data.column.storage.numeric.DoubleStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.error.UnexpectedTypeException; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleIsInOp.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleIsInOp.java index ad5b581aa5a6..135eccbf1573 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleIsInOp.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleIsInOp.java @@ -2,8 +2,7 @@ import org.enso.base.polyglot.NumericConverter; import org.enso.table.data.column.operation.map.SpecializedIsInOp; -import org.enso.table.data.column.storage.DoubleStorage; -import org.enso.table.data.column.storage.Storage; +import org.enso.table.data.column.storage.numeric.DoubleStorage; import java.util.HashSet; import java.util.List; diff --git 
a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleNumericOp.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleNumericOp.java index 1a563898af92..804dd5e5951c 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleNumericOp.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/DoubleNumericOp.java @@ -2,8 +2,8 @@ import org.enso.table.data.column.operation.map.MapOperation; import org.enso.table.data.column.operation.map.MapOperationProblemBuilder; -import org.enso.table.data.column.storage.DoubleStorage; -import org.enso.table.data.column.storage.LongStorage; +import org.enso.table.data.column.storage.numeric.DoubleStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.error.UnexpectedTypeException; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongBooleanOp.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongBooleanOp.java index 6ba514c7557e..21657585968e 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongBooleanOp.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongBooleanOp.java @@ -3,15 +3,18 @@ import org.enso.table.data.column.operation.map.MapOperation; import org.enso.table.data.column.operation.map.MapOperationProblemBuilder; import org.enso.table.data.column.storage.BoolStorage; -import org.enso.table.data.column.storage.DoubleStorage; -import org.enso.table.data.column.storage.LongStorage; import org.enso.table.data.column.storage.Storage; +import org.enso.table.data.column.storage.numeric.AbstractLongStorage; +import org.enso.table.data.column.storage.numeric.DoubleStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; import 
org.enso.table.error.UnexpectedTypeException; import java.util.BitSet; -/** An operation expecting a numeric argument and returning a boolean. */ -public abstract class LongBooleanOp extends MapOperation { +/** + * An operation expecting a numeric argument and returning a boolean. + */ +public abstract class LongBooleanOp extends MapOperation { public LongBooleanOp(String name) { super(name); } @@ -25,7 +28,7 @@ protected boolean doObject(long a, Object b) { } @Override - public BoolStorage runMap(LongStorage storage, Object arg, MapOperationProblemBuilder problemBuilder) { + public BoolStorage runMap(AbstractLongStorage storage, Object arg, MapOperationProblemBuilder problemBuilder) { if (arg instanceof Long) { long x = (Long) arg; BitSet newVals = new BitSet(); @@ -62,7 +65,7 @@ public BoolStorage runMap(LongStorage storage, Object arg, MapOperationProblemBu } @Override - public BoolStorage runZip(LongStorage storage, Storage arg, MapOperationProblemBuilder problemBuilder) { + public BoolStorage runZip(AbstractLongStorage storage, Storage arg, MapOperationProblemBuilder problemBuilder) { if (arg instanceof DoubleStorage v) { BitSet newVals = new BitSet(); BitSet newMissing = new BitSet(); diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongIsInOp.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongIsInOp.java index ac7abf1f9c45..cd749919d384 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongIsInOp.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongIsInOp.java @@ -2,12 +2,12 @@ import org.enso.base.polyglot.NumericConverter; import org.enso.table.data.column.operation.map.SpecializedIsInOp; -import org.enso.table.data.column.storage.LongStorage; +import org.enso.table.data.column.storage.numeric.AbstractLongStorage; import java.util.HashSet; import java.util.List; -public class LongIsInOp extends 
SpecializedIsInOp { +public class LongIsInOp extends SpecializedIsInOp { @Override protected CompactRepresentation prepareList(List list) { HashSet set = new HashSet<>(); diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongNumericOp.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongNumericOp.java index ed6aa1c4b72e..bf13f39914dd 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongNumericOp.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongNumericOp.java @@ -2,17 +2,20 @@ import org.enso.table.data.column.operation.map.MapOperation; import org.enso.table.data.column.operation.map.MapOperationProblemBuilder; -import org.enso.table.data.column.storage.DoubleStorage; -import org.enso.table.data.column.storage.LongStorage; -import org.enso.table.data.column.storage.NumericStorage; import org.enso.table.data.column.storage.Storage; +import org.enso.table.data.column.storage.numeric.AbstractLongStorage; +import org.enso.table.data.column.storage.numeric.DoubleStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; +import org.enso.table.data.column.storage.numeric.NumericStorage; import org.enso.table.error.UnexpectedTypeException; import org.enso.table.util.BitSets; import java.util.BitSet; -/** An operation expecting a numeric argument and returning a boolean. */ -public abstract class LongNumericOp extends MapOperation { +/** + * An operation expecting a numeric argument and returning a boolean. 
+ */ +public abstract class LongNumericOp extends MapOperation { private final boolean alwaysCastToDouble; public LongNumericOp(String name, boolean alwaysCastToDouble) { @@ -29,7 +32,7 @@ public LongNumericOp(String name) { public abstract Long doLong(long in, long arg, int ix, MapOperationProblemBuilder problemBuilder); @Override - public NumericStorage runMap(LongStorage storage, Object arg, MapOperationProblemBuilder problemBuilder) { + public NumericStorage runMap(AbstractLongStorage storage, Object arg, MapOperationProblemBuilder problemBuilder) { if (arg == null) { if (alwaysCastToDouble) { return DoubleStorage.makeEmpty(storage.size()); @@ -64,8 +67,8 @@ public NumericStorage runMap(LongStorage storage, Object arg, MapOperationPro } @Override - public NumericStorage runZip(LongStorage storage, Storage arg, MapOperationProblemBuilder problemBuilder) { - if (arg instanceof LongStorage v) { + public NumericStorage runZip(AbstractLongStorage storage, Storage arg, MapOperationProblemBuilder problemBuilder) { + if (arg instanceof AbstractLongStorage v) { long[] out = new long[storage.size()]; BitSet newMissing = new BitSet(); for (int i = 0; i < storage.size(); i++) { @@ -84,9 +87,7 @@ public NumericStorage runZip(LongStorage storage, Storage arg, MapOperatio newMissing.set(i); } } - return alwaysCastToDouble - ? new DoubleStorage(out, storage.size(), newMissing) - : new LongStorage(out, storage.size(), newMissing); + return alwaysCastToDouble ? 
new DoubleStorage(out, storage.size(), newMissing) : new LongStorage(out, storage.size(), newMissing); } else if (arg instanceof DoubleStorage v) { long[] out = new long[storage.size()]; BitSet newMissing = new BitSet(); diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/ObjectStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/ObjectStorage.java index e796999d1c5f..a72476ce44a9 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/ObjectStorage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/ObjectStorage.java @@ -41,7 +41,7 @@ public Builder createDefaultBuilderOfSameType(int capacity) { private static final MapOpStorage> ops = buildObjectOps(); - static > MapOpStorage buildObjectOps() { + public static > MapOpStorage buildObjectOps() { MapOpStorage ops = new MapOpStorage<>(); ops.add( new UnaryMapOperation<>(Maps.IS_NOTHING) { diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/Storage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/Storage.java index 1e173a1528ad..64cd45f4a8e1 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/Storage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/Storage.java @@ -7,6 +7,7 @@ import org.enso.table.data.column.operation.cast.CastProblemBuilder; import org.enso.table.data.column.operation.cast.StorageConverter; import org.enso.table.data.column.operation.map.MapOperationProblemBuilder; +import org.enso.table.data.column.storage.numeric.LongStorage; import org.enso.table.data.column.storage.type.StorageType; import org.enso.table.data.mask.OrderMask; import org.enso.table.data.mask.SliceRange; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/DateStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/datetime/DateStorage.java similarity index 92% rename from 
std-bits/table/src/main/java/org/enso/table/data/column/storage/DateStorage.java rename to std-bits/table/src/main/java/org/enso/table/data/column/storage/datetime/DateStorage.java index 1b1ef83b4733..2bf98a2c7dd2 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/DateStorage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/datetime/DateStorage.java @@ -1,10 +1,12 @@ -package org.enso.table.data.column.storage; +package org.enso.table.data.column.storage.datetime; import org.enso.table.data.column.builder.object.Builder; import org.enso.table.data.column.builder.object.DateBuilder; import org.enso.table.data.column.operation.map.MapOpStorage; import org.enso.table.data.column.operation.map.UnaryIntegerOp; import org.enso.table.data.column.operation.map.datetime.DateTimeIsInOp; +import org.enso.table.data.column.storage.ObjectStorage; +import org.enso.table.data.column.storage.SpecializedStorage; import org.enso.table.data.column.storage.type.DateType; import org.enso.table.data.column.storage.type.StorageType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/DateTimeStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/datetime/DateTimeStorage.java similarity index 92% rename from std-bits/table/src/main/java/org/enso/table/data/column/storage/DateTimeStorage.java rename to std-bits/table/src/main/java/org/enso/table/data/column/storage/datetime/DateTimeStorage.java index 126b1f919a29..d31d2f70a451 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/DateTimeStorage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/datetime/DateTimeStorage.java @@ -1,10 +1,12 @@ -package org.enso.table.data.column.storage; +package org.enso.table.data.column.storage.datetime; import org.enso.table.data.column.builder.object.Builder; import org.enso.table.data.column.builder.object.DateTimeBuilder; import 
org.enso.table.data.column.operation.map.MapOpStorage; import org.enso.table.data.column.operation.map.UnaryIntegerOp; import org.enso.table.data.column.operation.map.datetime.DateTimeIsInOp; +import org.enso.table.data.column.storage.ObjectStorage; +import org.enso.table.data.column.storage.SpecializedStorage; import org.enso.table.data.column.storage.type.DateTimeType; import org.enso.table.data.column.storage.type.StorageType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/TimeOfDayStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/datetime/TimeOfDayStorage.java similarity index 89% rename from std-bits/table/src/main/java/org/enso/table/data/column/storage/TimeOfDayStorage.java rename to std-bits/table/src/main/java/org/enso/table/data/column/storage/datetime/TimeOfDayStorage.java index ebc9bcfdb650..b49b3dd31d97 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/TimeOfDayStorage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/datetime/TimeOfDayStorage.java @@ -1,9 +1,11 @@ -package org.enso.table.data.column.storage; +package org.enso.table.data.column.storage.datetime; import org.enso.table.data.column.builder.object.Builder; import org.enso.table.data.column.builder.object.TimeOfDayBuilder; import org.enso.table.data.column.operation.map.MapOpStorage; import org.enso.table.data.column.operation.map.datetime.DateTimeIsInOp; +import org.enso.table.data.column.storage.ObjectStorage; +import org.enso.table.data.column.storage.SpecializedStorage; import org.enso.table.data.column.storage.type.StorageType; import org.enso.table.data.column.storage.type.TimeOfDayType; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/LongStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/AbstractLongStorage.java similarity index 51% rename from 
std-bits/table/src/main/java/org/enso/table/data/column/storage/LongStorage.java rename to std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/AbstractLongStorage.java index 9b99e4a78ba5..86c92ce529b6 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/LongStorage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/AbstractLongStorage.java @@ -1,6 +1,5 @@ -package org.enso.table.data.column.storage; +package org.enso.table.data.column.storage.numeric; -import org.enso.base.polyglot.NumericConverter; import org.enso.table.data.column.builder.object.Builder; import org.enso.table.data.column.builder.object.NumericBuilder; import org.enso.table.data.column.operation.map.MapOpStorage; @@ -9,90 +8,22 @@ import org.enso.table.data.column.operation.map.numeric.LongBooleanOp; import org.enso.table.data.column.operation.map.numeric.LongIsInOp; import org.enso.table.data.column.operation.map.numeric.LongNumericOp; -import org.enso.table.data.column.storage.type.IntegerType; -import org.enso.table.data.column.storage.type.StorageType; -import org.enso.table.data.index.Index; -import org.enso.table.data.mask.OrderMask; -import org.enso.table.data.mask.SliceRange; -import org.graalvm.polyglot.Value; +import org.enso.table.data.column.storage.BoolStorage; +import org.enso.table.data.column.storage.Storage; import java.util.BitSet; -import java.util.List; -/** A column storing 64-bit integers. 
*/ -public final class LongStorage extends NumericStorage { - // TODO [RW] at some point we will want to add separate storage classes for byte, short and int, - // for more compact storage and more efficient handling of smaller integers; for now we will be - // handling this just by checking the bounds - private final long[] data; - private final BitSet isMissing; - private final int size; - private static final MapOpStorage ops = buildOps(); +public abstract class AbstractLongStorage extends NumericStorage { + public abstract long getItem(int idx); - /** - * @param data the underlying data - * @param size the number of items stored - * @param isMissing a bit set denoting at index {@code i} whether or not the value at index {@code - * i} is missing. - */ - public LongStorage(long[] data, int size, BitSet isMissing) { - this.data = data; - this.isMissing = isMissing; - this.size = size; - } - - public static LongStorage makeEmpty(int size) { - BitSet isMissing = new BitSet(size); - isMissing.set(0, size); - return new LongStorage(new long[0], size, isMissing); - } - - public LongStorage(long[] data) { - this(data, data.length, new BitSet()); - } - - /** @inheritDoc */ - @Override - public int size() { - return size; - } - - /** @inheritDoc */ - @Override - public int countMissing() { - return isMissing.cardinality(); - } - - /** - * @param idx an index - * @return the data item contained at the given index. - */ - public long getItem(int idx) { - return data[idx]; - } + public abstract BitSet getIsMissing(); @Override public double getItemDouble(int idx) { return (double) getItem(idx); } - @Override - public Long getItemBoxed(int idx) { - return isMissing.get(idx) ? 
null : data[idx]; - } - - /** @inheritDoc */ - @Override - public StorageType getType() { - // TODO add possibility to set integer bit limit (#5159) - return IntegerType.INT_64; - } - - /** @inheritDoc */ - @Override - public boolean isNa(long idx) { - return isMissing.get((int) idx); - } + private static final MapOpStorage ops = buildOps(); @Override public boolean isOpVectorized(String name) { @@ -111,107 +42,15 @@ protected Storage runVectorizedZip( return ops.runZip(name, this, argument, problemBuilder); } - private Storage fillMissingDouble(double arg) { - final var builder = NumericBuilder.createDoubleBuilder(size()); - long rawArg = Double.doubleToRawLongBits(arg); - for (int i = 0; i < size(); i++) { - if (isMissing.get(i)) { - builder.appendRawNoGrow(rawArg); - } else { - double coerced = data[i]; - builder.appendRawNoGrow(Double.doubleToRawLongBits(coerced)); - } - } - return builder.seal(); - } - - private Storage fillMissingLong(long arg) { - final var builder = NumericBuilder.createLongBuilder(size()); - for (int i = 0; i < size(); i++) { - if (isMissing.get(i)) { - builder.appendRawNoGrow(arg); - } else { - builder.appendRawNoGrow(data[i]); - } - } - return builder.seal(); - } - @Override - public Storage fillMissing(Value arg) { - if (arg.isNumber()) { - if (NumericConverter.isCoercibleToLong(arg.as(Object.class))) { - return fillMissingLong(arg.asLong()); - } else { - return fillMissingDouble(arg.asDouble()); - } - } - - return super.fillMissing(arg); - } - - @Override - public Storage mask(BitSet mask, int cardinality) { - BitSet newMissing = new BitSet(); - long[] newData = new long[cardinality]; - int resIx = 0; - for (int i = 0; i < size; i++) { - if (mask.get(i)) { - if (isMissing.get(i)) { - newMissing.set(resIx++); - } else { - newData[resIx++] = data[i]; - } - } - } - return new LongStorage(newData, cardinality, newMissing); - } - - @Override - public Storage applyMask(OrderMask mask) { - int[] positions = mask.getPositions(); - long[] 
newData = new long[positions.length]; - BitSet newMissing = new BitSet(); - for (int i = 0; i < positions.length; i++) { - if (positions[i] == Index.NOT_FOUND || isMissing.get(positions[i])) { - newMissing.set(i); - } else { - newData[i] = data[positions[i]]; - } - } - return new LongStorage(newData, positions.length, newMissing); - } - - @Override - public Storage countMask(int[] counts, int total) { - long[] newData = new long[total]; - BitSet newMissing = new BitSet(); - int pos = 0; - for (int i = 0; i < counts.length; i++) { - if (isMissing.get(i)) { - newMissing.set(pos, pos + counts[i]); - pos += counts[i]; - } else { - for (int j = 0; j < counts[i]; j++) { - newData[pos++] = data[i]; - } - } - } - return new LongStorage(newData, total, newMissing); - } - - public BitSet getIsMissing() { - return isMissing; - } - - public long[] getRawData() { - return data; + public Builder createDefaultBuilderOfSameType(int capacity) { + return NumericBuilder.createLongBuilder(capacity); } - private static MapOpStorage buildOps() { - MapOpStorage ops = new MapOpStorage<>(); + private static MapOpStorage buildOps() { + MapOpStorage ops = new MapOpStorage<>(); ops.add( - new LongNumericOp(Maps.ADD) { + new LongNumericOp(Storage.Maps.ADD) { @Override public double doDouble( long in, double arg, int ix, MapOperationProblemBuilder problemBuilder) { @@ -225,7 +64,7 @@ public Long doLong( } }) .add( - new LongNumericOp(Maps.SUB) { + new LongNumericOp(Storage.Maps.SUB) { @Override public double doDouble( long in, double arg, int ix, MapOperationProblemBuilder problemBuilder) { @@ -239,7 +78,7 @@ public Long doLong( } }) .add( - new LongNumericOp(Maps.MUL) { + new LongNumericOp(Storage.Maps.MUL) { @Override public double doDouble( long in, double arg, int ix, MapOperationProblemBuilder problemBuilder) { @@ -253,7 +92,7 @@ public Long doLong( } }) .add( - new LongNumericOp(Maps.MOD) { + new LongNumericOp(Storage.Maps.MOD) { @Override public double doDouble( long in, double arg, int 
ix, MapOperationProblemBuilder problemBuilder) { @@ -275,7 +114,7 @@ public Long doLong( } }) .add( - new LongNumericOp(Maps.POWER, true) { + new LongNumericOp(Storage.Maps.POWER, true) { @Override public double doDouble( long in, double arg, int ix, MapOperationProblemBuilder problemBuilder) { @@ -290,7 +129,7 @@ public Long doLong( } }) .add( - new LongNumericOp(Maps.DIV, true) { + new LongNumericOp(Storage.Maps.DIV, true) { @Override public double doDouble( long in, double arg, int ix, MapOperationProblemBuilder problemBuilder) { @@ -307,7 +146,7 @@ public Long doLong( } }) .add( - new LongBooleanOp(Maps.GT) { + new LongBooleanOp(Storage.Maps.GT) { @Override protected boolean doLong(long a, long b) { return a > b; @@ -319,7 +158,7 @@ protected boolean doDouble(long a, double b) { } }) .add( - new LongBooleanOp(Maps.GTE) { + new LongBooleanOp(Storage.Maps.GTE) { @Override protected boolean doLong(long a, long b) { return a >= b; @@ -331,7 +170,7 @@ protected boolean doDouble(long a, double b) { } }) .add( - new LongBooleanOp(Maps.LT) { + new LongBooleanOp(Storage.Maps.LT) { @Override protected boolean doLong(long a, long b) { return a < b; @@ -343,7 +182,7 @@ protected boolean doDouble(long a, double b) { } }) .add( - new LongBooleanOp(Maps.LTE) { + new LongBooleanOp(Storage.Maps.LTE) { @Override protected boolean doLong(long a, long b) { return a <= b; @@ -355,10 +194,12 @@ protected boolean doDouble(long a, double b) { } }) .add( - new LongBooleanOp(Maps.EQ) { + new LongBooleanOp(Storage.Maps.EQ) { @Override public BoolStorage runMap( - LongStorage storage, Object arg, MapOperationProblemBuilder problemBuilder) { + AbstractLongStorage storage, + Object arg, + MapOperationProblemBuilder problemBuilder) { if (arg instanceof Double) { problemBuilder.reportFloatingPointEquality(-1); } @@ -367,12 +208,14 @@ public BoolStorage runMap( @Override public BoolStorage runZip( - LongStorage storage, Storage arg, MapOperationProblemBuilder problemBuilder) { + 
AbstractLongStorage storage, + Storage arg, + MapOperationProblemBuilder problemBuilder) { if (arg instanceof DoubleStorage) { problemBuilder.reportFloatingPointEquality(-1); } else if (!(arg instanceof LongStorage)) { boolean hasFloats = false; - for (int i = 0; i < storage.size; i++) { + for (int i = 0; i < storage.size(); i++) { if (arg.isNa(i)) { continue; } @@ -405,45 +248,13 @@ protected boolean doObject(long x, Object o) { } }) .add( - new UnaryMapOperation<>(Maps.IS_NOTHING) { + new UnaryMapOperation<>(Storage.Maps.IS_NOTHING) { @Override - public BoolStorage run(LongStorage storage) { - return new BoolStorage(storage.isMissing, new BitSet(), storage.size, false); + public BoolStorage run(AbstractLongStorage storage) { + return new BoolStorage(storage.getIsMissing(), new BitSet(), storage.size(), false); } }) .add(new LongIsInOp()); return ops; } - - @Override - public LongStorage slice(int offset, int limit) { - int newSize = Math.min(size - offset, limit); - long[] newData = new long[newSize]; - System.arraycopy(data, offset, newData, 0, newSize); - BitSet newMask = isMissing.get(offset, offset + limit); - return new LongStorage(newData, newSize, newMask); - } - - @Override - public Builder createDefaultBuilderOfSameType(int capacity) { - return NumericBuilder.createLongBuilder(capacity); - } - - @Override - public LongStorage slice(List ranges) { - int newSize = SliceRange.totalLength(ranges); - long[] newData = new long[newSize]; - BitSet newMissing = new BitSet(newSize); - int offset = 0; - for (SliceRange range : ranges) { - int length = range.end() - range.start(); - System.arraycopy(data, range.start(), newData, offset, length); - for (int i = 0; i < length; ++i) { - newMissing.set(offset + i, isMissing.get(range.start() + i)); - } - offset += length; - } - - return new LongStorage(newData, newSize, newMissing); - } } diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/ComputedLongStorage.java 
b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/ComputedLongStorage.java new file mode 100644 index 000000000000..6e990569cfed --- /dev/null +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/ComputedLongStorage.java @@ -0,0 +1,138 @@ +package org.enso.table.data.column.storage.numeric; + +import org.enso.table.data.column.storage.Storage; +import org.enso.table.data.column.storage.type.IntegerType; +import org.enso.table.data.column.storage.type.StorageType; +import org.enso.table.data.index.Index; +import org.enso.table.data.mask.OrderMask; +import org.enso.table.data.mask.SliceRange; + +import java.util.BitSet; +import java.util.List; + +/** + * Implements a storage that computes the ith stored value using some function. + * + *

This storage assumes that _all_ values are present. + */ +public abstract class ComputedLongStorage extends AbstractLongStorage { + protected final int size; + + protected abstract long computeItem(int idx); + + protected ComputedLongStorage(int size) { + this.size = size; + } + + @Override + public int size() { + return size; + } + + @Override + public int countMissing() { + return 0; + } + + @Override + public StorageType getType() { + return IntegerType.INT_64; + } + + @Override + public boolean isNa(long idx) { + return false; + } + + @Override + public Long getItemBoxed(int idx) { + return getItem(idx); + } + + public long getItem(int idx) { + if (idx < 0 || idx >= size) { + throw new IndexOutOfBoundsException( + "Index " + idx + " is out of bounds for range of length " + size + "."); + } + + return computeItem(idx); + } + + @Override + public BitSet getIsMissing() { + return EMPTY; + } + + @Override + public Storage mask(BitSet mask, int cardinality) { + BitSet newMissing = new BitSet(); + long[] newData = new long[cardinality]; + int resIx = 0; + for (int i = 0; i < size; i++) { + if (mask.get(i)) { + newData[resIx++] = getItem(i); + } + } + return new LongStorage(newData, cardinality, newMissing); + } + + @Override + public Storage applyMask(OrderMask mask) { + int[] positions = mask.getPositions(); + long[] newData = new long[positions.length]; + BitSet newMissing = new BitSet(); + for (int i = 0; i < positions.length; i++) { + if (positions[i] == Index.NOT_FOUND) { + newMissing.set(i); + } else { + newData[i] = getItem(positions[i]); + } + } + return new LongStorage(newData, positions.length, newMissing); + } + + @Override + public Storage countMask(int[] counts, int total) { + long[] newData = new long[total]; + BitSet newMissing = new BitSet(); + int pos = 0; + for (int i = 0; i < counts.length; i++) { + long item = getItem(i); + for (int j = 0; j < counts[i]; j++) { + newData[pos++] = item; + } + } + return new LongStorage(newData, total, newMissing); 
+ } + + @Override + public Storage slice(int offset, int limit) { + int newSize = Math.min(size - offset, limit); + long[] newData = new long[newSize]; + for (int i = 0; i < newSize; i++) { + newData[i] = getItem(offset + i); + } + BitSet newMask = new BitSet(); + return new LongStorage(newData, newSize, newMask); + } + + @Override + public Storage slice(List ranges) { + int newSize = SliceRange.totalLength(ranges); + long[] newData = new long[newSize]; + BitSet newMissing = new BitSet(newSize); + int offset = 0; + for (SliceRange range : ranges) { + int rangeStart = range.start(); + int length = range.end() - rangeStart; + for (int i = 0; i < length; i++) { + newData[offset + i] = getItem(rangeStart + i); + } + offset += length; + } + + return new LongStorage(newData, newSize, newMissing); + } + + private static final BitSet EMPTY = new BitSet(); +} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/DoubleStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/DoubleStorage.java similarity index 98% rename from std-bits/table/src/main/java/org/enso/table/data/column/storage/DoubleStorage.java rename to std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/DoubleStorage.java index 1e25446f7e76..a6461dd29c60 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/DoubleStorage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/DoubleStorage.java @@ -1,4 +1,4 @@ -package org.enso.table.data.column.storage; +package org.enso.table.data.column.storage.numeric; import org.enso.table.data.column.builder.object.Builder; import org.enso.table.data.column.builder.object.NumericBuilder; @@ -8,6 +8,8 @@ import org.enso.table.data.column.operation.map.numeric.DoubleBooleanOp; import org.enso.table.data.column.operation.map.numeric.DoubleIsInOp; import org.enso.table.data.column.operation.map.numeric.DoubleNumericOp; +import 
org.enso.table.data.column.storage.BoolStorage; +import org.enso.table.data.column.storage.Storage; import org.enso.table.data.column.storage.type.FloatType; import org.enso.table.data.column.storage.type.StorageType; import org.enso.table.data.index.Index; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongConstantStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongConstantStorage.java new file mode 100644 index 000000000000..e26621654d65 --- /dev/null +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongConstantStorage.java @@ -0,0 +1,15 @@ +package org.enso.table.data.column.storage.numeric; + +public class LongConstantStorage extends ComputedLongStorage { + private final long constant; + + public LongConstantStorage(long constant, int size) { + super(size); + this.constant = constant; + } + + @Override + protected long computeItem(int idx) { + return constant; + } +} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongRangeStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongRangeStorage.java new file mode 100644 index 000000000000..497d269c3606 --- /dev/null +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongRangeStorage.java @@ -0,0 +1,21 @@ +package org.enso.table.data.column.storage.numeric; + +/** + * Implements a storage that can store a simple range of integers (e.g. row indices) with a + * specified start, step and length. 
+ */ +public class LongRangeStorage extends ComputedLongStorage { + private final long start; + private final long step; + + public LongRangeStorage(long start, long step, int size) { + super(size); + this.start = start; + this.step = step; + } + + @Override + protected long computeItem(int idx) { + return start + idx * step; + } +} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongStorage.java new file mode 100644 index 000000000000..7cd347e53d2c --- /dev/null +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/LongStorage.java @@ -0,0 +1,218 @@ +package org.enso.table.data.column.storage.numeric; + +import org.enso.base.polyglot.NumericConverter; +import org.enso.table.data.column.builder.object.NumericBuilder; +import org.enso.table.data.column.storage.Storage; +import org.enso.table.data.column.storage.type.IntegerType; +import org.enso.table.data.column.storage.type.StorageType; +import org.enso.table.data.index.Index; +import org.enso.table.data.mask.OrderMask; +import org.enso.table.data.mask.SliceRange; +import org.graalvm.polyglot.Value; + +import java.util.BitSet; +import java.util.List; + +/** A column storing 64-bit integers. */ +public final class LongStorage extends AbstractLongStorage { + // TODO [RW] at some point we will want to add separate storage classes for byte, short and int, + // for more compact storage and more efficient handling of smaller integers; for now we will be + // handling this just by checking the bounds + private final long[] data; + private final BitSet isMissing; + private final int size; + + /** + * @param data the underlying data + * @param size the number of items stored + * @param isMissing a bit set denoting at index {@code i} whether or not the value at index {@code + * i} is missing. 
+ */ + public LongStorage(long[] data, int size, BitSet isMissing) { + this.data = data; + this.isMissing = isMissing; + this.size = size; + } + + public static LongStorage fromArray(long[] data) { + return new LongStorage(data, data.length, new BitSet()); + } + + public static LongStorage makeEmpty(int size) { + BitSet isMissing = new BitSet(size); + isMissing.set(0, size); + return new LongStorage(new long[0], size, isMissing); + } + + public LongStorage(long[] data) { + this(data, data.length, new BitSet()); + } + + /** @inheritDoc */ + @Override + public int size() { + return size; + } + + /** @inheritDoc */ + @Override + public int countMissing() { + return isMissing.cardinality(); + } + + /** + * @param idx an index + * @return the data item contained at the given index. + */ + public long getItem(int idx) { + return data[idx]; + } + + @Override + public double getItemDouble(int idx) { + return (double) getItem(idx); + } + + @Override + public Long getItemBoxed(int idx) { + return isMissing.get(idx) ? 
null : data[idx]; + } + + /** @inheritDoc */ + @Override + public StorageType getType() { + // TODO add possibility to set integer bit limit (#5159) + return IntegerType.INT_64; + } + + /** @inheritDoc */ + @Override + public boolean isNa(long idx) { + return isMissing.get((int) idx); + } + + private Storage fillMissingDouble(double arg) { + final var builder = NumericBuilder.createDoubleBuilder(size()); + long rawArg = Double.doubleToRawLongBits(arg); + for (int i = 0; i < size(); i++) { + if (isMissing.get(i)) { + builder.appendRawNoGrow(rawArg); + } else { + double coerced = data[i]; + builder.appendRawNoGrow(Double.doubleToRawLongBits(coerced)); + } + } + return builder.seal(); + } + + private Storage fillMissingLong(long arg) { + final var builder = NumericBuilder.createLongBuilder(size()); + for (int i = 0; i < size(); i++) { + if (isMissing.get(i)) { + builder.appendRawNoGrow(arg); + } else { + builder.appendRawNoGrow(data[i]); + } + } + return builder.seal(); + } + + @Override + public Storage fillMissing(Value arg) { + if (arg.isNumber()) { + if (NumericConverter.isCoercibleToLong(arg.as(Object.class))) { + return fillMissingLong(arg.asLong()); + } else { + return fillMissingDouble(arg.asDouble()); + } + } + + return super.fillMissing(arg); + } + + @Override + public Storage mask(BitSet mask, int cardinality) { + BitSet newMissing = new BitSet(); + long[] newData = new long[cardinality]; + int resIx = 0; + for (int i = 0; i < size; i++) { + if (mask.get(i)) { + if (isMissing.get(i)) { + newMissing.set(resIx++); + } else { + newData[resIx++] = data[i]; + } + } + } + return new LongStorage(newData, cardinality, newMissing); + } + + @Override + public Storage applyMask(OrderMask mask) { + int[] positions = mask.getPositions(); + long[] newData = new long[positions.length]; + BitSet newMissing = new BitSet(); + for (int i = 0; i < positions.length; i++) { + if (positions[i] == Index.NOT_FOUND || isMissing.get(positions[i])) { + newMissing.set(i); + } else { + 
newData[i] = data[positions[i]]; + } + } + return new LongStorage(newData, positions.length, newMissing); + } + + @Override + public Storage countMask(int[] counts, int total) { + long[] newData = new long[total]; + BitSet newMissing = new BitSet(); + int pos = 0; + for (int i = 0; i < counts.length; i++) { + if (isMissing.get(i)) { + newMissing.set(pos, pos + counts[i]); + pos += counts[i]; + } else { + for (int j = 0; j < counts[i]; j++) { + newData[pos++] = data[i]; + } + } + } + return new LongStorage(newData, total, newMissing); + } + + @Override + public BitSet getIsMissing() { + return isMissing; + } + + public long[] getRawData() { + return data; + } + + @Override + public LongStorage slice(int offset, int limit) { + int newSize = Math.min(size - offset, limit); + long[] newData = new long[newSize]; + System.arraycopy(data, offset, newData, 0, newSize); + BitSet newMask = isMissing.get(offset, offset + limit); + return new LongStorage(newData, newSize, newMask); + } + + @Override + public LongStorage slice(List ranges) { + int newSize = SliceRange.totalLength(ranges); + long[] newData = new long[newSize]; + BitSet newMissing = new BitSet(newSize); + int offset = 0; + for (SliceRange range : ranges) { + int length = range.end() - range.start(); + System.arraycopy(data, range.start(), newData, offset, length); + for (int i = 0; i < length; ++i) { + newMissing.set(offset + i, isMissing.get(range.start() + i)); + } + offset += length; + } + + return new LongStorage(newData, newSize, newMissing); + } +} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/NumericStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/NumericStorage.java similarity index 80% rename from std-bits/table/src/main/java/org/enso/table/data/column/storage/NumericStorage.java rename to std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/NumericStorage.java index 50ebd4bc3e28..10d4af3fbe5e 100644 --- 
a/std-bits/table/src/main/java/org/enso/table/data/column/storage/NumericStorage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/numeric/NumericStorage.java @@ -1,4 +1,6 @@ -package org.enso.table.data.column.storage; +package org.enso.table.data.column.storage.numeric; + +import org.enso.table.data.column.storage.Storage; /** A storage containing items representable as a {@code double}. */ public abstract class NumericStorage extends Storage { diff --git a/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java b/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java index 4bf633af4d82..bfff733ce87d 100644 --- a/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java +++ b/std-bits/table/src/main/java/org/enso/table/write/ExcelWriter.java @@ -10,8 +10,8 @@ import org.apache.poi.ss.usermodel.CellType; import org.apache.poi.xssf.usermodel.XSSFWorkbook; import org.enso.table.data.column.storage.BoolStorage; -import org.enso.table.data.column.storage.DoubleStorage; -import org.enso.table.data.column.storage.LongStorage; +import org.enso.table.data.column.storage.numeric.DoubleStorage; +import org.enso.table.data.column.storage.numeric.LongStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.data.table.Column; import org.enso.table.data.table.Table; diff --git a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso new file mode 100644 index 000000000000..314909262370 --- /dev/null +++ b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso @@ -0,0 +1,127 @@ +from Standard.Base import all + +from Standard.Table import Sort_Column +from Standard.Table.Errors import Missing_Input_Columns, Column_Indexes_Out_Of_Range, Duplicate_Output_Column_Names, Floating_Point_Equality + +from Standard.Test import Test, Problems +import Standard.Test.Extensions + +from 
project.Common_Table_Operations.Util import run_default_backend + +polyglot java import java.lang.Long as Java_Long + +main = run_default_backend spec + +spec setup = + prefix = setup.prefix + table_builder = setup.table_builder + Test.group prefix+"Table.add_row_number" <| + Test.specify "should add a row numbering column" <| + t = table_builder [["X", ['a', 'b', 'a', 'a', 'c']]] + t1 = t.add_row_number + rows = t1.rows.to_vector . map .to_vector + rows . should_equal [['a', 1], ['b', 2], ['a', 3], ['a', 4], ['c', 5]] + t1.at "Row" . to_vector . should_equal [1, 2, 3, 4, 5] + t1.at "Row" . value_type . is_integer . should_be_true + + Test.specify "should rename existing column upon a name clash" <| + t1 = table_builder [["X", ['a', 'b']], ["Y", ['c', 'd']], ["Z", [40, 20]]] + t2 = t1.add_row_number name="Y" + t2.column_names . should_equal ["X", "Y 1", "Z", "Y"] + t2.at "X" . to_vector . should_equal ['a', 'b'] + t2.at "Y 1" . to_vector . should_equal ['c', 'd'] + t2.at "Z" . to_vector . should_equal [40, 20] + t2.at "Y" . to_vector . should_equal [1, 2] + Problems.expect_warning Duplicate_Output_Column_Names t2 + + r3 = t1.add_row_number name="X" on_problems=Problem_Behavior.Report_Error + r3.should_fail_with Duplicate_Output_Column_Names + + Test.specify "should allow customizing the starting index and step" <| + t = table_builder [["X", ['a', 'b', 'a']]] + t1 = t.add_row_number from=10 + t1.at "Row" . to_vector . should_equal [10, 11, 12] + + t2 = t.add_row_number step=10 + t2.at "Row" . to_vector . should_equal [1, 11, 21] + + t3 = t.add_row_number from=100 step=(-10) + t3.at "Row" . to_vector . should_equal [100, 90, 80] + + t4 = t.add_row_number from=44 step=0 + t4.at "Row" . to_vector . should_equal [44, 44, 44] + + t5 = t.add_row_number from=(-1) + t5.at "Row" . to_vector . 
should_equal [-1, 0, 1] + + Test.specify "should allow to assign row numbers separately within each group" <| + t = table_builder [["X", ['a', 'a', 'a', 'a', 'b', 'b']], ["Y", [40, 30, 20, 40, 20, 10]]] + t1 = t.add_row_number group_by=["X"] + t1.at "Row" . to_vector . should_equal [1, 2, 3, 4, 1, 2] + + t2 = table_builder [["X", ['a', 'a', 'a', 'a', 'b', 'b']], ["Y", [40, 40, 20, 20, 20, 10]]] + t3 = t2.add_row_number group_by=["X", "Y"] + t3.at "Row" . to_vector . should_equal [1, 2, 1, 2, 1, 1] + + Test.specify "should allow to assign row numbers separately within scattered groups, preserving the row layout" <| + v = ['a', 'b', 'a', 'b', 'b', 'b', 'c', 'a'] + t = table_builder [["X", v]] + t1 = t.add_row_number group_by=["X"] + # No reordering of elements: + t1.at "X" . to_vector . should_equal v + t1.at "Row" . to_vector . should_equal [1, 1, 2, 2, 3, 4, 1, 3] + + Test.specify "should allow to order the row numbers by some columns, keeping the row ordering intact" <| + v = [9, 8, 7, 6, 5, 4, 100, 200] + t = table_builder [["X", v]] + t1 = t.add_row_number order_by=["X"] + # No reordering of elements + t1.at "X" . to_vector . should_equal v + t1.at "Row" . to_vector . should_equal [6, 5, 4, 3, 2, 1, 7, 8] + + t2 = table_builder [["X", ["a", "b", "a", "a"]], ["Y", [1, 2, 3, 4]]] + t3 = t2.add_row_number order_by=["X", (Sort_Column.Name "Y" Sort_Direction.Descending)] + t3.at "Row" . to_vector . should_equal [3, 4, 2, 1] + + Test.specify "should allow mixing grouping with ordering and custom start and step" <| + vx = ['a', 'b', 'a', 'a', 'a', 'b', 'c', 'c'] + vy = [9, 8, 7, 6, 5, 4, 100, 200] + t = table_builder [["X", vx], ["Y", vy]] + t1 = t.add_row_number group_by=["X"] order_by=["Y"] from=100 step=100 + + # No reordering + t1.at "X" . to_vector . should_equal vx + t1.at "Y" . to_vector . should_equal vy + + t1.at "Row" . to_vector . 
should_equal [400, 200, 300, 200, 100, 100, 100, 200] + + Test.specify "should report floating point equality warning when grouping on float columns" <| + t = table_builder [["X", [1.0, 1.5, 1.0, 2.5, 2.5]]] + t1 = t.add_row_number group_by=["X"] + Problems.expect_warning Floating_Point_Equality t1 + t1.at "Row" . to_vector . should_equal [1, 1, 2, 1, 2] + + r2 = t.add_row_number group_by=["X"] on_problems=Problem_Behavior.Report_Error + r2.should_fail_with Floating_Point_Equality + + t3 = t.add_row_number order_by=["X"] + Problems.assume_no_problems t3 + t3.at "Row" . to_vector . should_equal [1, 3, 2, 4, 5] + + t4 = table_builder [["X", [1, "A", 1, 24.0, 24.0, 24.0, 24]]] + t5 = t4.add_row_number group_by=["X"] + Problems.expect_warning Floating_Point_Equality t5 + t5.at "Row" . to_vector . should_equal [1, 1, 2, 1, 2, 3, 4] + + Test.specify "should fail if columns provided in ordering/grouping do not exist" <| + t = table_builder [["X", [20, 30, 10]]] + r1 = t.add_row_number group_by=["X", "Y", "Z"] + r1.should_fail_with Missing_Input_Columns + r1.catch.criteria . should_equal ["Y", "Z"] + + r2 = t.add_row_number order_by=["Z", "X", "Y"] + r2.should_fail_with Missing_Input_Columns + r2.catch.criteria . should_equal ["Z", "Y"] + + r3 = t.add_row_number group_by=[44] + r3.should_fail_with Column_Indexes_Out_Of_Range diff --git a/test/Table_Tests/src/In_Memory/Builders_Spec.enso b/test/Table_Tests/src/In_Memory/Builders_Spec.enso index 8725cf140652..33f1aa9479f2 100644 --- a/test/Table_Tests/src/In_Memory/Builders_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Builders_Spec.enso @@ -1,5 +1,6 @@ from Standard.Base import all +from Standard.Table import Column import Standard.Table.Internal.Java_Exports from Standard.Test import Test, Test_Suite @@ -27,5 +28,5 @@ spec = Test.group "[In-Memory] Storage Builders" <| 0.up_to 5 . 
each _-> builder.append e storage = builder.seal - column = Java_Exports.make_column "X" storage + column = Column.from_storage "X" storage column.to_vector . should_equal vector From cfb2f2916eb1d81f521c5af238e42c2b227f65d3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Wa=C5=9Bko?= Date: Fri, 2 Jun 2023 14:09:20 +0200 Subject: [PATCH 15/39] Merge `Column_Indexes_Out_Of_Range` into `Missing_Input_Columns`. (#6901) Implements #6869 --- .../Database/0.0.0-dev/src/Data/Table.enso | 48 ++++--------------- .../Table/0.0.0-dev/src/Data/Table.enso | 48 ++++--------------- .../Standard/Table/0.0.0-dev/src/Errors.enso | 26 ++++------ .../0.0.0-dev/src/Internal/Join_Helpers.enso | 6 +-- .../src/Internal/Problem_Builder.enso | 17 +++---- .../Add_Row_Number_Spec.enso | 4 +- .../Aggregate_Spec.enso | 18 +++---- .../Cross_Tab_Spec.enso | 12 ++--- .../Distinct_Spec.enso | 6 +-- .../Join/Join_Spec.enso | 22 ++++----- .../Order_By_Spec.enso | 4 +- .../Select_Columns_Spec.enso | 24 ++++------ .../Transpose_Spec.enso | 6 +-- .../src/Formatting/Parse_Values_Spec.enso | 9 ++-- 14 files changed, 84 insertions(+), 166 deletions(-) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index 720ebb587dca..7f466d3ae556 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -166,10 +166,6 @@ type Table `Missing_Input_Columns` is raised as an error, unless `error_on_missing_columns` is set to `False`, in which case the problem is reported according to the `on_problems` setting. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - raised as an error, unless `error_on_missing_columns` is set to - `False`, in which case the problem is reported according to the - `on_problems` setting. > Example Select columns by name. 
@@ -192,7 +188,7 @@ type Table Icon: select_column @columns Widget_Helpers.make_column_name_vector_selector - select_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range + select_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns select_columns self (columns = [self.columns.first.name]) (reorder = False) (error_on_missing_columns = True) (on_problems = Report_Warning) = new_columns = self.columns_helper.select_columns selectors=columns reorder=reorder error_on_missing_columns=error_on_missing_columns on_problems=on_problems self.updated_columns new_columns @@ -221,10 +217,6 @@ type Table `Missing_Input_Columns` is reported according to the `on_problems` setting, unless `error_on_missing_columns` is set to `True`, in which case it is raised as an error. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - reported according to the `on_problems` setting, unless - `error_on_missing_columns` is set to `True`, in which case it is - raised as an error. > Example Remove columns with given names. @@ -246,7 +238,7 @@ type Table table.remove_columns [-1, 0, 1] @columns Widget_Helpers.make_column_name_vector_selector - remove_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range + remove_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Problem_Behavior -> Table ! 
No_Output_Columns | Missing_Input_Columns remove_columns self (columns = [self.columns.first.name]) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.remove_columns selectors=columns error_on_missing_columns=error_on_missing_columns on_problems=on_problems self.updated_columns new_columns @@ -272,10 +264,6 @@ type Table `Missing_Input_Columns` is reported according to the `on_problems` setting, unless `error_on_missing_columns` is set to `True`, in which case it is raised as an error. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - reported according to the `on_problems` setting, unless - `error_on_missing_columns` is set to `True`, in which case it is - raised as an error. > Example Move a column with a specified name to back. @@ -302,7 +290,7 @@ type Table table.reorder_columns [0] position=Position.After_Other_Columns @columns Widget_Helpers.make_column_name_vector_selector - reorder_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Position -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range + reorder_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Position -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns reorder_columns self (columns = [self.columns.first.name]) (position = Position.Before_Other_Columns) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.reorder_columns selectors=columns position=position error_on_missing_columns on_problems=on_problems self.updated_columns new_columns @@ -352,10 +340,6 @@ type Table `Missing_Input_Columns` is raised as an error, unless `error_on_missing_columns` is set to `False`, in which case the problem is reported according to the `on_problems` setting. 
- - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - raised as an error, unless `error_on_missing_columns` is set to - `False`, in which case the problem is reported according to the - `on_problems` setting. - Other problems are reported according to the `on_problems` setting: - If a column is matched by two selectors resulting in a different name mapping, a `Ambiguous_Column_Rename`. @@ -397,7 +381,7 @@ type Table by_name = Column_Selector.By_Name "name=(.*)" Case_Sensitivity.Sensitive use_regex=True table.rename_columns (Map.from_vector [[by_name, "key:$1"]]) @column_map Widget_Helpers.make_rename_name_vector_selector - rename_columns : Map (Text | Integer | Column_Selector) Text | Vector Text | Vector Vector -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Output_Column_Names | Duplicate_Output_Column_Names + rename_columns : Map (Text | Integer | Column_Selector) Text | Vector Text | Vector Vector -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Output_Column_Names | Duplicate_Output_Column_Names rename_columns self column_map=["Column"] (error_on_missing_columns=True) (on_problems=Report_Warning) = new_names = Table_Helpers.rename_columns internal_columns=self.internal_columns mapping=column_map error_on_missing_columns=error_on_missing_columns on_problems=on_problems Warning.with_suspended new_names names-> @@ -570,8 +554,7 @@ type Table ! Error Conditions - If the columns specified in `group_by` or `order_by` are not present - in the table, a `Missing_Input_Columns` or - `Column_Indexes_Out_Of_Range` error is raised. + in the table, a `Missing_Input_Columns` error is raised. 
- If the column with the same name as provided `name` already exists, a `Duplicate_Output_Column_Names` problem is reported and the existing column is renamed to avoid the clash. @@ -798,10 +781,6 @@ type Table `Missing_Input_Columns` is raised as an error, unless `error_on_missing_columns` is set to `False`, in which case the problem is reported according to the `on_problems` setting. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - raised as an error, unless `error_on_missing_columns` is set to - `False`, in which case the problem is reported according to the - `on_problems` setting. - If no columns have been selected for ordering, a `No_Input_Columns_Selected` is raised as dataflow error regardless of any settings. @@ -847,7 +826,7 @@ type Table table.order_by [(Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive)] @columns Widget_Helpers.make_order_by_selector - order_by : Vector (Text | Sort_Column) | Text -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns | Column_Indexes_Out_Of_Range + order_by : Vector (Text | Sort_Column) | Text -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns order_by self (columns = ([(Sort_Column.Name (self.columns.at 0 . name))])) text_ordering=Text_Ordering.Default error_on_missing_columns=True on_problems=Problem_Behavior.Report_Warning = Panic.handle_wrapped_dataflow_error <| problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns types_to_always_throw=[No_Input_Columns_Selected] columns_for_ordering = Table_Helpers.prepare_order_by self.columns columns problem_builder @@ -1282,7 +1261,7 @@ type Table not possible to create a table without any columns. - If a given aggregate is not supported by the backend, `Unsupported_Database_Operation` is reported. 
- - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is + - If a column index is out of range, a `Missing_Input_Columns` is reported according to the `on_problems` setting, unless `error_on_missing_columns` is set to `True`, in which case it is raised as an error. Problems resolving `Group_By` columns are @@ -1391,15 +1370,11 @@ type Table `Missing_Input_Columns` is raised as an error, unless `error_on_missing_columns` is set to `False`, in which case the problem is reported according to the `on_problems` setting. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - raised as an error, unless `error_on_missing_columns` is set to - `False`, in which case the problem is reported according to the - `on_problems` setting. - If any column names in the new table are clashing, a `Duplicate_Output_Column_Names` is reported according to the `on_problems` setting. @id_fields Widget_Helpers.make_column_name_vector_selector - transpose : Vector (Integer | Text | Column_Selector) | Text | Integer -> Text -> Text -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range | Duplicate_Output_Column_Names + transpose : Vector (Integer | Text | Column_Selector) | Text | Integer -> Text -> Text -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Duplicate_Output_Column_Names transpose self id_fields=[] (name_field="Name") (value_field="Value") (error_on_missing_columns=True) (on_problems = Report_Warning) = ## Avoid unused arguments warning. We cannot rename arguments to `_`, because we need to keep the API consistent with the in-memory table. @@ -1426,8 +1401,6 @@ type Table - If a column in `group_by` or `name_field` is not in the input table, a `Missing_Input_Columns` is raised as a dataflow error. - - If a column index in `group_by`, `name_field` or `values` is out of - range, a `Column_Indexes_Out_Of_Range` is raised as a dataflow error. 
- If a column selector in `values` given as a `Text` and it does not match any columns in the input table nor is it a valid expression, an `Invalid_Aggregate_Column` dataflow error is raised. @@ -1444,7 +1417,7 @@ type Table @group_by Widget_Helpers.make_column_name_vector_selector @name_column Widget_Helpers.make_column_name_selector @values (Widget_Helpers.make_aggregate_column_selector include_group_by=False) - cross_tab : Vector (Integer | Text | Column_Selector | Aggregate_Column) | Text | Integer -> (Text | Integer) -> Aggregate_Column | Vector Aggregate_Column -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range | Invalid_Aggregate_Column | Floating_Point_Equality | Invalid_Aggregation | Unquoted_Delimiter | Additional_Warnings + cross_tab : Vector (Integer | Text | Column_Selector | Aggregate_Column) | Text | Integer -> (Text | Integer) -> Aggregate_Column | Vector Aggregate_Column -> Problem_Behavior -> Table ! Missing_Input_Columns | Invalid_Aggregate_Column | Floating_Point_Equality | Invalid_Aggregation | Unquoted_Delimiter | Additional_Warnings cross_tab self group_by=[] name_column=self.column_names.first values=Aggregate_Column.Count (on_problems=Report_Warning) = ## Avoid unused arguments warning. We cannot rename arguments to `_`, because we need to keep the API consistent with the in-memory table. @@ -1488,9 +1461,6 @@ type Table - If a column in `columns` is not in the input table, a `Missing_Input_Columns` is raised as an error or problem following the `error_on_missing_columns` rules. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - raised as an error or problem following the - `error_on_missing_columns` rules. - If a column selected for parsing is not a text column, an `Invalid_Value_Type` error is raised. 
- If no columns have been selected for parsing, diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index 4e22bcd1d295..056f6a14433e 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -289,10 +289,6 @@ type Table `Missing_Input_Columns` is raised as an error, unless `error_on_missing_columns` is set to `False`, in which case the problem is reported according to the `on_problems` setting. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - raised as an error, unless `error_on_missing_columns` is set to - `False`, in which case the problem is reported according to the - `on_problems` setting. > Example Select columns by name. @@ -316,7 +312,7 @@ type Table Icon: select_column @columns Widget_Helpers.make_column_name_vector_selector - select_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range + select_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns select_columns self columns=[self.columns.first.name] (reorder = False) (error_on_missing_columns = True) (on_problems = Report_Warning) = new_columns = self.columns_helper.select_columns selectors=columns reorder=reorder error_on_missing_columns=error_on_missing_columns on_problems=on_problems Table.new new_columns @@ -345,10 +341,6 @@ type Table `Missing_Input_Columns` is reported according to the `on_problems` setting, unless `error_on_missing_columns` is set to `True`, in which case it is raised as an error. 
- - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - reported according to the `on_problems` setting, unless - `error_on_missing_columns` is set to `True`, in which case it is - raised as an error. > Example Remove columns with given names. @@ -370,7 +362,7 @@ type Table table.remove_columns [-1, 0, 1] @columns Widget_Helpers.make_column_name_vector_selector - remove_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range + remove_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns remove_columns self (columns=[self.columns.first.name]) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.remove_columns selectors=columns error_on_missing_columns=error_on_missing_columns on_problems=on_problems Table.new new_columns @@ -396,10 +388,6 @@ type Table `Missing_Input_Columns` is reported according to the `on_problems` setting, unless `error_on_missing_columns` is set to `True`, in which case it is raised as an error. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - reported according to the `on_problems` setting, unless - `error_on_missing_columns` is set to `True`, in which case it is - raised as an error. > Example Move a column with a specified name to back. @@ -426,7 +414,7 @@ type Table table.reorder_columns [0] position=Position.After_Other_Columns @columns Widget_Helpers.make_column_name_vector_selector - reorder_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Position -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range + reorder_columns : Vector (Integer | Text | Column_Selector) | Text | Integer -> Position -> Boolean -> Problem_Behavior -> Table ! 
Missing_Input_Columns reorder_columns self (columns = [self.columns.first.name]) (position = Position.Before_Other_Columns) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.reorder_columns selectors=columns position=position error_on_missing_columns=error_on_missing_columns on_problems=on_problems Table.new new_columns @@ -476,10 +464,6 @@ type Table `Missing_Input_Columns` is raised as an error, unless `error_on_missing_columns` is set to `False`, in which case the problem is reported according to the `on_problems` setting. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - raised as an error, unless `error_on_missing_columns` is set to - `False`, in which case the problem is reported according to the - `on_problems` setting. - Other problems are reported according to the `on_problems` setting: - If a column is matched by two selectors resulting in a different name mapping, a `Ambiguous_Column_Rename`. @@ -521,7 +505,7 @@ type Table by_name = Column_Selector.By_Name "name=(.*)" Case_Sensitivity.Sensitive use_regex=True table.rename_columns (Map.from_vector [[by_name, "key:$1"]]) @column_map Widget_Helpers.make_rename_name_vector_selector - rename_columns : Map (Text | Integer | Column_Selector) Text | Vector Text | Vector Vector -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Output_Column_Names | Duplicate_Output_Column_Names + rename_columns : Map (Text | Integer | Column_Selector) Text | Vector Text | Vector Vector -> Boolean -> Problem_Behavior -> Table ! 
Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Output_Column_Names | Duplicate_Output_Column_Names rename_columns self column_map=["Column"] (error_on_missing_columns=True) (on_problems=Report_Warning) = new_names = Table_Helpers.rename_columns internal_columns=self.columns mapping=column_map error_on_missing_columns=error_on_missing_columns on_problems=on_problems Warning.with_suspended new_names names-> @@ -572,7 +556,7 @@ type Table - If there are no columns in the output table, a `No_Output_Columns` is raised as an error regardless of the problem behavior, because it is not possible to create a table without any columns. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is + - If a column index is out of range, a `Missing_Input_Columns` is reported according to the `on_problems` setting, unless `error_on_missing_columns` is set to `True`, in which case it is raised as an error. Problems resolving `Group_By` columns are @@ -641,10 +625,6 @@ type Table `Missing_Input_Columns` is raised as an error, unless `error_on_missing_columns` is set to `False`, in which case the problem is reported according to the `on_problems` setting. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - raised as an error, unless `error_on_missing_columns` is set to - `False`, in which case the problem is reported according to the - `on_problems` setting. - If no columns have been selected for ordering, a `No_Input_Columns_Selected` is raised as dataflow error regardless of any settings. @@ -690,7 +670,7 @@ type Table table.order_by [(Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive)] @columns Widget_Helpers.make_order_by_selector - order_by : Vector (Text | Sort_Column) | Text -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! 
Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns | Column_Indexes_Out_Of_Range + order_by : Vector (Text | Sort_Column) | Text -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns order_by self (columns = [self.columns.first.name]) text_ordering=Text_Ordering.Default error_on_missing_columns=True on_problems=Problem_Behavior.Report_Warning = problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns types_to_always_throw=[No_Input_Columns_Selected] columns_for_ordering = Table_Helpers.prepare_order_by self.columns columns problem_builder @@ -797,9 +777,6 @@ type Table - If a column in `columns` is not in the input table, a `Missing_Input_Columns` is raised as an error or problem following the `error_on_missing_columns` rules. - - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - raised as an error or problem following the - `error_on_missing_columns` rules. - If a column selected for parsing is not a text column, an `Invalid_Value_Type` error is raised. - If no columns have been selected for parsing, @@ -1216,8 +1193,7 @@ type Table ! Error Conditions - If the columns specified in `group_by` or `order_by` are not present - in the table, a `Missing_Input_Columns` or - `Column_Indexes_Out_Of_Range` error is raised. + in the table, a `Missing_Input_Columns` error is raised. - If the column with the same name as provided `name` already exists, a `Duplicate_Output_Column_Names` problem is reported and the existing column is renamed to avoid the clash. @@ -1776,15 +1752,11 @@ type Table `Missing_Input_Columns` is raised as an error, unless `error_on_missing_columns` is set to `False`, in which case the problem is reported according to the `on_problems` setting. 
- - If a column index is out of range, a `Column_Indexes_Out_Of_Range` is - raised as an error, unless `error_on_missing_columns` is set to - `False`, in which case the problem is reported according to the - `on_problems` setting. - If any column names in the new table are clashing, a `Duplicate_Output_Column_Names` is reported according to the `on_problems` setting. @id_fields Widget_Helpers.make_column_name_vector_selector - transpose : Vector (Integer | Text | Column_Selector) | Text | Integer -> Text -> Text -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range | Duplicate_Output_Column_Names + transpose : Vector (Integer | Text | Column_Selector) | Text | Integer -> Text -> Text -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Duplicate_Output_Column_Names transpose self (id_fields = []) (name_field="Name") (value_field="Value") (error_on_missing_columns=True) (on_problems = Report_Warning) = columns_helper = self.columns_helper unique = Unique_Name_Strategy.new @@ -1824,8 +1796,6 @@ type Table - If a column in `group_by` or `name_field` is not in the input table, a `Missing_Input_Columns` is raised as a dataflow error. - - If a column index in `group_by`, `name_field` or `values` is out of - range, a `Column_Indexes_Out_Of_Range` is raised as a dataflow error. - If a column selector in `values` given as a `Text` and it does not match any columns in the input table nor is it a valid expression, an `Invalid_Aggregate_Column` dataflow error is raised. @@ -1842,7 +1812,7 @@ type Table @group_by Widget_Helpers.make_column_name_vector_selector @name_column Widget_Helpers.make_column_name_selector @values (Widget_Helpers.make_aggregate_column_selector include_group_by=False) - cross_tab : Vector (Integer | Text | Column_Selector | Aggregate_Column) | Text | Integer -> (Text | Integer) -> Aggregate_Column | Vector Aggregate_Column -> Problem_Behavior -> Table ! 
Missing_Input_Columns | Column_Indexes_Out_Of_Range | Invalid_Aggregate_Column | Floating_Point_Equality | Invalid_Aggregation | Unquoted_Delimiter | Additional_Warnings + cross_tab : Vector (Integer | Text | Column_Selector | Aggregate_Column) | Text | Integer -> (Text | Integer) -> Aggregate_Column | Vector Aggregate_Column -> Problem_Behavior -> Table ! Missing_Input_Columns | Invalid_Aggregate_Column | Floating_Point_Equality | Invalid_Aggregation | Unquoted_Delimiter | Additional_Warnings cross_tab self group_by=[] name_column=self.column_names.first values=Aggregate_Column.Count (on_problems=Report_Warning) = columns_helper = self.columns_helper problem_builder = Problem_Builder.new error_on_missing_columns=True diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso index 57040b29d294..591d632135f2 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso @@ -14,12 +14,12 @@ type Missing_Input_Columns One or more columns not found in the input table. Arguments: - - criteria: the names of the columns or regular expressions that did not - have any matches. + - criteria: the names of the columns, regular expressions or indices that + did not matches any columns. - where: an optional text describing to which object this error is related to (for example in join, whether the reported error is for the left or right table). - Error (criteria : [Text]) (where:Text|Nothing = Nothing) + Error (criteria : [Text | Integer]) (where:Text|Nothing = Nothing) ## PRIVATE @@ -29,21 +29,11 @@ type Missing_Input_Columns where = case self.where of Nothing -> "." location : Text -> " in "+location+"." - "The criteria "+self.criteria.to_text+" did not match any columns"+where - -type Column_Indexes_Out_Of_Range - ## PRIVATE - One or more column indexes were invalid on the input table. - Can occur when using By_Index. 
- Error (indexes : [Integer]) - - ## PRIVATE - - Convert a column indexes out of bounds error to a human-readable form. - to_display_text : Text - to_display_text self = case self.indexes.length == 1 of - True -> "The index " + (self.indexes.at 0).to_text + " is out of range." - False -> "The indexes "+self.indexes.short_display_text+" are out of range." + criteria_texts = self.criteria.map c-> case c of + _ : Integer -> c.to_text+" (index)" + _ -> c.pretty + criteria_text = criteria_texts.join ", " + "The criteria "+criteria_text+" did not match any columns"+where type Too_Many_Column_Names_Provided ## PRIVATE diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso index abdd958ea67d..f837672125a9 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso @@ -2,7 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Common.Index_Out_Of_Bounds import Standard.Base.Errors.Illegal_State.Illegal_State -from project.Errors import Invalid_Value_Type, No_Such_Column, Missing_Input_Columns, Column_Indexes_Out_Of_Range +from project.Errors import Invalid_Value_Type, No_Such_Column, Missing_Input_Columns import project.Data.Join_Condition.Join_Condition import project.Data.Type.Value_Type.Value_Type import project.Internal.Problem_Builder.Problem_Builder @@ -23,8 +23,8 @@ type Join_Condition_Resolver resolve : Join_Condition | Text | Vector (Join_Condition | Text) -> Problem_Behavior -> Join_Condition_Resolution resolve self conditions on_problems = redundant_names = Vector.new_builder - left_problem_builder = Problem_Builder.new missing_input_columns_location="the left table" types_to_always_throw=[Missing_Input_Columns, Column_Indexes_Out_Of_Range] - right_problem_builder = Problem_Builder.new missing_input_columns_location="the right table" 
types_to_always_throw=[Missing_Input_Columns, Column_Indexes_Out_Of_Range] + left_problem_builder = Problem_Builder.new missing_input_columns_location="the left table" types_to_always_throw=[Missing_Input_Columns] + right_problem_builder = Problem_Builder.new missing_input_columns_location="the right table" types_to_always_throw=[Missing_Input_Columns] resolve_selector problem_builder resolver selector = r_1 = resolver selector diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso index 901ac040e51f..0ff67c2f620a 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso @@ -3,7 +3,7 @@ import Standard.Base.Runtime.Ref.Ref import project.Internal.Vector_Builder.Vector_Builder -from project.Errors import Missing_Input_Columns, Column_Indexes_Out_Of_Range, Duplicate_Output_Column_Names, Invalid_Output_Column_Names, Invalid_Aggregate_Column +from project.Errors import Missing_Input_Columns, Duplicate_Output_Column_Names, Invalid_Output_Column_Names, Invalid_Aggregate_Column ## PRIVATE type Problem_Builder @@ -34,13 +34,10 @@ type Problem_Builder build_problemset : Vector build_problemset self = problems = Vector.new_builder - build_vector_and_append ref problem_creator = - vec = ref.get . 
build - if vec.not_empty then - problems.append (problem_creator vec) - build_vector_and_append self.missing_input_columns (Missing_Input_Columns.Error _ where=self.missing_input_columns_location) - build_vector_and_append self.oob_indices Column_Indexes_Out_Of_Range.Error + missing_criteria = self.missing_input_columns.get.build + self.oob_indices.get.build + if missing_criteria.not_empty then + problems.append (Missing_Input_Columns.Error missing_criteria where=self.missing_input_columns_location) self.other.to_vector.each problems.append problems.to_vector @@ -87,15 +84,15 @@ type Problem_Builder an error by the `attach_` methods regardless of the `Problem_Behavior` used. By default, an empty vector. - error_on_missing_columns: If set to `True`, `Missing_Input_Columns` and - `Column_Indexes_Out_Of_Range` will be raised as errors by the `attach_` + `Invalid_Aggregate_Column` will be raised as errors by the `attach_` methods regardless of the `Problem_Behavior` used. Defaults to `False`. - Setting this to `True` is essentially a shorthand for adding these two + Setting this to `True` is essentially a shorthand for adding these problem types to `types_to_always_throw`. - missing_input_columns_location: The location to add to the missing input column error to make it more informative. Defaults to `Nothing`. 
new : Vector -> Boolean -> Text | Nothing -> Problem_Builder new types_to_always_throw=[] error_on_missing_columns=False missing_input_columns_location=Nothing = - additional_types_to_throw = if error_on_missing_columns then [Missing_Input_Columns, Column_Indexes_Out_Of_Range, Invalid_Aggregate_Column] else [] + additional_types_to_throw = if error_on_missing_columns then [Missing_Input_Columns, Invalid_Aggregate_Column] else [] Problem_Builder.Value types_to_always_throw+additional_types_to_throw (Ref.new Vector_Builder.empty) (Ref.new Vector_Builder.empty) missing_input_columns_location other=Vector.new_builder ## PRIVATE diff --git a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso index 314909262370..f39d8a8df8ec 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Add_Row_Number_Spec.enso @@ -1,7 +1,7 @@ from Standard.Base import all from Standard.Table import Sort_Column -from Standard.Table.Errors import Missing_Input_Columns, Column_Indexes_Out_Of_Range, Duplicate_Output_Column_Names, Floating_Point_Equality +from Standard.Table.Errors import Missing_Input_Columns, Duplicate_Output_Column_Names, Floating_Point_Equality from Standard.Test import Test, Problems import Standard.Test.Extensions @@ -124,4 +124,4 @@ spec setup = r2.catch.criteria . 
should_equal ["Z", "Y"] r3 = t.add_row_number group_by=[44] - r3.should_fail_with Column_Indexes_Out_Of_Range + r3.should_fail_with Missing_Input_Columns diff --git a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso index 8f691bdeefdb..debe577c718a 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso @@ -1321,8 +1321,8 @@ spec setup = t2.catch.name.should_equal "MISSING" t3 = table.aggregate [Sum 42] - t3 . should_fail_with Column_Indexes_Out_Of_Range - t3.catch.indexes.should_equal [42] + t3 . should_fail_with Missing_Input_Columns + t3.catch.criteria.should_equal [42] Test.specify "should raise a warning when can't find a column by name, but a hard error if the missing column is in a Group_By" <| err1 = table.aggregate [Group_By "Missing", Group_By "Index", Group_By "Other_Missing"] on_problems=Problem_Behavior.Ignore @@ -1347,21 +1347,21 @@ spec setup = err3.catch.name.should_equal "Missing" err4 = table.aggregate [Group_By 100, Group_By "Index", Group_By -42] on_problems=Problem_Behavior.Ignore - err4.should_fail_with Column_Indexes_Out_Of_Range - err4.catch.indexes.should_equal [100, -42] + err4.should_fail_with Missing_Input_Columns + err4.catch.criteria.should_equal [100, -42] action2 = table.aggregate [Group_By "Index", Sum "Value", Sum 42] on_problems=_ - problems2 = [Column_Indexes_Out_Of_Range.Error [42]] + problems2 = [Missing_Input_Columns.Error [42]] tester2 = expect_column_names ["Index", "Sum Value"] Problems.test_problem_handling action2 problems2 tester2 # As above, missing errors from group-by take precedence over aggregates. 
err5 = table.aggregate [Group_By "Index", Group_By 55, Sum "Value", Sum 144, Group_By -33] on_problems=Problem_Behavior.Report_Error - err5.should_fail_with Column_Indexes_Out_Of_Range - err5.catch.indexes.should_equal [55, -33] + err5.should_fail_with Missing_Input_Columns + err5.catch.criteria.should_equal [55, -33] err6 = table.aggregate [Group_By "Index", Sum "Value", Sum 42] on_problems=Problem_Behavior.Ignore error_on_missing_columns=True - err6.catch . should_equal (Column_Indexes_Out_Of_Range.Error [42]) + err6.catch . should_equal (Missing_Input_Columns.Error [42]) Test.specify "should raise a warning when an invalid output name" <| action = table.aggregate [Group_By "Index" ""] on_problems=_ @@ -1395,7 +1395,7 @@ spec setup = Test.specify "should ignore Count_Distinct if no columns matched" <| action = table.aggregate [Count_Distinct [-100], Count] on_problems=_ - problems = [Column_Indexes_Out_Of_Range.Error [-100]] + problems = [Missing_Input_Columns.Error [-100]] tester = expect_column_names ["Count"] Problems.test_problem_handling action problems tester diff --git a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso index bfe835b49b98..775100059f7e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso @@ -110,8 +110,8 @@ spec setup = err1.catch.criteria . should_equal ["Name"] err2 = table.cross_tab [] 42 - err2.should_fail_with Column_Indexes_Out_Of_Range - err2.catch.indexes . should_equal [42] + err2.should_fail_with Missing_Input_Columns + err2.catch.criteria . should_equal [42] Test.specify "should fail if group-by contains missing columns" <| err1 = table2.cross_tab ["Group", "Nonexistent Group", "OTHER"] "Key" @@ -119,8 +119,8 @@ spec setup = err1.catch.criteria . 
should_equal ["Nonexistent Group", "OTHER"] err2 = table2.cross_tab [0, 42] "Key" - err2.should_fail_with Column_Indexes_Out_Of_Range - err2.catch.indexes . should_equal [42] + err2.should_fail_with Missing_Input_Columns + err2.catch.criteria . should_equal [42] Test.specify "should fail if aggregate values contain missing columns" <| err1 = table.cross_tab values=[Count, Sum "Nonexistent Value", Sum "Value", Sum "OTHER"] @@ -128,8 +128,8 @@ spec setup = err1.catch.name . should_equal "Nonexistent Value" err2 = table.cross_tab values=[Count, Sum "Nonexistent Value", Sum "Value", Sum 42] - err2.should_fail_with Column_Indexes_Out_Of_Range - err2.catch.indexes . should_equal [42] + err2.should_fail_with Missing_Input_Columns + err2.catch.criteria . should_equal [42] Test.specify "should fail if aggregate values contain invalid expressions" <| err1 = table.cross_tab values=[Sum "[MISSING]*10"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso index 4063525ce389..84d7d698cc34 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Distinct_Spec.enso @@ -104,8 +104,8 @@ spec setup = t3.catch . should_equal (Missing_Input_Columns.Error ["Y"]) t4 = t1.distinct [0, 42] on_problems=pb - t4.should_fail_with Column_Indexes_Out_Of_Range - t4.catch . should_equal (Column_Indexes_Out_Of_Range.Error [42]) + t4.should_fail_with Missing_Input_Columns + t4.catch . should_equal (Missing_Input_Columns.Error [42]) t5 = t1.distinct [] on_problems=pb t5.should_fail_with No_Input_Columns_Selected @@ -126,5 +126,5 @@ spec setup = action3 = t1.distinct [0, 42] error_on_missing_columns=False on_problems=_ tester3 table = table.at "X" . to_vector . 
should_equal [1, 2, 3] - problems3 = [Column_Indexes_Out_Of_Range.Error [42]] + problems3 = [Missing_Input_Columns.Error [42]] Problems.test_problem_handling action3 problems3 tester3 diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso index 910080636bea..e3b509a4073c 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso @@ -292,23 +292,19 @@ spec setup = r1 = t1.join t2 r1.should_fail_with Missing_Input_Columns r1.catch.criteria.should_equal ["X"] - r1.catch.to_display_text.should_equal "The criteria [X] did not match any columns in the right table." + r1.catch.to_display_text.should_equal "The criteria 'X' did not match any columns in the right table." - conditions = [Join_Condition.Equals "foo" 42, Join_Condition.Equals "X" -3, Join_Condition.Equals -1 "baz"] - - r2 = t1.join t2 on=conditions on_problems=Problem_Behavior.Ignore - ## We have both - - Column_Indexes_Out_Of_Range.Error [42, -3] - - Missing_Input_Columns.Error ["foo", "baz"] - here, but we can throw only one error. I think column names error - will be more useful, so I'd prioritize it. + conditions2 = [Join_Condition.Equals "foo" 42, Join_Condition.Equals "X" -3, Join_Condition.Equals -1 "baz"] + r2 = t1.join t2 on=conditions2 on_problems=Problem_Behavior.Ignore r2.should_fail_with Missing_Input_Columns r2.catch.criteria.should_equal ["foo"] - r2.catch.to_display_text.should_equal "The criteria [foo] did not match any columns in the left table." + r2.catch.to_display_text.should_equal "The criteria 'foo' did not match any columns in the left table." 
- r3 = t1.join t2 on=[Join_Condition.Equals 42 0] on_problems=Problem_Behavior.Ignore - r3.should_fail_with Column_Indexes_Out_Of_Range - r3.catch.indexes.should_equal [42] + conditions3 = [Join_Condition.Equals "Y" 42, Join_Condition.Equals "X" -3, Join_Condition.Equals -1 "baz"] + r3 = t1.join t2 on=conditions3 on_problems=Problem_Behavior.Ignore + r3.should_fail_with Missing_Input_Columns + r3.catch.criteria.should_equal ["baz", 42, -3] + r3.catch.to_display_text.should_equal "The criteria 'baz', 42 (index), -3 (index) did not match any columns in the right table." Test.specify "should report Invalid_Value_Type if non-text columns are provided to Equals_Ignore_Case" <| t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso index 377fb4bb312c..e10cbd1df364 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso @@ -84,9 +84,9 @@ spec setup = Test.specify "should correctly handle problems: out of bounds indices" <| selector = [0, 100, Sort_Column.Index -200, Sort_Column.Index 300] - expected_problem = Column_Indexes_Out_Of_Range.Error [100, -200, 300] + expected_problem = Missing_Input_Columns.Error [100, -200, 300] t1 = table.order_by selector - t1.should_fail_with Column_Indexes_Out_Of_Range + t1.should_fail_with Missing_Input_Columns t1.catch . 
should_equal expected_problem action = table.order_by selector error_on_missing_columns=False on_problems=_ diff --git a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso index b79d762f1b8c..9c630a0ffd40 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso @@ -70,11 +70,11 @@ spec setup = selector = [1, 0, 100, -200, 300] action = table.select_columns selector error_on_missing_columns=False on_problems=_ tester = expect_column_names ["foo", "bar"] - problems = [Column_Indexes_Out_Of_Range.Error [100, -200, 300]] + problems = [Missing_Input_Columns.Error [100, -200, 300]] Problems.test_problem_handling action problems tester err = table.select_columns selector - err.should_fail_with Column_Indexes_Out_Of_Range + err.should_fail_with Missing_Input_Columns Test.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] @@ -126,11 +126,7 @@ spec setup = Test.specify "should correctly handle problems in mixed case" <| err = table.select_columns ["foo", "hmm", 99] on_problems=Problem_Behavior.Ignore err.should_fail_with Missing_Input_Columns - err.catch.criteria . should_equal ["hmm"] - - err_2 = table.select_columns [99, "foo", "hmm"] on_problems=Problem_Behavior.Ignore - err_2.should_fail_with Missing_Input_Columns - err_2.catch.criteria . should_equal ["hmm"] + err.catch.criteria . 
should_equal ["hmm", 99] Test.specify "should correctly handle problems: no columns in the output" <| [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> @@ -174,11 +170,11 @@ spec setup = selector = [1, 0, 100, -200, 300] action = table.remove_columns selector on_problems=_ tester = expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] - problems = [Column_Indexes_Out_Of_Range.Error [100, -200, 300]] + problems = [Missing_Input_Columns.Error [100, -200, 300]] Problems.test_problem_handling action problems tester err = table.remove_columns selector error_on_missing_columns=True - err.should_fail_with Column_Indexes_Out_Of_Range + err.should_fail_with Missing_Input_Columns Test.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] @@ -256,11 +252,11 @@ spec setup = selector = [1, 0, 100, -200, 300] action = table.reorder_columns selector on_problems=_ tester = expect_column_names ["bar", "foo", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] - problems = [Column_Indexes_Out_Of_Range.Error [100, -200, 300]] + problems = [Missing_Input_Columns.Error [100, -200, 300]] Problems.test_problem_handling action problems tester err = table.reorder_columns selector error_on_missing_columns=True - err.should_fail_with Column_Indexes_Out_Of_Range + err.should_fail_with Missing_Input_Columns Test.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] @@ -393,12 +389,12 @@ spec setup = action = table.rename_columns map error_on_missing_columns=False on_problems=_ tester = expect_column_names ["FirstColumn", "beta", "gamma", "Another"] err_checker err = - err.catch.should_be_a Column_Indexes_Out_Of_Range.Error - err.catch.indexes.should_contain_the_same_elements_as [-200, 100, 300] + err.catch.should_be_a Missing_Input_Columns.Error + err.catch.criteria.should_contain_the_same_elements_as [-200, 100, 300] Problems.test_advanced_problem_handling action 
err_checker (x-> x) tester err = table.rename_columns map - err.should_fail_with Column_Indexes_Out_Of_Range + err.should_fail_with Missing_Input_Columns Test.specify "should correctly handle edge-cases: aliased indices" <| map1 = Map.from_vector [[1, "FirstColumn"], [-3, "FirstColumn"]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso index 1fa3b63e4f0e..2e1a833c1422 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso @@ -78,8 +78,8 @@ spec setup = err1.catch.criteria . should_equal ["Missing", "Missing 2"] err2 = t1.transpose [0, -1, 42, -100] - err2.should_fail_with Column_Indexes_Out_Of_Range - err2.catch.indexes . should_equal [42, -100] + err2.should_fail_with Missing_Input_Columns + err2.catch.criteria . should_equal [42, -100] action1 = t1.transpose ["Key", "Missing", "Missing 2"] error_on_missing_columns=False on_problems=_ tester1 table = @@ -90,7 +90,7 @@ spec setup = action2 = t1.transpose [0, -1, 42, -100] error_on_missing_columns=False on_problems=_ tester2 table = table.column_names . 
should_equal ["Key", "Another", "Name", "Value"] - problems2 = [Column_Indexes_Out_Of_Range.Error [42, -100]] + problems2 = [Missing_Input_Columns.Error [42, -100]] Problems.test_problem_handling action2 problems2 tester2 Test.specify "should warn on column name clashes" <| diff --git a/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso b/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso index cd9881e87c35..95565e66e6da 100644 --- a/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso +++ b/test/Table_Tests/src/Formatting/Parse_Values_Spec.enso @@ -334,13 +334,13 @@ spec = Test.specify "should correctly handle problems: out of bounds indices" <| t1 = Table.new [["A", ["1", "2", "3"]]] r1 = t1.parse columns=[0, -1, 42, -5] - r1.should_fail_with Column_Indexes_Out_Of_Range - r1.catch.indexes . should_equal [42, -5] + r1.should_fail_with Missing_Input_Columns + r1.catch.criteria . should_equal [42, -5] action = t1.parse columns=[0, -1, 42, -5] error_on_missing_columns=False on_problems=_ tester table = table.at "A" . to_vector . should_equal [1, 2, 3] - problems = [Column_Indexes_Out_Of_Range.Error [42, -5]] + problems = [Missing_Input_Columns.Error [42, -5]] Problems.test_problem_handling action problems tester Test.specify "should allow mixed column selectors" <| @@ -386,8 +386,7 @@ spec = r4 = t1.parse columns=["nonexistent column :D", -42] error_on_missing_columns=False on_problems=Problem_Behavior.Report_Warning r4 . 
should_equal t1 Problems.expect_warning No_Input_Columns_Selected r4 - Problems.expect_warning (Missing_Input_Columns.Error ["nonexistent column :D"]) r4 - Problems.expect_warning (Column_Indexes_Out_Of_Range.Error [-42]) r4 + Problems.expect_warning (Missing_Input_Columns.Error ["nonexistent column :D", -42]) r4 Test.group "Column.parse" <| Test.specify "should correctly parse integers" <| From f09d922a41184968be5fbfa74e0259bc5bd05a0c Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Mon, 5 Jun 2023 23:14:59 +1000 Subject: [PATCH 16/39] Project create form (#6923) * Re-add project create form * Add dropdown component and use in projct create form * Fix "project create" button behavior --- .../src/authentication/src/components/svg.tsx | 7 + .../src/dashboard/components/createForm.tsx | 2 +- .../src/dashboard/components/dashboard.tsx | 48 ++++--- .../src/dashboard/components/dropdown.tsx | 59 +++++++++ .../components/projectCreateForm.tsx | 125 +++++++++--------- .../src/dashboard/components/templates.tsx | 4 +- 6 files changed, 163 insertions(+), 82 deletions(-) create mode 100644 app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dropdown.tsx diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/components/svg.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/components/svg.tsx index b03888589a0a..da5c910ab1b1 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/components/svg.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/components/svg.tsx @@ -264,6 +264,13 @@ export const DEFAULT_USER_ICON = ( ) +/** An icon representing a menu that can be expanded downwards. */ +export const DOWN_CARET_ICON = ( + + + +) + /** Props for a {@link Spinner}. 
*/ export interface SpinnerProps { size: number diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/createForm.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/createForm.tsx index ad3f2154a0bb..a52d1dd3bc7f 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/createForm.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/createForm.tsx @@ -39,7 +39,7 @@ function CreateForm(props: CreateFormProps) {

{ event.stopPropagation() diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx index 1d35a58e0ec9..c61654068d62 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx @@ -1,6 +1,7 @@ /** @file Main dashboard component, responsible for listing user's projects as well as other * interactive components. */ import * as react from 'react' +import toast from 'react-hot-toast' import * as common from 'enso-common' @@ -80,6 +81,7 @@ export interface CreateFormProps { left: number top: number directoryId: backendModule.DirectoryId + getNewProjectName: (templateId: string | null) => string onSuccess: () => void } @@ -585,19 +587,26 @@ function Dashboard(props: DashboardProps) { /** Heading element for every column. */ const ColumnHeading = (column: Column, assetType: backendModule.AssetType) => column === Column.name ? ( - assetType === backendModule.AssetType.project ? ( - <>{ASSET_TYPE_NAME[assetType]} - ) : ( -
- {ASSET_TYPE_NAME[assetType]} - -
- ) + } + }} + > + {svg.ADD_ICON} + +
) : ( <>{COLUMN_NAME[column]} ) diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dropdown.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dropdown.tsx new file mode 100644 index 000000000000..c6c828efe310 --- /dev/null +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dropdown.tsx @@ -0,0 +1,59 @@ +/** @file A select menu with a dropdown. */ +import * as react from 'react' + +import * as svg from '../../components/svg' + +/** Props for a {@link Dropdown}. */ +export interface DropdownProps { + items: [string, ...string[]] + onChange: (value: string) => void + className?: string + optionsClassName?: string +} + +/** A select menu with a dropdown. */ +function Dropdown(props: DropdownProps) { + const { items, onChange, className, optionsClassName } = props + const [value, setValue] = react.useState(items[0]) + // TODO: + const [isDropdownVisible, setIsDropdownVisible] = react.useState(false) + + return ( +
+
{ + setIsDropdownVisible(!isDropdownVisible) + }} + > + {value} {svg.DOWN_CARET_ICON} +
+
+
+ {items.map(item => ( +
{ + setIsDropdownVisible(false) + setValue(item) + onChange(item) + }} + className="cursor-pointer bg-white first:rounded-t-lg last:rounded-b-lg hover:bg-gray-100 p-1" + > + {item} +
+ ))} +
+
+
+ ) +} + +export default Dropdown diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectCreateForm.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectCreateForm.tsx index e0b57a3a4b96..95ed741a07cd 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectCreateForm.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectCreateForm.tsx @@ -4,9 +4,11 @@ import toast from 'react-hot-toast' import * as backendModule from '../backend' import * as backendProvider from '../../providers/backend' -import * as error from '../../error' import * as modalProvider from '../../providers/modal' +import * as templates from './templates' + import CreateForm, * as createForm from './createForm' +import Dropdown from './dropdown' // ========================= // === ProjectCreateForm === @@ -15,78 +17,81 @@ import CreateForm, * as createForm from './createForm' /** Props for a {@link ProjectCreateForm}. */ export interface ProjectCreateFormProps extends createForm.CreateFormPassthroughProps { directoryId: backendModule.DirectoryId + getNewProjectName: (templateId: string | null) => string onSuccess: () => void } /** A form to create a project. 
*/ function ProjectCreateForm(props: ProjectCreateFormProps) { - const { directoryId, onSuccess, ...passThrough } = props + const { directoryId, getNewProjectName, onSuccess, ...passThrough } = props const { backend } = backendProvider.useBackend() const { unsetModal } = modalProvider.useSetModal() + const [defaultName, setDefaultName] = react.useState(() => getNewProjectName(null)) const [name, setName] = react.useState(null) - const [template, setTemplate] = react.useState(null) + const [templateId, setTemplateId] = react.useState(null) - if (backend.type === backendModule.BackendType.local) { - return <> - } else { - const onSubmit = async (event: react.FormEvent) => { - event.preventDefault() - if (name == null) { - toast.error('Please provide a project name.') - } else { - unsetModal() - await toast - .promise( - backend.createProject({ - parentDirectoryId: directoryId, - projectName: name, - projectTemplateName: template, - }), - { - loading: 'Creating project...', - success: 'Sucessfully created project.', - error: error.unsafeIntoErrorMessage, - } - ) - .then(onSuccess) + const onSubmit = async (event: react.FormEvent) => { + event.preventDefault() + unsetModal() + const finalName = name ?? defaultName + const templateText = templateId == null ? '' : `from template '${templateId}'` + await toast.promise( + backend.createProject({ + parentDirectoryId: directoryId, + projectName: name ?? defaultName, + projectTemplateName: templateId, + }), + { + loading: `Creating project '${finalName}'${templateText}...`, + success: `Sucessfully created project '${finalName}'${templateText}.`, + // This is UNSAFE, as the original function's parameter is of type `any`. + error: (promiseError: Error) => + `Error creating project '${finalName}'${templateText}: ${promiseError.message}`, } - } - - return ( - -
- - { - setName(event.target.value) - }} - /> -
-
- {/* FIXME[sb]: Use the array of templates in a dropdown when it becomes available. */} - - { - setTemplate(event.target.value) - }} - /> -
-
) + onSuccess() } + + return ( + +
+ + { + setName(event.target.value) + }} + /> +
+
+ + item.title)]} + onChange={newTemplateTitle => { + const newTemplateId = + templates.TEMPLATES.find( + template => template.title === newTemplateTitle + )?.id ?? null + setTemplateId(newTemplateId) + if (name == null) { + setDefaultName(getNewProjectName(newTemplateId)) + } + }} + /> +
+
+ ) } export default ProjectCreateForm diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx index 2ed71940b2c8..ecec1468da77 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx @@ -6,7 +6,7 @@ import * as svg from '../../components/svg' // ================= /** Template metadata. */ -interface Template { +export interface Template { title: string description: string id: string @@ -14,7 +14,7 @@ interface Template { } /** The full list of templates. */ -const TEMPLATES: Template[] = [ +export const TEMPLATES: [Template, ...Template[]] = [ { title: 'Colorado COVID', id: 'Colorado_COVID', From e9a92a1fb57d7c4f3acbd1ec304a26a691903128 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Mon, 5 Jun 2023 15:16:12 +0200 Subject: [PATCH 17/39] Throw panic on "no currying for conversions" (#6940) Previously, a `RuntimeException` would be thrown when an attempt would be made to curry a conversion function. That is problematic for IDE where `executionFailed` means we can't enter functions due to lack of method pointers info. Closes #6897. ![Screenshot from 2023-06-02 20-31-03](https://github.com/enso-org/enso/assets/292128/a6c77544-2c47-425c-8ce0-982d837dda5b) # Important Notes A more generic solution that allows to recover from execution failures will need a follow up. 
--- .../Base/0.0.0-dev/src/Errors/Common.enso | 25 +++++++++++++++++++ .../node/callable/InvokeCallableNode.java | 4 +-- .../builtin/error/NoConversionCurrying.java | 19 ++++++++++++++ .../interpreter/runtime/builtin/Error.java | 7 ++++++ test/Tests/src/Semantic/Conversion_Spec.enso | 3 +++ 5 files changed, 56 insertions(+), 2 deletions(-) create mode 100644 engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/NoConversionCurrying.java diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso index 6f0fb3e0c146..d004a1f1c7cd 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Errors/Common.enso @@ -323,6 +323,31 @@ type No_Such_Conversion to_display_text : Text to_display_text self = "Could not find a conversion from `"+self.that.to_display_text+"` to `"+self.target.to_display_text+"`." +@Builtin_Type +type No_Conversion_Currying + ## PRIVATE + An error that occurs when an attempt is made to curry a conversion function. + + Arguments: + - has_this: true, when `this` argument is present + - has_that: true, when `that` argument is present + - conversion: the conversion that was attempted. + Error has_this has_that conversion + + ## PRIVATE + Convert the No_Conversion_Currying error to a human-readable format. + to_display_text : Text + to_display_text self = + case self.has_this of + True -> + case self.has_that of + True -> "Conversion currying is not supported." + False -> "Conversion currying without `that` argument is not supported." + False -> + case self.has_that of + True -> "Conversion currying without `this` argument is not supported." + False -> "Conversion currying without `this` and `that` arguments is not supported." 
+ @Builtin_Type type Forbidden_Operation ## PRIVATE diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java index 967943b45509..defd9e29a8ba 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeCallableNode.java @@ -224,8 +224,8 @@ public Object invokeConversion( callerFrame, state, conversion, selfArgument, thatArgument, arguments); } else { CompilerDirectives.transferToInterpreter(); - throw new RuntimeException( - "Conversion currying without `this` or `that` argument is not supported."); + var ctx = EnsoContext.get(this); + throw new PanicException(ctx.getBuiltins().error().makeNoConversionCurrying(canApplyThis, canApplyThat, conversion), this); } } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/NoConversionCurrying.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/NoConversionCurrying.java new file mode 100644 index 000000000000..3a01a7c944e2 --- /dev/null +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/builtin/error/NoConversionCurrying.java @@ -0,0 +1,19 @@ +package org.enso.interpreter.node.expression.builtin.error; + +import org.enso.interpreter.dsl.BuiltinType; +import org.enso.interpreter.node.expression.builtin.UniquelyConstructibleBuiltin; + +import java.util.List; + +@BuiltinType +public class NoConversionCurrying extends UniquelyConstructibleBuiltin { + @Override + protected String getConstructorName() { + return "Error"; + } + + @Override + protected List getConstructorParamNames() { + return List.of("has_this", "has_that", "conversion"); + } +} diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/builtin/Error.java 
b/engine/runtime/src/main/java/org/enso/interpreter/runtime/builtin/Error.java index 0cbd91c70278..5addeb626b92 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/builtin/Error.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/builtin/Error.java @@ -25,6 +25,7 @@ public class Error { private final UninitializedState uninitializedState; private final NoSuchMethod noSuchMethod; private final NoSuchConversion noSuchConversion; + private final NoConversionCurrying noConversionCurrying; private final ModuleNotInPackageError moduleNotInPackageError; private final ArithmeticError arithmeticError; private final InvalidArrayIndex invalidArrayIndex; @@ -60,6 +61,7 @@ public Error(Builtins builtins, EnsoContext context) { uninitializedState = builtins.getBuiltinType(UninitializedState.class); noSuchMethod = builtins.getBuiltinType(NoSuchMethod.class); noSuchConversion = builtins.getBuiltinType(NoSuchConversion.class); + noConversionCurrying = builtins.getBuiltinType(NoConversionCurrying.class); moduleNotInPackageError = builtins.getBuiltinType(ModuleNotInPackageError.class); arithmeticError = builtins.getBuiltinType(ArithmeticError.class); invalidArrayIndex = builtins.getBuiltinType(InvalidArrayIndex.class); @@ -136,6 +138,11 @@ public Atom makeInvalidConversionTarget(Object target) { return invalidConversionTarget.newInstance(target); } + public Atom makeNoConversionCurrying( + boolean hasThis, boolean hasThat, UnresolvedConversion conversion) { + return noConversionCurrying.newInstance(hasThis, hasThat, conversion); + } + /** * Creates an instance of the runtime representation of a {@code Type_Error}. * diff --git a/test/Tests/src/Semantic/Conversion_Spec.enso b/test/Tests/src/Semantic/Conversion_Spec.enso index dda980a3ad40..5d2eec40efa2 100644 --- a/test/Tests/src/Semantic/Conversion_Spec.enso +++ b/test/Tests/src/Semantic/Conversion_Spec.enso @@ -112,4 +112,7 @@ spec = meta_from.rename "foo" 123 . 
should_equal "foo called" meta_from.rename "foo" . should_equal .foo + Test.specify "should not allow currying" <| + Panic.recover Any (Foo.from) . catch Any .to_display_text . should_equal "Conversion currying without `that` argument is not supported." + main = Test_Suite.run_main spec From db96bd2e2c562701406e0f87dc6512ecb43f9d9d Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Mon, 5 Jun 2023 14:57:30 +0100 Subject: [PATCH 18/39] Small fixes from book club. (#6933) - Add the missing dropdowns for `Locale` and `Encoding`. - Correct a few mismatched type signatures. - Adjust `order_by` calls with a single `Sort_Column` to call in a Vector. - Adjust parameter names for `transpose`. - Fix for the table viz: escape HTML and `suppressFieldDotNotation`. - Use `Filter_Condition.Equal True` for the default filter. - Adjust `Data.fetch` to return the response on success when parse fails. Rename `parse` to `try_auto_parse`. - Add various aliases for methods. - Add tests for `Table.set` when using a `Vector`, `Range` or `Date_Range`. - Add check for mismatched length on `Table.set`. 
![image](https://github.com/enso-org/enso/assets/4699705/23ea0ba3-2b05-4af8-afd9-f35b55446c24) ![image](https://github.com/enso-org/enso/assets/4699705/8b0253e6-e9e8-490a-9607-0da51ab5a215) --- .../visualization/java_script/table.js | 8 +- .../lib/Standard/Base/0.0.0-dev/src/Data.enso | 15 ++-- .../Base/0.0.0-dev/src/Data/Array.enso | 3 +- .../Base/0.0.0-dev/src/Data/Numbers.enso | 6 +- .../Base/0.0.0-dev/src/Data/Text.enso | 3 + .../0.0.0-dev/src/Data/Text/Extensions.enso | 4 + .../src/Data/Text/Text_Ordering.enso | 1 + .../Base/0.0.0-dev/src/Data/Vector.enso | 3 +- .../0.0.0-dev/src/System/File_Format.enso | 1 + .../Database/0.0.0-dev/src/Data/Column.enso | 13 +-- .../Database/0.0.0-dev/src/Data/Table.enso | 59 ++++++++++--- .../Searcher/0.0.0-dev/src/Data_Science.enso | 2 +- .../0.0.0-dev/src/Data_Science/Transform.enso | 2 +- .../0.0.0-dev/src/Data/Aggregate_Column.enso | 4 +- .../Table/0.0.0-dev/src/Data/Column.enso | 8 +- .../src/Data/Column_Vector_Extensions.enso | 2 +- .../0.0.0-dev/src/Data/Data_Formatter.enso | 1 + .../0.0.0-dev/src/Data/Join_Condition.enso | 1 + .../Table/0.0.0-dev/src/Data/Table.enso | 85 +++++++++++++------ .../src/Delimited/Delimited_Format.enso | 4 + .../0.0.0-dev/src/Internal/Table_Helpers.enso | 2 +- .../Cross_Tab_Spec.enso | 2 +- .../Order_By_Spec.enso | 8 +- .../Transpose_Spec.enso | 8 +- .../Table_Tests/src/In_Memory/Table_Spec.enso | 27 +++++- test/Visualization_Tests/src/SQL_Spec.enso | 2 +- 26 files changed, 199 insertions(+), 75 deletions(-) diff --git a/app/gui/view/graph-editor/src/builtin/visualization/java_script/table.js b/app/gui/view/graph-editor/src/builtin/visualization/java_script/table.js index da52b9a3d814..75115da7dcf3 100644 --- a/app/gui/view/graph-editor/src/builtin/visualization/java_script/table.js +++ b/app/gui/view/graph-editor/src/builtin/visualization/java_script/table.js @@ -91,6 +91,11 @@ class TableVisualization extends Visualization { return content } + function escapeHTML(str) { + const mapping = { 
'&': '&', '<': '<', '"': '"', "'": ''', '>': '>' } + return str.replace(/[&<>"']/g, m => mapping[m]) + } + function cellRenderer(params) { if (params.value === null) { return 'Nothing' @@ -99,7 +104,7 @@ class TableVisualization extends Visualization { } else if (params.value === '') { return 'Empty' } - return params.value.toString() + return escapeHTML(params.value.toString()) } if (!this.tabElem) { @@ -146,6 +151,7 @@ class TableVisualization extends Visualization { cellRenderer: cellRenderer, }, onColumnResized: e => this.lockColumnSize(e), + suppressFieldDotNotation: true, } this.agGrid = new agGrid.Grid(tabElem, this.agGridOptions) } diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso index 37dcf1a32e24..a2615140e383 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso @@ -149,9 +149,10 @@ list_directory directory name_filter=Nothing recursive=False = - uri: The URI to fetch. - method: The HTTP method to use. Defaults to `GET`. - headers: The headers to send with the request. Defaults to an empty vector. - - parse: If successful should the body be parsed to an Enso native object. + - try_auto_parse: If successful should the body be attempted to be parsed to + an Enso native object. fetch : (URI | Text) -> HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any -fetch uri method=HTTP_Method.Get headers=[] parse=True = +fetch uri method=HTTP_Method.Get headers=[] try_auto_parse=True = parsed_headers = headers . map h-> case h of _ : Vector -> Header.new (h.at 0) (h.at 1) _ : Pair -> Header.new (h.at 0) (h.at 1) @@ -162,7 +163,8 @@ fetch uri method=HTTP_Method.Get headers=[] parse=True = response = HTTP.new.request request if response.code.is_success.not then Error.throw (Request_Error.Error "Status Code" ("Request failed with status code: " + response.code.to_text + ". 
" + response.body.decode_as_text)) else - if parse then response.decode if_unsupported=response else response + if try_auto_parse.not then response else + response.decode if_unsupported=response . catch handler=(_->response) ## ALIAS Download, HTTP Get Fetches from the URI and returns the response, parsing the body if the @@ -172,7 +174,8 @@ fetch uri method=HTTP_Method.Get headers=[] parse=True = Arguments: - method: The HTTP method to use. Defaults to `GET`. - headers: The headers to send with the request. Defaults to an empty vector. - - parse: If successful should the body be parsed to an Enso native object. + - try_auto_parse: If successful should the body be attempted to be parsed to + an Enso native object. URI.fetch : HTTP_Method -> Vector (Header | Pair Text Text) -> Boolean -> Any -URI.fetch self method=HTTP_Method.Get headers=[] parse=True = - Data.fetch self method headers parse +URI.fetch self method=HTTP_Method.Get headers=[] try_auto_parse=True = + Data.fetch self method headers try_auto_parse diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso index 7cb8176be8df..490b6d43d027 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso @@ -156,7 +156,8 @@ type Array # For compatibility with Vector # - ## Sort the array. + ## ALIAS order_by + Sort the array. Arguments: - order: The order in which the array elements are sorted. 
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso index b7b824c3280e..88917f73a335 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso @@ -1,3 +1,4 @@ +import project.Any.Any import project.Data.Text.Text import project.Data.Locale.Locale import project.Errors.Common.Arithmetic_Error @@ -271,6 +272,7 @@ type Number Convert the value 5000 to a string. 5000.format "#,##0" + @locale Locale.default_widget format : Text -> Locale -> Text format self format locale=Locale.default = symbols = DecimalFormatSymbols.new locale.java_locale @@ -1142,7 +1144,7 @@ round_min_long = -99999999999999 ## PRIVATE Restrict rounding decimal_places parameter. -check_decimal_places : Integer -> Function +check_decimal_places : Integer -> Any -> Any ! Illegal_Argument check_decimal_places decimal_places ~action = if decimal_places >= round_min_decimal_places && decimal_places <= round_max_decimal_places then action else msg = "round: decimal_places must be between " + round_min_decimal_places.to_text + " and " + round_max_decimal_places.to_text + " (inclusive), but was " + decimal_places.to_text @@ -1150,7 +1152,7 @@ check_decimal_places decimal_places ~action = ## PRIVATE Restrict allowed range of input to rounding methods. -check_round_input : Number -> Function +check_round_input : Number -> Function -> Any ! 
Illegal_Argument check_round_input n ~action = if n >= round_min_long && n <= round_max_long then action else msg = "Error: `round` can only accept values between " + round_min_long.to_text + " and " + round_max_long.to_text + " (inclusive), but was " + n.to_text diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso index 6f3cc9b32bc1..d854c65dad02 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text.enso @@ -55,6 +55,7 @@ type Text ACCENT). Therefore: (('É' . equals_ignore_case 'é') && ('é' . equals_ignore_case 'e\u0301')) == True + @locale Locale.default_widget equals_ignore_case : Text -> Locale -> Boolean equals_ignore_case self that locale=Locale.default = Text_Utils.equals_ignore_case self that locale.java_locale @@ -64,6 +65,7 @@ type Text UNSTABLE Unifies the case of all letters in the text, generating a key which can be used to perform case-insensitive comparisons. + @locale Locale.default_widget to_case_insensitive_key : Locale -> Text to_case_insensitive_key self locale=Locale.default = Text_Utils.case_insensitive_key self locale.java_locale @@ -77,6 +79,7 @@ type Text Checking how "a" orders in relation to "b". 
"a".compare_to_ignore_case "b" + @locale Locale.default_widget compare_to_ignore_case : Text -> Locale -> Ordering compare_to_ignore_case self that locale=Locale.default = if that.is_nothing then Error.throw (Type_Error.Error Text that "that") else diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso index 47ea5a4a6d50..9c9ee6420455 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso @@ -1035,6 +1035,7 @@ Text.drop self range=(Index_Sub_Range.First 1) = from Standard.Base import all example_case_with_locale = "i".to_case Upper (Locale.new "tr") == "İ" +@locale Locale.default_widget Text.to_case : Case -> Locale -> Text Text.to_case self case_option=Case.Lower locale=Locale.default = case case_option of Case.Lower -> UCharacter.toLowerCase locale.java_locale self @@ -1391,6 +1392,7 @@ Text.last_index_of self term="" start=-1 case_sensitivity=Case_Sensitivity.Sensi Parse the text "7.6" into a decimal number. "7.6".parse_decimal +@locale Locale.default_widget Text.parse_decimal : Locale | Nothing -> Decimal ! Number_Parse_Error Text.parse_decimal self locale=Nothing = Decimal.parse self locale @@ -1570,6 +1572,7 @@ Text.parse_date self format=Nothing = Date.parse self format example_parse = "06 of May 2020 at 04:30AM".parse_date_time "dd 'of' MMMM yyyy 'at' hh:mma" +@locale Locale.default_widget Text.parse_date_time : Text | Nothing -> Locale -> Date_Time ! Time_Error Text.parse_date_time self format=Nothing locale=Locale.default = Date_Time.parse self format locale @@ -1638,6 +1641,7 @@ Text.parse_date_time self format=Nothing locale=Locale.default = Date_Time.parse import Standard.Base.Data.Text.Extensions example_parse = "4:30AM".parse_time_of_day "h:mma" +@locale Locale.default_widget Text.parse_time_of_day : Text | Nothing -> Locale -> Time_Of_Day ! 
Time_Error Text.parse_time_of_day self format=Nothing locale=Locale.default = Time_Of_Day.parse self format locale diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Ordering.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Ordering.enso index 939464e76119..66c3d13351b9 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Ordering.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Ordering.enso @@ -38,6 +38,7 @@ type Text_Ordering Arguments: - sort_digits_as_numbers: Sort digits in the text as numbers. Setting this to `True` results in a "Natural" ordering. + @locale Locale.default_widget Case_Insensitive (locale:Locale=Locale.default) (sort_digits_as_numbers:Boolean=False) ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso index 84dbf1d5446e..4324bbc81da7 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso @@ -866,7 +866,8 @@ type Vector a second : Any ! Index_Out_Of_Bounds second self = self.at 1 - ## Sort the vector. + ## ALIAS order_by + Sort the vector. Arguments: - order: The order in which the vector elements are sorted. 
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso index 5cccfab7acb2..1abaef3b9c60 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso @@ -109,6 +109,7 @@ type File_Format Single_Choice display=Display.Always values=(all_types.map n->(Option (make_name n) (File_Format.constructor_code n))) type Plain_Text_Format + @encoding Encoding.default_widget Plain_Text (encoding:Encoding=Encoding.utf_8) ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso index 2e72f3bd95b7..97fe93c215db 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso @@ -233,6 +233,7 @@ type Column Returns a column with results of comparing this column's elements against `other`. + @locale Locale.default_widget equals_ignore_case : Column | Any -> Locale -> Column equals_ignore_case self other locale=Locale.default = Value_Type.expect_text self <| @@ -691,7 +692,8 @@ type Column False -> is_blank result.rename new_name - ## Returns a new column where missing values have been replaced with the + ## ALIAS if_nothing + Returns a new column where missing values have been replaced with the provided default. fill_nothing : Column | Any -> Column fill_nothing self default = @@ -701,7 +703,8 @@ type Column op_result = self.make_binary_op "FILL_NULL" default new_name adapt_unified_column op_result common_type - ## Returns a new column where empty Text values have been replaced with the + ## ALIAS if_empty + Returns a new column where empty Text values have been replaced with the provided default. 
Arguments: @@ -732,8 +735,7 @@ type Column rename self name = self.naming_helpers.ensure_name_is_valid name <| Column.Value name self.connection self.sql_type_reference self.expression self.context - ## UNSTABLE - + ## ALIAS order_by Sorts the column according to the specified rules. Arguments: @@ -752,7 +754,7 @@ type Column column.sort Sort_Direction.Descending sort : Sort_Direction -> Column sort self order=Sort_Direction.Ascending = - self.to_table.order_by (Sort_Column.Index 0 order) . at 0 + self.to_table.order_by [Sort_Column.Index 0 order] . at 0 ## UNSTABLE Creates a new Column with the specified range of rows from the input @@ -1050,6 +1052,7 @@ type Column self.internal_do_cast type on_problems ## Formatting values is not supported in database columns. + @locale Locale.default_widget format : Text | Column -> Locale -> Column ! Illegal_Argument format self format=Nothing locale=Locale.default = _ = [format, locale] diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index 7f466d3ae556..e2e5d8cdd96c 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -452,7 +452,7 @@ type Table people.filter "age" (age -> (age%10 == 0)) @column Widget_Helpers.make_column_name_selector filter : (Column | Text | Integer) -> (Filter_Condition|(Any->Boolean)) -> Problem_Behavior -> Table ! 
No_Such_Column | Index_Out_Of_Bounds | Invalid_Value_Type - filter self column filter=(Filter_Condition.Is_True) on_problems=Report_Warning = case column of + filter self column filter=(Filter_Condition.Equal True) on_problems=Report_Warning = case column of _ : Column -> mask filter_column = case Helpers.check_integrity self filter_column of False -> @@ -595,7 +595,7 @@ type Table In the call below, assuming that the table of `t1` contains rows for numbers 1, 2, ..., 10, will return rows starting from 6 and not an empty result as one could expect if the limit was applied before the filters. - t1 = table.order_by ([Sort_Column.Name "A"]) . limit 5 + t1 = table.order_by [Sort_Column.Name "A"] . limit 5 t2 = t1.filter 'A' (Greater than=5) t2.read limit : Integer -> Table @@ -1350,14 +1350,14 @@ type Table value field. Arguments: - - id_fields: Set of fields to remain as columns. These values will be + - key_columns: Set of fields to remain as columns. These values will be repeated for each data field that is pivoted. - - name_field: The name of the field that will contain the names of the - pivoted fields. If this name is already in use, it will be renamed - with a numeric suffix. - - value_field: The name of the field that will contain the values of the - pivoted fields. If this name is already in use, it will be renamed - with a numeric suffix. + - attribute_column_name: The name of the field that will contain the + names of the pivoted fields. If this name is already in use, it will be + renamed with a numeric suffix. + - value_column_name: The name of the field that will contain the values + of the pivoted fields. If this name is already in use, it will be + renamed with a numeric suffix. - on_problems: Specifies how to handle problems if they occur, reporting them as warnings by default. @@ -1373,12 +1373,30 @@ type Table - If any column names in the new table are clashing, a `Duplicate_Output_Column_Names` is reported according to the `on_problems` setting. 
- @id_fields Widget_Helpers.make_column_name_vector_selector + + ? Example Transpose Operation + + Input Table `table`: + + Id | Name | Country + ----|---------|--------- + A | Example | France + B | Another | Germany + + Result `table.transpose ['Id'] 'Attribute' 'Value'`: + + Id | Attribute | Value + ----|-----------|--------- + A | Name | Example + A | Country | France + B | Name | Another + B | Country | Germany + @key_columns Widget_Helpers.make_column_name_vector_selector transpose : Vector (Integer | Text | Column_Selector) | Text | Integer -> Text -> Text -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Duplicate_Output_Column_Names - transpose self id_fields=[] (name_field="Name") (value_field="Value") (error_on_missing_columns=True) (on_problems = Report_Warning) = + transpose self key_columns=[] (attribute_column_name="Name") (value_column_name="Value") (error_on_missing_columns=True) (on_problems = Report_Warning) = ## Avoid unused arguments warning. We cannot rename arguments to `_`, because we need to keep the API consistent with the in-memory table. - _ = [id_fields, name_field, value_field, error_on_missing_columns, on_problems] + _ = [key_columns, attribute_column_name, value_column_name, error_on_missing_columns, on_problems] msg = "Transposing columns is not supported in database tables, the table has to be materialized first with `read`." Error.throw (Unsupported_Database_Operation.Error msg) @@ -1399,7 +1417,7 @@ type Table ! Error Conditions - - If a column in `group_by` or `name_field` is not in the input table, + - If a column in `group_by` or `name_column` is not in the input table, a `Missing_Input_Columns` is raised as a dataflow error. 
- If a column selector in `values` given as a `Text` and it does not match any columns in the input table nor is it a valid expression, an @@ -1414,6 +1432,21 @@ type Table an `Unquoted_Delimiter` - If there are more than 10 issues with a single column, an `Additional_Warnings`. + + ? Example Cross Tab Operation + + Input Table `table`: + + Id | B | C + ----|---------|--------- + A | Name | Example + A | Country | France + + Result `table.cross_tab ['Id'] 'B' (Aggregate_Column.First 'C')`: + + Id | Name | Country + ----|---------|--------- + A | Example | France @group_by Widget_Helpers.make_column_name_vector_selector @name_column Widget_Helpers.make_column_name_selector @values (Widget_Helpers.make_aggregate_column_selector include_group_by=False) diff --git a/distribution/lib/Standard/Searcher/0.0.0-dev/src/Data_Science.enso b/distribution/lib/Standard/Searcher/0.0.0-dev/src/Data_Science.enso index 6756eec15fa1..df8fdc30123f 100644 --- a/distribution/lib/Standard/Searcher/0.0.0-dev/src/Data_Science.enso +++ b/distribution/lib/Standard/Searcher/0.0.0-dev/src/Data_Science.enso @@ -52,7 +52,7 @@ example_sort = table = Examples.inventory_table - table.order_by ([Sort_Column.Name "total_stock", Sort_Column.Name "sold_stock" Sort_Direction.Descending]) + table.order_by [Sort_Column.Name "total_stock", Sort_Column.Name "sold_stock" Sort_Direction.Descending] > Example Compute the number of transactions that each item has participated in, as diff --git a/distribution/lib/Standard/Searcher/0.0.0-dev/src/Data_Science/Transform.enso b/distribution/lib/Standard/Searcher/0.0.0-dev/src/Data_Science/Transform.enso index c687b77fc613..80b4dff48fc9 100644 --- a/distribution/lib/Standard/Searcher/0.0.0-dev/src/Data_Science/Transform.enso +++ b/distribution/lib/Standard/Searcher/0.0.0-dev/src/Data_Science/Transform.enso @@ -31,7 +31,7 @@ example_sort = table = Examples.inventory_table - table.order_by ([Sort_Column.Name "price" Sort_Direction.Descending]) + table.order_by 
[Sort_Column.Name "price" Sort_Direction.Descending] > Example Add two columns to each other. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Aggregate_Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Aggregate_Column.enso index 0bd7532f8bfe..1c691fa326a6 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Aggregate_Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Aggregate_Column.enso @@ -149,7 +149,7 @@ type Aggregate_Column not missing value returned. - order_by: required for database tables. Specifies how to order the results within the group. - First (column:Text|Integer|Column|Any=0) (new_name:Text|Nothing=Nothing) (ignore_nothing:Boolean=True) (order_by:(Text | Sort_Column | Vector (Text | Sort_Column) | Nothing)=Nothing) # Column needed because of 6866 + First (column:Text|Integer|Column|Any=0) (new_name:Text|Nothing=Nothing) (ignore_nothing:Boolean=True) (order_by:(Text | Vector (Text | Sort_Column) | Nothing)=Nothing) # Column needed because of 6866 ## Creates a new column with the last value in each group. If no rows, evaluates to `Nothing`. @@ -162,7 +162,7 @@ type Aggregate_Column not missing value returned. - order_by: required for database tables. Specifies how to order the results within the group. - Last (column:Text|Integer|Column|Any=0) (new_name:Text|Nothing=Nothing) (ignore_nothing:Boolean=True) (order_by:(Text | Sort_Column | Vector (Text | Sort_Column) | Nothing)=Nothing) # Column needed because of 6866 + Last (column:Text|Integer|Column|Any=0) (new_name:Text|Nothing=Nothing) (ignore_nothing:Boolean=True) (order_by:(Text | Vector (Text | Sort_Column) | Nothing)=Nothing) # Column needed because of 6866 ## Creates a new column with the maximum value in each group. If no rows, evaluates to `Nothing`. 
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso index d6e14a8adf3d..c6899bdfd1f4 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso @@ -180,6 +180,7 @@ type Column Returns a column with results of comparing this column's elements against `other`. + @locale Locale.default_widget equals_ignore_case : Column | Any -> Locale -> Column equals_ignore_case self other locale=Locale.default = ## TODO currently this always runs the fallback which is slow due to the @@ -903,7 +904,7 @@ type Column _ -> self.is_nothing result.rename new_name - ## ALIAS Fill Missing + ## ALIAS Fill Missing, if_nothing Returns a new column where missing values have been replaced with the provided default. @@ -935,7 +936,7 @@ type Column col = Java_Column.new new_name new_st Column.Value col - ## ALIAS Fill Empty + ## ALIAS Fill Empty, if_empty Returns a new column where empty Text values have been replaced with the provided default. @@ -1665,8 +1666,7 @@ type Column info : Table info self = self.to_table.info - ## UNSTABLE - + ## ALIAS order_by Sorts the column according to the specified rules. Arguments: diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column_Vector_Extensions.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column_Vector_Extensions.enso index c16982fd1654..071204bd14f6 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column_Vector_Extensions.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column_Vector_Extensions.enso @@ -53,7 +53,7 @@ Column.compute_bulk self statistics=[Statistic.Count, Statistic.Sum] = Arguments: - statistic: Statistic to calculate. - name: Name of the new column. 
-Column.running : Statistic -> Column +Column.running : Statistic -> Text -> Column Column.running self statistic=Statistic.Count name=statistic.to_text+" "+self.name = data = Statistic.running self.to_vector statistic Column.from_vector name data diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Data_Formatter.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Data_Formatter.enso index e321a52ccabe..8fa1c10d7085 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Data_Formatter.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Data_Formatter.enso @@ -129,6 +129,7 @@ type Data_Formatter Arguments: - locale: The locale to use when parsing dates and times. + @datetime_locale Locale.default_widget with_locale : Locale -> Data_Formatter with_locale self datetime_locale = self.clone datetime_locale=datetime_locale diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Join_Condition.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Join_Condition.enso index c7855e4a30b9..d0f86825d281 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Join_Condition.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Join_Condition.enso @@ -23,6 +23,7 @@ type Join_Condition - left: A name or index of a column in the left table. - right: A name or index of a column in the right table. - locale: The locale to use for case insensitive comparisons. 
+ @locale Locale.default_widget Equals_Ignore_Case (left : Text | Integer) (right : Text | Integer = left) (locale : Locale = Locale.default) ## Correlates rows from the two tables if the `left` element fits between diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index 056f6a14433e..e2148f021f35 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -1078,7 +1078,7 @@ type Table people.filter "age" (age -> (age%10 == 0)) @column Widget_Helpers.make_column_name_selector filter : (Column | Text | Integer) -> (Filter_Condition|(Any->Boolean)) -> Problem_Behavior -> Table ! No_Such_Column | Index_Out_Of_Bounds | Invalid_Value_Type - filter self column filter=(Filter_Condition.Is_True) on_problems=Report_Warning = case column of + filter self column filter=(Filter_Condition.Equal True) on_problems=Report_Warning = case column of _ : Column -> mask filter_column = Table.Value (self.java_table.mask filter_column.java_column) case filter of @@ -1259,18 +1259,20 @@ type Table _ : Range -> Column.from_vector (new_name.if_nothing "Range") column.to_vector _ : Date_Range -> Column.from_vector (new_name.if_nothing "Date Range") column.to_vector _ -> Error.throw (Illegal_Argument.Error "Unsupported type for `Table.set`.") - renamed = case new_name of - Nothing -> resolved - _ : Text -> resolved.rename new_name - check_add_mode = case set_mode of - Set_Mode.Add_Or_Update -> True - Set_Mode.Add -> if self.java_table.getColumnByName renamed.name . is_nothing then True else - Error.throw (Existing_Column.Error renamed.name) - Set_Mode.Update -> if self.java_table.getColumnByName renamed.name . is_nothing . 
not then True else - Error.throw (Missing_Column.Error renamed.name) - - check_add_mode.if_not_error <| - Table.Value (self.java_table.addOrReplaceColumn renamed.java_column) + + if resolved.length != self.row_count then Error.throw (Row_Count_Mismatch.Error self.row_count resolved.length) else + renamed = case new_name of + Nothing -> resolved + _ : Text -> resolved.rename new_name + check_add_mode = case set_mode of + Set_Mode.Add_Or_Update -> True + Set_Mode.Add -> if self.java_table.getColumnByName renamed.name . is_nothing then True else + Error.throw (Existing_Column.Error renamed.name) + Set_Mode.Update -> if self.java_table.getColumnByName renamed.name . is_nothing . not then True else + Error.throw (Missing_Column.Error renamed.name) + + check_add_mode.if_not_error <| + Table.Value (self.java_table.addOrReplaceColumn renamed.java_column) ## Given an expression, create a derived column where each value is the result of evaluating the expression for the row. @@ -1732,14 +1734,14 @@ type Table value field. Arguments: - - id_fields: Set of fields to remain as columns. These values will be + - key_columns: Set of fields to remain as columns. These values will be repeated for each data field that is pivoted. - - name_field: The name of the field that will contain the names of the - pivoted fields. If this name is already in use, it will be renamed - with a numeric suffix. - - value_field: The name of the field that will contain the values of the - pivoted fields. If this name is already in use, it will be renamed - with a numeric suffix. + - attribute_column_name: The name of the field that will contain the + names of the pivoted fields. If this name is already in use, it will be + renamed with a numeric suffix. + - value_column_name: The name of the field that will contain the values + of the pivoted fields. If this name is already in use, it will be + renamed with a numeric suffix. 
- on_problems: Specifies how to handle problems if they occur, reporting them as warnings by default. @@ -1755,14 +1757,32 @@ type Table - If any column names in the new table are clashing, a `Duplicate_Output_Column_Names` is reported according to the `on_problems` setting. - @id_fields Widget_Helpers.make_column_name_vector_selector + + ? Example Transpose Operation + + Input Table `table`: + + Id | Name | Country + ----|---------|--------- + A | Example | France + B | Another | Germany + + Result `table.transpose ['Id'] 'Attribute' 'Value'`: + + Id | Attribute | Value + ----|-----------|--------- + A | Name | Example + A | Country | France + B | Name | Another + B | Country | Germany + @key_columns Widget_Helpers.make_column_name_vector_selector transpose : Vector (Integer | Text | Column_Selector) | Text | Integer -> Text -> Text -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Duplicate_Output_Column_Names - transpose self (id_fields = []) (name_field="Name") (value_field="Value") (error_on_missing_columns=True) (on_problems = Report_Warning) = + transpose self (key_columns = []) (attribute_column_name="Name") (value_column_name="Value") (error_on_missing_columns=True) (on_problems = Report_Warning) = columns_helper = self.columns_helper unique = Unique_Name_Strategy.new problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns - id_columns = columns_helper.select_columns_helper id_fields False problem_builder + id_columns = columns_helper.select_columns_helper key_columns False problem_builder selected_names = Map.from_vector (id_columns.map column-> [column.name, True]) @@ -1772,7 +1792,7 @@ type Table java_id = id_columns.map .java_column unique.mark_used (id_columns.map .name) - result = Table.Value (Java_Table.transpose java_id.to_array java_data.to_array (unique.make_unique name_field) (unique.make_unique value_field)) + result = Table.Value (Java_Table.transpose java_id.to_array 
java_data.to_array (unique.make_unique attribute_column_name) (unique.make_unique value_column_name)) problem_builder.report_unique_name_strategy unique problem_builder.attach_problems_after on_problems result @@ -1794,7 +1814,7 @@ type Table ! Error Conditions - - If a column in `group_by` or `name_field` is not in the input table, + - If a column in `group_by` or `name_column` is not in the input table, a `Missing_Input_Columns` is raised as a dataflow error. - If a column selector in `values` given as a `Text` and it does not match any columns in the input table nor is it a valid expression, an @@ -1809,6 +1829,21 @@ type Table an `Unquoted_Delimiter` - If there are more than 10 issues with a single column, an `Additional_Warnings`. + + ? Example Cross Tab Operation + + Input Table `table`: + + Id | B | C + ----|---------|--------- + A | Name | Example + A | Country | France + + Result `table.cross_tab ['Id'] 'B' (Aggregate_Column.First 'C')`: + + Id | Name | Country + ----|---------|--------- + A | Example | France @group_by Widget_Helpers.make_column_name_vector_selector @name_column Widget_Helpers.make_column_name_selector @values (Widget_Helpers.make_aggregate_column_selector include_group_by=False) diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso index 95f2e251469b..c5d994d97c6c 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso @@ -8,6 +8,8 @@ import project.Delimited.Quote_Style.Quote_Style import project.Internal.Delimited_Reader import project.Internal.Delimited_Writer +from Standard.Base.Widget_Helpers import make_delimiter_selector + ## Read delimited files such as CSVs into a Table. type Delimited_Format ## Read delimited files such as CSVs into a Table. 
@@ -49,6 +51,8 @@ type Delimited_Format character if it anywhere else than at the beginning of the line. This option is only applicable for read mode and does not affect writing. It defaults to `Nothing` which means that comments are disabled. + @delimiter make_delimiter_selector + @encoding Encoding.default_widget Delimited (delimiter:Text=',') (encoding:Encoding=Encoding.utf_8) (skip_rows:Integer=0) (row_limit:Integer|Nothing=Nothing) (quote_style:Quote_Style=Quote_Style.With_Quotes) (headers:Boolean|Infer=Infer) (value_formatter:Data_Formatter|Nothing=Data_Formatter.Value) (keep_invalid_rows:Boolean=True) (line_endings:Line_Ending_Style|Infer=Infer) (comment_character:Text|Nothing=Nothing) ## PRIVATE diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso index 9c648a2f5dde..2678e8032581 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso @@ -398,7 +398,7 @@ filter_blank_rows table when_any treat_nans_as_blank = merge = if when_any then (||) else (&&) missing_mask = cols.map (_.is_blank treat_nans_as_blank) . reduce merge non_missing_mask = missing_mask.not - table.filter non_missing_mask + table.filter non_missing_mask Filter_Condition.Is_True False -> table ## PRIVATE diff --git a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso index 775100059f7e..7e835290d271 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso @@ -104,7 +104,7 @@ spec setup = t1.at "z Count" . to_vector . should_equal [2] t1.at "z Sum" . to_vector . 
should_equal [17] - Test.specify "should fail if name_field is not found" <| + Test.specify "should fail if name_column is not found" <| err1 = table.cross_tab [] "Name" err1.should_fail_with Missing_Input_Columns err1.catch.criteria . should_equal ["Name"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso index e10cbd1df364..61360a890f92 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Order_By_Spec.enso @@ -56,19 +56,19 @@ spec setup = t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] Test.specify "should work with single Sort_Column" <| - t1 = table.order_by (Sort_Column.Name "alpha") + t1 = table.order_by [Sort_Column.Name "alpha"] t1.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t1.at "gamma" . to_vector . should_equal [4, 3, 2, 1] - t2 = t1.order_by (Sort_Column.Name "alpha" Sort_Direction.Descending) + t2 = t1.order_by [Sort_Column.Name "alpha" Sort_Direction.Descending] t2.at "alpha" . to_vector . should_equal [3, 2, 1, 0] t2.at "gamma" . to_vector . should_equal [1, 2, 3, 4] - t3 = table.order_by (Sort_Column.Index 0) + t3 = table.order_by [Sort_Column.Index 0] t3.at "alpha" . to_vector . should_equal [0, 1, 2, 3] t3.at "gamma" . to_vector . should_equal [4, 3, 2, 1] - t4 = t3.order_by (Sort_Column.Index 0 Sort_Direction.Descending) + t4 = t3.order_by [Sort_Column.Index 0 Sort_Direction.Descending] t4.at "alpha" . to_vector . should_equal [3, 2, 1, 0] t4.at "gamma" . to_vector . 
should_equal [1, 2, 3, 4] diff --git a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso index 2e1a833c1422..cfcab8498c5f 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso @@ -24,7 +24,7 @@ spec setup = Test.specify "should allow custom names" <| t = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]], ["Yet Another", [Nothing, "Hello", "World"]]] - t1 = t.transpose name_field="Key" value_field="Object" + t1 = t.transpose attribute_column_name="Key" value_column_name="Object" t1.row_count . should_equal 12 t1.column_count . should_equal 2 t1.at "Key" . to_vector . should_equal ["Key", "Value", "Another", "Yet Another", "Key", "Value", "Another", "Yet Another", "Key", "Value", "Another", "Yet Another"] @@ -96,19 +96,19 @@ spec setup = Test.specify "should warn on column name clashes" <| t1 = table_builder [["X", ["x", "y", "z"]], ["Y", [1, 2, 3]], ["Z", [10, Nothing, 20]]] - action1 = t1.transpose ["X", "Y", "Z"] name_field="Y" value_field="Z" on_problems=_ + action1 = t1.transpose ["X", "Y", "Z"] attribute_column_name="Y" value_column_name="Z" on_problems=_ tester1 table = table.column_names . should_equal ["X", "Y", "Z", "Y 1", "Z 1"] problems1 = [Duplicate_Output_Column_Names.Error ["Y", "Z"]] Problems.test_problem_handling action1 problems1 tester1 - action2 = t1.transpose ["X"] name_field="F" value_field="F" on_problems=_ + action2 = t1.transpose ["X"] attribute_column_name="F" value_column_name="F" on_problems=_ tester2 table = table.column_names . should_equal ["X", "F", "F 1"] problems2 = [Duplicate_Output_Column_Names.Error ["F"]] Problems.test_problem_handling action2 problems2 tester2 # No clash with the columns that are removed by transpose. 
- t2 = t1.transpose ["X"] name_field="Y" value_field="Z" on_problems=Problem_Behavior.Report_Error + t2 = t1.transpose ["X"] attribute_column_name="Y" value_column_name="Z" on_problems=Problem_Behavior.Report_Error Problems.assume_no_problems t2 t2.column_names . should_equal ["X", "Y", "Z"] diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index 43384190d364..8254122fdd8d 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -8,7 +8,7 @@ from Standard.Table import Table, Column, Sort_Column, Column_Selector, Aggregat import Standard.Table.Main as Table_Module from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all hiding First, Last import Standard.Table.Data.Type.Value_Type.Value_Type -from Standard.Table.Errors import Invalid_Output_Column_Names, Duplicate_Output_Column_Names, No_Input_Columns_Selected, Missing_Input_Columns, No_Such_Column, Floating_Point_Equality, Invalid_Value_Type +from Standard.Table.Errors import Invalid_Output_Column_Names, Duplicate_Output_Column_Names, No_Input_Columns_Selected, Missing_Input_Columns, No_Such_Column, Floating_Point_Equality, Invalid_Value_Type, Row_Count_Mismatch import Standard.Visualization @@ -860,6 +860,31 @@ spec = r.should_fail_with Illegal_Argument r.catch.message . contains "cross-backend" . should_be_true + Test.group "[In-Memory-specific] Table.set" <| + Test.specify "should allow using vector and range for a new column" <| + t = Table.new [["X", [1, 2, 3]]] + + t_vec = t.set [10, 20, 30] + t_vec.column_names.should_equal ["X", "Vector"] + t_vec.at "Vector" . to_vector . should_equal [10, 20, 30] + + t_range = t.set (100.up_to 103) + t_range.column_names.should_equal ["X", "Range"] + t_range.at "Range" . to_vector . 
should_equal [100, 101, 102] + + t_date_range = t.set ((Date.new 2020 1 1).up_to (Date.new 2020 1 4)) + t_date_range.column_names.should_equal ["X", "Date Range"] + t_date_range.at "Date Range" . to_vector . should_equal [Date.new 2020 1 1, Date.new 2020 1 2, Date.new 2020 1 3] + + Test.specify "should fail if there is a length mismatch on a new column" <| + t = Table.new [["X", [1, 2, 3]]] + + c = Column.from_vector "Column" [10, 20] + t.set c . should_fail_with Row_Count_Mismatch + t.set [10, 20] . should_fail_with Row_Count_Mismatch + t.set (100.up_to 102) . should_fail_with Row_Count_Mismatch + t.set ((Date.new 2020 1 1).up_to (Date.new 2020 1 3)) . should_fail_with Row_Count_Mismatch + main = Test_Suite.run_main spec ## JS indexes months form 0, so we need to subtract 1. diff --git a/test/Visualization_Tests/src/SQL_Spec.enso b/test/Visualization_Tests/src/SQL_Spec.enso index 0a2076716aed..56ba2393ffe6 100644 --- a/test/Visualization_Tests/src/SQL_Spec.enso +++ b/test/Visualization_Tests/src/SQL_Spec.enso @@ -12,7 +12,7 @@ visualization_spec connection = t = connection.query (SQL_Query.Table_Name "T") Test.group "SQL Visualization" <| Test.specify "should provide type metadata for interpolations" <| - q = t.filter ((t.at "B" == 2) && (t.at "A" == True)) . at "C" + q = t.filter ((t.at "B" == 2) && (t.at "A" == True)) Filter_Condition.Is_True . at "C" vis = Visualization.prepare_visualization q int_param = JS_Object.from_pairs [["value", 2], ["enso_type", "Standard.Base.Data.Numbers.Integer"]] str_param = JS_Object.from_pairs [["value", True], ["enso_type", "Standard.Base.Data.Boolean.Boolean"]] From 72b202b7d09753d9af1adc8815ed0872396b0d7b Mon Sep 17 00:00:00 2001 From: Michael Mauderer Date: Mon, 5 Jun 2023 18:01:06 +0200 Subject: [PATCH 19/39] Fix visualisation FRP bugs. (#6831) Fixes * Empty Visualization when opening a full-screen visualization directly without opening the visualization before. 
#6770 https://github.com/enso-org/enso/assets/1428930/5812ed03-652c-4a27-8e33-b85512ca11b6 * Empty visualization when opening the full-screen visualization before the data for the visualization has arrived. #6561 https://github.com/enso-org/enso/assets/1428930/d8e58f2d-f1b6-4b70-84fa-e917f6c0af1f * Visualization is reset to default when reconnecting nodes #6673 https://github.com/enso-org/enso/assets/1428930/ac6cf79a-7147-4f13-9045-52599fb39900 * Redundant internal open/close events caused by logic loops around the show/hide button, as well as many redundant layer setting/unsetting issues internal to the visualization code. Generally improves the logic around the visualization API by avoiding decentralized logic in different places and removing old code that is no longer needed. --- Cargo.lock | 1 + app/gui/src/presenter/graph/visualization.rs | 10 +- app/gui/view/graph-editor/Cargo.toml | 1 + .../view/graph-editor/src/component/node.rs | 90 +++--- .../src/component/node/action_bar.rs | 5 + .../src/component/visualization/container.rs | 268 ++++++++++-------- .../visualization/container/action_bar.rs | 5 + .../visualization/container/fullscreen.rs | 40 +-- .../src/component/visualization/layer.rs | 3 + app/gui/view/graph-editor/src/lib.rs | 26 +- build-config.yaml | 2 +- .../ensogl/component/toggle-button/src/lib.rs | 37 ++- 12 files changed, 246 insertions(+), 242 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7fe5ea817628..248f8425ec33 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4347,6 +4347,7 @@ dependencies = [ "base64 0.13.1", "bimap", "bitflags 2.2.1", + "derivative", "engine-protocol", "enso-config", "enso-frp", diff --git a/app/gui/src/presenter/graph/visualization.rs b/app/gui/src/presenter/graph/visualization.rs index 2d8ee8d20de5..53a29d029401 100644 --- a/app/gui/src/presenter/graph/visualization.rs +++ b/app/gui/src/presenter/graph/visualization.rs @@ -186,12 +186,12 @@ impl Visualization { eval view.visualization_preprocessor_changed (((node, 
preprocessor)) model.visualization_preprocessor_changed(*node, preprocessor.clone_ref())); eval view.set_node_error_status (((node, error)) model.error_on_node_changed(*node, error)); - update <- source::<(ViewNodeId, visualization_view::Data)>(); + set_data <- source::<(ViewNodeId, visualization_view::Data)>(); error_update <- source::<(ViewNodeId, visualization_view::Data)>(); visualization_failure <- source::(); error_vis_failure <- source::(); - view.set_visualization_data <+ update; + view.set_visualization_data <+ set_data; view.set_error_visualization_data <+ error_update; view.disable_visualization <+ visualization_failure; @@ -199,7 +199,7 @@ impl Visualization { } Self { model, _network: network } - .spawn_visualization_handler(notifications, manager, update, visualization_failure) + .spawn_visualization_handler(notifications, manager, set_data, visualization_failure) .spawn_visualization_handler( error_notifications, error_manager, @@ -213,7 +213,7 @@ impl Visualization { self, notifier: impl Stream + Unpin + 'static, manager: Rc, - update_endpoint: frp::Source<(ViewNodeId, visualization_view::Data)>, + set_data_endpoint: frp::Source<(ViewNodeId, visualization_view::Data)>, failure_endpoint: frp::Source, ) -> Self { let weak = Rc::downgrade(&self.model); @@ -221,7 +221,7 @@ impl Visualization { info!("Received update for visualization: {notification:?}"); match notification { manager::Notification::ValueUpdate { target, data, .. 
} => { - model.handle_value_update(&update_endpoint, target, data); + model.handle_value_update(&set_data_endpoint, target, data); } manager::Notification::FailedToAttach { visualization, error } => { error!("Visualization {} failed to attach: {error}.", visualization.id); diff --git a/app/gui/view/graph-editor/Cargo.toml b/app/gui/view/graph-editor/Cargo.toml index ac1a90cb0e6f..5adbcd12f28c 100644 --- a/app/gui/view/graph-editor/Cargo.toml +++ b/app/gui/view/graph-editor/Cargo.toml @@ -12,6 +12,7 @@ analytics = { path = "../../analytics" } ast = { path = "../../language/ast/impl" } base64 = "0.13" bimap = { version = "0.4.0" } +derivative = "2.2.0" engine-protocol = { path = "../../controller/engine-protocol" } enso-config = { path = "../../config" } enso-frp = { path = "../../../../lib/rust/frp" } diff --git a/app/gui/view/graph-editor/src/component/node.rs b/app/gui/view/graph-editor/src/component/node.rs index 7a5479570ab2..3b6a5e0cd1a7 100644 --- a/app/gui/view/graph-editor/src/component/node.rs +++ b/app/gui/view/graph-editor/src/component/node.rs @@ -259,6 +259,7 @@ ensogl::define_endpoints_2! { select (), deselect (), enable_visualization (), + enable_fullscreen_visualization (), disable_visualization (), set_visualization (Option), set_disabled (bool), @@ -318,12 +319,6 @@ ensogl::define_endpoints_2! { freeze (bool), hover (bool), error (Option), - /// Whether visualization was permanently enabled (e.g. by pressing the button). - visualization_enabled (bool), - /// Visualization can be visible even when it is not enabled, e.g. when showing preview. - /// Visualization can be invisible even when enabled, e.g. when the node has an error. - visualization_visible (bool), - visualization_path (Option), expression_label_visible (bool), /// The [`display::object::Model::position`] of the Node. Emitted when the Display Object /// hierarchy is updated (see: [`ensogl_core::display::object::Instance::update`]). 
@@ -607,12 +602,6 @@ impl NodeModel { size } - #[profile(Debug)] - #[allow(missing_docs)] // FIXME[everyone] All pub functions should have docs. - pub fn visualization(&self) -> &visualization::Container { - &self.visualization - } - #[profile(Debug)] fn set_error(&self, error: Option<&Error>) { if let Some(error) = error { @@ -753,7 +742,6 @@ impl Node { // === Action Bar === - let visualization_button_state = action_bar.action_visibility.clone_ref(); out.context_switch <+ action_bar.action_context_switch; out.skip <+ action_bar.action_skip; out.freeze <+ action_bar.action_freeze; @@ -790,7 +778,10 @@ impl Node { hover_onset_delay.set_delay(VIS_PREVIEW_ONSET_MS); hover_onset_delay.set_duration(0.0); + let visualization = &model.visualization.frp; + frp::extend! { network + enabled <- bool(&input.disable_visualization, &input.enable_visualization); out.error <+ input.set_error; is_error_set <- input.set_error.map( @@ -806,11 +797,23 @@ impl Node { } )); - eval input.set_visualization ((t) model.visualization.frp.set_visualization.emit(t)); - visualization_enabled_frp <- bool(&input.disable_visualization,&input.enable_visualization); - eval visualization_enabled_frp ((enabled) - model.action_bar.set_action_visibility_state(enabled) - ); + viz_enabled <- enabled && no_error_set; + visualization.set_view_state <+ viz_enabled.on_true().constant(visualization::ViewState::Enabled); + visualization.set_view_state <+ viz_enabled.on_false().constant(visualization::ViewState::Disabled); + + // Integration between visualization and action bar. 
+ visualization.set_visualization <+ input.set_visualization; + is_enabled <- visualization.view_state.map(|state|{ + matches!(state,visualization::ViewState::Enabled) + }); + action_bar.set_action_visibility_state <+ is_enabled; + button_set_to_true <- action_bar.user_action_visibility.on_true(); + button_set_to_true_without_error <- button_set_to_true.gate_not(&is_error_set); + button_set_to_true_with_error <- button_set_to_true.gate(&is_error_set); + visualization.set_view_state <+ button_set_to_true_without_error.constant(visualization::ViewState::Enabled); + action_bar.set_action_visibility_state <+ button_set_to_true_with_error.constant(false); + + visualization.set_view_state <+ action_bar.user_action_visibility.on_false().constant(visualization::ViewState::Disabled); // Show preview visualisation after some delay, depending on whether we show an error // or are in quick preview mode. Also, omit the preview if we don't have an @@ -840,38 +843,10 @@ impl Node { hide_preview <+ editing_finished; preview_enabled <- bool(&hide_preview, &input.show_preview); preview_visible <- hover_preview_visible || preview_enabled; - preview_visible <- preview_visible.on_change(); - - // If the preview is visible while the visualization button is disabled, clicking the - // visualization button hides the preview and keeps the visualization button disabled. 
- vis_button_on <- visualization_button_state.filter(|e| *e).constant(()); - vis_button_off <- visualization_button_state.filter(|e| !*e).constant(()); - visualization_on <- vis_button_on.gate_not(&preview_visible); - vis_button_on_while_preview_visible <- vis_button_on.gate(&preview_visible); - hide_preview <+ vis_button_on_while_preview_visible; - hide_preview <+ vis_button_off; - action_bar.set_action_visibility_state <+ - vis_button_on_while_preview_visible.constant(false); - visualization_enabled <- bool(&vis_button_off, &visualization_on); - - visualization_visible <- visualization_enabled || preview_visible; - visualization_visible <- visualization_visible && no_error_set; - visualization_visible_on_change <- visualization_visible.on_change(); - out.visualization_visible <+ visualization_visible_on_change; - out.visualization_enabled <+ visualization_enabled; - eval visualization_visible_on_change ((is_visible) - model.visualization.frp.set_visibility(is_visible) - ); - out.visualization_path <+ model.visualization.frp.visualisation.all_with(&init,|def_opt,_| { - def_opt.as_ref().map(|def| def.signature.path.clone_ref()) - }); - - // Ensure the preview is visible above all other elements, but the normal visualisation - // is below nodes. 
- layer_on_hover <- hover_preview_visible.on_false().map(|_| visualization::Layer::Default); - layer_on_not_hover <- hover_preview_visible.on_true().map(|_| visualization::Layer::Front); - layer <- any(layer_on_hover,layer_on_not_hover); - model.visualization.frp.set_layer <+ layer; + vis_preview_visible <- preview_visible && no_error_set; + vis_preview_visible <- vis_preview_visible.on_change(); + visualization.set_view_state <+ vis_preview_visible.on_true().constant(visualization::ViewState::Preview); + visualization.set_view_state <+ vis_preview_visible.on_false().constant(visualization::ViewState::Disabled); update_error <- all(input.set_error,preview_visible); eval update_error([model]((error,visible)){ @@ -883,6 +858,10 @@ impl Node { }); eval error_color_anim.value ((value) model.set_error_color(value)); + visualization.set_view_state <+ input.set_error.is_some().constant(visualization::ViewState::Disabled); + + enable_fullscreen <- frp.enable_fullscreen_visualization.gate(&no_error_set); + visualization.set_view_state <+ enable_fullscreen.constant(visualization::ViewState::Fullscreen); } @@ -949,16 +928,14 @@ impl Node { // === Type Labels === model.output.set_type_label_visibility - <+ visualization_visible.not().and(&no_error_set); + <+ visualization.visible.not().and(&no_error_set); // === Bounding Box === let visualization_size = &model.visualization.frp.size; - // Visualization can be enabled and not visible when the node has an error. - visualization_enabled_and_visible <- visualization_enabled && visualization_visible; bbox_input <- all4( - &out.position,&new_size,&visualization_enabled_and_visible,visualization_size); + &out.position,&new_size,&visualization.visible,visualization_size); out.bounding_box <+ bbox_input.map(|(a,b,c,d)| bounding_box(*a,*b,c.then(|| *d))); inner_bbox_input <- all2(&out.position,&new_size); @@ -997,6 +974,11 @@ impl Node { color::Lcha::transparent() } } + + /// FRP API of the visualization container attached to this node. 
+ pub fn visualization(&self) -> &visualization::container::Frp { + &self.model().visualization.frp + } } impl display::Object for Node { diff --git a/app/gui/view/graph-editor/src/component/node/action_bar.rs b/app/gui/view/graph-editor/src/component/node/action_bar.rs index 0d49c0293f67..42b6d0488d99 100644 --- a/app/gui/view/graph-editor/src/component/node/action_bar.rs +++ b/app/gui/view/graph-editor/src/component/node/action_bar.rs @@ -70,6 +70,7 @@ ensogl::define_endpoints! { Input { set_size (Vector2), set_visibility (bool), + /// Set whether the `visibility` icon should be toggled on or off. set_action_visibility_state (bool), set_action_skip_state (bool), set_action_freeze_state (bool), @@ -86,6 +87,9 @@ ensogl::define_endpoints! { mouse_over (), mouse_out (), action_visibility (bool), + /// The last visibility selection by the user. Ignores changes to the + /// visibility chooser icon made through the input API. + user_action_visibility (bool), action_context_switch (bool), action_freeze (bool), action_skip (bool), @@ -412,6 +416,7 @@ impl ActionBar { // === Icon Actions === frp.source.action_visibility <+ model.icons.visibility.state; + frp.source.user_action_visibility <+ model.icons.visibility.last_user_state; frp.source.action_skip <+ model.icons.skip.state; frp.source.action_freeze <+ model.icons.freeze.state; disable_context_button_clicked <- model.icons.context_switch.disable_button.is_pressed.on_true(); diff --git a/app/gui/view/graph-editor/src/component/visualization/container.rs b/app/gui/view/graph-editor/src/component/visualization/container.rs index cc135403a8f8..d1a5f0ad781c 100644 --- a/app/gui/view/graph-editor/src/component/visualization/container.rs +++ b/app/gui/view/graph-editor/src/component/visualization/container.rs @@ -1,4 +1,12 @@ -//! This module defines the `Container` struct and related functionality. +//! This module defines the `Container` struct and related functionality. This represent the view +//! 
a visualisation in the graph editor and includes a visual box that contains the visualisation, +//! and action bar that allows setting the visualisation type. +//! +//! The `[Container]` struct is responsible for managing the visualisation and action bar and +//! providing a unified interface to the graph editor. This includes ensuring that the visualisation +//! is correctly positioned, sized and layouted in its different [ViewState]s (which include the +//! `Enabled`, `Fullscreen` and `Preview` states). Importantly, this also includes EnsoGL layer +//! management to ensure correct occlusion of the visualisation with respect to other scene objects. // FIXME There is a serious performance problem in this implementation. It assumes that the // FIXME visualization is a child of the container. However, this is very inefficient. Consider a @@ -24,6 +32,7 @@ use ensogl::data::color::Rgba; use ensogl::display; use ensogl::display::scene; use ensogl::display::scene::Scene; +use ensogl::display::DomScene; use ensogl::display::DomSymbol; use ensogl::system::web; use ensogl::Animation; @@ -125,29 +134,59 @@ pub mod background { // === Frp === // =========== -ensogl::define_endpoints! { +/// Indicates the visibility state of the visualisation. +#[derive(Clone, Copy, Debug, PartialEq, Derivative)] +#[derivative(Default)] +pub enum ViewState { + /// Visualisation is permanently enabled and visible in the graph editor. It is attached to a + /// single node and can be moved and interacted with when selected. + Enabled, + /// Visualisation is disabled and hidden in the graph editor. + #[derivative(Default)] + Disabled, + /// Visualisation is temporarily enabled and visible in the graph editor. It should be placed + /// above other scene elements to allow quick inspection. + Preview, + /// Visualisation is enabled and visible in the graph editor in fullscreen mode. It occludes + /// the whole graph and can be interacted with. 
+ Fullscreen, +} + +impl ViewState { + /// Indicates whether the visualisation is visible in the graph editor. It is always visible + /// when not disabled. + pub fn is_visible(&self) -> bool { + !matches!(self, ViewState::Disabled) + } + + /// Indicates whether the visualisation is fullscreen mode. + pub fn is_fullscreen(&self) -> bool { + matches!(self, ViewState::Fullscreen) + } +} + + +ensogl::define_endpoints_2! { Input { - set_visibility (bool), - toggle_visibility (), + set_view_state (ViewState), set_visualization (Option), cycle_visualization (), - set_data (visualization::Data), + set_data (Option), select (), deselect (), set_size (Vector2), - enable_fullscreen (), - disable_fullscreen (), set_vis_input_type (Option), - set_layer (visualization::Layer), } - Output { preprocessor (PreprocessorConfiguration), visualisation (Option), + visualization_path (Option), size (Vector2), is_selected (bool), + vis_input_type (Option), + fullscreen (bool), visible (bool), - vis_input_type (Option) + view_state (ViewState), } } @@ -161,8 +200,7 @@ ensogl::define_endpoints! 
{ #[derive(Debug)] #[allow(missing_docs)] pub struct View { - display_object: display::object::Instance, - + display_object: display::object::Instance, background: background::View, overlay: overlay::View, background_dom: DomSymbol, @@ -277,7 +315,6 @@ pub struct ContainerModel { scene: Scene, view: View, fullscreen_view: fullscreen::Panel, - is_fullscreen: Rc>, registry: visualization::Registry, size: Rc>, action_bar: ActionBar, @@ -294,7 +331,6 @@ impl ContainerModel { let view = View::new(scene.clone_ref()); let fullscreen_view = fullscreen::Panel::new(scene); let scene = scene.clone_ref(); - let is_fullscreen = default(); let size = default(); let action_bar = ActionBar::new(app, registry.clone_ref()); view.add_child(&action_bar); @@ -307,7 +343,6 @@ impl ContainerModel { scene, view, fullscreen_view, - is_fullscreen, registry, size, action_bar, @@ -318,21 +353,16 @@ impl ContainerModel { fn init(self) -> Self { self.display_object.add_child(&self.drag_root); self.scene.layers.above_nodes.add(&self.action_bar); - - self.update_shape_sizes(); + self.update_shape_sizes(ViewState::default()); self.init_corner_roundness(); - // FIXME: These 2 lines fix a bug with display objects visible on stage. - self.set_visibility(true); - self.set_visibility(false); - self.view.show_waiting_screen(); self } - /// Indicates whether the visualization container is visible and active. - /// Note: can't be called `is_visible` due to a naming conflict with `display::object::class`. - pub fn is_active(&self) -> bool { - self.view.has_parent() + fn set_visualization_layer(&self, layer: visualization::Layer) { + if let Some(vis) = self.visualization.borrow().as_ref() { + vis.set_layer.emit(layer) + } } } @@ -340,54 +370,60 @@ impl ContainerModel { // === Private API === impl ContainerModel { - fn set_visibility(&self, visibility: bool) { + fn apply_view_state(&self, view_state: ViewState) { // This is a workaround for #6600. 
It ensures the action bar is removed // and receive no further mouse events. - if visibility { + if view_state.is_visible() { self.view.add_child(&self.action_bar); } else { self.action_bar.unset_parent(); } // Show or hide the visualization. - if visibility { + if view_state.is_visible() { self.drag_root.add_child(&self.view); - self.show_visualisation(); } else { self.drag_root.remove_child(&self.view); } + + match view_state { + ViewState::Enabled => self.enable_default_view(), + ViewState::Disabled => {} + ViewState::Preview => self.enable_preview(), + ViewState::Fullscreen => self.enable_fullscreen(), + } } - fn enable_fullscreen(&self) { - self.is_fullscreen.set(true); + fn set_vis_parents(&self, parent: &dyn display::Object, dom_parent: &DomScene) { if let Some(viz) = &*self.visualization.borrow() { - self.fullscreen_view.add_child(viz); + parent.add_child(viz); if let Some(dom) = viz.root_dom() { - self.scene.dom.layers.fullscreen_vis.manage(dom); + dom_parent.manage(dom); } viz.inputs.activate.emit(()); } } - fn disable_fullscreen(&self) { - self.is_fullscreen.set(false); - if let Some(viz) = &*self.visualization.borrow() { - self.view.add_child(viz); - if let Some(dom) = viz.root_dom() { - self.scene.dom.layers.back.manage(dom); - } - viz.inputs.deactivate.emit(()); - } + fn enable_fullscreen(&self) { + self.set_visualization_layer(visualization::Layer::Fullscreen); + self.set_vis_parents(&self.fullscreen_view, &self.scene.dom.layers.fullscreen_vis) } - fn toggle_visibility(&self) { - self.set_visibility(!self.is_active()) + fn enable_default_view(&self) { + self.set_visualization_layer(visualization::Layer::Default); + self.set_vis_parents(&self.view, &self.scene.dom.layers.back) + } + + fn enable_preview(&self) { + self.set_visualization_layer(visualization::Layer::Front); + self.set_vis_parents(&self.view, &self.scene.dom.layers.front); } fn set_visualization( &self, visualization: visualization::Instance, preprocessor: &frp::Any, + view_state: 
ViewState, ) { let size = self.size.get(); visualization.frp.set_size.emit(size); @@ -399,31 +435,27 @@ impl ContainerModel { vis_preprocessor_change <- visualization.on_preprocessor_change.map(|x| x.clone()); preprocessor <+ vis_preprocessor_change; } - preprocessor.emit(visualization.on_preprocessor_change.value()); - if self.is_fullscreen.get() { - self.fullscreen_view.add_child(&visualization) - } else { - self.view.add_child(&visualization); - } - self.visualization.replace(Some(visualization)); + self.visualization.replace(Some(visualization.clone_ref())); self.vis_frp_connection.replace(Some(vis_frp_connection)); + self.apply_view_state(view_state); + preprocessor.emit(visualization.on_preprocessor_change.value()); } fn set_visualization_data(&self, data: &visualization::Data) { self.visualization.borrow().for_each_ref(|vis| vis.send_data.emit(data)) } - fn update_shape_sizes(&self) { + fn update_shape_sizes(&self, view_state: ViewState) { let size = self.size.get(); - self.set_size(size); + self.update_layout(size, view_state); } - fn set_size(&self, size: impl Into) { + fn update_layout(&self, size: impl Into, view_state: ViewState) { let dom = self.view.background_dom.dom(); let bg_dom = self.fullscreen_view.background_dom.dom(); let size = size.into(); self.size.set(size); - if self.is_fullscreen.get() { + if view_state.is_fullscreen() { self.view.overlay.set_size(Vector2(0.0, 0.0)); dom.set_style_or_warn("width", "0"); dom.set_style_or_warn("height", "0"); @@ -461,16 +493,6 @@ impl ContainerModel { self.view.background.roundness.set(value); } - fn show_visualisation(&self) { - if let Some(vis) = self.visualization.borrow().as_ref() { - if self.is_fullscreen.get() { - self.fullscreen_view.add_child(vis); - } else { - self.view.add_child(vis); - } - } - } - /// Check if given mouse-event-target means this visualization. 
fn is_this_target(&self, target: scene::PointerTargetId) -> bool { self.view.overlay.is_this_target(target) @@ -526,7 +548,9 @@ impl Container { } fn init(self, app: &Application) -> Self { - let frp = &self.frp; + let frp = &self.frp.private; + let input = &frp.input; + let output = &frp.output; let network = &self.frp.network; let model = &self.model; let scene = &self.model.scene; @@ -536,30 +560,27 @@ impl Container { let selection = Animation::new(network); frp::extend! { network - eval frp.set_visibility ((v) model.set_visibility(*v)); - eval_ frp.toggle_visibility (model.toggle_visibility()); - - visualisation_uninitialised <- frp.set_visualization.map(|t| t.is_none()); - default_visualisation <- visualisation_uninitialised.on_true().map(|_| { + eval input.set_view_state((state) model.apply_view_state(*state)); + output.view_state <+ input.set_view_state.on_change(); + output.fullscreen <+ output.view_state.map(|state| state.is_fullscreen()).on_change(); + output.visible <+ output.view_state.map(|state| state.is_visible()).on_change(); + output.size <+ input.set_size.on_change(); + + visualisation_not_selected <- input.set_visualization.map(|t| t.is_none()); + input_type_not_set <- input.set_vis_input_type.is_some().not(); + uninitialised <- visualisation_not_selected && input_type_not_set; + set_default_visualisation <- uninitialised.on_change().on_true().map(|_| { Some(visualization::Registry::default_visualisation()) }); - vis_input_type <- frp.set_vis_input_type.on_change(); - vis_input_type <- vis_input_type.gate(&visualisation_uninitialised).unwrap(); - default_visualisation_for_type <- vis_input_type.map(f!((tp) { + vis_input_type_changed <- input.set_vis_input_type.on_change(); + vis_input_type_changed_without_selection <- + vis_input_type_changed.gate(&visualisation_not_selected).unwrap(); + set_default_visualisation_for_type <- vis_input_type_changed_without_selection.map(f!((tp) { registry.default_visualization_for_type(tp) })); - 
default_visualisation <- any(&default_visualisation, &default_visualisation_for_type); - - eval frp.set_data ((t) model.set_visualization_data(t)); - frp.source.size <+ frp.set_size; - frp.source.visible <+ frp.set_visibility; - frp.source.visible <+ frp.toggle_visibility.map(f!((()) model.is_active())); - eval frp.set_layer ([model](l) { - if let Some(vis) = model.visualization.borrow().as_ref() { - vis.set_layer.emit(l) - } - model.view.set_layer(*l); - }); + set_default_visualisation <- any( + &set_default_visualisation, &set_default_visualisation_for_type); + } @@ -569,15 +590,17 @@ impl Container { selected_definition <- action_bar.visualisation_selection.map(f!([registry](path) path.as_ref().and_then(|path| registry.definition_from_path(path)) )); - action_bar.set_vis_input_type <+ frp.set_vis_input_type; - frp.source.vis_input_type <+ frp.set_vis_input_type; + action_bar.hide_icons <+ selected_definition.constant(()); + output.vis_input_type <+ input.set_vis_input_type; + let chooser = &model.action_bar.visualization_chooser(); + chooser.frp.set_vis_input_type <+ input.set_vis_input_type; } // === Cycling Visualizations === frp::extend! { network - vis_after_cycling <- frp.cycle_visualization.map3(&frp.visualisation,&frp.vis_input_type, + vis_after_cycling <- input.cycle_visualization.map3(&output.visualisation, &output.vis_input_type, f!(((),vis,input_type) model.next_visualization(vis,input_type)) ); } @@ -587,19 +610,19 @@ impl Container { frp::extend! 
{ network vis_definition_set <- any( - frp.set_visualization, + input.set_visualization, selected_definition, vis_after_cycling, - default_visualisation); + set_default_visualisation); new_vis_definition <- vis_definition_set.on_change(); - let preprocessor = &frp.source.preprocessor; - frp.source.visualisation <+ new_vis_definition.map(f!( - [model,action_bar,app,preprocessor](vis_definition) { + let preprocessor = &output.preprocessor; + output.visualisation <+ new_vis_definition.map2(&output.view_state, f!( + [model,action_bar,app,preprocessor](vis_definition, view_state) { if let Some(definition) = vis_definition { match definition.new_instance(&app) { Ok(vis) => { - model.set_visualization(vis,&preprocessor); + model.set_visualization(vis,&preprocessor, *view_state); let path = Some(definition.signature.path.clone()); action_bar.set_selected_visualization.emit(path); }, @@ -611,21 +634,24 @@ impl Container { vis_definition.clone() })); + output.visualization_path <+ output.visualisation.map(|definition| { + definition.as_ref().map(|def| def.signature.path.clone_ref()) + }); + } // === Visualisation Loading Spinner === - eval_ frp.source.visualisation ( model.view.show_waiting_screen() ); - eval_ frp.set_data ( model.view.disable_waiting_screen() ); - + frp::extend! { network + eval_ output.visualisation ( model.view.show_waiting_screen() ); + eval_ input.set_data ( model.view.disable_waiting_screen() ); } - // === Selecting Visualization === frp::extend! 
{ network mouse_down_target <- scene.mouse.frp_deprecated.down.map(f_!(scene.mouse.target.get())); - selected_by_click <= mouse_down_target.map(f!([model] (target){ + selected_by_click <= mouse_down_target.map2(&output.view_state, f!([model] (target,view_state){ let vis = &model.visualization; let activate = || vis.borrow().as_ref().map(|v| v.activate.clone_ref()); let deactivate = || vis.borrow().as_ref().map(|v| v.deactivate.clone_ref()); @@ -634,7 +660,7 @@ impl Container { activate.emit(()); return Some(true); } - } else if !model.is_fullscreen.get() { + } else if !view_state.is_fullscreen() { if let Some(deactivate) = deactivate() { deactivate.emit(()); return Some(false); @@ -645,34 +671,28 @@ impl Container { selection_after_click <- selected_by_click.map(|sel| if *sel {1.0} else {0.0}); selection.target <+ selection_after_click; eval selection.value ((selection) model.view.background.selection.set(*selection)); - - selected_by_going_fullscreen <- bool(&frp.disable_fullscreen,&frp.enable_fullscreen); - selected <- any(selected_by_click,selected_by_going_fullscreen); - - is_selected_changed <= selected.map2(&frp.output.is_selected, |&new,&old| { - (new != old).as_some(new) - }); - frp.source.is_selected <+ is_selected_changed; + is_selected <- selected_by_click || output.fullscreen; + output.is_selected <+ is_selected.on_change(); } // === Fullscreen View === frp::extend! 
{ network - eval_ frp.enable_fullscreen (model.enable_fullscreen()); - eval_ frp.disable_fullscreen (model.disable_fullscreen()); - fullscreen_enabled_weight <- frp.enable_fullscreen.constant(1.0); - fullscreen_disabled_weight <- frp.disable_fullscreen.constant(0.0); + enable_fullscreen <- output.fullscreen.on_true(); + disable_fullscreen <- output.fullscreen.on_false(); + + fullscreen_enabled_weight <- enable_fullscreen.constant(1.0); + fullscreen_disabled_weight <- disable_fullscreen.constant(0.0); fullscreen_weight <- any(fullscreen_enabled_weight,fullscreen_disabled_weight); - frp.source.size <+ frp.set_size; - _eval <- fullscreen_weight.all_with3(&frp.size,scene_shape, - f!([model] (weight,viz_size,scene_size) { + _eval <- fullscreen_weight.all_with4(&output.size,scene_shape,&output.view_state, + f!([model] (weight,viz_size,scene_size,view_state) { let weight_inv = 1.0 - weight; let scene_size : Vector2 = scene_size.into(); let current_size = viz_size * weight_inv + scene_size * *weight; model.set_corner_roundness(weight_inv); - model.set_size(current_size); + model.update_layout(current_size,*view_state); let m1 = model.scene.layers.panel.camera().inversed_view_matrix(); let m2 = model.scene.layers.viz.camera().view_matrix(); @@ -683,6 +703,16 @@ impl Container { let current_pos = pp * weight_inv; model.fullscreen_view.set_position(current_pos); })); + + + // === Data Update === + + data <- input.set_data.unwrap(); + has_data <- input.set_data.is_some(); + reset_data <- data.sample(&new_vis_definition).gate(&has_data); + data_update <- any(&data,&reset_data); + eval data_update ((t) model.set_visualization_data(t)); + } @@ -709,8 +739,8 @@ impl Container { // // This is not optimal the optimal solution to this problem, as it also means that we have // an animation on an invisible component running. 
- frp.set_size.emit(Vector2(DEFAULT_SIZE.0, DEFAULT_SIZE.1)); - frp.set_visualization.emit(None); + self.frp.public.set_size(Vector2(DEFAULT_SIZE.0, DEFAULT_SIZE.1)); + self.frp.public.set_visualization(None); self } diff --git a/app/gui/view/graph-editor/src/component/visualization/container/action_bar.rs b/app/gui/view/graph-editor/src/component/visualization/container/action_bar.rs index 7e7db0845f6c..6163c35057c4 100644 --- a/app/gui/view/graph-editor/src/component/visualization/container/action_bar.rs +++ b/app/gui/view/graph-editor/src/component/visualization/container/action_bar.rs @@ -432,6 +432,11 @@ impl ActionBar { } self } + + /// Visualization Chooser component getter. + pub fn visualization_chooser(&self) -> &VisualizationChooser { + &self.model.visualization_chooser + } } impl display::Object for ActionBar { diff --git a/app/gui/view/graph-editor/src/component/visualization/container/fullscreen.rs b/app/gui/view/graph-editor/src/component/visualization/container/fullscreen.rs index 110b7ac24283..35df480ffa0d 100644 --- a/app/gui/view/graph-editor/src/component/visualization/container/fullscreen.rs +++ b/app/gui/view/graph-editor/src/component/visualization/container/fullscreen.rs @@ -9,39 +9,6 @@ use ensogl::display; use ensogl::display::scene::Scene; use ensogl::display::DomSymbol; use ensogl::system::web; -use ensogl_hardcoded_theme as theme; - - - -// ============== -// === Shapes === -// ============== - -/// Container background shape definition. -/// -/// Provides a backdrop and outline for visualisations. Can indicate the selection status of the -/// container. -/// TODO : We do not use backgrounds because otherwise they would overlap JS -/// visualizations. Instead we added a HTML background to the `View`. -/// This should be further investigated while fixing rust visualization displaying. (#526) -pub mod background { - use super::*; - - ensogl::shape! 
{ - alignment = center; - (style:Style,selected:f32,radius:f32,roundness:f32) { - let width : Var = "input_size.x".into(); - let height : Var = "input_size.y".into(); - let radius = 1.px() * &radius; - let color_path = theme::graph_editor::visualization::background; - let color_bg = style.get_color(color_path); - let corner_radius = &radius * &roundness; - let background = Rect((&width,&height)).corners_radius(corner_radius); - let background = background.fill(color_bg); - background.into() - } - } -} @@ -54,9 +21,9 @@ pub mod background { #[allow(missing_docs)] pub struct Panel { display_object: display::object::Instance, + // Note: We use a HTML background, because a EnsoGL background would be + // overlapping the JS visualization. pub background_dom: DomSymbol, - // TODO: See TODO above. - // background : background::View, } impl Panel { @@ -76,9 +43,6 @@ impl Panel { let div = web::document.create_div_or_panic(); let background_dom = DomSymbol::new(&div); - // TODO : We added a HTML background to the `View`, because "shape" background was - // overlapping the JS visualization. This should be further investigated - // while fixing rust visualization displaying. (#796) background_dom.dom().set_style_or_warn("width", "0"); background_dom.dom().set_style_or_warn("height", "0"); background_dom.dom().set_style_or_warn("z-index", "1"); diff --git a/app/gui/view/graph-editor/src/component/visualization/layer.rs b/app/gui/view/graph-editor/src/component/visualization/layer.rs index f34635bb9f96..45e355928fbb 100644 --- a/app/gui/view/graph-editor/src/component/visualization/layer.rs +++ b/app/gui/view/graph-editor/src/component/visualization/layer.rs @@ -16,6 +16,8 @@ pub enum Layer { Default, /// Display the visualisation over the scene. Front, + /// Display the visualisation in fullscreen mode. 
+ Fullscreen, } impl Layer { @@ -24,6 +26,7 @@ impl Layer { match self { Layer::Default => scene.dom.layers.back.manage(dom), Layer::Front => scene.dom.layers.front.manage(dom), + Layer::Fullscreen => scene.dom.layers.fullscreen_vis.manage(dom), } } } diff --git a/app/gui/view/graph-editor/src/lib.rs b/app/gui/view/graph-editor/src/lib.rs index a1755d483a07..6a2dc41f1fef 100644 --- a/app/gui/view/graph-editor/src/lib.rs +++ b/app/gui/view/graph-editor/src/lib.rs @@ -1596,6 +1596,7 @@ impl GraphEditorModelWithNetwork { let touch = &self.touch_state; let model = &self.model; let NodeCreationContext { pointer_style, output_press, input_press, output } = ctx; + let visualisation = node.visualization(); if let Some(network) = self.network.upgrade_or_warn() { frp::new_bridge_network! { [network, node_network] graph_node_bridge @@ -1697,8 +1698,8 @@ impl GraphEditorModelWithNetwork { // === Visualizations === - visualization_shown <- node.visualization_visible.gate(&node.visualization_visible); - visualization_hidden <- node.visualization_visible.gate_not(&node.visualization_visible); + visualization_shown <- visualisation.visible.on_true(); + visualization_hidden <- visualisation.visible.on_false(); let vis_is_selected = node_model.visualization.frp.is_selected.clone_ref(); @@ -1711,7 +1712,7 @@ impl GraphEditorModelWithNetwork { node_model.visualization.frp.preprocessor.map(move |preprocessor| { (node_id,preprocessor.clone()) }); - output.visualization_preprocessor_changed <+ preprocessor_changed.gate(&node.visualization_visible); + output.visualization_preprocessor_changed <+ preprocessor_changed; metadata <- any(...); @@ -1729,7 +1730,7 @@ impl GraphEditorModelWithNetwork { init <- source::<()>(); enabled_visualization_path <- init.all_with3( - &node.visualization_enabled, &node.visualization_path, + &visualisation.visible, &visualisation.visualization_path, move |_init, is_enabled, path| (node_id, is_enabled.and_option(path.clone())) ); 
output.enabled_visualization_path <+ enabled_visualization_path; @@ -2004,17 +2005,20 @@ impl GraphEditorModel { } } - fn enable_visualization_fullscreen(&self, node_id: impl Into) { + fn enable_visualization_fullscreen(&self, node_id: impl Into) -> bool { let node_id = node_id.into(); if let Some(node) = self.nodes.get_cloned_ref(&node_id) { - node.model().visualization.frp.enable_fullscreen.emit(()); + node.frp().enable_fullscreen_visualization(); + node.visualization().fullscreen.value() + } else { + false } } fn disable_visualization_fullscreen(&self, node_id: impl Into) { let node_id = node_id.into(); if let Some(node) = self.nodes.get_cloned_ref(&node_id) { - node.model().visualization.frp.disable_fullscreen.emit(()); + node.model().visualization.frp.set_view_state(visualization::ViewState::Enabled); } } @@ -3581,7 +3585,7 @@ fn new_graph_editor(app: &Application) -> GraphEditor { viz_tgt_nodes_off <- viz_tgt_nodes.map(f!([model](node_ids) { node_ids.iter().cloned().filter(|node_id| { model.nodes.get_cloned_ref(node_id) - .map(|node| !node.visualization_enabled.value()) + .map(|node| !node.visualization().visible.value()) .unwrap_or_default() }).collect_vec() })); @@ -3597,7 +3601,9 @@ fn new_graph_editor(app: &Application) -> GraphEditor { eval viz_enable ((id) model.enable_visualization(id)); eval viz_disable ((id) model.disable_visualization(id)); eval viz_preview_disable ((id) model.disable_visualization(id)); - eval viz_fullscreen_on ((id) model.enable_visualization_fullscreen(id)); + fullscreen_vis_was_enabled <- viz_fullscreen_on.map(f!((id) + model.enable_visualization_fullscreen(id).then(|| *id)) + ).unwrap(); viz_fs_to_close <- out.visualization_fullscreen.sample(&inputs.close_fullscreen_visualization); eval viz_fs_to_close ([model](vis) { @@ -3607,7 +3613,7 @@ fn new_graph_editor(app: &Application) -> GraphEditor { } }); - out.visualization_fullscreen <+ viz_fullscreen_on.map(|id| Some(*id)); + out.visualization_fullscreen <+ 
fullscreen_vis_was_enabled.map(|id| Some(*id)); out.visualization_fullscreen <+ inputs.close_fullscreen_visualization.constant(None); out.is_fs_visualization_displayed <+ out.visualization_fullscreen.map(Option::is_some); diff --git a/build-config.yaml b/build-config.yaml index 175838ecdafc..d51cbb3f8813 100644 --- a/build-config.yaml +++ b/build-config.yaml @@ -1,6 +1,6 @@ # Options intended to be common for all developers. -wasm-size-limit: 15.91 MiB +wasm-size-limit: 15.92 MiB required-versions: # NB. The Rust version is pinned in rust-toolchain.toml. diff --git a/lib/rust/ensogl/component/toggle-button/src/lib.rs b/lib/rust/ensogl/component/toggle-button/src/lib.rs index 2859b58b674e..940cd9ca414e 100644 --- a/lib/rust/ensogl/component/toggle-button/src/lib.rs +++ b/lib/rust/ensogl/component/toggle-button/src/lib.rs @@ -54,7 +54,7 @@ pub trait ColorableShape: ShapeWithDefaultableData { // === Frp === // =========== -ensogl_core::define_endpoints! { +ensogl_core::define_endpoints_2! { Input { set_visibility (bool), set_color_scheme (ColorScheme), @@ -65,7 +65,11 @@ ensogl_core::define_endpoints! { set_read_only (bool), } Output { + /// Current state of the button, as visible in the scene, state (bool), + /// Last state of the button from a user interaction. + /// This ignores state changes based on the `set_state` input. 
+ last_user_state (bool), visible (bool), mouse_over (), mouse_out (), @@ -232,6 +236,8 @@ impl ToggleButton { fn init_frp(self, app: &Application, tooltip_style: tooltip::Style) -> Self { let network = &self.frp.network; let frp = &self.frp; + let input = &self.frp.private.input; + let output = &self.frp.private.output; let model = &self.model; let color = color::Animation::new(network); let icon = &model.icon.events_deprecated; @@ -249,36 +255,37 @@ impl ToggleButton { // === Input Processing === - eval frp.set_size ((size) model.icon.set_size(*size);); + eval input.set_size ((size) model.icon.set_size(*size);); // === State === - clicked <- icon.mouse_down_primary.gate_not(&frp.set_read_only); - toggle <- any_(frp.toggle, clicked); - frp.source.state <+ frp.state.not().sample(&toggle); - frp.source.state <+ frp.set_state; + clicked <- icon.mouse_down_primary.gate_not(&input.set_read_only); + toggle <- any_(input.toggle, clicked); + output.state <+ output.state.not().sample(&toggle); + output.state <+ input.set_state; + output.last_user_state <+ output.state.sample(&clicked); // === Mouse Interactions === - frp.source.mouse_over <+ icon.mouse_over; - frp.source.mouse_out <+ icon.mouse_out; - frp.source.is_hovered <+ bool(&icon.mouse_out, &icon.mouse_over); - frp.source.is_pressed <+ bool(&icon.mouse_up_primary, &icon.mouse_down_primary); + output.mouse_over <+ icon.mouse_over; + output.mouse_out <+ icon.mouse_out; + output.is_hovered <+ bool(&icon.mouse_out, &icon.mouse_over); + output.is_pressed <+ bool(&icon.mouse_up_primary, &icon.mouse_down_primary); // === Color === - invisible <- frp.set_visibility.on_false().constant(0.0); + invisible <- input.set_visibility.on_false().constant(0.0); color.target_alpha <+ invisible; - frp.source.visible <+ frp.set_visibility; + output.visible <+ input.set_visibility; - button_state <- all_with4(&frp.visible,&frp.state,&frp.is_hovered,&frp.is_pressed, + button_state <- 
all_with4(&output.visible,&output.state,&output.is_hovered,&output.is_pressed, |a,b,c,d| ButtonState::new(*a,*b,*c,*d)); - color_target <- all_with(&frp.set_color_scheme,&button_state, + color_target <- all_with(&input.set_color_scheme,&button_state, |colors,state| colors.query(*state)); color.target <+ color_target; @@ -287,7 +294,7 @@ impl ToggleButton { // === Tooltip === - tooltip <- frp.is_hovered.map(move |is_hovered| { + tooltip <- output.is_hovered.map(move |is_hovered| { if *is_hovered { tooltip_style.clone() } else { From 3ffbe9cecfa0ce6a1ce449b0c73ea0f0bfc86b50 Mon Sep 17 00:00:00 2001 From: GregoryTravis Date: Mon, 5 Jun 2023 13:21:13 -0400 Subject: [PATCH 20/39] Handle some edge cases in rounding (inexact representations and overflows) (#6922) --- CHANGELOG.md | 2 + .../Base/0.0.0-dev/src/Data/Numbers.enso | 50 ++++--- test/Tests/src/Data/Numbers_Spec.enso | 128 +++++++++++++++--- 3 files changed, 142 insertions(+), 38 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 381646c6cb8f..7e79b9f292f3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -476,6 +476,7 @@ - [Added `.round`, `.truncate`, `.ceil`, and `.floor` to `Column`.][6817] - [Added execution control to `Table.write` and various bug fixes.][6835] - [Implemented `Table.add_row_number`.][6890] +- [Handling edge cases in rounding.][6922] [debug-shortcuts]: https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug @@ -691,6 +692,7 @@ [6817]: https://github.com/enso-org/enso/pull/6817 [6835]: https://github.com/enso-org/enso/pull/6835 [6890]: https://github.com/enso-org/enso/pull/6890 +[6922]: https://github.com/enso-org/enso/pull/6922 #### Enso Compiler diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso index 88917f73a335..2003eba9d3b4 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso +++ 
b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Numbers.enso @@ -615,23 +615,16 @@ type Decimal msg = "round cannot accept " + self.to_text Error.throw (Arithmetic_Error.Error msg) False -> check_round_input self <| - decimal_result = case use_bankers of - False -> - scale = 10 ^ decimal_places - ((self * scale) + 0.5).floor / scale - True -> - ## If the largest integer <= self is odd, use normal - round-towards-positive-infinity rounding; otherwise, - use round-towards-negative-infinity rounding. - scale = 10 ^ decimal_places - scaled_self = self * scale - self_scaled_floor = scaled_self.floor - is_even = (self_scaled_floor % 2) == 0 - case is_even of - False -> - (scaled_self + 0.5).floor / scale - True -> - (scaled_self - 0.5).ceil / scale + decimal_result = + # Algorithm taken from https://stackoverflow.com/a/7211688 + scale = 10 ^ decimal_places + scaled = self * scale + round_base = scaled.floor + round_midpoint = (round_base + 0.5) / scale + even_is_up = if self >= 0 then (scaled.truncate % 2) != 0 else (scaled.truncate % 2) == 0 + half_goes_up = if use_bankers then even_is_up else True + do_round_up = if half_goes_up then self >= round_midpoint else self > round_midpoint + if do_round_up then ((round_base + 1.0) / scale) else (round_base / scale) # Convert to integer if it's really an integer anyway. if decimal_places > 0 then decimal_result else decimal_result.truncate @@ -940,11 +933,24 @@ type Integer 12250 . round -2 use_bankers=True == 12200 round : Integer -> Boolean -> Integer ! Illegal_Argument round self decimal_places=0 use_bankers=False = - check_decimal_places decimal_places <| check_round_input self <| - ## It's already an integer so unless decimal_places is - negative, the value is unchanged. - if decimal_places >= 0 then self else - self.to_decimal.round decimal_places use_bankers . truncate + ## It's already an integer so unless decimal_places is + negative, the value is unchanged. 
+ if decimal_places >= 0 then self else + check_decimal_places decimal_places <| check_round_input self <| + scale = 10 ^ -decimal_places + halfway = scale.div 2 + remainder = self % scale + scaled_down = self.div scale + result_unnudged = scaled_down * scale + case self >= 0 of + True -> + half_goes_up = if use_bankers then (scaled_down % 2) != 0 else True + round_up = if half_goes_up then remainder >= halfway else remainder > halfway + if round_up then result_unnudged + scale else result_unnudged + False -> + half_goes_up = if use_bankers then (scaled_down % 2) == 0 else True + round_up = if half_goes_up then remainder < -halfway else remainder <= -halfway + if round_up then result_unnudged - scale else result_unnudged ## Compute the negation of this. diff --git a/test/Tests/src/Data/Numbers_Spec.enso b/test/Tests/src/Data/Numbers_Spec.enso index 09eac34a8d7e..1663c79092b5 100644 --- a/test/Tests/src/Data/Numbers_Spec.enso +++ b/test/Tests/src/Data/Numbers_Spec.enso @@ -472,6 +472,7 @@ spec = Number.nan . equals 0 . should_fail_with Incomparable_Values Test.group "Decimal.round" <| + Test.specify "Can round positive decimals correctly" <| 3.0 . round . should_equal 3 3.00001 . round . should_equal 3 @@ -585,6 +586,64 @@ spec = 231.2 . round . should_be_a Integer 231.2 . round -1 . should_be_a Integer + Test.specify "Can round correctly near the precision limit" <| + 1.22222222225 . round 10 . should_equal 1.2222222223 + 1.222222222225 . round 11 . should_equal 1.22222222223 + 1.2222222222225 . round 12 . should_equal 1.222222222223 + 1.22222222222225 . round 13 . should_equal 1.2222222222223 + 1.222222222222225 . round 14 . should_equal 1.22222222222223 + 1.2222222222222225 . round 15 . should_equal 1.222222222222223 + + -1.22222222225 . round 10 . should_equal -1.2222222222 + -1.222222222225 . round 11 . should_equal -1.22222222222 + -1.2222222222225 . round 12 . should_equal -1.222222222222 + -1.22222222222225 . round 13 . 
should_equal -1.2222222222222 + -1.222222222222225 . round 14 . should_equal -1.22222222222222 + -1.2222222222222225 . round 15 . should_equal -1.222222222222222 + + 1.22222222235 . round 10 . should_equal 1.2222222224 + 1.222222222235 . round 11 . should_equal 1.22222222224 + 1.2222222222235 . round 12 . should_equal 1.222222222224 + 1.22222222222235 . round 13 . should_equal 1.2222222222224 + 1.222222222222235 . round 14 . should_equal 1.22222222222224 + 1.2222222222222235 . round 15 . should_equal 1.222222222222224 + + -1.22222222235 . round 10 . should_equal -1.2222222223 + -1.222222222235 . round 11 . should_equal -1.22222222223 + -1.2222222222235 . round 12 . should_equal -1.222222222223 + -1.22222222222235 . round 13 . should_equal -1.2222222222223 + -1.222222222222235 . round 14 . should_equal -1.22222222222223 + -1.2222222222222235 . round 15 . should_equal -1.222222222222223 + + Test.specify "Can round correctly near the precision limit, using banker's rounding" <| + 1.22222222225 . round 10 use_bankers=True . should_equal 1.2222222222 + 1.222222222225 . round 11 use_bankers=True . should_equal 1.22222222222 + 1.2222222222225 . round 12 use_bankers=True . should_equal 1.222222222222 + 1.22222222222225 . round 13 use_bankers=True . should_equal 1.2222222222222 + 1.222222222222225 . round 14 use_bankers=True . should_equal 1.22222222222222 + 1.2222222222222225 . round 15 use_bankers=True . should_equal 1.222222222222222 + + -1.22222222225 . round 10 use_bankers=True . should_equal -1.2222222222 + -1.222222222225 . round 11 use_bankers=True . should_equal -1.22222222222 + -1.2222222222225 . round 12 use_bankers=True . should_equal -1.222222222222 + -1.22222222222225 . round 13 use_bankers=True . should_equal -1.2222222222222 + -1.222222222222225 . round 14 use_bankers=True . should_equal -1.22222222222222 + -1.2222222222222225 . round 15 use_bankers=True . should_equal -1.222222222222222 + + 1.22222222235 . round 10 use_bankers=True . 
should_equal 1.2222222224 + 1.222222222235 . round 11 use_bankers=True . should_equal 1.22222222224 + 1.2222222222235 . round 12 use_bankers=True . should_equal 1.222222222224 + 1.22222222222235 . round 13 use_bankers=True . should_equal 1.2222222222224 + 1.222222222222235 . round 14 use_bankers=True . should_equal 1.22222222222224 + 1.2222222222222235 . round 15 use_bankers=True . should_equal 1.222222222222224 + + -1.22222222235 . round 10 use_bankers=True . should_equal -1.2222222224 + -1.222222222235 . round 11 use_bankers=True . should_equal -1.22222222224 + -1.2222222222235 . round 12 use_bankers=True . should_equal -1.222222222224 + -1.22222222222235 . round 13 use_bankers=True . should_equal -1.2222222222224 + -1.222222222222235 . round 14 use_bankers=True . should_equal -1.22222222222224 + -1.2222222222222235 . round 15 use_bankers=True . should_equal -1.222222222222224 + Test.specify "Input out of range" <| 100000000000000.0 . round . should_fail_with Illegal_Argument -100000000000000.0 . round . should_fail_with Illegal_Argument @@ -602,10 +661,12 @@ spec = Number.positive_infinity . round . should_fail_with Arithmetic_Error Number.negative_infinity . round . should_fail_with Arithmetic_Error - Test.specify "Banker's rounding failure" pending="Fails because of basic floating-point inaccuracy when adding 0.5" <| + Test.specify "Floating point imperfect representation counter-examples" <| 1.225 . round 2 use_bankers=True . should_equal 1.22 # Actual result 1.23 + 37.785 . round 2 . should_equal 37.79 Test.group "Integer.round" <| + Test.specify "Can round small integers to a specified number of decimal places correctly (value is unchanged)" 0 . round . should_equal 0 3 . round . should_equal 3 @@ -615,76 +676,110 @@ spec = 3 . round 1 . should_equal 3 -3 . round 1 . 
should_equal -3 - Test.specify "Can round small integers to a specified number of negative places correctly" + Test.specify "Can round integers to a specified number of negative places correctly" + 0 . round -1 . should_equal 0 4 . round -1 . should_equal 0 + 5 . round -1 . should_equal 10 + 6 . round -1 . should_equal 10 + 9 . round -1 . should_equal 10 + 10 . round -1 . should_equal 10 + 11 . round -1 . should_equal 10 24 . round -1 . should_equal 20 25 . round -1 . should_equal 30 29 . round -1 . should_equal 30 30 . round -1 . should_equal 30 31 . round -1 . should_equal 30 + 2000 . round -3 . should_equal 2000 + 2001 . round -3 . should_equal 2000 2412 . round -3 . should_equal 2000 2499 . round -3 . should_equal 2000 2500 . round -3 . should_equal 3000 + 2501 . round -3 . should_equal 3000 2511 . round -3 . should_equal 3000 2907 . round -3 . should_equal 3000 + 2999 . round -3 . should_equal 3000 3000 . round -3 . should_equal 3000 + 3001 . round -3 . should_equal 3000 3098 . round -3 . should_equal 3000 3101 . round -3 . should_equal 3000 - Test.specify "Can round negative small integers to a specified number of negative places correctly" + Test.specify "Can round negative integers to a specified number of negative places correctly" -4 . round -1 . should_equal 0 + -5 . round -1 . should_equal 0 + -6 . round -1 . should_equal -10 + -9 . round -1 . should_equal -10 + -10 . round -1 . should_equal -10 + -11 . round -1 . should_equal -10 -24 . round -1 . should_equal -20 -25 . round -1 . should_equal -20 -29 . round -1 . should_equal -30 -30 . round -1 . should_equal -30 -31 . round -1 . should_equal -30 + -2000 . round -3 . should_equal -2000 + -2001 . round -3 . should_equal -2000 -2412 . round -3 . should_equal -2000 -2499 . round -3 . should_equal -2000 -2500 . round -3 . should_equal -2000 + -2501 . round -3 . should_equal -3000 -2511 . round -3 . should_equal -3000 -2907 . round -3 . should_equal -3000 + -2999 . round -3 . should_equal -3000 -3000 . 
round -3 . should_equal -3000 + -3001 . round -3 . should_equal -3000 -3098 . round -3 . should_equal -3000 -3101 . round -3 . should_equal -3000 - Test.specify "Banker's rounding handles half-way values correctly" <| - 12350 . round -2 use_bankers=True . should_equal 12400 - 12250 . round -2 use_bankers=True . should_equal 12200 - -12350 . round -2 use_bankers=True . should_equal -12400 - -12250 . round -2 use_bankers=True . should_equal -12200 - - Test.specify "Banker's rounding handles non-half-way values just like normal rounding" <| + Test.specify "Can round negative integers to a specified number of negative places with banker's rounding correctly" <| + 12300 . round -2 use_bankers=True . should_equal 12300 + 12301 . round -2 use_bankers=True . should_equal 12300 12330 . round -2 use_bankers=True . should_equal 12300 + 12349 . round -2 use_bankers=True . should_equal 12300 + 12350 . round -2 use_bankers=True . should_equal 12400 + 12351 . round -2 use_bankers=True . should_equal 12400 12370 . round -2 use_bankers=True . should_equal 12400 12430 . round -2 use_bankers=True . should_equal 12400 12470 . round -2 use_bankers=True . should_equal 12500 + 12249 . round -2 use_bankers=True . should_equal 12200 + 12250 . round -2 use_bankers=True . should_equal 12200 + 12251 . round -2 use_bankers=True . should_equal 12300 + + -12300 . round -2 use_bankers=True . should_equal -12300 + -12301 . round -2 use_bankers=True . should_equal -12300 -12330 . round -2 use_bankers=True . should_equal -12300 + -12349 . round -2 use_bankers=True . should_equal -12300 + -12350 . round -2 use_bankers=True . should_equal -12400 + -12351 . round -2 use_bankers=True . should_equal -12400 -12370 . round -2 use_bankers=True . should_equal -12400 -12430 . round -2 use_bankers=True . should_equal -12400 -12470 . round -2 use_bankers=True . should_equal -12500 + -12249 . round -2 use_bankers=True . should_equal -12200 + -12250 . round -2 use_bankers=True . should_equal -12200 + -12251 . 
round -2 use_bankers=True . should_equal -12300 + Test.specify "Returns the correct type" <| 231 . round 1 . should_be_a Integer 231 . round 0 . should_be_a Integer 231 . round . should_be_a Integer 231 . round -1 . should_be_a Integer - Test.specify "Decimal places out of range" <| - 3 . round 16 . should_fail_with Illegal_Argument - 3 . round -16 . should_fail_with Illegal_Argument - Test.specify "Input out of range" <| - 100000000000000 . round . should_fail_with Illegal_Argument - -100000000000000 . round . should_fail_with Illegal_Argument 100000000000000 . round -2 . should_fail_with Illegal_Argument -100000000000000 . round -2 . should_fail_with Illegal_Argument 99999999999999 . round -2 . should_equal 100000000000000 -99999999999999 . round -2 . should_equal -100000000000000 + Test.specify "Input out of range is ignored when the implementation returns its argument immediately" <| + 100000000000000 . round . should_equal 100000000000000 + -100000000000000 . round . should_equal -100000000000000 + 100000000000000 . round 1 . should_equal 100000000000000 + -100000000000000 . round 1 . should_equal -100000000000000 + Test.group "Decimal.truncate" + Test.specify "Correctly converts to Integer" <| 0.1.truncate . should_equal 0 0.9.truncate . should_equal 0 @@ -696,6 +791,7 @@ spec = -3.9.truncate . should_equal -3 Test.group "Integer.truncate" + Test.specify "Returns its argument" <| 0.truncate . should_equal 0 3.truncate . should_equal 3 From 27feaf6bc583a3e221524b5add8abc50969a3f4e Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Tue, 6 Jun 2023 10:05:24 +0200 Subject: [PATCH 21/39] Eliminate deadlock scenario during file edit/open/close requests (#6920) At the beginning of the execution `EnsureCompiledJob` acquired write compilation lock. When compiling individual modules it would then - acquire file lock - acquire read compilation lock The second one was spurious since it already kept the write lock. 
This sequence meant however that `CloseFileCmd` or `OpenFileCmd` can lead to a deadlock when requests come in close succession. This is because commands: - acquire file lock - acquire read compilation lock So `EnsureCompiledJob` might have the (write) compilation lock but the commands could have file lock. And the second required lock for either the job or the command could never be acquired. Flipping the order did the trick. Partially solves #6841. # Important Notes For some reason we don't get updates for the newly added node, as illustrated in the screenshot, but that could be related to the close/open action. Will need to dig more. ![Screenshot from 2023-06-01 16-45-17](https://github.com/enso-org/enso/assets/292128/900aa9b3-b2b2-4e4d-93c8-267f92b79352) --- .../java/org/enso/interpreter/service/ExecutionService.java | 5 +---- .../enso/interpreter/instrument/command/CloseFileCmd.scala | 4 ++-- .../enso/interpreter/instrument/command/OpenFileCmd.scala | 4 ++-- .../enso/interpreter/instrument/job/EnsureCompiledJob.scala | 2 -- .../org/enso/interpreter/instrument/job/ExecuteJob.scala | 4 ++-- 5 files changed, 7 insertions(+), 12 deletions(-) diff --git a/engine/runtime/src/main/java/org/enso/interpreter/service/ExecutionService.java b/engine/runtime/src/main/java/org/enso/interpreter/service/ExecutionService.java index f6866ddd2ae9..80b9a01b6fdd 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/service/ExecutionService.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/service/ExecutionService.java @@ -345,10 +345,7 @@ public void setModuleSources(File path, String contents) { if (module.isEmpty()) { module = context.createModuleForFile(path); } - module.ifPresent( - mod -> { - mod.setLiteralSource(contents); - }); + module.ifPresent(mod -> mod.setLiteralSource(contents)); } /** diff --git a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/command/CloseFileCmd.scala 
b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/command/CloseFileCmd.scala index 7d8b7ea62855..1a80ecb32122 100644 --- a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/command/CloseFileCmd.scala +++ b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/command/CloseFileCmd.scala @@ -17,13 +17,13 @@ class CloseFileCmd(request: Api.CloseFileNotification) extends Command(None) { ec: ExecutionContext ): Future[Unit] = Future { - ctx.locking.acquireFileLock(request.path) ctx.locking.acquireReadCompilationLock() + ctx.locking.acquireFileLock(request.path) try { ctx.executionService.resetModuleSources(request.path) } finally { - ctx.locking.releaseReadCompilationLock() ctx.locking.releaseFileLock(request.path) + ctx.locking.releaseReadCompilationLock() } } diff --git a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/command/OpenFileCmd.scala b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/command/OpenFileCmd.scala index 308762d6d614..1aa6c7aed120 100644 --- a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/command/OpenFileCmd.scala +++ b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/command/OpenFileCmd.scala @@ -17,16 +17,16 @@ class OpenFileCmd(request: Api.OpenFileNotification) extends Command(None) { ec: ExecutionContext ): Future[Unit] = Future { - ctx.locking.acquireFileLock(request.path) ctx.locking.acquireReadCompilationLock() + ctx.locking.acquireFileLock(request.path) try { ctx.executionService.setModuleSources( request.path, request.contents ) } finally { - ctx.locking.releaseReadCompilationLock() ctx.locking.releaseFileLock(request.path) + ctx.locking.releaseReadCompilationLock() } } diff --git a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/EnsureCompiledJob.scala b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/EnsureCompiledJob.scala index 31cec95b5ad6..887d60aa3574 100644 --- 
a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/EnsureCompiledJob.scala +++ b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/EnsureCompiledJob.scala @@ -230,7 +230,6 @@ final class EnsureCompiledJob(protected val files: Iterable[File]) file: File )(implicit ctx: RuntimeContext): Option[Changeset[Rope]] = { ctx.locking.acquireFileLock(file) - ctx.locking.acquireReadCompilationLock() ctx.locking.acquirePendingEditsLock() try { val pendingEdits = ctx.state.pendingEdits.dequeue(file) @@ -253,7 +252,6 @@ final class EnsureCompiledJob(protected val files: Iterable[File]) Option.when(shouldExecute)(changeset) } finally { ctx.locking.releasePendingEditsLock() - ctx.locking.releaseReadCompilationLock() ctx.locking.releaseFileLock(file) } } diff --git a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ExecuteJob.scala b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ExecuteJob.scala index 23ff69bf0492..109601f31724 100644 --- a/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ExecuteJob.scala +++ b/engine/runtime/src/main/scala/org/enso/interpreter/instrument/job/ExecuteJob.scala @@ -26,8 +26,8 @@ class ExecuteJob( /** @inheritdoc */ override def run(implicit ctx: RuntimeContext): Unit = { - ctx.locking.acquireContextLock(contextId) ctx.locking.acquireReadCompilationLock() + ctx.locking.acquireContextLock(contextId) val context = ctx.executionService.getContext val originalExecutionEnvironment = executionEnvironment.map(_ => context.getExecutionEnvironment) @@ -48,8 +48,8 @@ class ExecuteJob( } } finally { originalExecutionEnvironment.foreach(context.setExecutionEnvironment) - ctx.locking.releaseReadCompilationLock() ctx.locking.releaseContextLock(contextId) + ctx.locking.releaseReadCompilationLock() } ctx.endpoint.sendToClient(Api.Response(Api.ExecutionComplete(contextId))) StartBackgroundProcessingJob.startBackgroundJobs() From f1bdcbb53452142ee67d1653610e8dfbcb2f862b Mon Sep 17 
00:00:00 2001 From: Jaroslav Tulach Date: Tue, 6 Jun 2023 10:06:10 +0200 Subject: [PATCH 22/39] Documenting how to obtain static methods for a type (#6938) Fixes #6748 by documenting how to obtain all static methods available on a `Type`. --- .../lib/Standard/Base/0.0.0-dev/src/Meta.enso | 21 ++++++++++++++++++- test/Tests/src/Semantic/Meta_Spec.enso | 11 ++++++++++ 2 files changed, 31 insertions(+), 1 deletion(-) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Meta.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Meta.enso index ccb64e2ac45b..3f224cedc8d5 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Meta.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Meta.enso @@ -36,7 +36,26 @@ type Type ## ADVANCED - Returns a vector of `Meta.Constructor` for this type + Returns a vector of method names that can be invoked + on instances of this type. + ? Static Methods + + To obtain list of _static methods_ on a given type + use `Meta.type_of`. + + > Example + All instance methods to invoke on `Integer` as + `(v:Integer) v.method_name...`: + + Meta.meta Integer . methods + + > Example + All static methods to invoke on `Integer` as + `Integer.method_name...`: + + Meta.meta (Meta.type_of Integer) . methods + + methods : Vector methods self = Vector.from_polyglot_array (get_type_methods self.value) diff --git a/test/Tests/src/Semantic/Meta_Spec.enso b/test/Tests/src/Semantic/Meta_Spec.enso index c4f6eb612118..9ce2e1f17fef 100644 --- a/test/Tests/src/Semantic/Meta_Spec.enso +++ b/test/Tests/src/Semantic/Meta_Spec.enso @@ -30,9 +30,13 @@ type My_Type @b (self -> self.foo) other_method self a = a + create foo bar = My_Type.Value foo bar 3 + @self ("se" + "lf") My_Type.my_method self = self.foo + self.bar + self.baz +My_Type.factory = My_Type.create 1 2 + @a (test_method 3 4) @b (Test_Type.Value 49) @c (Error.throw "Error Value") @@ -247,9 +251,16 @@ spec = methods.sort . 
should_equal ['bar', 'baz', 'first_method', 'foo', 'my_method', 'other_method', 'second_method'] + Test.specify "static methods of MyType" <| + methods = Meta.meta (Meta.type_of My_Type) . methods + methods.sort . should_equal ['Value', 'create', 'factory', 'first_method', 'my_method', 'other_method', 'second_method'] + Test.specify "methods of Integer" <| Meta.meta Integer . methods . sort . should_equal ['round', 'truncate'] + Test.specify "static methods of Integer" <| + Meta.meta (Meta.type_of Integer) . methods . sort . should_equal ['parse', 'parse_builtin', 'round', 'truncate'] + Test.specify "should correctly handle Java values" <| java_meta = Meta.meta Random.new java_meta . should_be_a Meta.Polyglot From b5138394180c91a3bfda4c4a34780fe93771e019 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Wa=C5=9Bko?= Date: Tue, 6 Jun 2023 12:36:05 +0200 Subject: [PATCH 23/39] Refactor `create_database_table` into `Connection.create_table` and `select_into_database_table`, implement `Set`. (#6925) First part for #6498 - refactoring of the upload infrastructure, in preparation for `update_database_table`. Implemented a `Set` data structure which was long needed. The APIs are added and an initial implementation is created, but it is not complete - but it has grown significantly already so the remaining implementation will be done as a separate PR. Adds some basic ability for a function to ensure that it is only executed from within a transaction. 
--- CHANGELOG.md | 3 + .../Standard/Base/0.0.0-dev/src/Data/Map.enso | 18 +- .../Standard/Base/0.0.0-dev/src/Data/Set.enso | 109 ++++++++ .../lib/Standard/Base/0.0.0-dev/src/Main.enso | 2 + .../0.0.0-dev/src/Connection/Connection.enso | 62 ++++- .../0.0.0-dev/src/Data/Update_Action.enso | 14 + .../Database/0.0.0-dev/src/Errors.enso | 17 +- .../src/Extensions/Upload_Database_Table.enso | 88 ++++++ .../Extensions/Upload_Default_Helpers.enso | 37 +++ .../Extensions/Upload_In_Memory_Table.enso | 93 +++++++ .../src/Extensions/Upload_Table.enso | 214 --------------- .../src/Internal/In_Transaction.enso | 23 ++ .../src/Internal/JDBC_Connection.enso | 5 +- .../Postgres/Postgres_Connection.enso | 64 ++++- .../Internal/SQLite/SQLite_Connection.enso | 64 ++++- .../0.0.0-dev/src/Internal/Upload_Table.enso | 248 +++++++++++++++++ .../Standard/Database/0.0.0-dev/src/Main.enso | 6 +- .../0.0.0-dev/src/Data/Type/Storage.enso | 2 + .../0.0.0-dev/src/Internal/Table_Helpers.enso | 2 +- .../0.0.0-dev/src/Scatter_Plot.enso | 2 +- .../Conversion_Spec.enso | 19 ++ .../Table_Tests/src/Database/Common_Spec.enso | 2 +- .../src/Database/Postgres_Spec.enso | 8 +- .../src/Database/Redshift_Spec.enso | 6 +- .../Table_Tests/src/Database/SQLite_Spec.enso | 10 +- .../Types/SQLite_Type_Mapping_Spec.enso | 5 +- .../Table_Tests/src/Database/Upload_Spec.enso | 257 ++++++++++++++---- .../Table_Tests/src/In_Memory/Table_Spec.enso | 5 +- test/Tests/src/Data/Map_Spec.enso | 4 +- test/Tests/src/Data/Set_Spec.enso | 55 ++++ test/Tests/src/Main.enso | 6 +- test/Visualization_Tests/src/Table_Spec.enso | 2 +- 32 files changed, 1121 insertions(+), 331 deletions(-) create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Data/Set.enso create mode 100644 distribution/lib/Standard/Database/0.0.0-dev/src/Data/Update_Action.enso create mode 100644 distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Database_Table.enso create mode 100644 
distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Default_Helpers.enso create mode 100644 distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_In_Memory_Table.enso delete mode 100644 distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso create mode 100644 distribution/lib/Standard/Database/0.0.0-dev/src/Internal/In_Transaction.enso create mode 100644 distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso create mode 100644 test/Tests/src/Data/Set_Spec.enso diff --git a/CHANGELOG.md b/CHANGELOG.md index 7e79b9f292f3..c35032d9dc27 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -477,6 +477,8 @@ - [Added execution control to `Table.write` and various bug fixes.][6835] - [Implemented `Table.add_row_number`.][6890] - [Handling edge cases in rounding.][6922] +- [Split `Table.create_database_table` into `Connection.create_table` and + `Table.select_into_database_table`.][6925] [debug-shortcuts]: https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug @@ -693,6 +695,7 @@ [6835]: https://github.com/enso-org/enso/pull/6835 [6890]: https://github.com/enso-org/enso/pull/6890 [6922]: https://github.com/enso-org/enso/pull/6922 +[6925]: https://github.com/enso-org/enso/pull/6925 #### Enso Compiler diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso index 03af74175e92..08338b088d44 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso @@ -58,11 +58,11 @@ type Map key value Arguments: - vec: A vector of key-value pairs (2 element vectors). - - allow_duplicates: A flag which specifies if duplicate keys on the input - vector are allowed. By default, set to `False`, meaning that if two - entries in the vector share the same key, an `Illegal_Argument.Error` - will be thrown. 
If set to `True`, the last entry with a given key will - be kept. + - error_on_duplicates: A flag which specifies if duplicate keys on the + input vector should result in an error. By default, set to `True`, + meaning that if two entries in the vector share the same key, an + `Illegal_Argument` error is raised. If set to `False`, the last entry + with a given key will be kept. > Example Building a map containing two key-value pairs. @@ -70,12 +70,12 @@ type Map key value import Standard.Base.Data.Map.Map example_from_vector = Map.from_vector [["A", 1], ["B", 2]] - from_vector : Vector Any -> Boolean -> Map - from_vector vec allow_duplicates=False = + from_vector : Vector Any -> Boolean -> Map ! Illegal_Argument + from_vector vec error_on_duplicates=True = vec.fold Map.empty m-> el-> if el.length != 2 then Error.throw (Illegal_Argument.Error "`Map.from_vector` encountered an invalid value. Each value in the vector has to be a key-value pair - it must have exactly 2 elements.") else key = el.at 0 value = el.at 1 - if allow_duplicates || (m.contains_key key . not) then m.insert key value else + if error_on_duplicates.not || (m.contains_key key . not) then m.insert key value else Error.throw (Illegal_Argument.Error "`Map.from_vector` encountered duplicate key: "+key.to_display_text) ## Returns True iff the Map is empty, i.e., does not have any entries. 
@@ -264,7 +264,7 @@ type Map key value transform self function = func_pairs = p -> function (p.at 0) (p.at 1) vec_transformed = self.to_vector.map func_pairs - new_map = Map.from_vector vec_transformed allow_duplicates=False + new_map = Map.from_vector vec_transformed error_on_duplicates=True new_map.catch Illegal_Argument error-> case error.message.starts_with "`Map.from_vector` encountered duplicate key" of True -> diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Set.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Set.enso new file mode 100644 index 000000000000..f3b291970a53 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Set.enso @@ -0,0 +1,109 @@ +import project.Any.Any +import project.Data.Array_Proxy.Array_Proxy +import project.Data.Map.Map +import project.Data.Numbers.Integer +import project.Data.Ordering.Comparable +import project.Data.Ordering.Ordering +import project.Data.Vector.Vector +import project.Data.Text.Extensions +import project.Data.Text.Text +import project.Errors.Illegal_Argument.Illegal_Argument +import project.Nothing.Nothing +import project.Panic.Panic + +from project.Data.Boolean import Boolean, True, False + +## UNSTABLE + An unordered collection of unique values. +type Set + ## PRIVATE + Value (underlying_map : Map Any Nothing) + + ## Constructs a new set from a vector. + + Arguments: + - vector: the vector of elements to add to the set. + - error_on_duplicates: specifies if duplicate elements in the input + should result in an error. Defaults to `False`, meaning that the last + occurrence of each duplicated element is retained in the set. If set to + `True` it will raise an `Illegal_Argument` if duplicate elements are + encountered. + from_vector : Vector Any -> Boolean -> Set ! 
Illegal_Argument + from_vector (vector : Vector) (error_on_duplicates : Boolean = False) = + pairs_array = Array_Proxy.new vector.length (i-> [vector.at i, Nothing]) + pairs = Vector.from_polyglot_array pairs_array + map = Map.from_vector pairs error_on_duplicates=error_on_duplicates + Set.Value map + + ## Constructs an empty set. + empty : Set + empty = Set.Value Map.empty + + ## Returns a vector containing all elements of this set. + to_vector : Vector + to_vector self = self.underlying_map.keys + + ## Returns the number of elements in this set. + size : Integer + size self = self.underlying_map.size + + ## Checks if the set is empty. + is_empty : Boolean + is_empty self = self.underlying_map.is_empty + + ## Checks if the set is not empty. + not_empty : Boolean + not_empty self = self.underlying_map.not_empty + + ## Checks if this set contains a given value. + contains : Any -> Boolean + contains self value = self.underlying_map.contains_key value + + ## ALIAS Add + Adds a value to this set. + insert : Any -> Set + insert self value = + new_map = self.underlying_map.insert value Nothing + Set.Value new_map + + ## Creates a union of the two sets. + union : Set -> Set + union self (other : Set) = + start_map = self.underlying_map + new_map = other.to_vector.fold start_map m-> el-> m.insert el Nothing + Set.Value new_map + + ## Creates an intersection of the two sets. + intersection : Set -> Set + intersection self (other : Set) = + other_map = other.underlying_map + new_map = self.underlying_map.keys.fold Map.empty m-> el-> + if other_map.contains_key el then m.insert el Nothing else m + Set.Value new_map + + ## Computes a set difference. + + Returns the set that contains all elements of this set that are not in + the other set. 
+ difference : Set -> Set + difference self (other : Set) = + other_map = other.underlying_map + new_map = self.underlying_map.keys.fold Map.empty m-> el-> + if other_map.contains_key el then m else m.insert el Nothing + Set.Value new_map + + ## PRIVATE + to_text : Text + to_text self = self.to_vector.join ", " "Set{" "}" + +## PRIVATE +type Set_Comparator + ## PRIVATE + compare x y = + if x.size != y.size then Nothing else + if (x.difference y).is_empty then Ordering.Equal else Nothing + + ## PRIVATE + hash x = + vec = x.to_vector.sort . remove_warnings + Comparable.from vec . hash vec diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso index dd0822e2858d..5633b1bf3190 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso @@ -4,6 +4,7 @@ import project.Data.Boolean import project.Data.List.List import project.Data.Numbers import project.Data.Map.Map +import project.Data.Set.Set import project.Data.Text.Text import project.Data.Vector.Vector import project.Error.Error @@ -36,6 +37,7 @@ export project.Any.Any export project.Data.Array.Array export project.Data.List.List export project.Data.Map.Map +export project.Data.Set.Set export project.Data.Text.Text export project.Data.Vector.Vector export project.Error.Error diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso index 371acdce05bf..fd5140ce6fa8 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso @@ -9,11 +9,12 @@ from Standard.Base.Metadata.Choice import Option import Standard.Base.Metadata.Display import Standard.Table.Data.Table.Table as Materialized_Table +import Standard.Table.Data.Type.Value_Type.Value_Type import 
project.Data.SQL_Query.SQL_Query import project.Data.SQL_Statement.SQL_Statement import project.Data.SQL_Type.SQL_Type -import project.Data.Table.Table +import project.Data.Table.Table as Database_Table import project.Data.Table as Database_Table_Module import project.Internal.IR.Context.Context import project.Internal.IR.SQL_Expression.SQL_Expression @@ -23,7 +24,8 @@ import project.Internal.Statement_Setter.Statement_Setter from project.Internal.Result_Set import read_column, result_set_to_table from project.Internal.JDBC_Connection import handle_sql_errors -from project.Errors import SQL_Error, Table_Not_Found +from project.Errors import SQL_Error, Table_Not_Found, Table_Already_Exists +from project.Internal.Upload_Table import create_table_structure polyglot java import java.lang.UnsupportedOperationException polyglot java import java.util.UUID @@ -152,7 +154,7 @@ type Connection - If provided with a `Table_Name` or a text short-hand and the table is not found, a `Table_Not_Found` error is raised. @query make_table_name_selector - query : Text | SQL_Query -> Text -> Table ! Table_Not_Found | SQL_Error + query : Text | SQL_Query -> Text -> Database_Table ! Table_Not_Found | SQL_Error query self query alias="" = case query of _ : Text -> result = self.query alias=alias <| @@ -162,7 +164,7 @@ type Connection case self.dialect.is_probably_a_query query of True -> result False -> - Error.throw (Table_Not_Found.Error query sql_error treated_as_query=True) + Error.throw (Table_Not_Found.Error query sql_error treated_as_query=True extra_message="") SQL_Query.Raw_SQL raw_sql -> handle_sql_errors <| self.jdbc_connection.ensure_query_has_no_holes raw_sql . 
if_not_error <| columns = self.fetch_columns raw_sql Statement_Setter.null @@ -177,7 +179,7 @@ type Connection columns = self.fetch_columns statement statement_setter Database_Table_Module.make_table self name columns ctx result.catch SQL_Error sql_error-> - Error.throw (Table_Not_Found.Error name sql_error treated_as_query=False) + Error.throw (Table_Not_Found.Error name sql_error treated_as_query=False extra_message="") ## PRIVATE Execute the query and load the results into memory as a Table. @@ -187,10 +189,53 @@ type Connection If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. - limit: the maximum number of rows to return. @query make_table_name_selector - read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table + read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table ! Table_Not_Found read self query limit=Nothing = self.query query . read max_rows=limit + ## PRIVATE + Creates a new empty table in the database and returns a query referencing + the new table. + + Arguments: + - table_name: the name of the table to create. If not provided, a random + name will be generated for temporary tables. If `temporary=False`, then + a name must be provided. + - structure: the structure of the table. This can be provided as a vector + of pairs of column names and types or an existing `Table` to copy the + structure from it. Note that if a `Table` is provided, only its column + structure is inherited - no table content is copied. + - primary_key: the names of the columns to use as the primary key. The + first column from the table is used by default. If it is set to + `Nothing` or an empty vector, no primary key will be created. + - temporary: if set to `True`, the table will be temporary, meaning that + it will be dropped once the `connection` is closed. Defaults to + `False`. 
+ - allow_existing: Defaults to `False`, meaning that if the table with the + provided name already exists, an error will be raised. If set to `True`, + the existing table will be returned instead. Note that the existing + table is not guaranteed to have the same structure as the one provided. + - on_problems: the behavior to use when encountering non-fatal problems. + Defaults to reporting them as warning. + + ! Error Conditions + + - If a table with the given name already exists, then a + `Table_Already_Exists` error is raised. + - If a column type is not supported and is coerced to a similar + supported type, an `Inexact_Type_Coercion` problem is reported + according to the `on_problems` setting. + - If a column type is not supported and there is no replacement (e.g. + native Enso types), an `Unsupported_Type` error is raised. + - If the provided primary key columns are not present in table + structure provided, `Missing_Input_Columns` error is raised. + - An `SQL_Error` may be reported if there is a failure on the database + side. + create_table : Text|Nothing -> Vector (Pair Text Value_Type) | Database_Table | Materialized_Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> Database_Table ! Table_Already_Exists + create_table self (table_name : Text | Nothing = Nothing) (structure : Vector | Database_Table | Materialized_Table) (primary_key : (Vector Text | Nothing) = [first_column_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) = + created_table_name = create_table_structure self table_name structure primary_key temporary allow_existing on_problems + self.query (SQL_Query.Table_Name created_table_name) + ## PRIVATE Internal read function for a statement with optional types. @@ -260,3 +305,8 @@ make_table_name_selector : Connection -> Widget make_table_name_selector connection = tables_to_display = connection.tables.at "Name" . 
to_vector Single_Choice display=Display.Always values=(tables_to_display.map t-> Option t t.pretty) + +## PRIVATE +first_column_in_structure structure = case structure of + _ : Vector -> structure.first.first + _ -> structure.column_names.first diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Update_Action.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Update_Action.enso new file mode 100644 index 000000000000..584b93ec99ca --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Update_Action.enso @@ -0,0 +1,14 @@ +type Update_Action + ## Records are appended but if cause a primary key clash will fail. + Insert + + ## Just update the existing records. Unmatched columns are left unchanged. + Errors if any record is not matched in the target table. + Update + + ## Append the records to the new table if not found. + Updates existing records to the new values. Unmatched columns are left unchanged. + Update_Or_Insert + + ## Appends new records, updates existing records, removes records not in the target table + Align_Records diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso index a12bca5a15df..b6a7dd63269e 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso @@ -126,14 +126,23 @@ type Table_Not_Found for the table was executed. - treated_as_query: Whether the table name was treated as a raw query string. - Error (name:Text) (related_query_error:SQL_Error) (treated_as_query:Boolean) + - extra_message: An extra message to append. + Error (name:Text) (related_query_error:SQL_Error|Nothing) (treated_as_query:Boolean) (extra_message:Text) ## PRIVATE Pretty print the table not found error. 
to_display_text : Text - to_display_text self = case self.treated_as_query of - True -> "The name " + self.name + " was treated as a query, but the query failed with the following error: " + self.related_query_error.to_display_text + "; if you want to force to use that as a table name, wrap it in `SQL_Query.Table_Name`." - False -> "Table " + self.name + " was not found in the database." + to_display_text self = + base_repr = case self.treated_as_query of + True -> "The name " + self.name + " was treated as a query, but the query failed with the following error: " + self.related_query_error.to_display_text + "; if you want to force to use that as a table name, wrap it in `SQL_Query.Table_Name`." + False -> "Table " + self.name + " was not found in the database." + base_repr + self.extra_message + + ## PRIVATE + Creates a copy of this error with a changed `extra_message`. + with_changed_extra_message : Table_Not_Found + with_changed_extra_message self new_extra_message = + Table_Not_Found.Error self.name self.related_query_error self.treated_as_query new_extra_message type Table_Already_Exists ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Database_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Database_Table.enso new file mode 100644 index 000000000000..580622282807 --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Database_Table.enso @@ -0,0 +1,88 @@ +from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +import Standard.Table.Internal.Widget_Helpers +from Standard.Table.Errors import all +from Standard.Table import Column_Selector + +import project.Connection.Connection.Connection +import project.Data.Table.Table +import project.Data.Update_Action.Update_Action +from project.Errors import all +from project.Extensions.Upload_Default_Helpers import default_key_columns +from project.Internal.Upload_Table import 
all + +## Creates a new database table from this table. + + Arguments: + - connection: the database connection to use. The table will be created in + the database and schema associated with this connection. + - table_name: the name of the table to create. If not provided, a random name + will be generated for temporary tables. If `temporary=False`, then a name + must be provided. + - primary_key: the names of the columns to use as the primary key. The first + column from the table is used by default. If it is set to `Nothing` or an + empty vector, no primary key will be created. + - temporary: if set to `True`, the table will be temporary, meaning that it + will be dropped once the `connection` is closed. Defaults to `False`. + - on_problems: the behavior to use when encountering non-fatal problems. + Defaults to reporting them as warning. + + ! Error Conditions + + - If a table with the given name already exists, then a + `Table_Already_Exists` error is raised. + - If a column type is not supported and is coerced to a similar supported + type, an `Inexact_Type_Coercion` problem is reported according to the + `on_problems` setting. + - If a column type is not supported and there is no replacement (e.g. + native Enso types), an `Unsupported_Type` error is raised. + - If the provided primary key columns are not present in the source table, + `Missing_Input_Columns` error is raised. + - If the selected primary key columns are not unique, a + `Non_Unique_Primary_Key` error is raised. + - An `SQL_Error` may be reported if there is a failure on the database + side. + + If an error has been raised, the table is not created (that may not always + apply to `SQL_Error`). +@primary_key Widget_Helpers.make_column_name_vector_selector +Table.select_into_database_table : Connection -> Text|Nothing -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> Table ! 
Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Primary_Key | SQL_Error | Illegal_Argument +Table.select_into_database_table self connection table_name=Nothing primary_key=[self.columns.first.name] temporary=False on_problems=Problem_Behavior.Report_Warning = Panic.recover SQL_Error <| + upload_database_table self connection table_name primary_key temporary on_problems + +## Updates the target table with the contents of this table. + + Arguments: + - connection: the database connection of the target table. + - table_name: the name of the table to update. + - update_action: specifies the update strategy - how to handle existing new + and missing rows. + - key_columns: the names of the columns to use identify correlate rows from + the source table with rows in the target table. This key is used to + determine if a row from the source table exists in the target or is a new + one. + - error_on_missing_columns: if set to `False` (the default), any columns + missing from the source table will be left unchanged or initialized with + the default value if inserting. If a missing column has no default value, + this will trigger a `SQL_Error`. If set to `True`, any columns missing from + the source will cause an error. + - on_problems: the behavior to use when encountering non-fatal problems. + + ! Error Conditions + + - If `key_columns` are not present in either the source or target tables, a + `Missing_Input_Columns` error is raised. + - If the target table does not exist, a `Table_Not_Found` error is raised. + - If `error_on_missing_columns` is set to `True` and a column is missing + from the source table, a `Missing_Input_Columns` error is raised. + - If the source table contains columns that are not present in the target + table, an `Unmatched_Columns` error is raised. 
+ - If a column in the source table has a type that cannot be trivially + widened to the corresponding column in the target table, a + `Column_Type_Mismatch` error is raised. + + If any error was raised, the data in the target table is not modified. +Table.update_database_table : Connection -> Text -> Update_Action -> Vector Text-> Boolean -> Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument +Table.update_database_table connection (table_name : Text) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector = default_key_columns connection table_name) (error_on_missing_columns : Boolean = False) = + common_update_table self connection table_name update_action key_columns error_on_missing_columns diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Default_Helpers.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Default_Helpers.enso new file mode 100644 index 000000000000..d1bda2f5c7b4 --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Default_Helpers.enso @@ -0,0 +1,37 @@ +from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +from Standard.Table import Column_Selector + +from project.Internal.Result_Set import result_set_to_table + +## PRIVATE +default_key_columns connection table_name = + keys = get_primary_key connection table_name + keys.catch Any _-> + Error.throw (Illegal_Argument.Error "Could not determine the primary key for table "+table_name+". Please provide it explicitly.") + +## PRIVATE + + This method may not work correctly with temporary tables, possibly resulting + in `SQL_Error` as such tables may not be found. + + ! Temporary Tables in SQLite + + The temporary tables in SQLite live in a `temp` database. 
There is a bug in + how JDBC retrieves primary keys - it only queries the `sqlite_schema` table + which contains schemas of only permanent tables. + + Ideally, we should provide a custom implementation for SQLite that will + UNION both `sqlite_schema` and `temp.sqlite_schema` tables to get results + for both temporary and permanent tables. + + TODO [RW] fix keys for SQLite temporary tables and test it +get_primary_key connection table_name = + connection.jdbc_connection.with_connection java_connection-> + rs = java_connection.getMetaData.getPrimaryKeys Nothing Nothing table_name + keys_table = result_set_to_table rs connection.dialect.make_column_fetcher_for_type + # The names of the columns are sometimes lowercase and sometimes uppercase, so we do a case insensitive select first. + selected = keys_table.select_columns [Column_Selector.By_Name "COLUMN_NAME", Column_Selector.By_Name "KEY_SEQ"] reorder=True + key_column_names = selected.order_by 1 . at 0 . to_vector + if key_column_names.is_empty then Nothing else key_column_names diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_In_Memory_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_In_Memory_Table.enso new file mode 100644 index 000000000000..a26a8b3d0c09 --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_In_Memory_Table.enso @@ -0,0 +1,93 @@ +from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +import Standard.Table.Data.Table.Table +import Standard.Table.Internal.Widget_Helpers +from Standard.Table.Errors import all +from Standard.Table import Column_Selector + +import project.Connection.Connection.Connection +import project.Data.Table.Table as Database_Table +import project.Data.Update_Action.Update_Action +from project.Errors import all +from project.Extensions.Upload_Default_Helpers import default_key_columns +from project.Internal.Upload_Table import all + +## Creates 
a new database table from this in-memory table. + + Arguments: + - connection: the database connection to use. The table will be created in + the database and schema associated with this connection. + - table_name: the name of the table to create. If not provided, a random name + will be generated for temporary tables. If `temporary=False`, then a name + must be provided. + - primary_key: the names of the columns to use as the primary key. The first + column from the table is used by default. If it is set to `Nothing` or an + empty vector, no primary key will be created. + - temporary: if set to `True`, the table will be temporary, meaning that it + will be dropped once the `connection` is closed. Defaults to `False`. + - on_problems: the behavior to use when encountering non-fatal problems. + Defaults to reporting them as warning. + + ! Error Conditions + + - If a table with the given name already exists, then a + `Table_Already_Exists` error is raised. + - If a column type is not supported and is coerced to a similar supported + type, an `Inexact_Type_Coercion` problem is reported according to the + `on_problems` setting. + - If a column type is not supported and there is no replacement (e.g. + native Enso types), an `Unsupported_Type` error is raised. + - If the provided primary key columns are not present in the source table, + `Missing_Input_Columns` error is raised. + - If the selected primary key columns are not unique, a + `Non_Unique_Primary_Key` error is raised. + - An `SQL_Error` may be reported if there is a failure on the database + side. + + If an error has been raised, the table is not created (that may not always + apply to `SQL_Error`). +@primary_key Widget_Helpers.make_column_name_vector_selector +Table.select_into_database_table : Connection -> Text|Nothing -> Vector Text | Nothing -> Boolean -> Problem_Behavior -> Database_Table ! 
Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Primary_Key | SQL_Error | Illegal_Argument +Table.select_into_database_table self connection table_name=Nothing primary_key=[self.columns.first.name] temporary=False on_problems=Problem_Behavior.Report_Warning = + upload_in_memory_table self connection table_name primary_key temporary on_problems + +## Updates the target table with the contents of this table. + + Arguments: + - connection: the database connection of the target table. + - table_name: the name of the table to update. + - update_action: specifies the update strategy - how to handle existing new + and missing rows. + - key_columns: the names of the columns to use identify correlate rows from + the source table with rows in the target table. This key is used to + determine if a row from the source table exists in the target or is a new + one. The key can be an empty list if the action is `Insert` - then all rows + are treated as new (but the Database may still reject them with `SQL_Error` + if they violate an integrity constraint). + - error_on_missing_columns: if set to `False` (the default), any columns + missing from the source table will be left unchanged or initialized with + the default value if inserting. If a missing column has no default value, + this will trigger a `SQL_Error`. If set to `True`, any columns missing from + the source will cause an error. + - on_problems: the behavior to use when encountering non-fatal problems. + + ! Error Conditions + + - If `key_columns` are not present in either the source or target tables, a + `Missing_Input_Columns` error is raised. + - If no `key_columns` are specified and the `update_action` is other than + `Insert`, an `Illegal_Argument` error is raised. + - If the target table does not exist, a `Table_Not_Found` error is raised. + - If `error_on_missing_columns` is set to `True` and a column is missing + from the source table, a `Missing_Input_Columns` error is raised. 
+ - If the source table contains columns that are not present in the target + table, an `Unmatched_Columns` error is raised. + - If a column in the source table has a type that cannot be trivially + widened to the corresponding column in the target table, a + `Column_Type_Mismatch` error is raised. + + If any error was raised, the data in the target table is not modified. +Table.update_database_table : Connection -> Text -> Update_Action -> Vector Text-> Boolean -> Database_Table ! Table_Not_Found | Unmatched_Columns | Missing_Input_Columns | Column_Type_Mismatch | SQL_Error | Illegal_Argument +Table.update_database_table connection (table_name : Text) (update_action : Update_Action = Update_Action.Update_Or_Insert) (key_columns : Vector = default_key_columns connection table_name) (error_on_missing_columns : Boolean = False) = + common_update_table self connection table_name update_action key_columns error_on_missing_columns diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso deleted file mode 100644 index 7f2130e0f416..000000000000 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Extensions/Upload_Table.enso +++ /dev/null @@ -1,214 +0,0 @@ -from Standard.Base import all -from Standard.Base.Random import random_uuid -import Standard.Base.Errors.Illegal_Argument.Illegal_Argument - -import Standard.Table.Data.Table.Table as In_Memory_Table -from Standard.Table.Errors import all -from Standard.Table import Aggregate_Column - -import project.Connection.Connection.Connection -import project.Data.SQL_Query.SQL_Query -import project.Data.SQL_Statement.SQL_Statement -import project.Data.Table.Table as Database_Table -import project.Internal.IR.Query.Query -import project.Internal.IR.SQL_Expression.SQL_Expression -from project.Errors import all - -## Creates a new database table from this in-memory table. 
- - Arguments: - - connection: the database connection to use. The table will be created in - the database and schema associated with this connection. - - table_name: the name of the table to create. If not provided, a random name - will be generated for temporary tables. If `temporary=False`, then a name - must be provided. - - primary_key: the names of the columns to use as the primary key. The first - column from the table is used by default. If it is set to `Nothing` or an - empty vector, no primary key will be created. - - temporary: if set to `True`, the table will be temporary, meaning that it - will be dropped once the `connection` is closed. Defaults to `False`. - - structure_only: if set to `True`, the created table will inherit the - structure (column names and types) of the source table, but no rows will be - inserted. Defaults to `False`. - - on_problems: the behavior to use when encountering non-fatal problems. - Defaults to reporting them as warning. - - ! Error Conditions - - - If a table with the given name already exists, then a - `Table_Already_Exists` error is raised. - - If a column type is not supported and is coerced to a similar supported - type, an `Inexact_Type_Coercion` problem is reported according to the - `on_problems` setting. - - If a column type is not supported and there is no replacement (e.g. - native Enso types), an `Unsupported_Type` error is raised. - - If the provided primary key columns are not present in the source table, - `Missing_Input_Columns` error is raised. - - If the selected primary key columns are not unique, a - `Non_Unique_Primary_Key` error is raised. - - An `SQL_Error` may be reported if there is a failure on the database - side. - - If an error has been raised, the table is not created (that may not always - apply to `SQL_Error`). -In_Memory_Table.create_database_table : Connection -> Text|Nothing -> (Vector Text) | Nothing -> Boolean -> Boolean -> Problem_Behavior -> Database_Table ! 
Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Primary_Key | SQL_Error | Illegal_Argument -In_Memory_Table.create_database_table self connection table_name=Nothing primary_key=[self.columns.first.name] temporary=False structure_only=False on_problems=Problem_Behavior.Report_Warning = Panic.recover SQL_Error <| - resolved_primary_key = resolve_primary_key self primary_key - effective_table_name = resolve_effective_table_name table_name temporary - create_table_statement = prepare_create_table_statement connection effective_table_name self.columns resolved_primary_key temporary on_problems - - ## `create_table_statement.if_not_error` is used to ensure that if there are - any dataflow errors up to this point, we want to propagate them and not - continue. Otherwise, they could 'leak' to `Panic.rethrow` and be wrongly - raised as panics. - upload_status = create_table_statement.if_not_error <| - translate_known_upload_errors self connection resolved_primary_key <| - connection.jdbc_connection.run_within_transaction <| - Panic.rethrow <| connection.execute_update create_table_statement - if structure_only.not then - insert_template = make_batched_insert_template connection effective_table_name self.column_names - statement_setter = connection.dialect.get_statement_setter - Panic.rethrow <| connection.jdbc_connection.batch_insert insert_template statement_setter self default_batch_size - - upload_status.if_not_error <| - connection.query (SQL_Query.Table_Name effective_table_name) - -## Creates a new database table from this table. - - Arguments: - - connection: the database connection to use. The table will be created in - the database and schema associated with this connection. - - table_name: the name of the table to create. If not provided, a random name - will be generated for temporary tables. If `temporary=False`, then a name - must be provided. - - primary_key: the names of the columns to use as the primary key. 
The first - column from the table is used by default. If it is set to `Nothing` or an - empty vector, no primary key will be created. - - temporary: if set to `True`, the table will be temporary, meaning that it - will be dropped once the `connection` is closed. Defaults to `False`. - - structure_only: if set to `True`, the created table will inherit the - structure (column names and types) of the source table, but no rows will be - inserted. Defaults to `False`. - - on_problems: the behavior to use when encountering non-fatal problems. - Defaults to reporting them as warning. - - ! Error Conditions - - - If a table with the given name already exists, then a - `Table_Already_Exists` error is raised. - - If a column type is not supported and is coerced to a similar supported - type, an `Inexact_Type_Coercion` problem is reported according to the - `on_problems` setting. - - If a column type is not supported and there is no replacement (e.g. - native Enso types), an `Unsupported_Type` error is raised. - - If the provided primary key columns are not present in the source table, - `Missing_Input_Columns` error is raised. - - If the selected primary key columns are not unique, a - `Non_Unique_Primary_Key` error is raised. - - An `SQL_Error` may be reported if there is a failure on the database - side. - - If an error has been raised, the table is not created (that may not always - apply to `SQL_Error`). -Database_Table.create_database_table : Connection -> Text|Nothing -> (Vector Text) | Nothing -> Boolean -> Boolean -> Problem_Behavior -> Database_Table ! 
Table_Already_Exists | Inexact_Type_Coercion | Missing_Input_Columns | Non_Unique_Primary_Key | SQL_Error | Illegal_Argument -Database_Table.create_database_table self connection table_name=Nothing primary_key=[self.columns.first.name] temporary=False structure_only=False on_problems=Problem_Behavior.Report_Warning = Panic.recover SQL_Error <| - resolved_primary_key = resolve_primary_key self primary_key - effective_table_name = resolve_effective_table_name table_name temporary - create_table_statement = prepare_create_table_statement connection effective_table_name self.columns resolved_primary_key temporary on_problems - connection_check = if self.connection.jdbc_connection == connection.jdbc_connection then True else - Error.throw (Unsupported_Database_Operation.Error "The Database table to be uploaded must be coming from the same connection as the connection on which the new table is being created. Cross-connection uploads are currently not supported. To work around this, you can first `.read` the table into memory and then upload it from memory to a different connection.") - - upload_status = connection_check.if_not_error <| create_table_statement.if_not_error <| - translate_known_upload_errors self connection resolved_primary_key <| - connection.jdbc_connection.run_within_transaction <| - Panic.rethrow <| connection.execute_update create_table_statement - if structure_only.not then - ## We need to ensure that the columns in this statement are matching - positionally the columns in `create_table_statement`. But we - create both from the same table, so that is guaranteed. 
- copy_into_statement = connection.dialect.generate_sql <| - Query.Insert_From_Select effective_table_name self.to_select_query - Panic.rethrow <| connection.execute_update copy_into_statement - - upload_status.if_not_error <| - connection.query (SQL_Query.Table_Name effective_table_name) - -## PRIVATE - Ensures that provided primary key columns are present in the table and that - there are no duplicates. -resolve_primary_key table primary_key = case primary_key of - Nothing -> Nothing - _ : Vector -> if primary_key.is_empty then Nothing else - table.select_columns primary_key reorder=True . column_names - -## PRIVATE - Inspects any `SQL_Error` thrown and replaces it with a more precise error - type when available. -translate_known_upload_errors source_table connection primary_key ~action = - handler caught_panic = - error_mapper = connection.dialect.get_error_mapper - sql_error = caught_panic.payload - case error_mapper.is_primary_key_violation sql_error of - True -> raise_duplicated_primary_key_error source_table primary_key caught_panic - False -> Panic.throw caught_panic - Panic.catch SQL_Error action handler - -## PRIVATE - Creates a `Non_Unique_Primary_Key` error containing information about an - example group violating the uniqueness constraint. -raise_duplicated_primary_key_error source_table primary_key original_panic = - agg = source_table.aggregate [Aggregate_Column.Count]+(primary_key.map Aggregate_Column.Group_By) - filtered = agg.filter column=0 (Filter_Condition.Greater than=1) - materialized = filtered.read max_rows=1 - case materialized.row_count == 0 of - ## If we couldn't find a duplicated key, we give up the translation and - rethrow the original panic containing the SQL error. This could - happen if the constraint violation is on some non-trivial key, like - case insensitive. 
- True -> Panic.throw original_panic - False -> - row = materialized.first_row.to_vector - example_count = row.first - example_entry = row.drop 1 - Error.throw (Non_Unique_Primary_Key.Error primary_key example_entry example_count) - - -## PRIVATE - Creates a statement that will create a table with structure determined by the - provided columns. - - The `primary_key` columns must be present in `columns`, but it is the - responsibility of the caller to ensure that, otherwise the generated - statement will be invalid. -prepare_create_table_statement : Connection -> Text -> Vector -> Vector Text -> Boolean -> Problem_Behavior -> SQL_Statement -prepare_create_table_statement connection table_name columns primary_key temporary on_problems = - type_mapping = connection.dialect.get_type_mapping - column_descriptors = columns.map column-> - name = column.name - value_type = column.value_type - sql_type = type_mapping.value_type_to_sql value_type on_problems - sql_type_text = type_mapping.sql_type_to_text sql_type - Pair.new name sql_type_text - connection.dialect.generate_sql <| - Query.Create_Table table_name column_descriptors primary_key temporary - -## PRIVATE - Generates a random table name if it was nothing, if it is allowed (temporary=True). -resolve_effective_table_name table_name temporary = case table_name of - Nothing -> if temporary then "temporary-table-"+random_uuid else - Error.throw (Illegal_Argument.Error "A name must be provided when creating a non-temporary table.") - _ : Text -> table_name - -## PRIVATE - The recommended batch size seems to be between 50 and 100. 
- See: https://docs.oracle.com/cd/E18283_01/java.112/e16548/oraperf.htm#:~:text=batch%20sizes%20in%20the%20general%20range%20of%2050%20to%20100 -default_batch_size = 100 - -## PRIVATE -make_batched_insert_template : Connection -> Text -> Vector (Vector Text) -> SQL_Query -make_batched_insert_template connection table_name column_names = - # We add Nothing as placeholders, they will be replaced with the actual values later. - pairs = column_names.map name->[name, SQL_Expression.Constant Nothing] - query = connection.dialect.generate_sql <| Query.Insert table_name pairs - template = query.prepare.first - template diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/In_Transaction.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/In_Transaction.enso new file mode 100644 index 000000000000..5468f4cb6b54 --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/In_Transaction.enso @@ -0,0 +1,23 @@ +from Standard.Base import all +from Standard.Base.Errors.Common import Uninitialized_State +import Standard.Base.Errors.Illegal_State.Illegal_State +import Standard.Base.Runtime.State + +## PRIVATE +type In_Transaction + ## PRIVATE + Checks if a transaction is currently being run. + is_in_transaction : Boolean + is_in_transaction = + Panic.catch Uninitialized_State (State.get In_Transaction) (_->False) + + ## PRIVATE + Executes the provided action marking as being run within a transaction. + mark_running_in_transaction ~action = + State.run In_Transaction True action + + ## PRIVATE + Runs the provided action, failing if the call is not made from within a + transaction. 
+ ensure_in_transaction ~action = + if In_Transaction.is_in_transaction then action else Error.throw (Illegal_State.Error "`ensure_in_transaction` called outside of a transaction.") diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso index fec77e9af8df..41e3efd64ce7 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso @@ -11,6 +11,7 @@ import project.Data.SQL_Statement.SQL_Statement import project.Data.SQL_Type.SQL_Type import project.Data.Table.Table as Database_Table import project.Internal.Statement_Setter.Statement_Setter +import project.Internal.In_Transaction.In_Transaction from project.Errors import SQL_Error, SQL_Timeout @@ -140,13 +141,15 @@ type JDBC_Connection take precedence over the original panic that caused that rollback. run_within_transaction : Any -> Any run_within_transaction self ~action = + if In_Transaction.is_in_transaction then + Panic.throw (Illegal_State.Error "`run_within_transaction` is executed within an existing transaction. 
Nesting transactions is not allowed as its semantics are unclear.") self.run_without_autocommit <| self.with_connection java_connection-> handle_panic caught_panic = java_connection.rollback Panic.throw caught_panic result = Panic.catch Any handler=handle_panic <| - action + In_Transaction.mark_running_in_transaction action java_connection.commit result diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso index a91855346232..7a3a97fe6e5d 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Connection.enso @@ -6,6 +6,7 @@ from Standard.Base.Metadata.Choice import Option import Standard.Base.Metadata.Display import Standard.Table.Data.Table.Table as Materialized_Table +import Standard.Table.Data.Type.Value_Type.Value_Type import project.Connection.Connection.Connection import project.Data.Dialect @@ -17,8 +18,8 @@ import project.Internal.IR.Query.Query import project.Internal.JDBC_Connection import project.Internal.SQL_Type_Reference.SQL_Type_Reference -from project.Connection.Connection import make_table_types_selector, make_schema_selector, make_table_name_selector -from project.Errors import SQL_Error +from project.Connection.Connection import make_table_types_selector, make_schema_selector, make_table_name_selector, first_column_in_structure +from project.Errors import SQL_Error, Table_Not_Found, Table_Already_Exists from project.Internal.Result_Set import read_column type Postgres_Connection @@ -105,14 +106,24 @@ type Postgres_Connection tables self name_like=Nothing database=self.database schema=Nothing types=self.dialect.default_table_types all_fields=False = self.connection.tables name_like database schema types all_fields - ## Set up a query returning a Table object, which can be 
used to work with data within the database or load it into memory. + ## Set up a query returning a Table object, which can be used to work with + data within the database or load it into memory. Arguments: - query: name of the table or sql statement to query. - If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. + If supplied as `Text`, the name is checked against the `tables` list to + determine if it is a table or a query. - alias: optionally specify a friendly alias for the query. + + ! Error Conditions + + - If provided with a `Raw_SQL` query or `Text` that looks like a query, if + any SQL error occurs when executing the query, a `SQL_Error` error is + raised. + - If provided with a `Table_Name` or a text short-hand and the table is + not found, a `Table_Not_Found` error is raised. @query make_table_name_selector - query : Text | SQL_Query -> Text -> Database_Table + query : Text | SQL_Query -> Text -> Database_Table ! Table_Not_Found query self query alias="" = self.connection.query query alias ## Execute the query and load the results into memory as a Table. @@ -122,9 +133,50 @@ type Postgres_Connection If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. - limit: the maximum number of rows to return. @query make_table_name_selector - read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table + read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table ! Table_Not_Found read self query limit=Nothing = self.connection.read query limit + ## Creates a new empty table in the database and returns a query referencing + the new table. + + Arguments: + - table_name: the name of the table to create. If not provided, a random + name will be generated for temporary tables. If `temporary=False`, then + a name must be provided. + - structure: the structure of the table. 
This can be provided as a vector + of pairs of column names and types or an existing `Table` to copy the + structure from it. Note that if a `Table` is provided, only its column + structure is inherited - no table content is copied. + - primary_key: the names of the columns to use as the primary key. The + first column from the table is used by default. If it is set to + `Nothing` or an empty vector, no primary key will be created. + - temporary: if set to `True`, the table will be temporary, meaning that + it will be dropped once the `connection` is closed. Defaults to + `False`. + - allow_existing: Defaults to `False`, meaning that if the table with the + provided name already exists, an error will be raised. If set to `True`, + the existing table will be returned instead. Note that the existing + table is not guaranteed to have the same structure as the one provided. + - on_problems: the behavior to use when encountering non-fatal problems. + Defaults to reporting them as warning. + + ! Error Conditions + + - If a table with the given name already exists, then a + `Table_Already_Exists` error is raised. + - If a column type is not supported and is coerced to a similar + supported type, an `Inexact_Type_Coercion` problem is reported + according to the `on_problems` setting. + - If a column type is not supported and there is no replacement (e.g. + native Enso types), an `Unsupported_Type` error is raised. + - If the provided primary key columns are not present in table + structure provided, `Missing_Input_Columns` error is raised. + - An `SQL_Error` may be reported if there is a failure on the database + side. + create_table : Text|Nothing -> Vector (Pair Text Value_Type) | Database_Table | Materialized_Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> Database_Table ! 
Table_Already_Exists + create_table self (table_name : Text | Nothing = Nothing) (structure : Vector | Database_Table | Materialized_Table) (primary_key : (Vector Text | Nothing) = [first_column_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) = + self.connection.create_table table_name structure primary_key temporary allow_existing on_problems + ## ADVANCED Executes a raw update query. If the query was inserting, updating or diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso index 7f638d34c260..a1b2143488a9 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Connection.enso @@ -6,6 +6,7 @@ from Standard.Base.Metadata.Choice import Option import Standard.Base.Metadata.Display import Standard.Table.Data.Table.Table as Materialized_Table +import Standard.Table.Data.Type.Value_Type.Value_Type import project.Connection.Connection.Connection import project.Data.SQL_Query.SQL_Query @@ -17,8 +18,8 @@ import project.Internal.IR.Query.Query import project.Internal.JDBC_Connection import project.Internal.SQL_Type_Reference.SQL_Type_Reference -from project.Connection.Connection import make_table_types_selector, make_schema_selector, make_table_name_selector -from project.Errors import SQL_Error +from project.Connection.Connection import make_table_types_selector, make_schema_selector, make_table_name_selector, first_column_in_structure +from project.Errors import SQL_Error, Table_Not_Found, Table_Already_Exists type SQLite_Connection ## PRIVATE @@ -99,14 +100,24 @@ type SQLite_Connection tables self name_like=Nothing database=self.database schema=Nothing types=self.dialect.default_table_types all_fields=False 
= self.connection.tables name_like database schema types all_fields - ## Set up a query returning a Table object, which can be used to work with data within the database or load it into memory. + ## Set up a query returning a Table object, which can be used to work with + data within the database or load it into memory. Arguments: - query: name of the table or sql statement to query. - If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. + If supplied as `Text`, the name is checked against the `tables` list to + determine if it is a table or a query. - alias: optionally specify a friendly alias for the query. + + ! Error Conditions + + - If provided with a `Raw_SQL` query or `Text` that looks like a query, if + any SQL error occurs when executing the query, a `SQL_Error` error is + raised. + - If provided with a `Table_Name` or a text short-hand and the table is + not found, a `Table_Not_Found` error is raised. @query make_table_name_selector - query : Text | SQL_Query -> Text -> Database_Table + query : Text | SQL_Query -> Text -> Database_Table ! Table_Not_Found query self query alias="" = self.connection.query query alias ## Execute the query and load the results into memory as a Table. @@ -116,9 +127,50 @@ type SQLite_Connection If supplied as `Text`, the name is checked against the `tables` list to determine if it is a table or a query. - limit: the maximum number of rows to return. @query make_table_name_selector - read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table + read : Text | SQL_Query -> Integer | Nothing -> Materialized_Table ! Table_Not_Found read self query limit=Nothing = self.connection.read query limit + ## Creates a new empty table in the database and returns a query referencing + the new table. + + Arguments: + - table_name: the name of the table to create. If not provided, a random + name will be generated for temporary tables. 
If `temporary=False`, then + a name must be provided. + - structure: the structure of the table. This can be provided as a vector + of pairs of column names and types or an existing `Table` to copy the + structure from it. Note that if a `Table` is provided, only its column + structure is inherited - no table content is copied. + - primary_key: the names of the columns to use as the primary key. The + first column from the table is used by default. If it is set to + `Nothing` or an empty vector, no primary key will be created. + - temporary: if set to `True`, the table will be temporary, meaning that + it will be dropped once the `connection` is closed. Defaults to + `False`. + - allow_existing: Defaults to `False`, meaning that if the table with the + provided name already exists, an error will be raised. If set to `True`, + the existing table will be returned instead. Note that the existing + table is not guaranteed to have the same structure as the one provided. + - on_problems: the behavior to use when encountering non-fatal problems. + Defaults to reporting them as warning. + + ! Error Conditions + + - If a table with the given name already exists, then a + `Table_Already_Exists` error is raised. + - If a column type is not supported and is coerced to a similar + supported type, an `Inexact_Type_Coercion` problem is reported + according to the `on_problems` setting. + - If a column type is not supported and there is no replacement (e.g. + native Enso types), an `Unsupported_Type` error is raised. + - If the provided primary key columns are not present in table + structure provided, `Missing_Input_Columns` error is raised. + - An `SQL_Error` may be reported if there is a failure on the database + side. + create_table : Text|Nothing -> Vector (Pair Text Value_Type) | Database_Table | Materialized_Table -> Vector Text | Nothing -> Boolean -> Boolean -> Problem_Behavior -> Database_Table ! 
Table_Already_Exists + create_table self (table_name : Text | Nothing = Nothing) (structure : Vector | Database_Table | Materialized_Table) (primary_key : (Vector Text | Nothing) = [first_column_in_structure structure]) (temporary : Boolean = False) (allow_existing : Boolean = False) (on_problems:Problem_Behavior = Problem_Behavior.Report_Warning) = + self.connection.create_table table_name structure primary_key temporary allow_existing on_problems + ## ADVANCED Executes a raw update query. If the query was inserting, updating or diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso new file mode 100644 index 000000000000..a3bf132aacf3 --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso @@ -0,0 +1,248 @@ +from Standard.Base import all +from Standard.Base.Random import random_uuid +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Errors.Illegal_State.Illegal_State + +import Standard.Table.Data.Table.Table as In_Memory_Table +import Standard.Table.Data.Type.Value_Type.Value_Type +from Standard.Table import Aggregate_Column +from Standard.Table.Errors import all + +import project.Connection.Connection.Connection +import project.Data.SQL_Query.SQL_Query +import project.Data.SQL_Statement.SQL_Statement +import project.Data.Table.Table as Database_Table +import project.Data.Update_Action.Update_Action +import project.Internal.In_Transaction.In_Transaction +import project.Internal.IR.Query.Query +import project.Internal.IR.SQL_Expression.SQL_Expression +from project.Connection.Connection import all_known_table_names +from project.Errors import all + +## PRIVATE + Creates a new database table with the provided structure and returns the name + of the created table. 
+create_table_structure connection table_name structure primary_key temporary allow_existing on_problems = + case table_name.is_nothing.not && all_known_table_names connection . contains table_name of + True -> + if allow_existing then table_name else Error.throw (Table_Already_Exists.Error table_name) + False -> + effective_table_name = resolve_effective_table_name table_name temporary + aligned_structure = align_structure structure + resolved_primary_key = resolve_primary_key aligned_structure primary_key + create_table_statement = prepare_create_table_statement connection effective_table_name aligned_structure resolved_primary_key temporary on_problems + update_result = create_table_statement.if_not_error <| + connection.execute_update create_table_statement + update_result.if_not_error <| + effective_table_name + +## PRIVATE + A helper that can upload a table from any backend to a database. + It should be run within a transaction and wrapped in `handle_upload_errors`. +internal_upload_table source_table connection table_name primary_key temporary on_problems = + case source_table of + _ : In_Memory_Table -> + internal_upload_in_memory_table source_table connection table_name primary_key temporary on_problems + _ : Database_Table -> + internal_upload_database_table source_table connection table_name primary_key temporary on_problems + _ -> + Panic.throw <| Illegal_Argument.Error ("Unsupported table type: " + Meta.get_qualified_type_name source_table) + +## PRIVATE +upload_in_memory_table source_table connection table_name primary_key temporary on_problems = + Panic.recover SQL_Error <| handle_upload_errors <| + connection.jdbc_connection.run_within_transaction <| + internal_upload_in_memory_table source_table connection table_name primary_key temporary on_problems + +## PRIVATE + It should be run within a transaction and wrapped in `handle_upload_errors`. 
+internal_upload_in_memory_table source_table connection table_name primary_key temporary on_problems = + In_Transaction.ensure_in_transaction <| + created_table_name = create_table_structure connection table_name structure=source_table primary_key=primary_key temporary=temporary allow_existing=False on_problems=on_problems + column_names = source_table.column_names + + ## `created_table_name.if_not_error` is used to ensure that if there are + any dataflow errors up to this point, we want to propagate them and not + continue. Otherwise, they could 'leak' to `Panic.rethrow` and be wrongly + raised as panics. + upload_status = created_table_name.if_not_error <| + internal_translate_known_upload_errors source_table connection primary_key <| + insert_template = make_batched_insert_template connection created_table_name column_names + statement_setter = connection.dialect.get_statement_setter + Panic.rethrow <| connection.jdbc_connection.batch_insert insert_template statement_setter source_table default_batch_size + + upload_status.if_not_error <| + connection.query (SQL_Query.Table_Name created_table_name) + +## PRIVATE +upload_database_table source_table connection table_name primary_key temporary on_problems = + Panic.recover SQL_Error <| handle_upload_errors <| + connection.jdbc_connection.run_within_transaction <| + internal_upload_database_table source_table connection table_name primary_key temporary on_problems + +## PRIVATE + It should be run within a transaction and wrapped in `handle_upload_errors`. +internal_upload_database_table source_table connection table_name primary_key temporary on_problems = + In_Transaction.ensure_in_transaction <| + connection_check = if source_table.connection.jdbc_connection == connection.jdbc_connection then True else + Error.throw (Unsupported_Database_Operation.Error "The Database table to be uploaded must be coming from the same connection as the connection on which the new table is being created. 
Cross-connection uploads are currently not supported. To work around this, you can first `.read` the table into memory and then upload it from memory to a different connection.") + + connection_check.if_not_error <| + created_table_name = create_table_structure connection table_name structure=source_table primary_key=primary_key temporary=temporary allow_existing=False on_problems=on_problems + upload_status = created_table_name.if_not_error <| + internal_translate_known_upload_errors source_table connection primary_key <| + ## We need to ensure that the columns in this statement are + matching positionally the columns in the newly created + table. But we create both from the same source table, so + that is guaranteed. + copy_into_statement = connection.dialect.generate_sql <| + Query.Insert_From_Select created_table_name source_table.to_select_query + Panic.rethrow <| connection.execute_update copy_into_statement + + upload_status.if_not_error <| + connection.query (SQL_Query.Table_Name created_table_name) + +## PRIVATE + Ensures that provided primary key columns are present in the table and that + there are no duplicates. +resolve_primary_key structure primary_key = case primary_key of + Nothing -> Nothing + _ : Vector -> if primary_key.is_empty then Nothing else + validated = primary_key.map key-> + if key.is_a Text then key else + Error.throw (Illegal_Argument.Error "Primary key must be a vector of column names.") + validated.if_not_error <| + column_names = Set.from_vector (structure.map .first) + missing_columns = (Set.from_vector primary_key).difference column_names + if missing_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_columns.to_vector) else + primary_key + +## PRIVATE + Inspects any `SQL_Error` thrown and replaces it with an error recipe, that is + converted into a proper error in an outer layer. 
+ + The special handling is needed, because computing the + `Non_Unique_Primary_Key` error may need to perform a SQL query that must be + run outside of the just-failed transaction. +internal_translate_known_upload_errors source_table connection primary_key ~action = + handler caught_panic = + error_mapper = connection.dialect.get_error_mapper + sql_error = caught_panic.payload + case error_mapper.is_primary_key_violation sql_error of + True -> Panic.throw (Non_Unique_Primary_Key_Recipe.Recipe source_table primary_key caught_panic) + False -> Panic.throw caught_panic + Panic.catch SQL_Error action handler + +## PRIVATE +handle_upload_errors ~action = + Panic.catch Non_Unique_Primary_Key_Recipe action caught_panic-> + recipe = caught_panic.payload + raise_duplicated_primary_key_error recipe.source_table recipe.primary_key recipe.original_panic + +## PRIVATE +type Non_Unique_Primary_Key_Recipe + ## PRIVATE + Recipe source_table primary_key original_panic + +## PRIVATE + Creates a `Non_Unique_Primary_Key` error containing information about an + example group violating the uniqueness constraint. +raise_duplicated_primary_key_error source_table primary_key original_panic = + agg = source_table.aggregate [Aggregate_Column.Count]+(primary_key.map Aggregate_Column.Group_By) + filtered = agg.filter column=0 (Filter_Condition.Greater than=1) + materialized = filtered.read max_rows=1 + case materialized.row_count == 0 of + ## If we couldn't find a duplicated key, we give up the translation and + rethrow the original panic containing the SQL error. This could + happen if the constraint violation is on some non-trivial key, like + case insensitive. 
+ True -> Panic.throw original_panic + False -> + row = materialized.first_row.to_vector + example_count = row.first + example_entry = row.drop 1 + Error.throw (Non_Unique_Primary_Key.Error primary_key example_entry example_count) + +## PRIVATE +align_structure : Database_Table | In_Memory_Table | Vector (Pair Text Value_Type) -> Vector (Pair Text Value_Type) +align_structure table_or_columns = case table_or_columns of + _ : Vector -> table_or_columns.map pair-> + if pair.length != 2 then Error.throw (Illegal_Argument.Error "The structure must be an existing Table or vector of pairs.") else + name = pair.first + value_type = pair.second + if name . is_a Text . not then Error.throw (Illegal_Argument.Error "Column names must be a Text. Instead, got a: "+name.to_display_text+".") else + if value_type . is_a Value_Type . not then Error.throw (Illegal_Argument.Error "Column value types must be a Value_Type. Instead, got a: "+value_type.to_display_text+".") else + pair + _ -> table_or_columns.columns.map column-> + Pair.new column.name column.value_type + +## PRIVATE + Creates a statement that will create a table with structure determined by the + provided columns. + + The `primary_key` columns must be present in `columns`, but it is the + responsibility of the caller to ensure that, otherwise the generated + statement will be invalid. 
+prepare_create_table_statement : Connection -> Text -> Vector -> Vector Text -> Boolean -> Problem_Behavior -> SQL_Statement +prepare_create_table_statement connection table_name columns primary_key temporary on_problems = + type_mapping = connection.dialect.get_type_mapping + column_descriptors = columns.map pair-> + name = pair.first + value_type = pair.second + sql_type = type_mapping.value_type_to_sql value_type on_problems + sql_type_text = type_mapping.sql_type_to_text sql_type + Pair.new name sql_type_text + connection.dialect.generate_sql <| + Query.Create_Table table_name column_descriptors primary_key temporary + +## PRIVATE + Generates a random table name if it was nothing, if it is allowed (temporary=True). +resolve_effective_table_name table_name temporary = case table_name of + Nothing -> if temporary then "temporary-table-"+random_uuid else + Error.throw (Illegal_Argument.Error "A name must be provided when creating a non-temporary table.") + _ : Text -> table_name + +## PRIVATE + The recommended batch size seems to be between 50 and 100. + See: https://docs.oracle.com/cd/E18283_01/java.112/e16548/oraperf.htm#:~:text=batch%20sizes%20in%20the%20general%20range%20of%2050%20to%20100 +default_batch_size = 100 + +## PRIVATE +make_batched_insert_template : Connection -> Text -> Vector (Vector Text) -> SQL_Query +make_batched_insert_template connection table_name column_names = + # We add Nothing as placeholders, they will be replaced with the actual values later. 
+ pairs = column_names.map name->[name, SQL_Expression.Constant Nothing] + query = connection.dialect.generate_sql <| Query.Insert table_name pairs + template = query.prepare.first + template + +## PRIVATE +common_update_table source_table connection table_name update_action key_columns error_on_missing_columns = + Panic.recover SQL_Error <| handle_upload_errors <| + connection.jdbc_connection.run_within_transaction <| + target_table = connection.query (SQL_Query.Table_Name table_name) + # We catch the `Table_Not_Found` error and handle it specially, if the error was different, it will just get passed through further. + handle_error = target_table.catch Table_Not_Found error-> + # Rethrow the error with more info. + msg_suffix = " Use `Connection.create_table` to create a table before trying to append to it." + new_error = error.with_changed_extra_message msg_suffix + Error.throw new_error + if target_table.is_error then handle_error else + tmp_table_name = "temporary-source-table-"+random_uuid + tmp_table = internal_upload_table source_table connection tmp_table_name key_columns temporary=True structure_only=False on_problems=Problem_Behavior.Report_Error + tmp_table.if_not_error <| + resulting_table = append_to_existing_table tmp_table target_table update_action key_columns error_on_missing_columns + connection.drop_table tmp_table.name + resulting_table + +## PRIVATE +append_to_existing_table source_table target_table update_action key_columns error_on_missing_columns = + source_columns = Set.from_vector source_table.column_names + target_columns = Set.from_vector target_table.column_names + extra_columns = source_columns.difference target_columns + if extra_columns.not_empty then Error.throw (Unmatched_Columns.Error extra_columns) else + missing_columns = target_columns.difference source_columns + if missing_columns.not_empty && error_on_missing_columns then Error.throw (Missing_Input_Columns.Error missing_columns "the source table") else + # TODO [RW] to be 
finished in follow up PR + _ = [update_action, key_columns] + Error.throw (Illegal_State.Error "TODO: Not implemented yet.") diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso index 66e8fec763ba..68426756126c 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso @@ -9,7 +9,8 @@ import project.Connection.SQLite_Details.In_Memory import project.Connection.SQLite_Format.SQLite_Format import project.Connection.SSL_Mode.SSL_Mode import project.Data.SQL_Query.SQL_Query -import project.Extensions.Upload_Table +import project.Extensions.Upload_Database_Table +import project.Extensions.Upload_In_Memory_Table from project.Connection.Postgres_Details.Postgres_Details import Postgres from project.Connection.SQLite_Details.SQLite_Details import SQLite @@ -25,7 +26,8 @@ export project.Connection.SQLite_Details.In_Memory export project.Connection.SQLite_Format.SQLite_Format export project.Connection.SSL_Mode.SSL_Mode export project.Data.SQL_Query.SQL_Query -export project.Extensions.Upload_Table +export project.Extensions.Upload_Database_Table +export project.Extensions.Upload_In_Memory_Table from project.Connection.Postgres_Details.Postgres_Details export Postgres from project.Connection.SQLite_Details.SQLite_Details export SQLite diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Type/Storage.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Type/Storage.enso index ba6aacb30dec..609424bea1a6 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Type/Storage.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Type/Storage.enso @@ -1,5 +1,6 @@ from Standard.Base import all import Standard.Base.Errors.Common.Index_Out_Of_Bounds +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State import 
Standard.Table.Data.Type.Value_Type.Value_Type @@ -50,6 +51,7 @@ closest_storage_type value_type = case value_type of Value_Type.Date_Time _ -> DateTimeType.INSTANCE Value_Type.Time -> TimeOfDayType.INSTANCE Value_Type.Mixed -> AnyObjectType.INSTANCE + _ -> Error.throw (Illegal_Argument.Error "Columns of type "+value_type.to_display_text+" are currently not supported in the in-memory backend.") ## PRIVATE Converts a value type to an in-memory storage type, possibly approximating it diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso index 2678e8032581..ab03417b923c 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso @@ -174,7 +174,7 @@ type Table_Column_Helper _ -> [selectors] selected_columns = vector.map resolve_selector . flatten if reorder then selected_columns.distinct on=_.name else - map = Map.from_vector (selected_columns.map column-> [column.name, True]) allow_duplicates=True + map = Map.from_vector (selected_columns.map column-> [column.name, True]) error_on_duplicates=False self.internal_columns.filter column-> map.contains_key column.name ## PRIVATE diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso index e21e564073a0..e9f90e976018 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso @@ -171,7 +171,7 @@ limit_data limit data = case limit of bounds = case data.fold_with_index (Extreme.Value first first first first) update_extreme of Extreme.Value min_x max_x min_y max_y -> [min_x, max_x, min_y, max_y] _ -> [] - extreme = Map.from_vector bounds allow_duplicates=True . 
values + extreme = Map.from_vector bounds error_on_duplicates=False . values if limit <= extreme.length then extreme.take (First limit) else extreme + data.take (Index_Sub_Range.Sample (limit - extreme.length)) diff --git a/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso index 34405405b934..3ff920c70ec2 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Conversion_Spec.enso @@ -128,6 +128,10 @@ spec setup = c3.value_type . should_equal (Value_Type.Integer Bits.Bits_32) c3.to_vector . should_equal [1, 2, 3] + c4 = c.cast Value_Type.Byte + c4.value_type . should_equal Value_Type.Byte + c4.to_vector . should_equal [1, 2, 3] + Test.specify "should allow to cast a floating point column to integer" <| t = table_builder [["X", [1.0001, 2.25, 4.0]]] c = t.at "X" . cast Value_Type.Integer @@ -371,6 +375,21 @@ spec setup = w7 = Problems.expect_warning Conversion_Failure c7 w7.affected_rows_count . 
should_equal 6+3+1 + if setup.is_database.not then + Test.specify "should fail if there is no conversion available for a given type" <| + t = table_builder [["X", [1, 2, 3]]] + + # currently unsupported + r1 = t.cast "X" Value_Type.Decimal + r1.should_fail_with Illegal_Argument + + r2 = t.cast "X" Value_Type.Binary + r2.should_fail_with Illegal_Argument + + # this is not supposed to be supported, but still needs a friendly message + r3 = t.cast "X" (Value_Type.Unsupported_Data_Type "foobar" "foobar") + r3.should_fail_with Illegal_Argument + Test.group prefix+"Simple variant of Table/Column.parse in all backends" <| Test.specify "should be able to parse simple integers" <| t = table_builder [["X", ["42", "0", "-1"]]] diff --git a/test/Table_Tests/src/Database/Common_Spec.enso b/test/Table_Tests/src/Database/Common_Spec.enso index 1876e71823dd..4f1ca3ab5f25 100644 --- a/test/Table_Tests/src/Database/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common_Spec.enso @@ -18,7 +18,7 @@ spec prefix connection = tables_to_clean = Vector.new_builder upload prefix data temporary=True = name = Name_Generator.random_name prefix - table = data.create_database_table connection name temporary=temporary primary_key=Nothing + table = data.select_into_database_table connection name temporary=temporary primary_key=Nothing tables_to_clean.append table.name table diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso index b9f959e7b3c3..7347644ba361 100644 --- a/test/Table_Tests/src/Database/Postgres_Spec.enso +++ b/test/Table_Tests/src/Database/Postgres_Spec.enso @@ -57,7 +57,7 @@ postgres_specific_spec connection db_name setup = connection.execute_update 'CREATE VIEW "'+vinfo+'" AS SELECT "A" FROM "'+tinfo+'";' temporary_table = Name_Generator.random_name "TemporaryTable" - (Table.new [["X", [1, 2, 3]]]).create_database_table connection temporary_table temporary=True + (Table.new [["X", [1, 2, 
3]]]).select_into_database_table connection temporary_table temporary=True Test.specify "should be able to list table types" <| table_types = connection.table_types @@ -202,7 +202,7 @@ run_tests connection db_name = name = Name_Generator.random_name "table_"+ix.to_text in_mem_table = Table.new columns - in_mem_table.create_database_table connection name primary_key=Nothing temporary=True + in_mem_table.select_into_database_table connection name primary_key=Nothing temporary=True materialize = .read Common_Spec.spec prefix connection @@ -210,9 +210,9 @@ run_tests connection db_name = common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True take_drop=False allows_mixed_type_comparisons=False fixed_length_text_columns=True aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False agg_in_memory_table = (enso_project.data / "data.csv") . 
read - agg_table = agg_in_memory_table.create_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True + agg_table = agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True tables.append agg_table.name - empty_agg_table = (agg_in_memory_table.take (First 0)).create_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True + empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True tables.append empty_agg_table.name setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection diff --git a/test/Table_Tests/src/Database/Redshift_Spec.enso b/test/Table_Tests/src/Database/Redshift_Spec.enso index f60d549eec90..fa61748f7ab5 100644 --- a/test/Table_Tests/src/Database/Redshift_Spec.enso +++ b/test/Table_Tests/src/Database/Redshift_Spec.enso @@ -45,7 +45,7 @@ run_tests connection = name = Name_Generator.random_name "table_"+ix.to_text in_mem_table = Table.new columns - in_mem_table.create_database_table connection name primary_key=Nothing temporary=True + in_mem_table.select_into_database_table connection name primary_key=Nothing temporary=True materialize = .read Common_Spec.spec prefix connection @@ -54,9 +54,9 @@ run_tests connection = common_selection = Common_Table_Operations.Main.Test_Selection.Config supports_case_sensitive_columns=True order_by_unicode_normalization_by_default=True take_drop=False allows_mixed_type_comparisons=False aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config first_last_row_order=False aggregation_problems=False date_support=False agg_in_memory_table = (enso_project.data / "data.csv") . 
read - agg_table = agg_in_memory_table.create_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True + agg_table = agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True tables.append agg_table.name - empty_agg_table = (agg_in_memory_table.take (First 0)).create_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True + empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True tables.append empty_agg_table.name setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 4621dcce289f..0453db287c51 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -42,7 +42,7 @@ sqlite_specific_spec prefix connection = connection.execute_update 'CREATE VIEW "'+vinfo+'" AS SELECT "A" FROM "'+tinfo+'";' temporary_table = Name_Generator.random_name "TemporaryTable" - (Table.new [["X", [1, 2, 3]]]).create_database_table connection temporary_table temporary=True + (Table.new [["X", [1, 2, 3]]]).select_into_database_table connection temporary_table temporary=True Test.specify "should be able to list table types" <| table_types = connection.table_types @@ -126,7 +126,7 @@ sqlite_spec connection prefix = name = Name_Generator.random_name "table_"+ix.to_text in_mem_table = Table.new columns - in_mem_table.create_database_table connection name primary_key=Nothing + in_mem_table.select_into_database_table connection name primary_key=Nothing materialize = .read Common_Spec.spec prefix connection @@ -144,8 
+144,8 @@ sqlite_spec connection prefix = the missing statistics. aggregate_selection = Common_Table_Operations.Aggregate_Spec.Test_Selection.Config advanced_stats=False text_shortest_longest=False first_last=False first_last_row_order=False multi_distinct=False aggregation_problems=False nan=False date_support=False agg_in_memory_table = (enso_project.data / "data.csv") . read - agg_table = agg_in_memory_table.create_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True - empty_agg_table = (agg_in_memory_table.take (First 0)).create_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True + agg_table = agg_in_memory_table.select_into_database_table connection (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True + empty_agg_table = (agg_in_memory_table.take (First 0)).select_into_database_table connection (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table empty_agg_table table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection Common_Table_Operations.Main.spec setup @@ -211,7 +211,7 @@ spec = Test.specify 'should not duplicate warnings' <| c = Database.connect (SQLite In_Memory) t0 = Table.new [["X", ["a", "bc", "def"]]] - t1 = t0.create_database_table c "Tabela" + t1 = t0.select_into_database_table c "Tabela" t2 = t1.cast "X" (Value_Type.Char size=1) Warning.get_all t2 . length . 
should_equal 1 diff --git a/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso b/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso index 477877c1630f..4ff60f59c6a2 100644 --- a/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso +++ b/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso @@ -5,7 +5,8 @@ from Standard.Table import Aggregate_Column, Value_Type, Table from Standard.Table.Errors import Invalid_Value_Type, Inexact_Type_Coercion import Standard.Database.Data.Dialect -import Standard.Database.Extensions.Upload_Table +import Standard.Database.Extensions.Upload_Database_Table +import Standard.Database.Extensions.Upload_In_Memory_Table import Standard.Database.Internal.SQLite.SQLite_Type_Mapping from Standard.Database import Database, SQLite, In_Memory, SQL_Query from Standard.Database.Errors import Unsupported_Database_Operation @@ -103,7 +104,7 @@ spec = Test.specify "does not support creating tables with date/time values" <| t = Table.new [["a", [Date.now]], ["b", [Time_Of_Day.now]], ["c", [Date_Time.now]]] - r1 = t.create_database_table connection temporary=True + r1 = t.select_into_database_table connection temporary=True r1.should_fail_with Unsupported_Database_Operation Test.specify "should be able to infer types for all supported operations" <| diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index e3985e1f2d54..f5794df8cbb2 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -1,4 +1,5 @@ from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument from Standard.Table import all from Standard.Table.Errors import Missing_Input_Columns @@ -6,6 +7,7 @@ from Standard.Table.Errors import Missing_Input_Columns from Standard.Database import all from Standard.Database.Errors import all from Standard.Database.Internal.Result_Set import 
result_set_to_table +from Standard.Database.Extensions.Upload_Default_Helpers import get_primary_key from Standard.Test import Test, Test_Suite, Problems import Standard.Test.Extensions @@ -31,10 +33,129 @@ main = Test_Suite.run_main <| database, so features relying on persistence cannot really be tested. spec make_new_connection prefix persistent_connector=True = connection = make_new_connection Nothing + Test.group prefix+"Creating an empty table" <| + Test.specify "should allow to specify the column names and types" <| + t = connection.create_table structure=[["X", Value_Type.Integer], ["Y", Value_Type.Char]] temporary=True + t.column_names . should_equal ["X", "Y"] + t.at "X" . to_vector . should_equal [] + t.at "X" . value_type . is_integer . should_be_true + t.at "Y" . to_vector . should_equal [] + t.at "Y" . value_type . is_text . should_be_true + t.row_count . should_equal 0 + + Test.specify "should allow to inherit the structure of an existing in-memory table" <| + t = Table.new [["X", [1, 2]], ["Y", ['a', 'b']]] + db_table = connection.create_table structure=t temporary=True + db_table.column_names . should_equal ["X", "Y"] + db_table.at "X" . to_vector . should_equal [] + db_table.at "X" . value_type . is_integer . should_be_true + db_table.at "Y" . to_vector . should_equal [] + db_table.at "Y" . value_type . is_text . should_be_true + db_table.row_count . should_equal 0 + + Test.specify "should allow to inherit the structure of an existing Database table" <| + t = Table.new [["X", [1, 2]], ["Y", ['a', 'b']]] + input_db_table = t.select_into_database_table connection (Name_Generator.random_name "input_table") temporary=True + input_db_table.at "X" . to_vector . should_equal [1, 2] + + db_table = connection.create_table structure=input_db_table temporary=True + db_table.column_names . should_equal ["X", "Y"] + db_table.at "X" . to_vector . should_equal [] + db_table.at "X" . value_type . is_integer . should_be_true + db_table.at "Y" . to_vector . 
should_equal [] + db_table.at "Y" . value_type . is_text . should_be_true + db_table.row_count . should_equal 0 + + Test.specify "should fail if the table already exists" <| + name = Name_Generator.random_name "table-already-exists 1" + connection.create_table name structure=[["X", Value_Type.Integer], ["Y", Value_Type.Char]] temporary=True + r1 = connection.create_table name structure=[["X", Value_Type.Integer], ["Y", Value_Type.Char]] temporary=True + r1.should_fail_with Table_Already_Exists + + Test.specify "should not fail if the table exists, if `allow_existing=True`, even if the structure does not match" <| + name = Name_Generator.random_name "table-already-exists 2" + connection.create_table name structure=[["X", Value_Type.Integer], ["Y", Value_Type.Char]] temporary=True + r1 = connection.create_table name structure=[["Z", Value_Type.Float]] temporary=True allow_existing=True + r1.column_names . should_equal ["X", "Y"] + + Test.specify "should include the created table in the tables directory" <| + name = Name_Generator.random_name "persistent_table 1" + db_table = connection.create_table name structure=[["X", Value_Type.Integer], ["Y", Value_Type.Char]] temporary=False + Panic.with_finalizer (connection.drop_table db_table.name) <| + db_table.column_names . should_equal ["X", "Y"] + db_table.at "X" . to_vector . should_equal [] + + connection.tables.at "Name" . to_vector . should_contain name + connection.query name . column_names . should_equal ["X", "Y"] + connection.query name . at "X" . to_vector . should_equal [] + + Test.specify "should include the temporary table in the tables directory" <| + name = Name_Generator.random_name "temporary_table 1" + db_table = connection.create_table name structure=[["X", Value_Type.Integer], ["Y", Value_Type.Char]] temporary=True + db_table.column_names . should_equal ["X", "Y"] + db_table.at "X" . to_vector . should_equal [] + + connection.tables.at "Name" . to_vector . should_contain name + connection.query name . 
column_names . should_equal ["X", "Y"] + connection.query name . at "X" . to_vector . should_equal [] + + if persistent_connector then + Test.specify "should drop the temporary table after the connection is closed" <| + name = Name_Generator.random_name "temporary_table 2" + tmp_connection = make_new_connection Nothing + tmp_connection.create_table name [["X", Value_Type.Integer]] temporary=True + tmp_connection.query (SQL_Query.Table_Name name) . column_names . should_equal ["X"] + tmp_connection.close + connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found + + Test.specify "should preserve the regular table after the connection is closed" <| + name = Name_Generator.random_name "persistent_table 2" + tmp_connection = make_new_connection Nothing + tmp_connection.create_table name [["X", Value_Type.Integer]] temporary=False + Panic.with_finalizer (connection.drop_table name) <| + t1 = tmp_connection.query (SQL_Query.Table_Name name) + t1.column_names . should_equal ["X"] + t1.at "X" . value_type . is_integer . should_be_true + tmp_connection.close + t2 = connection.query (SQL_Query.Table_Name name) + t2.column_names . should_equal ["X"] + t2.at "X" . value_type . is_integer . should_be_true + + Test.specify "should be able to specify a primary key" <| + name = Name_Generator.random_name "primary_key 1" + db_table = connection.create_table table_name=name structure=[["X", Value_Type.Integer], ["Y", Value_Type.Char], ["Z", Value_Type.Integer], ["W", Value_Type.Float]] primary_key=["Y", "Z"] temporary=False + Panic.with_finalizer (connection.drop_table db_table.name) <| + get_primary_key connection db_table.name . 
should_equal ["Y", "Z"] + + Test.specify "should ensure that primary key columns specified are valid" <| + r1 = connection.create_table structure=[["X", Value_Type.Integer]] primary_key=["Y"] temporary=True + r1.should_fail_with Missing_Input_Columns + + t = Table.new [["X", [1, 2, 3]]] + r2 = connection.create_table structure=t primary_key=["Y"] temporary=True + r2.should_fail_with Missing_Input_Columns + + Test.specify "should not allow creating a table without columns" <| + r1 = connection.create_table structure=[] temporary=True + r1.should_fail_with Illegal_Argument + + Test.specify "should check types of primary key" <| + r1 = connection.create_table structure=[["X", Value_Type.Integer]] primary_key=[0] temporary=True + r1.should_fail_with Illegal_Argument + Test.group prefix+"Uploading an in-memory Table" <| in_memory_table = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] + Test.specify "should create a database table with the same contents as the source" <| + db_table = in_memory_table.select_into_database_table connection temporary=True + db_table.column_names . should_equal ["X", "Y"] + db_table.at "X" . to_vector . should_equal [1, 2, 3] + db_table.at "Y" . to_vector . should_equal ['a', 'b', 'c'] + db_table.at "X" . value_type . is_integer . should_be_true + db_table.at "Y" . value_type . is_text . should_be_true + db_table.row_count . should_equal 3 + Test.specify "should include the created table in the tables directory" <| - db_table = in_memory_table.create_database_table connection (Name_Generator.random_name "permanent_table 1") temporary=False + db_table = in_memory_table.select_into_database_table connection (Name_Generator.random_name "permanent_table 1") temporary=False Panic.with_finalizer (connection.drop_table db_table.name) <| db_table.at "X" . to_vector . should_equal [1, 2, 3] @@ -42,7 +163,7 @@ spec make_new_connection prefix persistent_connector=True = connection.query db_table.name . at "X" . to_vector . 
should_equal [1, 2, 3] Test.specify "should include the temporary table in the tables directory" <| - db_table = in_memory_table.create_database_table connection (Name_Generator.random_name "temporary_table 1") temporary=True + db_table = in_memory_table.select_into_database_table connection (Name_Generator.random_name "temporary_table 1") temporary=True db_table.at "X" . to_vector . should_equal [1, 2, 3] connection.tables.at "Name" . to_vector . should_contain db_table.name connection.query db_table.name . at "X" . to_vector . should_equal [1, 2, 3] @@ -50,7 +171,7 @@ spec make_new_connection prefix persistent_connector=True = if persistent_connector then Test.specify "should drop the temporary table after the connection is closed" <| tmp_connection = make_new_connection Nothing - db_table = in_memory_table.create_database_table tmp_connection (Name_Generator.random_name "temporary_table 2") temporary=True + db_table = in_memory_table.select_into_database_table tmp_connection (Name_Generator.random_name "temporary_table 2") temporary=True name = db_table.name tmp_connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . should_equal [1, 2, 3] tmp_connection.close @@ -58,76 +179,84 @@ spec make_new_connection prefix persistent_connector=True = Test.specify "should preserve the regular table after the connection is closed" <| tmp_connection = make_new_connection Nothing - db_table = in_memory_table.create_database_table tmp_connection (Name_Generator.random_name "permanent_table 1") temporary=False + db_table = in_memory_table.select_into_database_table tmp_connection (Name_Generator.random_name "permanent_table 1") temporary=False name = db_table.name Panic.with_finalizer (connection.drop_table name) <| tmp_connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . should_equal [1, 2, 3] tmp_connection.close connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . 
should_equal [1, 2, 3] - Test.specify "should rollback the table creation if create_database_table fails" <| + Test.specify "should not create any table if upload fails" <| normal_column = Column.from_vector "Y" ((100+0).up_to (100+1000)).to_vector exploding_column = make_mock_column "X" (0.up_to 1000).to_vector 512 exploding_table = Table.new [normal_column, exploding_column] name = Name_Generator.random_name "rolling-back-table" connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found Test.expect_panic_with matcher=ExplodingStoragePayload <| - exploding_table.create_database_table connection name temporary=False primary_key=Nothing + exploding_table.select_into_database_table connection name temporary=False primary_key=Nothing connection.query (SQL_Query.Table_Name name) . should_fail_with Table_Not_Found Test.specify "should set a primary key for the table" <| t1 = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [1.0, 2.0, 3.0]]] - db_table_1 = t1.create_database_table connection (Name_Generator.random_name "primary-key-1") primary_key=["Y", "X"] + db_table_1 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-1") primary_key=["Y", "X"] Panic.with_finalizer (connection.drop_table db_table_1.name) <| db_table_1.at "X" . to_vector . should_equal [1, 2, 3] get_primary_key connection db_table_1.name . should_equal ["Y", "X"] - db_table_2 = t1.create_database_table connection (Name_Generator.random_name "primary-key-2") + db_table_2 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-2") Panic.with_finalizer (connection.drop_table db_table_2.name) <| db_table_2.at "X" . to_vector . should_equal [1, 2, 3] get_primary_key connection db_table_2.name . 
should_equal ["X"] - db_table_3 = t1.create_database_table connection (Name_Generator.random_name "primary-key-3") primary_key=Nothing + db_table_3 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-3") primary_key=Nothing Panic.with_finalizer (connection.drop_table db_table_3.name) <| db_table_3.at "X" . to_vector . should_equal [1, 2, 3] get_primary_key connection db_table_3.name . should_equal Nothing Test.specify "should ensure that primary key columns are valid" <| - r1 = in_memory_table.create_database_table connection (Name_Generator.random_name "primary-key-4") primary_key=["X", "nonexistent"] + r1 = in_memory_table.select_into_database_table connection (Name_Generator.random_name "primary-key-4") primary_key=["X", "nonexistent"] r1.should_fail_with Missing_Input_Columns - db_table_2 = in_memory_table.create_database_table connection (Name_Generator.random_name "primary-key-5") primary_key=["X", 0] - Panic.with_finalizer (connection.drop_table db_table_2.name) <| - get_primary_key connection db_table_2.name . should_equal ["X"] - Test.specify "should fail if the primary key is not unique" <| t1 = Table.new [["X", [1, 2, 1]], ["Y", ['b', 'b', 'a']]] - r1 = t1.create_database_table connection (Name_Generator.random_name "primary-key-6") temporary=True primary_key=["X"] + r1 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-6") temporary=True primary_key=["X"] r1.should_fail_with Non_Unique_Primary_Key e1 = r1.catch e1.clashing_primary_key . should_equal [1] e1.clashing_example_row_count . should_equal 2 e1.to_display_text . should_equal "The primary key [X] is not unique. The key [1] corresponds to 2 rows." 
- r2 = t1.create_database_table connection (Name_Generator.random_name "primary-key-6") temporary=True primary_key=["Y"] + r2 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-6") temporary=True primary_key=["Y"] r2.should_fail_with Non_Unique_Primary_Key r2.catch . clashing_primary_key . should_equal ['b'] - r3 = t1.create_database_table connection (Name_Generator.random_name "primary-key-7") temporary=True primary_key=["X", "Y"] + r3 = t1.select_into_database_table connection (Name_Generator.random_name "primary-key-7") temporary=True primary_key=["X", "Y"] r3.at "X" . to_vector . should_equal [1, 2, 1] t2 = Table.new [["X", [1, 2, 1]], ["Y", ['a', 'b', 'a']]] - r4 = t2.create_database_table connection (Name_Generator.random_name "primary-key-7") temporary=True primary_key=["X", "Y"] + r4 = t2.select_into_database_table connection (Name_Generator.random_name "primary-key-7") temporary=True primary_key=["X", "Y"] r4.should_fail_with Non_Unique_Primary_Key r4.catch . clashing_primary_key . should_equal [1, 'a'] + Test.specify "should fail if the target table already exists" <| + name = Name_Generator.random_name "table-already-exists" + db_table = connection.create_table name [["X", Value_Type.Integer]] temporary=True + t = Table.new [["Y", ['a', 'b']]] + r1 = t.select_into_database_table connection name temporary=True + r1.should_fail_with Table_Already_Exists + r2 = t.select_into_database_table connection name temporary=False + r2.should_fail_with Table_Already_Exists + + db_table.column_names . should_equal ["X"] + db_table.at "X" . to_vector . 
should_equal [] + Test.group prefix+"Persisting a Database Table (query)" <| Test.specify "should be able to create a persistent copy of a DB table" <| t = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']], ["Z", [1.0, 2.0, 3.0]]] tmp_connection = make_new_connection Nothing - db_table = t.create_database_table tmp_connection (Name_Generator.random_name "source-table") temporary=True + db_table = t.select_into_database_table tmp_connection (Name_Generator.random_name "source-table") temporary=True - copied_table = db_table.create_database_table tmp_connection (Name_Generator.random_name "copied-table") temporary=False + copied_table = db_table.select_into_database_table tmp_connection (Name_Generator.random_name "copied-table") temporary=False name = copied_table.name Panic.with_finalizer (connection.drop_table name) <| copied_table.at "X" . value_type . is_integer . should_be_true @@ -143,14 +272,14 @@ spec make_new_connection prefix persistent_connector=True = Test.specify "should be able to persist a complex query with generated columns, joins etc." <| t1 = Table.new [["X", [1, 1, 2]], ["Y", [1, 2, 3]]] - db_table_1 = t1.create_database_table connection (Name_Generator.random_name "source-table-1") temporary=True primary_key=Nothing + db_table_1 = t1.select_into_database_table connection (Name_Generator.random_name "source-table-1") temporary=True primary_key=Nothing db_table_2 = db_table_1.set "[Y] + 100 * [X]" "C1" . set '"constant_text"' "C2" db_table_3 = db_table_1.aggregate [Aggregate_Column.Group_By "X", Aggregate_Column.Sum "[Y]*[Y]" "C3"] . set "[X] + 1" "X" db_table_4 = db_table_2.join db_table_3 join_kind=Join_Kind.Left_Outer - copied_table = db_table_4.create_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=Nothing + copied_table = db_table_4.select_into_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=Nothing copied_table.column_names . 
should_equal ["X", "Y", "C1", "C2", "Right X", "C3"] copied_table.at "X" . to_vector . should_equal [1, 1, 2] copied_table.at "C1" . to_vector . should_equal [101, 102, 203] @@ -168,10 +297,10 @@ spec make_new_connection prefix persistent_connector=True = Test.specify "should be able to create a temporary copy of a query" <| tmp_connection = make_new_connection Nothing t = Table.new [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] - db_table = t.create_database_table tmp_connection (Name_Generator.random_name "source-table") temporary=True + db_table = t.select_into_database_table tmp_connection (Name_Generator.random_name "source-table") temporary=True db_table_2 = db_table.set "[X] + 100 * [Y]" "computed" - copied_table = db_table_2.create_database_table tmp_connection (Name_Generator.random_name "copied-table") temporary=True + copied_table = db_table_2.select_into_database_table tmp_connection (Name_Generator.random_name "copied-table") temporary=True name = copied_table.name copied_table_accessed = tmp_connection.query name @@ -183,33 +312,33 @@ spec make_new_connection prefix persistent_connector=True = Test.specify "should be able to specify a primary key" <| t = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - db_table = t.create_database_table connection (Name_Generator.random_name "source-table") temporary=True - db_table_2 = db_table.create_database_table connection (Name_Generator.random_name "copied-table") primary_key=["X"] + db_table = t.select_into_database_table connection (Name_Generator.random_name "source-table") temporary=True + db_table_2 = db_table.select_into_database_table connection (Name_Generator.random_name "copied-table") primary_key=["X"] Panic.with_finalizer (connection.drop_table db_table_2.name) <| get_primary_key connection db_table_2.name . 
should_equal ["X"] Test.specify "should ensure that primary key columns are valid" <| t = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] - db_table = t.create_database_table connection (Name_Generator.random_name "source-table") temporary=True - r1 = db_table.create_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=["nonexistent"] + db_table = t.select_into_database_table connection (Name_Generator.random_name "source-table") temporary=True + r1 = db_table.select_into_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=["nonexistent"] r1.should_fail_with Missing_Input_Columns Test.specify "should fail when the primary key is not unique" <| t = Table.new [["X", [1, 2, 1]], ["Y", ['b', 'b', 'a']]] - db_table = t.create_database_table connection (Name_Generator.random_name "source-table") temporary=True primary_key=Nothing + db_table = t.select_into_database_table connection (Name_Generator.random_name "source-table") temporary=True primary_key=Nothing Problems.assume_no_problems db_table - r1 = db_table.create_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=["X"] + r1 = db_table.select_into_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=["X"] r1.should_fail_with Non_Unique_Primary_Key e1 = r1.catch e1.clashing_primary_key . should_equal [1] e1.clashing_example_row_count . 
should_equal 2 t2 = Table.new [["X", [1, 3, 1, 2, 3, 2, 2, 2, 0]], ["Y", ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i']]] - db_table_2 = t2.create_database_table connection (Name_Generator.random_name "source-table-2") temporary=True primary_key=Nothing + db_table_2 = t2.select_into_database_table connection (Name_Generator.random_name "source-table-2") temporary=True primary_key=Nothing Problems.assume_no_problems db_table_2 - r2 = db_table_2.create_database_table connection (Name_Generator.random_name "copied-table-2") temporary=True primary_key=["X"] + r2 = db_table_2.select_into_database_table connection (Name_Generator.random_name "copied-table-2") temporary=True primary_key=["X"] r2.should_fail_with Non_Unique_Primary_Key e2 = r2.catch e2.clashing_primary_key.length . should_equal 1 @@ -220,13 +349,49 @@ spec make_new_connection prefix persistent_connector=True = Test.specify "will not allow to upload tables across connections" <| t = Table.new [["X", [1, 2, 1]], ["Y", ['b', 'b', 'a']]] - db_table = t.create_database_table connection (Name_Generator.random_name "source-table") temporary=True primary_key=Nothing + db_table = t.select_into_database_table connection (Name_Generator.random_name "source-table") temporary=True primary_key=Nothing connection_2 = make_new_connection Nothing - r1 = db_table.create_database_table connection_2 (Name_Generator.random_name "copied-table") temporary=True primary_key=Nothing + r1 = db_table.select_into_database_table connection_2 (Name_Generator.random_name "copied-table") temporary=True primary_key=Nothing r1.should_fail_with Unsupported_Database_Operation r1.catch.message . 
should_contain "same connection" + Test.specify "should fail if the target table already exists" <| + name = Name_Generator.random_name "table-already-exists" + db_table = connection.create_table name [["X", Value_Type.Integer]] temporary=True + t = Table.new [["Y", ['a', 'b']]] + db_table_2 = t.select_into_database_table connection temporary=True + r1 = db_table_2.select_into_database_table connection name temporary=True + r1.should_fail_with Table_Already_Exists + r2 = db_table_2.select_into_database_table connection name temporary=False + r2.should_fail_with Table_Already_Exists + + db_table.column_names . should_equal ["X"] + db_table.at "X" . to_vector . should_equal [] + + test_table_append source_table_builder target_table_builder = + Test.specify "should be able to append new rows to a table" <| + dest = target_table_builder [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] primary_key=["X"] + src = source_table_builder [["X", [4, 5, 6]], ["Y", ['d', 'e', 'f']]] + result = src.update_database_table connection dest.name + result.column_names . should_equal ["X", "Y"] + result.at "X" . to_vector . should_equal [1, 2, 3, 4, 5, 6] + result.at "Y" . to_vector . 
should_equal ['a', 'b', 'c', 'd', 'e', 'f'] + + Test.specify "should fail if the target table does not exist" <| + t = source_table_builder ["X", [1, 2, 3], "Y", ['a', 'b', 'c']] + nonexistent_name = Name_Generator.random_name "nonexistent-table" + r1 = t.update_database_table connection nonexistent_name + r1.should_fail_with Table_Not_Found + + table_builder name_prefix args primary_key=[] = + in_memory_table = Table.new args + in_memory_table.select_into_database_table connection (Name_Generator.random_name name_prefix) temporary=True primary_key=primary_key + Test.group "Appending an in-memory table to a Database table" pending="TODO" <| + test_table_append Table.new (table_builder "target-table") + + Test.group "Appending a Database table to a Database table" pending="TODO" <| + test_table_append (table_builder "source-table") (table_builder "target-table") ## PRIVATE Creates a mock column containing `values`. @@ -235,27 +400,3 @@ spec make_new_connection prefix persistent_connector=True = make_mock_column name values exploding_index = storage = ExplodingStorage.new values.to_array exploding_index Column.from_storage name storage - -## PRIVATE - - This method may not work correctly with temporary tables, possibly resulting - in `SQL_Error` as such tables may not be found. - - ! Temporary Tables in SQLite - - The temporary tables in SQLite live in a `temp` database. There is a bug in - how JDBC retrieves primary keys - it only queries the `sqlite_schema` table - which contains schemas of only permanent tables. For now, we just adapt to - it and check primary keys for non-temporary tables only. If we ever want to - use the primary key information in the actual product, we will likely need - to reimplement this by ourselves and UNION both `sqlite_schema` and - `temp.sqlite_schema` tables to get results for both temporary and permanent - tables. 
-get_primary_key connection table_name = - connection.jdbc_connection.with_connection java_connection-> - rs = java_connection.getMetaData.getPrimaryKeys Nothing Nothing table_name - keys_table = result_set_to_table rs connection.dialect.make_column_fetcher_for_type - # The names of the columns are sometimes lowercase and sometimes uppercase, so we do a case insensitive select first. - selected = keys_table.select_columns [Column_Selector.By_Name "COLUMN_NAME", Column_Selector.By_Name "KEY_SEQ"] reorder=True - key_column_names = selected.order_by 1 . at 0 . to_vector - if key_column_names.is_empty then Nothing else key_column_names diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index 8254122fdd8d..6b85e554cdf5 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -12,7 +12,8 @@ from Standard.Table.Errors import Invalid_Output_Column_Names, Duplicate_Output_ import Standard.Visualization -import Standard.Database.Extensions.Upload_Table +import Standard.Database.Extensions.Upload_Database_Table +import Standard.Database.Extensions.Upload_In_Memory_Table from Standard.Database import Database, SQLite, In_Memory from Standard.Test import Test, Test_Suite, Problems @@ -854,7 +855,7 @@ spec = t.join 42 . 
should_fail_with Type_Error db_connection = Database.connect (SQLite In_Memory) - db_table = (Table.new [["Y", [4, 5, 6]]]).create_database_table db_connection "test" + db_table = (Table.new [["Y", [4, 5, 6]]]).select_into_database_table db_connection "test" r = t.join db_table r.should_fail_with Illegal_Argument diff --git a/test/Tests/src/Data/Map_Spec.enso b/test/Tests/src/Data/Map_Spec.enso index e544a77d7474..375ccd8cf489 100644 --- a/test/Tests/src/Data/Map_Spec.enso +++ b/test/Tests/src/Data/Map_Spec.enso @@ -1,8 +1,6 @@ from Standard.Base import all import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.No_Such_Key.No_Such_Key -import Standard.Base.Data.Time.Date_Time.Date_Time -from Standard.Base.Data.Map import Map from Standard.Test import Test, Test_Suite, Problems import Standard.Test.Extensions @@ -252,7 +250,7 @@ spec = m1.should_fail_with Illegal_Argument m1.catch.message . should_equal "`Map.from_vector` encountered duplicate key: 0" - m2 = Map.from_vector vec allow_duplicates=True + m2 = Map.from_vector vec error_on_duplicates=False Problems.assume_no_problems m2 m2.get 0 . should_equal 1 m2.get 3 . should_equal -5 diff --git a/test/Tests/src/Data/Set_Spec.enso b/test/Tests/src/Data/Set_Spec.enso new file mode 100644 index 000000000000..13deef702dad --- /dev/null +++ b/test/Tests/src/Data/Set_Spec.enso @@ -0,0 +1,55 @@ +from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument + +from Standard.Test import Test, Test_Suite, Problems +import Standard.Test.Extensions + +spec = + Test.group "Enso Set" <| + Test.specify "should allow checking for emptiness" <| + empty_map = Set.empty + non_empty = Set.empty . insert "foo" + empty_map.is_empty . should_be_true + non_empty.is_empty . should_be_false + + empty_map.not_empty . should_be_false + non_empty.not_empty . should_be_true + + Test.specify "should be constructed from a vector" <| + s1 = Set.from_vector [1, 2, 3, 2] + s1.size . 
should_equal 3 + s1.to_vector.sort . should_equal [1, 2, 3] + + r2 = Set.from_vector [1, 2, 2] error_on_duplicates=True + r2.should_fail_with Illegal_Argument + + Test.specify "should allow checking contains" <| + s1 = Set.from_vector [1, 2, 3, 2] + s1.contains 1 . should_be_true + s1.contains 2 . should_be_true + s1.contains 3 . should_be_true + s1.contains 4 . should_be_false + + Test.specify "should allow to compute a union, intersection and difference" <| + s1 = Set.from_vector [1, 2] + s2 = Set.from_vector [2, 3] + s3 = Set.from_vector [3, 4] + + (s1.union s2).to_vector.sort . should_equal [1, 2, 3] + (s1.union s3).to_vector.sort . should_equal [1, 2, 3, 4] + (s1.intersection s2).to_vector.sort . should_equal [2] + (s1.intersection s3).to_vector . should_equal [] + (s1.difference s2).to_vector.sort . should_equal [1] + (s1.difference s3).to_vector.sort . should_equal [1, 2] + (s1.difference s1).to_vector . should_equal [] + + Test.specify "should allow to check for equality of two sets" <| + s1 = Set.from_vector [1, 2] + s2 = Set.from_vector [2, 1, 1] + s3 = Set.from_vector [1, 2, 3] + + (s1 == s2) . should_be_true + (s1 == s1) . should_be_true + (s1 == s3) . 
should_be_false + +main = Test_Suite.run_main spec diff --git a/test/Tests/src/Main.enso b/test/Tests/src/Main.enso index 1c5db730d991..15ebd49f9447 100644 --- a/test/Tests/src/Main.enso +++ b/test/Tests/src/Main.enso @@ -43,10 +43,11 @@ import project.Data.Ordering.Vector_Lexicographic_Order_Spec import project.Data.Pair_Spec import project.Data.Problems_Spec import project.Data.Range_Spec +import project.Data.Regression_Spec +import project.Data.Set_Spec +import project.Data.Statistics_Spec import project.Data.Time.Spec as Time_Spec import project.Data.Vector_Spec -import project.Data.Statistics_Spec -import project.Data.Regression_Spec import project.Data.Text_Spec import project.Data.Text.Text_Sub_Range_Spec @@ -106,6 +107,7 @@ main = Test_Suite.run_main <| List_Spec.spec Locale_Spec.spec Map_Spec.spec + Set_Spec.spec Maybe_Spec.spec Meta_Spec.spec Meta_Location_Spec.spec diff --git a/test/Visualization_Tests/src/Table_Spec.enso b/test/Visualization_Tests/src/Table_Spec.enso index 46960772b92c..15ca245ce9ab 100644 --- a/test/Visualization_Tests/src/Table_Spec.enso +++ b/test/Visualization_Tests/src/Table_Spec.enso @@ -24,7 +24,7 @@ type Foo visualization_spec connection = in_mem = Table.new [["A", ['a', 'a', 'a']], ["B", [2, 2, 3]], ["C", [3, 5, 6]]] - t = in_mem.create_database_table connection "T" primary_key=Nothing temporary=True + t = in_mem.select_into_database_table connection "T" primary_key=Nothing temporary=True make_json header data all_rows ixes_header ixes = p_header = ["header", header] From 06c8d2977d3d998d094fbe91ab501096d4299b1d Mon Sep 17 00:00:00 2001 From: Adam Obuchowicz Date: Tue, 6 Jun 2023 12:41:30 +0200 Subject: [PATCH 24/39] Fix entering node failure handling (#6932) Fixes #6763 The bug was caused by pushing stack frames in the engine first, then failing to get graph controller - in that case we didn't update graph, but kept the stack, so graph was not synchronized with stack. 
This PR changes the approach: we try to open graph first, only then push frames to stack. If _any_ frame will fail, we try to pop those we pushed so far, to restore the previous state. The same fix was applied for leaving nodes. Also, I realized, that running several "enter node" / "leave node" actions could mix push/pop operations, making a mess with our execution context state. I've added a mutex to ensure we won't do this: the contesting operation will simply fail. # Important Notes In case when _restoring_ state fails, I did not have any better idea than just trying until it succeed. --- app/gui/src/controller/graph/executed.rs | 125 +++++++++--- app/gui/src/ide/initializer.rs | 23 +-- app/gui/src/lib.rs | 1 + app/gui/src/model/execution_context.rs | 4 + app/gui/src/model/execution_context/plain.rs | 27 ++- .../model/execution_context/synchronized.rs | 4 + app/gui/src/retry.rs | 185 ++++++++++++++++++ lib/rust/prelude/src/data/non_empty_vec.rs | 5 + lib/rust/prelude/src/lib.rs | 1 + lib/rust/prelude/src/result.rs | 2 +- lib/rust/prelude/src/sync.rs | 115 +++++++++++ 11 files changed, 442 insertions(+), 50 deletions(-) create mode 100644 app/gui/src/retry.rs create mode 100644 lib/rust/prelude/src/sync.rs diff --git a/app/gui/src/controller/graph/executed.rs b/app/gui/src/controller/graph/executed.rs index a7947f67fa66..b5f784075f6a 100644 --- a/app/gui/src/controller/graph/executed.rs +++ b/app/gui/src/controller/graph/executed.rs @@ -14,6 +14,7 @@ use crate::model::execution_context::QualifiedMethodPointer; use crate::model::execution_context::Visualization; use crate::model::execution_context::VisualizationId; use crate::model::execution_context::VisualizationUpdateData; +use crate::retry::retry_operation_errors_cap; use double_representation::name::QualifiedName; use engine_protocol::language_server::ExecutionEnvironment; @@ -22,6 +23,8 @@ use futures::stream; use futures::TryStreamExt; use span_tree::generate::context::CalledMethodInfo; use 
span_tree::generate::context::Context; +use std::assert_matches::debug_assert_matches; +use std::time::Duration; // ============== @@ -49,9 +52,14 @@ pub struct NoResolvedMethod(double_representation::node::Id); #[allow(missing_docs)] #[derive(Debug, Fail, Clone, Copy)] -#[fail(display = "Operation is not permitted in read only mode")] +#[fail(display = "Operation is not permitted in read only mode.")] pub struct ReadOnly; +#[allow(missing_docs)] +#[derive(Debug, Fail, Clone, Copy)] +#[fail(display = "Previous operation modifying call stack is still in progress.")] +pub struct SyncingStack; + // ==================== // === Notification === @@ -92,6 +100,11 @@ pub struct Handle { /// The publisher allowing sending notification to subscribed entities. Note that its outputs /// is merged with publishers from the stored graph and execution controllers. notifier: notification::Publisher, + /// A mutex guarding a process syncing Execution Context stack with the current graph. As + /// the syncing requires multiple async calls to the engine, and the stack updates depend on + /// each other, we should not mix various operations (e.g. entering node while still in + /// process of entering another node). + syncing_stack: Rc>, } impl Handle { @@ -122,9 +135,13 @@ impl Handle { project: model::Project, execution_ctx: model::ExecutionContext, ) -> Self { - let graph = Rc::new(RefCell::new(graph)); - let notifier = default(); - Handle { graph, execution_ctx, project, notifier } + Handle { + graph: Rc::new(RefCell::new(graph)), + execution_ctx, + project, + notifier: default(), + syncing_stack: default(), + } } /// See [`model::ExecutionContext::when_ready`]. 
@@ -229,27 +246,49 @@ impl Handle { pub async fn enter_stack(&self, stack: Vec) -> FallibleResult { if self.project.read_only() { Err(ReadOnly.into()) - } else { - let mut successful_calls = Vec::new(); - let result = stream::iter(stack) + } else if let Some(last_call) = stack.last() { + let _syncing = self.syncing_stack.try_lock().map_err(|_| SyncingStack)?; + // Before adding new items to stack, first make sure we're actually able to construct + // the graph controller. + let graph = controller::Graph::new_method(&self.project, &last_call.definition).await?; + let mut successful_calls = 0; + let result = stream::iter(stack.iter()) .then(|local_call| async { - debug!("Entering node {}.", local_call.call); + info!("Entering node {}.", local_call.call); self.execution_ctx.push(local_call.clone()).await?; - Ok(local_call) + Ok(()) }) - .map_ok(|local_call| successful_calls.push(local_call)) + .map_ok(|()| successful_calls += 1) .try_collect::<()>() .await; - if let Some(last_successful_call) = successful_calls.last() { - let graph = - controller::Graph::new_method(&self.project, &last_successful_call.definition) - .await?; - debug!("Replacing graph with {graph:?}."); - self.graph.replace(graph); - debug!("Sending graph invalidation signal."); - self.notifier.publish(Notification::EnteredStack(successful_calls)).await; + match &result { + Ok(()) => { + info!("Replacing graph with {graph:?}."); + self.graph.replace(graph); + info!("Sending graph invalidation signal."); + self.notifier.publish(Notification::EnteredStack(stack)).await; + } + Err(_) => { + let successful_calls_to_revert = iter::repeat(()).take(successful_calls); + for () in successful_calls_to_revert { + let error_msg = "Error while restoring execution context stack after \ + unsuccessful entering node"; + let retry_result = retry_operation_errors_cap( + || self.execution_ctx.pop(), + self.retry_times_for_restoring_stack_operations(), + error_msg, + 0, + ) + .await; + 
debug_assert_matches!(FallibleResult::from(retry_result), Ok(_)); + } + } } + result + } else { + // The stack passed as argument is empty; nothing to do. + Ok(()) } } @@ -276,22 +315,46 @@ impl Handle { if self.project.read_only() { Err(ReadOnly.into()) } else { - let mut successful_pop_count = 0; - let result = stream::iter(0..frame_count) - .then(|_| self.execution_ctx.pop()) - .map_ok(|_| successful_pop_count += 1) + let method = self.execution_ctx.method_at_frame_back(frame_count)?; + let _syncing = self.syncing_stack.try_lock().map_err(|_| SyncingStack)?; + let graph = controller::Graph::new_method(&self.project, &method).await?; + + let mut successful_pops = Vec::new(); + let result = stream::iter(iter::repeat(()).take(frame_count)) + .then(|()| self.execution_ctx.pop()) + .map_ok(|local_call| successful_pops.push(local_call)) .try_collect::<()>() .await; - if successful_pop_count > 0 { - let method = self.execution_ctx.current_method(); - let graph = controller::Graph::new_method(&self.project, &method).await?; - self.graph.replace(graph); - self.notifier.publish(Notification::ExitedStack(successful_pop_count)).await; + match &result { + Ok(()) => { + self.graph.replace(graph); + self.notifier.publish(Notification::ExitedStack(frame_count)).await; + } + Err(_) => + for frame in successful_pops.into_iter().rev() { + let error_msg = "Error while restoring execution context stack after \ + unsuccessful leaving node"; + let retry_result = retry_operation_errors_cap( + || self.execution_ctx.push(frame.clone()), + self.retry_times_for_restoring_stack_operations(), + error_msg, + 0, + ) + .await; + debug_assert_matches!(FallibleResult::from(retry_result), Ok(_)); + }, } result } } + fn retry_times_for_restoring_stack_operations(&self) -> impl Iterator { + iter::repeat(()).scan(Duration::from_secs(1), |delay, ()| { + *delay = min(*delay * 2, Duration::from_secs(30)); + Some(*delay) + }) + } + /// Interrupt the program execution. 
pub async fn interrupt(&self) -> FallibleResult { self.execution_ctx.interrupt().await?; @@ -448,7 +511,6 @@ pub mod tests { use crate::test::mock::Fixture; use controller::graph::SpanTree; use engine_protocol::language_server::types::test::value_update_with_type; - use wasm_bindgen_test::wasm_bindgen_test; use wasm_bindgen_test::wasm_bindgen_test_configure; wasm_bindgen_test_configure!(run_in_browser); @@ -487,7 +549,7 @@ pub mod tests { } // Test that checks that value computed notification is properly relayed by the executed graph. - #[wasm_bindgen_test] + #[test] fn dispatching_value_computed_notification() { use crate::test::mock::Fixture; // Setup the controller. @@ -534,8 +596,9 @@ pub mod tests { notifications.expect_pending(); } + // Test that moving nodes is possible in read-only mode. - #[wasm_bindgen_test] + #[test] fn read_only_mode_does_not_restrict_moving_nodes() { use model::module::Position; @@ -555,7 +618,7 @@ pub mod tests { } // Test that certain actions are forbidden in read-only mode. 
- #[wasm_bindgen_test] + #[test] fn read_only_mode() { fn run(code: &str, f: impl FnOnce(&Handle)) { let mut data = crate::test::mock::Unified::new(); diff --git a/app/gui/src/ide/initializer.rs b/app/gui/src/ide/initializer.rs index 1f320877670f..ece7f37f50cf 100644 --- a/app/gui/src/ide/initializer.rs +++ b/app/gui/src/ide/initializer.rs @@ -4,11 +4,11 @@ use crate::prelude::*; use crate::config; use crate::ide::Ide; +use crate::retry::retry_operation; use crate::transport::web::WebSocket; use crate::FailedIde; use engine_protocol::project_manager; -use enso_web::sleep; use ensogl::application::Application; use std::time::Duration; @@ -99,22 +99,11 @@ impl Initializer { } async fn initialize_ide_controller_with_retries(&self) -> FallibleResult { - let mut retry_after = INITIALIZATION_RETRY_TIMES.iter(); - loop { - match self.initialize_ide_controller().await { - Ok(controller) => break Ok(controller), - Err(error) => { - error!("Failed to initialize controller: {error}"); - match retry_after.next() { - Some(time) => { - error!("Retrying after {} seconds", time.as_secs_f32()); - sleep(*time).await; - } - None => break Err(error), - } - } - } - } + let retry_times = INITIALIZATION_RETRY_TIMES.iter().copied(); + let error_message = "Failed to initialize controller."; + retry_operation(|| self.initialize_ide_controller(), retry_times, error_message) + .await + .into() } /// Initialize and return a new Ide Controller. 
diff --git a/app/gui/src/lib.rs b/app/gui/src/lib.rs index 491ba641382e..9b15f1d354f0 100644 --- a/app/gui/src/lib.rs +++ b/app/gui/src/lib.rs @@ -82,6 +82,7 @@ pub mod ide; pub mod integration_test; pub mod model; pub mod presenter; +pub mod retry; pub mod sync; pub mod test; pub mod transport; diff --git a/app/gui/src/model/execution_context.rs b/app/gui/src/model/execution_context.rs index bb106931fc62..aa7cea01b44f 100644 --- a/app/gui/src/model/execution_context.rs +++ b/app/gui/src/model/execution_context.rs @@ -414,6 +414,10 @@ pub trait API: Debug { /// Obtain the method pointer to the method of the call stack's top frame. fn current_method(&self) -> MethodPointer; + /// Obtain the method pointer to the method of the call `count` frames back from the stack's top + /// (calling with 0 is the same as [`current_method`](Self::current_method). + fn method_at_frame_back(&self, count: usize) -> FallibleResult; + /// Get the information about the given visualization. Fails, if there's no such visualization /// active. fn visualization_info(&self, id: VisualizationId) -> FallibleResult; diff --git a/app/gui/src/model/execution_context/plain.rs b/app/gui/src/model/execution_context/plain.rs index b0ee3823e82e..b83fc3e9c7a5 100644 --- a/app/gui/src/model/execution_context/plain.rs +++ b/app/gui/src/model/execution_context/plain.rs @@ -15,6 +15,7 @@ use engine_protocol::language_server::ExecutionEnvironment; use engine_protocol::language_server::MethodPointer; use engine_protocol::language_server::VisualisationConfiguration; use futures::future::LocalBoxFuture; +use std::cmp::Ordering; @@ -22,11 +23,22 @@ use futures::future::LocalBoxFuture; // === Errors === // ============== -/// Error then trying to pop stack item on ExecutionContext when there only root call remains. +/// Error when trying to pop stack item on ExecutionContext when there only root call remains. 
#[derive(Clone, Copy, Debug, Fail)] #[fail(display = "Tried to pop an entry point.")] pub struct PopOnEmptyStack(); +/// Error when trying to refer too much frames back. +#[derive(Clone, Copy, Debug, Fail)] +#[fail( + display = "Tried to get information from {} frames back, but stack has only {} frames.", + requested, actual +)] +pub struct TooManyFrames { + requested: usize, + actual: usize, +} + /// Error when using an Id that does not correspond to any known visualization. #[derive(Clone, Copy, Debug, Fail)] #[fail(display = "Tried to use incorrect visualization Id: {}.", _0)] @@ -176,6 +188,19 @@ impl model::execution_context::API for ExecutionContext { } } + fn method_at_frame_back(&self, count: usize) -> FallibleResult { + let stack = self.stack.borrow(); + match count.cmp(&stack.len()) { + Ordering::Less => { + let index = stack.len() - count - 1; + Ok(stack[index].definition.clone()) + } + Ordering::Equal => Ok(self.entry_point.borrow().clone()), + Ordering::Greater => + Err(TooManyFrames { requested: count, actual: stack.len() }.into()), + } + } + fn visualization_info(&self, id: VisualizationId) -> FallibleResult { let err = || InvalidVisualizationId(id).into(); self.visualizations.borrow_mut().get(&id).map(|v| v.visualization.clone()).ok_or_else(err) diff --git a/app/gui/src/model/execution_context/synchronized.rs b/app/gui/src/model/execution_context/synchronized.rs index 41d5dd58a2f6..52ff03d63a7a 100644 --- a/app/gui/src/model/execution_context/synchronized.rs +++ b/app/gui/src/model/execution_context/synchronized.rs @@ -171,6 +171,10 @@ impl model::execution_context::API for ExecutionContext { self.model.current_method() } + fn method_at_frame_back(&self, count: usize) -> FallibleResult { + self.model.method_at_frame_back(count) + } + fn visualization_info(&self, id: VisualizationId) -> FallibleResult { self.model.visualization_info(id) } diff --git a/app/gui/src/retry.rs b/app/gui/src/retry.rs new file mode 100644 index 000000000000..8e86b4cffde3 
--- /dev/null +++ b/app/gui/src/retry.rs @@ -0,0 +1,185 @@ +//! A utilities used for retrying various asynchronous operations. + +use crate::prelude::*; + +use enso_web::sleep; +use std::time::Duration; + + + +// ======================= +// === retry_operation === +// ======================= + +// === RetryResult === + +/// A result of `retry_operation` method. +/// +/// Similar to [`Result`] it returns a vector of all failures and it has additional variant +/// `OkAfterRetries` returning both result of successful call and all errors. It can be cast to +/// `Result` or `Result` with the first error from the list. +#[derive(Clone, Debug, Eq, PartialEq)] +#[must_use] +pub enum RetryResult { + /// The operation was successful without any retries. + Ok(T), + /// The operation succeeded at some retry. + OkAfterRetries(T, NonEmptyVec), + /// The operation and all retries failed. + Err(NonEmptyVec), +} + +impl From> for Result> { + fn from(value: RetryResult) -> Self { + match value { + RetryResult::Ok(value) => Ok(value), + RetryResult::OkAfterRetries(value, _) => Ok(value), + RetryResult::Err(errors) => Err(errors), + } + } +} + +impl From> for Result { + fn from(value: RetryResult) -> Self { + let result_with_vec: Result> = value.into(); + result_with_vec.map_err(|errors| errors.take_first()) + } +} + + +// === retry_operation === + +/// Run asynchronous operation retrying if not successful. +/// +/// This function runs the `operation` and, if it returned [`Err`] wait some time and try again. +/// +/// The waiting times are specified by `retry_times` argument. If the iterator yield no more +/// element, no retry is performed anymore and [`RetryResult::Err`] is returned. +/// +/// If operation was successful only after some retry, [`RetryResult::OkAfterRetries`] is returned. 
+pub async fn retry_operation( + operation: impl FnMut() -> Operation, + retry_times: impl IntoIterator, + message_on_failure: &str, +) -> RetryResult +where + Operation: Future>, + E: Display, +{ + retry_operation_errors_cap(operation, retry_times, message_on_failure, usize::MAX).await +} + + +// === retry_operation_errors_cap === + +/// Similar to [`retry_operation`] but the number of errors is capped, preventing making huge +/// vectors in cases when, for example, we retry something indefinitely. +/// +/// If cap is reached, the earlier errors will be kept and later will be discarded. +pub async fn retry_operation_errors_cap( + mut operation: impl FnMut() -> Operation, + retry_times: impl IntoIterator, + message_on_failure: &str, + errors_cap: usize, +) -> RetryResult +where + Operation: Future>, + E: Display, +{ + let result = operation().await; + result.log_err(message_on_failure); + match result { + Ok(result) => RetryResult::Ok(result), + Err(first_error) => { + let mut errors = NonEmptyVec::singleton(first_error); + let mut retry_times = retry_times.into_iter(); + loop { + match retry_times.next() { + Some(time) => { + error!("Retrying after {} seconds", time.as_secs_f32()); + sleep(time).await; + } + None => break RetryResult::Err(errors), + }; + let retry = operation().await; + retry.log_err(message_on_failure); + match retry { + Ok(result) => break RetryResult::OkAfterRetries(result, errors), + Err(error) if errors.len() < errors_cap => errors.push(error), + Err(_) => {} + } + } + } + } +} + + + +// ============ +// === Test === +// ============ + +#[cfg(test)] +pub mod tests { + use super::*; + + #[test] + fn successful_operation() { + let operation = || async { Ok(4) }; + let retry_times = iter::repeat_with(|| panic!("Should not ask for retry time.")); + let mut future = + retry_operation(operation, retry_times, "Test operation failed. 
This cannot happen.") + .boxed_local(); + let result: RetryResult<_, usize> = future.expect_ready(); + assert_eq!(result, RetryResult::Ok(4)); + } + + #[test] + fn operation_successful_after_retry() { + let mut call_index = 0; + let operation = move || { + call_index += 1; + async move { + if call_index >= 3 { + Ok(call_index) + } else { + Err(call_index) + } + } + }; + let retry_times = [10, 20, 30, 40, 50].into_iter().map(Duration::from_millis); + let mut future = + retry_operation(operation, retry_times, "Test operation failed.").boxed_local(); + future.expect_pending(); + std::thread::sleep(Duration::from_millis(10)); + future.expect_pending(); + std::thread::sleep(Duration::from_millis(30)); + let result = future.expect_ready(); + assert_eq!(result, RetryResult::OkAfterRetries(3, NonEmptyVec::new(1, vec![2]))); + } + + #[test] + fn operation_always_failing() { + let mut call_index = 0; + let operation = move || { + call_index += 1; + async move { Err(call_index) } + }; + let retry_times = [10, 20, 30, 40, 50].into_iter().map(Duration::from_millis); + let mut future = + retry_operation(operation, retry_times, "One does not simply walk into Mordor.") + .boxed_local(); + future.expect_pending(); + std::thread::sleep(Duration::from_millis(10)); + future.expect_pending(); + std::thread::sleep(Duration::from_millis(20)); + future.expect_pending(); + std::thread::sleep(Duration::from_millis(30)); + future.expect_pending(); + std::thread::sleep(Duration::from_millis(40)); + future.expect_pending(); + std::thread::sleep(Duration::from_millis(60)); + let result: RetryResult = future.expect_ready(); + assert_eq!(result, RetryResult::Err(NonEmptyVec::new(1, vec![2, 3, 4, 5, 6]))); + } +} diff --git a/lib/rust/prelude/src/data/non_empty_vec.rs b/lib/rust/prelude/src/data/non_empty_vec.rs index 2b4940c6258e..dccb95631ec9 100644 --- a/lib/rust/prelude/src/data/non_empty_vec.rs +++ b/lib/rust/prelude/src/data/non_empty_vec.rs @@ -258,6 +258,11 @@ impl NonEmptyVec { 
self.elems.into() } + /// Consume this non-empty vector and return it's first element. The rest will be dropped. + pub fn take_first(self) -> T { + self.elems.into_iter().next().unwrap_or_else(|| unreachable!()) + } + /// Consume this non-empty vector, map each element with a function, and produce a new one. pub fn mapped(self, f: impl FnMut(T) -> S) -> NonEmptyVec { let elems = self.elems.into_iter().map(f).collect(); diff --git a/lib/rust/prelude/src/lib.rs b/lib/rust/prelude/src/lib.rs index 78a266dd6236..62ff193a4967 100644 --- a/lib/rust/prelude/src/lib.rs +++ b/lib/rust/prelude/src/lib.rs @@ -47,6 +47,7 @@ mod smallvec; mod std_reexports; mod string; mod switch; +pub mod sync; mod test; mod tp; mod vec; diff --git a/lib/rust/prelude/src/result.rs b/lib/rust/prelude/src/result.rs index 096b5d686092..edced2692011 100644 --- a/lib/rust/prelude/src/result.rs +++ b/lib/rust/prelude/src/result.rs @@ -41,7 +41,7 @@ impl ResultOps for Result { fn log_err_fmt(&self, fmt: fmt::Arguments) where Self::Error: Display { if let Err(err) = self { - crate::error!("{}: {}", fmt, err) + crate::error!("{} {}", fmt, err) } } } diff --git a/lib/rust/prelude/src/sync.rs b/lib/rust/prelude/src/sync.rs new file mode 100644 index 000000000000..cfe3f7c73e34 --- /dev/null +++ b/lib/rust/prelude/src/sync.rs @@ -0,0 +1,115 @@ +//! A common utilities for synchronization many asynchronous tasks. + +use crate::*; + +use std::thread; + + +// ============== +// === Export === +// ============== + +pub use std::sync::PoisonError; +pub use std::sync::TryLockError; +pub use std::sync::TryLockResult; + + + +// ========================= +// === SingleThreadMutex === +// ========================= + +// === MutexGuard === + +/// A guard keeping the lock on a [`SingleThreadMutex`] and leaving it once dropped. 
+#[derive(Debug)] +pub struct MutexGuard<'a, T> { + mutex: &'a SingleThreadMutex, + data: RefMut<'a, T>, + created_when_panicking: bool, +} + +impl<'a, T> Deref for MutexGuard<'a, T> { + type Target = T; + + fn deref(&self) -> &Self::Target { + &self.data + } +} + +impl<'a, T> DerefMut for MutexGuard<'a, T> { + fn deref_mut(&mut self) -> &mut Self::Target { + &mut self.data + } +} + +impl<'a, T> MutexGuard<'a, T> { + fn new(mutex: &'a SingleThreadMutex, data: RefMut<'a, T>) -> Self { + Self { mutex, data, created_when_panicking: thread::panicking() } + } +} + +impl<'a, T> Drop for MutexGuard<'a, T> { + fn drop(&mut self) { + if !self.created_when_panicking && thread::panicking() { + self.mutex.poisoned.set(true); + } + } +} + + +// === SingleThreadMutex === + +/// A Mutex designed to be used in a single-thread environment. +/// +/// It is much simpler version of [`std::sync::Mutex`]. It does not have a blocking [`lock`] as it +/// would always result in a deadlock. +/// +/// # Poisoning +/// +/// When panic occurs while keeping lock on the mutex, it goes to "poisoned" state. It's implemented +/// the same way as in [`std::sync::Mutex`] - see its documentation for details. +#[derive(Debug, Default)] +pub struct SingleThreadMutex { + data: RefCell, + poisoned: Cell, +} + +impl SingleThreadMutex { + /// Create new mutex. + pub fn new(data: T) -> Self { + Self { data: RefCell::new(data), poisoned: default() } + } + + /// Try to lock mutex. Returns lock guard if succeeded, which unlocks the mutex on drop. 
+ pub fn try_lock(&self) -> TryLockResult> { + let data = self.data.try_borrow_mut().map_err(|_| TryLockError::WouldBlock)?; + let guard = MutexGuard::new(self, data); + if self.poisoned.get() { + Err(TryLockError::Poisoned(PoisonError::new(guard))) + } else { + Ok(guard) + } + } +} + + + +// ============= +// === Tests === +// ============= + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn locking() { + let mutex = SingleThreadMutex::new(13); + let lock = mutex.try_lock().expect("First lock should be successful"); + assert_eq!(*lock, 13); + mutex.try_lock().expect_err("Locking while already lock should fail"); + drop(lock); + mutex.try_lock().expect("Locking after dropping previous lock should succeed"); + } +} From 6929c03207643f7d5b2fe5e1c5eddaabd3d81d38 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Tue, 6 Jun 2023 12:07:00 +0100 Subject: [PATCH 25/39] Truncate the error message for to_uri. (#6963) `to_uri` was sending huge error messages, which crashed the IDE. --- .../lib/Standard/Base/0.0.0-dev/src/Network/URI.enso | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI.enso index 1e37e2e52457..1a362bbcd158 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/URI.enso @@ -39,9 +39,12 @@ type URI example_parse = URI.parse "http://example.com" parse : Text -> URI ! 
Syntax_Error - parse uri = + parse uri:Text = Panic.catch JException (URI.Value (Java_URI.create uri)) caught_panic-> - Error.throw (Syntax_Error.Error ("URI syntax error: " + caught_panic.payload.getMessage)) + message = caught_panic.payload.getMessage + truncated = if message.is_nothing || message.length > 100 then "Invalid URI '" + uri.to_display_text + "'" else + "URI syntax error: " + message + Error.throw (Syntax_Error.Error truncated) ## PRIVATE From 5cc21001b12c7cbc1185e086ae5c1d14d77bdd83 Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Tue, 6 Jun 2023 22:00:07 +1000 Subject: [PATCH 26/39] Save backend type in localStorage, and automatically open projects when ready (#6728) * Save backend type; fix `-startup.project` * Attempt to fix for cloud * Fix for cloud and implement automatic opening * Fixes * Add missing functionality * Switch default backend to local backend * Fix type error * Fix saving backend * Make loading message appear instantly * Fix context menu positioning * Fixes and QoL improvements * Style scrollbar on `document.body` * Fix `-startup.project`; other minor fixes * Open project immediately when creating from template; minor fix * Finally fix spinner bugs * Fix some minor bugs * Fix bugs when closing project * Disallow deleting local projects while they are still running * Close modals when buttons are clicked --- app/ide-desktop/eslint.config.js | 24 ++ app/ide-desktop/lib/content/src/index.ts | 17 +- .../components/registration.tsx | 2 + .../src/authentication/providers/auth.tsx | 6 +- .../src/authentication/src/components/app.tsx | 2 + .../authentication/src/dashboard/backend.ts | 1 + .../components/changePasswordModal.tsx | 2 + .../components/confirmDeleteModal.tsx | 45 ++- .../src/dashboard/components/contextMenu.tsx | 7 +- .../dashboard/components/contextMenuEntry.tsx | 4 +- .../src/dashboard/components/dashboard.tsx | 253 ++++++++++---- .../components/projectActionButton.tsx | 312 ++++++++++++++---- 
.../src/dashboard/components/renameModal.tsx | 44 ++- .../src/dashboard/components/templates.tsx | 12 +- .../src/dashboard/components/topBar.tsx | 16 +- .../src/dashboard/components/userMenu.tsx | 14 +- .../src/dashboard/localBackend.ts | 11 +- .../src/dashboard/validation.ts | 8 + .../src/authentication/src/hooks.tsx | 49 +++ .../authentication/src/providers/backend.tsx | 27 +- app/ide-desktop/lib/dashboard/src/index.tsx | 1 + .../lib/dashboard/src/tailwind.css | 30 +- .../lib/dashboard/tailwind.config.ts | 3 + app/ide-desktop/lib/dashboard/watch.ts | 1 + 24 files changed, 699 insertions(+), 192 deletions(-) diff --git a/app/ide-desktop/eslint.config.js b/app/ide-desktop/eslint.config.js index 42172a05dbc8..146654203945 100644 --- a/app/ide-desktop/eslint.config.js +++ b/app/ide-desktop/eslint.config.js @@ -433,6 +433,30 @@ export default [ 'no-undef': 'off', }, }, + { + files: [ + 'lib/dashboard/src/**/*.ts', + 'lib/dashboard/src/**/*.mts', + 'lib/dashboard/src/**/*.cts', + 'lib/dashboard/src/**/*.tsx', + 'lib/dashboard/src/**/*.mtsx', + 'lib/dashboard/src/**/*.ctsx', + ], + rules: { + 'no-restricted-properties': [ + 'error', + { + object: 'console', + message: 'Avoid leaving debugging statements when committing code', + }, + { + object: 'hooks', + property: 'useDebugState', + message: 'Avoid leaving debugging statements when committing code', + }, + ], + }, + }, { files: ['**/*.d.ts'], rules: { diff --git a/app/ide-desktop/lib/content/src/index.ts b/app/ide-desktop/lib/content/src/index.ts index b974e66e1229..b5aec160b6f2 100644 --- a/app/ide-desktop/lib/content/src/index.ts +++ b/app/ide-desktop/lib/content/src/index.ts @@ -219,13 +219,13 @@ class Main implements AppRunner { const isOpeningMainEntryPoint = contentConfig.OPTIONS.groups.startup.options.entry.value === contentConfig.OPTIONS.groups.startup.options.entry.default - const isNotOpeningProject = - contentConfig.OPTIONS.groups.startup.options.project.value === '' - if ( - (isUsingAuthentication || 
isUsingNewDashboard) && - isOpeningMainEntryPoint && - isNotOpeningProject - ) { + // This MUST be removed as it would otherwise override the `startup.project` passed + // explicitly in `ide.tsx`. + if (isOpeningMainEntryPoint && url.searchParams.has('startup.project')) { + url.searchParams.delete('startup.project') + history.replaceState(null, '', url.toString()) + } + if ((isUsingAuthentication || isUsingNewDashboard) && isOpeningMainEntryPoint) { this.runAuthentication(isInAuthenticationFlow, inputConfig) } else { void this.runApp(inputConfig) @@ -235,6 +235,8 @@ class Main implements AppRunner { /** Begins the authentication UI flow. */ runAuthentication(isInAuthenticationFlow: boolean, inputConfig?: StringConfig) { + const initialProjectName = + contentConfig.OPTIONS.groups.startup.options.project.value || null /** TODO [NP]: https://github.com/enso-org/cloud-v2/issues/345 * `content` and `dashboard` packages **MUST BE MERGED INTO ONE**. The IDE * should only have one entry point. Right now, we have two. 
One for the cloud @@ -250,6 +252,7 @@ class Main implements AppRunner { supportsLocalBackend: SUPPORTS_LOCAL_BACKEND, supportsDeepLinks: SUPPORTS_DEEP_LINKS, showDashboard: contentConfig.OPTIONS.groups.featurePreview.options.newDashboard.value, + initialProjectName, onAuthenticated: () => { if (isInAuthenticationFlow) { const initialUrl = localStorage.getItem(INITIAL_URL_KEY) diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/registration.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/registration.tsx index 527074fd132b..b343ba68b31c 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/registration.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/registration.tsx @@ -81,6 +81,7 @@ function Registration() { { setInitialized(true) setUserSession(OFFLINE_USER_SESSION) - setBackend(new localBackend.LocalBackend()) + setBackendWithoutSavingType(new localBackend.LocalBackend()) } const goOffline = () => { diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx index f5a6925764cd..7159aa59ee31 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx @@ -89,6 +89,8 @@ export interface AppProps { supportsDeepLinks: boolean /** Whether the dashboard should be rendered. */ showDashboard: boolean + /** The name of the project to open on startup, if any. 
*/ + initialProjectName: string | null onAuthenticated: () => void appRunner: AppRunner } diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/backend.ts b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/backend.ts index 296366f63593..3dfebe2c9684 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/backend.ts +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/backend.ts @@ -1,4 +1,5 @@ /** @file Type definitions common between all backends. */ + import * as dateTime from './dateTime' import * as newtype from '../newtype' diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/changePasswordModal.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/changePasswordModal.tsx index 78fbf6552bb3..34f76bb59293 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/changePasswordModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/changePasswordModal.tsx @@ -72,6 +72,8 @@ function ChangePasswordModal() { type="password" name="old_password" placeholder="Old Password" + pattern={validation.PREVIOUS_PASSWORD_PATTERN} + title={validation.PREVIOUS_PASSWORD_TITLE} value={oldPassword} setValue={setOldPassword} className="text-sm sm:text-base placeholder-gray-500 pl-10 pr-4 rounded-lg border border-gray-400 w-full py-2 focus:outline-none focus:border-blue-400" diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/confirmDeleteModal.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/confirmDeleteModal.tsx index 14362dc14230..8952304f4cbb 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/confirmDeleteModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/confirmDeleteModal.tsx @@ -1,4 +1,5 @@ /** @file Modal for confirming delete of any type of 
asset. */ +import * as react from 'react' import toast from 'react-hot-toast' import * as modalProvider from '../../providers/modal' @@ -23,14 +24,23 @@ function ConfirmDeleteModal(props: ConfirmDeleteModalProps) { const { assetType, name, doDelete, onSuccess } = props const { unsetModal } = modalProvider.useSetModal() + const [isSubmitting, setIsSubmitting] = react.useState(false) + const onSubmit = async () => { - unsetModal() - await toast.promise(doDelete(), { - loading: `Deleting ${assetType}...`, - success: `Deleted ${assetType}.`, - error: `Could not delete ${assetType}.`, - }) - onSuccess() + if (!isSubmitting) { + try { + setIsSubmitting(true) + await toast.promise(doDelete(), { + loading: `Deleting ${assetType}...`, + success: `Deleted ${assetType}.`, + error: `Could not delete ${assetType}.`, + }) + unsetModal() + onSuccess() + } finally { + setIsSubmitting(false) + } + } } return ( @@ -52,18 +62,25 @@ function ConfirmDeleteModal(props: ConfirmDeleteModalProps) { Are you sure you want to delete the {assetType} '{name}'?
-
Delete -
-
+
+
diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/contextMenu.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/contextMenu.tsx index fb7a254cb60c..d65a1db64b31 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/contextMenu.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/contextMenu.tsx @@ -25,9 +25,13 @@ export interface ContextMenuProps { function ContextMenu(props: react.PropsWithChildren) { const { children, event } = props const contextMenuRef = react.useRef(null) + const [top, setTop] = react.useState(event.pageY) + // This must be the original height before the returned element affects the `scrollHeight`. + const [bodyHeight] = react.useState(document.body.scrollHeight) react.useEffect(() => { if (contextMenuRef.current != null) { + setTop(Math.min(top, bodyHeight - contextMenuRef.current.clientHeight)) const boundingBox = contextMenuRef.current.getBoundingClientRect() const scrollBy = boundingBox.bottom - innerHeight + SCROLL_MARGIN if (scrollBy > 0) { @@ -39,7 +43,8 @@ function ContextMenu(props: react.PropsWithChildren) { return (
{children} diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/contextMenuEntry.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/contextMenuEntry.tsx index c45834a92779..0dab37ba088f 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/contextMenuEntry.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/contextMenuEntry.tsx @@ -8,16 +8,18 @@ import * as react from 'react' /** Props for a {@link ContextMenuEntry}. */ export interface ContextMenuEntryProps { disabled?: boolean + title?: string onClick: (event: react.MouseEvent) => void } // This component MUST NOT use `useState` because it is not rendered directly. /** An item in a `ContextMenu`. */ function ContextMenuEntry(props: react.PropsWithChildren) { - const { children, disabled, onClick } = props + const { children, disabled, title, onClick } = props return (
- +
{columnsFor(columnDisplayMode, backend.type).map(column => ( @@ -948,7 +1060,7 @@ function Dashboard(props: DashboardProps) { > items={visibleProjectAssets} - getKey={proj => proj.id} + getKey={projectAsset => projectAsset.id} isLoading={isLoadingAssets} placeholder={ @@ -979,8 +1091,11 @@ function Dashboard(props: DashboardProps) { event.preventDefault() event.stopPropagation() const doOpenForEditing = () => { - // FIXME[sb]: Switch to IDE tab - // once merged with `show-and-open-workspace` branch. + unsetModal() + setProjectEvent({ + type: projectActionButton.ProjectEventType.open, + projectId: projectAsset.id, + }) } const doOpenAsFolder = () => { // FIXME[sb]: Uncomment once backend support @@ -1031,9 +1146,12 @@ function Dashboard(props: DashboardProps) { /> )) } + const isDisabled = + backend.type === backendModule.BackendType.local && + (projectDatas[projectAsset.id]?.isRunning ?? false) setModal(() => ( - - + + Open for editing {backend.type !== backendModule.BackendType.local && ( @@ -1044,7 +1162,15 @@ function Dashboard(props: DashboardProps) { Rename - + Delete @@ -1059,7 +1185,7 @@ function Dashboard(props: DashboardProps) { backendModule.Asset > items={visibleDirectoryAssets} - getKey={dir => dir.id} + getKey={directoryAsset => directoryAsset.id} isLoading={isLoadingAssets} placeholder={ @@ -1091,11 +1217,14 @@ function Dashboard(props: DashboardProps) { : [directoryAsset] ) }} - onContextMenu={(_directory, event) => { + onContextMenu={(directoryAsset, event) => { event.preventDefault() event.stopPropagation() setModal(() => ( - + )) }} /> @@ -1153,7 +1282,7 @@ function Dashboard(props: DashboardProps) { )) } setModal(() => ( - + Delete @@ -1224,7 +1353,7 @@ function Dashboard(props: DashboardProps) { /** TODO: Wait for backend endpoint. 
*/ } setModal(() => ( - + Copy diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectActionButton.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectActionButton.tsx index 5bd037805746..5551bb5d7cde 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectActionButton.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectActionButton.tsx @@ -1,15 +1,51 @@ /** @file An interactive button displaying the status of a project. */ import * as react from 'react' +import toast from 'react-hot-toast' import * as backendModule from '../backend' import * as backendProvider from '../../providers/backend' import * as localBackend from '../localBackend' +import * as modalProvider from '../../providers/modal' import * as svg from '../../components/svg' // ============= // === Types === // ============= +/** Data associated with a project, used for rendering. + * FIXME[sb]: This is a hack that is required because each row does not carry its own extra state. + * It will be obsoleted by the implementation in https://github.com/enso-org/enso/pull/6546. */ +export interface ProjectData { + isRunning: boolean +} + +/** Possible types of project state change. */ +export enum ProjectEventType { + open = 'open', + cancelOpeningAll = 'cancelOpeningAll', +} + +/** Properties common to all project state change events. */ +interface ProjectBaseEvent { + type: Type +} + +/** Requests the specified project to be opened. */ +export interface ProjectOpenEvent extends ProjectBaseEvent { + // FIXME: provide projectId instead + /** This must be a name because it may be specified by name on the command line. + * Note that this will not work properly with the cloud backend if there are multiple projects + * with the same name. */ + projectId: backendModule.ProjectId +} + +/** Requests the specified project to be opened. 
*/ +export interface ProjectCancelOpeningAllEvent + extends ProjectBaseEvent {} + +/** Every possible type of project event. */ +export type ProjectEvent = ProjectCancelOpeningAllEvent | ProjectOpenEvent + /** The state of the spinner. It should go from initial, to loading, to done. */ enum SpinnerState { initial = 'initial', @@ -21,10 +57,26 @@ enum SpinnerState { // === Constants === // ================= +/** The default {@link ProjectData} associated with a {@link backendModule.Project}. */ +export const DEFAULT_PROJECT_DATA: ProjectData = Object.freeze({ + isRunning: false, +}) +const LOADING_MESSAGE = + 'Your environment is being created. It will take some time, please be patient.' /** The interval between requests checking whether the IDE is ready. */ const CHECK_STATUS_INTERVAL_MS = 5000 /** The interval between requests checking whether the VM is ready. */ const CHECK_RESOURCES_INTERVAL_MS = 1000 +/** The fallback project state, when it is set to `null` before it is first set. */ +const DEFAULT_PROJECT_STATE = backendModule.ProjectState.created +/** The corresponding {@link SpinnerState} for each {@link backendModule.ProjectState}. */ +const SPINNER_STATE: Record = { + [backendModule.ProjectState.closed]: SpinnerState.initial, + [backendModule.ProjectState.created]: SpinnerState.initial, + [backendModule.ProjectState.new]: SpinnerState.initial, + [backendModule.ProjectState.openInProgress]: SpinnerState.loading, + [backendModule.ProjectState.opened]: SpinnerState.done, +} const SPINNER_CSS_CLASSES: Record = { [SpinnerState.initial]: 'dasharray-5 ease-linear', @@ -39,7 +91,12 @@ const SPINNER_CSS_CLASSES: Record = { /** Props for a {@link ProjectActionButton}. */ export interface ProjectActionButtonProps { project: backendModule.Asset + projectData: ProjectData + setProjectData: react.Dispatch> appRunner: AppRunner | null + event: ProjectEvent | null + /** Called when the project is opened via the {@link ProjectActionButton}. 
*/ + doOpenManually: () => void onClose: () => void openIde: () => void doRefresh: () => void @@ -47,63 +104,139 @@ export interface ProjectActionButtonProps { /** An interactive button displaying the status of a project. */ function ProjectActionButton(props: ProjectActionButtonProps) { - const { project, onClose, appRunner, openIde, doRefresh } = props + const { + project, + setProjectData, + event, + appRunner, + doOpenManually, + onClose, + openIde, + doRefresh, + } = props const { backend } = backendProvider.useBackend() + const { unsetModal } = modalProvider.useSetModal() - const [state, setState] = react.useState(backendModule.ProjectState.created) + const [state, setState] = react.useState(null) const [isCheckingStatus, setIsCheckingStatus] = react.useState(false) const [isCheckingResources, setIsCheckingResources] = react.useState(false) - const [spinnerState, setSpinnerState] = react.useState(SpinnerState.done) + const [spinnerState, setSpinnerState] = react.useState(SpinnerState.initial) + const [shouldOpenWhenReady, setShouldOpenWhenReady] = react.useState(false) + const [toastId, setToastId] = react.useState(null) + + react.useEffect(() => { + if (toastId != null) { + return () => { + toast.dismiss(toastId) + } + } else { + return + } + }, [toastId]) + + react.useEffect(() => { + // Ensure that the previous spinner state is visible for at least one frame. + requestAnimationFrame(() => { + setSpinnerState(SPINNER_STATE[state ?? 
DEFAULT_PROJECT_STATE]) + }) + }, [state]) + + react.useEffect(() => { + if (toastId != null && state !== backendModule.ProjectState.openInProgress) { + toast.dismiss(toastId) + } + }, [state]) react.useEffect(() => { switch (project.projectState.type) { case backendModule.ProjectState.opened: setState(backendModule.ProjectState.openInProgress) - setSpinnerState(SpinnerState.initial) setIsCheckingResources(true) break case backendModule.ProjectState.openInProgress: setState(backendModule.ProjectState.openInProgress) - setSpinnerState(SpinnerState.initial) setIsCheckingStatus(true) break default: - setState(project.projectState.type) + // Some functions below set the state to something different to + // the backend state. In that case, the state should not be overridden. + setState(oldState => oldState ?? project.projectState.type) break } }, []) react.useEffect(() => { - if (backend.type === backendModule.BackendType.local) { - if (project.id !== localBackend.LocalBackend.currentlyOpeningProjectId) { - setIsCheckingResources(false) - setIsCheckingStatus(false) - setState(backendModule.ProjectState.closed) - setSpinnerState(SpinnerState.done) + if (event != null) { + switch (event.type) { + case ProjectEventType.open: { + if (event.projectId !== project.id) { + setShouldOpenWhenReady(false) + } else { + setShouldOpenWhenReady(true) + void openProject() + } + break + } + case ProjectEventType.cancelOpeningAll: { + setShouldOpenWhenReady(false) + } } } + }, [event]) + + react.useEffect(() => { + if (shouldOpenWhenReady && state === backendModule.ProjectState.opened) { + openIde() + setShouldOpenWhenReady(false) + } + }, [shouldOpenWhenReady, state]) + + react.useEffect(() => { + if ( + backend.type === backendModule.BackendType.local && + project.id !== localBackend.LocalBackend.currentlyOpeningProjectId + ) { + setState(backendModule.ProjectState.closed) + } }, [project, state, localBackend.LocalBackend.currentlyOpeningProjectId]) react.useEffect(() => { if 
(!isCheckingStatus) { return } else { + let handle: number | null = null + let continuePolling = true + let previousTimestamp = 0 const checkProjectStatus = async () => { - const response = await backend.getProjectDetails(project.id) - if (response.state.type === backendModule.ProjectState.opened) { - setIsCheckingStatus(false) - setIsCheckingResources(true) - } else { - setState(response.state.type) + try { + const response = await backend.getProjectDetails(project.id) + handle = null + if ( + continuePolling && + response.state.type === backendModule.ProjectState.opened + ) { + continuePolling = false + setIsCheckingStatus(false) + setIsCheckingResources(true) + } + } finally { + if (continuePolling) { + const nowTimestamp = Number(new Date()) + const delay = CHECK_STATUS_INTERVAL_MS - (nowTimestamp - previousTimestamp) + previousTimestamp = nowTimestamp + handle = window.setTimeout( + () => void checkProjectStatus(), + Math.max(0, delay) + ) + } } } - const handle = window.setInterval( - () => void checkProjectStatus(), - CHECK_STATUS_INTERVAL_MS - ) void checkProjectStatus() return () => { - clearInterval(handle) + continuePolling = false + if (handle != null) { + clearTimeout(handle) + } } } }, [isCheckingStatus]) @@ -112,85 +245,144 @@ function ProjectActionButton(props: ProjectActionButtonProps) { if (!isCheckingResources) { return } else { + let handle: number | null = null + let continuePolling = true + let previousTimestamp = 0 const checkProjectResources = async () => { - if (!('checkResources' in backend)) { + if (backend.type === backendModule.BackendType.local) { setState(backendModule.ProjectState.opened) setIsCheckingResources(false) - setSpinnerState(SpinnerState.done) } else { try { // This call will error if the VM is not ready yet. 
await backend.checkResources(project.id) - setState(backendModule.ProjectState.opened) - setIsCheckingResources(false) - setSpinnerState(SpinnerState.done) + handle = null + if (continuePolling) { + continuePolling = false + setState(backendModule.ProjectState.opened) + setIsCheckingResources(false) + } } catch { - // Ignored. + if (continuePolling) { + const nowTimestamp = Number(new Date()) + const delay = + CHECK_RESOURCES_INTERVAL_MS - (nowTimestamp - previousTimestamp) + previousTimestamp = nowTimestamp + handle = window.setTimeout( + () => void checkProjectResources(), + Math.max(0, delay) + ) + } } } } - const handle = window.setInterval( - () => void checkProjectResources(), - CHECK_RESOURCES_INTERVAL_MS - ) void checkProjectResources() return () => { - clearInterval(handle) + continuePolling = false + if (handle != null) { + clearTimeout(handle) + } } } }, [isCheckingResources]) - const closeProject = () => { + const closeProject = async () => { + onClose() + setShouldOpenWhenReady(false) setState(backendModule.ProjectState.closed) appRunner?.stopApp() - void backend.closeProject(project.id) setIsCheckingStatus(false) setIsCheckingResources(false) - onClose() + try { + await backend.closeProject(project.id) + } finally { + // This is not 100% correct, but it is better than never setting `isRunning` to `false`, + // which would prevent the project from ever being deleted. + setProjectData(oldProjectData => ({ ...oldProjectData, isRunning: false })) + } } const openProject = async () => { setState(backendModule.ProjectState.openInProgress) - setSpinnerState(SpinnerState.initial) - // The `setTimeout` is required so that the completion percentage goes from - // the `initial` fraction to the `loading` fraction, - // rather than starting at the `loading` fraction. 
- setTimeout(() => { - setSpinnerState(SpinnerState.loading) - }, 0) - switch (backend.type) { - case backendModule.BackendType.remote: - await backend.openProject(project.id) - doRefresh() - setIsCheckingStatus(true) - break - case backendModule.BackendType.local: - await backend.openProject(project.id) - doRefresh() - setState(backendModule.ProjectState.opened) - setSpinnerState(SpinnerState.done) - break + try { + switch (backend.type) { + case backendModule.BackendType.remote: + setToastId(toast.loading(LOADING_MESSAGE)) + await backend.openProject(project.id) + setProjectData(oldProjectData => ({ ...oldProjectData, isRunning: true })) + doRefresh() + setIsCheckingStatus(true) + break + case backendModule.BackendType.local: + await backend.openProject(project.id) + setProjectData(oldProjectData => ({ ...oldProjectData, isRunning: true })) + setState(oldState => { + if (oldState === backendModule.ProjectState.openInProgress) { + doRefresh() + return backendModule.ProjectState.opened + } else { + return oldState + } + }) + break + } + } catch { + setIsCheckingStatus(false) + setIsCheckingResources(false) + toast.error(`Error opening project '${project.title}'.`) + setState(backendModule.ProjectState.closed) } } switch (state) { + case null: case backendModule.ProjectState.created: case backendModule.ProjectState.new: case backendModule.ProjectState.closed: - return + return ( + + ) case backendModule.ProjectState.openInProgress: return ( - ) case backendModule.ProjectState.opened: return ( <> - - + ) } diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/renameModal.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/renameModal.tsx index e27bd6362e42..b3d2600a1d72 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/renameModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/renameModal.tsx @@ -27,22 +27,28 @@ function 
RenameModal(props: RenameModalProps) { const { assetType, name, namePattern, title, doRename, onSuccess } = props const { unsetModal } = modalProvider.useSetModal() + const [isSubmitting, setIsSubmitting] = react.useState(false) const [newName, setNewName] = react.useState(null) const onSubmit = async (event: React.FormEvent) => { event.preventDefault() if (newName == null) { toast.error('Please provide a new name.') - } else { - unsetModal() - await toast.promise(doRename(newName), { - loading: `Renaming ${assetType}...`, - success: `Renamed ${assetType}.`, - // This is UNSAFE, as the original function's parameter is of type `any`. - error: (promiseError: Error) => - `Error renaming ${assetType}: ${promiseError.message}`, - }) - onSuccess() + } else if (!isSubmitting) { + try { + setIsSubmitting(true) + await toast.promise(doRename(newName), { + loading: `Renaming ${assetType}...`, + success: `Renamed ${assetType}.`, + // This is UNSAFE, as the original function's parameter is of type `any`. + error: (promiseError: Error) => + `Error renaming ${assetType}: ${promiseError.message}`, + }) + unsetModal() + onSuccess() + } finally { + setIsSubmitting(false) + } } } @@ -65,9 +71,10 @@ function RenameModal(props: RenameModalProps) { -
Cancel -
+ diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx index ecec1468da77..57a340db61cd 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx @@ -74,7 +74,7 @@ function TemplatesRender(props: TemplatesRenderProps) { onClick={() => { onTemplateClick(null) }} - className="h-40 cursor-pointer" + className="h-40 w-60 cursor-pointer" >
@@ -91,7 +91,7 @@ function TemplatesRender(props: TemplatesRenderProps) { {templates.map(template => (