From 65273124868e9e16f6a6de146d716d314a7c1081 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Thu, 4 May 2023 14:54:28 +0200 Subject: [PATCH 01/28] Cache result of slow function resolution on Any that is present on a hot path (#6536) Otherwise things can go horribly slow. Closes #6523. Follow up on https://github.com/enso-org/enso/pull/6441. --- .../node/callable/InvokeMethodNode.java | 54 +++++++++++++++---- 1 file changed, 45 insertions(+), 9 deletions(-) diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java index 670882debfa3..79b74ab577d1 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java @@ -53,6 +53,7 @@ @ImportStatic({HostMethodCallNode.PolyglotCallType.class, HostMethodCallNode.class}) public abstract class InvokeMethodNode extends BaseNode { + protected static final int CACHE_SIZE = 10; private @Child InvokeFunctionNode invokeFunctionNode; private final ConditionProfile errorReceiverProfile = ConditionProfile.createCountingProfile(); private @Child InvokeMethodNode childDispatch; @@ -104,30 +105,46 @@ public void setTailStatus(TailStatus tailStatus) { public abstract Object execute( VirtualFrame frame, State state, UnresolvedSymbol symbol, Object self, Object[] arguments); - @Specialization(guards = {"typesLibrary.hasType(self)", "!typesLibrary.hasSpecialDispatch(self)"}) - Object doFunctionalDispatch( + @Specialization(guards = { + "typesLibrary.hasType(self)", + "!typesLibrary.hasSpecialDispatch(self)", + "cachedSymbol == symbol", + "cachedSelfTpe == typesLibrary.getType(self)", + "function != null" + }, limit = "CACHE_SIZE") + Object doFunctionalDispatchCachedSymbol( VirtualFrame frame, State state, UnresolvedSymbol symbol, Object self, Object[] arguments, @CachedLibrary(limit 
= "10") TypesLibrary typesLibrary, - @Cached MethodResolverNode methodResolverNode) { + @Cached MethodResolverNode methodResolverNode, + @Cached("symbol") UnresolvedSymbol cachedSymbol, + @Cached("typesLibrary.getType(self)") Type cachedSelfTpe, + @Cached("resolveFunction(cachedSymbol, cachedSelfTpe, methodResolverNode)") Function function) { + return invokeFunctionNode.execute(function, frame, state, arguments); + } - Type selfTpe = typesLibrary.getType(self); - Function function = methodResolverNode.expectNonNull(self, selfTpe, symbol); + Function resolveFunction(UnresolvedSymbol symbol, Type selfTpe, MethodResolverNode methodResolverNode) { + Function function = methodResolverNode.execute(selfTpe, symbol); + if (function == null) { + return null; + } RootNode where = function.getCallTarget().getRootNode(); // If both Any and the type where `function` is declared, define `symbol` // and the method is invoked statically, i.e. type of self is the eigentype, // then we want to disambiguate method resolution by always resolved to the one in Any. 
- if (where instanceof MethodRootNode node && typeCanOverride(node, EnsoContext.get(this))) { - Function anyFun = symbol.getScope().lookupMethodDefinition(EnsoContext.get(this).getBuiltins().any(), symbol.getName()); + EnsoContext ctx = EnsoContext.get(this); + if (where instanceof MethodRootNode node && typeCanOverride(node, ctx)) { + Type any = ctx.getBuiltins().any(); + Function anyFun = symbol.getScope().lookupMethodDefinition(any, symbol.getName()); if (anyFun != null) { function = anyFun; } } - return invokeFunctionNode.execute(function, frame, state, arguments); + return function; } private boolean typeCanOverride(MethodRootNode node, EnsoContext ctx) { @@ -137,13 +154,32 @@ private boolean typeCanOverride(MethodRootNode node, EnsoContext ctx) { Type warning = builtins.warning(); Type panic = builtins.panic(); return methodOwnerType.isEigenType() - && builtins.nothing() != methodOwnerType && any.getEigentype() != methodOwnerType && panic.getEigentype() != methodOwnerType && warning.getEigentype() != methodOwnerType; } + @Specialization( + replaces = "doFunctionalDispatchCachedSymbol", + guards = {"typesLibrary.hasType(self)", "!typesLibrary.hasSpecialDispatch(self)"}) + Object doFunctionalDispatchUncachedSymbol( + VirtualFrame frame, + State state, + UnresolvedSymbol symbol, + Object self, + Object[] arguments, + @CachedLibrary(limit = "10") TypesLibrary typesLibrary, + @Cached MethodResolverNode methodResolverNode) { + Type selfTpe = typesLibrary.getType(self); + Function function = resolveFunction(symbol, selfTpe, methodResolverNode); + if (function == null) { + throw new PanicException( + EnsoContext.get(this).getBuiltins().error().makeNoSuchMethod(self, symbol), this); + } + return invokeFunctionNode.execute(function, frame, state, arguments); + } + @Specialization Object doDataflowError( VirtualFrame frame, From a832c5e2bb03daea87abcbe7101fd9bce0e2fcf7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Wawrzyniec=20Urba=C5=84czyk?= Date: Thu, 4 May 
2023 15:48:33 +0200 Subject: [PATCH 02/28] Build nightly 3 hours earlier. (#6551) --- .github/workflows/nightly.yml | 2 +- build/build/src/ci_gen.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index d15ffb8d1185..cc9bce064998 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -4,7 +4,7 @@ name: Nightly Release on: schedule: - - cron: 0 5 * * 2-6 + - cron: 0 2 * * 2-6 workflow_dispatch: {} jobs: promote-nightly: diff --git a/build/build/src/ci_gen.rs b/build/build/src/ci_gen.rs index 089834e3ce1b..57ee9ffa733a 100644 --- a/build/build/src/ci_gen.rs +++ b/build/build/src/ci_gen.rs @@ -293,8 +293,8 @@ pub fn changelog() -> Result { pub fn nightly() -> Result { let on = Event { workflow_dispatch: Some(default()), - // 5am (UTC) from Tuesday to Saturday (i.e. after every workday) - schedule: vec![Schedule::new("0 5 * * 2-6")?], + // 2am (UTC) from Tuesday to Saturday (i.e. after every workday) + schedule: vec![Schedule::new("0 2 * * 2-6")?], ..default() }; From 41a8257e8da267274fb9c48cbd24763b2dff699d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Wa=C5=9Bko?= Date: Thu, 4 May 2023 19:36:51 +0200 Subject: [PATCH 03/28] Separating Redshift connector from `Database` library into a new `AWS` library (#6550) Related to #5777 --- CHANGELOG.md | 2 + build.sbt | 54 ++++++++++-- .../NOTICES | 12 +-- .../Standard/AWS/0.0.0-dev/THIRD-PARTY/NOTICE | 77 ++++++++++++++++++ .../LICENSE | 0 .../NOTICES | 0 .../NOTICES | 0 .../NOTICES | 0 .../NOTICES | 0 .../NOTICES | 0 .../LICENSE | 0 .../LICENSE | 0 .../NOTICE | 0 .../NOTICES | 0 .../LICENSE | 0 .../CREDITS-2.x.txt | 0 .../NOTICE.txt | 0 .../commons-codec.commons-codec-1.15/NOTICES | 0 .../NOTICE.txt | 0 .../NOTICES | 0 .../joda-time.joda-time-2.8.1/LICENSE.txt | 0 .../joda-time.joda-time-2.8.1/NOTICE.txt | 0 .../joda-time.joda-time-2.8.1/NOTICES | 0 .../0.0.0-dev/THIRD-PARTY/licenses/APACHE2.0 | 0 
.../NOTICE | 0 .../NOTICE | 0 .../NOTICES | 0 .../lib/Standard/AWS/0.0.0-dev/package.yaml | 10 +++ .../Redshift/Internal}/Redshift_Dialect.enso | 51 ++++++------ .../Internal}/Redshift_Error_Mapper.enso | 2 +- .../Database/Redshift/Redshift_Details.enso} | 24 +++--- .../lib/Standard/AWS/0.0.0-dev/src/Main.enso | 7 ++ .../Database/0.0.0-dev/THIRD-PARTY/NOTICE | 75 ----------------- .../src/Connection/Connection_Details.enso | 23 ++++++ .../src/Connection/Connection_Options.enso | 4 +- .../0.0.0-dev/src/Connection/Database.enso | 28 +++++-- ...res_Options.enso => Postgres_Details.enso} | 4 +- ...QLite_Options.enso => SQLite_Details.enso} | 2 +- .../src/Connection/SQLite_Format.enso | 4 +- .../Database/0.0.0-dev/src/Data/Dialect.enso | 7 -- .../Standard/Database/0.0.0-dev/src/Main.enso | 26 +++--- .../src/main/resources/default/src/Main.enso | 1 + project/Distribution.scala | 4 +- project/Editions.scala | 1 + project/plugins.sbt | 2 +- .../licenses/DependencyInformation.scala | 2 +- .../licenses/DistributionDescription.scala | 2 +- .../licenses/frontend/DependencyFilter.scala | 2 +- .../scala/licenses/frontend/SbtLicenses.scala | 2 +- .../RedshiftConnectionDetailsSPI.java | 26 ++++++ .../DatabaseConnectionDetailsSPI.java | 41 ++++++++++ .../PostgresConnectionDetailsSPI.java | 24 ++++++ .../database/SQLiteConnectionDetailsSPI.java | 24 ++++++ .../src/Database/Redshift_Spec.enso | 4 +- .../System/Reporting_Stream_Decoder_Spec.enso | 6 +- .../src/Widgets/Database_Widgets_Spec.enso | 13 ++- .../copyright-ignore | Bin .../copyright-keep | 0 .../custom-license | 0 .../files-keep | 0 .../copyright-ignore | 0 .../copyright-keep | 0 .../copyright-keep | 0 .../copyright-ignore | 0 .../copyright-keep | 0 .../copyright-keep | 0 .../custom-license | 0 .../files-keep | 0 .../copyright-keep | 0 .../custom-license | 0 .../files-keep | 0 .../custom-license | 0 .../files-ignore | 0 .../files-keep | 0 .../files-add/CREDITS-2.x.txt | 0 .../files-ignore | 2 + .../copyright-ignore | 0 
.../copyright-keep | 0 .../files-ignore | 0 .../files-keep | 0 .../copyright-keep | 0 .../files-ignore | 0 .../files-keep | 0 .../joda-time.joda-time-2.8.1/copyright-keep | 0 .../joda-time.joda-time-2.8.1/custom-license | 0 .../joda-time.joda-time-2.8.1/files-ignore | 0 .../joda-time.joda-time-2.8.1/files-keep | 0 .../copyright-ignore | 0 .../copyright-keep | 0 .../files-ignore | 0 .../files-keep | 0 .../copyright-ignore | 0 .../copyright-keep | 0 .../files-ignore | 0 .../files-keep | 0 tools/legal-review/AWS/report-state | 3 + .../reviewed-licenses/Apache_2 | 0 .../Apache_License__Version_2.0 | 0 .../The_Apache_License__Version_2.0 | 0 .../The_Apache_Software_License__Version_2.0 | 0 .../copyright-ignore | 0 .../copyright-keep | 0 .../files-ignore | 2 - tools/legal-review/Database/report-state | 4 +- .../Database/reviewed-licenses/BSD-2-Clause | 1 - .../Database/reviewed-licenses/BSD_License | 1 - .../files-ignore | 2 +- .../copyright-add | 1 + .../copyright-keep-context | 1 - tools/legal-review/engine/report-state | 2 +- .../files-ignore | 2 +- 111 files changed, 400 insertions(+), 187 deletions(-) create mode 100644 distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/NOTICE rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.amazon.redshift.redshift-jdbc42-2.1.0.9/LICENSE (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.amazon.redshift.redshift-jdbc42-2.1.0.9/NOTICES (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-core-1.12.273/NOTICES (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-redshift-1.12.273/NOTICES (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-sts-1.12.273/NOTICES (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.amazonaws.jmespath-java-1.12.273/NOTICES (100%) rename 
distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-annotations-2.12.6/LICENSE (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/LICENSE (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/NOTICE (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/NOTICES (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/LICENSE (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/CREDITS-2.x.txt (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/commons-codec.commons-codec-1.15/NOTICE.txt (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/commons-codec.commons-codec-1.15/NOTICES (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/commons-logging.commons-logging-1.1.3/NOTICE.txt (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/commons-logging.commons-logging-1.1.3/NOTICES (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/LICENSE.txt (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/NOTICE.txt (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/NOTICES (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/licenses/APACHE2.0 (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/org.apache.httpcomponents.httpclient-4.5.13/NOTICE (100%) rename distribution/lib/Standard/{Database => 
AWS}/0.0.0-dev/THIRD-PARTY/org.apache.httpcomponents.httpcore-4.4.13/NOTICE (100%) rename distribution/lib/Standard/{Database => AWS}/0.0.0-dev/THIRD-PARTY/software.amazon.ion.ion-java-1.0.2/NOTICES (100%) create mode 100644 distribution/lib/Standard/AWS/0.0.0-dev/package.yaml rename distribution/lib/Standard/{Database/0.0.0-dev/src/Internal/Redshift => AWS/0.0.0-dev/src/Database/Redshift/Internal}/Redshift_Dialect.enso (77%) rename distribution/lib/Standard/{Database/0.0.0-dev/src/Internal/Redshift => AWS/0.0.0-dev/src/Database/Redshift/Internal}/Redshift_Error_Mapper.enso (85%) rename distribution/lib/Standard/{Database/0.0.0-dev/src/Connection/Redshift_Options.enso => AWS/0.0.0-dev/src/Database/Redshift/Redshift_Details.enso} (83%) create mode 100644 distribution/lib/Standard/AWS/0.0.0-dev/src/Main.enso create mode 100644 distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Details.enso rename distribution/lib/Standard/Database/0.0.0-dev/src/Connection/{Postgres_Options.enso => Postgres_Details.enso} (98%) rename distribution/lib/Standard/Database/0.0.0-dev/src/Connection/{SQLite_Options.enso => SQLite_Details.enso} (98%) create mode 100644 std-bits/aws/src/main/java/org/enso/aws/database/RedshiftConnectionDetailsSPI.java create mode 100644 std-bits/database/src/main/java/org/enso/database/DatabaseConnectionDetailsSPI.java create mode 100644 std-bits/database/src/main/java/org/enso/database/PostgresConnectionDetailsSPI.java create mode 100644 std-bits/database/src/main/java/org/enso/database/SQLiteConnectionDetailsSPI.java rename tools/legal-review/{Database => AWS}/com.amazon.redshift.redshift-jdbc42-2.1.0.9/copyright-ignore (100%) rename tools/legal-review/{Database => AWS}/com.amazon.redshift.redshift-jdbc42-2.1.0.9/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/com.amazon.redshift.redshift-jdbc42-2.1.0.9/custom-license (100%) rename tools/legal-review/{Database => 
AWS}/com.amazon.redshift.redshift-jdbc42-2.1.0.9/files-keep (100%) rename tools/legal-review/{Database => AWS}/com.amazonaws.aws-java-sdk-core-1.12.273/copyright-ignore (100%) rename tools/legal-review/{Database => AWS}/com.amazonaws.aws-java-sdk-core-1.12.273/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/com.amazonaws.aws-java-sdk-redshift-1.12.273/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/com.amazonaws.aws-java-sdk-sts-1.12.273/copyright-ignore (100%) rename tools/legal-review/{Database => AWS}/com.amazonaws.aws-java-sdk-sts-1.12.273/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/com.amazonaws.jmespath-java-1.12.273/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/com.fasterxml.jackson.core.jackson-annotations-2.12.6/custom-license (100%) rename tools/legal-review/{Database => AWS}/com.fasterxml.jackson.core.jackson-annotations-2.12.6/files-keep (100%) rename tools/legal-review/{Database => AWS}/com.fasterxml.jackson.core.jackson-core-2.12.6/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/com.fasterxml.jackson.core.jackson-core-2.12.6/custom-license (100%) rename tools/legal-review/{Database => AWS}/com.fasterxml.jackson.core.jackson-core-2.12.6/files-keep (100%) rename tools/legal-review/{Database => AWS}/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/custom-license (100%) rename tools/legal-review/{Database => AWS}/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/files-ignore (100%) rename tools/legal-review/{Database => AWS}/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/files-keep (100%) rename tools/legal-review/{Database => AWS}/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-add/CREDITS-2.x.txt (100%) create mode 100644 tools/legal-review/AWS/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-ignore rename tools/legal-review/{Database => AWS}/commons-codec.commons-codec-1.15/copyright-ignore (100%) 
rename tools/legal-review/{Database => AWS}/commons-codec.commons-codec-1.15/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/commons-codec.commons-codec-1.15/files-ignore (100%) rename tools/legal-review/{Database => AWS}/commons-codec.commons-codec-1.15/files-keep (100%) rename tools/legal-review/{Database => AWS}/commons-logging.commons-logging-1.1.3/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/commons-logging.commons-logging-1.1.3/files-ignore (100%) rename tools/legal-review/{Database => AWS}/commons-logging.commons-logging-1.1.3/files-keep (100%) rename tools/legal-review/{Database => AWS}/joda-time.joda-time-2.8.1/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/joda-time.joda-time-2.8.1/custom-license (100%) rename tools/legal-review/{Database => AWS}/joda-time.joda-time-2.8.1/files-ignore (100%) rename tools/legal-review/{Database => AWS}/joda-time.joda-time-2.8.1/files-keep (100%) rename tools/legal-review/{Database => AWS}/org.apache.httpcomponents.httpclient-4.5.13/copyright-ignore (100%) rename tools/legal-review/{Database => AWS}/org.apache.httpcomponents.httpclient-4.5.13/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/org.apache.httpcomponents.httpclient-4.5.13/files-ignore (100%) rename tools/legal-review/{Database => AWS}/org.apache.httpcomponents.httpclient-4.5.13/files-keep (100%) rename tools/legal-review/{Database => AWS}/org.apache.httpcomponents.httpcore-4.4.13/copyright-ignore (100%) rename tools/legal-review/{Database => AWS}/org.apache.httpcomponents.httpcore-4.4.13/copyright-keep (100%) rename tools/legal-review/{Database => AWS}/org.apache.httpcomponents.httpcore-4.4.13/files-ignore (100%) rename tools/legal-review/{Database => AWS}/org.apache.httpcomponents.httpcore-4.4.13/files-keep (100%) create mode 100644 tools/legal-review/AWS/report-state rename tools/legal-review/{Database => AWS}/reviewed-licenses/Apache_2 (100%) rename tools/legal-review/{Database => 
AWS}/reviewed-licenses/Apache_License__Version_2.0 (100%) rename tools/legal-review/{Database => AWS}/reviewed-licenses/The_Apache_License__Version_2.0 (100%) rename tools/legal-review/{Database => AWS}/reviewed-licenses/The_Apache_Software_License__Version_2.0 (100%) rename tools/legal-review/{Database => AWS}/software.amazon.ion.ion-java-1.0.2/copyright-ignore (100%) rename tools/legal-review/{Database => AWS}/software.amazon.ion.ion-java-1.0.2/copyright-keep (100%) delete mode 100644 tools/legal-review/Database/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-ignore delete mode 100644 tools/legal-review/Database/reviewed-licenses/BSD-2-Clause delete mode 100644 tools/legal-review/Database/reviewed-licenses/BSD_License create mode 100644 tools/legal-review/engine/org.reactivestreams.reactive-streams-1.0.4/copyright-add delete mode 100644 tools/legal-review/engine/org.reactivestreams.reactive-streams-1.0.4/copyright-keep-context diff --git a/CHANGELOG.md b/CHANGELOG.md index fb46543de6fe..39eab0c08148 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -408,6 +408,7 @@ `Text.write`.][6459] - [Implemented `create_database_table` allowing saving queries as database tables.][6467] +- [Moved `Redshift` connector into a separate `AWS` library.][6550] [debug-shortcuts]: https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug @@ -614,6 +615,7 @@ [6429]: https://github.com/enso-org/enso/pull/6429 [6459]: https://github.com/enso-org/enso/pull/6459 [6467]: https://github.com/enso-org/enso/pull/6467 +[6550]: https://github.com/enso-org/enso/pull/6550 #### Enso Compiler diff --git a/build.sbt b/build.sbt index 0f6e9db905de..45bc87871a30 100644 --- a/build.sbt +++ b/build.sbt @@ -131,7 +131,8 @@ GatherLicenses.distributions := Seq( ), makeStdLibDistribution("Table", Distribution.sbtProjects(`std-table`)), makeStdLibDistribution("Database", Distribution.sbtProjects(`std-database`)), - makeStdLibDistribution("Image", 
Distribution.sbtProjects(`std-image`)) + makeStdLibDistribution("Image", Distribution.sbtProjects(`std-image`)), + makeStdLibDistribution("AWS", Distribution.sbtProjects(`std-aws`)) ) GatherLicenses.licenseConfigurations := Set("compile") @@ -295,6 +296,7 @@ lazy val enso = (project in file(".")) `std-google-api`, `std-image`, `std-table`, + `std-aws`, `simple-httpbin`, `enso-test-java-helpers` ) @@ -1376,6 +1378,7 @@ lazy val runtime = (project in file("engine/runtime")) .dependsOn(`std-database` / Compile / packageBin) .dependsOn(`std-google-api` / Compile / packageBin) .dependsOn(`std-table` / Compile / packageBin) + .dependsOn(`std-aws` / Compile / packageBin) .value ) .settings( @@ -1905,6 +1908,8 @@ val `google-api-polyglot-root` = stdLibComponentRoot("Google_Api") / "polyglot" / "java" val `database-polyglot-root` = stdLibComponentRoot("Database") / "polyglot" / "java" +val `std-aws-polyglot-root` = + stdLibComponentRoot("AWS") / "polyglot" / "java" lazy val `std-base` = project .in(file("std-bits") / "base") @@ -2064,10 +2069,35 @@ lazy val `std-database` = project autoScalaLibrary := false, Compile / packageBin / artifactPath := `database-polyglot-root` / "std-database.jar", + libraryDependencies ++= Seq( + "org.netbeans.api" % "org-openide-util-lookup" % netbeansApiVersion % "provided", + "org.xerial" % "sqlite-jdbc" % sqliteVersion, + "org.postgresql" % "postgresql" % "42.4.0" + ), + Compile / packageBin := Def.task { + val result = (Compile / packageBin).value + val _ = StdBits + .copyDependencies( + `database-polyglot-root`, + Seq("std-database.jar"), + ignoreScalaLibrary = true + ) + .value + result + }.value + ) + .dependsOn(`std-base` % "provided") + .dependsOn(`std-table` % "provided") + +lazy val `std-aws` = project + .in(file("std-bits") / "aws") + .settings( + frgaalJavaCompilerSetting, + autoScalaLibrary := false, + Compile / packageBin / artifactPath := + `std-aws-polyglot-root` / "std-aws.jar", libraryDependencies ++= Seq( "org.netbeans.api" 
% "org-openide-util-lookup" % netbeansApiVersion % "provided", - "org.xerial" % "sqlite-jdbc" % sqliteVersion, - "org.postgresql" % "postgresql" % "42.4.0", "com.amazon.redshift" % "redshift-jdbc42" % "2.1.0.9", "com.amazonaws" % "aws-java-sdk-core" % "1.12.273", "com.amazonaws" % "aws-java-sdk-redshift" % "1.12.273", @@ -2077,8 +2107,8 @@ lazy val `std-database` = project val result = (Compile / packageBin).value val _ = StdBits .copyDependencies( - `database-polyglot-root`, - Seq("std-database.jar"), + `std-aws-polyglot-root`, + Seq("std-aws.jar"), ignoreScalaLibrary = true ) .value @@ -2086,6 +2116,8 @@ lazy val `std-database` = project }.value ) .dependsOn(`std-base` % "provided") + .dependsOn(`std-table` % "provided") + .dependsOn(`std-database` % "provided") /* Note [Native Image Workaround for GraalVM 20.2] * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ @@ -2191,7 +2223,14 @@ runEngineDistribution := { val allStdBitsSuffix = List("All", "AllWithIndex") val stdBitsProjects = - List("Base", "Database", "Google_Api", "Image", "Table") ++ allStdBitsSuffix + List( + "AWS", + "Base", + "Database", + "Google_Api", + "Image", + "Table" + ) ++ allStdBitsSuffix val allStdBits: Parser[String] = stdBitsProjects.map(v => v: Parser[String]).reduce(_ | _) @@ -2240,6 +2279,8 @@ pkgStdLibInternal := Def.inputTask { (`std-table` / Compile / packageBin).value case "TestHelpers" => (`enso-test-java-helpers` / Compile / packageBin).value + case "AWS" => + (`std-aws` / Compile / packageBin).value case _ if buildAllCmd => (`std-base` / Compile / packageBin).value (`enso-test-java-helpers` / Compile / packageBin).value @@ -2247,6 +2288,7 @@ pkgStdLibInternal := Def.inputTask { (`std-database` / Compile / packageBin).value (`std-image` / Compile / packageBin).value (`std-google-api` / Compile / packageBin).value + (`std-aws` / Compile / packageBin).value case _ => } val libs = diff --git a/distribution/engine/THIRD-PARTY/org.reactivestreams.reactive-streams-1.0.4/NOTICES 
b/distribution/engine/THIRD-PARTY/org.reactivestreams.reactive-streams-1.0.4/NOTICES index bd0a85c0c769..0dc12aa1331a 100644 --- a/distribution/engine/THIRD-PARTY/org.reactivestreams.reactive-streams-1.0.4/NOTICES +++ b/distribution/engine/THIRD-PARTY/org.reactivestreams.reactive-streams-1.0.4/NOTICES @@ -1,10 +1,2 @@ -/************************************************************************ - * Licensed under Public Domain (CC0) * - * * - * To the extent possible under law, the person who associated CC0 with * - * this code has waived all copyright and related or neighboring * - * rights to this code. * - * * - * You should have received a copy of the CC0 legalcode along with this * - * work. If not, see .* - ************************************************************************/ +See https://github.com/reactive-streams/reactive-streams-jvm for more information. + diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/NOTICE b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/NOTICE new file mode 100644 index 000000000000..3c2ede9a8464 --- /dev/null +++ b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/NOTICE @@ -0,0 +1,77 @@ +Enso +Copyright 2020 - 2023 New Byte Order sp. z o. o. + +'redshift-jdbc42', licensed under the Apache License, Version 2.0, is distributed with the AWS. +The license information can be found along with the copyright notices. +Copyright notices related to this dependency can be found in the directory `com.amazon.redshift.redshift-jdbc42-2.1.0.9`. + + +'aws-java-sdk-core', licensed under the Apache License, Version 2.0, is distributed with the AWS. +The license file can be found at `licenses/APACHE2.0`. +Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-core-1.12.273`. + + +'aws-java-sdk-redshift', licensed under the Apache License, Version 2.0, is distributed with the AWS. +The license file can be found at `licenses/APACHE2.0`. 
+Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-redshift-1.12.273`. + + +'aws-java-sdk-sts', licensed under the Apache License, Version 2.0, is distributed with the AWS. +The license file can be found at `licenses/APACHE2.0`. +Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-sts-1.12.273`. + + +'jmespath-java', licensed under the Apache License, Version 2.0, is distributed with the AWS. +The license file can be found at `licenses/APACHE2.0`. +Copyright notices related to this dependency can be found in the directory `com.amazonaws.jmespath-java-1.12.273`. + + +'jackson-annotations', licensed under the The Apache Software License, Version 2.0, is distributed with the AWS. +The license information can be found along with the copyright notices. +Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-annotations-2.12.6`. + + +'jackson-core', licensed under the The Apache Software License, Version 2.0, is distributed with the AWS. +The license information can be found along with the copyright notices. +Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-core-2.12.6`. + + +'jackson-databind', licensed under the The Apache Software License, Version 2.0, is distributed with the AWS. +The license information can be found along with the copyright notices. +Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-databind-2.12.6.1`. + + +'jackson-dataformat-cbor', licensed under the The Apache Software License, Version 2.0, is distributed with the AWS. +The license file can be found at `licenses/APACHE2.0`. +Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6`. 
+ + +'commons-codec', licensed under the Apache License, Version 2.0, is distributed with the AWS. +The license file can be found at `licenses/APACHE2.0`. +Copyright notices related to this dependency can be found in the directory `commons-codec.commons-codec-1.15`. + + +'commons-logging', licensed under the The Apache Software License, Version 2.0, is distributed with the AWS. +The license file can be found at `licenses/APACHE2.0`. +Copyright notices related to this dependency can be found in the directory `commons-logging.commons-logging-1.1.3`. + + +'joda-time', licensed under the Apache 2, is distributed with the AWS. +The license information can be found along with the copyright notices. +Copyright notices related to this dependency can be found in the directory `joda-time.joda-time-2.8.1`. + + +'httpclient', licensed under the Apache License, Version 2.0, is distributed with the AWS. +The license file can be found at `licenses/APACHE2.0`. +Copyright notices related to this dependency can be found in the directory `org.apache.httpcomponents.httpclient-4.5.13`. + + +'httpcore', licensed under the Apache License, Version 2.0, is distributed with the AWS. +The license file can be found at `licenses/APACHE2.0`. +Copyright notices related to this dependency can be found in the directory `org.apache.httpcomponents.httpcore-4.4.13`. + + +'ion-java', licensed under the The Apache License, Version 2.0, is distributed with the AWS. +The license file can be found at `licenses/APACHE2.0`. +Copyright notices related to this dependency can be found in the directory `software.amazon.ion.ion-java-1.0.2`. 
+ diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazon.redshift.redshift-jdbc42-2.1.0.9/LICENSE b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazon.redshift.redshift-jdbc42-2.1.0.9/LICENSE similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazon.redshift.redshift-jdbc42-2.1.0.9/LICENSE rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazon.redshift.redshift-jdbc42-2.1.0.9/LICENSE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazon.redshift.redshift-jdbc42-2.1.0.9/NOTICES b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazon.redshift.redshift-jdbc42-2.1.0.9/NOTICES similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazon.redshift.redshift-jdbc42-2.1.0.9/NOTICES rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazon.redshift.redshift-jdbc42-2.1.0.9/NOTICES diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-core-1.12.273/NOTICES b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-core-1.12.273/NOTICES similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-core-1.12.273/NOTICES rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-core-1.12.273/NOTICES diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-redshift-1.12.273/NOTICES b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-redshift-1.12.273/NOTICES similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-redshift-1.12.273/NOTICES rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-redshift-1.12.273/NOTICES diff --git 
a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-sts-1.12.273/NOTICES b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-sts-1.12.273/NOTICES similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-sts-1.12.273/NOTICES rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazonaws.aws-java-sdk-sts-1.12.273/NOTICES diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazonaws.jmespath-java-1.12.273/NOTICES b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazonaws.jmespath-java-1.12.273/NOTICES similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.amazonaws.jmespath-java-1.12.273/NOTICES rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.amazonaws.jmespath-java-1.12.273/NOTICES diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-annotations-2.12.6/LICENSE b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-annotations-2.12.6/LICENSE similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-annotations-2.12.6/LICENSE rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-annotations-2.12.6/LICENSE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/LICENSE b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/LICENSE similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/LICENSE rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/LICENSE diff --git 
a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/NOTICE b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/NOTICE similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/NOTICE rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/NOTICE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/NOTICES b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/NOTICES similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/NOTICES rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-core-2.12.6/NOTICES diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/LICENSE b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/LICENSE similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/LICENSE rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/LICENSE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/CREDITS-2.x.txt b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/CREDITS-2.x.txt similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/CREDITS-2.x.txt rename to 
distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/CREDITS-2.x.txt diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/commons-codec.commons-codec-1.15/NOTICE.txt b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/commons-codec.commons-codec-1.15/NOTICE.txt similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/commons-codec.commons-codec-1.15/NOTICE.txt rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/commons-codec.commons-codec-1.15/NOTICE.txt diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/commons-codec.commons-codec-1.15/NOTICES b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/commons-codec.commons-codec-1.15/NOTICES similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/commons-codec.commons-codec-1.15/NOTICES rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/commons-codec.commons-codec-1.15/NOTICES diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/commons-logging.commons-logging-1.1.3/NOTICE.txt b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/commons-logging.commons-logging-1.1.3/NOTICE.txt similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/commons-logging.commons-logging-1.1.3/NOTICE.txt rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/commons-logging.commons-logging-1.1.3/NOTICE.txt diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/commons-logging.commons-logging-1.1.3/NOTICES b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/commons-logging.commons-logging-1.1.3/NOTICES similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/commons-logging.commons-logging-1.1.3/NOTICES rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/commons-logging.commons-logging-1.1.3/NOTICES diff --git 
a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/LICENSE.txt b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/LICENSE.txt similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/LICENSE.txt rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/LICENSE.txt diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/NOTICE.txt b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/NOTICE.txt similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/NOTICE.txt rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/NOTICE.txt diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/NOTICES b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/NOTICES similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/NOTICES rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/joda-time.joda-time-2.8.1/NOTICES diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/licenses/APACHE2.0 b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/licenses/APACHE2.0 similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/licenses/APACHE2.0 rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/licenses/APACHE2.0 diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/org.apache.httpcomponents.httpclient-4.5.13/NOTICE b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/org.apache.httpcomponents.httpclient-4.5.13/NOTICE similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/org.apache.httpcomponents.httpclient-4.5.13/NOTICE rename to 
distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/org.apache.httpcomponents.httpclient-4.5.13/NOTICE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/org.apache.httpcomponents.httpcore-4.4.13/NOTICE b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/org.apache.httpcomponents.httpcore-4.4.13/NOTICE similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/org.apache.httpcomponents.httpcore-4.4.13/NOTICE rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/org.apache.httpcomponents.httpcore-4.4.13/NOTICE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/software.amazon.ion.ion-java-1.0.2/NOTICES b/distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/software.amazon.ion.ion-java-1.0.2/NOTICES similarity index 100% rename from distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/software.amazon.ion.ion-java-1.0.2/NOTICES rename to distribution/lib/Standard/AWS/0.0.0-dev/THIRD-PARTY/software.amazon.ion.ion-java-1.0.2/NOTICES diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/package.yaml b/distribution/lib/Standard/AWS/0.0.0-dev/package.yaml new file mode 100644 index 000000000000..d9ac9519a586 --- /dev/null +++ b/distribution/lib/Standard/AWS/0.0.0-dev/package.yaml @@ -0,0 +1,10 @@ +name: AWS +namespace: Standard +version: 0.0.0-dev +license: APLv2 +authors: + - name: Enso Team + email: contact@enso.org +maintainers: + - name: Enso Team + email: contact@enso.org diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso similarity index 77% rename from distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso rename to distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso index 64edcb84d930..20b5ad7fcff5 100644 --- 
a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Dialect.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso @@ -4,31 +4,32 @@ import Standard.Table.Internal.Naming_Helpers.Naming_Helpers from Standard.Table import Aggregate_Column from Standard.Table import Value_Type -import project.Connection.Connection.Connection -import project.Data.Dialect -import project.Data.SQL.Builder -import project.Data.SQL_Statement.SQL_Statement -import project.Data.SQL_Type.SQL_Type -import project.Data.Table.Table -import project.Internal.Base_Generator -import project.Internal.Column_Fetcher.Column_Fetcher -import project.Internal.Column_Fetcher as Column_Fetcher_Module -import project.Internal.Error_Mapper.Error_Mapper -import project.Internal.IR.Context.Context -import project.Internal.IR.From_Spec.From_Spec -import project.Internal.IR.Internal_Column.Internal_Column -import project.Internal.IR.SQL_Expression.SQL_Expression -import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind -import project.Internal.IR.Order_Descriptor.Order_Descriptor -import project.Internal.IR.Query.Query -import project.Internal.Postgres.Postgres_Dialect -import project.Internal.Common.Database_Join_Helper -import project.Internal.Postgres.Postgres_Type_Mapping.Postgres_Type_Mapping -import project.Internal.Redshift.Redshift_Error_Mapper.Redshift_Error_Mapper -import project.Internal.SQL_Type_Mapping.SQL_Type_Mapping -import project.Internal.SQL_Type_Reference.SQL_Type_Reference -import project.Internal.Statement_Setter.Statement_Setter -from project.Errors import Unsupported_Database_Operation +import Standard.Database.Connection.Connection.Connection +import Standard.Database.Data.Dialect +import Standard.Database.Data.SQL.Builder +import Standard.Database.Data.SQL_Statement.SQL_Statement +import Standard.Database.Data.SQL_Type.SQL_Type +import Standard.Database.Data.Table.Table +import 
Standard.Database.Internal.Base_Generator +import Standard.Database.Internal.Column_Fetcher.Column_Fetcher +import Standard.Database.Internal.Column_Fetcher as Column_Fetcher_Module +import Standard.Database.Internal.Error_Mapper.Error_Mapper +import Standard.Database.Internal.IR.Context.Context +import Standard.Database.Internal.IR.From_Spec.From_Spec +import Standard.Database.Internal.IR.Internal_Column.Internal_Column +import Standard.Database.Internal.IR.SQL_Expression.SQL_Expression +import Standard.Database.Internal.IR.SQL_Join_Kind.SQL_Join_Kind +import Standard.Database.Internal.IR.Order_Descriptor.Order_Descriptor +import Standard.Database.Internal.IR.Query.Query +import Standard.Database.Internal.Postgres.Postgres_Dialect +import Standard.Database.Internal.Common.Database_Join_Helper +import Standard.Database.Internal.Postgres.Postgres_Type_Mapping.Postgres_Type_Mapping +import Standard.Database.Internal.SQL_Type_Mapping.SQL_Type_Mapping +import Standard.Database.Internal.SQL_Type_Reference.SQL_Type_Reference +import Standard.Database.Internal.Statement_Setter.Statement_Setter +from Standard.Database.Errors import Unsupported_Database_Operation + +import project.Database.Redshift.Internal.Redshift_Error_Mapper.Redshift_Error_Mapper ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Error_Mapper.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Error_Mapper.enso similarity index 85% rename from distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Error_Mapper.enso rename to distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Error_Mapper.enso index f20735fb3582..98f06d3652e6 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Redshift/Redshift_Error_Mapper.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Error_Mapper.enso @@ -1,6 +1,6 @@ from Standard.Base 
import all -from project.Errors import SQL_Error +from Standard.Database.Errors import SQL_Error ## PRIVATE type Redshift_Error_Mapper diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Redshift_Options.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Redshift_Details.enso similarity index 83% rename from distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Redshift_Options.enso rename to distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Redshift_Details.enso index 0cc44d18cb42..61905bb5abe5 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Redshift_Options.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Redshift_Details.enso @@ -1,19 +1,19 @@ from Standard.Base import all -import project.Connection.Client_Certificate.Client_Certificate -import project.Connection.Connection.Connection -import project.Connection.Connection_Options.Connection_Options -import project.Connection.Credentials.Credentials -import project.Connection.SSL_Mode.SSL_Mode -import project.Data.Dialect -import project.Internal.JDBC_Connection -import project.Internal.Postgres.Pgpass +import Standard.Database.Connection.Client_Certificate.Client_Certificate +import Standard.Database.Connection.Connection.Connection +import Standard.Database.Connection.Connection_Options.Connection_Options +import Standard.Database.Connection.Credentials.Credentials +import Standard.Database.Connection.SSL_Mode.SSL_Mode +import Standard.Database.Internal.JDBC_Connection +import Standard.Database.Internal.Postgres.Pgpass + +import project.Database.Redshift.Internal.Redshift_Dialect polyglot java import com.amazon.redshift.jdbc.Driver polyglot java import java.util.Properties -polyglot java import org.enso.database.JDBCProxy -type Redshift_Options +type Redshift_Details ## Connect to a AWS Redshift database. 
Arguments: @@ -40,7 +40,7 @@ type Redshift_Options java_props.setProperty pair.first pair.second jdbc_connection = JDBC_Connection.create self.jdbc_url properties - Connection.Value jdbc_connection Dialect.redshift + Connection.Value jdbc_connection Redshift_Dialect.redshift ## PRIVATE Provides the jdbc url for the connection. @@ -49,7 +49,7 @@ type Redshift_Options prefix = case self.credentials of _ : AWS_Credential -> 'jdbc:redshift:iam://' _ -> 'jdbc:redshift://' - prefix + self.host + ':' + self.port.to_text + (if self.schema == '' then '' else '/' + self.schema) + prefix + self.host + ':' + self.port.to_text + '/' + self.schema ## PRIVATE Provides the properties for the connection. diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Main.enso new file mode 100644 index 000000000000..60af936b1fe3 --- /dev/null +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Main.enso @@ -0,0 +1,7 @@ +import project.Database.Redshift.Redshift_Details.Redshift_Details +import project.Database.Redshift.Redshift_Details.AWS_Credential + +export project.Database.Redshift.Redshift_Details.Redshift_Details +export project.Database.Redshift.Redshift_Details.AWS_Credential + +from project.Database.Redshift.Redshift_Details.Redshift_Details export Redshift diff --git a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/NOTICE b/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/NOTICE index 7fa7dc342520..9fa9ce40f0da 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/NOTICE +++ b/distribution/lib/Standard/Database/0.0.0-dev/THIRD-PARTY/NOTICE @@ -1,76 +1,6 @@ Enso Copyright 2020 - 2023 New Byte Order sp. z o. o. -'redshift-jdbc42', licensed under the Apache License, Version 2.0, is distributed with the Database. -The license information can be found along with the copyright notices. 
-Copyright notices related to this dependency can be found in the directory `com.amazon.redshift.redshift-jdbc42-2.1.0.9`. - - -'aws-java-sdk-core', licensed under the Apache License, Version 2.0, is distributed with the Database. -The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-core-1.12.273`. - - -'aws-java-sdk-redshift', licensed under the Apache License, Version 2.0, is distributed with the Database. -The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-redshift-1.12.273`. - - -'aws-java-sdk-sts', licensed under the Apache License, Version 2.0, is distributed with the Database. -The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `com.amazonaws.aws-java-sdk-sts-1.12.273`. - - -'jmespath-java', licensed under the Apache License, Version 2.0, is distributed with the Database. -The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `com.amazonaws.jmespath-java-1.12.273`. - - -'jackson-annotations', licensed under the The Apache Software License, Version 2.0, is distributed with the Database. -The license information can be found along with the copyright notices. -Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-annotations-2.12.6`. - - -'jackson-core', licensed under the The Apache Software License, Version 2.0, is distributed with the Database. -The license information can be found along with the copyright notices. -Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-core-2.12.6`. 
- - -'jackson-databind', licensed under the The Apache Software License, Version 2.0, is distributed with the Database. -The license information can be found along with the copyright notices. -Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.core.jackson-databind-2.12.6.1`. - - -'jackson-dataformat-cbor', licensed under the The Apache Software License, Version 2.0, is distributed with the Database. -The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6`. - - -'commons-codec', licensed under the Apache License, Version 2.0, is distributed with the Database. -The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `commons-codec.commons-codec-1.15`. - - -'commons-logging', licensed under the The Apache Software License, Version 2.0, is distributed with the Database. -The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `commons-logging.commons-logging-1.1.3`. - - -'joda-time', licensed under the Apache 2, is distributed with the Database. -The license information can be found along with the copyright notices. -Copyright notices related to this dependency can be found in the directory `joda-time.joda-time-2.8.1`. - - -'httpclient', licensed under the Apache License, Version 2.0, is distributed with the Database. -The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `org.apache.httpcomponents.httpclient-4.5.13`. - - -'httpcore', licensed under the Apache License, Version 2.0, is distributed with the Database. -The license file can be found at `licenses/APACHE2.0`. 
-Copyright notices related to this dependency can be found in the directory `org.apache.httpcomponents.httpcore-4.4.13`. - - 'checker-qual', licensed under the The MIT License, is distributed with the Database. The license information can be found along with the copyright notices. Copyright notices related to this dependency can be found in the directory `org.checkerframework.checker-qual-3.5.0`. @@ -85,8 +15,3 @@ Copyright notices related to this dependency can be found in the directory `org. The license information can be found along with the copyright notices. Copyright notices related to this dependency can be found in the directory `org.xerial.sqlite-jdbc-3.41.2.1`. - -'ion-java', licensed under the The Apache License, Version 2.0, is distributed with the Database. -The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `software.amazon.ion.ion-java-1.0.2`. - diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Details.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Details.enso new file mode 100644 index 000000000000..2add0d4d232a --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Details.enso @@ -0,0 +1,23 @@ +from Standard.Base import all +import Standard.Base.Errors.Unimplemented.Unimplemented + +import project.Connection.Connection.Connection +import project.Connection.Connection_Options.Connection_Options +from project.Errors import SQL_Error + + +## Specifies the connection details for the database. + + This is an interface that is implemented by particular database types, like + `Postgres_Details`, `SQLite_Details` etc. +type Connection_Details + + ## PRIVATE + Build the Connection resource. + + Arguments: + - options: Overrides for the connection properties. + connect : Connection_Options -> Connection ! 
SQL_Error + connect self options = + _ = options + Unimplemented.throw "This is an interface only." diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Options.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Options.enso index 5ed8f4c3c58d..e27550bf1db0 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Options.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Options.enso @@ -1,7 +1,9 @@ from Standard.Base import all type Connection_Options - ## Hold a set of key value pairs used to configure the connection. + ## Additional customization options for the JDBC connection. + + Hold a set of key value pairs used to configure the connection. Value options:Vector=[] ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Database.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Database.enso index 6a37da3d5f86..2aba46142bd5 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Database.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Database.enso @@ -1,20 +1,36 @@ from Standard.Base import all +from Standard.Base.Metadata.Widget import Single_Choice +from Standard.Base.Metadata.Choice import Option +import Standard.Base.Metadata.Display + +import project.Connection.Connection_Details.Connection_Details import project.Connection.Connection_Options.Connection_Options -import project.Connection.Postgres_Options.Postgres_Options -import project.Connection.SQLite_Options.SQLite_Options -import project.Connection.Redshift_Options.Redshift_Options import project.Connection.Connection.Connection from project.Errors import SQL_Error +polyglot java import org.enso.database.DatabaseConnectionDetailsSPI + ## UNSTABLE Tries to connect to the database. Arguments: - - details: Connection_Details to use to connect. - - options: Any overriding options to use. 
-connect : (Postgres_Options|SQLite_Options|Redshift_Options) -> Connection_Options -> Connection ! SQL_Error + - details: `Connection_Details` specifying the database to connect to. + - options: Additional custom connection options for the JDBC connection. +@details connection_details_widget +connect : Connection_Details -> Connection_Options -> Connection ! SQL_Error connect details options=Connection_Options.Value = details.connect options + +## PRIVATE +connection_details_widget : Single_Choice +connection_details_widget = + default_constructors = Vector.from_polyglot_array <| + DatabaseConnectionDetailsSPI.get_default_constructors False + choices = default_constructors.map pair-> + name = pair.first + code = pair.second + Option name code + Single_Choice display=Display.Always values=choices diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Options.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Details.enso similarity index 98% rename from distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Options.enso rename to distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Details.enso index b0157aaf8697..678e91f8a5d7 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Options.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Details.enso @@ -11,7 +11,7 @@ import project.Internal.Postgres.Pgpass polyglot java import org.postgresql.Driver -type Postgres_Options +type Postgres_Details ## Connect to a PostgreSQL database. Arguments: @@ -41,7 +41,7 @@ type Postgres_Options ## Cannot use default argument values as gets in an infinite loop if you do. make_new database schema = - Postgres_Options.Postgres self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . 
connect options + Postgres_Details.Postgres self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . connect options Postgres_Connection.create self.jdbc_url properties make_new diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Options.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Details.enso similarity index 98% rename from distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Options.enso rename to distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Details.enso index ae135623e8b9..df381c4325fd 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Options.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Details.enso @@ -3,7 +3,7 @@ from Standard.Base import all import project.Connection.Connection_Options.Connection_Options import project.Internal.SQLite.SQLite_Connection -type SQLite_Options +type SQLite_Details ## Connect to a SQLite DB File or InMemory DB. 
Arguments: diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso index 4a2dac39a0d8..05b46d7796fe 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/SQLite_Format.enso @@ -2,7 +2,7 @@ from Standard.Base import all import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import project.Connection.Database -import project.Connection.SQLite_Options.SQLite_Options +import project.Connection.SQLite_Details.SQLite_Details ## Read the file to a `SQLite_Connection` from a `.db` or `.sqlite` file type SQLite_Format @@ -30,4 +30,4 @@ type SQLite_Format read : File -> Problem_Behavior -> Any read self file on_problems = _ = [on_problems] - Database.connect (SQLite_Options.SQLite file) + Database.connect (SQLite_Details.SQLite file) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso index aa367514290f..9b166cbc9407 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Dialect.enso @@ -19,7 +19,6 @@ import project.Internal.IR.Order_Descriptor.Order_Descriptor import project.Internal.IR.Query.Query import project.Internal.IR.SQL_Expression.SQL_Expression import project.Internal.Postgres.Postgres_Dialect -import project.Internal.Redshift.Redshift_Dialect import project.Internal.SQLite.SQLite_Dialect import project.Internal.SQL_Type_Mapping.SQL_Type_Mapping import project.Internal.SQL_Type_Reference.SQL_Type_Reference @@ -213,12 +212,6 @@ sqlite = SQLite_Dialect.sqlite postgres : Dialect postgres = Postgres_Dialect.postgres -## PRIVATE - - The dialect of Redshift databases. 
-redshift : Dialect -redshift = Redshift_Dialect.redshift - ## PRIVATE default_fetch_types_query dialect expression context = empty_context = context.add_where_filters [SQL_Expression.Literal "FALSE"] diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso index 01e24099b7ef..6986f97ec3a0 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso @@ -3,33 +3,27 @@ import project.Connection.Client_Certificate.Client_Certificate import project.Connection.Connection_Options.Connection_Options import project.Connection.Credentials.Credentials import project.Connection.Database -import project.Connection.Postgres_Options.Postgres_Options -import project.Connection.Redshift_Options.Redshift_Options -import project.Connection.Redshift_Options.AWS_Credential -import project.Connection.SQLite_Options.SQLite_Options -import project.Connection.SQLite_Options.In_Memory +import project.Connection.Postgres_Details.Postgres_Details +import project.Connection.SQLite_Details.SQLite_Details +import project.Connection.SQLite_Details.In_Memory import project.Connection.SSL_Mode.SSL_Mode import project.Data.SQL_Query.SQL_Query import project.Extensions.Upload_Table -from project.Connection.Postgres_Options.Postgres_Options import Postgres -from project.Connection.Redshift_Options.Redshift_Options import Redshift -from project.Connection.SQLite_Options.SQLite_Options import SQLite +from project.Connection.Postgres_Details.Postgres_Details import Postgres +from project.Connection.SQLite_Details.SQLite_Details import SQLite export project.Connection.Client_Certificate.Client_Certificate export project.Connection.Connection_Options.Connection_Options export project.Connection.Credentials.Credentials export project.Connection.Database -export project.Connection.Postgres_Options.Postgres_Options -export 
project.Connection.Redshift_Options.Redshift_Options -export project.Connection.Redshift_Options.AWS_Credential -export project.Connection.SQLite_Options.SQLite_Options -export project.Connection.SQLite_Options.In_Memory +export project.Connection.Postgres_Details.Postgres_Details +export project.Connection.SQLite_Details.SQLite_Details +export project.Connection.SQLite_Details.In_Memory export project.Connection.SSL_Mode.SSL_Mode export project.Data.SQL_Query.SQL_Query export project.Extensions.Upload_Table -from project.Connection.Postgres_Options.Postgres_Options export Postgres -from project.Connection.Redshift_Options.Redshift_Options export Redshift -from project.Connection.SQLite_Options.SQLite_Options export SQLite +from project.Connection.Postgres_Details.Postgres_Details export Postgres +from project.Connection.SQLite_Details.SQLite_Details export SQLite diff --git a/lib/scala/pkg/src/main/resources/default/src/Main.enso b/lib/scala/pkg/src/main/resources/default/src/Main.enso index 452f0c0496e5..0ef490401d16 100644 --- a/lib/scala/pkg/src/main/resources/default/src/Main.enso +++ b/lib/scala/pkg/src/main/resources/default/src/Main.enso @@ -1,6 +1,7 @@ from Standard.Base import all from Standard.Table import all from Standard.Database import all +from Standard.AWS import all main = operator1 = "Press TAB key to create a new node" diff --git a/project/Distribution.scala b/project/Distribution.scala index 4789f542ef55..706a69eb915b 100644 --- a/project/Distribution.scala +++ b/project/Distribution.scala @@ -1,8 +1,8 @@ -import com.typesafe.sbt.SbtLicenseReport.autoImportImpl.{ +import sbtlicensereport.SbtLicenseReport.autoImportImpl.{ licenseOverrides, licenseSelection } -import com.typesafe.sbt.license +import sbtlicensereport.license import sbt.Keys.{ivyModule, streams, update, updateClassifiers} import sbt.{File, Project} import src.main.scala.licenses.{ diff --git a/project/Editions.scala b/project/Editions.scala index c445b1f7c4bd..97b09c24869a 100644 
--- a/project/Editions.scala +++ b/project/Editions.scala @@ -12,6 +12,7 @@ object Editions { "Standard.Test", "Standard.Table", "Standard.Database", + "Standard.AWS", "Standard.Image", "Standard.Geo", "Standard.Visualization", diff --git a/project/plugins.sbt b/project/plugins.sbt index 7dbd6735aaa8..817ce11afb6e 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -1,6 +1,6 @@ addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "1.1.0") addSbtPlugin("ch.epfl.scala" % "sbt-bloop" % "1.5.3") -addSbtPlugin("com.typesafe.sbt" % "sbt-license-report" % "1.2.0") +addSbtPlugin("com.github.sbt" % "sbt-license-report" % "1.3.0") addSbtPlugin("com.lightbend.sbt" % "sbt-java-formatter" % "0.7.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.8.3") diff --git a/project/src/main/scala/licenses/DependencyInformation.scala b/project/src/main/scala/licenses/DependencyInformation.scala index 87b8cc000df2..ff31f6ea0c7d 100644 --- a/project/src/main/scala/licenses/DependencyInformation.scala +++ b/project/src/main/scala/licenses/DependencyInformation.scala @@ -2,7 +2,7 @@ package src.main.scala.licenses import java.nio.file.Path -import com.typesafe.sbt.license.{DepModuleInfo, LicenseInfo} +import sbtlicensereport.license.{DepModuleInfo, LicenseInfo} import src.main.scala.licenses.report.Review /** Defines a way to access sources of a dependency. 
diff --git a/project/src/main/scala/licenses/DistributionDescription.scala b/project/src/main/scala/licenses/DistributionDescription.scala index 10b8a85c8e6d..21204cae64a1 100644 --- a/project/src/main/scala/licenses/DistributionDescription.scala +++ b/project/src/main/scala/licenses/DistributionDescription.scala @@ -1,6 +1,6 @@ package src.main.scala.licenses -import com.typesafe.sbt.license.LicenseReport +import sbtlicensereport.license.LicenseReport import sbt.File import sbt.librarymanagement.UpdateReport diff --git a/project/src/main/scala/licenses/frontend/DependencyFilter.scala b/project/src/main/scala/licenses/frontend/DependencyFilter.scala index 14e960db8618..1b75e9abcd9e 100644 --- a/project/src/main/scala/licenses/frontend/DependencyFilter.scala +++ b/project/src/main/scala/licenses/frontend/DependencyFilter.scala @@ -1,6 +1,6 @@ package src.main.scala.licenses.frontend -import com.typesafe.sbt.license.DepModuleInfo +import sbtlicensereport.license.DepModuleInfo import src.main.scala.licenses.DependencyInformation /** Filters out irrelevant dependencies. 
diff --git a/project/src/main/scala/licenses/frontend/SbtLicenses.scala b/project/src/main/scala/licenses/frontend/SbtLicenses.scala index 2a7a63a68952..3b05d50bd10c 100644 --- a/project/src/main/scala/licenses/frontend/SbtLicenses.scala +++ b/project/src/main/scala/licenses/frontend/SbtLicenses.scala @@ -2,7 +2,7 @@ package src.main.scala.licenses.frontend import java.nio.file.Path -import com.typesafe.sbt.license.{DepLicense, DepModuleInfo} +import sbtlicensereport.license.{DepLicense, DepModuleInfo} import org.apache.ivy.core.resolve.IvyNode import sbt.Compile import sbt.internal.util.ManagedLogger diff --git a/std-bits/aws/src/main/java/org/enso/aws/database/RedshiftConnectionDetailsSPI.java b/std-bits/aws/src/main/java/org/enso/aws/database/RedshiftConnectionDetailsSPI.java new file mode 100644 index 000000000000..ed0623931443 --- /dev/null +++ b/std-bits/aws/src/main/java/org/enso/aws/database/RedshiftConnectionDetailsSPI.java @@ -0,0 +1,26 @@ +package org.enso.aws.database; + +import org.enso.database.DatabaseConnectionDetailsSPI; + +@org.openide.util.lookup.ServiceProvider(service = DatabaseConnectionDetailsSPI.class) +public class RedshiftConnectionDetailsSPI extends DatabaseConnectionDetailsSPI { + @Override + protected String getModuleName() { + return "Standard.AWS.Database.Redshift.Redshift_Details"; + } + + @Override + protected String getTypeName() { + return "Redshift_Details"; + } + + @Override + protected String getCodeForDefaultConstructor() { + return "(Redshift host=_ port=_)"; + } + + @Override + protected String getUserFacingConnectionName() { + return "Redshift"; + } +} diff --git a/std-bits/database/src/main/java/org/enso/database/DatabaseConnectionDetailsSPI.java b/std-bits/database/src/main/java/org/enso/database/DatabaseConnectionDetailsSPI.java new file mode 100644 index 000000000000..b803a2e4deec --- /dev/null +++ b/std-bits/database/src/main/java/org/enso/database/DatabaseConnectionDetailsSPI.java @@ -0,0 +1,41 @@ +package 
org.enso.database; + +import java.util.ServiceLoader; + +public abstract class DatabaseConnectionDetailsSPI { + private static final ServiceLoader loader = + ServiceLoader.load( + DatabaseConnectionDetailsSPI.class, DatabaseConnectionDetailsSPI.class.getClassLoader()); + + /** + * Returns an array of pairs, where the first element is the user facing connection name and the + * second element is a string representing the code to insert to create a default connection + * instance. That code may contain `_` placeholders for expected arguments. + */ + public static String[][] get_default_constructors(boolean refresh) { + if (refresh) { + loader.reload(); + } + return loader.stream() + .map( + provider -> { + var spi = provider.get(); + return new String[] { + spi.getUserFacingConnectionName(), spi.getCodeForDefaultConstructor() + }; + }) + .toArray(String[][]::new); + } + + /** The module in which the connection details type is defined. */ + protected abstract String getModuleName(); + + /** The name of the connection details type. */ + protected abstract String getTypeName(); + + /** Default code that can be used to construct a default instance of the connection details. */ + protected abstract String getCodeForDefaultConstructor(); + + /** The user facing name of the connection. 
*/ + protected abstract String getUserFacingConnectionName(); +} diff --git a/std-bits/database/src/main/java/org/enso/database/PostgresConnectionDetailsSPI.java b/std-bits/database/src/main/java/org/enso/database/PostgresConnectionDetailsSPI.java new file mode 100644 index 000000000000..033d073d3e62 --- /dev/null +++ b/std-bits/database/src/main/java/org/enso/database/PostgresConnectionDetailsSPI.java @@ -0,0 +1,24 @@ +package org.enso.database; + +@org.openide.util.lookup.ServiceProvider(service = DatabaseConnectionDetailsSPI.class) +public class PostgresConnectionDetailsSPI extends DatabaseConnectionDetailsSPI { + @Override + protected String getModuleName() { + return "Standard.Database.Connection.Postgres_Details"; + } + + @Override + protected String getTypeName() { + return "Postgres_Details"; + } + + @Override + protected String getCodeForDefaultConstructor() { + return "(Postgres)"; + } + + @Override + protected String getUserFacingConnectionName() { + return "Postgres"; + } +} diff --git a/std-bits/database/src/main/java/org/enso/database/SQLiteConnectionDetailsSPI.java b/std-bits/database/src/main/java/org/enso/database/SQLiteConnectionDetailsSPI.java new file mode 100644 index 000000000000..7b954f1cc25e --- /dev/null +++ b/std-bits/database/src/main/java/org/enso/database/SQLiteConnectionDetailsSPI.java @@ -0,0 +1,24 @@ +package org.enso.database; + +@org.openide.util.lookup.ServiceProvider(service = DatabaseConnectionDetailsSPI.class) +public class SQLiteConnectionDetailsSPI extends DatabaseConnectionDetailsSPI { + @Override + protected String getModuleName() { + return "Standard.Database.Connection.SQLite_Details"; + } + + @Override + protected String getTypeName() { + return "SQLite_Details"; + } + + @Override + protected String getCodeForDefaultConstructor() { + return "(SQLite location=_)"; + } + + @Override + protected String getUserFacingConnectionName() { + return "SQLite"; + } +} diff --git a/test/Table_Tests/src/Database/Redshift_Spec.enso 
b/test/Table_Tests/src/Database/Redshift_Spec.enso index bf1ada2d771d..f60d549eec90 100644 --- a/test/Table_Tests/src/Database/Redshift_Spec.enso +++ b/test/Table_Tests/src/Database/Redshift_Spec.enso @@ -4,7 +4,9 @@ import Standard.Base.Runtime.Ref.Ref import Standard.Table.Data.Type.Value_Type.Bits from Standard.Table import Table, Value_Type -from Standard.Database import Database, Redshift, AWS_Credential, SQL_Query +from Standard.Database import Database, SQL_Query + +from Standard.AWS import Redshift, AWS_Credential from Standard.Test import Test, Test_Suite import Standard.Test.Extensions diff --git a/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso b/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso index dcb622a2450b..a9a0e832ff41 100644 --- a/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso +++ b/test/Tests/src/System/Reporting_Stream_Decoder_Spec.enso @@ -15,7 +15,7 @@ spec = f = enso_project.data / "short.txt" f.delete_if_exists f.exists.should_be_false - "Cup".write f + "Cup".write f . should_succeed f.with_input_stream [File_Access.Read] stream-> stream.with_stream_decoder Encoding.utf_8 reporting_stream_decoder-> reporting_stream_decoder.read.should_equal 67 @@ -29,7 +29,7 @@ spec = f = enso_project.data / "transient" / "varying_chunks.txt" fragment = 'Hello 😎🚀🚧!' contents = 1.up_to 1000 . map _->fragment . join '\n' - contents.write f + contents.write f . should_succeed all_codepoints = Vector.new_builder read_chars decoder n = case read_characters decoder n of @@ -77,7 +77,7 @@ spec = Test.specify "should allow reading a UTF-8 file" <| f = enso_project.data / "transient" / "utf8.txt" encoding = Encoding.utf_8 - ((0.up_to 100).map _->'Hello World!' . join '\n').write f + ((0.up_to 100).map _->'Hello World!' . join '\n').write f . 
should_succeed expected_contents = f.read_text contents = read_file_one_by_one f encoding expected_contents.length contents.should_equal expected_contents diff --git a/test/Visualization_Tests/src/Widgets/Database_Widgets_Spec.enso b/test/Visualization_Tests/src/Widgets/Database_Widgets_Spec.enso index 762fb6ae7134..c14a5c57d9aa 100644 --- a/test/Visualization_Tests/src/Widgets/Database_Widgets_Spec.enso +++ b/test/Visualization_Tests/src/Widgets/Database_Widgets_Spec.enso @@ -5,6 +5,8 @@ import Standard.Base.Metadata.Widget import Standard.Base.Metadata.Display from Standard.Database import all +# This ensures that the Redshift connection details are available in the widget. +from Standard.AWS import all import Standard.Visualization.Widgets @@ -20,8 +22,8 @@ spec = Test.group "Widgets for In-Database Connection with table types" <| Test.specify "works for `tables`" <| result = Widgets.get_widget_json connection "tables" ["types"] - result.contains "'TABLE'" . should_be_true - result.contains "'VIEW'" . should_be_true + result.should_contain "'TABLE'" + result.should_contain "'VIEW'" Test.group "Widgets for In-Database Connection with table name sets" <| Test.specify "works for `query` and `read`" <| @@ -44,4 +46,11 @@ spec = expect = [["column", Widget.Single_Choice choices Nothing Display.Always]] . to_json Widgets.get_widget_json mock_table "filter" ["column"] . 
should_equal expect + Test.group "Widgets for Database" <| + Test.specify "works for `connect`" <| + result = Widgets.get_widget_json Database "connect" ["details"] + result.should_contain "SQLite" + result.should_contain "Postgres" + result.should_contain "Redshift" + main = Test_Suite.run_main spec diff --git a/tools/legal-review/Database/com.amazon.redshift.redshift-jdbc42-2.1.0.9/copyright-ignore b/tools/legal-review/AWS/com.amazon.redshift.redshift-jdbc42-2.1.0.9/copyright-ignore similarity index 100% rename from tools/legal-review/Database/com.amazon.redshift.redshift-jdbc42-2.1.0.9/copyright-ignore rename to tools/legal-review/AWS/com.amazon.redshift.redshift-jdbc42-2.1.0.9/copyright-ignore diff --git a/tools/legal-review/Database/com.amazon.redshift.redshift-jdbc42-2.1.0.9/copyright-keep b/tools/legal-review/AWS/com.amazon.redshift.redshift-jdbc42-2.1.0.9/copyright-keep similarity index 100% rename from tools/legal-review/Database/com.amazon.redshift.redshift-jdbc42-2.1.0.9/copyright-keep rename to tools/legal-review/AWS/com.amazon.redshift.redshift-jdbc42-2.1.0.9/copyright-keep diff --git a/tools/legal-review/Database/com.amazon.redshift.redshift-jdbc42-2.1.0.9/custom-license b/tools/legal-review/AWS/com.amazon.redshift.redshift-jdbc42-2.1.0.9/custom-license similarity index 100% rename from tools/legal-review/Database/com.amazon.redshift.redshift-jdbc42-2.1.0.9/custom-license rename to tools/legal-review/AWS/com.amazon.redshift.redshift-jdbc42-2.1.0.9/custom-license diff --git a/tools/legal-review/Database/com.amazon.redshift.redshift-jdbc42-2.1.0.9/files-keep b/tools/legal-review/AWS/com.amazon.redshift.redshift-jdbc42-2.1.0.9/files-keep similarity index 100% rename from tools/legal-review/Database/com.amazon.redshift.redshift-jdbc42-2.1.0.9/files-keep rename to tools/legal-review/AWS/com.amazon.redshift.redshift-jdbc42-2.1.0.9/files-keep diff --git a/tools/legal-review/Database/com.amazonaws.aws-java-sdk-core-1.12.273/copyright-ignore 
b/tools/legal-review/AWS/com.amazonaws.aws-java-sdk-core-1.12.273/copyright-ignore similarity index 100% rename from tools/legal-review/Database/com.amazonaws.aws-java-sdk-core-1.12.273/copyright-ignore rename to tools/legal-review/AWS/com.amazonaws.aws-java-sdk-core-1.12.273/copyright-ignore diff --git a/tools/legal-review/Database/com.amazonaws.aws-java-sdk-core-1.12.273/copyright-keep b/tools/legal-review/AWS/com.amazonaws.aws-java-sdk-core-1.12.273/copyright-keep similarity index 100% rename from tools/legal-review/Database/com.amazonaws.aws-java-sdk-core-1.12.273/copyright-keep rename to tools/legal-review/AWS/com.amazonaws.aws-java-sdk-core-1.12.273/copyright-keep diff --git a/tools/legal-review/Database/com.amazonaws.aws-java-sdk-redshift-1.12.273/copyright-keep b/tools/legal-review/AWS/com.amazonaws.aws-java-sdk-redshift-1.12.273/copyright-keep similarity index 100% rename from tools/legal-review/Database/com.amazonaws.aws-java-sdk-redshift-1.12.273/copyright-keep rename to tools/legal-review/AWS/com.amazonaws.aws-java-sdk-redshift-1.12.273/copyright-keep diff --git a/tools/legal-review/Database/com.amazonaws.aws-java-sdk-sts-1.12.273/copyright-ignore b/tools/legal-review/AWS/com.amazonaws.aws-java-sdk-sts-1.12.273/copyright-ignore similarity index 100% rename from tools/legal-review/Database/com.amazonaws.aws-java-sdk-sts-1.12.273/copyright-ignore rename to tools/legal-review/AWS/com.amazonaws.aws-java-sdk-sts-1.12.273/copyright-ignore diff --git a/tools/legal-review/Database/com.amazonaws.aws-java-sdk-sts-1.12.273/copyright-keep b/tools/legal-review/AWS/com.amazonaws.aws-java-sdk-sts-1.12.273/copyright-keep similarity index 100% rename from tools/legal-review/Database/com.amazonaws.aws-java-sdk-sts-1.12.273/copyright-keep rename to tools/legal-review/AWS/com.amazonaws.aws-java-sdk-sts-1.12.273/copyright-keep diff --git a/tools/legal-review/Database/com.amazonaws.jmespath-java-1.12.273/copyright-keep 
b/tools/legal-review/AWS/com.amazonaws.jmespath-java-1.12.273/copyright-keep similarity index 100% rename from tools/legal-review/Database/com.amazonaws.jmespath-java-1.12.273/copyright-keep rename to tools/legal-review/AWS/com.amazonaws.jmespath-java-1.12.273/copyright-keep diff --git a/tools/legal-review/Database/com.fasterxml.jackson.core.jackson-annotations-2.12.6/custom-license b/tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-annotations-2.12.6/custom-license similarity index 100% rename from tools/legal-review/Database/com.fasterxml.jackson.core.jackson-annotations-2.12.6/custom-license rename to tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-annotations-2.12.6/custom-license diff --git a/tools/legal-review/Database/com.fasterxml.jackson.core.jackson-annotations-2.12.6/files-keep b/tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-annotations-2.12.6/files-keep similarity index 100% rename from tools/legal-review/Database/com.fasterxml.jackson.core.jackson-annotations-2.12.6/files-keep rename to tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-annotations-2.12.6/files-keep diff --git a/tools/legal-review/Database/com.fasterxml.jackson.core.jackson-core-2.12.6/copyright-keep b/tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-core-2.12.6/copyright-keep similarity index 100% rename from tools/legal-review/Database/com.fasterxml.jackson.core.jackson-core-2.12.6/copyright-keep rename to tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-core-2.12.6/copyright-keep diff --git a/tools/legal-review/Database/com.fasterxml.jackson.core.jackson-core-2.12.6/custom-license b/tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-core-2.12.6/custom-license similarity index 100% rename from tools/legal-review/Database/com.fasterxml.jackson.core.jackson-core-2.12.6/custom-license rename to tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-core-2.12.6/custom-license diff --git 
a/tools/legal-review/Database/com.fasterxml.jackson.core.jackson-core-2.12.6/files-keep b/tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-core-2.12.6/files-keep similarity index 100% rename from tools/legal-review/Database/com.fasterxml.jackson.core.jackson-core-2.12.6/files-keep rename to tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-core-2.12.6/files-keep diff --git a/tools/legal-review/Database/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/custom-license b/tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/custom-license similarity index 100% rename from tools/legal-review/Database/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/custom-license rename to tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/custom-license diff --git a/tools/legal-review/Database/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/files-ignore b/tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/files-ignore similarity index 100% rename from tools/legal-review/Database/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/files-ignore rename to tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/files-ignore diff --git a/tools/legal-review/Database/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/files-keep b/tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/files-keep similarity index 100% rename from tools/legal-review/Database/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/files-keep rename to tools/legal-review/AWS/com.fasterxml.jackson.core.jackson-databind-2.12.6.1/files-keep diff --git a/tools/legal-review/Database/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-add/CREDITS-2.x.txt b/tools/legal-review/AWS/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-add/CREDITS-2.x.txt similarity index 100% rename from 
tools/legal-review/Database/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-add/CREDITS-2.x.txt rename to tools/legal-review/AWS/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-add/CREDITS-2.x.txt diff --git a/tools/legal-review/AWS/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-ignore b/tools/legal-review/AWS/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-ignore new file mode 100644 index 000000000000..b16fbfb884ec --- /dev/null +++ b/tools/legal-review/AWS/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-ignore @@ -0,0 +1,2 @@ +#license +/FasterXML/jackson-dataformats-binary/blob/2.16/LICENSE diff --git a/tools/legal-review/Database/commons-codec.commons-codec-1.15/copyright-ignore b/tools/legal-review/AWS/commons-codec.commons-codec-1.15/copyright-ignore similarity index 100% rename from tools/legal-review/Database/commons-codec.commons-codec-1.15/copyright-ignore rename to tools/legal-review/AWS/commons-codec.commons-codec-1.15/copyright-ignore diff --git a/tools/legal-review/Database/commons-codec.commons-codec-1.15/copyright-keep b/tools/legal-review/AWS/commons-codec.commons-codec-1.15/copyright-keep similarity index 100% rename from tools/legal-review/Database/commons-codec.commons-codec-1.15/copyright-keep rename to tools/legal-review/AWS/commons-codec.commons-codec-1.15/copyright-keep diff --git a/tools/legal-review/Database/commons-codec.commons-codec-1.15/files-ignore b/tools/legal-review/AWS/commons-codec.commons-codec-1.15/files-ignore similarity index 100% rename from tools/legal-review/Database/commons-codec.commons-codec-1.15/files-ignore rename to tools/legal-review/AWS/commons-codec.commons-codec-1.15/files-ignore diff --git a/tools/legal-review/Database/commons-codec.commons-codec-1.15/files-keep b/tools/legal-review/AWS/commons-codec.commons-codec-1.15/files-keep similarity index 100% rename from 
tools/legal-review/Database/commons-codec.commons-codec-1.15/files-keep rename to tools/legal-review/AWS/commons-codec.commons-codec-1.15/files-keep diff --git a/tools/legal-review/Database/commons-logging.commons-logging-1.1.3/copyright-keep b/tools/legal-review/AWS/commons-logging.commons-logging-1.1.3/copyright-keep similarity index 100% rename from tools/legal-review/Database/commons-logging.commons-logging-1.1.3/copyright-keep rename to tools/legal-review/AWS/commons-logging.commons-logging-1.1.3/copyright-keep diff --git a/tools/legal-review/Database/commons-logging.commons-logging-1.1.3/files-ignore b/tools/legal-review/AWS/commons-logging.commons-logging-1.1.3/files-ignore similarity index 100% rename from tools/legal-review/Database/commons-logging.commons-logging-1.1.3/files-ignore rename to tools/legal-review/AWS/commons-logging.commons-logging-1.1.3/files-ignore diff --git a/tools/legal-review/Database/commons-logging.commons-logging-1.1.3/files-keep b/tools/legal-review/AWS/commons-logging.commons-logging-1.1.3/files-keep similarity index 100% rename from tools/legal-review/Database/commons-logging.commons-logging-1.1.3/files-keep rename to tools/legal-review/AWS/commons-logging.commons-logging-1.1.3/files-keep diff --git a/tools/legal-review/Database/joda-time.joda-time-2.8.1/copyright-keep b/tools/legal-review/AWS/joda-time.joda-time-2.8.1/copyright-keep similarity index 100% rename from tools/legal-review/Database/joda-time.joda-time-2.8.1/copyright-keep rename to tools/legal-review/AWS/joda-time.joda-time-2.8.1/copyright-keep diff --git a/tools/legal-review/Database/joda-time.joda-time-2.8.1/custom-license b/tools/legal-review/AWS/joda-time.joda-time-2.8.1/custom-license similarity index 100% rename from tools/legal-review/Database/joda-time.joda-time-2.8.1/custom-license rename to tools/legal-review/AWS/joda-time.joda-time-2.8.1/custom-license diff --git a/tools/legal-review/Database/joda-time.joda-time-2.8.1/files-ignore 
b/tools/legal-review/AWS/joda-time.joda-time-2.8.1/files-ignore similarity index 100% rename from tools/legal-review/Database/joda-time.joda-time-2.8.1/files-ignore rename to tools/legal-review/AWS/joda-time.joda-time-2.8.1/files-ignore diff --git a/tools/legal-review/Database/joda-time.joda-time-2.8.1/files-keep b/tools/legal-review/AWS/joda-time.joda-time-2.8.1/files-keep similarity index 100% rename from tools/legal-review/Database/joda-time.joda-time-2.8.1/files-keep rename to tools/legal-review/AWS/joda-time.joda-time-2.8.1/files-keep diff --git a/tools/legal-review/Database/org.apache.httpcomponents.httpclient-4.5.13/copyright-ignore b/tools/legal-review/AWS/org.apache.httpcomponents.httpclient-4.5.13/copyright-ignore similarity index 100% rename from tools/legal-review/Database/org.apache.httpcomponents.httpclient-4.5.13/copyright-ignore rename to tools/legal-review/AWS/org.apache.httpcomponents.httpclient-4.5.13/copyright-ignore diff --git a/tools/legal-review/Database/org.apache.httpcomponents.httpclient-4.5.13/copyright-keep b/tools/legal-review/AWS/org.apache.httpcomponents.httpclient-4.5.13/copyright-keep similarity index 100% rename from tools/legal-review/Database/org.apache.httpcomponents.httpclient-4.5.13/copyright-keep rename to tools/legal-review/AWS/org.apache.httpcomponents.httpclient-4.5.13/copyright-keep diff --git a/tools/legal-review/Database/org.apache.httpcomponents.httpclient-4.5.13/files-ignore b/tools/legal-review/AWS/org.apache.httpcomponents.httpclient-4.5.13/files-ignore similarity index 100% rename from tools/legal-review/Database/org.apache.httpcomponents.httpclient-4.5.13/files-ignore rename to tools/legal-review/AWS/org.apache.httpcomponents.httpclient-4.5.13/files-ignore diff --git a/tools/legal-review/Database/org.apache.httpcomponents.httpclient-4.5.13/files-keep b/tools/legal-review/AWS/org.apache.httpcomponents.httpclient-4.5.13/files-keep similarity index 100% rename from 
tools/legal-review/Database/org.apache.httpcomponents.httpclient-4.5.13/files-keep rename to tools/legal-review/AWS/org.apache.httpcomponents.httpclient-4.5.13/files-keep diff --git a/tools/legal-review/Database/org.apache.httpcomponents.httpcore-4.4.13/copyright-ignore b/tools/legal-review/AWS/org.apache.httpcomponents.httpcore-4.4.13/copyright-ignore similarity index 100% rename from tools/legal-review/Database/org.apache.httpcomponents.httpcore-4.4.13/copyright-ignore rename to tools/legal-review/AWS/org.apache.httpcomponents.httpcore-4.4.13/copyright-ignore diff --git a/tools/legal-review/Database/org.apache.httpcomponents.httpcore-4.4.13/copyright-keep b/tools/legal-review/AWS/org.apache.httpcomponents.httpcore-4.4.13/copyright-keep similarity index 100% rename from tools/legal-review/Database/org.apache.httpcomponents.httpcore-4.4.13/copyright-keep rename to tools/legal-review/AWS/org.apache.httpcomponents.httpcore-4.4.13/copyright-keep diff --git a/tools/legal-review/Database/org.apache.httpcomponents.httpcore-4.4.13/files-ignore b/tools/legal-review/AWS/org.apache.httpcomponents.httpcore-4.4.13/files-ignore similarity index 100% rename from tools/legal-review/Database/org.apache.httpcomponents.httpcore-4.4.13/files-ignore rename to tools/legal-review/AWS/org.apache.httpcomponents.httpcore-4.4.13/files-ignore diff --git a/tools/legal-review/Database/org.apache.httpcomponents.httpcore-4.4.13/files-keep b/tools/legal-review/AWS/org.apache.httpcomponents.httpcore-4.4.13/files-keep similarity index 100% rename from tools/legal-review/Database/org.apache.httpcomponents.httpcore-4.4.13/files-keep rename to tools/legal-review/AWS/org.apache.httpcomponents.httpcore-4.4.13/files-keep diff --git a/tools/legal-review/AWS/report-state b/tools/legal-review/AWS/report-state new file mode 100644 index 000000000000..437695e8c9a3 --- /dev/null +++ b/tools/legal-review/AWS/report-state @@ -0,0 +1,3 @@ +2F8678B00965B3AF1B2A31D12012CD7B7DD9C91ED3DF66311284EFAB1ED25EBB 
+F0685E9E9F5315627AEDD222C040620631BE0146BA5C093F750E8E0CDEC8E493 +0 diff --git a/tools/legal-review/Database/reviewed-licenses/Apache_2 b/tools/legal-review/AWS/reviewed-licenses/Apache_2 similarity index 100% rename from tools/legal-review/Database/reviewed-licenses/Apache_2 rename to tools/legal-review/AWS/reviewed-licenses/Apache_2 diff --git a/tools/legal-review/Database/reviewed-licenses/Apache_License__Version_2.0 b/tools/legal-review/AWS/reviewed-licenses/Apache_License__Version_2.0 similarity index 100% rename from tools/legal-review/Database/reviewed-licenses/Apache_License__Version_2.0 rename to tools/legal-review/AWS/reviewed-licenses/Apache_License__Version_2.0 diff --git a/tools/legal-review/Database/reviewed-licenses/The_Apache_License__Version_2.0 b/tools/legal-review/AWS/reviewed-licenses/The_Apache_License__Version_2.0 similarity index 100% rename from tools/legal-review/Database/reviewed-licenses/The_Apache_License__Version_2.0 rename to tools/legal-review/AWS/reviewed-licenses/The_Apache_License__Version_2.0 diff --git a/tools/legal-review/Database/reviewed-licenses/The_Apache_Software_License__Version_2.0 b/tools/legal-review/AWS/reviewed-licenses/The_Apache_Software_License__Version_2.0 similarity index 100% rename from tools/legal-review/Database/reviewed-licenses/The_Apache_Software_License__Version_2.0 rename to tools/legal-review/AWS/reviewed-licenses/The_Apache_Software_License__Version_2.0 diff --git a/tools/legal-review/Database/software.amazon.ion.ion-java-1.0.2/copyright-ignore b/tools/legal-review/AWS/software.amazon.ion.ion-java-1.0.2/copyright-ignore similarity index 100% rename from tools/legal-review/Database/software.amazon.ion.ion-java-1.0.2/copyright-ignore rename to tools/legal-review/AWS/software.amazon.ion.ion-java-1.0.2/copyright-ignore diff --git a/tools/legal-review/Database/software.amazon.ion.ion-java-1.0.2/copyright-keep b/tools/legal-review/AWS/software.amazon.ion.ion-java-1.0.2/copyright-keep similarity index 100% 
rename from tools/legal-review/Database/software.amazon.ion.ion-java-1.0.2/copyright-keep rename to tools/legal-review/AWS/software.amazon.ion.ion-java-1.0.2/copyright-keep diff --git a/tools/legal-review/Database/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-ignore b/tools/legal-review/Database/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-ignore deleted file mode 100644 index 82f5404b1ffd..000000000000 --- a/tools/legal-review/Database/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.12.6/files-ignore +++ /dev/null @@ -1,2 +0,0 @@ -#license -/FasterXML/jackson-dataformats-binary/blob/2.15/LICENSE diff --git a/tools/legal-review/Database/report-state b/tools/legal-review/Database/report-state index e1e69c1dc970..912a44057c13 100644 --- a/tools/legal-review/Database/report-state +++ b/tools/legal-review/Database/report-state @@ -1,3 +1,3 @@ -F04556111778820D07D28B45846E7DB8144686C5571EBEFDCF1D3A23E98DA640 -48C573837ADE7AA7B2DF62F71F6522827EEB47194545260798FB8651A7DEB11F +A3B875E753CDAF89505D461A995C45B4FA642314D80643553FD7240B4FA74C90 +305BAB4B6F0C1192CBD5577DF551B756922CE6627FAC91D59989769F2EA99D0D 0 diff --git a/tools/legal-review/Database/reviewed-licenses/BSD-2-Clause b/tools/legal-review/Database/reviewed-licenses/BSD-2-Clause deleted file mode 100644 index 0e3ee3004f8d..000000000000 --- a/tools/legal-review/Database/reviewed-licenses/BSD-2-Clause +++ /dev/null @@ -1 +0,0 @@ -tools/legal-review/license-texts/BSD-2-Clause diff --git a/tools/legal-review/Database/reviewed-licenses/BSD_License b/tools/legal-review/Database/reviewed-licenses/BSD_License deleted file mode 100644 index c547acb36c9b..000000000000 --- a/tools/legal-review/Database/reviewed-licenses/BSD_License +++ /dev/null @@ -1 +0,0 @@ -tools/legal-review/license-texts/BSD-3-Clause diff --git a/tools/legal-review/engine/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.13.3/files-ignore 
b/tools/legal-review/engine/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.13.3/files-ignore index 82f5404b1ffd..b16fbfb884ec 100644 --- a/tools/legal-review/engine/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.13.3/files-ignore +++ b/tools/legal-review/engine/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.13.3/files-ignore @@ -1,2 +1,2 @@ #license -/FasterXML/jackson-dataformats-binary/blob/2.15/LICENSE +/FasterXML/jackson-dataformats-binary/blob/2.16/LICENSE diff --git a/tools/legal-review/engine/org.reactivestreams.reactive-streams-1.0.4/copyright-add b/tools/legal-review/engine/org.reactivestreams.reactive-streams-1.0.4/copyright-add new file mode 100644 index 000000000000..8786d0baf98d --- /dev/null +++ b/tools/legal-review/engine/org.reactivestreams.reactive-streams-1.0.4/copyright-add @@ -0,0 +1 @@ +See https://github.com/reactive-streams/reactive-streams-jvm for more information. diff --git a/tools/legal-review/engine/org.reactivestreams.reactive-streams-1.0.4/copyright-keep-context b/tools/legal-review/engine/org.reactivestreams.reactive-streams-1.0.4/copyright-keep-context deleted file mode 100644 index 2d1291cd5315..000000000000 --- a/tools/legal-review/engine/org.reactivestreams.reactive-streams-1.0.4/copyright-keep-context +++ /dev/null @@ -1 +0,0 @@ -this code has waived all copyright and related or neighboring * diff --git a/tools/legal-review/engine/report-state b/tools/legal-review/engine/report-state index 8906949a45e0..e69d16364369 100644 --- a/tools/legal-review/engine/report-state +++ b/tools/legal-review/engine/report-state @@ -1,3 +1,3 @@ 4A60E6DF1A26DB1AFA55D834537366D7FD5260B08A6DFD1E507D2AFFEE659DE2 -C5B585894062096A4041C121B962E581E1B797922B192935C959B1C464DED266 +B7EE0B813DE3C154A8F95ECD932171A96FB363227D6DEDF8E28224C4F09E74FB 0 diff --git a/tools/legal-review/project-manager/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.13.3/files-ignore 
b/tools/legal-review/project-manager/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.13.3/files-ignore index 82f5404b1ffd..b16fbfb884ec 100644 --- a/tools/legal-review/project-manager/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.13.3/files-ignore +++ b/tools/legal-review/project-manager/com.fasterxml.jackson.dataformat.jackson-dataformat-cbor-2.13.3/files-ignore @@ -1,2 +1,2 @@ #license -/FasterXML/jackson-dataformats-binary/blob/2.15/LICENSE +/FasterXML/jackson-dataformats-binary/blob/2.16/LICENSE From 0a8f80959f65c9891ff8cffb68c5d9a7e6cd527c Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Fri, 5 May 2023 04:39:22 +1000 Subject: [PATCH 04/28] Fix lint CI (#6567) --- app/ide-desktop/lib/content-config/src/index.ts | 2 +- app/ide-desktop/lib/types/modules.d.ts | 17 +++++++++++++++++ 2 files changed, 18 insertions(+), 1 deletion(-) diff --git a/app/ide-desktop/lib/content-config/src/index.ts b/app/ide-desktop/lib/content-config/src/index.ts index c2aed90d433f..1a68e317199a 100644 --- a/app/ide-desktop/lib/content-config/src/index.ts +++ b/app/ide-desktop/lib/content-config/src/index.ts @@ -2,7 +2,7 @@ import * as semver from 'semver' -import * as linkedDist from '../../../../../target/ensogl-pack/linked-dist/index' +import * as linkedDist from '../../../../../target/ensogl-pack/linked-dist' import BUILD_INFO from '../../../build.json' assert { type: 'json' } // Aliases with the same name as the original. diff --git a/app/ide-desktop/lib/types/modules.d.ts b/app/ide-desktop/lib/types/modules.d.ts index 3c68a6e47708..58762fe78f19 100644 --- a/app/ide-desktop/lib/types/modules.d.ts +++ b/app/ide-desktop/lib/types/modules.d.ts @@ -2,6 +2,23 @@ * * This file MUST NOT `export {}` for the modules to be visible to other files. 
*/ +declare module '*/build.json' { + interface BuildInfo { + commit: string + version: string + engineVersion: string + name: string + } + + const BUILD_INFO: BuildInfo + export default BUILD_INFO +} + +declare module '*/ensogl-pack/linked-dist' { + // eslint-disable-next-line no-restricted-syntax + export * from '../../../../lib/rust/ensogl/pack/js/src/runner/index' +} + declare module '*/gui/config.yaml' { interface Config { windowAppScopeName: string From 1817da799c6f22a0d1e3b309fd3022086304abba Mon Sep 17 00:00:00 2001 From: Michael Mauderer Date: Thu, 4 May 2023 20:43:08 +0200 Subject: [PATCH 05/28] Disregard visualisation for selection of nodes. (#6487) --- CHANGELOG.md | 6 +++++- app/gui/view/graph-editor/src/component/node.rs | 12 ++++++++---- app/gui/view/graph-editor/src/selection.rs | 2 +- 3 files changed, 14 insertions(+), 6 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 39eab0c08148..fa2f0d0f9412 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -135,7 +135,9 @@ - [Added capability to create node widgets with complex UI][6347]. Node widgets such as dropdown can now be placed in the node and affect the code text flow. - [The IDE UI element for selecting the execution mode of the project is now - sending messages to the backend][6341]. + sending messages to the backend.][6341]. +- [Area selectionof nodes no longer takes into account the visualisation that + belongs to the node.][6487]. - [List Editor Widget][6470]. Now you can edit lists by clicking buttons on nodes or by dragging the elements. 
- [Fixed text visualisations which were being cut off at the last line.][6421] @@ -203,6 +205,8 @@ [5895]: https://github.com/enso-org/enso/pull/6130 [6035]: https://github.com/enso-org/enso/pull/6035 [6097]: https://github.com/enso-org/enso/pull/6097 +[6097]: https://github.com/enso-org/enso/pull/6341 +[6487]: https://github.com/enso-org/enso/pull/6487 [6341]: https://github.com/enso-org/enso/pull/6341 [6470]: https://github.com/enso-org/enso/pull/6470 diff --git a/app/gui/view/graph-editor/src/component/node.rs b/app/gui/view/graph-editor/src/component/node.rs index 33a73885fd17..34dfc1fb17ef 100644 --- a/app/gui/view/graph-editor/src/component/node.rs +++ b/app/gui/view/graph-editor/src/component/node.rs @@ -373,6 +373,8 @@ ensogl::define_endpoints_2! { /// [`visualization_visible`] is updated. Please remember, that the [`position`] is not /// immediately updated, only during the Display Object hierarchy update bounding_box (BoundingBox), + /// The bounding box of the node without the visualization. + inner_bounding_box (BoundingBox), /// A set of widgets attached to a method requires metadata to be queried. The tuple /// contains the ID of the call expression the widget is attached to, and the ID of that /// call's target expression (`self` or first argument). 
@@ -1009,7 +1011,10 @@ impl Node { visualization_enabled_and_visible <- visualization_enabled && visualization_visible; bbox_input <- all4( &out.position,&new_size,&visualization_enabled_and_visible,visualization_size); - out.bounding_box <+ bbox_input.map(|(a,b,c,d)| bounding_box(*a,*b,*c,*d)); + out.bounding_box <+ bbox_input.map(|(a,b,c,d)| bounding_box(*a,*b,c.then(|| *d))); + + inner_bbox_input <- all2(&out.position,&new_size); + out.inner_bounding_box <+ inner_bbox_input.map(|(a,b)| bounding_box(*a,*b,None)); // === VCS Handling === @@ -1069,13 +1074,12 @@ fn visualization_offset(node_width: f32) -> Vector2 { fn bounding_box( node_position: Vector2, node_size: Vector2, - visualization_enabled_and_visible: bool, - visualization_size: Vector2, + visualization_size: Option, ) -> BoundingBox { let x_offset_to_node_center = x_offset_to_node_center(node_size.x); let node_bbox_pos = node_position + Vector2(x_offset_to_node_center, 0.0) - node_size / 2.0; let node_bbox = BoundingBox::from_position_and_size(node_bbox_pos, node_size); - if visualization_enabled_and_visible { + if let Some(visualization_size) = visualization_size { let visualization_offset = visualization_offset(node_size.x); let visualization_pos = node_position + visualization_offset; let visualization_bbox_pos = visualization_pos - visualization_size / 2.0; diff --git a/app/gui/view/graph-editor/src/selection.rs b/app/gui/view/graph-editor/src/selection.rs index 2528f25e9591..b59724cf3925 100644 --- a/app/gui/view/graph-editor/src/selection.rs +++ b/app/gui/view/graph-editor/src/selection.rs @@ -235,7 +235,7 @@ fn get_nodes_in_bounding_box(bounding_box: &BoundingBox, nodes: &Nodes) -> Vec Date: Thu, 4 May 2023 14:09:00 -0700 Subject: [PATCH 06/28] Apply screen size to screen-size-independent passes (#6569) It is needed to restore the viewport. Fixes #6500. 
--- .../core/src/display/render/composer.rs | 34 +++++++++++++------ .../ensogl/core/src/display/render/pass.rs | 1 - 2 files changed, 23 insertions(+), 12 deletions(-) diff --git a/lib/rust/ensogl/core/src/display/render/composer.rs b/lib/rust/ensogl/core/src/display/render/composer.rs index 09134edeeffd..94b9817952f7 100644 --- a/lib/rust/ensogl/core/src/display/render/composer.rs +++ b/lib/rust/ensogl/core/src/display/render/composer.rs @@ -61,18 +61,10 @@ impl { self.width = width; self.height = height; self.pixel_ratio = pixel_ratio; - let ctx = &self.context; - let vars = &self.variables; let defs = self.pipeline.passes_clone(); - let old_passes = self.passes.drain(..); - let passes = defs.into_iter().zip(old_passes).map(|(def, pass)| { - if def.is_screen_size_independent() { - pass - } else { - ComposerPass::new(ctx, vars, def, width, height, pixel_ratio) - } - }).collect_vec(); - self.passes = passes; + for (pass, def) in self.passes.iter_mut().zip(defs) { + pass.resize(def, width, height, pixel_ratio); + } } /// Initialize all pass definitions from the [`Pipeline`]. @@ -145,4 +137,24 @@ impl ComposerPass { pub fn run(&mut self, update_status: UpdateStatus) { self.pass.run(&self.instance, update_status); } + + /// Update the pass for a change in screen size. Depending on the pass, this may require + /// reinitialization. 
+ pub fn resize( + &mut self, + def: Box, + width: i32, + height: i32, + pixel_ratio: f32, + ) { + if def.is_screen_size_independent() { + self.instance.width = width; + self.instance.height = height; + self.instance.pixel_ratio = pixel_ratio; + } else { + let ctx = self.context.clone(); + let vars = mem::take(&mut self.variables); + *self = ComposerPass::new(&ctx, &vars, def, width, height, pixel_ratio); + } + } } diff --git a/lib/rust/ensogl/core/src/display/render/pass.rs b/lib/rust/ensogl/core/src/display/render/pass.rs index 62078713d785..63789aa3ad76 100644 --- a/lib/rust/ensogl/core/src/display/render/pass.rs +++ b/lib/rust/ensogl/core/src/display/render/pass.rs @@ -52,7 +52,6 @@ pub struct Instance { impl Instance { /// Constructor - #[allow(clippy::borrowed_box)] pub fn new( context: &Context, variables: &UniformScope, From fcfc44c3e72a3b3c79d4aa4b0e035c96bf932cb8 Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Fri, 5 May 2023 14:52:22 +1000 Subject: [PATCH 07/28] Run prettier on `CHANGELOG.md`; bump prettier (#6571) --- CHANGELOG.md | 2 +- package-lock.json | 14 +++++++------- package.json | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index fa2f0d0f9412..8f3a630fbf1f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -136,7 +136,7 @@ such as dropdown can now be placed in the node and affect the code text flow. - [The IDE UI element for selecting the execution mode of the project is now sending messages to the backend.][6341]. -- [Area selectionof nodes no longer takes into account the visualisation that +- [Area selectionof nodes no longer takes into account the visualisation that belongs to the node.][6487]. - [List Editor Widget][6470]. Now you can edit lists by clicking buttons on nodes or by dragging the elements. 
diff --git a/package-lock.json b/package-lock.json index 368f1fdf224a..f6c5dbf72b3d 100644 --- a/package-lock.json +++ b/package-lock.json @@ -11,7 +11,7 @@ "devDependencies": { "@typescript-eslint/eslint-plugin": "^5.48.1", "@typescript-eslint/parser": "^5.48.1", - "prettier": "2.8.1" + "prettier": "2.8.8" } }, "node_modules/@eslint-community/eslint-utils": { @@ -1635,9 +1635,9 @@ } }, "node_modules/prettier": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.1.tgz", - "integrity": "sha512-lqGoSJBQNJidqCHE80vqZJHWHRFoNYsSpP9AjFhlhi9ODCJA541svILes/+/1GM3VaL/abZi7cpFzOpdR9UPKg==", + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", "dev": true, "bin": { "prettier": "bin-prettier.js" @@ -3176,9 +3176,9 @@ "peer": true }, "prettier": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.1.tgz", - "integrity": "sha512-lqGoSJBQNJidqCHE80vqZJHWHRFoNYsSpP9AjFhlhi9ODCJA541svILes/+/1GM3VaL/abZi7cpFzOpdR9UPKg==", + "version": "2.8.8", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.8.tgz", + "integrity": "sha512-tdN8qQGvNjw4CHbY+XXk0JgCXn9QiF21a55rBe5LJAU+kDyC4WQn4+awm2Xfk2lQMk5fKup9XgzTZtGkjBdP9Q==", "dev": true }, "proxy-from-env": { diff --git a/package.json b/package.json index 97eaabec2938..5054bac2a09b 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "devDependencies": { "@typescript-eslint/eslint-plugin": "^5.48.1", "@typescript-eslint/parser": "^5.48.1", - "prettier": "2.8.1" + "prettier": "2.8.8" }, "dependencies": { "chromedriver": "^106.0.1", From 3a832d3e53e9b8ef1b8c52d232dd1f54163a48e5 Mon Sep 17 00:00:00 2001 From: Kaz Wesley Date: Thu, 4 May 2023 22:48:24 -0700 Subject: [PATCH 08/28] Fix default dynamic-asssets directory (#6537) Set the default value correctly, and use it. Fixes #6475. 
--- app/ide-desktop/lib/content/src/index.ts | 2 -- lib/rust/ensogl/pack/js/src/runner/config.json | 2 +- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/app/ide-desktop/lib/content/src/index.ts b/app/ide-desktop/lib/content/src/index.ts index 0f71f036b00d..b3f27b68b35c 100644 --- a/app/ide-desktop/lib/content/src/index.ts +++ b/app/ide-desktop/lib/content/src/index.ts @@ -142,8 +142,6 @@ class Main implements AppRunner { { loader: { wasmUrl: 'pkg-opt.wasm', - jsUrl: 'pkg.js', - assetsUrl: 'dynamic-assets', }, }, inputConfig diff --git a/lib/rust/ensogl/pack/js/src/runner/config.json b/lib/rust/ensogl/pack/js/src/runner/config.json index b16832f647db..64a5d57f79ff 100644 --- a/lib/rust/ensogl/pack/js/src/runner/config.json +++ b/lib/rust/ensogl/pack/js/src/runner/config.json @@ -25,7 +25,7 @@ "primary": false }, "assetsUrl": { - "value": "assets", + "value": "dynamic-assets", "description": "The URL of the dynamic assets directory.", "primary": false }, From 0be572af6a806d3fb1b18dc436ab8a19123a3c67 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Wa=C5=9Bko?= Date: Fri, 5 May 2023 10:12:07 +0200 Subject: [PATCH 09/28] Only `Join_Kind.Inner` removes the common-named columns (#6564) As requested by @jdunkerley --- .../Standard/Database/0.0.0-dev/src/Data/Table.enso | 10 ++++++---- .../src/Internal/Common/Database_Join_Helper.enso | 4 ++-- .../lib/Standard/Table/0.0.0-dev/src/Data/Table.enso | 9 +++++---- .../src/Common_Table_Operations/Integration_Tests.enso | 10 +++++----- .../src/Common_Table_Operations/Join/Join_Spec.enso | 6 +++++- test/Table_Tests/src/Database/Upload_Spec.enso | 3 ++- 6 files changed, 25 insertions(+), 17 deletions(-) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index 6aa040edba4e..b2ddb208ca0f 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ 
b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -897,9 +897,10 @@ type Table ? Joining on equality of columns with the same name - When joining two columns with the same name and an equality condition, - only one copy of column will be included in the output (avoiding - unnecessary duplication and renaming). + When performing an Inner join on two columns with the same name and an + equality condition, only one copy of column will be included in the + output (as these two columns would have the exact same content, so they + would be redundant). ? Same-name column join shorthand @@ -931,7 +932,8 @@ type Table problem_builder = Problem_Builder.new join_resolution = Database_Join_Helper.make_join_helpers left right left_setup.column_mapping right_setup.column_mapping . resolve on on_problems - result_columns = Database_Join_Helper.select_columns_for_join join_kind left_setup.new_columns right_setup.new_columns join_resolution.redundant_column_names right_prefix problem_builder + right_columns_to_drop = if join_kind == Join_Kind.Inner then join_resolution.redundant_column_names else [] + result_columns = Database_Join_Helper.select_columns_for_join join_kind left_setup.new_columns right_setup.new_columns right_columns_to_drop right_prefix problem_builder ## TODO proper equality of nulls in join conditions, see: https://www.pivotaltracker.com/story/show/184109759 diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Database_Join_Helper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Database_Join_Helper.enso index bcd871267e45..3cf97a23d564 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Database_Join_Helper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Database_Join_Helper.enso @@ -44,13 +44,13 @@ make_join_helpers left_table right_table left_column_mapping right_column_mappin names and returns a list of new columns that should be present in 
the join result (depending on the kind of performed join), ensuring that any colliding columns are properly renamed. -select_columns_for_join join_kind left_new_columns right_new_columns redundant_column_names right_prefix problem_builder = +select_columns_for_join join_kind left_new_columns right_new_columns right_columns_to_drop right_prefix problem_builder = case join_kind of Join_Kind.Left_Exclusive -> left_new_columns Join_Kind.Right_Exclusive -> right_new_columns _ -> left_names = left_new_columns.map .name - right_kept_columns = right_new_columns.filter c-> redundant_column_names.contains c.name . not + right_kept_columns = right_new_columns.filter c-> right_columns_to_drop.contains c.name . not right_names = right_kept_columns.map .name renaming_strategy = Unique_Name_Strategy.new diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index 46ada8c4d882..c4a844e69c8f 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -1363,9 +1363,10 @@ type Table ? Joining on equality of columns with the same name - When joining two columns with the same name and an equality condition, - only one copy of column will be included in the output (avoiding - unnecessary duplication and renaming). + When performing an Inner join on two columns with the same name and an + equality condition, only one copy of column will be included in the + output (as these two columns would have the exact same content, so they + would be redundant). ? Same-name column join shorthand @@ -1393,7 +1394,7 @@ type Table _ -> [True, True] join_resolution = make_join_helpers self right . 
resolve on on_problems - right_columns_to_drop = join_resolution.redundant_column_names + right_columns_to_drop = if join_kind == Join_Kind.Inner then join_resolution.redundant_column_names else [] java_conditions = join_resolution.conditions new_java_table = self.java_table.join right.java_table java_conditions (rows_to_keep.at 0) (rows_to_keep.at 1) (rows_to_keep.at 2) (columns_to_keep.at 0) (columns_to_keep.at 1) right_columns_to_drop right_prefix diff --git a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso index c358faddf0e2..94eafeba889b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso @@ -37,12 +37,12 @@ spec setup = t3 = t2.aggregate [Group_By "Letter", Count] t4 = t3.join t1 on="Count" join_kind=Join_Kind.Left_Outer |> materialize |> _.order_by "Letter" - t4.columns.map .name . should_equal ["Letter", "Count", "Class"] + t4.columns.map .name . should_equal ["Letter", "Count", "Right_Count", "Class"] rows = t4.rows . map .to_vector - rows.at 0 . should_equal ["A", 4, Nothing] - rows.at 1 . should_equal ["B", 3, "Z"] - rows.at 2 . should_equal ["C", 2, "Y"] - rows.at 3 . should_equal ["D", 2, "Y"] + rows.at 0 . should_equal ["A", 4, Nothing, Nothing] + rows.at 1 . should_equal ["B", 3, 3, "Z"] + rows.at 2 . should_equal ["C", 2, 2, "Y"] + rows.at 3 . 
should_equal ["D", 2, 2, "Y"] Test.specify "aggregates and distinct" <| t2 = table_builder [["Letter", ["A", "B", "A", "A", "C", "C"]], ["Points", [2, 5, 2, 1, 10, 3]]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso index 20f5ae11ece4..b0b56bfffd65 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso @@ -88,11 +88,15 @@ spec setup = r.at "Z" . to_vector . should_equal [2, 2, 3, 3, 4, 5] r.at "Right_Z" . to_vector . should_equal [1, 2, 1, 2, 4, 4] - Test.specify "should allow to join on equality of multiple columns and drop redundant columns" <| + Test.specify "should allow to join on equality of multiple columns and drop redundant columns if Inner join" <| conditions = [Join_Condition.Equals "Y" "Y", Join_Condition.Equals "X" "X"] r = t3.join t4 on=conditions |> materialize |> _.order_by ["X", "Y", "Z", "Right_Z"] check_xy_joined r + [Join_Kind.Full, Join_Kind.Left_Outer, Join_Kind.Right_Outer].each kind-> + r2 = t3.join t4 join_kind=kind on=conditions + expect_column_names ["X", "Y", "Z", "Right_X", "Right_Y", "Right_Z"] r2 + Test.specify "should support same-name column join shorthand" <| r = t3.join t4 on=["X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right_Z"] check_xy_joined r diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index 1fa4bca70e87..3e3f0bfba7b2 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -143,10 +143,11 @@ spec make_new_connection prefix persistent_connector=True = db_table_4 = db_table_2.join db_table_3 join_kind=Join_Kind.Left_Outer copied_table = db_table_4.create_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=Nothing - copied_table.column_names . 
should_equal ["X", "Y", "C1", "C2", "C3"] + copied_table.column_names . should_equal ["X", "Y", "C1", "C2", "Right_X", "C3"] copied_table.at "X" . to_vector . should_equal [1, 1, 2] copied_table.at "C1" . to_vector . should_equal [101, 102, 203] copied_table.at "C2" . to_vector . should_equal ["constant_text", "constant_text", "constant_text"] + copied_table.at "Right_X" . to_vector . should_equal [Nothing, Nothing, 2] copied_table.at "C3" . to_vector . should_equal [Nothing, Nothing, 5] # We check that this is indeed querying a simple DB table and not a complex query like `db_table_4` would be, From 0578196623c826aff812ac00b8399fa70606bb6a Mon Sep 17 00:00:00 2001 From: "Stijn (\"stain\") Seghers" Date: Fri, 5 May 2023 11:04:07 +0200 Subject: [PATCH 10/28] Fix #5075: stop panning on full-screen visualisation (#6530) Closes #5075: dragging or scrolling while a visualisation is in full screen caused the camera to move in the graph editor. The problem was that clicking on the visualisation triggered some FRP node that indicated that the project list should be closed, which then indicated that the navigator shouldn't be disabled. However, the FRP code in the graph editor interpreted "shouldn't be disabled" as "should be enabled", ignoring the fact that there's also a full-screen visualisation, which should always disable the navigator. https://user-images.githubusercontent.com/607786/235908932-0b579d69-5fd8-438e-a82b-02678d8e6156.mp4 --- CHANGELOG.md | 3 +++ app/gui/view/graph-editor/src/lib.rs | 13 ++----------- 2 files changed, 5 insertions(+), 11 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8f3a630fbf1f..0346125f0b28 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -141,8 +141,11 @@ - [List Editor Widget][6470]. Now you can edit lists by clicking buttons on nodes or by dragging the elements. 
- [Fixed text visualisations which were being cut off at the last line.][6421] +- [Fixed a bug where, when scrolling or dragging on a full-screen visualization, + the view of the graph changed as well.][6530] [6421]: https://github.com/enso-org/enso/pull/6421 +[6530]: https://github.com/enso-org/enso/pull/6530 #### EnsoGL (rendering engine) diff --git a/app/gui/view/graph-editor/src/lib.rs b/app/gui/view/graph-editor/src/lib.rs index c0fe45a37692..ec91a150ae0b 100644 --- a/app/gui/view/graph-editor/src/lib.rs +++ b/app/gui/view/graph-editor/src/lib.rs @@ -2794,17 +2794,8 @@ fn new_graph_editor(app: &Application) -> GraphEditor { // ======================== frp::extend! { network - no_vis_selected <- out.some_visualisation_selected.on_false(); - some_vis_selected <- out.some_visualisation_selected.on_true(); - - set_navigator_false <- inputs.set_navigator_disabled.on_true(); - set_navigator_true <- inputs.set_navigator_disabled.on_false(); - - disable_navigator <- any_(&set_navigator_false,&some_vis_selected); - enable_navigator <- any_(&set_navigator_true,&no_vis_selected); - - model.navigator.frp.set_enabled <+ bool(&disable_navigator,&enable_navigator); - + navigator_disabled <- out.some_visualisation_selected.or(&inputs.set_navigator_disabled); + model.navigator.frp.set_enabled <+ navigator_disabled.not(); out.navigator_active <+ model.navigator.frp.enabled; } From a842130ff3162595f093e554a7d943c967146e63 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Fri, 5 May 2023 11:46:00 +0200 Subject: [PATCH 11/28] Let ChangesetBuilder.invalidated search even container elements (#6548) --- .../compiler/context/ChangesetBuilder.scala | 88 +++++++++++++++---- .../test/context/ChangesetBuilderTest.scala | 69 ++++++++++++++- 2 files changed, 138 insertions(+), 19 deletions(-) diff --git a/engine/runtime/src/main/scala/org/enso/compiler/context/ChangesetBuilder.scala b/engine/runtime/src/main/scala/org/enso/compiler/context/ChangesetBuilder.scala index 
5445e48fec37..a49c3a3c3619 100644 --- a/engine/runtime/src/main/scala/org/enso/compiler/context/ChangesetBuilder.scala +++ b/engine/runtime/src/main/scala/org/enso/compiler/context/ChangesetBuilder.scala @@ -194,8 +194,12 @@ final class ChangesetBuilder[A: TextEditor: IndexedSource]( else { val edit = edits.dequeue() val locationEdit = ChangesetBuilder.toLocationEdit(edit, source) - val invalidatedSet = - ChangesetBuilder.invalidated(tree, locationEdit.location) + var invalidatedSet = + ChangesetBuilder.invalidated(tree, locationEdit.location, true) + if (invalidatedSet.isEmpty) { + invalidatedSet = + ChangesetBuilder.invalidated(tree, locationEdit.location, false) + } val newTree = ChangesetBuilder.updateLocations(tree, locationEdit) val newSource = TextEditor[A].edit(source, edit) go(newTree, newSource, edits, ids ++= invalidatedSet.map(_.id)) @@ -278,7 +282,7 @@ object ChangesetBuilder { * @param id the node id * @param location the node location */ - private case class Node(id: NodeId, location: Location) { + private case class Node(id: NodeId, location: Location, leaf: Boolean) { /** Shift the node location. * @@ -302,7 +306,7 @@ object ChangesetBuilder { * @return the node if `ir` contains a location */ def fromIr(ir: IR): Option[Node] = - ir.location.map(loc => Node(NodeId(ir), loc.location)) + ir.location.map(loc => Node(NodeId(ir), loc.location, true)) /** Create an artificial node with fixed [[NodeId]]. It is used to select * nodes by location in the tree. 
@@ -311,7 +315,11 @@ object ChangesetBuilder { * @return a select node */ def select(location: Location): Node = - new Node(NodeId(UUID.nameUUIDFromBytes(Array()), None, None), location) + new Node( + NodeId(UUID.nameUUIDFromBytes(Array()), None, None), + location, + false + ) implicit val ordering: Ordering[Node] = (x: Node, y: Node) => { val compareStart = @@ -336,17 +344,55 @@ object ChangesetBuilder { * @return the tree representation of the IR */ private def buildTree(ir: IR): Tree = { - @scala.annotation.tailrec - def go(input: mutable.Queue[IR], acc: Tree): Tree = - if (input.isEmpty) acc - else { - val ir = input.dequeue() - if (ir.children.isEmpty) { - Node.fromIr(ir).foreach(acc.add) + def depthFirstSearch(currentIr: IR, acc: Tree): Unit = { + if (currentIr.children.isEmpty) { + Node.fromIr(currentIr).foreach(acc.add) + } else { + val hasImportantId = currentIr.getExternalId.nonEmpty + if (hasImportantId) { + val collectChildrenIntervals = new Tree() + currentIr.children.map(depthFirstSearch(_, collectChildrenIntervals)) + + def fillGapsInChildrenNodesWithNonLeafNodes( + previousPosition: Int, + nextNode: Node + ): Int = { + if (previousPosition < nextNode.location.start) { + val nodeBetweenPreviousPositionAndNextNode = + Node( + NodeId(currentIr), + Location(previousPosition, nextNode.location.start), + false + ) + acc += nodeBetweenPreviousPositionAndNextNode + } + acc += nextNode + nextNode.location.end + } + val beginOfNonLeafIr = currentIr.location.get.location.start + val endOfNonLeafIr = currentIr.location.get.location.end + val lastCoveredPosition = + collectChildrenIntervals.foldLeft(beginOfNonLeafIr)( + fillGapsInChildrenNodesWithNonLeafNodes + ) + val hasRemainingTextAfterLastChild = + lastCoveredPosition < endOfNonLeafIr + if (hasRemainingTextAfterLastChild) { + val nodeAfterLastChild = Node( + NodeId(currentIr), + Location(lastCoveredPosition, endOfNonLeafIr), + false + ) + acc += nodeAfterLastChild + } + } else { + 
currentIr.children.map(depthFirstSearch(_, acc)) } - go(input ++= ir.children, acc) } - go(mutable.Queue(ir), mutable.TreeSet()) + } + val collectNodes = new Tree() + depthFirstSearch(ir, collectNodes) + collectNodes } /** Update the tree locations after applying the edit. @@ -371,12 +417,18 @@ object ChangesetBuilder { * @param edit the location of the edit * @return the invalidated nodes of the tree */ - private def invalidated(tree: Tree, edit: Location): Tree = { + private def invalidated( + tree: Tree, + edit: Location, + onlyLeafs: Boolean + ): Tree = { val invalidated = mutable.TreeSet[ChangesetBuilder.Node]() tree.iterator.foreach { node => - if (intersect(edit, node)) { - invalidated += node - tree -= node + if (!onlyLeafs || node.leaf) { + if (intersect(edit, node)) { + invalidated += node + tree -= node + } } } invalidated diff --git a/engine/runtime/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala b/engine/runtime/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala index b0854b754274..106031134eb1 100644 --- a/engine/runtime/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala +++ b/engine/runtime/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala @@ -375,10 +375,77 @@ class ChangesetBuilderTest extends CompilerTest { ) } + "toggle defaulted boolean parameter" in { + implicit val moduleContext: ModuleContext = freshModuleContext + + val code = + """ + |from Standard.Base import all + |from Standard.Base.Data.Boolean import Boolean + |import Standard.Visualization + | + |main = + | text2 = " B B B " + | text1 = "B" + | text3 = "@" + | operator2 = text2.replace text1 text3 (Case_Sensitivity.Insensitive) + | + | + | + | + |#### METADATA #### + 
|[[{"index":{"value":5},"size":{"value":8}},"6d97d061-2c1d-4540-8f7d-3d8aa765323d"],[{"index":{"value":13},"size":{"value":1}},"a6a1b572-4867-4317-8a6e-b496b286ea2d"],[{"index":{"value":14},"size":{"value":4}},"b82e52c6-e2f0-4e16-b835-59ab8838cb2c"],[{"index":{"value":5},"size":{"value":13}},"4dd74fbb-c6df-4109-8821-f4643d483355"],[{"index":{"value":0},"size":{"value":29}},"82014253-a4f5-431b-971b-2348f5a2ca1f"],[{"index":{"value":35},"size":{"value":8}},"b22aad40-a237-4d47-a140-930d731ca80e"],[{"index":{"value":43},"size":{"value":1}},"9d7f45f1-f537-480d-b65f-37a90c77455a"],[{"index":{"value":44},"size":{"value":4}},"c6ad377f-1fce-431d-bbd7-cfe88624c50d"],[{"index":{"value":35},"size":{"value":13}},"202d7fa4-c782-4bb8-900f-7bf5f9ebad99"],[{"index":{"value":48},"size":{"value":1}},"6847cb14-aa49-455d-be9d-e82a39460005"],[{"index":{"value":49},"size":{"value":4}},"36dcdde8-1667-495a-ab87-7154c8d3be67"],[{"index":{"value":35},"size":{"value":18}},"d67c4ce2-711a-4dd9-a1f8-b48f2730b141"],[{"index":{"value":53},"size":{"value":1}},"295fa6b1-260d-4f23-8bd6-64a1d60f01e5"],[{"index":{"value":54},"size":{"value":7}},"923a2769-ddc0-4339-b919-941155a4d1b7"],[{"index":{"value":35},"size":{"value":26}},"64ef6c8a-810e-42f4-8334-8b12c4ab14bc"],[{"index":{"value":69},"size":{"value":7}},"e8cf68da-2b41-4c70-b163-567a223459fd"],[{"index":{"value":30},"size":{"value":46}},"0b934140-6464-4a05-ae6c-416670e2c7e2"],[{"index":{"value":84},"size":{"value":8}},"eaa41303-7d43-420f-8d64-da8d376fab65"],[{"index":{"value":92},"size":{"value":1}},"d582d163-0d4b-4406-96a3-e4591766406d"],[{"index":{"value":93},"size":{"value":13}},"2649f663-b1b8-44a1-9158-f2cf23038471"],[{"index":{"value":84},"size":{"value":22}},"5b4e8628-e0b7-408e-8070-e5636514c7ac"],[{"index":{"value":77},"size":{"value":29}},"78558eb9-cfb0-41d1-8f9a-0f0c17db3b7c"],[{"index":{"value":108},"size":{"value":4}},"58c9224e-644d-40c1-be25-068b6b6dc947"],[{"index":{"value":113},"size":{"value":1}},"d01b8ec7-e780-40a3-b401-97e89e000484"
],[{"index":{"value":120},"size":{"value":5}},"aa43682d-01ee-453d-9bcb-34c905d8d314"],[{"index":{"value":126},"size":{"value":1}},"2b5d6c3f-e030-4ee4-9102-b573f066efa0"],[{"index":{"value":128},"size":{"value":16}},"54493640-4981-4437-ad6e-fc80ab1b581f"],[{"index":{"value":120},"size":{"value":24}},"c04888d6-bc60-4534-aa7e-df3dd5d52410"],[{"index":{"value":149},"size":{"value":5}},"85ae1943-cfcf-4e53-97c7-f73fdae42529"],[{"index":{"value":155},"size":{"value":1}},"113ae47b-3a1d-497e-8a2a-b7775be6c66e"],[{"index":{"value":157},"size":{"value":3}},"c143e355-9d64-4780-b387-dd364eefa2f9"],[{"index":{"value":149},"size":{"value":11}},"73113bd8-d3cb-4b06-a4d7-425a88bca849"],[{"index":{"value":165},"size":{"value":5}},"a286047b-54a5-45bb-995e-60f207e2af65"],[{"index":{"value":171},"size":{"value":1}},"abace020-1a84-4fa4-a5ee-2f98f73dfcb1"],[{"index":{"value":173},"size":{"value":3}},"ed3e40fb-d19e-4fb6-bebd-f74ee179ae08"],[{"index":{"value":165},"size":{"value":11}},"8ac6594b-c472-41d5-9b00-f13e6f30d5f2"],[{"index":{"value":181},"size":{"value":9}},"25eaa42e-026d-4afd-a433-dde5804b7a5c"],[{"index":{"value":191},"size":{"value":1}},"799a83db-5893-40f7-9f1e-e07c7a3071cd"],[{"index":{"value":193},"size":{"value":5}},"49332623-fe9a-4fe5-9657-7c0ea2b23942"],[{"index":{"value":198},"size":{"value":1}},"7690e4c2-31e7-4a12-8f5c-78c4484b9553"],[{"index":{"value":199},"size":{"value":7}},"3606a45e-50be-49c5-9dda-415263145a15"],[{"index":{"value":193},"size":{"value":13}},"91640cfc-f985-49da-95c8-87181d725299"],[{"index":{"value":207},"size":{"value":5}},"afb24ea2-2f35-4c7e-8394-3ba34626484e"],[{"index":{"value":193},"size":{"value":19}},"86eb77c6-14cc-487a-8993-bf9998244321"],[{"index":{"value":213},"size":{"value":5}},"52a3e9b8-f8d0-43c9-9f6d-9785359c7ba7"],[{"index":{"value":193},"size":{"value":25}},"1cddcb6a-95c8-445b-9a6f-267093b2609c"],[{"index":{"value":220},"size":{"value":16}},"df114ed3-b64d-4e74-ad60-b43a7147ec19"],[{"index":{"value":236},"size":{"value":1}},"b6a14995-bb54
-4956-9abd-9b8cff898ad3"],[{"index":{"value":237},"size":{"value":11}},"fdf942d8-8064-4067-ae38-131b80fa6e0c"],[{"index":{"value":220},"size":{"value":28}},"b8df7e14-9004-4f61-8076-09354f8dbd36"],[{"index":{"value":193},"size":{"value":56}},"0d4b3071-3f7d-472a-bfa7-af118222894e"],[{"index":{"value":181},"size":{"value":68}},"d9ee4c00-74b3-4859-a252-6c9105085076"],[{"index":{"value":115},"size":{"value":135}},"3eba123a-ea77-4db5-89d8-800d35e102e7"],[{"index":{"value":108},"size":{"value":142}},"11d6a77c-3644-4a27-8e15-16808af51c19"]] + |""".stripMargin.linesIterator.mkString("\n") + val edit = + TextEdit(Range(Position(9, 72), Position(9, 72)), " Boolean.True") + + val ir = code.preprocessModule + val atId = "0d4b3071-3f7d-472a-bfa7-af118222894e" + val at = findIR(ir, atId) + val atCode = findCode(code, at) + atCode shouldBe "text2.replace text1 text3 (Case_Sensitivity.Insensitive)" + + val all = new ChangesetBuilder(Rope(code), ir).invalidated(Seq(edit)) + + all + .map(n => n.externalId.getOrElse(n.internalId)) + .map(findCode(code, ir, _)) should contain theSameElementsAs Seq( + atCode + ) + } + } + + def findIR(ir: IR, uuid: String): IR = { + val list = ir.preorder.filter( + _.location + .map(_.id.map(_.toString() == uuid).getOrElse(false)) + .getOrElse(false) + ) + if (list.isEmpty) { + null + } else { + list.head + } + } + + def findCode(code: String, at: IR): String = { + val loc = at.location.get.location + code.substring(loc.start, loc.end) + } + + def findCode(code: String, ir: IR, uuid: UUID): String = { + val at = findIR(ir, uuid.toString()) + if (at == null) { + uuid.toString + } else { + findCode(code, at) + } } def invalidated(ir: IR, code: String, edits: TextEdit*): Set[IR.Identifier] = - new ChangesetBuilder(Rope(code), ir).invalidated(edits).map(_.internalId) + new ChangesetBuilder(Rope(code), ir) + .invalidated(edits) + .map(n => n.externalId.getOrElse(n.internalId)) def invalidatedAll( ir: IR, From 30f27be2194bbb2acf3139fa3e6673adfd6caba6 Mon Sep 
17 00:00:00 2001 From: somebody1234 Date: Fri, 5 May 2023 20:50:44 +1000 Subject: [PATCH 12/28] Fix dashboard issues (#6502) * Fix cloud-v2/#406 * Fix cloud-v2/#408 * Fix cloud-v2/#405 * Fix cloud-v2/#404 * More fixes for cloud-v2/#406 * Fix cloud-v2/#410 * Remove unused variable * Fix template names; fix template creation --- .../src/dashboard/components/dashboard.tsx | 22 +++++++---- .../src/dashboard/components/ide.tsx | 9 ----- .../components/projectActionButton.tsx | 34 +++++++++++------ .../src/dashboard/components/templates.tsx | 37 +++---------------- .../src/dashboard/components/topBar.tsx | 2 +- .../src/dashboard/localBackend.ts | 19 ++++++---- .../src/authentication/src/index.tsx | 3 +- 7 files changed, 56 insertions(+), 70 deletions(-) diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx index adef2ebca13f..416e8def13bf 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx @@ -270,7 +270,8 @@ function Dashboard(props: DashboardProps) { setTab(Tab.dashboard) const ideElement = document.getElementById(IDE_ELEMENT_ID) if (ideElement) { - ideElement.hidden = true + ideElement.style.top = '-100vh' + ideElement.style.display = 'fixed' } } } @@ -370,10 +371,13 @@ function Dashboard(props: DashboardProps) { }} openIde={async () => { setTab(Tab.ide) - setProject(await backend.getProjectDetails(projectAsset.id)) + if (project?.projectId !== projectAsset.id) { + setProject(await backend.getProjectDetails(projectAsset.id)) + } const ideElement = document.getElementById(IDE_ELEMENT_ID) if (ideElement) { - ideElement.hidden = false + ideElement.style.top = '' + ideElement.style.display = 'absolute' } }} /> @@ -619,11 +623,11 @@ function Dashboard(props: DashboardProps) { return 
`${prefix}${highestProjectIndex + 1}` } - async function handleCreateProject(templateName?: string | null) { - const projectName = getNewProjectName(templateName) + async function handleCreateProject(templateId?: string | null) { + const projectName = getNewProjectName(templateId) const body: backendModule.CreateProjectRequestBody = { projectName, - projectTemplateName: templateName?.replace(/_/g, '').toLocaleLowerCase() ?? null, + projectTemplateName: templateId ?? null, parentDirectoryId: directoryId, } const projectAsset = await backend.createProject(body) @@ -662,13 +666,15 @@ function Dashboard(props: DashboardProps) { setTab(Tab.ide) const ideElement = document.getElementById(IDE_ELEMENT_ID) if (ideElement) { - ideElement.hidden = false + ideElement.style.top = '' + ideElement.style.display = 'absolute' } } else { setTab(Tab.dashboard) const ideElement = document.getElementById(IDE_ELEMENT_ID) if (ideElement) { - ideElement.hidden = true + ideElement.style.top = '-100vh' + ideElement.style.display = 'fixed' } } }} diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/ide.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/ide.tsx index f20171e2390d..998d865274af 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/ide.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/ide.tsx @@ -9,8 +9,6 @@ import * as platformModule from '../../platform' // === Constants === // ================= -/** The `id` attribute of the element into which the IDE will be rendered. 
*/ -const IDE_ELEMENT_ID = 'root' const IDE_CDN_URL = 'https://ensocdn.s3.us-west-1.amazonaws.com/ide' const JS_EXTENSION: Record = { [platformModule.Platform.cloud]: '.js.gz', @@ -31,13 +29,6 @@ function Ide(props: Props) { const { project, appRunner } = props const { backend } = backendProvider.useBackend() - react.useEffect(() => { - document.getElementById(IDE_ELEMENT_ID)?.classList.remove('hidden') - return () => { - document.getElementById(IDE_ELEMENT_ID)?.classList.add('hidden') - } - }, []) - react.useEffect(() => { void (async () => { const ideVersion = diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectActionButton.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectActionButton.tsx index bd6a66abb0a3..ec0daecd86d5 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectActionButton.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/projectActionButton.tsx @@ -3,6 +3,7 @@ import * as react from 'react' import * as backendModule from '../backend' import * as backendProvider from '../../providers/backend' +import * as localBackend from '../localBackend' import * as platform from '../../platform' import * as svg from '../../components/svg' @@ -53,6 +54,28 @@ function ProjectActionButton(props: ProjectActionButtonProps) { const [isCheckingResources, setIsCheckingResources] = react.useState(false) const [spinnerState, setSpinnerState] = react.useState(SpinnerState.done) + react.useEffect(() => { + void (async () => { + const projectDetails = await backend.getProjectDetails(project.id) + setState(projectDetails.state.type) + if (projectDetails.state.type === backendModule.ProjectState.openInProgress) { + setSpinnerState(SpinnerState.initial) + setIsCheckingStatus(true) + } + })() + }, []) + + react.useEffect(() => { + if (backend.platform === platform.Platform.desktop) { + if (project.id !== 
localBackend.LocalBackend.currentlyOpeningProjectId) { + setIsCheckingResources(false) + setIsCheckingStatus(false) + setState(backendModule.ProjectState.closed) + setSpinnerState(SpinnerState.done) + } + } + }, [project, state, localBackend.LocalBackend.currentlyOpeningProjectId]) + react.useEffect(() => { if (!isCheckingStatus) { return @@ -107,17 +130,6 @@ function ProjectActionButton(props: ProjectActionButtonProps) { } }, [isCheckingResources]) - react.useEffect(() => { - void (async () => { - const projectDetails = await backend.getProjectDetails(project.id) - setState(projectDetails.state.type) - if (projectDetails.state.type === backendModule.ProjectState.openInProgress) { - setSpinnerState(SpinnerState.initial) - setIsCheckingStatus(true) - } - })() - }, []) - function closeProject() { setState(backendModule.ProjectState.closed) appRunner?.stopApp() diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx index b957c059a583..ce1a9b58caaf 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/templates.tsx @@ -1,6 +1,4 @@ /** @file Renders the list of templates from which a project can be created. */ -import * as backendProvider from '../../providers/backend' -import * as platformModule from '../../platform' import * as svg from '../../components/svg' // ================= @@ -15,8 +13,8 @@ interface Template { background: string } -/** The full list of templates available to cloud projects. */ -const CLOUD_TEMPLATES: Template[] = [ +/** The full list of templates. 
*/ +const TEMPLATES: Template[] = [ { title: 'Colorado COVID', id: 'Colorado_COVID', @@ -25,32 +23,16 @@ const CLOUD_TEMPLATES: Template[] = [ }, { title: 'KMeans', - id: 'Kmeans', + id: 'KMeans', description: 'Learn where to open a coffee shop to maximize your income.', background: '#6b7280', }, { title: 'NASDAQ Returns', - id: 'NASDAQ_Returns', + id: 'NASDAQReturns', description: 'Learn how to clean your data to prepare it for advanced analysis.', background: '#6b7280', }, - { - title: 'Restaurants', - id: 'Orders', - description: 'Learn how to clean your data to prepare it for advanced analysis.', - background: '#6b7280', - }, - { - title: 'Github Stars', - id: 'Stargazers', - description: 'Learn how to clean your data to prepare it for advanced analysis.', - background: '#6b7280', - }, -] - -/** The full list of templates available to local projects. */ -const DESKTOP_TEMPLATES: Template[] = [ { title: 'Combine spreadsheets', id: 'Orders', @@ -71,11 +53,6 @@ const DESKTOP_TEMPLATES: Template[] = [ }, ] -const TEMPLATES: Record = { - [platformModule.Platform.cloud]: CLOUD_TEMPLATES, - [platformModule.Platform.desktop]: DESKTOP_TEMPLATES, -} - // ======================= // === TemplatesRender === // ======================= @@ -148,16 +125,12 @@ interface TemplatesProps { function Templates(props: TemplatesProps) { const { onTemplateClick } = props - const { backend } = backendProvider.useBackend() return (
- +
diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/topBar.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/topBar.tsx index f0683251f557..695bd5c59d5c 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/topBar.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/topBar.tsx @@ -72,7 +72,7 @@ function TopBar(props: TopBarProps) { tab === dashboard.Tab.dashboard ? 'm-2 w-16' : 'w-0' }`} > - Dashboard + {projectName ?? 'Dashboard'}
{svg.BARS_ICON} diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/localBackend.ts b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/localBackend.ts index e8344d216906..d804babdff89 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/localBackend.ts +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/localBackend.ts @@ -29,10 +29,10 @@ interface CurrentlyOpenProjectInfo { /** Class for sending requests to the Project Manager API endpoints. * This is used instead of the cloud backend API when managing local projects from the dashboard. */ export class LocalBackend implements Partial { + static currentlyOpeningProjectId: backend.ProjectId | null = null + static currentlyOpenProject: CurrentlyOpenProjectInfo | null = null readonly platform = platformModule.Platform.desktop private readonly projectManager = projectManager.ProjectManager.default() - private currentlyOpeningProjectId: string | null = null - private currentlyOpenProject: CurrentlyOpenProjectInfo | null = null async listDirectory(): Promise { const result = await this.projectManager.listProjects({}) @@ -79,11 +79,14 @@ export class LocalBackend implements Partial { async closeProject(projectId: backend.ProjectId): Promise { await this.projectManager.closeProject({ projectId }) - this.currentlyOpenProject = null + if (projectId === LocalBackend.currentlyOpeningProjectId) { + LocalBackend.currentlyOpeningProjectId = null + LocalBackend.currentlyOpenProject = null + } } async getProjectDetails(projectId: backend.ProjectId): Promise { - if (projectId !== this.currentlyOpenProject?.id) { + if (projectId !== LocalBackend.currentlyOpenProject?.id) { const result = await this.projectManager.listProjects({}) const project = result.projects.find(listedProject => listedProject.id === projectId) const engineVersion = project?.engineVersion @@ -109,14 +112,14 @@ export class LocalBackend implements Partial { projectId, state: { type: - 
projectId === this.currentlyOpeningProjectId + projectId === LocalBackend.currentlyOpeningProjectId ? backend.ProjectState.openInProgress : backend.ProjectState.closed, }, }) } } else { - const project = this.currentlyOpenProject.project + const project = LocalBackend.currentlyOpenProject.project return Promise.resolve({ name: project.projectName, engineVersion: { @@ -140,11 +143,11 @@ export class LocalBackend implements Partial { } async openProject(projectId: backend.ProjectId): Promise { - this.currentlyOpeningProjectId = projectId + LocalBackend.currentlyOpeningProjectId = projectId const project = await this.projectManager.openProject({ projectId, missingComponentAction: projectManager.MissingComponentAction.install, }) - this.currentlyOpenProject = { id: projectId, project } + LocalBackend.currentlyOpenProject = { id: projectId, project } } } diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/index.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/index.tsx index 3ceecf34906b..77d31faed149 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/index.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/index.tsx @@ -46,7 +46,8 @@ export function run(props: app.AppProps) { } else if (ideElement == null) { logger.error(`Could not find IDE element with ID '${IDE_ELEMENT_ID}'.`) } else { - ideElement.hidden = true + ideElement.style.top = '-100vh' + ideElement.style.display = 'fixed' reactDOM.createRoot(root).render() } } From b5578ec2c9e2c58ec66871123dbdfdcd12eedf5e Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Fri, 5 May 2023 15:30:06 +0200 Subject: [PATCH 13/28] Identify SyntaxError exception and avoid printing a stack trace (#6574) --- .../src/main/scala/org/enso/runner/Main.scala | 4 +- .../CompilationAbortedException.java | 24 ++++++++++ .../CompilationAbortedException.scala | 10 ----- .../org/enso/interpreter/test/EqualsTest.java | 8 ++-- .../interpreter/test/PolyglotErrorTest.java | 4 +- 
.../enso/interpreter/test/SignatureTest.java | 45 +++++++++++++++++++ .../org/enso/interpreter/test/TestBase.java | 20 +++++---- .../interpreter/test/ValuesGenerator.java | 35 ++++++++------- 8 files changed, 109 insertions(+), 41 deletions(-) create mode 100644 engine/runtime/src/main/java/org/enso/compiler/exception/CompilationAbortedException.java delete mode 100644 engine/runtime/src/main/scala/org/enso/compiler/exception/CompilationAbortedException.scala create mode 100644 engine/runtime/src/test/java/org/enso/interpreter/test/SignatureTest.java diff --git a/engine/runner/src/main/scala/org/enso/runner/Main.scala b/engine/runner/src/main/scala/org/enso/runner/Main.scala index 166f10693869..ab3783cf7b71 100644 --- a/engine/runner/src/main/scala/org/enso/runner/Main.scala +++ b/engine/runner/src/main/scala/org/enso/runner/Main.scala @@ -785,7 +785,9 @@ object Main { } println(s" at <$langId> $fmtFrame") } - if (dropInitJava.isEmpty) { + if (exception.isSyntaxError()) { + // no stack + } else if (dropInitJava.isEmpty) { fullStack.foreach(printFrame) } else { dropInitJava.foreach(printFrame) diff --git a/engine/runtime/src/main/java/org/enso/compiler/exception/CompilationAbortedException.java b/engine/runtime/src/main/java/org/enso/compiler/exception/CompilationAbortedException.java new file mode 100644 index 000000000000..9c837b93c4e6 --- /dev/null +++ b/engine/runtime/src/main/java/org/enso/compiler/exception/CompilationAbortedException.java @@ -0,0 +1,24 @@ +package org.enso.compiler.exception; + +import com.oracle.truffle.api.exception.AbstractTruffleException; +import com.oracle.truffle.api.interop.ExceptionType; +import com.oracle.truffle.api.interop.InteropLibrary; +import com.oracle.truffle.api.library.ExportLibrary; +import com.oracle.truffle.api.library.ExportMessage; + +/** + * An exception thrown to break out of the compilation flow after reporting all the encountered + * errors. 
+ */ +@ExportLibrary(InteropLibrary.class) +public final class CompilationAbortedException extends AbstractTruffleException { + @Override + public String getMessage() { + return "Compilation aborted due to errors."; + } + + @ExportMessage + ExceptionType getExceptionType() { + return ExceptionType.PARSE_ERROR; + } +} diff --git a/engine/runtime/src/main/scala/org/enso/compiler/exception/CompilationAbortedException.scala b/engine/runtime/src/main/scala/org/enso/compiler/exception/CompilationAbortedException.scala deleted file mode 100644 index 7fa79e90646c..000000000000 --- a/engine/runtime/src/main/scala/org/enso/compiler/exception/CompilationAbortedException.scala +++ /dev/null @@ -1,10 +0,0 @@ -package org.enso.compiler.exception - -import com.oracle.truffle.api.exception.AbstractTruffleException - -/** An exception thrown to break out of the compilation flow after reporting - * all the encountered errors. - */ -class CompilationAbortedException extends AbstractTruffleException { - override def getMessage: String = "Compilation aborted due to errors." 
-} diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/EqualsTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/EqualsTest.java index 96c378530ac7..621142f1f4b1 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/EqualsTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/EqualsTest.java @@ -1,8 +1,5 @@ package org.enso.interpreter.test; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - import java.time.LocalDate; import java.time.LocalTime; import java.time.ZoneId; @@ -10,12 +7,15 @@ import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; + import org.enso.interpreter.node.expression.builtin.interop.syntax.HostValueToEnsoNode; import org.enso.interpreter.node.expression.builtin.meta.EqualsNode; import org.enso.interpreter.node.expression.builtin.meta.EqualsNodeGen; import org.graalvm.polyglot.Context; import org.graalvm.polyglot.Value; import org.junit.AfterClass; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import org.junit.BeforeClass; import org.junit.Test; import org.junit.experimental.theories.DataPoints; @@ -184,7 +184,7 @@ public void testDateTimeEquality() { @Test public void testVectorsEquality() { Object ensoVector = - unwrapValue(context, createValue(context, "[1,2,3]", "from Standard.Base.import all")); + unwrapValue(context, createValue(context, "[1,2,3]", "from Standard.Base import all")); Object javaVector = unwrapValue(context, context.asValue(List.of(1L, 2L, 3L))); executeInContext( context, diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/PolyglotErrorTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/PolyglotErrorTest.java index f67ebfcf27d0..17ef33a5c8e0 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/PolyglotErrorTest.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/PolyglotErrorTest.java @@ 
-3,10 +3,9 @@ import org.graalvm.polyglot.Context; import org.graalvm.polyglot.Source; import org.graalvm.polyglot.Value; +import org.junit.After; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; - -import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -24,6 +23,7 @@ public void prepareCtx() throws Exception { var code = """ import Standard.Base.Panic.Panic + import Standard.Base.Data.Text.Text import Standard.Base.Error.Error import Standard.Base.Errors.Illegal_State.Illegal_State import Standard.Base.Nothing.Nothing diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/SignatureTest.java b/engine/runtime/src/test/java/org/enso/interpreter/test/SignatureTest.java new file mode 100644 index 000000000000..2e7055054afc --- /dev/null +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/SignatureTest.java @@ -0,0 +1,45 @@ +package org.enso.interpreter.test; + +import java.net.URI; + +import org.graalvm.polyglot.Context; +import org.graalvm.polyglot.PolyglotException; +import org.graalvm.polyglot.Source; +import org.junit.AfterClass; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; +import org.junit.BeforeClass; +import org.junit.Test; + +public class SignatureTest extends TestBase { + private static Context ctx; + + @BeforeClass + public static void prepareCtx() { + ctx = createDefaultContext(); + } + + @AfterClass + public static void disposeCtx() { + ctx.close(); + } + + @Test + public void wrongFunctionSignature() throws Exception { + final URI uri = new URI("memory://neg.enso"); + final Source src = Source.newBuilder("enso", """ + neg : Xyz -> Abc + neg a = 0 - a + """, uri.getHost()) + .uri(uri) + .buildLiteral(); + + try { + var module = ctx.eval(src); + var neg = module.invokeMember("eval_expression", "neg"); + fail("Expecting an exception from compilation, not: " + neg); + } catch (PolyglotException e) { + assertTrue("It is a syntax error 
exception", e.isSyntaxError()); + } + } +} diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/TestBase.java b/engine/runtime/src/test/java/org/enso/interpreter/test/TestBase.java index e67f6fa07f6b..157853e2e902 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/TestBase.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/TestBase.java @@ -1,19 +1,11 @@ package org.enso.interpreter.test; -import static org.junit.Assert.assertNotNull; - -import com.oracle.truffle.api.frame.VirtualFrame; -import com.oracle.truffle.api.interop.InteropLibrary; -import com.oracle.truffle.api.interop.TruffleObject; -import com.oracle.truffle.api.library.ExportLibrary; -import com.oracle.truffle.api.library.ExportMessage; -import com.oracle.truffle.api.nodes.Node; -import com.oracle.truffle.api.nodes.RootNode; import java.io.ByteArrayOutputStream; import java.io.OutputStream; import java.nio.file.Paths; import java.util.Map; import java.util.concurrent.Callable; + import org.enso.interpreter.EnsoLanguage; import org.enso.polyglot.MethodNames.Module; import org.enso.polyglot.RuntimeOptions; @@ -22,6 +14,15 @@ import org.graalvm.polyglot.Source; import org.graalvm.polyglot.Value; import org.graalvm.polyglot.proxy.ProxyExecutable; +import static org.junit.Assert.assertNotNull; + +import com.oracle.truffle.api.frame.VirtualFrame; +import com.oracle.truffle.api.interop.InteropLibrary; +import com.oracle.truffle.api.interop.TruffleObject; +import com.oracle.truffle.api.library.ExportLibrary; +import com.oracle.truffle.api.library.ExportMessage; +import com.oracle.truffle.api.nodes.Node; +import com.oracle.truffle.api.nodes.RootNode; public abstract class TestBase { protected static Context createDefaultContext() { @@ -44,6 +45,7 @@ private static Context.Builder defaultContextBuilder() { .allowIO(true) .allowAllAccess(true) .logHandler(new ByteArrayOutputStream()) + .option(RuntimeOptions.STRICT_ERRORS, "true") .option( 
RuntimeOptions.LANGUAGE_HOME_OVERRIDE, Paths.get("../../distribution/component").toFile().getAbsolutePath()); diff --git a/engine/runtime/src/test/java/org/enso/interpreter/test/ValuesGenerator.java b/engine/runtime/src/test/java/org/enso/interpreter/test/ValuesGenerator.java index f8aca03bf393..1c49bfdcb74c 100644 --- a/engine/runtime/src/test/java/org/enso/interpreter/test/ValuesGenerator.java +++ b/engine/runtime/src/test/java/org/enso/interpreter/test/ValuesGenerator.java @@ -51,23 +51,28 @@ public static ValuesGenerator create(Context ctx, Language... langs) { } private ValueInfo v(String key, String prelude, String typeOrValue) { + return v(key, prelude, typeOrValue, key != null ? typeOrValue : null); + } + + private ValueInfo v(String key, String prelude, String typeOrValue, String typeCheck) { if (key == null) { key = typeOrValue; } var v = values.get(key); if (v == null) { - var f = ctx.eval("enso", prelude + "\nn = " + typeOrValue); + var code = prelude + "\nn = " + typeOrValue; + var f = ctx.eval("enso", code); var value = f.invokeMember("eval_expression", "n"); - var c = ctx.eval("enso", """ - {import} + if (typeCheck != null) { + var c = ctx.eval("enso", """ + {import} - check x = case x of - v : {type} -> 1 - _ -> 0 + check x = case x of + _ : {type} -> 1 + _ -> 0 - """.replace("{type}", typeOrValue).replace("{import}", prelude) - ); - if (key != null) { + """.replace("{type}", typeCheck).replace("{import}", prelude) + ); var check = c.invokeMember("eval_expression", "check"); assertTrue("Can execute the check", check.canExecute()); v = new ValueInfo(value, check); @@ -109,7 +114,7 @@ private List createValuesOfCustomType(String typeDefs, List expre } sb.append(""" check_{i} x = case x of - v : {type} -> 1 + _ : {type} -> 1 _ -> 0 """.replace("{type}", c).replace("{i}", "" + i)); @@ -343,9 +348,9 @@ public List textual() { collect.add(v(null, "", "'?'").type()); collect.add(v(null, "", """ ''' - block of - multi-line - texts + block of + multi-line + 
texts """).type()); } @@ -426,7 +431,7 @@ public List timeZones() { "Time_Zone.parse 'Europe/London'", "Time_Zone.parse 'CET'" )) { - collect.add(v("timeZones-" + expr, "import Standard.Base.Data.Time.Time_Zone.Time_Zone", expr).type()); + collect.add(v("timeZones-" + expr, "import Standard.Base.Data.Time.Time_Zone.Time_Zone", expr, "Time_Zone").type()); } } if (languages.contains(Language.JAVA)) { @@ -569,7 +574,7 @@ public List maps() { "Map.empty.insert 'A' 1 . insert 'B' 2 . insert 'C' 3", "Map.empty.insert 'C' 3 . insert 'B' 2 . insert 'A' 1" )) { - collect.add(v("maps-" + expr, imports, expr).type()); + collect.add(v("maps-" + expr, imports, expr, "Map").type()); } } return collect; From e1b4019b4516f824a62abfe77bf9650fc8d0e50f Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Fri, 5 May 2023 23:49:34 +1000 Subject: [PATCH 14/28] Fix opening links in desktop IDE (#6507) * Fix opening links * Address review --------- Co-authored-by: mergify[bot] <37929162+mergify[bot]@users.noreply.github.com> --- app/ide-desktop/lib/client/src/security.ts | 20 ++++++++++++++++--- .../src/dashboard/components/topBar.tsx | 1 + 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/app/ide-desktop/lib/client/src/security.ts b/app/ide-desktop/lib/client/src/security.ts index f5c0d4238e76..81b36206f862 100644 --- a/app/ide-desktop/lib/client/src/security.ts +++ b/app/ide-desktop/lib/client/src/security.ts @@ -10,6 +10,9 @@ import * as electron from 'electron' /** The list of hosts that the app can access. They are required for user authentication to work. */ const TRUSTED_HOSTS = ['accounts.google.com', 'accounts.youtube.com', 'github.com'] +/** The list of hosts that the app can open external links to. */ +const TRUSTED_EXTERNAL_HOSTS = ['discord.gg'] + /** The list of URLs a new WebView can be pointed to. 
*/ const WEBVIEW_URL_WHITELIST: string[] = [] @@ -79,7 +82,12 @@ function preventNavigation() { electron.app.on('web-contents-created', (_event, contents) => { contents.on('will-navigate', (event, navigationUrl) => { const parsedUrl = new URL(navigationUrl) - if (parsedUrl.origin !== origin && !TRUSTED_HOSTS.includes(parsedUrl.host)) { + const currentWindowUrl = electron.BrowserWindow.getFocusedWindow()?.webContents.getURL() + const parsedCurrentWindowUrl = currentWindowUrl ? new URL(currentWindowUrl) : null + if ( + parsedUrl.origin !== parsedCurrentWindowUrl?.origin && + !TRUSTED_HOSTS.includes(parsedUrl.host) + ) { event.preventDefault() console.error(`Prevented navigation to '${navigationUrl}'.`) } @@ -95,8 +103,14 @@ function preventNavigation() { function disableNewWindowsCreation() { electron.app.on('web-contents-created', (_event, contents) => { contents.setWindowOpenHandler(({ url }) => { - console.error(`Blocking new window creation request to '${url}'.`) - return { action: 'deny' } + const parsedUrl = new URL(url) + if (TRUSTED_EXTERNAL_HOSTS.includes(parsedUrl.host)) { + void electron.shell.openExternal(url) + return { action: 'deny' } + } else { + console.error(`Blocking new window creation request to '${url}'.`) + return { action: 'deny' } + } }) }) } diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/topBar.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/topBar.tsx index 695bd5c59d5c..ac47c185b8a6 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/topBar.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/topBar.tsx @@ -101,6 +101,7 @@ function TopBar(props: TopBarProps) {
help chat From c957581978eaeb9f3702f7d081c971bc61eea144 Mon Sep 17 00:00:00 2001 From: Kaz Wesley Date: Fri, 5 May 2023 10:53:24 -0700 Subject: [PATCH 15/28] Catch 5813 and avoid crash (#6585) --- app/gui/view/documentation/src/html.rs | 15 ++++++++++++- app/gui/view/documentation/src/lib.rs | 30 ++++++-------------------- 2 files changed, 21 insertions(+), 24 deletions(-) diff --git a/app/gui/view/documentation/src/html.rs b/app/gui/view/documentation/src/html.rs index 4d28ce1de4b9..802a7c3a1416 100644 --- a/app/gui/view/documentation/src/html.rs +++ b/app/gui/view/documentation/src/html.rs @@ -63,16 +63,29 @@ fn svg_icon(content: &'static str) -> impl Render { /// Render entry documentation to HTML code with Tailwind CSS styles. #[profile(Detail)] pub fn render(docs: EntryDocumentation) -> String { - match docs { + let html = match docs { EntryDocumentation::Placeholder(placeholder) => match placeholder { Placeholder::NoDocumentation => String::from("No documentation available."), Placeholder::VirtualComponentGroup { name } => render_virtual_component_group_docs(name), }, EntryDocumentation::Docs(docs) => render_documentation(docs), + }; + match validate_utf8(&html) { + Ok(_) => html, + Err(_) => { + error!("Internal error. Generated HTML is not valid utf-8. This is bug #5813."); + String::from("Failed to load documentation.") + } } } +#[profile(Debug)] +fn validate_utf8(s: &str) -> Result<&str, std::str::Utf8Error> { + let bytes = s.as_bytes(); + std::str::from_utf8(bytes) +} + fn render_documentation(docs: Documentation) -> String { match docs { Documentation::Module(module_docs) => render_module_documentation(&module_docs, None), diff --git a/app/gui/view/documentation/src/lib.rs b/app/gui/view/documentation/src/lib.rs index a1f827815cf3..1436b07675a7 100644 --- a/app/gui/view/documentation/src/lib.rs +++ b/app/gui/view/documentation/src/lib.rs @@ -18,21 +18,12 @@ //! 
[`Tailwind CSS`]: https://tailwindcss.com/ // === Features === -#![feature(associated_type_bounds)] -#![feature(associated_type_defaults)] #![feature(drain_filter)] -#![feature(fn_traits)] #![feature(option_result_contains)] -#![feature(specialization)] -#![feature(trait_alias)] -#![feature(type_alias_impl_trait)] -#![feature(unboxed_closures)] // === Standard Linter Configuration === #![deny(non_ascii_idents)] -#![warn(unsafe_code)] #![allow(clippy::bool_to_int_with_if)] #![allow(clippy::let_and_return)] -#![allow(incomplete_features)] // To be removed, see: https://github.com/enso-org/ide/issues/1559 #![warn(missing_copy_implementations)] #![warn(missing_debug_implementations)] #![warn(missing_docs)] @@ -64,10 +55,8 @@ use ensogl::Animation; use ensogl_component::shadow; use ensogl_derive_theme::FromTheme; use ensogl_hardcoded_theme::application::component_browser::documentation as theme; -use web::Closure; use web::HtmlElement; use web::JsCast; -use web::MouseEvent; pub mod html; @@ -89,6 +78,7 @@ const MIN_CAPTION_HEIGHT: f32 = 1.0; /// Delay before updating the displayed documentation. const DISPLAY_DELAY_MS: i32 = 0; + // === Style === #[derive(Debug, Clone, Copy, Default, FromTheme)] @@ -108,24 +98,20 @@ pub struct Style { // === Model === // ============= -type CodeCopyClosure = Closure; - /// Model of Native visualization that generates documentation for given Enso code and embeds /// it in a HTML container. #[derive(Clone, CloneRef, Debug)] #[allow(missing_docs)] pub struct Model { - outer_dom: DomSymbol, - caption_dom: DomSymbol, - inner_dom: DomSymbol, + outer_dom: DomSymbol, + caption_dom: DomSymbol, + inner_dom: DomSymbol, /// The purpose of this overlay is stop propagating mouse events under the documentation panel /// to EnsoGL shapes, and pass them to the DOM instead. 
- overlay: overlay::View, - display_object: display::object::Instance, - code_copy_closures: Rc>>, + overlay: overlay::View, + display_object: display::object::Instance, } - impl Model { /// Constructor. fn new(scene: &Scene) -> Self { @@ -164,9 +150,7 @@ impl Model { scene.dom.layers.node_searcher.manage(&inner_dom); scene.dom.layers.node_searcher.manage(&caption_dom); - let code_copy_closures = default(); - Model { outer_dom, inner_dom, caption_dom, overlay, display_object, code_copy_closures } - .init() + Model { outer_dom, inner_dom, caption_dom, overlay, display_object }.init() } fn init(self) -> Self { From 608c5d8945d1c9931bc3df42699e6c8453ddeedd Mon Sep 17 00:00:00 2001 From: Pavel Marek Date: Fri, 5 May 2023 21:24:19 +0200 Subject: [PATCH 16/28] Benchmark Engine job runs only engine, not Enso benchmarks (#6534) Engine Benchmark job runs only engine benchmarks, not Enso benchmarks. Enso benchmarks do not report their output anywhere, and take more than 5 hours to run nowadays. We might define a new job in the future and probably rename it to "Library benchmarks". But that is the responsibility of the lib team. 
--- .github/workflows/benchmark.yml | 2 +- build/build/src/ci_gen.rs | 2 +- engine/runtime/src/main/scala/org/enso/compiler/core/IR.scala | 2 +- .../src/main/scala/org/enso/compiler/data/BindingsMap.scala | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index c44f8fc842ba..f2000ebf09c0 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -55,7 +55,7 @@ jobs: run: ./run git-clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - run: ./run backend benchmark runtime enso + - run: ./run backend benchmark runtime env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - if: failure() && runner.os == 'Windows' diff --git a/build/build/src/ci_gen.rs b/build/build/src/ci_gen.rs index 57ee9ffa733a..afc25ad0f175 100644 --- a/build/build/src/ci_gen.rs +++ b/build/build/src/ci_gen.rs @@ -488,7 +488,7 @@ pub fn benchmark() -> Result { ); let mut benchmark_job = - plain_job(&BenchmarkRunner, "Benchmark Engine", "backend benchmark runtime enso"); + plain_job(&BenchmarkRunner, "Benchmark Engine", "backend benchmark runtime"); benchmark_job.timeout_minutes = Some(60 * 8); workflow.add_job(benchmark_job); Ok(workflow) diff --git a/engine/runtime/src/main/scala/org/enso/compiler/core/IR.scala b/engine/runtime/src/main/scala/org/enso/compiler/core/IR.scala index 90668ed267c1..4c5b41cd42e4 100644 --- a/engine/runtime/src/main/scala/org/enso/compiler/core/IR.scala +++ b/engine/runtime/src/main/scala/org/enso/compiler/core/IR.scala @@ -300,7 +300,7 @@ object IR { * @param diagnostics compiler diagnostics for this node */ @SerialVersionUID( - 3667L // removes Vector.Vector_Data constructor + 3668L // removes special handling for `enso_project` method ) // prevents reading broken caches, see PR-3692 for details sealed case class Module( imports: List[Module.Scope.Import], diff --git a/engine/runtime/src/main/scala/org/enso/compiler/data/BindingsMap.scala 
b/engine/runtime/src/main/scala/org/enso/compiler/data/BindingsMap.scala index 791c124e0c97..d27ae396f747 100644 --- a/engine/runtime/src/main/scala/org/enso/compiler/data/BindingsMap.scala +++ b/engine/runtime/src/main/scala/org/enso/compiler/data/BindingsMap.scala @@ -23,7 +23,7 @@ import scala.annotation.unused */ @SerialVersionUID( - 5568L // stable serialization of bindings + 5569L // removes special handling of `enso_project` method ) case class BindingsMap( definedEntities: List[DefinedEntity], From f7282b7cff074b8c4e471fa6e3dbc1a2189d307d Mon Sep 17 00:00:00 2001 From: GregoryTravis Date: Sat, 6 May 2023 02:22:09 -0400 Subject: [PATCH 17/28] "at_least_one" flag for tokenize_to_rows (#6539) Add "at_least_one" flag for tokenize_to_rows. --- CHANGELOG.md | 2 ++ app/ide-desktop/lib/dashboard/src/index.html | 2 +- .../Database/0.0.0-dev/src/Data/Table.enso | 11 ++++---- .../Table/0.0.0-dev/src/Data/Table.enso | 11 ++++---- .../src/Internal/Split_Tokenize.enso | 27 ++++++++++++------- .../src/In_Memory/Split_Tokenize_Spec.enso | 12 +++++++++ 6 files changed, 44 insertions(+), 21 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0346125f0b28..7308d7b33a4b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -415,6 +415,7 @@ `Text.write`.][6459] - [Implemented `create_database_table` allowing saving queries as database tables.][6467] +- [Added `at_least_one` flag to `Table.tokenize_to_rows`.][6539] - [Moved `Redshift` connector into a separate `AWS` library.][6550] [debug-shortcuts]: @@ -622,6 +623,7 @@ [6429]: https://github.com/enso-org/enso/pull/6429 [6459]: https://github.com/enso-org/enso/pull/6459 [6467]: https://github.com/enso-org/enso/pull/6467 +[6539]: https://github.com/enso-org/enso/pull/6539 [6550]: https://github.com/enso-org/enso/pull/6550 #### Enso Compiler diff --git a/app/ide-desktop/lib/dashboard/src/index.html b/app/ide-desktop/lib/dashboard/src/index.html index 84f7c3f2bb35..9845828fa650 120000 --- 
a/app/ide-desktop/lib/dashboard/src/index.html +++ b/app/ide-desktop/lib/dashboard/src/index.html @@ -1 +1 @@ -../../content/src/index.html \ No newline at end of file +../../content/src/index.html diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index b2ddb208ca0f..8c355eedf35f 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -1426,7 +1426,6 @@ type Table Arguments: - column: The name or index of the column to split the text of. - delimiter: The term or terms used to split the text. - - on_problems: Specifies the behavior when a problem occurs. split_to_rows : Text | Integer -> Text -> Table split_to_rows self column delimiter="," = _ = [column delimiter] @@ -1468,10 +1467,12 @@ type Table - pattern: The pattern used to find within the text. - case_sensitivity: Specifies if the text values should be compared case sensitively. - - on_problems: Specifies the behavior when a problem occurs. - tokenize_to_rows : Text | Integer -> Text -> Case_Sensitivity -> Table - tokenize_to_rows self column pattern="." case_sensitivity=Case_Sensitivity.Sensitive = - _ = [column pattern case_sensitivity] + - at_least_one_row: If True, a tokenization that returns no values will still + produce at least one row, with `Nothing` for the output column values. + Equivalent to converting a tokenization output of [] to [Nothing]. + tokenize_to_rows : Text | Integer -> Text -> Case_Sensitivity -> Boolean -> Table + tokenize_to_rows self column pattern="." 
case_sensitivity=Case_Sensitivity.Sensitive at_least_one_row=False = + _ = [column, pattern, case_sensitivity, at_least_one_row] Error.throw (Unsupported_Database_Operation.Error "Table.tokenize_to_rows is not implemented yet for the Database backends.") ## Converts a Text column into new columns using a regular expression diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index c4a844e69c8f..49dae0ee81b5 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -949,7 +949,6 @@ type Table Arguments: - column: The name or index of the column to split the text of. - delimiter: The term or terms used to split the text. - - on_problems: Specifies the behavior when a problem occurs. split_to_rows : Text | Integer -> Text -> Table split_to_rows self column delimiter="," = Split_Tokenize.split_to_rows self column delimiter @@ -989,10 +988,12 @@ type Table - pattern: The pattern used to find within the text. - case_sensitivity: Specifies if the text values should be compared case sensitively. - - on_problems: Specifies the behavior when a problem occurs. - tokenize_to_rows : Text | Integer -> Text -> Case_Sensitivity -> Table - tokenize_to_rows self column pattern="." case_sensitivity=Case_Sensitivity.Sensitive = - Split_Tokenize.tokenize_to_rows self column pattern case_sensitivity + - at_least_one_row: If True, a tokenization that returns no values will still + produce at least one row, with `Nothing` for the output column values. + Equivalent to converting a tokenization output of [] to [Nothing]. + tokenize_to_rows : Text | Integer -> Text -> Case_Sensitivity -> Boolean -> Table + tokenize_to_rows self column pattern="." 
case_sensitivity=Case_Sensitivity.Sensitive at_least_one_row=False = + Split_Tokenize.tokenize_to_rows self column pattern case_sensitivity at_least_one_row ## Converts a Text column into new columns using a regular expression pattern. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso index dfd842cd3972..fb2199da456b 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso @@ -47,11 +47,11 @@ tokenize_to_columns table input_column_id pattern case_sensitivity column_count Tokenizes a column of text into a set of new rows using a regular expression. See `Table.tokenize_to_rows`. -tokenize_to_rows : Table -> Text | Integer -> Text -> Case_Sensitivity -> Table -tokenize_to_rows table input_column_id pattern="." case_sensitivity=Case_Sensitivity.Sensitive = +tokenize_to_rows : Table -> Text | Integer -> Text -> Case_Sensitivity -> Boolean -> Table +tokenize_to_rows table input_column_id pattern="." case_sensitivity=Case_Sensitivity.Sensitive at_least_one_row=False = column = table.at input_column_id Value_Type.expect_text column - fan_out_to_rows table input_column_id (handle_nothing (_.tokenize pattern case_sensitivity)) + fan_out_to_rows table input_column_id (handle_nothing (_.tokenize pattern case_sensitivity)) at_least_one_row=at_least_one_row ## PRIVATE Converts a Text column into new columns using a regular expression @@ -68,7 +68,7 @@ parse_to_columns table input_column_id pattern="." 
case_sensitivity=Case_Sensiti column = table.at input_column_id new_table = Value_Type.expect_text column <| - fan_out_to_rows_and_columns table input_column_id fun column_names on_problems + fan_out_to_rows_and_columns table input_column_id fun column_names on_problems=on_problems if parse_values then new_table.parse on_problems=on_problems else new_table ## PRIVATE @@ -138,14 +138,14 @@ fan_out_to_columns table input_column_id function column_count=Nothing on_proble - input_column: The column to transform. - function: A function that transforms a single element of `input_column` to multiple values. -fan_out_to_rows : Table -> Text | Integer -> (Any -> Vector Any) -> Problem_Behavior -> Table -fan_out_to_rows table input_column_id function on_problems=Report_Error = +fan_out_to_rows : Table -> Text | Integer -> (Any -> Vector Any) -> Boolean -> Problem_Behavior -> Table +fan_out_to_rows table input_column_id function at_least_one_row=False on_problems=Report_Error = ## Treat this as a special case of fan_out_to_rows_and_columns, with one column. Wrap the provided function to convert each value to a singleton `Vector`. wrapped_function x = function x . map y-> [y] column_names = [input_column_id] - fan_out_to_rows_and_columns table input_column_id wrapped_function column_names on_problems + fan_out_to_rows_and_columns table input_column_id wrapped_function column_names at_least_one_row=at_least_one_row on_problems=on_problems ## PRIVATE Transform a column by applying the given function to the values in the @@ -187,8 +187,8 @@ fan_out_to_rows table input_column_id function on_problems=Report_Error = to a `Vector` of `Vector` of values. - column_names: The names for the generated columns. - on_problems: Specifies the behavior when a problem occurs. 
-fan_out_to_rows_and_columns : Table -> Text | Integer -> (Any -> Vector (Vector Any)) -> Vector Text -> Problem_Behavior -> Table -fan_out_to_rows_and_columns table input_column_id function column_names on_problems=Report_Error = +fan_out_to_rows_and_columns : Table -> Text | Integer -> (Any -> Vector (Vector Any)) -> Vector Text -> Boolean -> Problem_Behavior -> Table +fan_out_to_rows_and_columns table input_column_id function column_names at_least_one_row=False on_problems=Report_Error = problem_builder = Problem_Builder.new unique = Unique_Name_Strategy.new @@ -205,9 +205,15 @@ fan_out_to_rows_and_columns table input_column_id function column_names on_probl # Accumulates repeated position indices for the order mask. order_mask_positions = Vector.new_builder initial_size + maybe_add_empty_row vecs = + should_add_empty_row = vecs.is_empty && at_least_one_row + if should_add_empty_row.not then vecs else + empty_row = Vector.fill num_output_columns Nothing + [empty_row] + 0.up_to num_input_rows . each i-> input_value = input_storage.getItemBoxed i - output_values = function input_value + output_values = function input_value |> maybe_add_empty_row # Append each group of values to the builder. output_values.each row_unchecked-> row = uniform_length num_output_columns row_unchecked problem_builder @@ -355,6 +361,7 @@ handle_nothing function = x-> case x of ## PRIVATE Repeat a computation n times. +repeat_each : Integer -> Any -> Any repeat_each n ~action = 0.up_to n . 
each _-> action ## PRIVATE diff --git a/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso b/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso index 07f866df508c..a68fd39f6695 100644 --- a/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso @@ -115,6 +115,18 @@ spec = t2 = t.tokenize_to_rows "bar" "[bc]" case_sensitivity=Case_Sensitivity.Insensitive t2.should_equal expected + Test.specify "can do tokenize_to_rows with at_least_one_row=True" <| + input = Table.from_rows ["foo", "bar"] [[0, "a12b34r5"], [1, "qqq"], [2, "2r4r55"]] + expected = Table.from_rows ["foo", "bar"] [[0, "12"], [0, "34"], [0, "5"], [1, Nothing], [2, "2"], [2, "4"], [2, "55"]] + actual = input.tokenize_to_rows "bar" "\d+" at_least_one_row=True + actual.should_equal expected + + Test.specify "can do tokenize_to_rows with at_least_one_row=True, with groups" <| + input = Table.from_rows ["foo", "bar"] [[0, "a12b34r5"], [1, "qqq"], [2, "2r44r55"], [3, Nothing]] + expected = Table.from_rows ["foo", "bar"] [[0, "12"], [0, "34"], [1, Nothing], [2, "44"], [2, "55"], [3, Nothing]] + actual = input.tokenize_to_rows "bar" "(\d)(\d)" at_least_one_row=True + actual.should_equal expected + Test.group "Table.split/tokenize column count" <| Test.specify "should generate extra empty columns if column_count is set" <| cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]] From bc0db18a6eed07c5c3acee593fa1e322e413a861 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Sat, 6 May 2023 11:10:24 +0100 Subject: [PATCH 18/28] Small changes from Book Club issues (#6533) - Add dropdown to tokenize and split `column`. - Remove the custom `Join_Kind` dropdown. - Adjust split and tokenize names to start numbering from 1, not 0. - Add JS_Object serialization for Period. - Add `days_until` and `until` to `Date`. - Add `Date_Period.Day` and create `next` and `previous` on `Date`. - Use simple names with `File_Format` dropdown. 
- Avoid using `Main.enso` based imports in `Standard.Base.Data.Map` and `Standard.Base.Data.Text.Helpers`. - Remove an incorrect import from `Standard.Database.Data.Table`. From #6587: A few small changes, lots of lines because this affected lots of tests: - `Table.join` now defaults to `Join_Kind.Left_Outer`, to avoid losing rows in the left table unexpectedly. If the user really wants to have an Inner join, they can switch to it. - `Table.join` now defaults to joining columns by name not by index - it looks in the right table for a column with the same name as the first column in left table. - Missing Input Column errors now specify which table they refer to in the join. - The unique name suffix in column renaming / default column names when loading from file is now a space instead of underscore. --- .../Standard/Base/0.0.0-dev/src/Data/Map.enso | 5 +- .../0.0.0-dev/src/Data/Text/Extensions.enso | 2 +- .../Base/0.0.0-dev/src/Data/Text/Helpers.enso | 6 +- .../Base/0.0.0-dev/src/Data/Time/Date.enso | 50 +++- .../0.0.0-dev/src/Data/Time/Date_Period.enso | 16 +- .../Base/0.0.0-dev/src/Data/Time/Period.enso | 19 ++ .../0.0.0-dev/src/System/File_Format.enso | 2 +- .../Database/0.0.0-dev/src/Data/Table.enso | 50 ++-- .../Standard/Database/0.0.0-dev/src/Main.enso | 2 + .../Table/0.0.0-dev/src/Data/Table.enso | 34 ++- .../src/Delimited/Delimited_Format.enso | 2 +- .../Standard/Table/0.0.0-dev/src/Errors.enso | 7 +- .../0.0.0-dev/src/Internal/Join_Helpers.enso | 17 +- .../src/Internal/Problem_Builder.enso | 10 +- .../src/Internal/Split_Tokenize.enso | 4 +- .../src/Internal/Unique_Name_Strategy.enso | 8 +- .../src/Internal/Widget_Helpers.enso | 72 +++++- .../org/enso/base/time/Date_Period_Utils.java | 14 ++ .../org/enso/table/read/DelimitedReader.java | 2 +- .../org/enso/table/util/NameDeduplicator.java | 4 +- .../Aggregate_Spec.enso | 24 +- .../Common_Table_Operations/Core_Spec.enso | 14 +- .../Cross_Tab_Spec.enso | 4 +- .../Integration_Tests.enso | 2 +- 
.../Join/Cross_Join_Spec.enso | 22 +- .../Join/Join_Spec.enso | 226 +++++++++--------- .../Join/Zip_Spec.enso | 30 +-- .../Select_Columns_Spec.enso | 104 ++++---- .../Transpose_Spec.enso | 12 +- .../src/Database/Codegen_Spec.enso | 4 +- .../Table_Tests/src/Database/Upload_Spec.enso | 12 +- .../Helpers/Unique_Naming_Strategy_Spec.enso | 58 ++--- test/Table_Tests/src/IO/Csv_Spec.enso | 18 +- .../src/IO/Delimited_Read_Spec.enso | 60 ++--- .../src/IO/Delimited_Write_Spec.enso | 2 +- test/Table_Tests/src/IO/Excel_Spec.enso | 20 +- .../src/In_Memory/Column_Spec.enso | 4 +- .../src/In_Memory/Join_Performance_Spec.enso | 2 +- .../src/In_Memory/Split_Tokenize_Spec.enso | 46 ++-- .../Table_Tests/src/In_Memory/Table_Spec.enso | 8 +- test/Table_Tests/src/Util.enso | 43 ++-- test/Tests/src/Data/Time/Date_Spec.enso | 16 ++ .../src/Lazy_Table_Spec.enso | 4 +- 43 files changed, 643 insertions(+), 418 deletions(-) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso index 108115c8c1a0..10767ffea671 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso @@ -1,13 +1,16 @@ +import project.Any.Any import project.Data.Numbers.Integer import project.Data.Vector.Vector import project.Data.Pair.Pair import project.Data.Text.Extensions import project.Data.Text.Text +import project.Error.Error import project.Errors.Illegal_Argument.Illegal_Argument import project.Errors.No_Such_Key.No_Such_Key +import project.Nothing.Nothing +import project.Panic.Panic from project.Data.Boolean import Boolean, True, False -from project import Error, Nothing, Any, Panic ## A key-value store. It is possible to use any type as keys and values and mix them in one Map. 
Keys are checked for equality based on their hash code and `==` operator, which diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso index c36c4ba34b72..fc2c1c6d3b5f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso @@ -900,7 +900,7 @@ Text.repeat : Integer -> Text Text.repeat self count=1 = 0.up_to count . fold "" acc-> _-> acc + self -## ALIAS first, last, left, right, mid, substring +## ALIAS first, last, left, right, mid, substring, slice Creates a new Text by selecting the specified range of the input. This can select a section of text from the beginning, end, or middle of the diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Helpers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Helpers.enso index ef6c323d6386..9b046dced946 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Helpers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Helpers.enso @@ -1,8 +1,6 @@ -from Standard.Base import all - import project.Any.Any -import project.Data.Locale.Locale -import project.Data.Text.Case_Sensitivity.Case_Sensitivity +import project.Data.Text.Text +import project.Error.Error import project.Errors.Common.Type_Error import project.Meta diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso index 34907d65c092..954d2998efa6 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date.enso @@ -314,6 +314,54 @@ type Date end_of : Date_Period -> Date end_of self period=Date_Period.Month = period.adjust_end self + ## Returns the next date adding the `Date_Period` to self. 
+ + Produces a warning for a Date that is before epoch start. + See `Date_Time.enso_epoch_start`. + + Arguments: + - period: the period to add to self. + next : Date_Period -> Date + next self period=Date_Period.Day = self + period.to_period + + ## Returns the previous date subtracting the `Date_Period` from self. + + Produces a warning for a Date that is before epoch start. + See `Date_Time.enso_epoch_start`. + + Arguments: + - period: the period to add to self. + previous : Date_Period -> Date + previous self period=Date_Period.Day = self - period.to_period + + ## Creates a `Period` between self and the provided end date. + + Produces a warning for a Date that is before epoch start. + See `Date_Time.enso_epoch_start`. + + Arguments: + - end: the end date of the interval to count workdays in. + until : Date -> Period + until self end = + ensure_in_epoch self <| ensure_in_epoch end <| + Period.between self end + + ## Counts the days between self (inclusive) and the provided end date + (exclusive). + + Produces a warning for a Date that is before epoch start. + See `Date_Time.enso_epoch_start`. + + Arguments: + - end: the end date of the interval to count workdays in. + - include_end_date: whether to include the end date in the count. + By default the end date is not included in the interval. + days_until : Date -> Boolean -> Integer + days_until self end include_end_date=False = + if end < self then -(end.days_until self include_end_date) else + ensure_in_epoch self <| ensure_in_epoch end <| + (Time_Utils.days_between self end) + if include_end_date then 1 else 0 + ## Counts workdays between self (inclusive) and the provided end date (exclusive). @@ -331,7 +379,7 @@ type Date end-exclusive manner), by default the end date is not included in the count. 
This has the nice property that for example to count the work days within the next week you can do - `date.work_days_until (date + (Period.new days=7)` and it will look at + `date.work_days_until (date + (Period.new days=7))` and it will look at the 7 days starting from the current `date` and not 8 days. This also gives us a property that `date.work_days_until (date.add_work_days N) == N` for any non-negative diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Period.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Period.enso index 0df833bbeb18..4eab0356cb2d 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Period.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Date_Period.enso @@ -1,4 +1,5 @@ import project.Data.Time.Date.Date +import project.Data.Time.Period.Period import project.Data.Time.Date_Time.Date_Time import project.Data.Time.Day_Of_Week.Day_Of_Week from project.Data.Boolean import Boolean, True, False @@ -22,6 +23,8 @@ type Date_Period to any other day. Week (first_day:Day_Of_Week = Day_Of_Week.Monday) + Day + ## PRIVATE This method could be replaced with matching on `Date_Period` supertype if/when that is supported. 
@@ -36,14 +39,25 @@ type Date_Period Date_Period.Quarter -> Date_Period_Utils.quarter_start Date_Period.Month -> TemporalAdjusters.firstDayOfMonth Date_Period.Week first_day -> TemporalAdjusters.previousOrSame first_day.to_java + Date_Period.Day -> Date_Period_Utils.day_start (Time_Utils.utils_for date).apply_adjuster date adjuster ## PRIVATE adjust_end : (Date | Date_Time) -> (Date | Date_Time) - adjust_end self date = + adjust_end self date = if self == Date_Period.Day then date else adjuster = case self of Date_Period.Year -> TemporalAdjusters.lastDayOfYear Date_Period.Quarter -> Date_Period_Utils.quarter_end Date_Period.Month -> TemporalAdjusters.lastDayOfMonth Date_Period.Week first_day -> Date_Period_Utils.end_of_week first_day.to_java + Date_Period.Day -> Date_Period_Utils.day_end (Time_Utils.utils_for date).apply_adjuster date adjuster + + ## PRIVATE + to_period : Period + to_period self = case self of + Date_Period.Year -> Period.new years=1 + Date_Period.Quarter -> Period.new months=3 + Date_Period.Month -> Period.new months=1 + Date_Period.Week _ -> Period.new days=7 + Date_Period.Day -> Period.new days=1 diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso index ed1daadba25c..cc2fbab1670a 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Time/Period.enso @@ -1,10 +1,12 @@ import project.Any.Any +import project.Data.Json.JS_Object import project.Data.Numbers.Integer import project.Data.Ordering.Comparable import project.Data.Text.Extensions import project.Data.Text.Text import project.Data.Time.Date.Date import project.Data.Time.Duration.Duration +import project.Data.Vector.Vector import project.Error.Error import project.Errors.Illegal_Argument.Illegal_Argument import project.Errors.Time_Error.Time_Error @@ -144,3 +146,20 @@ type Period m = if months == 0 && (y=="" || d=="") 
then "" else months.to_text + "M " (y + m + d) . trim + + ## PRIVATE + Convert to a JavaScript Object representing a Period. + + > Example + Convert a period of 10 months to a JS_Object. + + example_to_json = (Period.new months=10).to_js_object + to_js_object : JS_Object + to_js_object self = + b = Vector.new_builder 7 + b.append ["type", "Period"] + b.append ["constructor", "new"] + if self.years==0 . not then b.append ["years", self.years] + if self.months==0 . not then b.append ["months", self.months] + if self.days==0 . not then b.append ["days", self.days] + JS_Object.from_pairs b.to_vector diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso index 292b55027061..a92f9aff7d2c 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File_Format.enso @@ -45,7 +45,7 @@ format_widget : Single_Choice format_widget = all_types = [Auto_Detect] + format_types make_ctor type_obj = - type_name = Meta.get_qualified_type_name type_obj + type_name = Meta.get_simple_type_name type_obj ctors = Meta.meta type_obj . 
constructors is_singleton_type = ctors.length == 0 if is_singleton_type then type_name else diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index 8c355eedf35f..7173af43bc86 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -8,7 +8,7 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State import Standard.Base.Errors.Unimplemented.Unimplemented -from Standard.Table import Auto_Detect, Aggregate_Column, Data_Formatter, Column_Selector, Sort_Column, Match_Columns, Position, Set_Mode, Auto, Value_Type +from Standard.Table import Aggregate_Column, Data_Formatter, Column_Selector, Sort_Column, Match_Columns, Position, Set_Mode, Auto, Value_Type import Standard.Table.Data.Expression.Expression import Standard.Table.Data.Expression.Expression_Error import Standard.Table.Data.Join_Condition.Join_Condition @@ -23,6 +23,7 @@ import Standard.Table.Internal.Java_Exports import Standard.Table.Internal.Table_Helpers import Standard.Table.Internal.Table_Helpers.Table_Column_Helper import Standard.Table.Internal.Problem_Builder.Problem_Builder +import Standard.Table.Internal.Unique_Name_Strategy.Unique_Name_Strategy import Standard.Table.Internal.Widget_Helpers from Standard.Table.Data.Column import get_item_string, normalize_string_for_display from Standard.Table.Data.Table import print_table @@ -187,6 +188,7 @@ type Table table.select_columns [-1, 0, 1] reorder=True Icon: select_column + @columns Widget_Helpers.make_column_name_vector_selector select_columns : Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector) -> Boolean -> Boolean -> Problem_Behavior -> Table ! 
No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range select_columns self (columns = [0]) (reorder = False) (error_on_missing_columns = True) (on_problems = Report_Warning) = new_columns = self.columns_helper.select_columns selectors=columns reorder=reorder error_on_missing_columns=error_on_missing_columns on_problems=on_problems @@ -239,6 +241,7 @@ type Table Remove the first two columns and the last column. table.remove_columns [-1, 0, 1] + @columns Widget_Helpers.make_column_name_vector_selector remove_columns : Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector) -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range remove_columns self (columns = [0]) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.remove_columns selectors=columns error_on_missing_columns=error_on_missing_columns on_problems=on_problems @@ -294,6 +297,7 @@ type Table Move the first column to back. table.reorder_columns [0] position=Position.After_Other_Columns + @columns Widget_Helpers.make_column_name_vector_selector reorder_columns : Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector) -> Position -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range reorder_columns self (columns = [0]) (position = Position.Before_Other_Columns) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.reorder_columns selectors=columns position=position error_on_missing_columns on_problems=on_problems @@ -617,6 +621,7 @@ type Table double_inventory = table.at "total_stock" * 2 table.set double_inventory new_name="total_stock" table.set "2 * [total_stock]" new_name="total_stock_expr" + @new_name Widget_Helpers.make_column_name_selector set : Column | Text -> Text | Nothing -> Set_Mode -> Problem_Behavior -> Table ! 
Unsupported_Name | Existing_Column | Missing_Column | No_Such_Column | Expression_Error set self column new_name=Nothing set_mode=Set_Mode.Add_Or_Update on_problems=Report_Warning = resolved = case column of @@ -794,6 +799,7 @@ type Table Sort the table by columns whose names start with letter `a`. table.order_by [(Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive)] + @columns Widget_Helpers.make_order_by_selector order_by : Text | Sort_Column | Vector (Text | Sort_Column) -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns | Column_Indexes_Out_Of_Range order_by self (columns = ([(Sort_Column.Name (self.columns.at 0 . name))])) text_ordering=Text_Ordering.Default error_on_missing_columns=True on_problems=Problem_Behavior.Report_Warning = Panic.handle_wrapped_dataflow_error <| problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns types_to_always_throw=[No_Input_Columns_Selected] @@ -843,6 +849,7 @@ type Table - If floating points values are present in the distinct columns, a `Floating_Point_Equality` is reported according to the `on_problems` setting. + @columns Widget_Helpers.make_column_name_vector_selector distinct : Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector) -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | No_Input_Columns_Selected | Floating_Point_Equality distinct self columns=self.column_names case_sensitivity=Case_Sensitivity.Default error_on_missing_columns=True on_problems=Report_Warning = key_columns = self.columns_helper.select_columns selectors=columns reorder=True error_on_missing_columns=error_on_missing_columns on_problems=on_problems . catch No_Output_Columns _-> @@ -855,12 +862,15 @@ type Table Arguments: - right: The table to join with. - - join_kind: The `Join_Kind` for the joining the two tables. 
+ - join_kind: The `Join_Kind` for the joining the two tables. It defaults + to `Left_Outer`. - on: A single condition or a common column name, or a list thereof, on which to correlate rows from the two tables. If multiple conditions are supplied, rows are correlated only if all are true. If common column names are provided, these columns should be present in both tables and an equality condition is added for each of them. + By default, the join is performed on the first column of the left table + correlated with a column in the right table with the same name. - right_prefix: The prefix added to right table column names in case of name conflict. - on_problems: Specifies how to handle problems if they occur, reporting @@ -908,10 +918,9 @@ type Table allows to join the two tables on equality of corresponding columns with the same name. So `table.join other on=["A", "B"]` is a shorthand for: table.join other on=[Join_Condition.Equals "A" "A", Join_Condition.Equals "B" "B"] - @join_kind Widget_Helpers.join_kind_selector @on Widget_Helpers.make_column_name_selector join : Table -> Join_Kind -> Join_Condition | Text | Vector (Join_Condition | Text) -> Text -> Problem_Behavior -> Table - join self right join_kind=Join_Kind.Inner on=[Join_Condition.Equals 0 0] right_prefix="Right_" on_problems=Report_Warning = + join self right join_kind=Join_Kind.Left_Outer on=[Join_Condition.Equals self.column_names.first] right_prefix="Right " on_problems=Report_Warning = can_proceed = if Table_Helpers.is_table right . not then Error.throw (Type_Error.Error Table right "right") else same_backend = case right of _ : Table -> True @@ -996,7 +1005,7 @@ type Table example, by sorting the table; in-memory tables will keep the memory layout order while for database tables the order may be unspecified). 
cross_join : Table -> Integer | Nothing -> Text -> Problem_Behavior -> Table - cross_join self right right_row_limit=100 right_prefix="Right_" on_problems=Report_Warning = + cross_join self right right_row_limit=100 right_prefix="Right " on_problems=Report_Warning = _ = [right, right_row_limit, right_prefix, on_problems] Error.throw (Unsupported_Database_Operation.Error "Table.cross_join is not implemented yet for the Database backends.") @@ -1045,7 +1054,7 @@ type Table order of columns is undefined and the operation will fail, reporting a `Undefined_Column_Order` problem and returning an empty table. zip : Table -> Boolean | Report_Unmatched -> Text -> Problem_Behavior -> Table - zip self right keep_unmatched=Report_Unmatched right_prefix="Right_" on_problems=Report_Warning = + zip self right keep_unmatched=Report_Unmatched right_prefix="Right " on_problems=Report_Warning = _ = [right, keep_unmatched, right_prefix, on_problems] Error.throw (Unsupported_Database_Operation.Error "Table.zip is not implemented yet for the Database backends.") @@ -1340,6 +1349,7 @@ type Table - If any column names in the new table are clashing, a `Duplicate_Output_Column_Names` is reported according to the `on_problems` setting. + @id_fields Widget_Helpers.make_column_name_vector_selector transpose : Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector) -> Text -> Text -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range | Duplicate_Output_Column_Names transpose self id_fields=[] (name_field="Name") (value_field="Value") (error_on_missing_columns=True) (on_problems = Report_Warning) = ## Avoid unused arguments warning. We cannot rename arguments to `_`, @@ -1382,6 +1392,9 @@ type Table an `Unquoted_Delimiter` - If there are more than 10 issues with a single column, an `Additional_Warnings`. 
+ @group_by Widget_Helpers.make_column_name_vector_selector + @name_column Widget_Helpers.make_column_name_selector + @values (Widget_Helpers.make_aggregate_column_selector include_group_by=False) cross_tab : Aggregate_Column | Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector | Aggregate_Column) -> (Text | Integer | Column) -> Vector Aggregate_Column -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range | Invalid_Aggregate_Column | Floating_Point_Equality | Invalid_Aggregation | Unquoted_Delimiter | Additional_Warnings cross_tab self group_by=[] name_column=self.column_names.first values=Aggregate_Column.Count (on_problems=Report_Warning) = ## Avoid unused arguments warning. We cannot rename arguments to `_`, @@ -1392,6 +1405,8 @@ type Table ## Parsing values is not supported in database tables, the table has to be loaded into memory first with `read`. + @type Widget_Helpers.parse_type_selector + @columns Widget_Helpers.make_column_name_vector_selector parse : Text | Integer | Column_Selector | Vector (Text | Integer | Column_Selector) -> Value_Type | Auto -> Text | Data_Formatter -> Boolean -> Problem_Behavior -> Table parse columns=(self.columns . filter (c-> c.value_type.is_text) . map .name) type=Auto format=Data_Formatter.Value error_on_missing_columns=True on_problems=Report_Warning = ## Avoid unused arguments warning. We cannot rename arguments to `_`, @@ -1415,6 +1430,7 @@ type Table ! Error Conditions If the data exceeds the `column_count`, a `Column_Count_Exceeded` will be reported according to the `on_problems` behavior. 
+ @column Widget_Helpers.make_column_name_selector split_to_columns : Text | Integer -> Text -> Integer | Nothing -> Problem_Behavior -> Table split_to_columns self column delimiter="," column_count=Nothing on_problems=Report_Error = _ = [column delimiter column_count on_problems] @@ -1426,6 +1442,7 @@ type Table Arguments: - column: The name or index of the column to split the text of. - delimiter: The term or terms used to split the text. + @column Widget_Helpers.make_column_name_selector split_to_rows : Text | Integer -> Text -> Table split_to_rows self column delimiter="," = _ = [column delimiter] @@ -1451,6 +1468,7 @@ type Table ! Error Conditions If the data exceeds the `column_count`, a `Column_Count_Exceeded` will be reported according to the `on_problems` behavior. + @column Widget_Helpers.make_column_name_selector tokenize_to_columns : Text | Integer -> Text -> Case_Sensitivity -> Integer | Nothing -> Problem_Behavior -> Table tokenize_to_columns self column pattern="." case_sensitivity=Case_Sensitivity.Sensitive column_count=Nothing on_problems=Report_Error = _ = [column pattern case_sensitivity column_count on_problems] @@ -1470,6 +1488,7 @@ type Table - at_least_one_row: If True, a tokenization that returns no values will still produce at least one row, with `Nothing` for the output column values. Equivalent to converting a tokenization output of [] to [Nothing]. + @column Widget_Helpers.make_column_name_selector tokenize_to_rows : Text | Integer -> Text -> Case_Sensitivity -> Boolean -> Table tokenize_to_rows self column pattern="." case_sensitivity=Case_Sensitivity.Sensitive at_least_one_row=False = _ = [column, pattern, case_sensitivity, at_least_one_row] @@ -1499,6 +1518,7 @@ type Table will be named ` ` where `N` is the number of the marked group. If the new name is already in use it will be renamed following the normal suffixing strategy. 
+ @column Widget_Helpers.make_column_name_selector parse_to_columns : Text | Integer -> Text -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table parse_to_columns self column pattern="." case_sensitivity=Case_Sensitivity.Sensitive parse_values=True on_problems=Report_Error = _ = [column, pattern, case_sensitivity, parse_values, on_problems] @@ -1554,6 +1574,7 @@ type Table If the backend does not support the requested target type, the closest supported type is chosen and a `Inexact_Type_Coercion` problem is reported. + @columns Widget_Helpers.make_column_name_vector_selector cast : (Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector)) -> Value_Type -> Problem_Behavior -> Table ! Illegal_Argument | Inexact_Type_Coercion | Lossy_Conversion cast self columns=[0] value_type=Value_Type.Char on_problems=Problem_Behavior.Report_Warning = selected = self.select_columns columns @@ -1593,9 +1614,7 @@ type Table table = self.connection.read_statement sql table.at column_name . at 0 - ## UNSTABLE - - Returns a materialized dataframe containing rows of this table. + ## Returns a materialized dataframe containing rows of this table. Arguments: - max_rows: specifies a maximum amount of rows to fetch; if not set, all @@ -1859,16 +1878,9 @@ display_dataframe df indices_count all_rows_count format_terminal = is, otherwise numerical suffixes are added. fresh_names : Vector Text -> Vector Text -> Vector Text fresh_names used_names preferred_names = - freshen currently_used name ix = - new_name = if ix == 0 then name else name+"_"+ix.to_text - case currently_used.contains new_name of - False -> new_name - True -> freshen currently_used name ix+1 - res = preferred_names . 
fold [used_names, []] acc-> name-> - used = acc.first - new_name = freshen used name 0 - [used_names + [new_name], acc.second + [new_name]] - res.second + unique = Unique_Name_Strategy.new + unique.mark_used used_names + unique.make_all_unique preferred_names ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso index 6986f97ec3a0..66e8fec763ba 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso @@ -6,6 +6,7 @@ import project.Connection.Database import project.Connection.Postgres_Details.Postgres_Details import project.Connection.SQLite_Details.SQLite_Details import project.Connection.SQLite_Details.In_Memory +import project.Connection.SQLite_Format.SQLite_Format import project.Connection.SSL_Mode.SSL_Mode import project.Data.SQL_Query.SQL_Query import project.Extensions.Upload_Table @@ -21,6 +22,7 @@ export project.Connection.Database export project.Connection.Postgres_Details.Postgres_Details export project.Connection.SQLite_Details.SQLite_Details export project.Connection.SQLite_Details.In_Memory +export project.Connection.SQLite_Format.SQLite_Format export project.Connection.SSL_Mode.SSL_Mode export project.Data.SQL_Query.SQL_Query export project.Extensions.Upload_Table diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index 49dae0ee81b5..442100065529 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -311,6 +311,7 @@ type Table table.select_columns [-1, 0, 1] reorder=True Icon: select_column + @columns Widget_Helpers.make_column_name_vector_selector select_columns : Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector) -> Boolean -> Boolean -> Problem_Behavior -> Table ! 
No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range select_columns self columns=[0] (reorder = False) (error_on_missing_columns = True) (on_problems = Report_Warning) = new_columns = self.columns_helper.select_columns selectors=columns reorder=reorder error_on_missing_columns=error_on_missing_columns on_problems=on_problems @@ -363,7 +364,7 @@ type Table Remove the first two columns and the last column. table.remove_columns [-1, 0, 1] - + @columns Widget_Helpers.make_column_name_vector_selector remove_columns : Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector) -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range remove_columns self (columns=[0]) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.remove_columns selectors=columns error_on_missing_columns=error_on_missing_columns on_problems=on_problems @@ -419,7 +420,7 @@ type Table Move the first column to back. table.reorder_columns [0] position=Position.After_Other_Columns - + @columns Widget_Helpers.make_column_name_vector_selector reorder_columns : Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector) -> Position -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range reorder_columns self (columns = [0]) (position = Position.Before_Other_Columns) (error_on_missing_columns = False) (on_problems = Report_Warning) = new_columns = self.columns_helper.reorder_columns selectors=columns position=position error_on_missing_columns=error_on_missing_columns on_problems=on_problems @@ -681,6 +682,7 @@ type Table Sort the table by columns whose names start with letter `a`. 
table.order_by [(Sort_Column.Select_By_Name "a.*" use_regex=True case_sensitivity=Case_Sensitivity.Insensitive)] + @columns Widget_Helpers.make_order_by_selector order_by : Text | Sort_Column | Vector (Text | Sort_Column) -> Text_Ordering -> Boolean -> Problem_Behavior -> Table ! Incomparable_Values | No_Input_Columns_Selected | Missing_Input_Columns | Column_Indexes_Out_Of_Range order_by self (columns = ([(Sort_Column.Name (self.columns.at 0 . name))])) text_ordering=Text_Ordering.Default error_on_missing_columns=True on_problems=Problem_Behavior.Report_Warning = problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns types_to_always_throw=[No_Input_Columns_Selected] @@ -739,6 +741,7 @@ type Table - If floating points values are present in the distinct columns, a `Floating_Point_Equality` is reported according to the `on_problems` setting. + @columns Widget_Helpers.make_column_name_vector_selector distinct : Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector) -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | No_Input_Columns_Selected | Floating_Point_Equality distinct self (columns = self.column_names) case_sensitivity=Case_Sensitivity.Default error_on_missing_columns=True on_problems=Report_Warning = key_columns = self.columns_helper.select_columns selectors=columns reorder=True error_on_missing_columns=error_on_missing_columns on_problems=on_problems . catch No_Output_Columns _-> @@ -833,6 +836,8 @@ type Table Parse all columns inferring their types, using `,` as the decimal point for numbers. 
table.parse format=(Data_Formatter.Value.with_number_formatting decimal_point=',') + @type Widget_Helpers.parse_type_selector + @columns Widget_Helpers.make_column_name_vector_selector parse : Text | Integer | Column_Selector | Vector (Text | Integer | Column_Selector) -> Value_Type | Auto -> Text | Data_Formatter -> Boolean -> Problem_Behavior -> Table parse self columns=(self.columns . filter (c-> c.value_type.is_text) . map .name) type=Auto format=Data_Formatter.Value error_on_missing_columns=True on_problems=Report_Warning = formatter = case format of @@ -918,6 +923,7 @@ type Table If the backend does not support the requested target type, the closest supported type is chosen and a `Inexact_Type_Coercion` problem is reported. + @columns Widget_Helpers.make_column_name_vector_selector cast : (Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector)) -> Value_Type -> Problem_Behavior -> Table ! Illegal_Argument | Inexact_Type_Coercion | Lossy_Conversion cast self columns=[0] value_type=Value_Type.Char on_problems=Problem_Behavior.Report_Warning = _ = [columns, value_type, on_problems] @@ -939,6 +945,7 @@ type Table ! Error Conditions If the data exceeds the `column_count`, a `Column_Count_Exceeded` will be reported according to the `on_problems` behavior. + @column Widget_Helpers.make_column_name_selector split_to_columns : Text | Integer -> Text -> Integer | Nothing -> Problem_Behavior -> Table split_to_columns self column delimiter="," column_count=Nothing on_problems=Report_Error = Split_Tokenize.split_to_columns self column delimiter column_count on_problems @@ -949,6 +956,7 @@ type Table Arguments: - column: The name or index of the column to split the text of. - delimiter: The term or terms used to split the text. + @column Widget_Helpers.make_column_name_selector split_to_rows : Text | Integer -> Text -> Table split_to_rows self column delimiter="," = Split_Tokenize.split_to_rows self column delimiter @@ -973,6 +981,7 @@ type Table ! 
Error Conditions If the data exceeds the `column_count`, a `Column_Count_Exceeded` will be reported according to the `on_problems` behavior. + @column Widget_Helpers.make_column_name_selector tokenize_to_columns : Text | Integer -> Text -> Case_Sensitivity -> Integer | Nothing -> Problem_Behavior -> Table tokenize_to_columns self column pattern="." case_sensitivity=Case_Sensitivity.Sensitive column_count=Nothing on_problems=Report_Error = Split_Tokenize.tokenize_to_columns self column pattern case_sensitivity column_count on_problems @@ -991,6 +1000,7 @@ type Table - at_least_one_row: If True, a tokenization that returns no values will still produce at least one row, with `Nothing` for the output column values. Equivalent to converting a tokenization output of [] to [Nothing]. + @column Widget_Helpers.make_column_name_selector tokenize_to_rows : Text | Integer -> Text -> Case_Sensitivity -> Boolean -> Table tokenize_to_rows self column pattern="." case_sensitivity=Case_Sensitivity.Sensitive at_least_one_row=False = Split_Tokenize.tokenize_to_rows self column pattern case_sensitivity at_least_one_row @@ -1019,6 +1029,7 @@ type Table will be named ` ` where `N` is the number of the marked group. If the new name is already in use it will be renamed following the normal suffixing strategy. + @column Widget_Helpers.make_column_name_selector parse_to_columns : Text | Integer -> Text -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table parse_to_columns self column pattern="." 
case_sensitivity=Case_Sensitivity.Sensitive parse_values=True on_problems=Report_Error = Split_Tokenize.parse_to_columns self column pattern case_sensitivity parse_values on_problems @@ -1208,6 +1219,7 @@ type Table double_inventory = table.at "total_stock" * 2 table.set double_inventory new_name="total_stock" table.set "2 * [total_stock]" new_name="total_stock_expr" + @new_name Widget_Helpers.make_column_name_selector set : Column | Text -> Text | Nothing -> Set_Mode -> Problem_Behavior -> Table ! Existing_Column | Missing_Column | No_Such_Column | Expression_Error set self column new_name=Nothing set_mode=Set_Mode.Add_Or_Update on_problems=Report_Warning = resolved = case column of @@ -1322,12 +1334,15 @@ type Table Arguments: - right: The table to join with. - - join_kind: The `Join_Kind` for the joining the two tables. + - join_kind: The `Join_Kind` for the joining the two tables. It defaults + to `Left_Outer`. - on: A single condition or a common column name, or a list thereof, on which to correlate rows from the two tables. If multiple conditions are supplied, rows are correlated only if all are true. If common column names are provided, these columns should be present in both tables and an equality condition is added for each of them. + By default, the join is performed on the first column of the left table + correlated with a column in the right table with the same name. - right_prefix: The prefix added to right table column names in case of name conflict. - on_problems: Specifies how to handle problems if they occur, reporting @@ -1375,10 +1390,9 @@ type Table allows to join the two tables on equality of corresponding columns with the same name. 
So `table.join other on=["A", "B"]` is a shorthand for: table.join other on=[Join_Condition.Equals "A" "A", Join_Condition.Equals "B" "B"] - @join_kind Widget_Helpers.join_kind_selector @on Widget_Helpers.make_column_name_selector join : Table -> Join_Kind -> Join_Condition | Text | Vector (Join_Condition | Text) -> Text -> Problem_Behavior -> Table - join self right join_kind=Join_Kind.Inner on=[Join_Condition.Equals 0 0] right_prefix="Right_" on_problems=Report_Warning = + join self right join_kind=Join_Kind.Left_Outer on=[Join_Condition.Equals self.column_names.first] right_prefix="Right " on_problems=Report_Warning = if check_table "right" right then # [left_unmatched, matched, right_unmatched] rows_to_keep = case join_kind of @@ -1439,7 +1453,7 @@ type Table example, by sorting the table; in-memory tables will keep the memory layout order while for database tables the order may be unspecified). cross_join : Table -> Integer | Nothing -> Text -> Problem_Behavior -> Table - cross_join self right right_row_limit=100 right_prefix="Right_" on_problems=Report_Warning = + cross_join self right right_row_limit=100 right_prefix="Right " on_problems=Report_Warning = if check_table "right" right then limit_problems = case right_row_limit.is_nothing.not && (right.row_count > right_row_limit) of True -> @@ -1495,7 +1509,7 @@ type Table order of columns is undefined and the operation will fail, reporting a `Undefined_Column_Order` problem and returning an empty table. 
zip : Table -> Boolean | Report_Unmatched -> Text -> Problem_Behavior -> Table - zip self right keep_unmatched=Report_Unmatched right_prefix="Right_" on_problems=Report_Warning = + zip self right keep_unmatched=Report_Unmatched right_prefix="Right " on_problems=Report_Warning = if check_table "right" right then keep_unmatched_bool = case keep_unmatched of Report_Unmatched -> True @@ -1694,6 +1708,7 @@ type Table - If any column names in the new table are clashing, a `Duplicate_Output_Column_Names` is reported according to the `on_problems` setting. + @id_fields Widget_Helpers.make_column_name_vector_selector transpose : Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector) -> Text -> Text -> Boolean -> Problem_Behavior -> Table ! No_Output_Columns | Missing_Input_Columns | Column_Indexes_Out_Of_Range | Duplicate_Output_Column_Names transpose self (id_fields = []) (name_field="Name") (value_field="Value") (error_on_missing_columns=True) (on_problems = Report_Warning) = columns_helper = self.columns_helper @@ -1749,7 +1764,10 @@ type Table an `Unquoted_Delimiter` - If there are more than 10 issues with a single column, an `Additional_Warnings`. - cross_tab : Aggregate_Column | Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector | Aggregate_Column) -> (Text | Integer) -> Vector Aggregate_Column -> Problem_Behavior -> Table ! Missing_Input_Columns | Column_Indexes_Out_Of_Range | Invalid_Aggregate_Column | Floating_Point_Equality | Invalid_Aggregation | Unquoted_Delimiter | Additional_Warnings + @group_by Widget_Helpers.make_column_name_vector_selector + @name_column Widget_Helpers.make_column_name_selector + @values (Widget_Helpers.make_aggregate_column_selector include_group_by=False) + cross_tab : Aggregate_Column | Text | Integer | Column_Selector | Vector (Integer | Text | Column_Selector | Aggregate_Column) -> (Text | Integer) -> Aggregate_Column | Vector Aggregate_Column -> Problem_Behavior -> Table ! 
Missing_Input_Columns | Column_Indexes_Out_Of_Range | Invalid_Aggregate_Column | Floating_Point_Equality | Invalid_Aggregation | Unquoted_Delimiter | Additional_Warnings cross_tab self group_by=[] name_column=self.column_names.first values=Aggregate_Column.Count (on_problems=Report_Warning) = columns_helper = self.columns_helper problem_builder = Problem_Builder.new error_on_missing_columns=True diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso index d2b8799dc71e..67910e938f2f 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Delimited/Delimited_Format.enso @@ -49,7 +49,7 @@ type Delimited_Format character if it anywhere else than at the beginning of the line. This option is only applicable for read mode and does not affect writing. It defaults to `Nothing` which means that comments are disabled. 
- Delimited (delimiter:Text) (encoding:Encoding=Encoding.utf_8) (skip_rows:Integer=0) (row_limit:Integer|Nothing=Nothing) (quote_style:Quote_Style=Quote_Style.With_Quotes) (headers:Boolean|Infer=Infer) (value_formatter:Data_Formatter|Nothing=Data_Formatter.Value) (keep_invalid_rows:Boolean=True) (line_endings:Line_Ending_Style=Infer) (comment_character:Text|Nothing=Nothing) + Delimited (delimiter:Text=',') (encoding:Encoding=Encoding.utf_8) (skip_rows:Integer=0) (row_limit:Integer|Nothing=Nothing) (quote_style:Quote_Style=Quote_Style.With_Quotes) (headers:Boolean|Infer=Infer) (value_formatter:Data_Formatter|Nothing=Data_Formatter.Value) (keep_invalid_rows:Boolean=True) (line_endings:Line_Ending_Style=Infer) (comment_character:Text|Nothing=Nothing) ## PRIVATE ADVANCED diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso index 33a1dd505f5f..ad9fcbeea235 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso @@ -12,14 +12,17 @@ polyglot java import org.enso.table.error.EmptySheetException type Missing_Input_Columns ## PRIVATE One or more columns not found in the input table. - Error (criteria : [Text]) + Error (criteria : [Text]) (where:Text|Nothing = Nothing) ## PRIVATE Convert a missing input error to a human-readable form. to_display_text : Text to_display_text self = - "The criteria "+self.criteria.to_text+" did not match any columns." + where = case self.where of + Nothing -> "." + location : Text -> " in "+location+"." 
+ "The criteria "+self.criteria.to_text+" did not match any columns"+where type Column_Indexes_Out_Of_Range ## PRIVATE diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso index 313ac6a9c705..abdd958ea67d 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Join_Helpers.enso @@ -23,9 +23,10 @@ type Join_Condition_Resolver resolve : Join_Condition | Text | Vector (Join_Condition | Text) -> Problem_Behavior -> Join_Condition_Resolution resolve self conditions on_problems = redundant_names = Vector.new_builder - problem_builder = Problem_Builder.new types_to_always_throw=[Missing_Input_Columns, Column_Indexes_Out_Of_Range] + left_problem_builder = Problem_Builder.new missing_input_columns_location="the left table" types_to_always_throw=[Missing_Input_Columns, Column_Indexes_Out_Of_Range] + right_problem_builder = Problem_Builder.new missing_input_columns_location="the right table" types_to_always_throw=[Missing_Input_Columns, Column_Indexes_Out_Of_Range] - resolve_selector resolver selector = + resolve_selector problem_builder resolver selector = r_1 = resolver selector r_2 = r_1.catch No_Such_Column _-> problem_builder.report_missing_input_columns [selector] @@ -33,9 +34,10 @@ type Join_Condition_Resolver r_2.catch Index_Out_Of_Bounds _-> problem_builder.report_oob_indices [selector] Nothing - resolve_left = resolve_selector self.left_at - resolve_right = resolve_selector self.right_at + resolve_left = resolve_selector left_problem_builder self.left_at + resolve_right = resolve_selector right_problem_builder self.right_at + problem_builder = Problem_Builder.new is_nothing column = case column of Nothing -> True _ -> False @@ -70,7 +72,12 @@ type Join_Condition_Resolver Value_Type.expect_comparable left right_lower <| Value_Type.expect_comparable left right_upper <| 
self.make_between problem_builder left right_lower right_upper - problem_builder.attach_problems_before on_problems <| + attach_problems ~result = + left_problem_builder.attach_problems_before on_problems <| + right_problem_builder.attach_problems_before on_problems <| + problem_builder.attach_problems_before on_problems <| + result + attach_problems <| if converted.contains Nothing then Panic.throw (Illegal_State.Error "Impossible: unresolved columns remaining in the join resolution. This should have raised a dataflow error. This is a bug in the Table library.") else Join_Condition_Resolution.Result converted redundant_names.to_vector diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso index c90d924b2779..2c668f40a801 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Problem_Builder.enso @@ -8,7 +8,7 @@ from project.Errors import Missing_Input_Columns, Column_Indexes_Out_Of_Range, D ## PRIVATE type Problem_Builder ## PRIVATE - Value types_to_always_throw oob_indices missing_input_columns other + Value types_to_always_throw oob_indices missing_input_columns missing_input_columns_location other ## PRIVATE report_oob_indices self indices = @@ -39,7 +39,7 @@ type Problem_Builder if vec.not_empty then problems.append (problem_creator vec) - build_vector_and_append self.missing_input_columns Missing_Input_Columns.Error + build_vector_and_append self.missing_input_columns (Missing_Input_Columns.Error _ where=self.missing_input_columns_location) build_vector_and_append self.oob_indices Column_Indexes_Out_Of_Range.Error self.other.to_vector.each problems.append @@ -91,10 +91,12 @@ type Problem_Builder methods regardless of the `Problem_Behavior` used. Defaults to `False`. 
Setting this to `True` is essentially a shorthand for adding these two problem types to `types_to_always_throw`. + - missing_input_columns_location: The location to add to the missing + input column error to make it more informative. Defaults to `Nothing`. new : Vector -> Boolean -> Problem_Builder - new types_to_always_throw=[] error_on_missing_columns=False = + new types_to_always_throw=[] error_on_missing_columns=False missing_input_columns_location=Nothing = additional_types_to_throw = if error_on_missing_columns then [Missing_Input_Columns, Column_Indexes_Out_Of_Range, Invalid_Aggregate_Column] else [] - Problem_Builder.Value types_to_always_throw+additional_types_to_throw (Ref.new Vector_Builder.empty) (Ref.new Vector_Builder.empty) other=Vector.new_builder + Problem_Builder.Value types_to_always_throw+additional_types_to_throw (Ref.new Vector_Builder.empty) (Ref.new Vector_Builder.empty) missing_input_columns_location other=Vector.new_builder ## PRIVATE Appends a `Vector` to a `Vector_Builder` stored in a `Ref`. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso index fb2199da456b..46b8ac42e534 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso @@ -173,7 +173,7 @@ fan_out_to_rows table input_column_id function at_least_one_row=False on_problem x | 12 34 56 | y ===> ... | ... | ... - foo | bar 0 | bar 1 | baz + foo | bar 1 | bar 2 | baz ----+-------+-------+---- x | 1 | 2 | y x | 3 | 4 | y @@ -367,7 +367,7 @@ repeat_each n ~action = 0.up_to n . each _-> action ## PRIVATE Name a column by appending an integer to a base column name. 
default_column_namer : Text -> Integer -> Text -default_column_namer base_name i = base_name + " " + i.to_text +default_column_namer base_name i = base_name + " " + (i+1).to_text ## PRIVATE Pad or truncate a vector to be a specified length; if altered, report diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Unique_Name_Strategy.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Unique_Name_Strategy.enso index 867c80966e09..853836a51891 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Unique_Name_Strategy.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Unique_Name_Strategy.enso @@ -56,10 +56,10 @@ type Unique_Name_Strategy > Example Rename names from a second list to avoid clashing with the first one. - first = ["A", "B", "second_A"] - second = ["A", "B", "second_A_1", "C"] + first = ["A", "B", "second A"] + second = ["A", "B", "second A 1", "C"] unique_second = Unique_Name_Strategy.combine_with_prefix first second "second_" - unique_second == ["second_A_2", "second_B", "second_A_1", "C"] + unique_second == ["second A 2", "second_B", "second A 1", "C"] combine_with_prefix : Vector Text -> Vector Text -> Text -> Unique_Name_Strategy combine_with_prefix self first second second_prefix = Vector.from_polyglot_array <| @@ -110,7 +110,7 @@ type Unique_Name_Strategy > Example strategy = Unique_Name_Strategy.new strategy.make_unique "A" # returns "A" - strategy.make_unique "A" # returns "A_1" + strategy.make_unique "A" # returns "A 1" make_unique : Text -> Text make_unique self name = self.deduplicator.makeUnique name diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Widget_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Widget_Helpers.enso index c6274fca3cb7..b1bfae4ab6ec 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Widget_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Widget_Helpers.enso @@ -1,16 +1,72 @@ from 
Standard.Base import all - -from Standard.Base.Metadata.Widget import Single_Choice +from Standard.Base.Metadata.Widget import Single_Choice, Vector_Editor from Standard.Base.Metadata.Choice import Option import Standard.Base.Metadata.Display -from project.Data.Table import Table +import project.Data.Table.Table +import project.Data.Aggregate_Column.Aggregate_Column + +## PRIVATE + Make an aggregate column selector. +make_aggregate_column_selector : Table -> Display -> Boolean -> Single_Choice +make_aggregate_column_selector table display=Display.Always include_group_by=True = + col_names_selector = make_column_name_selector table display=Display.Always + column_widget = Pair.new "column" col_names_selector + + col_list_selector = make_column_name_vector_selector table display=Display.Always + + group_by = if include_group_by then [Option "Group By" "(Aggregate_Column.Group_By)" [column_widget]] else [] + count = Option "Count" "Aggregate_Column.Count" + count_distinct = Option "Count Distinct" "(Aggregate_Column.Count_Distinct)" [Pair.new "columns" (col_list_selector)] + first = Option "First" "(Aggregate_Column.First)" [column_widget, Pair.new "order_by" (col_list_selector)] + last = Option "Last" "(Aggregate_Column.Last)" [column_widget, Pair.new "order_by" (col_list_selector)] + + count_not_nothing = Option "Count Not Nothing" "(Aggregate_Column.Count_Not_Nothing)" [column_widget] + count_nothing = Option "Count Nothing" "(Aggregate_Column.Count_Nothing)" [column_widget] + + ## Should be a list of Text columns only + count_not_empty = Option "Count Not Empty" "(Aggregate_Column.Count_Not_Empty)" [column_widget] + count_empty = Option "Count Empty" "(Aggregate_Column.Count_Empty)" [column_widget] + concatenate = Option "Concatenate" "(Aggregate_Column.Concatenate)" [column_widget] + shortest = Option "Shortest" "(Aggregate_Column.Shortest)" [column_widget] + longest = Option "Longest" "(Aggregate_Column.Longest)" [column_widget] + + ## Should be a list of 
Numeric columns only + sum = Option "Sum" "(Aggregate_Column.Sum)" [column_widget] + average = Option "Average" "(Aggregate_Column.Average)" [column_widget] + median = Option "Median" "(Aggregate_Column.Median)" [column_widget] + percentile = Option "Percentile" "(Aggregate_Column.Percentile)" [column_widget] + mode = Option "Mode" "(Aggregate_Column.Mode)" [column_widget] + standard_deviation = Option "Standard Deviation" "(Aggregate_Column.Standard_Deviation)" [column_widget] + + # Should be a list of comparable columns only + maximum = Option "Maximum" "(Aggregate_Column.Maximum)" [column_widget] + minimum = Option "Minimum" "(Aggregate_Column.Minimum)" [column_widget] + + Single_Choice display=display values=(group_by+[count, count_distinct, first, last, count_not_nothing, count_nothing, count_not_empty, count_empty, concatenate, shortest, longest, sum, average, median, percentile, mode, standard_deviation, maximum, minimum]) ## PRIVATE Make a column name selector. make_column_name_selector : Table -> Display -> Single_Choice make_column_name_selector table display=Display.Always = - Single_Choice display=display values=(table.column_names.map n->(Option n n.pretty)) + col_names = table.column_names + names = col_names.map n-> Option n n.pretty + Single_Choice display=display values=names + +## PRIVATE + Make a multiple column name selector. +make_column_name_vector_selector : Table -> Display -> Vector_Editor +make_column_name_vector_selector table display=Display.Always = + item_editor = make_column_name_selector table display=Display.Always + Vector_Editor item_editor=item_editor item_default=table.column_names.first.pretty display=display + +## PRIVATE + Make a column name selector. 
+make_order_by_selector : Table -> Display -> Single_Choice +make_order_by_selector table display=Display.Always = + col_names = table.column_names + names = col_names.fold [] c-> n-> c + [Option n+" (Asc)" n.pretty, Option n+" (Desc)" "(Sort_Column.Name "+n.pretty+" Sort_Direction.Descending)"] + Single_Choice display=display values=names ## PRIVATE Selector for type argument on `Column.parse`. @@ -21,11 +77,3 @@ parse_type_selector = options = names.zip choice . map pair-> Option pair.first pair.second Single_Choice display=Display.Always values=options -## PRIVATE - Selector for type argument on `Column.parse`. -join_kind_selector : Single_Choice -join_kind_selector = - choice = ['Join_Kind.Inner','Join_Kind.Left_Outer','Join_Kind.Right_Outer','Join_Kind.Full','Join_Kind.Left_Exclusive','Join_Kind.Right_Exclusive'] - names = ['Inner', 'Left Outer', 'Right Outer', 'Full', 'Left Exclusive', 'Right Exclusive'] - options = names.zip choice . map pair-> Option pair.first pair.second - Single_Choice display=Display.Always values=options diff --git a/std-bits/base/src/main/java/org/enso/base/time/Date_Period_Utils.java b/std-bits/base/src/main/java/org/enso/base/time/Date_Period_Utils.java index bf4b5f882638..48902640e92b 100644 --- a/std-bits/base/src/main/java/org/enso/base/time/Date_Period_Utils.java +++ b/std-bits/base/src/main/java/org/enso/base/time/Date_Period_Utils.java @@ -5,6 +5,20 @@ import java.time.temporal.*; public class Date_Period_Utils implements TimeUtilsBase { + private static final long NANOSECONDS_IN_DAY = 86_400_000_000_000L; + public static TemporalAdjuster day_start = + (Temporal temporal) -> { + return temporal.isSupported(ChronoField.NANO_OF_DAY) + ? temporal.with(ChronoField.NANO_OF_DAY, 0) + : temporal; + }; + + public static TemporalAdjuster day_end = + (Temporal temporal) -> { + return temporal.isSupported(ChronoField.NANO_OF_DAY) + ? 
temporal.with(ChronoField.NANO_OF_DAY, NANOSECONDS_IN_DAY - 1) + : temporal; + }; public static TemporalAdjuster quarter_start = (Temporal temporal) -> { diff --git a/std-bits/table/src/main/java/org/enso/table/read/DelimitedReader.java b/std-bits/table/src/main/java/org/enso/table/read/DelimitedReader.java index 82103f7c2946..ff90cf4facf2 100644 --- a/std-bits/table/src/main/java/org/enso/table/read/DelimitedReader.java +++ b/std-bits/table/src/main/java/org/enso/table/read/DelimitedReader.java @@ -334,7 +334,7 @@ private WithProblems> headersFromRow(String[] row) { private WithProblems> generateDefaultHeaders(int columnCount) { List headerNames = new ArrayList<>(columnCount); for (int i = 0; i < columnCount; ++i) { - headerNames.add(COLUMN_NAME + "_" + (i + 1)); + headerNames.add(COLUMN_NAME + " " + (i + 1)); } return new WithProblems<>(headerNames, Collections.emptyList()); } diff --git a/std-bits/table/src/main/java/org/enso/table/util/NameDeduplicator.java b/std-bits/table/src/main/java/org/enso/table/util/NameDeduplicator.java index 73f8486e9eec..b5fa13f3046e 100644 --- a/std-bits/table/src/main/java/org/enso/table/util/NameDeduplicator.java +++ b/std-bits/table/src/main/java/org/enso/table/util/NameDeduplicator.java @@ -87,7 +87,7 @@ private static String getName(String name, int index) { if (index == 0) { return name; } - return name + "_" + index; + return name + " " + index; } public String[] getInvalidNames() { @@ -134,7 +134,7 @@ public List combineWithPrefix( String name = second.get(i); if (output.get(i) == null) { var prefixed = secondPrefix + name; - output.set(i, makeUnique(secondPrefix + name)); + output.set(i, makeUnique(prefixed)); } } return output; diff --git a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso index 95a75f0b7c52..8f691bdeefdb 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso +++ 
b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso @@ -111,9 +111,9 @@ spec setup = materialized.columns.at 0 . at 0 . should_equal 56.708660 epsilon=0.000001 materialized.columns.at 1 . name . should_equal "Standard Deviation ValueWithNothing" materialized.columns.at 1 . at 0 . should_equal 58.588610 epsilon=0.000001 - materialized.columns.at 2 . name . should_equal "Standard Deviation Value_1" + materialized.columns.at 2 . name . should_equal "Standard Deviation Value 1" materialized.columns.at 2 . at 0 . should_equal 56.697317 epsilon=0.000001 - materialized.columns.at 3 . name . should_equal "Standard Deviation ValueWithNothing_1" + materialized.columns.at 3 . name . should_equal "Standard Deviation ValueWithNothing 1" materialized.columns.at 3 . at 0 . should_equal 58.575554 epsilon=0.000001 Test.specify "should be able to create median, mode and percentile values" (pending = resolve_pending test_selection.advanced_stats) <| @@ -153,7 +153,7 @@ spec setup = materialized.column_count . should_equal 3 materialized.columns.at 0 . name . should_equal "First TextWithNothing" materialized.columns.at 0 . at 0 . should_equal "riwaiqq1io" - materialized.columns.at 1 . name . should_equal "First TextWithNothing_1" + materialized.columns.at 1 . name . should_equal "First TextWithNothing 1" materialized.columns.at 1 . at 0 . should_equal "j4i2ua7uft" materialized.columns.at 2 . name . should_equal "Last ValueWithNothing" materialized.columns.at 2 . at 0 . should_equal -38.56 epsilon=0.000001 @@ -236,7 +236,7 @@ spec setup = materialized.column_count . should_equal 2 materialized.columns.at 0 . name . should_equal "Count Distinct Code" materialized.columns.at 0 . at 0 . should_equal 0 - materialized.columns.at 1 . name . should_equal "Count Distinct Code_1" + materialized.columns.at 1 . name . should_equal "Count Distinct Code 1" materialized.columns.at 1 . at 0 . 
should_equal 0 Test.specify "should be able to compute sum and average of values" <| @@ -527,9 +527,9 @@ spec setup = materialized.columns.at 1 . at idx . should_equal 60.272158 epsilon=0.000001 materialized.columns.at 2 . name . should_equal "Standard Deviation ValueWithNothing" materialized.columns.at 2 . at idx . should_equal 56.798691 epsilon=0.000001 - materialized.columns.at 3 . name . should_equal "Standard Deviation Value_1" + materialized.columns.at 3 . name . should_equal "Standard Deviation Value 1" materialized.columns.at 3 . at idx . should_equal 60.156583 epsilon=0.000001 - materialized.columns.at 4 . name . should_equal "Standard Deviation ValueWithNothing_1" + materialized.columns.at 4 . name . should_equal "Standard Deviation ValueWithNothing 1" materialized.columns.at 4 . at idx . should_equal 56.677714 epsilon=0.000001 Test.specify "should be able to create median values" (pending = resolve_pending test_selection.advanced_stats) <| @@ -735,9 +735,9 @@ spec setup = materialized.columns.at 2 . at idx . should_equal 58.979275 epsilon=0.000001 materialized.columns.at 3 . name . should_equal "Standard Deviation ValueWithNothing" materialized.columns.at 3 . at idx . should_equal 57.561756 epsilon=0.000001 - materialized.columns.at 4 . name . should_equal "Standard Deviation Value_1" + materialized.columns.at 4 . name . should_equal "Standard Deviation Value 1" materialized.columns.at 4 . at idx . should_equal 58.746614 epsilon=0.000001 - materialized.columns.at 5 . name . should_equal "Standard Deviation ValueWithNothing_1" + materialized.columns.at 5 . name . should_equal "Standard Deviation ValueWithNothing 1" materialized.columns.at 5 . at idx . 
should_equal 57.306492 epsilon=0.000001 Test.specify "should be able to create median values" (pending = resolve_pending test_selection.advanced_stats) <| @@ -1366,25 +1366,25 @@ spec setup = Test.specify "should raise a warning when an invalid output name" <| action = table.aggregate [Group_By "Index" ""] on_problems=_ problems = [Invalid_Output_Column_Names.Error [""]] - tester = expect_column_names ["Column_1"] + tester = expect_column_names ["Column 1"] Problems.test_problem_handling action problems tester Test.specify "should raise a warning when a duplicate column name" <| action = table.aggregate [Group_By "Index", Group_By 0] on_problems=_ problems = [Duplicate_Output_Column_Names.Error ["Index"]] - tester = expect_column_names ["Index", "Index_1"] + tester = expect_column_names ["Index", "Index 1"] Problems.test_problem_handling action problems tester Test.specify "should raise a warning when a duplicate column name and rename default names first" <| action = table.aggregate [Group_By "Value", Group_By "Index" "Value"] on_problems=_ problems = [Duplicate_Output_Column_Names.Error ["Value"]] - tester = expect_column_names ["Value_1", "Value"] + tester = expect_column_names ["Value 1", "Value"] Problems.test_problem_handling action problems tester Test.specify "should raise a warning when duplicate column names" <| action = table.aggregate [Sum "Value" new_name="AGG1", Count new_name="AGG1"] on_problems=_ problems = [Duplicate_Output_Column_Names.Error ["AGG1"]] - tester = expect_column_names ["AGG1", "AGG1_1"] + tester = expect_column_names ["AGG1", "AGG1 1"] Problems.test_problem_handling action problems tester Test.specify "should allow partial matches on Count_Distinct" <| diff --git a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso index 6091137b646b..ae7842786fb0 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso +++ 
b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso @@ -21,8 +21,8 @@ spec setup = col1 = ["foo", [1,2,3]] col2 = ["bar", [4,5,6]] col3 = ["Baz", [7,8,9]] - col4 = ["foo_1", [10,11,12]] - col5 = ["foo_2", [13,14,15]] + col4 = ["foo 1", [10,11,12]] + col5 = ["foo 2", [13,14,15]] col6 = ["ab.+123", [16,17,18]] col7 = ["abcd123", [19,20,21]] table_builder [col1, col2, col3, col4, col5, col6, col7] @@ -100,11 +100,11 @@ spec setup = Test.specify "should allow adding a column" <| bar2 = table.get "bar" . rename "bar2" t2 = table.set bar2 - t2.column_names . should_equal ["foo", "bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "bar2"] + t2.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar2"] t2.get "bar2" . to_vector . should_equal [4, 5, 6] t3 = t2.set bar2 "bar3" - t3.column_names . should_equal ["foo", "bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "bar2", "bar3"] + t3.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar2", "bar3"] Test.specify "should not allow illegal column names" <| table.set (table.get "bar") new_name="" . should_fail_with Illegal_Argument @@ -113,11 +113,11 @@ spec setup = Test.specify "should allow replacing a column" <| foo = table.get "bar" . rename "foo" t2 = table.set foo - t2.column_names . should_equal ["foo", "bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] + t2.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t2.get "foo" . to_vector . should_equal [4, 5, 6] t3 = t2.set foo "bar3" - t3.column_names . should_equal ["foo", "bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "bar3"] + t3.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "bar3"] Test.specify "should allow adding a column" <| bar2 = table.get "bar" . 
rename "bar2" @@ -166,7 +166,7 @@ spec setup = Test.group prefix+"Table.column_names" <| Test.specify "should return the names of all columns" <| - table.column_names . should_equal ["foo", "bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] + table.column_names . should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] Test.specify "should allow weird column names in all backends" <| columns = weird_names.map_with_index ix-> name-> diff --git a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso index d45c83be88f9..bfe835b49b98 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso @@ -149,14 +149,14 @@ spec setup = Test.specify "should gracefully handle duplicate aggregate names" <| action = table.cross_tab [] "Key" values=[Count new_name="Agg1", Sum "Value" new_name="Agg1"] on_problems=_ tester table = - table.column_names . should_equal ["x Agg1", "x Agg1_1", "y Agg1", "y Agg1_1", "z Agg1", "z Agg1_1"] + table.column_names . should_equal ["x Agg1", "x Agg1 1", "y Agg1", "y Agg1 1", "z Agg1", "z Agg1 1"] problems = [Duplicate_Output_Column_Names.Error ["x Agg1", "y Agg1", "z Agg1"]] Problems.test_problem_handling action problems tester table3 = table2.rename_columns (Map.from_vector [["Group", "x"]]) action3 = table3.cross_tab ["x"] "Key" on_problems=_ tester3 table = - table.column_names . should_equal ["x", "x_1", "y", "z"] + table.column_names . 
should_equal ["x", "x 1", "y", "z"] problems3 = [Duplicate_Output_Column_Names.Error ["x"]] Problems.test_problem_handling action3 problems3 tester3 diff --git a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso index 94eafeba889b..8ae48df0538e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Integration_Tests.enso @@ -37,7 +37,7 @@ spec setup = t3 = t2.aggregate [Group_By "Letter", Count] t4 = t3.join t1 on="Count" join_kind=Join_Kind.Left_Outer |> materialize |> _.order_by "Letter" - t4.columns.map .name . should_equal ["Letter", "Count", "Right_Count", "Class"] + t4.columns.map .name . should_equal ["Letter", "Count", "Right Count", "Class"] rows = t4.rows . map .to_vector rows.at 0 . should_equal ["A", 4, Nothing, Nothing] rows.at 1 . should_equal ["B", 3, 3, "Z"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso index ba11a64b515e..3f60df4ca93a 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso @@ -94,7 +94,7 @@ spec setup = t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] t2 = t1.cross_join t1 - expect_column_names ["X", "Y", "Right_X", "Right_Y"] t2 + expect_column_names ["X", "Y", "Right X", "Right Y"] t2 t2.row_count . should_equal 4 r = materialize t2 . rows . map .to_vector r.length . 
should_equal 4 @@ -108,26 +108,26 @@ spec setup = False -> r.should_equal expected_rows Test.specify "should rename columns of the right table to avoid duplicates" <| - t1 = table_builder [["X", [1]], ["Y", [5]], ["Right_Y", [10]]] + t1 = table_builder [["X", [1]], ["Y", [5]], ["Right Y", [10]]] t2 = table_builder [["X", ['a']], ["Y", ['d']]] t3 = t1.cross_join t2 - expect_column_names ["X", "Y", "Right_Y", "Right_X", "Right_Y_1"] t3 - Problems.get_attached_warnings t3 . should_equal [Duplicate_Output_Column_Names.Error ["Right_Y"]] + expect_column_names ["X", "Y", "Right Y", "Right X", "Right Y 1"] t3 + Problems.get_attached_warnings t3 . should_equal [Duplicate_Output_Column_Names.Error ["Right Y"]] t3.row_count . should_equal 1 t3.at "X" . to_vector . should_equal [1] t3.at "Y" . to_vector . should_equal [5] - t3.at "Right_Y" . to_vector . should_equal [10] - t3.at "Right_X" . to_vector . should_equal ['a'] - t3.at "Right_Y_1" . to_vector . should_equal ['d'] + t3.at "Right Y" . to_vector . should_equal [10] + t3.at "Right X" . to_vector . should_equal ['a'] + t3.at "Right Y 1" . to_vector . should_equal ['d'] t1.cross_join t2 on_problems=Problem_Behavior.Report_Error . 
should_fail_with Duplicate_Output_Column_Names - expect_column_names ["X", "Y", "Right_Y", "X_1", "Y_1"] (t1.cross_join t2 right_prefix="") + expect_column_names ["X", "Y", "Right Y", "X 1", "Y 1"] (t1.cross_join t2 right_prefix="") - t4 = table_builder [["X", [1]], ["Right_X", [5]]] - expect_column_names ["X", "Y", "Right_Y", "Right_X_1", "Right_X"] (t1.cross_join t4) - expect_column_names ["X", "Right_X", "Right_X_1", "Y", "Right_Y"] (t4.cross_join t1) + t4 = table_builder [["X", [1]], ["Right X", [5]]] + expect_column_names ["X", "Y", "Right Y", "Right X 1", "Right X"] (t1.cross_join t4) + expect_column_names ["X", "Right X", "Right X 1", "Y", "Right Y"] (t4.cross_join t1) Test.specify "should respect the column ordering" <| t1 = table_builder [["X", [100, 2]], ["Y", [4, 5]]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso index b0b56bfffd65..910080636bea 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso @@ -34,9 +34,18 @@ spec setup = Test.group prefix+"Table.join" <| t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] t2 = table_builder [["Z", [2, 3, 2, 4]], ["W", [4, 5, 6, 7]]] - Test.specify "should allow to Inner join on equality of a the first column by default" <| - t3 = t1.join t2 + Test.specify "should by default do a Left Outer join on equality of first column in the left table, correlated with column of the same name in the right one" <| + t3 = table_builder [["Z", [4, 5, 6, 7]], ["X", [2, 3, 2, 4]]] + t4 = t1.join t3 |> materialize |> _.order_by ["X", "Z"] + expect_column_names ["X", "Y", "Z", "Right X"] t4 + t4.at "X" . to_vector . should_equal [1, 2, 2, 3] + t4.at "Y" . to_vector . should_equal [4, 5, 5, 6] + t4.at "Right X" . to_vector . should_equal [Nothing, 2, 2, 3] + t4.at "Z" . to_vector . 
should_equal [Nothing, 4, 6, 5] + + Test.specify "should allow Inner join" <| + t3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals 0 0) expect_column_names ["X", "Y", "Z", "W"] t3 t4 = t3 |> materialize |> _.order_by ["X", "W"] t4.at "X" . to_vector . should_equal [2, 2, 3] @@ -45,23 +54,15 @@ spec setup = t4.at "W" . to_vector . should_equal [4, 6, 5] Test.specify "should allow Full join" <| - t3 = t1.join t2 join_kind=Join_Kind.Full |> materialize |> _.order_by ["X", "W"] + t3 = t1.join t2 join_kind=Join_Kind.Full on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"] expect_column_names ["X", "Y", "Z", "W"] t3 t3.at "X" . to_vector . should_equal [Nothing, 1, 2, 2, 3] t3.at "Y" . to_vector . should_equal [Nothing, 4, 5, 5, 6] t3.at "Z" . to_vector . should_equal [4, Nothing, 2, 2, 3] t3.at "W" . to_vector . should_equal [7, Nothing, 4, 6, 5] - Test.specify "should allow Left Outer join" <| - t4 = t1.join t2 join_kind=Join_Kind.Left_Outer |> materialize |> _.order_by ["X", "W"] - expect_column_names ["X", "Y", "Z", "W"] t4 - t4.at "X" . to_vector . should_equal [1, 2, 2, 3] - t4.at "Y" . to_vector . should_equal [4, 5, 5, 6] - t4.at "Z" . to_vector . should_equal [Nothing, 2, 2, 3] - t4.at "W" . to_vector . should_equal [Nothing, 4, 6, 5] - Test.specify "should allow Right Outer join" <| - t5 = t1.join t2 join_kind=Join_Kind.Right_Outer |> materialize |> _.order_by ["X", "W"] + t5 = t1.join t2 join_kind=Join_Kind.Right_Outer on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X", "W"] expect_column_names ["X", "Y", "Z", "W"] t5 t5.at "X" . to_vector . should_equal [Nothing, 2, 2, 3] t5.at "Y" . to_vector . should_equal [Nothing, 5, 5, 6] @@ -69,12 +70,12 @@ spec setup = t5.at "W" . to_vector . 
should_equal [7, 4, 6, 5] Test.specify "should allow to perform anti-joins" <| - t6 = t1.join t2 join_kind=Join_Kind.Left_Exclusive |> materialize |> _.order_by ["X"] + t6 = t1.join t2 join_kind=Join_Kind.Left_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["X"] t6.columns.map .name . should_equal ["X", "Y"] t6.at "X" . to_vector . should_equal [1] t6.at "Y" . to_vector . should_equal [4] - t7 = t1.join t2 join_kind=Join_Kind.Right_Exclusive |> materialize |> _.order_by ["Z"] + t7 = t1.join t2 join_kind=Join_Kind.Right_Exclusive on=(Join_Condition.Equals 0 0) |> materialize |> _.order_by ["Z"] t7.columns.map .name . should_equal ["Z", "W"] t7.at "Z" . to_vector . should_equal [4] t7.at "W" . to_vector . should_equal [7] @@ -82,39 +83,39 @@ spec setup = t3 = table_builder [["X", [1, 1, 1, 2, 2, 2]], ["Y", ["A", "B", "B", "C", "C", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]] t4 = table_builder [["X", [1, 1, 3, 2, 2, 4]], ["Y", ["B", "B", "C", "C", "D", "A"]], ["Z", [1, 2, 3, 4, 5, 6]]] check_xy_joined r = - expect_column_names ["X", "Y", "Z", "Right_Z"] r + expect_column_names ["X", "Y", "Z", "Right Z"] r r.at "X" . to_vector . should_equal [1, 1, 1, 1, 2, 2] r.at "Y" . to_vector . should_equal ["B", "B", "B", "B", "C", "C"] r.at "Z" . to_vector . should_equal [2, 2, 3, 3, 4, 5] - r.at "Right_Z" . to_vector . should_equal [1, 2, 1, 2, 4, 4] + r.at "Right Z" . to_vector . 
should_equal [1, 2, 1, 2, 4, 4] Test.specify "should allow to join on equality of multiple columns and drop redundant columns if Inner join" <| conditions = [Join_Condition.Equals "Y" "Y", Join_Condition.Equals "X" "X"] - r = t3.join t4 on=conditions |> materialize |> _.order_by ["X", "Y", "Z", "Right_Z"] + r = t3.join t4 join_kind=Join_Kind.Inner on=conditions |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] check_xy_joined r [Join_Kind.Full, Join_Kind.Left_Outer, Join_Kind.Right_Outer].each kind-> r2 = t3.join t4 join_kind=kind on=conditions - expect_column_names ["X", "Y", "Z", "Right_X", "Right_Y", "Right_Z"] r2 + expect_column_names ["X", "Y", "Z", "Right X", "Right Y", "Right Z"] r2 Test.specify "should support same-name column join shorthand" <| - r = t3.join t4 on=["X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right_Z"] + r = t3.join t4 join_kind=Join_Kind.Inner on=["X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] check_xy_joined r Test.specify "should allow to join on text equality ignoring case" <| t1 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] t2 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] - r1 = t1.join t2 + r1 = t1.join t2 join_kind=Join_Kind.Inner expect_column_names ["X", "Y", "Z"] r1 r1 . at "X" . to_vector . should_equal ["a"] r1 . at "Y" . to_vector . should_equal [1] r1 . at "Z" . to_vector . should_equal [2] - r2 = t1.join t2 on=(Join_Condition.Equals_Ignore_Case "X") |> materialize |> _.order_by ["Z"] - expect_column_names ["X", "Y", "Right_X", "Z"] r2 + r2 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals_Ignore_Case "X") |> materialize |> _.order_by ["Z"] + expect_column_names ["X", "Y", "Right X", "Z"] r2 r2 . at "X" . to_vector . should_equal ["a", "a", "B"] - r2 . at "Right_X" . to_vector . should_equal ["A", "a", "b"] + r2 . at "Right X" . to_vector . should_equal ["A", "a", "b"] r2 . at "Y" . to_vector . should_equal [1, 1, 2] r2 . at "Z" . to_vector . 
should_equal [1, 2, 3] @@ -123,16 +124,16 @@ spec setup = t1 = table_builder [["X", ['s\u0301', 'S\u0301']], ["Y", [1, 2]]] t2 = table_builder [["X", ['s', 'S', 'ś']], ["Z", [1, 2, 3]]] - r1 = t1.join t2 + r1 = t1.join t2 join_kind=Join_Kind.Inner expect_column_names ["X", "Y", "Z"] r1 r1 . at "X" . to_vector . should_equal ['ś'] r1 . at "Y" . to_vector . should_equal [1] r1 . at "Z" . to_vector . should_equal [3] - r2 = t1.join t2 on=(Join_Condition.Equals_Ignore_Case "X") |> materialize |> _.order_by ["Y"] - expect_column_names ["X", "Y", "Right_X", "Z"] r2 + r2 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals_Ignore_Case "X") |> materialize |> _.order_by ["Y"] + expect_column_names ["X", "Y", "Right X", "Z"] r2 r2 . at "X" . to_vector . should_equal ['s\u0301', 'S\u0301'] - r2 . at "Right_X" . to_vector . should_equal ['ś', 'ś'] + r2 . at "Right X" . to_vector . should_equal ['ś', 'ś'] r2 . at "Y" . to_vector . should_equal [1, 2] r2 . at "Z" . to_vector . should_equal [3, 3] @@ -141,7 +142,7 @@ spec setup = t1 = table_builder [["X", [1, 2]], ["Y", [10, 20]]] t2 = table_builder [["X", [2.0, 2.1, 0.0]], ["Z", [1, 2, 3]]] - r1 = t1.join t2 + r1 = t1.join t2 join_kind=Join_Kind.Inner expect_column_names ["X", "Y", "Z"] r1 r1 . at "X" . to_vector . should_equal [2] r1 . at "Y" . to_vector . should_equal [20] @@ -152,14 +153,14 @@ spec setup = t1 = table_builder [["X", [My_Type.Value 1 2, My_Type.Value 2 3]], ["Y", [1, 2]]] t2 = table_builder [["X", [My_Type.Value 5 0, My_Type.Value 2 1]], ["Z", [10, 20]]] - r1 = t1.join t2 |> materialize |> _.order_by ["Y"] + r1 = t1.join t2 join_kind=Join_Kind.Inner |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "Z"] r1 r1 . at "X" . to_vector . should_equal [My_Type.Value 1 2, My_Type.Value 2 3] ## We don't keep the other column, because the values in both are equal. However, with custom comparators, they may not be the same values, so we may consider keeping it. 
For not it is dropped though for consistency. - # r1 . at "Right_X" . to_vector . should_equal [My_Type.Value 1 2, My_Type.Value 2 3] + # r1 . at "Right X" . to_vector . should_equal [My_Type.Value 1 2, My_Type.Value 2 3] r1 . at "Y" . to_vector . should_equal [1, 2] r1 . at "Z" . to_vector . should_equal [20, 10] @@ -167,7 +168,7 @@ spec setup = t1 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] t2 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] - r1 = t1.join t2 on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["X", "Z"] + r1 = t1.join join_kind=Join_Kind.Inner t2 on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["X", "Z"] expect_column_names ["X", "Y", "lower", "upper", "Z"] r1 r1 . at "X" . to_vector . should_equal [1, 10, 10, 12, 12] r1 . at "Y" . to_vector . should_equal [1, 2, 2, 3, 3] @@ -179,7 +180,7 @@ spec setup = t1 = table_builder [["X", ["a", "b", "c"]], ["Y", [1, 2, 3]]] t2 = table_builder [["lower", ["a", "b"]], ["upper", ["a", "ccc"]], ["Z", [10, 20]]] - r1 = t1.join t2 on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["X", "Z"] + r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["X", "Z"] expect_column_names ["X", "Y", "lower", "upper", "Z"] r1 r1 . at "X" . to_vector . should_equal ["a", "b", "c"] r1 . at "Y" . to_vector . should_equal [1, 2, 3] @@ -192,7 +193,7 @@ spec setup = t1 = table_builder [["X", ['s\u0301', 's']], ["Y", [1, 2]]] t2 = table_builder [["lower", ['s', 'ś']], ["upper", ['sa', 'ś']], ["Z", [10, 20]]] - r1 = t1.join t2 on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["Y"] + r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "lower", "upper", "Z"] r1 r1 . at "X" . to_vector . 
should_equal ['s\u0301', 's'] r1 . at "Y" . to_vector . should_equal [1, 2] @@ -205,7 +206,7 @@ spec setup = t1 = table_builder [["X", [My_Type.Value 20 30, My_Type.Value 1 2]], ["Y", [1, 2]]] t2 = table_builder [["lower", [My_Type.Value 3 0, My_Type.Value 10 10]], ["upper", [My_Type.Value 2 1, My_Type.Value 100 0]], ["Z", [10, 20]]] - r1 = t1.join t2 on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["Z"] + r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "lower" "upper") |> materialize |> _.order_by ["Z"] expect_column_names ["X", "Y", "lower", "upper", "Z"] r1 r1 . at "X" . to_vector . to_text . should_equal "[(My_Type.Value 1 2), (My_Type.Value 20 30)]" r1 . at "Y" . to_vector . should_equal [2, 1] @@ -217,25 +218,25 @@ spec setup = t1 = table_builder [["X", [1, 12, 12, 0]], ["Y", [1, 2, 3, 4]], ["Z", ["a", "A", "a", "ą"]], ["W", [1, 2, 3, 4]]] t2 = table_builder [["X", [12, 12, 1]], ["l", [0, 100, 100]], ["u", [10, 100, 100]], ["Z", ["A", "A", "A"]], ["W'", [10, 20, 30]]] - r1 = t1.join t2 on=[Join_Condition.Between "Y" "l" "u", Join_Condition.Equals_Ignore_Case "Z" "Z", Join_Condition.Equals "X" "X"] |> materialize |> _.order_by ["Y"] - expect_column_names ["X", "Y", "Z", "W", "l", "u", "Right_Z", "W'"] r1 + r1 = t1.join t2 join_kind=Join_Kind.Inner on=[Join_Condition.Between "Y" "l" "u", Join_Condition.Equals_Ignore_Case "Z" "Z", Join_Condition.Equals "X" "X"] |> materialize |> _.order_by ["Y"] + expect_column_names ["X", "Y", "Z", "W", "l", "u", "Right Z", "W'"] r1 r1.at "X" . to_vector . should_equal [12, 12] r1.at "Y" . to_vector . should_equal [2, 3] r1.at "Z" . to_vector . should_equal ["A", "a"] r1.at "W" . to_vector . should_equal [2, 3] r1.at "l" . to_vector . should_equal [0, 0] r1.at "u" . to_vector . should_equal [10, 10] - r1.at "Right_Z" . to_vector . should_equal ["A", "A"] + r1.at "Right Z" . to_vector . should_equal ["A", "A"] r1.at "W'" . to_vector . 
should_equal [10, 10] Test.specify "should work fine if the same condition is specified multiple times" <| - r = t3.join t4 on=["X", "X", "Y", "X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right_Z"] + r = t3.join t4 join_kind=Join_Kind.Inner on=["X", "X", "Y", "X", "Y"] |> materialize |> _.order_by ["X", "Y", "Z", "Right Z"] check_xy_joined r t5 = table_builder [["X", [1, 10, 12]], ["Y", [1, 2, 3]]] t6 = table_builder [["lower", [1, 10, 8, 12]], ["upper", [1, 12, 30, 0]], ["Z", [1, 2, 3, 4]]] - r1 = t5.join t6 on=[Join_Condition.Between "X" "lower" "upper", Join_Condition.Between "X" "lower" "upper", Join_Condition.Between "X" "lower" "upper"] |> materialize |> _.order_by ["X", "Z"] + r1 = t5.join t6 join_kind=Join_Kind.Inner on=[Join_Condition.Between "X" "lower" "upper", Join_Condition.Between "X" "lower" "upper", Join_Condition.Between "X" "lower" "upper"] |> materialize |> _.order_by ["X", "Z"] r1 . at "X" . to_vector . should_equal [1, 10, 10, 12, 12] r1 . at "Y" . to_vector . should_equal [1, 2, 2, 3, 3] r1 . at "Z" . to_vector . should_equal [1, 2, 3, 2, 3] @@ -243,64 +244,71 @@ spec setup = t7 = table_builder [["X", ["a", "B"]], ["Y", [1, 2]]] t8 = table_builder [["X", ["A", "a", "b"]], ["Z", [1, 2, 3]]] - r2 = t7.join t8 on=[Join_Condition.Equals_Ignore_Case "X", Join_Condition.Equals_Ignore_Case "X", Join_Condition.Equals_Ignore_Case "X" "X"] |> materialize |> _.order_by ["Z"] + r2 = t7.join t8 join_kind=Join_Kind.Inner on=[Join_Condition.Equals_Ignore_Case "X", Join_Condition.Equals_Ignore_Case "X", Join_Condition.Equals_Ignore_Case "X" "X"] |> materialize |> _.order_by ["Z"] r2 . at "X" . to_vector . should_equal ["a", "a", "B"] - r2 . at "Right_X" . to_vector . should_equal ["A", "a", "b"] + r2 . at "Right X" . to_vector . should_equal ["A", "a", "b"] r2 . at "Z" . to_vector . 
should_equal [1, 2, 3] Test.specify "should correctly handle joining a table with itself" <| t1 = table_builder [["X", [0, 1, 2, 3, 2]], ["Y", [1, 2, 3, 4, 100]], ["A", ["B", "C", "D", "E", "X"]]] - t2 = t1.join t1 on=(Join_Condition.Equals left="X" right="Y") |> materialize |> _.order_by ["X", "Y"] + t2 = t1.join t1 join_kind=Join_Kind.Inner on=(Join_Condition.Equals left="X" right="Y") |> materialize |> _.order_by ["X", "Y"] - expect_column_names ["X", "Y", "A", "Right_X", "Right_Y", "Right_A"] t2 + expect_column_names ["X", "Y", "A", "Right X", "Right Y", "Right A"] t2 t2.at "X" . to_vector . should_equal [1, 2, 2, 3] - t2.at "Right_Y" . to_vector . should_equal [1, 2, 2, 3] + t2.at "Right Y" . to_vector . should_equal [1, 2, 2, 3] t2.at "Y" . to_vector . should_equal [2, 3, 100, 4] t2.at "A" . to_vector . should_equal ["C", "D", "X", "E"] - t2.at "Right_X" . to_vector . should_equal [0, 1, 1, 2] - t2.at "Right_A" . to_vector . should_equal ["B", "C", "C", "D"] + t2.at "Right X" . to_vector . should_equal [0, 1, 1, 2] + t2.at "Right A" . to_vector . should_equal ["B", "C", "C", "D"] - t3 = t1.join t1 join_kind=Join_Kind.Full on=(Join_Condition.Equals left="X" right="Y") |> materialize |> _.order_by ["X", "Y", "Right_X"] - expect_column_names ["X", "Y", "A", "Right_X", "Right_Y", "Right_A"] t3 + t3 = t1.join t1 join_kind=Join_Kind.Full on=(Join_Condition.Equals left="X" right="Y") |> materialize |> _.order_by ["X", "Y", "Right X"] + expect_column_names ["X", "Y", "A", "Right X", "Right Y", "Right A"] t3 t3.at "X" . to_vector . should_equal [Nothing, Nothing, 0, 1, 2, 2, 3] - t3.at "Right_Y" . to_vector . should_equal [100, 4, Nothing, 1, 2, 2, 3] + t3.at "Right Y" . to_vector . should_equal [100, 4, Nothing, 1, 2, 2, 3] t3.at "Y" . to_vector . should_equal [Nothing, Nothing, 1, 2, 3, 100, 4] t3.at "A" . to_vector . should_equal [Nothing, Nothing, "B", "C", "D", "X", "E"] - t3.at "Right_X" . to_vector . should_equal [2, 3, Nothing, 0, 1, 1, 2] - t3.at "Right_A" . 
to_vector . should_equal ["X", "E", Nothing, "B", "C", "C", "D"] + t3.at "Right X" . to_vector . should_equal [2, 3, Nothing, 0, 1, 1, 2] + t3.at "Right A" . to_vector . should_equal ["X", "E", Nothing, "B", "C", "C", "D"] t4 = table_builder [["X", [Nothing, "a", "B"]], ["Y", ["ą", "b", Nothing]], ["Z", [1, 2, 3]]] - t5 = t4.join t4 on=(Join_Condition.Equals_Ignore_Case left="Y" right="X") |> materialize |> _.order_by ["Y"] - expect_column_names ["X", "Y", "Z", "Right_X", "Right_Y", "Right_Z"] t5 + t5 = t4.join t4 join_kind=Join_Kind.Inner on=(Join_Condition.Equals_Ignore_Case left="Y" right="X") |> materialize |> _.order_by ["Y"] + expect_column_names ["X", "Y", "Z", "Right X", "Right Y", "Right Z"] t5 # TODO enable once we handle nothing properly # t5.at "Y" . to_vector . should_equal [Nothing, "b"] - # t5.at "Right_X" . to_vector . should_equal [Nothing, "B"] + # t5.at "Right X" . to_vector . should_equal [Nothing, "B"] # t5.at "X" . to_vector . should_equal ["B", "a"] # t5.at "Z" . to_vector . should_equal [3, 2] - # t5.at "Right_Y" . to_vector . should_equal ["ą", Nothing] - # t5.at "Right_Z" . to_vector . should_equal [1, 3] + # t5.at "Right Y" . to_vector . should_equal ["ą", Nothing] + # t5.at "Right Z" . to_vector . should_equal [1, 3] Test.specify "should gracefully handle unmatched columns in Join_Conditions" <| t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] t2 = table_builder [["Z", [2, 1]], ["W", [5, 6]]] + # Report error if the default fails - the right table does not have a column with same name as first column of left one: + r1 = t1.join t2 + r1.should_fail_with Missing_Input_Columns + r1.catch.criteria.should_equal ["X"] + r1.catch.to_display_text.should_equal "The criteria [X] did not match any columns in the right table." 
+ conditions = [Join_Condition.Equals "foo" 42, Join_Condition.Equals "X" -3, Join_Condition.Equals -1 "baz"] - r1 = t1.join t2 on=conditions on_problems=Problem_Behavior.Ignore + r2 = t1.join t2 on=conditions on_problems=Problem_Behavior.Ignore ## We have both - Column_Indexes_Out_Of_Range.Error [42, -3] - Missing_Input_Columns.Error ["foo", "baz"] here, but we can throw only one error. I think column names error will be more useful, so I'd prioritize it. - r1.should_fail_with Missing_Input_Columns - r1.catch.criteria.should_equal ["foo", "baz"] + r2.should_fail_with Missing_Input_Columns + r2.catch.criteria.should_equal ["foo"] + r2.catch.to_display_text.should_equal "The criteria [foo] did not match any columns in the left table." - r2 = t1.join t2 on=[Join_Condition.Equals 42 0] on_problems=Problem_Behavior.Ignore - r2.should_fail_with Column_Indexes_Out_Of_Range - r2.catch.indexes.should_equal [42] + r3 = t1.join t2 on=[Join_Condition.Equals 42 0] on_problems=Problem_Behavior.Ignore + r3.should_fail_with Column_Indexes_Out_Of_Range + r3.catch.indexes.should_equal [42] Test.specify "should report Invalid_Value_Type if non-text columns are provided to Equals_Ignore_Case" <| t1 = table_builder [["X", ["1", "2", "c"]], ["Y", [1, 2, 3]]] @@ -317,7 +325,7 @@ spec setup = Test.specify "should report Invalid_Value_Type if incompatible types are correlated" <| t1 = table_builder [["X", ["1", "2", "c"]]] - t2 = table_builder [["Y", [1, 2, 3]]] + t2 = table_builder [["X", [1, 2, 3]]] r1 = t1.join t2 on_problems=Problem_Behavior.Ignore r1.should_fail_with Invalid_Value_Type @@ -334,7 +342,7 @@ spec setup = t1 = table_builder [["X", [1.5, 2.0, 2.00000000001]], ["Y", [10, 20, 30]]] t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] - action1 = t1.join t2 on=(Join_Condition.Equals "X" "Z") on_problems=_ + action1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Z") on_problems=_ tester1 table = expect_column_names ["X", "Y", "Z", "W"] table t1 
= table.order_by ["Y", "W"] @@ -345,7 +353,7 @@ spec setup = problems1 = [Floating_Point_Equality.Error "X", Floating_Point_Equality.Error "Z"] Problems.test_problem_handling action1 problems1 tester1 - action2 = t1.join t2 on=(Join_Condition.Equals "X" "W") on_problems=_ + action2 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "W") on_problems=_ tester2 table = expect_column_names ["X", "Y", "Z", "W"] table t1 = table.order_by ["Y", "W"] @@ -363,7 +371,7 @@ spec setup = if setup.supports_custom_objects then t1 = table_builder [["X", [My_Type.Value 1 2, 2.0, 2]], ["Y", [10, 20, 30]]] t2 = table_builder [["Z", [2.0, 1.5, 2.0]], ["W", [1, 2, 3]]] - action3 = t1.join t2 on=(Join_Condition.Equals "X" "Z") on_problems=_ + action3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Z") on_problems=_ tester3 table = expect_column_names ["X", "Y", "Z", "W"] table t1 = table.order_by ["Y", "W"] @@ -378,7 +386,7 @@ spec setup = t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] - r1 = t1.join t2 |> materialize |> _.order_by ["Y"] + r1 = t1.join t2 join_kind=Join_Kind.Inner |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "Z"] r1 r1.at "X" . to_vector . should_equal [Nothing, Nothing, "a", Nothing, Nothing] r1.at "Y" . to_vector . should_equal [1, 1, 2, 3, 3] @@ -388,10 +396,10 @@ spec setup = t1 = table_builder [["X", ["A", Nothing, "a", Nothing, "ą"]], ["Y", [0, 1, 2, 3, 4]]] t2 = table_builder [["X", ["a", Nothing, Nothing]], ["Z", [10, 20, 30]]] - r1 = t1.join t2 on=(Join_Condition.Equals_Ignore_Case "X") |> materialize |> _.order_by ["Y"] - expect_column_names ["X", "Y", "Right_X", "Z"] r1 + r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals_Ignore_Case "X") |> materialize |> _.order_by ["Y"] + expect_column_names ["X", "Y", "Right X", "Z"] r1 r1.at "X" . to_vector . 
should_equal ["A", Nothing, Nothing, "a", Nothing, Nothing] - r1.at "Right_X" . to_vector . should_equal ["a", Nothing, Nothing, "a", Nothing, Nothing] + r1.at "Right X" . to_vector . should_equal ["a", Nothing, Nothing, "a", Nothing, Nothing] r1.at "Y" . to_vector . should_equal [0, 1, 1, 2, 3, 3] r1.at "Z" . to_vector . should_equal [10, 20, 30, 10, 20, 30] @@ -399,7 +407,7 @@ spec setup = t1 = table_builder [["X", [1, Nothing, 2, Nothing]], ["Y", [0, 1, 2, 3]]] t2 = table_builder [["l", [Nothing, 0, 1]], ["u", [100, 10, Nothing]], ["Z", [10, 20, 30]]] - r1 = t1.join t2 on=(Join_Condition.Between "X" "l" "u") |> materialize |> _.order_by ["Y"] + r1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Between "X" "l" "u") |> materialize |> _.order_by ["Y"] expect_column_names ["X", "Y", "l", "u", "Z"] r1 r1.at "X" . to_vector . should_equal [1, 2] r1.at "Y" . to_vector . should_equal [0, 2] @@ -408,51 +416,55 @@ spec setup = r1.at "Z" . to_vector . should_equal [20, 20] Test.specify "should rename columns of the right table to avoid duplicates" <| - t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]], ["Right_Y", [5, 6]]] + t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]], ["Right Y", [5, 6]]] t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]]] - t3 = t1.join t2 on=(Join_Condition.Equals "X" "Y") |> materialize |> _.order_by ["Right_X"] - Problems.get_attached_warnings t3 . should_equal [Duplicate_Output_Column_Names.Error ["Right_Y"]] - expect_column_names ["X", "Y", "Right_Y", "Right_X", "Right_Y_1"] t3 + t3 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Y") |> materialize |> _.order_by ["Right X"] + Problems.get_attached_warnings t3 . should_equal [Duplicate_Output_Column_Names.Error ["Right Y"]] + t3.column_names.should_equal ["X", "Y", "Right Y", "Right X", "Right Y 1"] t3.at "X" . to_vector . should_equal [2, 2] t3.at "Y" . to_vector . should_equal [4, 4] - t3.at "Right_Y" . to_vector . should_equal [6, 6] - t3.at "Right_X" . 
to_vector . should_equal [1, 2] - t3.at "Right_Y_1" . to_vector . should_equal [2, 2] + t3.at "Right Y" . to_vector . should_equal [6, 6] + t3.at "Right X" . to_vector . should_equal [1, 2] + t3.at "Right Y 1" . to_vector . should_equal [2, 2] - err1 = t1.join t2 on=(Join_Condition.Equals "X" "Y") on_problems=Problem_Behavior.Report_Error + err1 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "Y") on_problems=Problem_Behavior.Report_Error err1.should_fail_with Duplicate_Output_Column_Names - err1.catch.column_names . should_equal ["Right_Y"] + err1.catch.column_names . should_equal ["Right Y"] - t4 = table_builder [["Right_X", [1, 1]], ["X", [1, 2]], ["Y", [3, 4]], ["Right_Y_2", [2, 2]]] - t5 = table_builder [["Right_X", [2, 1]], ["X", [2, 2]], ["Y", [2, 2]], ["Right_Y", [2, 2]], ["Right_Y_1", [2, 2]], ["Right_Y_4", [2, 2]]] + t4 = table_builder [["Right X", [1, 1]], ["X", [1, 2]], ["Y", [3, 4]], ["Right Y 2", [2, 2]]] + t5 = table_builder [["Right X", [2, 1]], ["X", [2, 2]], ["Y", [2, 2]], ["Right Y", [2, 2]], ["Right Y 1", [2, 2]], ["Right Y 4", [2, 2]]] t6 = t4.join t5 on=(Join_Condition.Equals "X" "Y") - expect_column_names ["Right_X", "X", "Y", "Right_Y_2"]+["Right_Right_X", "Right_X_1", "Right_Y_3", "Right_Y", "Right_Y_1", "Right_Y_4"] t6 + t6.column_names.should_equal ["Right X", "X", "Y", "Right Y 2"]+["Right Right X", "Right X 1", "Right Y 3", "Right Y", "Right Y 1", "Right Y 4"] action = t1.join t2 right_prefix="" on_problems=_ - tester = expect_column_names ["X", "Y", "Right_Y", "Y_1"] - problems = [Duplicate_Output_Column_Names.Error ["Y"]] + tester = expect_column_names ["X", "Y", "Right Y", "X 1", "Y 1"] + problems = [Duplicate_Output_Column_Names.Error ["X", "Y"]] Problems.test_problem_handling action problems tester - t8 = t1.join t2 right_prefix="P" - expect_column_names ["X", "Y", "Right_Y", "PY"] t8 + action_2 = t1.join t2 join_kind=Join_Kind.Inner right_prefix="" on_problems=_ + tester_2 = expect_column_names ["X", "Y", 
"Right Y", "Y 1"] + problems_2 = [Duplicate_Output_Column_Names.Error ["Y"]] + Problems.test_problem_handling action_2 problems_2 tester_2 + + t8 = t1.join t2 join_kind=Join_Kind.Inner right_prefix="P" + t8.column_names.should_equal ["X", "Y", "Right Y", "PY"] Test.specify "should warn about renamed columns" <| t1 = table_builder [["X", [1, 2]], ["Y", [3, 4]]] - t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]], ["Right_Y", [2, 44]]] + t2 = table_builder [["X", [2, 1]], ["Y", [2, 2]], ["Right Y", [2, 44]]] action1 = t1.join t2 on=(Join_Condition.Equals "X" "Y") on_problems=_ tester1 table = - expect_column_names ["X", "Y", "Right_X", "Right_Y_1", "Right_Y"] table - problems1 = [Duplicate_Output_Column_Names.Error ["Right_Y"]] + expect_column_names ["X", "Y", "Right X", "Right Y 1", "Right Y"] table + problems1 = [Duplicate_Output_Column_Names.Error ["Right Y"]] Problems.test_problem_handling action1 problems1 tester1 - - action2 = t1.join t2 on=(Join_Condition.Equals "X" "X") on_problems=_ + action2 = t1.join t2 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "X" "X") on_problems=_ tester2 table = - expect_column_names ["X", "Y", "Right_Y_1", "Right_Y"] table - problems2 = [Duplicate_Output_Column_Names.Error ["Right_Y"]] + expect_column_names ["X", "Y", "Right Y 1", "Right Y"] table + problems2 = [Duplicate_Output_Column_Names.Error ["Right Y"]] Problems.test_problem_handling action2 problems2 tester2 Test.specify "should pass dataflow errors through" <| @@ -527,13 +539,13 @@ spec setup = t7 = table_builder [["A", [Nothing, 2]], ["B", [Nothing, 3]]] t8 = table_builder [["C", [2, 3]], ["D", [4, 5]]] - t9 = t7.join t8 join_kind=Join_Kind.Inner + t9 = t7.join t8 on=[Join_Condition.Equals "A" "C"] join_kind=Join_Kind.Inner r9 = materialize t9 . order_by ["A", "B", "D"] . rows . map .to_vector within_table t9 <| r9.length . should_equal 1 r9.at 0 . 
should_equal [2, 3, 2, 4] - t10 = t7.join t8 join_kind=Join_Kind.Full + t10 = t7.join t8 on=[Join_Condition.Equals "A" "C"] join_kind=Join_Kind.Full r10 = materialize t10 . order_by ["A", "C"] . rows . map .to_vector within_table t10 <| r10.length . should_equal 3 @@ -541,27 +553,27 @@ spec setup = r10.at 1 . should_equal [Nothing, Nothing, 3, 5] r10.at 2 . should_equal [2, 3, 2, 4] - t10_2 = t7.join t8 join_kind=Join_Kind.Left_Outer + t10_2 = t7.join t8 on=[Join_Condition.Equals "A" "C"] join_kind=Join_Kind.Left_Outer r10_2 = materialize t10_2 . order_by ["A", "C"] . rows . map .to_vector within_table t10_2 <| r10_2.length . should_equal 2 r10_2.at 0 . should_equal [Nothing, Nothing, Nothing, Nothing] r10_2.at 1 . should_equal [2, 3, 2, 4] - t10_3 = t7.join t8 join_kind=Join_Kind.Right_Outer + t10_3 = t7.join t8 on=[Join_Condition.Equals "A" "C"] join_kind=Join_Kind.Right_Outer r10_3 = materialize t10_3 . order_by ["A", "C"] . rows . map .to_vector within_table t10_3 <| r10_3.length . should_equal 2 r10_3.at 0 . should_equal [Nothing, Nothing, 3, 5] r10_3.at 1 . should_equal [2, 3, 2, 4] - t11 = t7.join t8 join_kind=Join_Kind.Left_Exclusive + t11 = t7.join t8 on=[Join_Condition.Equals "A" "C"] join_kind=Join_Kind.Left_Exclusive r11 = materialize t11 . rows . map .to_vector within_table t11 <| r11.length . should_equal 1 r11.at 0 . should_equal [Nothing, Nothing] - t12 = t7.join t8 join_kind=Join_Kind.Right_Exclusive + t12 = t7.join t8 on=[Join_Condition.Equals "A" "C"] join_kind=Join_Kind.Right_Exclusive r12 = materialize t12 . rows . map .to_vector within_table t12 <| r12.length . should_equal 1 @@ -585,18 +597,18 @@ spec setup = t1_2 = t1.set "10*[X]+1" new_name="A" t1_3 = t1.set "[X]+20" new_name="B" - t2 = t1_2.join t1_3 on=(Join_Condition.Equals "A" "B") + t2 = t1_2.join t1_3 join_kind=Join_Kind.Inner on=(Join_Condition.Equals "A" "B") t2.at "A" . to_vector . should_equal [21] t2.at "X" . to_vector . should_equal [2] t2.at "B" . to_vector . 
should_equal [21] - t2.at "Right_X" . to_vector . should_equal [1] + t2.at "Right X" . to_vector . should_equal [1] t4 = table_builder [["X", [1, 2, 3]], ["Y", [10, 20, 30]]] t5 = table_builder [["X", [5, 7, 1]], ["Z", [100, 200, 300]]] t4_2 = t4.set "2*[X]+1" new_name="C" t6 = t4_2.join t5 on=(Join_Condition.Equals "C" "X") join_kind=Join_Kind.Inner - expect_column_names ["X", "Y", "C", "Right_X", "Z"] t6 + expect_column_names ["X", "Y", "C", "Right X", "Z"] t6 r2 = materialize t6 . order_by ["Y"] . rows . map .to_vector r2.length . should_equal 2 r2.at 0 . should_equal [2, 20, 5, 5, 100] @@ -607,7 +619,7 @@ spec setup = t2 = table_builder [["X", ["Ć", "A", "b"]], ["Z", [100, 200, 300]]] t3 = t1.join t2 on=(Join_Condition.Equals_Ignore_Case "X") join_kind=Join_Kind.Full - expect_column_names ["X", "Y", "Right_X", "Z"] t3 + expect_column_names ["X", "Y", "Right X", "Z"] t3 r = materialize t3 . order_by ["Y"] . rows . map .to_vector r.length . should_equal 4 r.at 0 . should_equal [Nothing, Nothing, "Ć", 100] @@ -620,7 +632,7 @@ spec setup = t4_2 = t4.set "2*[X]+1" new_name="C" t6 = t4_2.join t5 on=(Join_Condition.Equals "C" "X") join_kind=Join_Kind.Full - expect_column_names ["X", "Y", "C", "Right_X", "Z"] t6 + expect_column_names ["X", "Y", "C", "Right X", "Z"] t6 r2 = materialize t6 . order_by ["Y"] . rows . map .to_vector r2.length . should_equal 4 r2.at 0 . should_equal [Nothing, Nothing, Nothing, 1, 300] @@ -632,8 +644,8 @@ spec setup = t4_4 = t4_3.set (t4_3.at "X" . fill_nothing 7) new_name="C" t7 = t4_4.join t5 on=(Join_Condition.Equals "C" "X") join_kind=Join_Kind.Full within_table t7 <| - expect_column_names ["X", "Y", "C", "Right_X", "Z"] t7 - r3 = materialize t7 . order_by ["Y", "Right_X"] . rows . map .to_vector + expect_column_names ["X", "Y", "C", "Right X", "Z"] t7 + r3 = materialize t7 . order_by ["Y", "Right X"] . rows . map .to_vector r3.length . should_equal 5 r3.at 0 . should_equal [Nothing, Nothing, Nothing, 1, 300] r3.at 1 . 
should_equal [Nothing, Nothing, Nothing, 5, 100] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso index b186cbe73a9f..e8fb5c328fee 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso @@ -148,33 +148,33 @@ spec setup = t3.at "W" . to_vector . should_equal ['b', Nothing, Nothing] Test.specify "should rename columns of the right table to avoid duplicates" <| - t1 = table_builder [["X", [1, 2]], ["Y", [5, 6]], ["Right_Y", [7, 8]]] + t1 = table_builder [["X", [1, 2]], ["Y", [5, 6]], ["Right Y", [7, 8]]] t2 = table_builder [["X", ['a']], ["Y", ['d']]] t3 = t1.zip t2 keep_unmatched=True - expect_column_names ["X", "Y", "Right_Y", "Right_X", "Right_Y_1"] t3 - Problems.get_attached_warnings t3 . should_equal [Duplicate_Output_Column_Names.Error ["Right_Y"]] + expect_column_names ["X", "Y", "Right Y", "Right X", "Right Y 1"] t3 + Problems.get_attached_warnings t3 . should_equal [Duplicate_Output_Column_Names.Error ["Right Y"]] t3.row_count . should_equal 2 t3.at "X" . to_vector . should_equal [1, 2] t3.at "Y" . to_vector . should_equal [5, 6] - t3.at "Right_Y" . to_vector . should_equal [7, 8] - t3.at "Right_X" . to_vector . should_equal ['a', Nothing] - t3.at "Right_Y_1" . to_vector . should_equal ['d', Nothing] + t3.at "Right Y" . to_vector . should_equal [7, 8] + t3.at "Right X" . to_vector . should_equal ['a', Nothing] + t3.at "Right Y 1" . to_vector . should_equal ['d', Nothing] t1.zip t2 keep_unmatched=False on_problems=Problem_Behavior.Report_Error . 
should_fail_with Duplicate_Output_Column_Names - expect_column_names ["X", "Y", "Right_Y", "X_1", "Y_1"] (t1.zip t2 right_prefix="") + expect_column_names ["X", "Y", "Right Y", "X 1", "Y 1"] (t1.zip t2 right_prefix="") - t4 = table_builder [["X", [1]], ["Right_X", [5]]] - expect_column_names ["X", "Y", "Right_Y", "Right_X_1", "Right_X"] (t1.zip t4) - expect_column_names ["X", "Right_X", "Right_X_1", "Y", "Right_Y"] (t4.zip t1) + t4 = table_builder [["X", [1]], ["Right X", [5]]] + expect_column_names ["X", "Y", "Right Y", "Right X 1", "Right X"] (t1.zip t4) + expect_column_names ["X", "Right X", "Right X 1", "Y", "Right Y"] (t4.zip t1) Test.specify "should report both row count mismatch and duplicate column warnings at the same time" <| - t1 = table_builder [["X", [1, 2]], ["Right_X", [5, 6]]] + t1 = table_builder [["X", [1, 2]], ["Right X", [5, 6]]] t2 = table_builder [["X", ['a']], ["Z", ['d']]] t3 = t1.zip t2 - expected_problems = [Row_Count_Mismatch.Error 2 1, Duplicate_Output_Column_Names.Error ["Right_X"]] + expected_problems = [Row_Count_Mismatch.Error 2 1, Duplicate_Output_Column_Names.Error ["Right X"]] Problems.get_attached_warnings t3 . should_contain_the_same_elements_as expected_problems Test.specify "should allow to zip the table with itself" <| @@ -183,12 +183,12 @@ spec setup = the Database backend. t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] t2 = t1.zip t1 - expect_column_names ["X", "Y", "Right_X", "Right_Y"] t2 + expect_column_names ["X", "Y", "Right X", "Right Y"] t2 t2.row_count . should_equal 2 t2.at "X" . to_vector . should_equal [1, 2] t2.at "Y" . to_vector . should_equal [4, 5] - t2.at "Right_X" . to_vector . should_equal [1, 2] - t2.at "Right_Y" . to_vector . should_equal [4, 5] + t2.at "Right X" . to_vector . should_equal [1, 2] + t2.at "Right Y" . to_vector . 
should_equal [4, 5] if setup.is_database.not then Test.specify "should correctly pad/truncate all kinds of column types" <| diff --git a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso index 55fdc51974a5..b79d762f1b8c 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso @@ -19,8 +19,8 @@ spec setup = col1 = ["foo", [1,2,3]] col2 = ["bar", [4,5,6]] col3 = ["Baz", [7,8,9]] - col4 = ["foo_1", [10,11,12]] - col5 = ["foo_2", [13,14,15]] + col4 = ["foo 1", [10,11,12]] + col5 = ["foo 2", [13,14,15]] col6 = ["ab.+123", [16,17,18]] col7 = ["abcd123", [19,20,21]] table_builder [col1, col2, col3, col4, col5, col6, col7] @@ -28,7 +28,7 @@ spec setup = Test.group prefix+"Table.select_columns" <| Test.specify "should work as shown in the doc examples" <| expect_column_names ["foo", "bar"] <| table.select_columns ["bar", "foo"] - expect_column_names ["bar", "Baz", "foo_1", "foo_2"] <| table.select_columns [By_Name "foo.+" use_regex=True, By_Name "b.*" use_regex=True] + expect_column_names ["bar", "Baz", "foo 1", "foo 2"] <| table.select_columns [By_Name "foo.+" use_regex=True, By_Name "b.*" use_regex=True] expect_column_names ["abcd123", "foo", "bar"] <| table.select_columns [-1, 0, 1] reorder=True Test.specify "should allow to reorder columns if asked to" <| @@ -45,13 +45,13 @@ spec setup = expect_column_names ["abcd123"] <| table.select_columns [By_Name "abcd123" Case_Sensitivity.Sensitive use_regex=True] Test.specify "should allow negative indices" <| - expect_column_names ["foo", "bar", "foo_2"] <| table.select_columns [-3, 0, 1] + expect_column_names ["foo", "bar", "foo 2"] <| table.select_columns [-3, 0, 1] Test.specify "should allow mixed names and indexes" <| - expect_column_names ["foo", "bar", "foo_2"] <| table.select_columns [-3, "bar", 0] - expect_column_names ["foo_2", 
"bar", "foo"] <| table.select_columns [-3, "bar", 0] reorder=True - expect_column_names ["foo", "bar", "foo_1", "foo_2", "abcd123"] <| table.select_columns [-1, "bar", By_Name "foo.*" Case_Sensitivity.Sensitive use_regex=True] - expect_column_names ["foo", "foo_1", "foo_2", "bar", "abcd123"] <| table.select_columns [By_Name "foo.*" Case_Sensitivity.Sensitive use_regex=True, "bar", "foo", -1] reorder=True + expect_column_names ["foo", "bar", "foo 2"] <| table.select_columns [-3, "bar", 0] + expect_column_names ["foo 2", "bar", "foo"] <| table.select_columns [-3, "bar", 0] reorder=True + expect_column_names ["foo", "bar", "foo 1", "foo 2", "abcd123"] <| table.select_columns [-1, "bar", By_Name "foo.*" Case_Sensitivity.Sensitive use_regex=True] + expect_column_names ["foo", "foo 1", "foo 2", "bar", "abcd123"] <| table.select_columns [By_Name "foo.*" Case_Sensitivity.Sensitive use_regex=True, "bar", "foo", -1] reorder=True if test_selection.supports_case_sensitive_columns then Test.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| @@ -63,8 +63,8 @@ spec setup = expect_column_names ["bar", "Bar"] <| table.select_columns [By_Name "bar"] Test.specify "should correctly handle regexes matching multiple names" <| - expect_column_names ["foo", "bar", "foo_1", "foo_2"] <| table.select_columns [By_Name "b.*" Case_Sensitivity.Sensitive use_regex=True, By_Name "f.+" Case_Sensitivity.Sensitive use_regex=True] - expect_column_names ["bar", "foo", "foo_1", "foo_2"] <| table.select_columns [By_Name "b.*" Case_Sensitivity.Sensitive use_regex=True, By_Name "f.+" Case_Sensitivity.Sensitive use_regex=True] reorder=True + expect_column_names ["foo", "bar", "foo 1", "foo 2"] <| table.select_columns [By_Name "b.*" Case_Sensitivity.Sensitive use_regex=True, By_Name "f.+" Case_Sensitivity.Sensitive use_regex=True] + expect_column_names ["bar", "foo", "foo 1", "foo 2"] <| table.select_columns [By_Name "b.*" Case_Sensitivity.Sensitive 
use_regex=True, By_Name "f.+" Case_Sensitivity.Sensitive use_regex=True] reorder=True Test.specify "should correctly handle problems: out of bounds indices" <| selector = [1, 0, 100, -200, 300] @@ -142,21 +142,21 @@ spec setup = Test.group prefix+"Table.remove_columns" <| Test.specify "should work as shown in the doc examples" <| - expect_column_names ["Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] <| table.remove_columns ["bar", "foo"] + expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| table.remove_columns ["bar", "foo"] expect_column_names ["foo", "ab.+123", "abcd123"] <| table.remove_columns [By_Name "foo.+" Case_Sensitivity.Insensitive use_regex=True, By_Name "b.*" Case_Sensitivity.Insensitive use_regex=True] - expect_column_names ["Baz", "foo_1", "foo_2", "ab.+123"] <| table.remove_columns [-1, 0, 1] + expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123"] <| table.remove_columns [-1, 0, 1] Test.specify "should correctly handle regex matching" <| last_ones = table.columns.drop 1 . 
map .name expect_column_names last_ones <| table.remove_columns [By_Name "foo" Case_Sensitivity.Sensitive use_regex=True] - first_ones = ["foo", "bar", "Baz", "foo_1", "foo_2"] + first_ones = ["foo", "bar", "Baz", "foo 1", "foo 2"] expect_column_names first_ones <| table.remove_columns [By_Name "a.*" Case_Sensitivity.Sensitive use_regex=True] expect_column_names first_ones <| table.remove_columns [By_Name "ab.+123" Case_Sensitivity.Sensitive use_regex=True] expect_column_names first_ones+["abcd123"] <| table.remove_columns [By_Name "ab.+123"] expect_column_names first_ones+["ab.+123"] <| table.remove_columns [By_Name "abcd123" Case_Sensitivity.Sensitive use_regex=True] Test.specify "should allow negative indices" <| - expect_column_names ["Baz", "foo_1", "ab.+123"] <| table.remove_columns [-1, -3, 0, 1] + expect_column_names ["Baz", "foo 1", "ab.+123"] <| table.remove_columns [-1, -3, 0, 1] if test_selection.supports_case_sensitive_columns then Test.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| @@ -173,7 +173,7 @@ spec setup = Test.specify "should correctly handle problems: out of bounds indices" <| selector = [1, 0, 100, -200, 300] action = table.remove_columns selector on_problems=_ - tester = expect_column_names ["Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] + tester = expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] problems = [Column_Indexes_Out_Of_Range.Error [100, -200, 300]] Problems.test_problem_handling action problems tester @@ -183,28 +183,28 @@ spec setup = Test.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] t = table.remove_columns selector on_problems=Problem_Behavior.Report_Error - expect_column_names ["bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] t + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t Test.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -7, -6, 1] t = 
table.remove_columns selector on_problems=Problem_Behavior.Report_Error - expect_column_names ["Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] t + expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t Test.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] t = table.remove_columns selector on_problems=Problem_Behavior.Report_Error - expect_column_names ["bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] t + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t Test.specify "should correctly handle edge-cases: duplicate matches due to case insensitivity" <| selector = [By_Name "FOO", By_Name "foo"] t = table.remove_columns selector on_problems=Problem_Behavior.Report_Error - expect_column_names ["bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] t + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] t Test.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' selector = ["foo", "hmm", weird_name] action = table.remove_columns selector on_problems=_ - tester = expect_column_names ["bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] + tester = expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] problems = [Missing_Input_Columns.Error ["hmm", weird_name]] Problems.test_problem_handling action problems tester @@ -223,22 +223,22 @@ spec setup = Test.group prefix+"Table.reorder_columns" <| Test.specify "should work as shown in the doc examples" <| - expect_column_names ["bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns "foo" Position.After_Other_Columns - expect_column_names ["Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "foo", "bar"] <| table.reorder_columns ["foo", "bar"] Position.After_Other_Columns - expect_column_names ["foo_1", "foo_2", "bar", "Baz", "foo", "ab.+123", "abcd123"] <| table.reorder_columns [By_Name "foo.+" Case_Sensitivity.Insensitive use_regex=True, 
By_Name "b.*" Case_Sensitivity.Insensitive use_regex=True] - expect_column_names ["bar", "foo", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] <| table.reorder_columns [1, 0] Position.Before_Other_Columns - expect_column_names ["bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns [0] Position.After_Other_Columns + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns "foo" Position.After_Other_Columns + expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] <| table.reorder_columns ["foo", "bar"] Position.After_Other_Columns + expect_column_names ["foo 1", "foo 2", "bar", "Baz", "foo", "ab.+123", "abcd123"] <| table.reorder_columns [By_Name "foo.+" Case_Sensitivity.Insensitive use_regex=True, By_Name "b.*" Case_Sensitivity.Insensitive use_regex=True] + expect_column_names ["bar", "foo", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] <| table.reorder_columns [1, 0] Position.Before_Other_Columns + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns [0] Position.After_Other_Columns Test.specify "should correctly handle regex matching" <| - expect_column_names ["bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns [By_Name "foo" Case_Sensitivity.Sensitive use_regex=True] Position.After_Other_Columns - rest = ["foo", "bar", "Baz", "foo_1", "foo_2"] + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] <| table.reorder_columns [By_Name "foo" Case_Sensitivity.Sensitive use_regex=True] Position.After_Other_Columns + rest = ["foo", "bar", "Baz", "foo 1", "foo 2"] expect_column_names ["ab.+123", "abcd123"]+rest <| table.reorder_columns [By_Name "a.*" Case_Sensitivity.Sensitive use_regex=True] expect_column_names ["ab.+123", "abcd123"]+rest <| table.reorder_columns [By_Name "ab.+123" Case_Sensitivity.Sensitive use_regex=True] expect_column_names 
["ab.+123"]+rest+["abcd123"] <| table.reorder_columns ["ab.+123"] expect_column_names ["abcd123"]+rest+["ab.+123"] <| table.reorder_columns [By_Name "abcd123" Case_Sensitivity.Sensitive use_regex=True] Test.specify "should allow negative indices" <| - expect_column_names ["abcd123", "foo_2", "foo", "bar", "Baz", "foo_1", "ab.+123"] <| table.reorder_columns [-1, -3, 0, 1] + expect_column_names ["abcd123", "foo 2", "foo", "bar", "Baz", "foo 1", "ab.+123"] <| table.reorder_columns [-1, -3, 0, 1] if test_selection.supports_case_sensitive_columns then Test.specify "should correctly handle exact matches matching multiple names due to case insensitivity" <| @@ -250,12 +250,12 @@ spec setup = expect_column_names ["bar", "Bar", "foo"] <| table.reorder_columns [By_Name "bar"] Test.specify "should correctly handle regexes matching multiple names" <| - expect_column_names ["bar", "foo", "foo_1", "foo_2", "Baz", "ab.+123", "abcd123"] <| table.reorder_columns [By_Name "b.*" Case_Sensitivity.Sensitive use_regex=True, By_Name "f.+" Case_Sensitivity.Sensitive use_regex=True] + expect_column_names ["bar", "foo", "foo 1", "foo 2", "Baz", "ab.+123", "abcd123"] <| table.reorder_columns [By_Name "b.*" Case_Sensitivity.Sensitive use_regex=True, By_Name "f.+" Case_Sensitivity.Sensitive use_regex=True] Test.specify "should correctly handle problems: out of bounds indices" <| selector = [1, 0, 100, -200, 300] action = table.reorder_columns selector on_problems=_ - tester = expect_column_names ["bar", "foo", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123"] + tester = expect_column_names ["bar", "foo", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] problems = [Column_Indexes_Out_Of_Range.Error [100, -200, 300]] Problems.test_problem_handling action problems tester @@ -265,23 +265,23 @@ spec setup = Test.specify "should correctly handle edge-cases: duplicate indices" <| selector = [0, 0, 0] t = table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error 
- expect_column_names ["bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "foo"] t + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] t Test.specify "should correctly handle edge-cases: aliased indices" <| selector = [0, -7, -6, 1] t = table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error - expect_column_names ["Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "foo", "bar"] t + expect_column_names ["Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo", "bar"] t Test.specify "should correctly handle edge-cases: duplicate names" <| selector = ["foo", "foo"] t = table.reorder_columns selector Position.After_Other_Columns on_problems=Problem_Behavior.Report_Error - expect_column_names ["bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "foo"] t + expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] t Test.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' selector = ["foo", "hmm", weird_name] action = table.reorder_columns selector Position.After_Other_Columns on_problems=_ - tester = expect_column_names ["bar", "Baz", "foo_1", "foo_2", "ab.+123", "abcd123", "foo"] + tester = expect_column_names ["bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123", "foo"] problems = [Missing_Input_Columns.Error ["hmm", weird_name]] Problems.test_problem_handling action problems tester @@ -290,31 +290,31 @@ spec setup = Test.group prefix+"Table.sort_columns" <| table = - col1 = ["foo_21", [1,2,3]] - col2 = ["foo_100", [4,5,6]] - col3 = ["foo_1", [7,8,9]] - col4 = ["Foo_2", [10,11,12]] - col5 = ["foo_3", [13,14,15]] - col6 = ["foo_001", [16,17,18]] + col1 = ["foo 21", [1,2,3]] + col2 = ["foo 100", [4,5,6]] + col3 = ["foo 1", [7,8,9]] + col4 = ["Foo 2", [10,11,12]] + col5 = ["foo 3", [13,14,15]] + col6 = ["foo 001", [16,17,18]] col7 = ["bar", [19,20,21]] table_builder [col1, col2, col3, col4, col5, col6, col7] Test.specify "should work as 
shown in the doc examples" <| sorted = table.sort_columns - expect_column_names ["Foo_2", "bar", "foo_001", "foo_1", "foo_100", "foo_21", "foo_3"] sorted + expect_column_names ["Foo 2", "bar", "foo 001", "foo 1", "foo 100", "foo 21", "foo 3"] sorted sorted.columns.first.to_vector . should_equal [10,11,12] - expect_column_names ["bar", "foo_001", "foo_1", "Foo_2", "foo_3", "foo_21", "foo_100"] <| table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) - expect_column_names ["foo_3", "foo_21", "foo_100", "foo_1", "foo_001", "bar", "Foo_2"] <| table.sort_columns Sort_Direction.Descending + expect_column_names ["bar", "foo 001", "foo 1", "Foo 2", "foo 3", "foo 21", "foo 100"] <| table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) + expect_column_names ["foo 3", "foo 21", "foo 100", "foo 1", "foo 001", "bar", "Foo 2"] <| table.sort_columns Sort_Direction.Descending Test.specify "should correctly handle case-insensitive sorting" <| - expect_column_names ["bar", "foo_001", "foo_1", "foo_100", "Foo_2", "foo_21", "foo_3"] <| table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive) + expect_column_names ["bar", "foo 001", "foo 1", "foo 100", "Foo 2", "foo 21", "foo 3"] <| table.sort_columns text_ordering=(Text_Ordering.Case_Insensitive) Test.specify "should correctly handle natural order sorting" <| - expect_column_names ["Foo_2", "bar", "foo_001", "foo_1", "foo_3", "foo_21", "foo_100"] <| table.sort_columns text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True) + expect_column_names ["Foo 2", "bar", "foo 001", "foo 1", "foo 3", "foo 21", "foo 100"] <| table.sort_columns text_ordering=(Text_Ordering.Default sort_digits_as_numbers=True) Test.specify "should correctly handle various combinations of options" <| - expect_column_names ["foo_100", "foo_21", "foo_3", "Foo_2", "foo_1", "foo_001", "bar"] <| table.sort_columns Sort_Direction.Descending 
text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) + expect_column_names ["foo 100", "foo 21", "foo 3", "Foo 2", "foo 1", "foo 001", "bar"] <| table.sort_columns Sort_Direction.Descending text_ordering=(Text_Ordering.Case_Insensitive sort_digits_as_numbers=True) Test.group prefix+"Table.rename_columns" <| table = @@ -441,41 +441,41 @@ spec setup = Test.specify "should correctly handle problems: invalid names ''" <| map = Map.from_vector [[1, ""]] action = table.rename_columns map on_problems=_ - tester = expect_column_names ["alpha", "Column_1", "gamma", "delta"] + tester = expect_column_names ["alpha", "Column 1", "gamma", "delta"] problems = [Invalid_Output_Column_Names.Error [""]] Problems.test_problem_handling action problems tester Test.specify "should correctly handle problems: invalid names Nothing" <| map = ["alpha", Nothing] action = table.rename_columns map on_problems=_ - tester = expect_column_names ["alpha", "Column_1", "gamma", "delta"] + tester = expect_column_names ["alpha", "Column 1", "gamma", "delta"] problems = [Invalid_Output_Column_Names.Error [Nothing]] Problems.test_problem_handling action problems tester Test.specify "should correctly handle problems: invalid names null character" <| map = ["alpha", 'a\0b'] action = table.rename_columns map on_problems=_ - tester = expect_column_names ["alpha", "Column_1", "gamma", "delta"] + tester = expect_column_names ["alpha", "Column 1", "gamma", "delta"] problems = [Invalid_Output_Column_Names.Error ['a\0b']] Problems.test_problem_handling action problems tester Test.specify "should correctly handle problems: duplicate names" <| map = ["Test", "Test", "Test", "Test"] action = table.rename_columns map on_problems=_ - tester = expect_column_names ["Test", "Test_1", "Test_2", "Test_3"] + tester = expect_column_names ["Test", "Test 1", "Test 2", "Test 3"] problems = [Duplicate_Output_Column_Names.Error ["Test", "Test", "Test"]] Problems.test_problem_handling action problems tester 
Test.specify "should correctly handle problems: new name is clashing with existing name of existing column" <| map = Map.from_vector [["alpha", "beta"]] action = table.rename_columns map on_problems=_ - tester = expect_column_names ["beta", "beta_1", "gamma", "delta"] + tester = expect_column_names ["beta", "beta 1", "gamma", "delta"] problems = [Duplicate_Output_Column_Names.Error ["beta"]] Problems.test_problem_handling action problems tester map2 = Map.from_vector [["beta", "alpha"]] action2 = table.rename_columns map2 on_problems=_ - tester2 = expect_column_names ["alpha_1", "alpha", "gamma", "delta"] + tester2 = expect_column_names ["alpha 1", "alpha", "gamma", "delta"] problems2 = [Duplicate_Output_Column_Names.Error ["alpha"]] Problems.test_problem_handling action2 problems2 tester2 diff --git a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso index 4a3d340b6739..1fa3b63e4f0e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Transpose_Spec.enso @@ -73,18 +73,18 @@ spec setup = Test.specify "should handle missing columns" <| t1 = table_builder [["Key", ["x", "y", "z"]], ["Value", [1, 2, 3]], ["Another", [10, Nothing, 20]]] - err1 = t1.transpose ["Key", "Missing", "Missing_2"] + err1 = t1.transpose ["Key", "Missing", "Missing 2"] err1.should_fail_with Missing_Input_Columns - err1.catch.criteria . should_equal ["Missing", "Missing_2"] + err1.catch.criteria . should_equal ["Missing", "Missing 2"] err2 = t1.transpose [0, -1, 42, -100] err2.should_fail_with Column_Indexes_Out_Of_Range err2.catch.indexes . should_equal [42, -100] - action1 = t1.transpose ["Key", "Missing", "Missing_2"] error_on_missing_columns=False on_problems=_ + action1 = t1.transpose ["Key", "Missing", "Missing 2"] error_on_missing_columns=False on_problems=_ tester1 table = table.column_names . 
should_equal ["Key", "Name", "Value"] - problems1 = [Missing_Input_Columns.Error ["Missing", "Missing_2"]] + problems1 = [Missing_Input_Columns.Error ["Missing", "Missing 2"]] Problems.test_problem_handling action1 problems1 tester1 action2 = t1.transpose [0, -1, 42, -100] error_on_missing_columns=False on_problems=_ @@ -98,13 +98,13 @@ spec setup = action1 = t1.transpose ["X", "Y", "Z"] name_field="Y" value_field="Z" on_problems=_ tester1 table = - table.column_names . should_equal ["X", "Y", "Z", "Y_1", "Z_1"] + table.column_names . should_equal ["X", "Y", "Z", "Y 1", "Z 1"] problems1 = [Duplicate_Output_Column_Names.Error ["Y", "Z"]] Problems.test_problem_handling action1 problems1 tester1 action2 = t1.transpose ["X"] name_field="F" value_field="F" on_problems=_ tester2 table = - table.column_names . should_equal ["X", "F", "F_1"] + table.column_names . should_equal ["X", "F", "F 1"] problems2 = [Duplicate_Output_Column_Names.Error ["F"]] Problems.test_problem_handling action2 problems2 tester2 diff --git a/test/Table_Tests/src/Database/Codegen_Spec.enso b/test/Table_Tests/src/Database/Codegen_Spec.enso index cdc4e6b8f51e..f2f12b50ec70 100644 --- a/test/Table_Tests/src/Database/Codegen_Spec.enso +++ b/test/Table_Tests/src/Database/Codegen_Spec.enso @@ -115,9 +115,9 @@ spec = Test.group "Helpers" <| Test.specify "fresh_names should provide fresh names" <| - used_names = ["A", "A_1"] + used_names = ["A", "A 1"] preferred_names = ["A", "A", "B"] - fresh_names used_names preferred_names . should_equal ["A_2", "A_3", "B"] + fresh_names used_names preferred_names . 
should_equal ["A 2", "A 3", "B"] Test.group "[Codegen] Aggregation" <| Test.specify "should allow to count rows" <| diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index 3e3f0bfba7b2..57ec8feb1073 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -34,7 +34,7 @@ spec make_new_connection prefix persistent_connector=True = Test.group prefix+"Uploading an in-memory Table" <| in_memory_table = Table.new [["X", [1, 2, 3]], ["Y", ['a', 'b', 'c']]] Test.specify "should include the created table in the tables directory" <| - db_table = in_memory_table.create_database_table connection (Name_Generator.random_name "permanent_table_1") temporary=False + db_table = in_memory_table.create_database_table connection (Name_Generator.random_name "permanent_table 1") temporary=False Panic.with_finalizer (connection.drop_table db_table.name) <| db_table.at "X" . to_vector . should_equal [1, 2, 3] @@ -42,7 +42,7 @@ spec make_new_connection prefix persistent_connector=True = connection.query db_table.name . at "X" . to_vector . should_equal [1, 2, 3] Test.specify "should include the temporary table in the tables directory" <| - db_table = in_memory_table.create_database_table connection (Name_Generator.random_name "temporary_table_1") temporary=True + db_table = in_memory_table.create_database_table connection (Name_Generator.random_name "temporary_table 1") temporary=True db_table.at "X" . to_vector . should_equal [1, 2, 3] connection.tables.at "Name" . to_vector . should_contain db_table.name connection.query db_table.name . at "X" . to_vector . 
should_equal [1, 2, 3] @@ -50,7 +50,7 @@ spec make_new_connection prefix persistent_connector=True = if persistent_connector then Test.specify "should drop the temporary table after the connection is closed" <| tmp_connection = make_new_connection Nothing - db_table = in_memory_table.create_database_table tmp_connection (Name_Generator.random_name "temporary_table_2") temporary=True + db_table = in_memory_table.create_database_table tmp_connection (Name_Generator.random_name "temporary_table 2") temporary=True name = db_table.name tmp_connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . should_equal [1, 2, 3] tmp_connection.close @@ -58,7 +58,7 @@ spec make_new_connection prefix persistent_connector=True = Test.specify "should preserve the regular table after the connection is closed" <| tmp_connection = make_new_connection Nothing - db_table = in_memory_table.create_database_table tmp_connection (Name_Generator.random_name "permanent_table_1") temporary=False + db_table = in_memory_table.create_database_table tmp_connection (Name_Generator.random_name "permanent_table 1") temporary=False name = db_table.name Panic.with_finalizer (connection.drop_table name) <| tmp_connection.query (SQL_Query.Table_Name name) . at "X" . to_vector . should_equal [1, 2, 3] @@ -143,11 +143,11 @@ spec make_new_connection prefix persistent_connector=True = db_table_4 = db_table_2.join db_table_3 join_kind=Join_Kind.Left_Outer copied_table = db_table_4.create_database_table connection (Name_Generator.random_name "copied-table") temporary=True primary_key=Nothing - copied_table.column_names . should_equal ["X", "Y", "C1", "C2", "Right_X", "C3"] + copied_table.column_names . should_equal ["X", "Y", "C1", "C2", "Right X", "C3"] copied_table.at "X" . to_vector . should_equal [1, 1, 2] copied_table.at "C1" . to_vector . should_equal [101, 102, 203] copied_table.at "C2" . to_vector . should_equal ["constant_text", "constant_text", "constant_text"] - copied_table.at "Right_X" . 
to_vector . should_equal [Nothing, Nothing, 2] + copied_table.at "Right X" . to_vector . should_equal [Nothing, Nothing, 2] copied_table.at "C3" . to_vector . should_equal [Nothing, Nothing, 5] # We check that this is indeed querying a simple DB table and not a complex query like `db_table_4` would be, diff --git a/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso b/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso index 2bc8d7f5d6cf..c9d3ef258fd1 100644 --- a/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso +++ b/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso @@ -30,28 +30,28 @@ spec = Test.specify 'should rename duplicates names' <| strategy = Unique_Name_Strategy.new strategy.make_unique "A" . should_equal "A" - strategy.make_unique "A" . should_equal "A_1" - strategy.make_unique "A" . should_equal "A_2" + strategy.make_unique "A" . should_equal "A 1" + strategy.make_unique "A" . should_equal "A 2" strategy.renames.length . should_equal 2 strategy.invalid_names.length . should_equal 0 Test.specify 'should preserve existing suffix' <| strategy = Unique_Name_Strategy.new strategy.make_unique "A" . should_equal "A" - strategy.make_unique "A_1" . should_equal "A_1" - strategy.make_unique "A" . should_equal "A_2" - strategy.make_unique "A_1" . should_equal "A_1_1" + strategy.make_unique "A 1" . should_equal "A 1" + strategy.make_unique "A" . should_equal "A 2" + strategy.make_unique "A 1" . should_equal "A 1 1" strategy.renames.length . should_equal 2 strategy.invalid_names.length . should_equal 0 Test.specify "should always add a counter when renaming invalid names" <| strategy = Unique_Name_Strategy.new - strategy.make_unique "" . should_equal "Column_1" - strategy.make_unique "" . should_equal "Column_2" - strategy.make_unique Nothing . should_equal "Column_3" + strategy.make_unique "" . should_equal "Column 1" + strategy.make_unique "" . should_equal "Column 2" + strategy.make_unique Nothing . 
should_equal "Column 3" strategy.make_unique "Foo" . should_equal "Foo" strategy.make_unique "Column" . should_equal "Column" - strategy.make_unique "" . should_equal "Column_4" + strategy.make_unique "" . should_equal "Column 4" Test.specify 'should work as in examples' <| unique_name_strategy = Unique_Name_Strategy.new @@ -60,19 +60,19 @@ spec = invalid = unique_name_strategy.invalid_names duplicates.should_equal ["A"] invalid.should_equal [""] - unique_names.should_equal ["A", "B", "A_1", "Column_1"] + unique_names.should_equal ["A", "B", "A 1", "Column 1"] strategy_1 = Unique_Name_Strategy.new strategy_1.make_unique "A" . should_equal "A" - strategy_1.make_unique "A" . should_equal "A_1" + strategy_1.make_unique "A" . should_equal "A 1" Test.group "Unique_Name_Strategy.combine_with_prefix" <| Test.specify "should work as in examples" <| strategy = Unique_Name_Strategy.new first = ["A", "B", "second_A"] - second = ["A", "B", "second_A_1", "C"] + second = ["A", "B", "second_A 1", "C"] unique_second = strategy.combine_with_prefix first second "second_" - unique_second . should_equal ["second_A_2", "second_B", "second_A_1", "C"] + unique_second . should_equal ["second_A 2", "second_B", "second_A 1", "C"] strategy.invalid_names . should_equal [] strategy.renames . should_equal ["second_A"] @@ -81,7 +81,7 @@ spec = second = ["B", "A", "C"] strategy = Unique_Name_Strategy.new r = strategy.combine_with_prefix first second "" - r . should_equal ["B_1", "A_1", "C"] + r . should_equal ["B 1", "A 1", "C"] Test.specify "should work for empty input" <| Unique_Name_Strategy.new.combine_with_prefix [] [] "" . should_equal [] @@ -89,34 +89,34 @@ spec = Unique_Name_Strategy.new.combine_with_prefix [] ["a"] "" . should_equal ["a"] Test.specify "should find the first free spot" <| - Unique_Name_Strategy.new.combine_with_prefix ["A", "A_1", "A_2"] ["A"] "" . 
should_equal ["A_3"] - Unique_Name_Strategy.new.combine_with_prefix ["A", "A_1", "A_2"] ["A_4", "A_6", "A_100", "A", "A_3"] "" . should_equal ["A_4", "A_6", "A_100", "A_5", "A_3"] + Unique_Name_Strategy.new.combine_with_prefix ["A", "A 1", "A 2"] ["A"] "" . should_equal ["A 3"] + Unique_Name_Strategy.new.combine_with_prefix ["A", "A 1", "A 2"] ["A 4", "A 6", "A 100", "A", "A 3"] "" . should_equal ["A 4", "A 6", "A 100", "A 5", "A 3"] - Unique_Name_Strategy.new.combine_with_prefix ["A", "A_1", "A_2"] ["A"] "P_" . should_equal ["P_A"] - Unique_Name_Strategy.new.combine_with_prefix ["A", "A_1", "A_2", "P_A"] ["A"] "P_" . should_equal ["P_A_1"] - Unique_Name_Strategy.new.combine_with_prefix ["A", "A_1", "A_2", "P_A_1"] ["A"] "P_" . should_equal ["P_A"] - Unique_Name_Strategy.new.combine_with_prefix ["A", "A_1", "A_2", "P_A_1"] ["A", "P_A", "P_A_2"] "P_" . should_equal ["P_A_3", "P_A", "P_A_2"] + Unique_Name_Strategy.new.combine_with_prefix ["A", "A 1", "A 2"] ["A"] "P_" . should_equal ["P_A"] + Unique_Name_Strategy.new.combine_with_prefix ["A", "A 1", "A 2", "P_A"] ["A"] "P_" . should_equal ["P_A 1"] + Unique_Name_Strategy.new.combine_with_prefix ["A", "A 1", "A 2", "P_A 1"] ["A"] "P_" . should_equal ["P_A"] + Unique_Name_Strategy.new.combine_with_prefix ["A", "A 1", "A 2", "P_A 1"] ["A", "P_A", "P_A 2"] "P_" . should_equal ["P_A 3", "P_A", "P_A 2"] Test.specify "will add a prefix/suffix, not increment an existing counter" <| - first = ["A", "A_1", "A_2", "A_3"] - Unique_Name_Strategy.new.combine_with_prefix first ["A_2"] "P_" . should_equal ["P_A_2"] - Unique_Name_Strategy.new.combine_with_prefix first ["A_2"] "" . should_equal ["A_2_1"] - Unique_Name_Strategy.new.combine_with_prefix first+["P_A_2"] ["A_2"] "P_" . should_equal ["P_A_2_1"] + first = ["A", "A 1", "A 2", "A 3"] + Unique_Name_Strategy.new.combine_with_prefix first ["A 2"] "P_" . should_equal ["P_A 2"] + Unique_Name_Strategy.new.combine_with_prefix first ["A 2"] "" . 
should_equal ["A 2 1"] + Unique_Name_Strategy.new.combine_with_prefix first+["P_A 2"] ["A 2"] "P_" . should_equal ["P_A 2 1"] Test.specify "should prioritize existing names when renaming conflicts and rename only ones that are clashing with the other list" <| first = ["A", "B"] - second = ["B", "A", "B_1", "C", "B_2", "B_4"] + second = ["B", "A", "B 1", "C", "B 2", "B_4"] strategy = Unique_Name_Strategy.new r = strategy.combine_with_prefix first second "" - r . should_equal ["B_3", "A_1", "B_1", "C", "B_2", "B_4"] + r . should_equal ["B 3", "A 1", "B 1", "C", "B 2", "B_4"] strategy.invalid_names . should_equal [] strategy.renames . should_equal ["B", "A"] r2 = Unique_Name_Strategy.new.combine_with_prefix first second "P_" - r2 . should_equal ["P_B", "P_A", "B_1", "C", "B_2", "B_4"] + r2 . should_equal ["P_B", "P_A", "B 1", "C", "B 2", "B_4"] - third = ["B", "A", "P_B", "X", "P_B_1", "P_B_2"] + third = ["B", "A", "P_B", "X", "P_B 1", "P_B 2"] r3 = Unique_Name_Strategy.new.combine_with_prefix first third "P_" - r3 . should_equal ["P_B_3", "P_A", "P_B", "X", "P_B_1", "P_B_2"] + r3 . should_equal ["P_B 3", "P_A", "P_B", "X", "P_B 1", "P_B 2"] main = Test_Suite.run_main spec diff --git a/test/Table_Tests/src/IO/Csv_Spec.enso b/test/Table_Tests/src/IO/Csv_Spec.enso index 7bfbf817c17c..56b8f0377d14 100644 --- a/test/Table_Tests/src/IO/Csv_Spec.enso +++ b/test/Table_Tests/src/IO/Csv_Spec.enso @@ -29,13 +29,13 @@ spec = Test.specify "should correctly infer types of varied-type columns" <| varied_column = (enso_project.data / "varied_column.csv") . read - c_1 = ["Column_1", ["2005-02-25", "2005-02-28", "4", "2005-03-02", Nothing, "2005-03-04", "2005-03-07", "2005-03-08"]] + c_1 = ["Column 1", ["2005-02-25", "2005-02-28", "4", "2005-03-02", Nothing, "2005-03-04", "2005-03-07", "2005-03-08"]] # We can re-enable this once date support is improved. 
- #c_2 = ["Column_2", ["2005-02-25", "2005-02-28", "2005-03-01", Nothing, "2005-03-03", "2005-03-04", "2005-03-07", "2005-03-08"]] - c_3 = ["Column_3", [1, 2, 3, 4, 5, Nothing, 7, 8]] - c_4 = ["Column_4", [1, 2, 3, 4, 5, 6, 7, 8]] - c_5 = ["Column_5", [1.0, 2.0, 3.0, 4.0, 5.0, 6.25, 7.0, 8.0]] - c_6 = ["Column_6", ['1', '2', '3', '4', '5', '6.25', '7', 'osiem']] + #c_2 = ["Column 2", ["2005-02-25", "2005-02-28", "2005-03-01", Nothing, "2005-03-03", "2005-03-04", "2005-03-07", "2005-03-08"]] + c_3 = ["Column 3", [1, 2, 3, 4, 5, Nothing, 7, 8]] + c_4 = ["Column 4", [1, 2, 3, 4, 5, 6, 7, 8]] + c_5 = ["Column 5", [1.0, 2.0, 3.0, 4.0, 5.0, 6.25, 7.0, 8.0]] + c_6 = ["Column 6", ['1', '2', '3', '4', '5', '6.25', '7', 'osiem']] expected = Table.new [c_1, c_3, c_4, c_5, c_6] varied_column.select_columns [0, 2, 3, 4, 5] . should_equal expected @@ -45,14 +45,14 @@ spec = name,x,y,x,y foo,10,20,30,20 t = Table.from csv (format = Delimited ",") - t.columns.map .name . should_equal ['name', 'x', 'y', 'x_1', 'y_1'] + t.columns.map .name . should_equal ['name', 'x', 'y', 'x 1', 'y 1'] Test.group 'Writing' <| Test.specify 'should properly serialize simple tables' <| varied_column = (enso_project.data / "varied_column.csv") . 
read res = Text.from varied_column format=(Delimited ",") exp = normalize_lines <| ''' - Column_1,Column_2,Column_3,Column_4,Column_5,Column_6 + Column 1,Column 2,Column 3,Column 4,Column 5,Column 6 2005-02-25,2005-02-25,1,1,1.0,1 2005-02-28,2005-02-28,2,2,2.0,2 4,2005-03-01,3,3,3.0,3 @@ -98,7 +98,7 @@ spec = out.delete_if_exists varied_column.write out exp = normalize_lines <| ''' - Column_1,Column_2,Column_3,Column_4,Column_5,Column_6 + Column 1,Column 2,Column 3,Column 4,Column 5,Column 6 2005-02-25,2005-02-25,1,1,1.0,1 2005-02-28,2005-02-28,2,2,2.0,2 4,2005-03-01,3,3,3.0,3 diff --git a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso index f13068001b9f..6237f164c444 100644 --- a/test/Table_Tests/src/IO/Delimited_Read_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Read_Spec.enso @@ -25,9 +25,9 @@ spec = simple_empty.should_equal expected_table Test.specify "should load a simple table without headers" <| - c_1 = ["Column_1", ['a', '1', '4', '7', '10']] - c_2 = ["Column_2", ['b', '2', Nothing, '8', '11']] - c_3 = ["Column_3", ['c', Nothing, '6', '9', '12']] + c_1 = ["Column 1", ['a', '1', '4', '7', '10']] + c_2 = ["Column 2", ['b', '2', Nothing, '8', '11']] + c_3 = ["Column 3", ['c', Nothing, '6', '9', '12']] expected_table = Table.new [c_1, c_2, c_3] simple_empty = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False value_formatter=Nothing) simple_empty.should_equal expected_table @@ -35,11 +35,11 @@ spec = Test.specify "should work in presence of missing headers" <| action on_problems = Data.read (enso_project.data / "missing_header.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems tester table = - table.columns.map .name . should_equal ["a", "Column_1", "c", "Column_2", "d"] + table.columns.map .name . should_equal ["a", "Column 1", "c", "Column 2", "d"] table.at "a" . to_vector . should_equal ["1"] - table.at "Column_1" . to_vector . 
should_equal ["2"] + table.at "Column 1" . to_vector . should_equal ["2"] table.at "c" . to_vector . should_equal ["3"] - table.at "Column_2" . to_vector . should_equal ["4"] + table.at "Column 2" . to_vector . should_equal ["4"] table.at "d" . to_vector . should_equal ["5"] problems = [Invalid_Output_Column_Names.Error [Nothing, Nothing]] Problems.test_problem_handling action problems tester @@ -49,9 +49,9 @@ spec = t1.columns.map .name . should_equal ["Code", "Index", "Flag", "Value", "ValueWithNothing", "TextWithNothing", "Hexadecimal", "Leading0s", "QuotedNumbers", "Mixed Types"] t2 = Data.read (enso_project.data / "all_text.csv") (Delimited ",") - t2.columns.map .name . should_equal ["Column_1", "Column_2"] - t2.at "Column_1" . to_vector . should_equal ["a", "c", "e", "g"] - t2.at "Column_2" . to_vector . should_equal ["b", "d", "f", "h"] + t2.columns.map .name . should_equal ["Column 1", "Column 2"] + t2.at "Column 1" . to_vector . should_equal ["a", "c", "e", "g"] + t2.at "Column 2" . to_vector . should_equal ["b", "d", "f", "h"] t3 = Data.read (enso_project.data / "two_rows1.csv") (Delimited ",") t3.columns.map .name . should_equal ["a", "b", "c"] @@ -60,16 +60,16 @@ spec = t3.at "c" . to_vector . should_equal [Nothing] t4 = Data.read (enso_project.data / "two_rows2.csv") (Delimited ",") - t4.columns.map .name . should_equal ["Column_1", "Column_2", "Column_3"] - t4.at "Column_1" . to_vector . should_equal ["a", "d"] - t4.at "Column_2" . to_vector . should_equal ["b", "e"] - t4.at "Column_3" . to_vector . should_equal ["c", "f"] + t4.columns.map .name . should_equal ["Column 1", "Column 2", "Column 3"] + t4.at "Column 1" . to_vector . should_equal ["a", "d"] + t4.at "Column 2" . to_vector . should_equal ["b", "e"] + t4.at "Column 3" . to_vector . should_equal ["c", "f"] t5 = Data.read (enso_project.data / "numbers_in_header.csv") (Delimited ",") - t5.columns.map .name . should_equal ["Column_1", "Column_2", "Column_3"] - t5.at "Column_1" . to_vector . 
should_equal ["a", "1"] - t5.at "Column_2" . to_vector . should_equal ["b", "2"] - t5.at "Column_3" . to_vector . should_equal [0, 3] + t5.columns.map .name . should_equal ["Column 1", "Column 2", "Column 3"] + t5.at "Column 1" . to_vector . should_equal ["a", "1"] + t5.at "Column 2" . to_vector . should_equal ["b", "2"] + t5.at "Column 3" . to_vector . should_equal [0, 3] t6 = Data.read (enso_project.data / "quoted_numbers_in_header.csv") (Delimited ",") t6.columns.map .name . should_equal ["1", "x"] @@ -78,10 +78,10 @@ spec = Test.specify "should not use the first row as headers if it is the only row, unless specifically asked to" <| t1 = Data.read (enso_project.data / "one_row.csv") (Delimited ",") - t1.columns.map .name . should_equal ["Column_1", "Column_2", "Column_3"] - t1.at "Column_1" . to_vector . should_equal ["x"] - t1.at "Column_2" . to_vector . should_equal ["y"] - t1.at "Column_3" . to_vector . should_equal ["z"] + t1.columns.map .name . should_equal ["Column 1", "Column 2", "Column 3"] + t1.at "Column 1" . to_vector . should_equal ["x"] + t1.at "Column 2" . to_vector . should_equal ["y"] + t1.at "Column 3" . to_vector . should_equal ["z"] t2 = Data.read (enso_project.data / "one_row.csv") (Delimited "," headers=True) t2.columns.map .name . should_equal ["x", "y", "z"] @@ -138,10 +138,10 @@ spec = format = Delimited ',' headers=False value_formatter=(Data_Formatter.Value trim_values=False) - reference_table = Table.new [["Column_1", ["a", "d", "1"]], ["Column_2", ["b", "e", "2"]], ["Column_3", ["c", "f", "3"]]] + reference_table = Table.new [["Column 1", ["a", "d", "1"]], ["Column 2", ["b", "e", "2"]], ["Column 3", ["c", "f", "3"]]] collapsed_table = Table.new <| ['a', 'b', 'c\nd', 'e', 'f\n1', 2, 3].map_with_index i-> v-> - ["Column_" + (i+1).to_text, [v]] + ["Column " + (i+1).to_text, [v]] Data.read file format . should_equal reference_table Data.read file (format.with_line_endings Line_Ending_Style.Unix) . 
should_equal reference_table Data.read file (format.with_line_endings Line_Ending_Style.Mac_Legacy) . should_equal collapsed_table @@ -201,9 +201,9 @@ spec = Test.specify "should handle duplicated columns" <| action on_problems = Data.read (enso_project.data / "duplicated_columns.csv") (Delimited "," headers=True value_formatter=Nothing) on_problems tester table = - table.columns.map .name . should_equal ['a', 'b', 'c', 'a_1'] + table.columns.map .name . should_equal ['a', 'b', 'c', 'a 1'] table.at 'a' . to_vector . should_equal ['1'] - table.at 'a_1' . to_vector . should_equal ['4'] + table.at 'a 1' . to_vector . should_equal ['4'] problems = [Duplicate_Output_Column_Names.Error ['a']] Problems.test_problem_handling action problems tester @@ -337,7 +337,7 @@ spec = Test.specify "should allow to skip rows" <| t1 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 value_formatter=Nothing) - t1.at "Column_1" . to_vector . should_equal ['7', '10'] + t1.at "Column 1" . to_vector . should_equal ['7', '10'] t2 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=True skip_rows=3 value_formatter=Nothing) t2.columns.map .name . should_equal ['7', '8', '9'] @@ -345,16 +345,16 @@ spec = Test.specify "should allow to set a limit of rows to read" <| t1 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False row_limit=2 value_formatter=Nothing) - t1.at "Column_1" . to_vector . should_equal ['a', '1'] + t1.at "Column 1" . to_vector . should_equal ['a', '1'] t2 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=True row_limit=2 value_formatter=Nothing) t2.at "a" . to_vector . should_equal ['1', '4'] t3 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 row_limit=1 value_formatter=Nothing) - t3.at "Column_1" . to_vector . should_equal ['7'] + t3.at "Column 1" . to_vector . 
should_equal ['7'] t4 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False row_limit=0 value_formatter=Nothing) - t4.columns.map .name . should_equal ['Column_1', 'Column_2', 'Column_3'] + t4.columns.map .name . should_equal ['Column 1', 'Column 2', 'Column 3'] t4.row_count . should_equal 0 t5 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=True row_limit=0 value_formatter=Nothing) @@ -363,7 +363,7 @@ spec = t5.row_count . should_equal 0 t6 = Data.read (enso_project.data / "simple_empty.csv") (Delimited "," headers=False skip_rows=3 row_limit=1000 value_formatter=Nothing) - t6.at "Column_1" . to_vector . should_equal ['7', '10'] + t6.at "Column 1" . to_vector . should_equal ['7', '10'] Test.specify "should check arguments" <| path = (enso_project.data / "simple_empty.csv") diff --git a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso index 086ebc14fa1c..3f57f964d4ba 100644 --- a/test/Table_Tests/src/IO/Delimited_Write_Spec.enso +++ b/test/Table_Tests/src/IO/Delimited_Write_Spec.enso @@ -347,7 +347,7 @@ spec = with_headers = base_format . 
with_headers expected_table_with_headers = Table.new [["A", [1,2,33,44]], ["B", [1.0,1.5,Nothing,0]], ["C", ["x","y","a","BB"]]] - expected_table_without_headers = expected_table_with_headers.rename_columns ["Column_1", "Column_2", "Column_3"] + expected_table_without_headers = expected_table_with_headers.rename_columns ["Column 1", "Column 2", "Column 3"] test_append initial_file_format=with_headers append_format=no_headers expected_table_with_headers test_append initial_file_format=with_headers append_format=base_format expected_table_with_headers diff --git a/test/Table_Tests/src/IO/Excel_Spec.enso b/test/Table_Tests/src/IO/Excel_Spec.enso index 9daa23cf7d65..239b449363c6 100644 --- a/test/Table_Tests/src/IO/Excel_Spec.enso +++ b/test/Table_Tests/src/IO/Excel_Spec.enso @@ -47,8 +47,8 @@ spec_fmt header file read_method sheet_count=5 = Test.specify "should gracefully handle duplicate column names and formulas" <| t = read_method file (Excel (Worksheet "Duplicate Columns")) - t.columns.map .name . should_equal ['Item', 'Price', 'Quantity', 'Price_1'] - t.at 'Price_1' . to_vector . should_equal [20, 40, 0, 60, 0, 10] + t.columns.map .name . should_equal ['Item', 'Price', 'Quantity', 'Price 1'] + t.at 'Price 1' . to_vector . should_equal [20, 40, 0, 60, 0, 10] Test.specify "should allow reading with cell range specified" <| t_1 = read_method file (Excel (Cell_Range "Simple!B:C")) @@ -292,8 +292,8 @@ spec_write suffix test_sheet_name = Test.specify 'should be able to append to a range by name after deduplication of names' <| out.delete_if_exists (enso_project.data / test_sheet_name) . 
copy_to out - extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['AA_1',[True, False]], ['BB_1', ['2022-01-20', '2022-01-21']]] - expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['AA_1',[True, False, False, True, False]]] + extra_another = Table.new [['AA', ['d', 'e']], ['BB',[4, 5]], ['AA 1',[True, False]], ['BB 1', ['2022-01-20', '2022-01-21']]] + expected = Table.new [['AA', ['f', 'g', 'h', 'd', 'e']], ['BB',[1, 2, 3, 4, 5]], ['AA 1',[True, False, False, True, False]]] extra_another.write out (Excel (Cell_Range "Random!S3")) on_existing_file=Existing_File_Behavior.Append on_problems=Report_Error . should_succeed written = out.read (Excel (Cell_Range "Random!S3")) . select_columns [0, 1, 2] written.should_equal expected @@ -725,10 +725,10 @@ spec = check_table (file.read (Excel (Cell_Range "Sheet1!B2"))) ["B"] [["A","B","C","D","E","F"]] Test.specify "Patchy table" <| - check_table (file.read (Excel (Cell_Range "Sheet1!D1"))) ["A", "B", "Column_1"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]] + check_table (file.read (Excel (Cell_Range "Sheet1!D1"))) ["A", "B", "Column 1"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]] check_table (file.read (Excel (Cell_Range "Sheet1!D2"))) ["D", "E", "F"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]] check_table (file.read (Excel (Cell_Range "Sheet1!E"))) ["B"] [[4,4,Nothing,Nothing,Nothing,Nothing]] - check_table (file.read (Excel (Cell_Range "Sheet1!E1"))) ["B", "Column_1"] [[4,4,Nothing], [6,Nothing,6]] + check_table (file.read (Excel (Cell_Range "Sheet1!E1"))) ["B", "Column 1"] [[4,4,Nothing], [6,Nothing,6]] check_table (file.read (Excel (Cell_Range "Sheet1!E2"))) ["E", "F"] [[4,4,Nothing], [6,Nothing,6]] Test.specify "Single cell" <| @@ -739,19 +739,19 @@ spec = check_table (file.read (Excel (Cell_Range "Sheet1!J1"))) ["J", "K", "L"] [["Just"],["Some"],["Headers"]] Test.specify "Growing table" <| - check_table (file.read (Excel (Cell_Range "Sheet1!N1"))) ["A", "Full", "Table", "Column_1"] 
[["Hello","World",Nothing,"Extend"],[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]] - check_table (file.read (Excel (Cell_Range "Sheet1!O1"))) ["Full", "Table", "Column_1"] [[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]] + check_table (file.read (Excel (Cell_Range "Sheet1!N1"))) ["A", "Full", "Table", "Column 1"] [["Hello","World",Nothing,"Extend"],[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]] + check_table (file.read (Excel (Cell_Range "Sheet1!O1"))) ["Full", "Table", "Column 1"] [[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]] check_table (file.read (Excel (Cell_Range "Sheet1!O2"))) ["O", "P", "Q"] [[1,Nothing,"Gap",3],[2,2,"Here",5],[Nothing,Nothing,"To","Hello"]] Test.specify "Should handle invalid headers with warnings" <| action = file.read (Excel (Cell_Range "Sheet1!D1")) on_problems=_ - tester = check_table _ ["A", "B", "Column_1"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]] + tester = check_table _ ["A", "B", "Column 1"] [[1,2,4], [4,4,Nothing], [6,Nothing,6]] problems = [Invalid_Output_Column_Names.Error [""]] Problems.test_problem_handling action problems tester Test.specify "Should handle duplicate headers with warnings" <| action = file.read (Excel (Cell_Range "Sheet1!S1")) on_problems=_ - tester = check_table _ ["DD", "DD_1"] [[1,3], [2,4]] + tester = check_table _ ["DD", "DD 1"] [[1,3], [2,4]] problems = [Duplicate_Output_Column_Names.Error ["DD"]] Problems.test_problem_handling action problems tester diff --git a/test/Table_Tests/src/In_Memory/Column_Spec.enso b/test/Table_Tests/src/In_Memory/Column_Spec.enso index 97426a5f9f2e..6318ad66d212 100644 --- a/test/Table_Tests/src/In_Memory/Column_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Column_Spec.enso @@ -66,10 +66,10 @@ spec = Test.group "Columns" <| r.to_vector . 
should_equal [0, 2, 4, 5, Nothing, 30] Test.specify "should allow to count duplicate value occurences" <| - c_1 = Column.from_vector "c_1" [0, 1, 2, 2, 1, 0, 2] + c_1 = Column.from_vector "c 1" [0, 1, 2, 2, 1, 0, 2] c_1.duplicate_count.to_vector.should_equal [0, 0, 0, 1, 1, 1, 2] - c_2 = Column.from_vector "c_2" ["foo", "bar", "foo", "baz", "bar"] + c_2 = Column.from_vector "c 2" ["foo", "bar", "foo", "baz", "bar"] c_2.duplicate_count.to_vector.should_equal [0, 0, 1, 0, 1] Test.specify "should result in correct Storage if operation allows it" <| diff --git a/test/Table_Tests/src/In_Memory/Join_Performance_Spec.enso b/test/Table_Tests/src/In_Memory/Join_Performance_Spec.enso index be94acae6712..5014a25555f1 100644 --- a/test/Table_Tests/src/In_Memory/Join_Performance_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Join_Performance_Spec.enso @@ -76,7 +76,7 @@ spec = t4 = r2.second . order_by "N" t4.row_count . should_equal n t4.at "X" . to_vector . should_equal lowers - t4.at "Right_X" . to_vector . should_equal uppers + t4.at "Right X" . to_vector . should_equal uppers t4.at "Z" . to_vector . should_equal <| 1.up_to n+1 . to_vector . 
reverse base_ms = r1.first.total_milliseconds diff --git a/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso b/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso index a68fd39f6695..e2b7079ce00a 100644 --- a/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Split_Tokenize_Spec.enso @@ -14,7 +14,7 @@ spec = cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing], [1, "c", "d", "ef"], [2, "gh", "ij", "u"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "bar 2"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3"] expected_rows t2 = t.split_to_columns "bar" "b" t2.should_equal expected @@ -30,7 +30,7 @@ spec = cols = [["foo", [0, 1, 2, 3]], ["bar", ["abc", "cbdbef", Nothing, "ghbijbu"]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing], [1, "c", "d", "ef"], [2, Nothing, Nothing, Nothing], [3, "gh", "ij", "u"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "bar 2"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3"] expected_rows t2 = t.split_to_columns "bar" "b" t2.should_equal expected @@ -47,7 +47,7 @@ spec = cols = [["foo", [0, 1, 2]], ["bar", ["a12b34r5", "23", "2r4r55"]]] t = Table.new cols expected_rows = [[0, "12", "34", "5"], [1, "23", Nothing, Nothing], [2, "2", "4", "55"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "bar 2"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3"] expected_rows t2 = t.tokenize_to_columns "bar" "\d+" t2.should_equal expected @@ -63,7 +63,7 @@ spec = cols = [["foo", [0, 1, 2, 3]], ["bar", ["a12b34r5", Nothing, "23", "2r4r55"]]] t = Table.new cols expected_rows = [[0, "12", "34", "5"], [1, Nothing, Nothing, Nothing], [2, "23", Nothing, Nothing], [3, "2", "4", "55"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "bar 2"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", 
"bar 3"] expected_rows t2 = t.tokenize_to_columns "bar" "\d+" t2.should_equal expected @@ -87,7 +87,7 @@ spec = cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]] t = Table.new cols expected_rows = [[0, "a1", "b12", "d50"], [1, "b10", "c20", Nothing]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "bar 2"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3"] expected_rows t2 = t.tokenize_to_columns "bar" "([a-z]).(\d+)" t2.should_equal expected @@ -103,7 +103,7 @@ spec = cols = [["foo", [0, 1, 2]], ["bar", ["aBqcE", "qcBr", "cCb"]]] t = Table.new cols expected_rows = [[0, "B", "c", Nothing], [1, "c", "B", Nothing], [2, "c", "C", "b"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "bar 2"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3"] expected_rows t2 = t.tokenize_to_columns "bar" "[bc]" case_sensitivity=Case_Sensitivity.Insensitive t2.should_equal expected @@ -132,7 +132,7 @@ spec = cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing, Nothing], [1, "c", "d", "ef", Nothing], [2, "gh", "ij", "u", Nothing]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "bar 2", "bar 3"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "bar 4"] expected_rows t2 = t.split_to_columns "bar" "b" column_count=4 t2.should_equal expected t2.at "bar 3" . value_type . is_text . 
should_be_true @@ -141,7 +141,7 @@ spec = cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]]] t = Table.new cols expected_rows = [[0, "a", "c"], [1, "c", "d"], [2, "gh", "ij"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2"] expected_rows action = t.split_to_columns "bar" "b" column_count=2 on_problems=_ tester = t-> t.should_equal expected problems = [Column_Count_Exceeded.Error 2 3] @@ -151,7 +151,7 @@ spec = cols = [["foo", [0, 1]], ["bar", ["r a-1, b-12,qd-50", "ab-10:bc-20c"]]] t = Table.new cols expected_rows = [[0, "a1", "b12", "d50"], [1, "b10", "c20", Nothing]] - expected = Table.from_rows ["foo", "bar 0", "bar 1"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2"] expected_rows action = t.tokenize_to_columns "bar" "([a-z]).(\d+)" column_count=2 on_problems=_ tester = t-> t.should_equal expected problems = [Column_Count_Exceeded.Error 2 3] @@ -161,7 +161,7 @@ spec = cols = [["foo", [0, 1, 2]], ["bar", ["ghbijbu", "cbdbef", "abc"]]] t = Table.new cols expected_rows = [[0, "gh", "ij", "u", Nothing], [1, "c", "d", "ef", Nothing], [2, "a", "c", Nothing, Nothing]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "bar 2", "bar 3"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "bar 4"] expected_rows t2 = t.split_to_columns "bar" "b" column_count=4 t2.should_equal expected t2.at "bar 3" . value_type . is_text . 
should_be_true @@ -198,7 +198,7 @@ spec = cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]], ["bar 1", ["a", "b", "c"]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing, "a"], [1, "c", "d", "ef", "b"], [2, "gh", "ij", "u", "c"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1_1", "bar 2", "bar 1"] expected_rows + expected = Table.from_rows ["foo", "bar 1 1", "bar 2", "bar 3", "bar 1"] expected_rows action = t.split_to_columns "bar" "b" on_problems=_ tester = t-> t.should_equal expected problems = [Duplicate_Output_Column_Names.Error ["bar 1"]] @@ -208,7 +208,7 @@ spec = cols = [["foo", [0, 1, 2]], ["bar", ["a12b34r5", "23", "2r4r55"]], ["bar 1", ["a", "b", "c"]]] t = Table.new cols expected_rows = [[0, "12", "34", "5", "a"], [1, "23", Nothing, Nothing, "b"], [2, "2", "4", "55", "c"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1_1", "bar 2", "bar 1"] expected_rows + expected = Table.from_rows ["foo", "bar 1 1", "bar 2", "bar 3", "bar 1"] expected_rows action = t.tokenize_to_columns "bar" "\d+" on_problems=_ tester = t-> t.should_equal expected problems = [Duplicate_Output_Column_Names.Error ["bar 1"]] @@ -219,14 +219,14 @@ spec = cols = [["foo", [0, 1, 2]], ["bar", ["abc", "cbdbef", "ghbijbu"]], ["baz", [1, 2, 3]]] t = Table.new cols expected_rows = [[0, "a", "c", Nothing, 1], [1, "c", "d", "ef", 2], [2, "gh", "ij", "u", 3]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "bar 2", "baz"] expected_rows + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "baz"] expected_rows t2 = t.split_to_columns "bar" "b" t2.should_equal expected Test.group "Table.parse_to_columns" <| Test.specify "can parse to columns" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "baz"] [["x", 1, 2, "y"], ["x", 3, 4, "y"], ["x", 5, 6, "y"], ["xx", 4, 8, "yy"], ["xx", 5, 9, "yy"]] + expected = Table.from_rows ["foo", "bar 1", "bar 2", 
"baz"] [["x", 1, 2, "y"], ["x", 3, 4, "y"], ["x", 5, 6, "y"], ["xx", 4, 8, "yy"], ["xx", 5, 9, "yy"]] actual = t.parse_to_columns "bar" "(\d)(\d)" actual.should_equal expected @@ -244,31 +244,31 @@ spec = Test.specify "non-participating groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "q1", "y"], ["xx", "qp", "yy"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "bar 2", "baz"] [["x", "1", 1, Nothing, "y"], ["xx", "p", Nothing, "p", "yy"]] + expected = Table.from_rows ["foo", "bar 1", "bar 2", "bar 3", "baz"] [["x", "1", 1, Nothing, "y"], ["xx", "p", Nothing, "p", "yy"]] actual = t.parse_to_columns "bar" "q((\d)|([a-z]))" actual.should_equal expected Test.specify "case-insensitive" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "qq", "y"], ["xx", "qQ", "yy"]] - expected = Table.from_rows ["foo", "bar 0", "baz"] [["x", "q", "y"], ["xx", "Q", "yy"]] + expected = Table.from_rows ["foo", "bar 1", "baz"] [["x", "q", "y"], ["xx", "Q", "yy"]] actual = t.parse_to_columns "bar" "q(q)" case_sensitivity=Case_Sensitivity.Insensitive actual.should_equal expected Test.specify "no post-parsing" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "baz"] [["x", "1", "2", "y"], ["x", "3", "4", "y"], ["x", "5", "6", "y"], ["xx", "4", "8", "yy"], ["xx", "5", "9", "yy"]] + expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [["x", "1", "2", "y"], ["x", "3", "4", "y"], ["x", "5", "6", "y"], ["xx", "4", "8", "yy"], ["xx", "5", "9", "yy"]] actual = t.parse_to_columns "bar" "(\d)(\d)" parse_values=False actual.should_equal expected Test.specify "column name clash" <| t = Table.from_rows ["foo", "bar", "bar 1"] [["x", "12 34p q56", "y"], ["xx", "a48 59b", "yy"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1_1", "bar 1"] [["x", 1, 2, "y"], ["x", 3, 4, "y"], ["x", 5, 6, "y"], ["xx", 4, 8, "yy"], ["xx", 5, 9, "yy"]] + expected = Table.from_rows ["foo", "bar 
1 1", "bar 2", "bar 1"] [["x", 1, 2, "y"], ["x", 3, 4, "y"], ["x", 5, 6, "y"], ["xx", 4, 8, "yy"], ["xx", 5, 9, "yy"]] actual = t.parse_to_columns "bar" "(\d)(\d)" actual.should_equal expected Test.specify "column and group name clash" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "123", "y"]] - expected = Table.from_rows ["foo", "bar", "baz_1", "quux", "baz"] [["x", 1, 2, 3, "y"]] + expected = Table.from_rows ["foo", "bar", "baz 1", "quux", "baz"] [["x", 1, 2, 3, "y"]] actual = t.parse_to_columns "bar" "(?\d)(?\d)(?\d)" actual.should_equal expected @@ -280,13 +280,13 @@ spec = Test.specify "empty table, with regex groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] . take 0 - expected = Table.from_rows ["foo", "bar 0", "bar 1", "baz"] [["x", "a", "a", "y"]] . take 0 + expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [["x", "a", "a", "y"]] . take 0 actual = t.parse_to_columns "bar" "(\d)(\d)" actual.should_equal expected Test.specify "empty table, with named and unnamed regex groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] . take 0 - expected = Table.from_rows ["foo", "quux", "bar 0", "foo_1", "bar 1", "baz"] [["x", "a", "a", "a", "a", "y"]] . take 0 + expected = Table.from_rows ["foo", "quux", "bar 1", "foo 1", "bar 2", "baz"] [["x", "a", "a", "a", "a", "y"]] . 
take 0 actual = t.parse_to_columns "bar" "(?)(\d)(?\d)(\d)" actual.should_equal expected @@ -298,13 +298,13 @@ spec = Test.specify "input with no matches, with regex groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] - expected = Table.from_rows ["foo", "bar 0", "bar 1", "baz"] [] + expected = Table.from_rows ["foo", "bar 1", "bar 2", "baz"] [] actual = t.parse_to_columns "bar" "(\d)(\d)" actual.should_equal expected Test.specify "input with no matches, with named and unnamed regex groups" <| t = Table.from_rows ["foo", "bar", "baz"] [["x", "a", "y"]] - expected = Table.from_rows ["foo", "quux", "bar 0", "foo_1", "bar 1", "baz"] [] + expected = Table.from_rows ["foo", "quux", "bar 1", "foo 1", "bar 2", "baz"] [] actual = t.parse_to_columns "bar" "(?)(\d)(?\d)(\d)" actual.should_equal expected diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index a6af3f7d817e..43384190d364 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -541,7 +541,7 @@ spec = c_4 = ['Z', [True, False, True]] table = Table.new [c_0, c_1, c_2, c_3, c_4] action = table.use_first_row_as_names on_problems=_ - tester = expect_column_names ["Column_1", "1980-01-01", "1", "5.3", "True"] + tester = expect_column_names ["Column 1", "1980-01-01", "1", "5.3", "True"] problems = [Invalid_Output_Column_Names.Error [""]] Problems.test_problem_handling action problems tester @@ -553,7 +553,7 @@ spec = c_4 = ['Z', [True, False, True]] table = Table.new [c_0, c_1, c_2, c_3, c_4] action = table.use_first_row_as_names on_problems=_ - tester = expect_column_names ["A", "1980-01-01", "Column_1", "5.3", "True"] + tester = expect_column_names ["A", "1980-01-01", "Column 1", "5.3", "True"] problems = [Invalid_Output_Column_Names.Error [Nothing]] Problems.test_problem_handling action problems tester @@ -565,7 +565,7 @@ spec = c_4 = ['Z', [True, False, True]] table = Table.new 
[c_0, c_1, c_2, c_3, c_4] action = table.use_first_row_as_names on_problems=_ - tester = expect_column_names ["Column_1", "1980-01-01", "Column_2", "5.3", "True"] + tester = expect_column_names ["Column 1", "1980-01-01", "Column 2", "5.3", "True"] problems = [Invalid_Output_Column_Names.Error ["", Nothing]] Problems.test_problem_handling action problems tester @@ -576,7 +576,7 @@ spec = c_3 = ['C', ["A", "B", "C"]] table = Table.new [c_0, c_1, c_2, c_3] action = table.use_first_row_as_names on_problems=_ - tester = expect_column_names ["A", "A_1", "A_2", "A_3"] + tester = expect_column_names ["A", "A 1", "A 2", "A 3"] problems = [Duplicate_Output_Column_Names.Error ["A", "A", "A"]] Problems.test_problem_handling action problems tester diff --git a/test/Table_Tests/src/Util.enso b/test/Table_Tests/src/Util.enso index aa20695bda50..3d586781b417 100644 --- a/test/Table_Tests/src/Util.enso +++ b/test/Table_Tests/src/Util.enso @@ -4,27 +4,36 @@ from Standard.Table import Table, Column from Standard.Test import Test import Standard.Test.Extensions +import Standard.Test.Test_Result.Test_Result polyglot java import org.enso.base_test_helpers.FileSystemHelper -Table.should_equal self expected = case expected of - _ : Table -> - tables_equal t0 t1 = - same_headers = (t0.columns.map .name) == (t1.columns.map .name) - same_columns = (t0.columns.map .to_vector) == (t1.columns.map .to_vector) - same_headers && same_columns - equal = tables_equal self expected - if equal.not then - msg = 'Tables differ.\nActual:\n' + self.display + '\nExpected:\n' + expected.display - Test.fail msg - _ -> Test.fail "Got a Table, but expected a "+expected.to_display_text +Table.should_equal : Any -> Integer -> Test_Result +Table.should_equal self expected frames_to_skip=0 = + loc = Meta.get_source_location 1+frames_to_skip + case expected of + _ : Table -> + tables_equal t0 t1 = + same_headers = (t0.columns.map .name) == (t1.columns.map .name) + same_columns = (t0.columns.map .to_vector) == 
(t1.columns.map .to_vector) + same_headers && same_columns + equal = tables_equal self expected + if equal.not then + msg = 'Tables differ at '+loc+'.\nActual:\n' + self.display + '\nExpected:\n' + expected.display + Test.fail msg + _ -> Test.fail "Got a Table, but expected a "+expected.to_display_text+' (at '+loc+').' -Column.should_equal self expected = - if self.name != expected.name then - Test.fail "Expected column name "+expected.name+", but got "+self.name+"." - if self.length != expected.length then - Test.fail "Expected column length "+expected.length.to_text+", but got "+self.length.to_text+"." - self.to_vector.should_equal expected.to_vector +Column.should_equal : Any -> Integer -> Test_Result +Column.should_equal self expected frames_to_skip=0 = + loc = Meta.get_source_location 1+frames_to_skip + case expected of + _ : Column -> + if self.name != expected.name then + Test.fail "Expected column name "+expected.name+", but got "+self.name+" (at "+loc+")." + if self.length != expected.length then + Test.fail "Expected column length "+expected.length.to_text+", but got "+self.length.to_text+" (at "+loc+")." + self.to_vector.should_equal expected.to_vector + _ -> Test.fail "Got a Column, but expected a "+expected.to_display_text+' (at '+loc+').' normalize_lines string line_separator=Line_Ending_Style.Unix.to_text newline_at_end=True = case newline_at_end of diff --git a/test/Tests/src/Data/Time/Date_Spec.enso b/test/Tests/src/Data/Time/Date_Spec.enso index edc0f589baf5..6dd5f6b67fd6 100644 --- a/test/Tests/src/Data/Time/Date_Spec.enso +++ b/test/Tests/src/Data/Time/Date_Spec.enso @@ -229,6 +229,22 @@ spec_with name create_new_date parse_date = (create_new_date 2000 7 1).end_of Date_Period.Quarter . should_equal (Date.new 2000 9 30) (create_new_date 2000 6 30).end_of Date_Period.Quarter . should_equal (Date.new 2000 6 30) + Test.specify "should allow to compute the number of days until a date" <| + create_new_date 2000 2 1 . 
days_until (create_new_date 2000 2 1) . should_equal 0 + create_new_date 2000 2 1 . days_until (create_new_date 2000 2 2) . should_equal 1 + create_new_date 2000 2 2 . days_until (create_new_date 2000 2 1) . should_equal -1 + create_new_date 2001 3 1 . days_until (create_new_date 2001 4 1) . should_equal 31 + create_new_date 2000 3 1 . days_until (create_new_date 2001 3 1) . should_equal 365 + create_new_date 2001 3 1 . days_until (create_new_date 2000 3 1) . should_equal -365 + + Test.specify "should allow to compute the number of days until a date including the end date" <| + create_new_date 2000 2 1 . days_until (create_new_date 2000 2 1) include_end_date=True . should_equal 1 + create_new_date 2000 2 1 . days_until (create_new_date 2000 2 2) include_end_date=True . should_equal 2 + create_new_date 2000 2 2 . days_until (create_new_date 2000 2 1) include_end_date=True . should_equal -2 + create_new_date 2001 3 1 . days_until (create_new_date 2001 4 1) include_end_date=True . should_equal 32 + create_new_date 2000 3 1 . days_until (create_new_date 2001 3 1) include_end_date=True . should_equal 366 + create_new_date 2001 3 1 . days_until (create_new_date 2000 3 1) include_end_date=True . should_equal -366 + Test.specify "should allow to compute the number of working days until a later date" <| # 2000-2-1 is a Tuesday create_new_date 2000 2 1 . work_days_until (create_new_date 2000 2 1) . 
should_equal 0 diff --git a/test/Visualization_Tests/src/Lazy_Table_Spec.enso b/test/Visualization_Tests/src/Lazy_Table_Spec.enso index 293f2bc6465c..e61e92266a2d 100644 --- a/test/Visualization_Tests/src/Lazy_Table_Spec.enso +++ b/test/Visualization_Tests/src/Lazy_Table_Spec.enso @@ -16,8 +16,8 @@ sample_table = col1 = ["foo", [123456789,23456789,987654321]] col2 = ["bar", [4,5,6]] col3 = ["Baz", [7,8,9]] - col4 = ["foo_1", [10,11,12]] - col5 = ["foo_2", [13,14,15]] + col4 = ["foo 1", [10,11,12]] + col5 = ["foo 2", [13,14,15]] col6 = ["ab.+123", [16,17,18]] col7 = ["abcd123", ["19",20, t1]] Table.new [col1, col2, col3, col4, col5, col6, col7] From 069fcf39285686a465fe42333883e72223220318 Mon Sep 17 00:00:00 2001 From: "Stijn (\"stain\") Seghers" Date: Mon, 8 May 2023 12:12:26 +0200 Subject: [PATCH 19/28] Show error pop-up when failing to rename a project (#6366) Closes #5065: when a project can't be renamed, it now shows an error pop-up and stays in edit mode. https://user-images.githubusercontent.com/607786/234025360-6761f7d6-c13c-479c-a0ba-8af5d81f6e87.mp4 --- CHANGELOG.md | 4 + Cargo.lock | 1 + .../engine-protocol/src/common/error.rs | 3 + app/gui/src/model/project/synchronized.rs | 16 +- app/gui/src/presenter/project.rs | 19 ++- app/gui/view/examples/interface/src/lib.rs | 18 +++ app/gui/view/graph-editor/Cargo.toml | 1 + .../graph-editor/src/component/breadcrumbs.rs | 13 +- .../src/component/breadcrumbs/project_name.rs | 42 +++-- app/gui/view/src/debug_mode_popup.rs | 148 ++---------------- app/gui/view/src/lib.rs | 1 + app/gui/view/src/popup.rs | 143 +++++++++++++++++ app/gui/view/src/project.rs | 14 ++ build-config.yaml | 2 +- integration-test/tests/graph_editor.rs | 4 +- 15 files changed, 274 insertions(+), 155 deletions(-) create mode 100644 app/gui/view/src/popup.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index 7308d7b33a4b..eab22f3ab5dd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -136,6 +136,9 @@ such as dropdown can now be placed in the node and 
affect the code text flow. - [The IDE UI element for selecting the execution mode of the project is now sending messages to the backend.][6341]. +- [Feedback when renaming a project][6366]. When the user tries to rename the + project to an invalid name, a helpful error message is shown and the text + field stays the same as to give the user the opportunity to fix the mistake. - [Area selectionof nodes no longer takes into account the visualisation that belongs to the node.][6487]. - [List Editor Widget][6470]. Now you can edit lists by clicking buttons on @@ -209,6 +212,7 @@ [6035]: https://github.com/enso-org/enso/pull/6035 [6097]: https://github.com/enso-org/enso/pull/6097 [6097]: https://github.com/enso-org/enso/pull/6341 +[6366]: https://github.com/enso-org/enso/pull/6366 [6487]: https://github.com/enso-org/enso/pull/6487 [6341]: https://github.com/enso-org/enso/pull/6341 [6470]: https://github.com/enso-org/enso/pull/6470 diff --git a/Cargo.lock b/Cargo.lock index ad4c0a5a1be2..7dbc0c471656 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4342,6 +4342,7 @@ dependencies = [ "js-sys", "nalgebra", "ordered-float", + "parser", "serde", "serde-wasm-bindgen", "serde_json", diff --git a/app/gui/controller/engine-protocol/src/common/error.rs b/app/gui/controller/engine-protocol/src/common/error.rs index ea842f8c46c6..adbe8c46aadd 100644 --- a/app/gui/controller/engine-protocol/src/common/error.rs +++ b/app/gui/controller/engine-protocol/src/common/error.rs @@ -30,4 +30,7 @@ pub mod code { /// Signals that requested project is already under version control. pub const VCS_ALREADY_EXISTS: i64 = 1005; + + /// Signals that project name is invalid. 
+ pub const PROJECT_NAME_INVALID: i64 = 4001; } diff --git a/app/gui/src/model/project/synchronized.rs b/app/gui/src/model/project/synchronized.rs index 4967efe096b5..a9c51a071f4e 100644 --- a/app/gui/src/model/project/synchronized.rs +++ b/app/gui/src/model/project/synchronized.rs @@ -224,6 +224,13 @@ async fn update_modules_on_file_change( #[fail(display = "Project Manager is unavailable.")] pub struct ProjectManagerUnavailable; +/// An error signalling the project name was invalid. +#[derive(Clone, Debug, Fail)] +#[fail(display = "The project name is not allowed: {}", cause)] +pub struct ProjectNameInvalid { + cause: String, +} + #[allow(missing_docs)] #[derive(Clone, Copy, Debug, Fail)] #[fail(display = "Project renaming is not available in read-only mode.")] @@ -720,7 +727,14 @@ impl model::project::API for Project { self.project_manager.as_ref().ok_or(ProjectManagerUnavailable)?; let project_id = self.properties.borrow().id; let project_name = ProjectName::new_unchecked(name); - project_manager.rename_project(&project_id, &project_name).await?; + project_manager.rename_project(&project_id, &project_name).await.map_err( + |error| match error { + RpcError::RemoteError(cause) + if cause.code == code::PROJECT_NAME_INVALID => + failure::Error::from(ProjectNameInvalid { cause: cause.message }), + error => error.into(), + }, + )?; self.properties.borrow_mut().name.project = referent_name.clone_ref(); self.execution_contexts.rename_project(old_name, referent_name); Ok(()) diff --git a/app/gui/src/presenter/project.rs b/app/gui/src/presenter/project.rs index 2805043bf9b5..01ed7c3efddb 100644 --- a/app/gui/src/presenter/project.rs +++ b/app/gui/src/presenter/project.rs @@ -4,6 +4,7 @@ use crate::prelude::*; use crate::executor::global::spawn_stream_handler; +use crate::model::project::synchronized::ProjectNameInvalid; use crate::presenter; use crate::presenter::graph::ViewNodeId; @@ -159,11 +160,23 @@ impl Model { if self.controller.model.name() != name.as_ref() { let 
project = self.controller.model.clone_ref(); let breadcrumbs = self.view.graph().model.breadcrumbs.clone_ref(); + let popup = self.view.popup().clone_ref(); let name = name.into(); executor::global::spawn(async move { - if let Err(e) = project.rename_project(name).await { - error!("The project couldn't be renamed: {e}"); - breadcrumbs.cancel_project_name_editing.emit(()); + if let Err(error) = project.rename_project(name).await { + let error_message = match error.downcast::() { + Ok(error) => error.to_string(), + Err(error) => { + // Other errors aren't geared towards users, so display a generic + // message. + let prefix = "The project couldn't be renamed".to_string(); + error!("{prefix}: {error}"); + prefix + } + }; + popup.set_label.emit(error_message); + // Reset name to old, valid value + breadcrumbs.input.project_name.emit(project.name()); } }); } diff --git a/app/gui/view/examples/interface/src/lib.rs b/app/gui/view/examples/interface/src/lib.rs index d8a9639068be..a8c275ebb1ff 100644 --- a/app/gui/view/examples/interface/src/lib.rs +++ b/app/gui/view/examples/interface/src/lib.rs @@ -19,6 +19,7 @@ use ensogl::prelude::*; use enso_frp as frp; use ensogl::application::Application; +use ensogl::control::io::mouse; use ensogl::display::object::ObjectOps; use ensogl::display::shape::StyleWatch; use ensogl::gui::text; @@ -260,6 +261,23 @@ fn init(app: &Application) { graph_editor.set_available_execution_environments(make_dummy_execution_environments()); + // === Pop-up === + + // Create node to trigger a pop-up. 
+ let node_id = graph_editor.model.add_node(); + graph_editor.frp.set_node_position.emit((node_id, Vector2(-300.0, -100.0))); + let expression = expression_mock_string("Click me to show a pop-up"); + graph_editor.frp.set_node_expression.emit((node_id, expression)); + let node = graph_editor.nodes().all.get_cloned_ref(&node_id).unwrap(); + + let popup = project_view.popup(); + let network = node.network(); + let node_clicked = node.on_event::(); + frp::extend! { network + eval_ node_clicked (popup.set_label.emit("This is a test pop-up.")); + } + + // === Rendering === // let tgt_type = dummy_type_generator.get_dummy_type(); diff --git a/app/gui/view/graph-editor/Cargo.toml b/app/gui/view/graph-editor/Cargo.toml index 5566d5961139..6b4bd34dc32f 100644 --- a/app/gui/view/graph-editor/Cargo.toml +++ b/app/gui/view/graph-editor/Cargo.toml @@ -29,6 +29,7 @@ indexmap = "1.9.2" js-sys = { workspace = true } nalgebra = { workspace = true } ordered-float = { workspace = true } +parser = { path = "../../language/parser" } serde = { version = "1.0", features = ["derive"] } serde-wasm-bindgen = { workspace = true } serde_json = { workspace = true } diff --git a/app/gui/view/graph-editor/src/component/breadcrumbs.rs b/app/gui/view/graph-editor/src/component/breadcrumbs.rs index d30ad663a546..17ba6ff12ff3 100644 --- a/app/gui/view/graph-editor/src/component/breadcrumbs.rs +++ b/app/gui/view/graph-editor/src/component/breadcrumbs.rs @@ -81,12 +81,6 @@ ensogl::define_endpoints! { /// Signalizes a mouse press happened outside the breadcrumb panel. It's used to finish /// project renaming, committing the name in text field. outside_press (), - /// Signalizes we want to cancel project name renaming, bringing back the project name - /// before editing. - cancel_project_name_editing (), - /// Signalizes we want to start editing the project name. Adds a cursor to the text edit - /// field at the mouse position. - start_project_name_editing (), /// Sets the project name. 
project_name (String), /// Select the breadcrumb by its index. @@ -126,6 +120,8 @@ ensogl::define_endpoints! { project_name_hovered (bool), /// Indicates whether the project name was clicked. project_mouse_down (), + /// Signalizes an error if the user tried to rename the project to an invalid name. + project_name_error (String), /// Indicates if the read-only mode is enabled. read_only(bool), } @@ -478,7 +474,6 @@ impl Breadcrumbs { eval frp.input.project_name((name) model.project_name.set_name.emit(name)); frp.source.project_name <+ model.project_name.output.name; - eval_ frp.input.start_project_name_editing( model.project_name.start_editing.emit(()) ); eval frp.ide_text_edit_mode((value) model.project_name.ide_text_edit_mode.emit(value) ); frp.source.project_name_hovered <+ model.project_name.is_hovered; @@ -486,10 +481,12 @@ impl Breadcrumbs { eval frp.input.set_project_changed((v) model.project_name.set_project_changed(v)); + frp.source.project_name_error <+ model.project_name.error; + + // === User Interaction === frp.select_breadcrumb <+ model.project_name.frp.output.mouse_down.constant(0); - model.project_name.frp.cancel_editing <+ frp.cancel_project_name_editing; model.project_name.frp.outside_press <+ frp.outside_press; popped_count <= frp.output.breadcrumb_select.map(|selected| (0..selected.0).collect_vec()); diff --git a/app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs b/app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs index 2f5e39beb61f..431d374ffbd5 100644 --- a/app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs +++ b/app/gui/view/graph-editor/src/component/breadcrumbs/project_name.rs @@ -19,6 +19,7 @@ use ensogl::DEPRECATED_Animation; use ensogl_component::text; use ensogl_component::text::formatting::Size as TextSize; use ensogl_hardcoded_theme::graph_editor::breadcrumbs as breadcrumbs_theme; +use parser::Parser; @@ -66,8 +67,8 @@ ensogl::define_endpoints_2! 
{ cancel_editing (), /// Enable editing the project name field and add a cursor at the mouse position. start_editing (), - /// Commit current project name. - commit (), + /// Try committing current project name. + try_commit (), outside_press (), /// Indicates that this is the currently active breadcrumb. select (), @@ -90,6 +91,7 @@ ensogl::define_endpoints_2! { edit_mode (bool), selected (bool), is_hovered (bool), + error (String), read_only (bool), } } @@ -217,6 +219,24 @@ impl ProjectNameModel { self.commit(name); } + /// Confirm the given name as the current project name if it's valid. + fn try_commit(&self, name: impl Str) -> Result<(), String> { + let name = name.into(); + Self::validate(&name) + .map_err(|error| format!("The project couldn't be renamed. {error}"))?; + self.commit(name); + Ok(()) + } + + /// Check whether the given name is a valid project name. + fn validate(name: impl Str) -> Result<(), String> { + let parser = Parser::new(); + match parser.parse_line_ast(name).map(|ast| ast.shape().clone()) { + Ok(ast::Shape::Cons(_)) => Ok(()), + _ => Err("The project name should use the 'Upper_Snake' case.".to_owned()), + } + } + /// Confirm the given name as the current project name. 
fn commit>(&self, name: T) { let name = name.into(); @@ -323,11 +343,15 @@ impl ProjectName { // === Commit === - do_commit <- any(&frp.commit,&frp.outside_press).gate(&frp.output.edit_mode); - commit_text <- text_content.sample(&do_commit); - output.name <+ commit_text; - eval commit_text((text) model.commit(text)); - on_commit <- commit_text.constant(()); + try_commit <- any(&frp.try_commit, &frp.outside_press).gate(&frp.output.edit_mode); + commit_result <- try_commit.map2(&text_content, f!([model] (_, text) { + let result = model.try_commit(text); + (result.as_ref().ok().copied(), result.err()) + })); + commit_success <- commit_result.filter_map(|(ok, _)| *ok); + commit_failure <- commit_result.filter_map(|(_, error)| error.clone()); + output.name <+ text_content.sample(&commit_success); + output.error <+ commit_failure; not_selected <- frp.output.selected.map(|selected| !selected); on_deselect <- not_selected.gate(¬_selected).constant(()); @@ -337,7 +361,7 @@ impl ProjectName { // === Selection === output.selected <+ frp.select.to_true(); - set_inactive <- any(&frp.deselect,&on_commit); + set_inactive <- any(&frp.deselect, &commit_success); eval_ set_inactive ([text,model] { text.deprecated_set_focus(false); text.remove_all_cursors(); @@ -409,7 +433,7 @@ impl View for ProjectName { fn default_shortcuts() -> Vec { use shortcut::ActionType::*; [ - (Press, "!read_only", "enter", "commit"), + (Press, "!read_only", "enter", "try_commit"), (Release, "", "escape", "cancel_editing"), (DoublePress, "is_hovered & !read_only", "left-mouse-button", "start_editing"), ] diff --git a/app/gui/view/src/debug_mode_popup.rs b/app/gui/view/src/debug_mode_popup.rs index 65adbcf74b09..f7e2bebab084 100644 --- a/app/gui/view/src/debug_mode_popup.rs +++ b/app/gui/view/src/debug_mode_popup.rs @@ -1,15 +1,13 @@ -//! Text message on top of the screen that signals about enabling/disabling Debug Mode of Graph -//! Editor. +//! 
A pop-up that signals about enabling/disabling Debug Mode of Graph Editor. use crate::prelude::*; -use ensogl::display::shape::*; -use enso_frp as frp; -use ensogl::animation::delayed::DelayedAnimation; +use crate::popup; + use ensogl::application::Application; use ensogl::display; -use ensogl::Animation; -use ensogl_component::label::Label; +use frp::stream::EventOutput; +use frp::HasLabel; @@ -28,110 +26,7 @@ macro_rules! define_debug_mode_shortcut { } define_debug_mode_shortcut!("ctrl shift d"); const DEBUG_MODE_DISABLED: &str = "Debug Mode disabled."; - const LABEL_VISIBILITY_DELAY_MS: f32 = 3_000.0; -const LABEL_PADDING_TOP: f32 = 50.0; - - - -// ================== -// === PopupLabel === -// ================== - -/// Text label that disappears after a predefined delay. -#[derive(Debug, Clone, CloneRef)] -pub struct PopupLabel { - label: Label, - network: frp::Network, - delay_animation: DelayedAnimation, - /// Show the Popup with the given message. - pub show: frp::Source, -} - -impl display::Object for PopupLabel { - fn display_object(&self) -> &display::object::Instance { - self.label.display_object() - } -} - -impl PopupLabel { - /// Constructor. - pub fn new(app: &Application) -> Self { - let network = frp::Network::new("PopupLabel"); - let label = Label::new(app); - label.set_opacity(0.0); - let background_layer = &app.display.default_scene.layers.panel; - let text_layer = &app.display.default_scene.layers.panel_text; - label.set_layers(background_layer, text_layer); - - let opacity_animation = Animation::new(&network); - network.store(&opacity_animation); - let delay_animation = DelayedAnimation::new(&network); - delay_animation.set_delay(0.0); - delay_animation.set_duration(0.0); - network.store(&delay_animation); - - frp::extend! 
{ network - show <- source::(); - - eval show ([label, delay_animation](text) { - label.set_content(text); - delay_animation.reset(); - delay_animation.start(); - }); - - opacity_animation.target <+ show.constant(1.0); - opacity_animation.target <+ delay_animation.on_end.constant(0.0); - label.set_opacity <+ opacity_animation.value; - } - - Self { label, network, show, delay_animation } - } - - /// Set a delay in milliseconds after which the label will disappear. - pub fn set_delay(&self, delay: f32) { - self.delay_animation.set_delay(delay); - } -} - - - -// ============= -// === Model === -// ============= - -#[derive(Debug, Clone, CloneRef)] -struct Model { - display_object: display::object::Instance, - label: PopupLabel, -} - -impl Model { - /// Constructor. - pub fn new(app: &Application) -> Self { - let display_object = display::object::Instance::new(); - let label = PopupLabel::new(app); - label.set_delay(LABEL_VISIBILITY_DELAY_MS); - display_object.add_child(&label); - - Self { display_object, label } - } - - /// Show "Debug Mode enabled" label. - pub fn show_enabled_label(&self) { - self.label.show.emit(String::from(DEBUG_MODE_ENABLED)); - } - - /// Show "Debug Mode disabled" label. - pub fn show_disabled_label(&self) { - self.label.show.emit(String::from(DEBUG_MODE_DISABLED)); - } - - /// Return the height of the label. - pub fn label_height(&self) -> f32 { - self.label.label.size.value().y - } -} @@ -155,48 +50,39 @@ ensogl::define_endpoints! { // === View === // ============ -/// Text message on top of the screen that signals about enabling/disabling Debug Mode of Graph -/// Editor. +/// A pop-up that signals about enabling/disabling Debug Mode of Graph Editor. #[derive(Debug, Clone, CloneRef)] pub struct View { frp: Frp, - model: Model, + popup: popup::View, } impl View { /// Constructor. 
pub fn new(app: &Application) -> Self { let frp = Frp::new(); - let model = Model::new(app); let network = &frp.network; + let popup = popup::View::new(app); + + popup.set_delay(LABEL_VISIBILITY_DELAY_MS); frp::extend! { network - init <- source_(); - let shape = app.display.default_scene.shape(); - _eval <- all_with(shape, &init, f!([model](scene_size, _init) { - let half_height = scene_size.height / 2.0; - let label_height = model.label_height(); - let pos_y = half_height - LABEL_PADDING_TOP - label_height / 2.0; - model.display_object.set_y(pos_y); - })); - - eval_ frp.enabled(model.show_enabled_label()); - eval_ frp.disabled(model.show_disabled_label()); + eval_ frp.enabled (popup.set_label(DEBUG_MODE_ENABLED.to_string())); + eval_ frp.disabled (popup.set_label(DEBUG_MODE_DISABLED.to_string())); } - init.emit(()); - Self { frp, model } + Self { frp, popup } } - /// Get the label of the popup. - pub fn label(&self) -> &PopupLabel { - &self.model.label + /// Get the FRP node for the content of the pop-up, for testing purposes. + pub fn content_frp_node(&self) -> impl EventOutput + HasLabel { + self.popup.content_frp_node() } } impl display::Object for View { fn display_object(&self) -> &display::object::Instance { - &self.model.display_object + self.popup.display_object() } } diff --git a/app/gui/view/src/lib.rs b/app/gui/view/src/lib.rs index ab816ae54bc7..96853628a874 100644 --- a/app/gui/view/src/lib.rs +++ b/app/gui/view/src/lib.rs @@ -32,6 +32,7 @@ #[allow(clippy::option_map_unit_fn)] pub mod code_editor; pub mod debug_mode_popup; +pub mod popup; pub mod project; pub mod project_list; pub mod root; diff --git a/app/gui/view/src/popup.rs b/app/gui/view/src/popup.rs new file mode 100644 index 000000000000..a7b8ec556869 --- /dev/null +++ b/app/gui/view/src/popup.rs @@ -0,0 +1,143 @@ +//! A temporary text message on top of the screen. 
+ +use crate::prelude::*; + +use ensogl::animation::delayed::DelayedAnimation; +use ensogl::application::Application; +use ensogl::display; +use ensogl::Animation; +use ensogl_component::label::Label; +use frp::stream::EventOutput; +use frp::HasLabel; + + + +// ================= +// === Constants === +// ================= + +const PADDING_TOP: f32 = 50.0; +const DEFAULT_DELAY_MS: f32 = 5_000.0; + + + +// ============= +// === Model === +// ============= + +/// Text label that disappears after a predefined delay. +#[derive(Debug, Clone, CloneRef)] +struct Model { + label: Label, + opacity_animation: Animation, + delay_animation: DelayedAnimation, +} + +impl Model { + /// Constructor. + fn new(app: &Application, network: &frp::Network) -> Self { + let label = Label::new(app); + label.set_opacity(0.0); + // Add the pop-up to the panel layer so its position is fixed. The default for Label is the + // tooltip layer, which moves when panning. + let scene = &app.display.default_scene; + let background_layer = &scene.layers.panel; + let text_layer = &scene.layers.panel_text; + label.set_layers(background_layer, text_layer); + + let opacity_animation = Animation::new(network); + network.store(&opacity_animation); + let delay_animation = DelayedAnimation::new(network); + delay_animation.set_delay(DEFAULT_DELAY_MS); + delay_animation.set_duration(0.0); + network.store(&delay_animation); + + Self { label, opacity_animation, delay_animation } + } + + /// Set the message. + fn set_label(&self, content: String) { + self.label.set_content(content); + self.delay_animation.reset(); + self.delay_animation.start(); + } + + /// Set the position of the label based on the height of the scene. 
+ fn set_label_position(&self, scene_height: f32) { + let half_height = scene_height / 2.0; + let label_height = self.label.size.value().y; + let pos_y = half_height - PADDING_TOP - label_height / 2.0; + self.label.display_object().set_y(pos_y); + } + + /// Set a delay in milliseconds after which the label will disappear. + fn set_delay(&self, delay: f32) { + self.delay_animation.set_delay(delay); + } +} + + + +// =========== +// === FRP === +// =========== + +ensogl::define_endpoints! { + Input { + set_label (String), + set_delay (f32), + } + Output {} +} + + + +// ============ +// === View === +// ============ + +/// A temporary text message on top of the screen. +#[derive(Debug, Clone, CloneRef, Deref)] +pub struct View { + #[deref] + frp: Frp, + model: Model, +} + +impl View { + /// Constructor. + pub fn new(app: &Application) -> Self { + let frp = Frp::new(); + let network = &frp.network; + let model = Model::new(app, network); + + frp::extend! { network + init <- source_(); + let scene_shape = app.display.default_scene.shape(); + _eval <- all_with(scene_shape, &init, f!((scene_shape, _init) + model.set_label_position(scene_shape.height); + )); + + model.opacity_animation.target <+ frp.set_label.constant(1.0); + model.opacity_animation.target <+ model.delay_animation.on_end.constant(0.0); + model.label.set_opacity <+ model.opacity_animation.value; + + eval frp.set_label ((content) model.set_label(content.clone())); + eval frp.set_delay ((delay) model.set_delay(*delay)); + } + init.emit(()); + + Self { frp, model } + } + + /// Get the FRP node for the content of the pop-up, for testing purposes. 
+ pub fn content_frp_node(&self) -> impl EventOutput + HasLabel { + self.frp.set_label.clone_ref() + } +} + +impl display::Object for View { + fn display_object(&self) -> &display::object::Instance { + self.model.label.display_object() + } +} diff --git a/app/gui/view/src/project.rs b/app/gui/view/src/project.rs index acf620d2c0a0..abab4093624a 100644 --- a/app/gui/view/src/project.rs +++ b/app/gui/view/src/project.rs @@ -13,6 +13,7 @@ use crate::graph_editor::component::node::Expression; use crate::graph_editor::component::visualization; use crate::graph_editor::GraphEditor; use crate::graph_editor::NodeId; +use crate::popup; use crate::project_list::ProjectList; use crate::searcher; @@ -147,6 +148,7 @@ struct Model { fullscreen_vis: Rc>>, project_list: Rc, debug_mode_popup: debug_mode_popup::View, + popup: popup::View, } impl Model { @@ -158,6 +160,7 @@ impl Model { let code_editor = app.new_view::(); let fullscreen_vis = default(); let debug_mode_popup = debug_mode_popup::View::new(app); + let popup = popup::View::new(app); let runs_in_web = ARGS.groups.startup.options.platform.value == "web"; let window_control_buttons = runs_in_web.as_some_from(|| { let window_control_buttons = app.new_view::(); @@ -172,6 +175,7 @@ impl Model { display_object.add_child(&code_editor); display_object.add_child(&searcher); display_object.add_child(&debug_mode_popup); + display_object.add_child(&popup); display_object.remove_child(&searcher); let app = app.clone_ref(); @@ -186,6 +190,7 @@ impl Model { fullscreen_vis, project_list, debug_mode_popup, + popup, } } @@ -624,6 +629,10 @@ impl View { model.debug_mode_popup.enabled <+ frp.enable_debug_mode; model.debug_mode_popup.disabled <+ frp.disable_debug_mode; + + // === Error Pop-up === + + model.popup.set_label <+ model.graph_editor.model.breadcrumbs.project_name_error; } init.emit(()); @@ -655,6 +664,11 @@ impl View { pub fn debug_mode_popup(&self) -> &debug_mode_popup::View { &self.model.debug_mode_popup } + + /// Pop-up + pub fn 
popup(&self) -> &popup::View { + &self.model.popup + } } impl display::Object for View { diff --git a/build-config.yaml b/build-config.yaml index d71082a4c23d..bf84c332d939 100644 --- a/build-config.yaml +++ b/build-config.yaml @@ -1,6 +1,6 @@ # Options intended to be common for all developers. -wasm-size-limit: 15.83 MiB +wasm-size-limit: 15.85 MiB required-versions: # NB. The Rust version is pinned in rust-toolchain.toml. diff --git a/integration-test/tests/graph_editor.rs b/integration-test/tests/graph_editor.rs index 8f1a15d9077b..d98acee76838 100644 --- a/integration-test/tests/graph_editor.rs +++ b/integration-test/tests/graph_editor.rs @@ -52,7 +52,7 @@ async fn debug_mode() { // Turning On let expect_mode = project.debug_mode.next_event(); - let expect_popup_message = project.debug_mode_popup().label().show.next_event(); + let expect_popup_message = project.debug_mode_popup().content_frp_node().next_event(); project.enable_debug_mode(); assert!(expect_mode.expect()); let message = expect_popup_message.expect(); @@ -68,7 +68,7 @@ async fn debug_mode() { // Turning Off let expect_mode = project.debug_mode.next_event(); - let expect_popup_message = project.debug_mode_popup().label().show.next_event(); + let expect_popup_message = project.debug_mode_popup().content_frp_node().next_event(); project.disable_debug_mode(); assert!(!expect_mode.expect()); let message = expect_popup_message.expect(); From ee8e9e5c60755c407d404d4bb439d33bdae338f7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Wawrzyniec=20Urba=C5=84czyk?= Date: Mon, 8 May 2023 16:13:53 +0200 Subject: [PATCH 20/28] Fix the Engine version check in GUI (#6570) This PR fixes #6560. The fix has a few elements: 1) Bumps the Engine requirement to the latest release, namely `2023.1.1`. 2) Changed the logic of checking whether a given version matches the requirement. 
Previously, we relied on `VersionReq` from `semver` crate which did not behave intuitively when the required version had a prerelease suffix. Now we rely directly on Semantic Versioning rules of precedence. 3) Code cleanups, including deduplicating 3 copies of the version-checking code, and moving some tests to more sensible places. --- Cargo.lock | 120 ++++++++++++---------- Cargo.toml | 2 + app/gui/Cargo.toml | 2 +- app/gui/config.yaml | 4 +- app/gui/config/Cargo.toml | 3 +- app/gui/config/src/lib.rs | 99 +++++++++++++++++- app/gui/src/controller/project.rs | 25 +---- app/gui/src/model/project/synchronized.rs | 16 +-- build/build/Cargo.toml | 2 +- build/ci_utils/Cargo.toml | 2 +- lib/rust/ensogl/core/Cargo.toml | 2 +- 11 files changed, 184 insertions(+), 93 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7dbc0c471656..97367fa5e9a0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -148,7 +148,7 @@ dependencies = [ "enso-macro-utils", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -278,7 +278,7 @@ checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -295,7 +295,7 @@ checksum = "1cd7fce9ba8c3c042128ce72d8b2ddbf3a05747efb67ea0313c635e10bda47a2" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -961,7 +961,7 @@ dependencies = [ "cached_proc_macro_types", "darling", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -1055,7 +1055,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -1399,7 +1399,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d2301688392eb071b0bf1a37be05c469d3cc4dbbd95df672fe28ab021e6a096" dependencies = [ "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -1426,7 +1426,7 @@ dependencies = [ "proc-macro2", "quote", "scratch", - "syn", + "syn 1.0.107", ] [[package]] @@ -1443,7 +1443,7 @@ checksum = 
"357f40d1f06a24b60ae1fe122542c1fb05d28d32acb2aed064e84bc2ad1e252e" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -1467,7 +1467,7 @@ dependencies = [ "proc-macro2", "quote", "strsim", - "syn", + "syn 1.0.107", ] [[package]] @@ -1478,7 +1478,7 @@ checksum = "9c972679f83bdf9c42bd905396b6c3588a843a17f0f16dfcfa3e2c5d57441835" dependencies = [ "darling_core", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -1658,7 +1658,7 @@ checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -1671,7 +1671,7 @@ dependencies = [ "proc-macro2", "quote", "rustc_version 0.4.0", - "syn", + "syn 1.0.107", ] [[package]] @@ -1962,7 +1962,7 @@ dependencies = [ "enso-build-macros-lib", "itertools", "proc-macro2", - "syn", + "syn 1.0.107", ] [[package]] @@ -1978,7 +1978,7 @@ dependencies = [ "quote", "regex", "serde_yaml", - "syn", + "syn 1.0.107", ] [[package]] @@ -2019,6 +2019,7 @@ dependencies = [ "enso-prelude", "ensogl", "semver 1.0.16", + "thiserror", ] [[package]] @@ -2218,7 +2219,7 @@ dependencies = [ "Inflector", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -2227,7 +2228,7 @@ version = "0.2.0" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -2323,7 +2324,7 @@ dependencies = [ "enso-macro-utils", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -2416,7 +2417,7 @@ dependencies = [ "Inflector", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -2434,7 +2435,7 @@ version = "0.1.0" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -2462,7 +2463,7 @@ dependencies = [ "paste", "proc-macro2", "quote", - "syn", + "syn 1.0.107", "wasm-bindgen-test", ] @@ -2634,7 +2635,7 @@ version = "0.1.0" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -3266,7 +3267,7 @@ dependencies = [ "once_cell", 
"proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -3327,7 +3328,7 @@ checksum = "aa4da3c766cd7a0db8242e326e9e4e081edd567072893ed320008189715366a4" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", "synstructure", ] @@ -3426,7 +3427,7 @@ checksum = "236b4e4ae2b8be5f7a5652f6108c4a0f2627c569db4e7923333d31c7dbfed0fb" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -3558,7 +3559,7 @@ checksum = "95a73af87da33b5acf53acfebdc339fe592ecf5357ac7c0a7734ab9d8c876a70" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -3730,7 +3731,7 @@ dependencies = [ "quote", "serde", "serde_json", - "syn", + "syn 1.0.107", ] [[package]] @@ -3741,7 +3742,7 @@ checksum = "a755cc59cda2641ea3037b4f9f7ef40471c329f55c1fa2db6fa0bb7ae6c1f7ce" dependencies = [ "graphql_client_codegen", "proc-macro2", - "syn", + "syn 1.0.107", ] [[package]] @@ -4149,7 +4150,7 @@ dependencies = [ "sha2", "strum", "symlink", - "syn", + "syn 1.0.107", "sysinfo", "tar", "tempfile", @@ -4653,7 +4654,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -4783,7 +4784,7 @@ dependencies = [ "cfg-if 0.1.10", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -5025,7 +5026,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -5107,7 +5108,7 @@ checksum = "b501e44f11665960c7e7fcf062c7d96a14ade4aa98116c004b2e37b5be7d736c" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -5164,7 +5165,7 @@ dependencies = [ "proc-macro-error", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -5332,7 +5333,7 @@ dependencies = [ "pest_meta", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -5363,7 +5364,7 @@ checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] 
[[package]] @@ -5511,7 +5512,7 @@ dependencies = [ "proc-macro-error-attr", "proc-macro2", "quote", - "syn", + "syn 1.0.107", "version_check", ] @@ -5534,9 +5535,9 @@ checksum = "dc375e1527247fe1a97d8b7156678dfe7c1af2fc075c9a4db3690ecd2a148068" [[package]] name = "proc-macro2" -version = "1.0.50" +version = "1.0.56" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ef7d57beacfaf2d8aee5937dab7b7f28de3cb8b1828479bb5de2a7106f2bae2" +checksum = "2b63bdb0cd06f1f4dedf69b254734f9b45af66e4a031e42a7480257d9898b435" dependencies = [ "unicode-ident", ] @@ -5561,7 +5562,7 @@ dependencies = [ "itertools", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -5603,9 +5604,9 @@ checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" [[package]] name = "quote" -version = "1.0.23" +version = "1.0.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b" +checksum = "4424af4bf778aae2051a77b60283332f386554255d722233d09fbfc7e30da2fc" dependencies = [ "proc-macro2", ] @@ -6137,7 +6138,7 @@ checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -6343,7 +6344,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -6431,7 +6432,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn", + "syn 1.0.107", ] [[package]] @@ -6451,6 +6452,17 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "syn" +version = "2.0.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a34fcf3e8b60f57e6a14301a2e916d323af98b0ea63c599441eec8558660c822" +dependencies = [ + "proc-macro2", + "quote", + "unicode-ident", +] + [[package]] name = "sync_wrapper" version = "0.1.1" @@ -6465,7 +6477,7 @@ checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" 
dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", "unicode-xid", ] @@ -6554,22 +6566,22 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a9cd18aa97d5c45c6603caea1da6628790b37f7a34b6ca89522331c5180fed0" +checksum = "978c9a314bd8dc99be594bc3c175faaa9794be04a5a5e153caba6915336cebac" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.38" +version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f" +checksum = "f9456a42c5b0d803c8cd86e73dd7cc9edd429499f37a3550d286d5e86720569f" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 2.0.15", ] [[package]] @@ -6683,7 +6695,7 @@ checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -6889,7 +6901,7 @@ checksum = "4017f8f45139870ca7e672686113917c71c7a6e02d4924eda67186083c03081a" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", ] [[package]] @@ -7281,7 +7293,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn", + "syn 1.0.107", "wasm-bindgen-shared", ] @@ -7315,7 +7327,7 @@ checksum = "2aff81306fcac3c7515ad4e177f521b5c9a15f2b08f4e32d823066102f35a5f6" dependencies = [ "proc-macro2", "quote", - "syn", + "syn 1.0.107", "wasm-bindgen-backend", "wasm-bindgen-shared", ] diff --git a/Cargo.toml b/Cargo.toml index b22bd852415d..e00b7ac0e0e6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -134,3 +134,5 @@ syn = { version = "1.0", features = [ "visit-mut", ] } quote = { version = "1.0.23" } +semver = { version = "1.0.0", features = ["serde"] } +thiserror = "1.0.40" diff --git a/app/gui/Cargo.toml b/app/gui/Cargo.toml index 5a57c3454fcd..500fe57b4ad2 100644 --- a/app/gui/Cargo.toml +++ b/app/gui/Cargo.toml @@ -50,7 +50,7 @@ 
itertools = { workspace = true } js-sys = { workspace = true } mockall = { version = "0.7.1", features = ["nightly"] } nalgebra = { workspace = true } -semver = { version = "1.0.0", features = ["serde"] } +semver = { workspace = true } serde = { version = "1.0", features = ["derive"] } serde_json = { workspace = true } sha3 = { version = "0.8.2" } diff --git a/app/gui/config.yaml b/app/gui/config.yaml index e6ca0802fd42..08ece4c7e1b0 100644 --- a/app/gui/config.yaml +++ b/app/gui/config.yaml @@ -20,8 +20,8 @@ minimumSupportedVersion": "2.0.0-alpha.6" # The minimum engine version supported by the application. The projects opened with the older versions # will have the "Unsupported engine version" message displayed. -engineVersionSupported: "2023.1.1-nightly.2023.2.8" +engineVersionSupported: "2023.1.1" # The minimum language edition supported by the application. It will be displayed as edition user # should put in their package.yaml file to have project compatible with the IDE. -languageEditionSupported: "2023.1.1-nightly.2023.2.8" +languageEditionSupported: "2023.1.1" diff --git a/app/gui/config/Cargo.toml b/app/gui/config/Cargo.toml index 6b574e3c7dae..bb4bc0343e6c 100644 --- a/app/gui/config/Cargo.toml +++ b/app/gui/config/Cargo.toml @@ -8,7 +8,8 @@ edition = "2021" ensogl = { path = "../../../lib/rust/ensogl" } enso-prelude = { path = "../../../lib/rust/prelude" } enso-json-to-struct = { path = "../../../lib/rust/json-to-struct" } -semver = "1.0.0" +semver = { workspace = true } +thiserror = { workspace = true } [build-dependencies] config-reader = { path = "../../../lib/rust/config-reader" } diff --git a/app/gui/config/src/lib.rs b/app/gui/config/src/lib.rs index ca71959078b0..b01b31c494bb 100644 --- a/app/gui/config/src/lib.rs +++ b/app/gui/config/src/lib.rs @@ -20,15 +20,74 @@ use enso_json_to_struct::json_to_struct; // ============== -// === Config === +// === Errors === // ============== +///Error type with information that the Engine version does not meet 
the requirements. +#[derive(Clone, Debug, thiserror::Error)] +#[error("Unsupported Engine version: required {required} (or newer), found {found}.")] +pub struct UnsupportedEngineVersion { + /// The version of the Engine that is required. + pub required: semver::Version, + /// The version of the Engine that was found. + pub found: semver::Version, +} + + + +// =============== +// === Version === +// =============== + include!(concat!(env!("OUT_DIR"), "/config.rs")); pub use generated::*; -pub fn engine_version_requirement() -> semver::VersionReq { - semver::VersionReq::parse(&format!(">={engine_version_supported}")).unwrap() +/// The minimum supported engine version. +pub fn engine_version_required() -> semver::Version { + // Safe to unwrap, as `engine_version_supported` compile-time and is validated by the test. + semver::Version::parse(engine_version_supported).unwrap() +} + +/// Check if the given Engine version meets the requirements. +/// +/// Effectively, this checks if the given version is greater or equal to the minimum supported. +/// "Greater or equal" is defined by the [Semantic Versioning specification](https://semver.org/) +/// term of precedence. +pub fn check_engine_version_requirement( + required_version: &semver::Version, + tested_version: &semver::Version, +) -> Result<(), UnsupportedEngineVersion> { + // We don't want to rely on the `semver::VersionReq` semantics here. Unfortunately the + // [Semantic Versioning specification](https://semver.org/) does not define the semantics of + // the version requirement operators, so different implementations may behave differently. 
+ // + // The `semver::VersionReq` implementation follows the Cargo's implementation, namely: + // ``` + // In particular, in order for any VersionReq to match a pre-release version, the VersionReq + // must contain at least one Comparator that has an explicit major, minor, and patch version + // identical to the pre-release being matched, and that has a nonempty pre-release component. + // ``` + // See: https://docs.rs/semver/latest/semver/struct.VersionReq.html#associatedconstant.STAR + // This leads to counter-intuitive behavior, where `2023.0.0-dev` does not fulfill the + // `>= 2022.0.0-dev` requirement. + if tested_version < required_version { + Err(UnsupportedEngineVersion { + required: required_version.clone(), + found: tested_version.clone(), + }) + } else { + Ok(()) + } +} + +/// Check if the given Engine version meets the requirements for this build. +/// +/// See [`check_engine_version_requirement`] for more details. +pub fn check_engine_version( + engine_version: &semver::Version, +) -> Result<(), UnsupportedEngineVersion> { + check_engine_version_requirement(&engine_version_required(), engine_version) } @@ -64,3 +123,37 @@ pub fn read_args() -> Args { lazy_static! { pub static ref ARGS: Args = read_args(); } + + + +// ============= +// === Tests === +// ============= + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn check_that_version_requirement_parses() { + // We just expect that it won't panic. + let _ = engine_version_required(); + } + + #[test] + fn new_project_engine_version_fills_requirements() { + // Sanity check: required version must be supported. + assert!(check_engine_version(&engine_version_required()).is_ok()); + } + + #[test] + fn newer_prerelease_matches() -> anyhow::Result<()> { + // Whatever version we have currently defined with `-dev` prerelease. 
+ let current = + semver::Version { pre: semver::Prerelease::new("dev")?, ..engine_version_required() }; + let newer = semver::Version { major: current.major + 1, ..current.clone() }; + + check_engine_version_requirement(¤t, &newer)?; + Ok(()) + } +} diff --git a/app/gui/src/controller/project.rs b/app/gui/src/controller/project.rs index 6c9c205b754a..e78384ead0cd 100644 --- a/app/gui/src/controller/project.rs +++ b/app/gui/src/controller/project.rs @@ -214,16 +214,8 @@ impl Project { } fn display_warning_on_unsupported_engine_version(&self) { - let requirement = enso_config::engine_version_requirement(); - let version = self.model.engine_version(); - if !requirement.matches(&version) { - let message = format!( - "Unsupported Engine version. Please update edition in {} \ - to {}.", - package_yaml_path(&self.model.name()), - enso_config::language_edition_supported - ); - self.status_notifications.publish_event(message); + if let Err(e) = enso_config::check_engine_version(&self.model.engine_version()) { + self.status_notifications.publish_event(e.to_string()); } } } @@ -273,19 +265,6 @@ mod tests { use engine_protocol::language_server; use std::assert_matches::assert_matches; - #[test] - fn parse_supported_engine_version() { - // Should not panic. 
- enso_config::engine_version_requirement(); - } - - #[test] - fn new_project_engine_version_fills_requirements() { - let requirements = enso_config::engine_version_requirement(); - let version = semver::Version::parse(enso_config::engine_version_supported).unwrap(); - assert!(requirements.matches(&version)) - } - #[wasm_bindgen_test] fn adding_missing_main() { let _ctx = TestWithLocalPoolExecutor::set_up(); diff --git a/app/gui/src/model/project/synchronized.rs b/app/gui/src/model/project/synchronized.rs index a9c51a071f4e..62e92a6ba246 100644 --- a/app/gui/src/model/project/synchronized.rs +++ b/app/gui/src/model/project/synchronized.rs @@ -240,8 +240,9 @@ pub struct RenameInReadOnly; /// engine's version (which is likely the cause of the problems). #[derive(Debug, Fail)] pub struct UnsupportedEngineVersion { - project_name: String, - root_cause: failure::Error, + project_name: String, + version_mismatch: enso_config::UnsupportedEngineVersion, + root_cause: failure::Error, } impl UnsupportedEngineVersion { @@ -249,10 +250,13 @@ impl UnsupportedEngineVersion { let engine_version = properties.engine_version.clone(); let project_name = properties.name.project.as_str().to_owned(); move |root_cause| { - let requirement = enso_config::engine_version_requirement(); - if !requirement.matches(&engine_version) { - let project_name = project_name.clone(); - UnsupportedEngineVersion { project_name, root_cause }.into() + if let Err(version_mismatch) = enso_config::check_engine_version(&engine_version) { + UnsupportedEngineVersion { + project_name: project_name.clone(), + version_mismatch, + root_cause, + } + .into() } else { root_cause } diff --git a/build/build/Cargo.toml b/build/build/Cargo.toml index e621630144d9..f6c2bc623d74 100644 --- a/build/build/Cargo.toml +++ b/build/build/Cargo.toml @@ -58,7 +58,7 @@ regex = { workspace = true } reqwest = { version = "0.11.5", default-features = false, features = [ "stream" ] } -semver = { version = "1.0.4", features = ["serde"] } 
+semver = { workspace = true } serde = { version = "1.0.130", features = ["derive"] } serde_json = { workspace = true } serde_yaml = { workspace = true } diff --git a/build/ci_utils/Cargo.toml b/build/ci_utils/Cargo.toml index 4ec09d4079c7..3180a18f567c 100644 --- a/build/ci_utils/Cargo.toml +++ b/build/ci_utils/Cargo.toml @@ -59,7 +59,7 @@ regex = { workspace = true } reqwest = { version = "0.11.5", default-features = false, features = [ "stream" ] } -semver = { version = "1.0.4", features = ["serde"] } +semver = { workspace = true } serde = { version = "1.0.130", features = ["derive"] } serde_json = { workspace = true } serde_yaml = { workspace = true } diff --git a/lib/rust/ensogl/core/Cargo.toml b/lib/rust/ensogl/core/Cargo.toml index b6b5648be8b3..3311e4167c26 100644 --- a/lib/rust/ensogl/core/Cargo.toml +++ b/lib/rust/ensogl/core/Cargo.toml @@ -40,7 +40,7 @@ num_enum = { version = "0.5.1" } num-traits = { version = "0.2" } ordered-float = { workspace = true } rustc-hash = { version = "1.0.1" } -semver = { version = "1.0.9" } +semver = { workspace = true } serde = { version = "1" } smallvec = { workspace = true } typenum = { version = "1.11.2" } From 05404300c216e43abcf508099fe752eb4ccfc52e Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Tue, 9 May 2023 03:09:13 +1000 Subject: [PATCH 21/28] Revert typescript CI Lint changes (#6602) * Revert "Fix lint CI (#6567)" This reverts commit 0a8f80959f65c9891ff8cffb68c5d9a7e6cd527c. * Revert "Run typecheck and eslint on `./run lint` (#6314)" This reverts commit 7885145b6e3caf42b343950ae23646dcd789bb27. 
--- .../lib/client/src/file-associations.ts | 5 ++--- app/ide-desktop/lib/content-config/src/index.ts | 2 +- app/ide-desktop/lib/types/modules.d.ts | 17 ----------------- build/cli/src/lib.rs | 6 ------ 4 files changed, 3 insertions(+), 27 deletions(-) diff --git a/app/ide-desktop/lib/client/src/file-associations.ts b/app/ide-desktop/lib/client/src/file-associations.ts index 4a6e1c67c330..02dc0bfbfd3c 100644 --- a/app/ide-desktop/lib/client/src/file-associations.ts +++ b/app/ide-desktop/lib/client/src/file-associations.ts @@ -104,7 +104,7 @@ export function isFileOpenable(path: string): boolean { * we manually start a new instance of the application and pass the file path to it (using the * Windows-style command). */ -export function onFileOpened(event: Event, path: string): string | null { +export function onFileOpened(event: Event, path: string): string | void { logger.log(`Received 'open-file' event for path '${path}'.`) if (isFileOpenable(path)) { logger.log(`The file '${path}' is openable.`) @@ -114,6 +114,7 @@ export function onFileOpened(event: Event, path: string): string | null { if (!electron.app.isReady() && CLIENT_ARGUMENTS.length === 0) { event.preventDefault() logger.log(`Opening file '${path}'.`) + // eslint-disable-next-line no-restricted-syntax return handleOpenFile(path) } else { // We need to start another copy of the application, as the first one is already running. @@ -127,11 +128,9 @@ export function onFileOpened(event: Event, path: string): string | null { }) // Prevent parent (this) process from waiting for the child to exit. 
child.unref() - return null } } else { logger.log(`The file '${path}' is not openable, ignoring the 'open-file' event.`) - return null } } diff --git a/app/ide-desktop/lib/content-config/src/index.ts b/app/ide-desktop/lib/content-config/src/index.ts index 1a68e317199a..c2aed90d433f 100644 --- a/app/ide-desktop/lib/content-config/src/index.ts +++ b/app/ide-desktop/lib/content-config/src/index.ts @@ -2,7 +2,7 @@ import * as semver from 'semver' -import * as linkedDist from '../../../../../target/ensogl-pack/linked-dist' +import * as linkedDist from '../../../../../target/ensogl-pack/linked-dist/index' import BUILD_INFO from '../../../build.json' assert { type: 'json' } // Aliases with the same name as the original. diff --git a/app/ide-desktop/lib/types/modules.d.ts b/app/ide-desktop/lib/types/modules.d.ts index 58762fe78f19..3c68a6e47708 100644 --- a/app/ide-desktop/lib/types/modules.d.ts +++ b/app/ide-desktop/lib/types/modules.d.ts @@ -2,23 +2,6 @@ * * This file MUST NOT `export {}` for the modules to be visible to other files. 
*/ -declare module '*/build.json' { - interface BuildInfo { - commit: string - version: string - engineVersion: string - name: string - } - - const BUILD_INFO: BuildInfo - export default BUILD_INFO -} - -declare module '*/ensogl-pack/linked-dist' { - // eslint-disable-next-line no-restricted-syntax - export * from '../../../../lib/rust/ensogl/pack/js/src/runner/index' -} - declare module '*/gui/config.yaml' { interface Config { windowAppScopeName: string diff --git a/build/cli/src/lib.rs b/build/cli/src/lib.rs index c6fc4f497cc7..7ab115122e0f 100644 --- a/build/cli/src/lib.rs +++ b/build/cli/src/lib.rs @@ -86,8 +86,6 @@ use ide_ci::programs::git; use ide_ci::programs::git::clean; use ide_ci::programs::rustc; use ide_ci::programs::Cargo; -use ide_ci::programs::Npm; -use ide_ci::programs::Npx; use std::time::Duration; use tempfile::tempdir; use tokio::process::Child; @@ -837,10 +835,6 @@ pub async fn main_internal(config: Option) -> Result .await?; prettier::check(&ctx.repo_root).await?; - let js_modules_root = ctx.repo_root.join("app/ide-desktop"); - Npm.cmd()?.current_dir(&js_modules_root).args(["install"]).run_ok().await?; - Npm.cmd()?.current_dir(&js_modules_root).args(["run", "typecheck"]).run_ok().await?; - Npx.cmd()?.current_dir(&js_modules_root).args(["eslint", "."]).run_ok().await?; } Target::Fmt => { let prettier = prettier::write(&ctx.repo_root); From 5b0af105c101003d45428dca6cb7b4684b6356db Mon Sep 17 00:00:00 2001 From: Michael Mauderer Date: Tue, 9 May 2023 09:56:18 +0200 Subject: [PATCH 22/28] Fix visualisation type selector artifacts rendered after node preview visualisation was closed. (#6575) Fixes #6501. There appears to be an issue with the children of an unlinked display object still receiving FRP mouse events. In this case, they show/hide the unlinked child, leading to it appearing again. This is mitigated by unlinking the affected element directly. 
Note that while investigating this, I found that there is an untreated issue with the display object hierarchy, which cause failing tests. The failing test case has been disabled, but the linked issue ([#1405](https://github.com/enso-org/ide/issues/1405)) was closed when migrating tickets. I have re-opened it for triage. --- .../src/component/visualization/container.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/app/gui/view/graph-editor/src/component/visualization/container.rs b/app/gui/view/graph-editor/src/component/visualization/container.rs index 5d713495cca5..cfdbeb15ce49 100644 --- a/app/gui/view/graph-editor/src/component/visualization/container.rs +++ b/app/gui/view/graph-editor/src/component/visualization/container.rs @@ -315,6 +315,15 @@ impl ContainerModel { impl ContainerModel { fn set_visibility(&self, visibility: bool) { + // This is a workaround for #6600. It ensures the action bar is removed + // and receive no further mouse events. + if visibility { + self.view.add_child(&self.action_bar); + } else { + self.action_bar.unset_parent(); + } + + // Show or hide the visualization. 
if visibility { self.drag_root.add_child(&self.view); self.show_visualisation(); From 4e7f757f53f26c8792066e0d1019e5d95e4993a4 Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Tue, 9 May 2023 18:33:11 +1000 Subject: [PATCH 23/28] Fix dashboard issues (part 2) (#6511) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Fix cloud-v2/#411 * Fix cloud-v2/#412 * Fix cloud-v2/#415 * Fix cloud-v2/#414 * Run prettier * Fix cloud-v2/#417 --------- Co-authored-by: Paweł Buchowski --- .../authentication/components/setUsername.tsx | 5 ++- .../src/authentication/providers/auth.tsx | 37 ++++++++++++++--- .../src/authentication/src/components/app.tsx | 3 ++ .../components/changePasswordModal.tsx | 2 +- .../components/confirmDeleteModal.tsx | 3 +- .../src/dashboard/components/createForm.tsx | 4 +- .../src/dashboard/components/dashboard.tsx | 41 ++++++++++--------- .../src/dashboard/components/modal.tsx | 10 +++-- .../src/dashboard/components/renameModal.tsx | 2 +- .../src/dashboard/components/topBar.tsx | 15 ++++++- .../dashboard/components/uploadFileModal.tsx | 2 +- .../authentication/src/providers/backend.tsx | 2 +- 12 files changed, 87 insertions(+), 39 deletions(-) diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/setUsername.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/setUsername.tsx index 903fad09e884..7aff6c16d8c4 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/setUsername.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/components/setUsername.tsx @@ -3,7 +3,9 @@ import * as react from 'react' import * as auth from '../providers/auth' +import * as backendProvider from '../../providers/backend' import * as svg from '../../components/svg' + import Input from './input' import SvgIcon from './svgIcon' @@ -14,6 +16,7 @@ import SvgIcon from './svgIcon' function SetUsername() { const { 
setUsername: authSetUsername } = auth.useAuth() const { email } = auth.usePartialUserSession() + const { backend } = backendProvider.useBackend() const [username, setUsername] = react.useState('') @@ -32,7 +35,7 @@ function SetUsername() {
{ event.preventDefault() - await authSetUsername(username, email) + await authSetUsername(backend, username, email) }} >
diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/providers/auth.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/providers/auth.tsx index 1f18c902bd5a..e1adaee206f6 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/providers/auth.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/authentication/providers/auth.tsx @@ -86,7 +86,11 @@ export interface PartialUserSession { interface AuthContextType { signUp: (email: string, password: string) => Promise confirmSignUp: (email: string, code: string) => Promise - setUsername: (username: string, email: string) => Promise + setUsername: ( + backend: backendProvider.AnyBackendAPI, + username: string, + email: string + ) => Promise signInWithGoogle: () => Promise signInWithGitHub: () => Promise signInWithPassword: (email: string, password: string) => Promise @@ -166,7 +170,7 @@ export function AuthProvider(props: AuthProviderProps) { const client = new http.Client(headers) const backend = new remoteBackend.RemoteBackend(client, logger) setBackend(backend) - const organization = await backend.usersMe() + const organization = await backend.usersMe().catch(() => null) let newUserSession: UserSession if (!organization) { newUserSession = { @@ -257,10 +261,14 @@ export function AuthProvider(props: AuthProviderProps) { return result.ok }) - const setUsername = async (username: string, email: string) => { - const { backend } = backendProvider.useBackend() + const setUsername = async ( + backend: backendProvider.AnyBackendAPI, + username: string, + email: string + ) => { if (backend.platform === platform.Platform.desktop) { - throw new Error('') + toast.error('You cannot set your username on the local backend.') + return false } else { try { await backend.createUser({ @@ -270,7 +278,8 @@ export function AuthProvider(props: AuthProviderProps) { navigate(app.DASHBOARD_PATH) toast.success(MESSAGES.setUsernameSuccess) return true - } catch 
{ + } catch (e) { + toast.error('Could not set your username.') return false } } @@ -377,6 +386,22 @@ export function ProtectedLayout() { if (!session) { return + } else if (session.variant === 'partial') { + return + } else { + return + } +} + +// =========================== +// === SemiProtectedLayout === +// =========================== + +export function SemiProtectedLayout() { + const { session } = useAuth() + + if (session?.variant === 'full') { + return } else { return } diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx index d5f5fa458679..cc1f34e22ffc 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/components/app.tsx @@ -144,6 +144,9 @@ function AppRouter(props: AppProps) { path={DASHBOARD_PATH} element={showDashboard && } /> + + {/* Semi-protected pages are visible to users currently registering. */} + }> } /> {/* Other pages are visible to unauthenticated and authenticated users. */} diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/changePasswordModal.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/changePasswordModal.tsx index 8964a327ff61..9759f73002ee 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/changePasswordModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/changePasswordModal.tsx @@ -33,7 +33,7 @@ function ChangePasswordModal() { } return ( - +
{ event.stopPropagation() diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/confirmDeleteModal.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/confirmDeleteModal.tsx index 02a0140f444a..4a08633b7c03 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/confirmDeleteModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/confirmDeleteModal.tsx @@ -20,8 +20,9 @@ export interface ConfirmDeleteModalProps { function ConfirmDeleteModal(props: ConfirmDeleteModalProps) { const { assetType, name, doDelete, onSuccess } = props const { unsetModal } = modalProvider.useSetModal() + return ( - + { diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/createForm.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/createForm.tsx index b99273ee142a..535566efdd3d 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/createForm.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/createForm.tsx @@ -31,10 +31,10 @@ function CreateForm(props: CreateFormProps) { } return ( - + { event.stopPropagation() diff --git a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx index 416e8def13bf..aca32eb2426c 100644 --- a/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx +++ b/app/ide-desktop/lib/dashboard/src/authentication/src/dashboard/components/dashboard.tsx @@ -525,8 +525,8 @@ function Dashboard(props: DashboardProps) { const CreateForm = ASSET_TYPE_CREATE_FORM[assetType] setModal(() => ( @@ -645,7 +645,7 @@ function Dashboard(props: DashboardProps) { return (
{ @@ -679,20 +679,22 @@ function Dashboard(props: DashboardProps) { } }} setBackendPlatform={newBackendPlatform => { - setProjectAssets([]) - setDirectoryAssets([]) - setSecretAssets([]) - setFileAssets([]) - switch (newBackendPlatform) { - case platformModule.Platform.desktop: - setBackend(new localBackend.LocalBackend()) - break - case platformModule.Platform.cloud: { - const headers = new Headers() - headers.append('Authorization', `Bearer ${accessToken}`) - const client = new http.Client(headers) - setBackend(new remoteBackendModule.RemoteBackend(client, logger)) - break + if (newBackendPlatform !== backend.platform) { + setProjectAssets([]) + setDirectoryAssets([]) + setSecretAssets([]) + setFileAssets([]) + switch (newBackendPlatform) { + case platformModule.Platform.desktop: + setBackend(new localBackend.LocalBackend()) + break + case platformModule.Platform.cloud: { + const headers = new Headers() + headers.append('Authorization', `Bearer ${accessToken}`) + const client = new http.Client(headers) + setBackend(new remoteBackendModule.RemoteBackend(client, logger)) + break + } } } }} @@ -725,6 +727,7 @@ function Dashboard(props: DashboardProps) { ? 'opacity-50' : '' }`} + disabled={backend.platform === platformModule.Platform.desktop} onClick={event => { event.stopPropagation() setModal(() => ( @@ -738,8 +741,8 @@ function Dashboard(props: DashboardProps) { {svg.UPLOAD_ICON}