From 018d4c312f59928464a8b2f3669faf88fbaff14a Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Mon, 8 Jul 2024 08:58:08 +0100 Subject: [PATCH 01/11] Stop publishing `Postgres` constructor, update `Postgres_Details.Postgres` to `Postgres.Server`. (#10466) ![image](https://github.com/enso-org/enso/assets/4699705/6d0d4167-e97b-4765-8079-650ad091ce60) - Rename `Postgres_Details` to `Postgres`. - Rename `Postgres` constructor to `Server`. - Update SPI. - Linting issues (indent, missing doc comment) --- CHANGELOG.md | 2 ++ .../Redshift/Internal/Redshift_Dialect.enso | 11 +++--- .../0.0.0-dev/src/Data/Json/Extensions.enso | 4 +-- .../src/Connection/Connection_Details.enso | 2 +- .../Data_Link/Postgres_Data_Link.enso | 8 ++--- .../{Postgres_Details.enso => Postgres.enso} | 6 ++-- .../Postgres/Postgres_Data_Link_Setup.enso | 6 ++-- .../Internal/Postgres/Postgres_Dialect.enso | 10 +++--- .../src/Internal/SQLite/SQLite_Dialect.enso | 2 ++ .../Standard/Database/0.0.0-dev/src/Main.enso | 3 +- .../src/Internal/Snowflake_Connection.enso | 2 +- .../PostgresConnectionDetailsSPI.java | 6 ++-- .../src/Database/Postgres_Spec.enso | 34 +++++++++---------- 13 files changed, 49 insertions(+), 47 deletions(-) rename distribution/lib/Standard/Database/0.0.0-dev/src/Connection/{Postgres_Details.enso => Postgres.enso} (91%) diff --git a/CHANGELOG.md b/CHANGELOG.md index f6c91d8375a0..18f20fe3e2f7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -12,9 +12,11 @@ methods.][10434] - [Renamed `Location.Start` to `Location.Left` and `Location.End` to `Location.Right`.][10445] +- [Renamed `Postgres_Details.Postgres` to `Postgres.Server`.][10466] [10434]: https://github.com/enso-org/enso/pull/10434 [10445]: https://github.com/enso-org/enso/pull/10445 +[10466]: https://github.com/enso-org/enso/pull/10466 # Enso 2024.2 diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso index 4578d24d4bff..9c99532bc76a 100644 --- a/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Dialect.enso @@ -74,12 +74,11 @@ type Redshift_Dialect wrap_identifier self identifier = Base_Generator.wrap_in_quotes identifier - ## PRIVATE - Generates a SQL expression for a table literal. - make_table_literal : Vector (Vector Text) -> Vector Text -> Text -> SQL_Builder - make_table_literal self vecs column_names as_name = - Base_Generator.default_make_table_literal self.wrap_identifier vecs column_names as_name - + ## PRIVATE + Generates a SQL expression for a table literal. + make_table_literal : Vector (Vector Text) -> Vector Text -> Text -> SQL_Builder + make_table_literal self vecs column_names as_name = + Base_Generator.default_make_table_literal self.wrap_identifier vecs column_names as_name ## PRIVATE Prepares an ordering descriptor. 
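For reference, the rename described above changes connection code as sketched below; host, port and database name are placeholder values, and the call shape follows the updated tests and SPI in this patch:

```
from Standard.Database import all

# Before this change:
#   Database.connect (Postgres "localhost" 5432 "my_db")
# After it, the constructor is namespaced under the `Postgres` type:
connection = Database.connect (Postgres.Server "localhost" 5432 "my_db")
```

The default constructor advertised through the SPI changes accordingly, from `(Postgres 'localhost' 5432)` to `(Postgres.Server 'localhost' 5432)`.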
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso index edb0d267fa21..10fcedd9e1bc 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso @@ -2,7 +2,6 @@ import project.Any.Any import project.Data.Array.Array import project.Data.Array_Proxy.Array_Proxy import project.Data.Decimal.Decimal -import project.Data.Numeric.Math_Context.Math_Context import project.Data.Json.JS_Object import project.Data.Json.Json import project.Data.Locale.Locale @@ -10,12 +9,13 @@ import project.Data.Map.Map import project.Data.Numbers.Float import project.Data.Numbers.Integer import project.Data.Numbers.Number +import project.Data.Numeric.Math_Context.Math_Context import project.Data.Text.Text import project.Data.Text.Text_Sub_Range.Text_Sub_Range import project.Data.Vector.Vector import project.Error.Error -import project.Errors.Illegal_Argument.Illegal_Argument import project.Errors.Deprecated.Deprecated +import project.Errors.Illegal_Argument.Illegal_Argument import project.Meta import project.Nothing.Nothing import project.Warning.Warning diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Details.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Details.enso index 87e0a89e4648..f590b0293d7b 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Details.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection_Details.enso @@ -9,7 +9,7 @@ from project.Errors import SQL_Error ## Specifies the connection details for the database. This is an interface that is implemented by particular database types, like - `Postgres_Details`, `SQLite` etc. + `Postgres`, `SQLite` etc. type Connection_Details ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Data_Link/Postgres_Data_Link.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Data_Link/Postgres_Data_Link.enso index 8096a58eacfe..42e4b7f41a10 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Data_Link/Postgres_Data_Link.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Data_Link/Postgres_Data_Link.enso @@ -8,17 +8,17 @@ from Standard.Base.Enso_Cloud.Public_Utils import get_optional_field, get_requir import project.Connection.Connection_Options.Connection_Options import project.Connection.Credentials.Credentials -import project.Connection.Postgres_Details.Postgres_Details +import project.Connection.Postgres.Postgres ## PRIVATE type Postgres_Data_Link ## PRIVATE A data-link returning a connection to the specified database. - Connection details:Postgres_Details + Connection details:Postgres ## PRIVATE A data-link returning a query to a specific table within a database. 
- Table name:Text details:Postgres_Details + Table name:Text details:Postgres ## PRIVATE @@ -34,7 +34,7 @@ type Postgres_Data_Link password = get_required_field "password" credentials_json |> parse_secure_value Credentials.Username_And_Password username password - details = Postgres_Details.Postgres host=host port=port database=db_name schema=schema credentials=credentials + details = Postgres.Server host=host port=port database=db_name schema=schema credentials=credentials case get_optional_field "table" json expected_type=Text of Nothing -> Postgres_Data_Link.Connection details diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Details.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres.enso similarity index 91% rename from distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Details.enso rename to distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres.enso index 73b71d1e195d..9a58b5c73c30 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres_Details.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Postgres.enso @@ -12,7 +12,7 @@ import project.Internal.Postgres.Postgres_Data_Link_Setup.Postgres_Data_Link_Set polyglot java import org.postgresql.Driver -type Postgres_Details +type Postgres ## Connect to a PostgreSQL database. Arguments: @@ -28,7 +28,7 @@ type Postgres_Details - use_ssl: Whether to use SSL (defaults to `SSL_Mode.Prefer`). - client_cert: The client certificate to use or `Nothing` if not needed. @credentials Credentials.default_widget - Postgres (host:Text=default_postgres_host) (port:Integer=default_postgres_port) (database:Text=default_postgres_database) (schema:Text="") (credentials:(Credentials|Nothing)=Nothing) (use_ssl:SSL_Mode=SSL_Mode.Prefer) (client_cert:(Client_Certificate|Nothing)=Nothing) + Server (host:Text=default_postgres_host) (port:Integer=default_postgres_port) (database:Text=default_postgres_database) (schema:Text="") (credentials:(Credentials|Nothing)=Nothing) (use_ssl:SSL_Mode=SSL_Mode.Prefer) (client_cert:(Client_Certificate|Nothing)=Nothing) ## PRIVATE Build the Connection resource. @@ -46,7 +46,7 @@ type Postgres_Details ## Cannot use default argument values as gets in an infinite loop if you do. make_new database schema = - Postgres_Details.Postgres self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . connect options allow_data_links + Postgres.Server self.host self.port (database.if_nothing self.database) (schema.if_nothing self.schema) self.credentials self.use_ssl self.client_cert . 
connect options allow_data_links Postgres_Connection.create self.jdbc_url properties make_new data_link_setup diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Data_Link_Setup.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Data_Link_Setup.enso index d0422e7db1ef..4d1f67183968 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Data_Link_Setup.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Data_Link_Setup.enso @@ -8,13 +8,13 @@ import Standard.Base.Runtime.Context from Standard.Base.Enso_Cloud.Data_Link_Helpers import data_link_extension, secure_value_to_json import project.Connection.Credentials.Credentials -import project.Connection.Postgres_Details.Postgres_Details +import project.Connection.Postgres.Postgres import project.Connection.SSL_Mode.SSL_Mode ## PRIVATE type Postgres_Data_Link_Setup ## PRIVATE - Available details:Postgres_Details + Available details:Postgres ## PRIVATE Unavailable cause:Text @@ -45,7 +45,7 @@ type Postgres_Data_Link_Setup Error.throw (Illegal_Argument.Error "Cannot save connection as Data Link: "+cause) ## PRIVATE -prepare_credentials data_link_location:Enso_File details:Postgres_Details -> JS_Object | Nothing = +prepare_credentials data_link_location:Enso_File details:Postgres -> JS_Object | Nothing = case details.credentials of Nothing -> Nothing credentials:Credentials -> diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso index 792e6cc9aea0..47e1b5a93d33 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso @@ -85,11 +85,11 @@ type Postgres_Dialect wrap_identifier self identifier = Base_Generator.wrap_in_quotes identifier - ## PRIVATE - Generates a SQL expression for a table literal. - make_table_literal : Vector (Vector Text) -> Vector Text -> Text -> SQL_Builder - make_table_literal self vecs column_names as_name = - Base_Generator.default_make_table_literal self.wrap_identifier vecs column_names as_name + ## PRIVATE + Generates a SQL expression for a table literal. + make_table_literal : Vector (Vector Text) -> Vector Text -> Text -> SQL_Builder + make_table_literal self vecs column_names as_name = + Base_Generator.default_make_table_literal self.wrap_identifier vecs column_names as_name ## PRIVATE Prepares an ordering descriptor. diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso index 146a08bc5a57..4cca20c4df10 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso @@ -64,6 +64,8 @@ type SQLite_Dialect generate_sql self query = Base_Generator.generate_query self query . build + ## PRIVATE + Generates SQL to truncate a table. 
generate_truncate_table_sql : Text -> SQL_Builder generate_truncate_table_sql self table_name = Base_Generator.truncate_table_delete_from_style self table_name diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso index d83211a988de..b4e987ae8c19 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Main.enso @@ -3,12 +3,11 @@ export project.Connection.Client_Certificate.Client_Certificate export project.Connection.Connection_Options.Connection_Options export project.Connection.Credentials.Credentials export project.Connection.Database -export project.Connection.Postgres_Details.Postgres_Details +export project.Connection.Postgres.Postgres export project.Connection.SQLite.SQLite export project.Connection.SQLite_Format.SQLite_Format export project.Connection.SSL_Mode.SSL_Mode export project.SQL_Query.SQL_Query export project.Update_Action.Update_Action -from project.Connection.Postgres_Details.Postgres_Details export Postgres from project.Extensions.Upload_Database_Table export all from project.Extensions.Upload_In_Memory_Table export all diff --git a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Connection.enso b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Connection.enso index 679a24402bb5..31e9ab484122 100644 --- a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Connection.enso +++ b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Connection.enso @@ -41,7 +41,7 @@ type Snowflake_Connection ## PRIVATE - A Postgres database connection. + A Snowflake database connection. Arguments: - connection: the underlying connection. diff --git a/std-bits/database/src/main/java/org/enso/database/postgres/PostgresConnectionDetailsSPI.java b/std-bits/database/src/main/java/org/enso/database/postgres/PostgresConnectionDetailsSPI.java index 46f94a31c9fa..ee357856e205 100644 --- a/std-bits/database/src/main/java/org/enso/database/postgres/PostgresConnectionDetailsSPI.java +++ b/std-bits/database/src/main/java/org/enso/database/postgres/PostgresConnectionDetailsSPI.java @@ -6,17 +6,17 @@ public class PostgresConnectionDetailsSPI extends DatabaseConnectionDetailsSPI { @Override protected String getModuleName() { - return "Standard.Database.Connection.Postgres_Details"; + return "Standard.Database.Connection.Postgres"; } @Override protected String getTypeName() { - return "Postgres_Details"; + return "Postgres"; } @Override protected String getCodeForDefaultConstructor() { - return "(Postgres 'localhost' 5432)"; + return "(Postgres.Server 'localhost' 5432)"; } @Override diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso index 05206c460b7d..84356596f1ec 100644 --- a/test/Table_Tests/src/Database/Postgres_Spec.enso +++ b/test/Table_Tests/src/Database/Postgres_Spec.enso @@ -728,27 +728,27 @@ add_table_specs suite_builder = suite_builder.group "[PostgreSQL] SSL connectivity tests" pending=ssl_pending group_builder-> group_builder.specify "should connect without ssl parameter" <| - Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password)) . should_succeed + Database.connect (Postgres.Server db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password)) . 
should_succeed group_builder.specify "should connect, requiring SSL" <| - Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=SSL_Mode.Require) . should_succeed + Database.connect (Postgres.Server db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=SSL_Mode.Require) . should_succeed group_builder.specify "should connect be able to verify the certificate" <| - Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Verify_CA ca_cert_file)) . should_succeed + Database.connect (Postgres.Server db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Verify_CA ca_cert_file)) . should_succeed ## Default certificate should not accept the self signed certificate. - ca_fail = Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=SSL_Mode.Verify_CA) + ca_fail = Database.connect (Postgres.Server db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=SSL_Mode.Verify_CA) ca_fail.is_error . should_equal True ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True group_builder.specify "should connect be able to verify the host name against the certificate" <| - Database.connect (Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) . should_succeed + Database.connect (Postgres.Server db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) . should_succeed alternate_host = Environment.get "ENSO_POSTGRES_ALTERNATE_HOST" . if_nothing <| if db_host == "127.0.0.1" then "localhost" else Nothing pending_alternate = if alternate_host.is_nothing then "Alternative host name not configured." else Nothing group_builder.specify "should fail to connect with alternate host name not valid in certificate" pending=pending_alternate <| - ca_fail = Database.connect (Postgres alternate_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) + ca_fail = Database.connect (Postgres.Server alternate_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) use_ssl=(SSL_Mode.Full_Verification ca_cert_file)) ca_fail.is_error . should_equal True ca_fail.catch SQL_Error . is_a SQL_Error . should_equal True @@ -770,7 +770,7 @@ add_table_specs suite_builder = with_secret "my_postgres_username" db_user username_secret-> with_secret "my_postgres_password" db_password password_secret-> my_secret_name = "Enso Test: My Secret App NAME " + (Random.uuid.take 5) with_secret "my_postgres_app_name" my_secret_name app_name_secret-> Test.with_retries <| - details = Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password username_secret password_secret) + details = Postgres.Server db_host db_port db_name credentials=(Credentials.Username_And_Password username_secret password_secret) # We set the ApplicationName option, so that we can see that secrets can be used in custom properties. 
options = Connection_Options.Value [["ApplicationName", app_name_secret]] connection = Database.connect details options @@ -794,7 +794,7 @@ get_configured_connection_details = db_user = Environment.get "ENSO_POSTGRES_USER" db_password = Environment.get "ENSO_POSTGRES_PASSWORD" if db_name.is_nothing then Nothing else - Postgres db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) + Postgres.Server db_host db_port db_name credentials=(Credentials.Username_And_Password db_user db_password) ## Returns a function that takes anything and returns a new connection. The function creates a _new_ connection on each invocation @@ -852,12 +852,12 @@ add_pgpass_specs suite_builder = suite_builder.group "[PostgreSQL] .pgpass" grou add_connection_setup_specs suite_builder = suite_builder.group "[PostgreSQL] Connection setup" group_builder-> group_builder.specify "should use environment variables as host, port and database defaults and fall back to hardcoded defaults" <| - c1 = Postgres "example.com" 12345 "my_db" - c2 = Postgres + c1 = Postgres.Server "example.com" 12345 "my_db" + c2 = Postgres.Server c3 = Test_Environment.unsafe_with_environment_override "PGHOST" "192.168.0.1" <| Test_Environment.unsafe_with_environment_override "PGPORT" "1000" <| Test_Environment.unsafe_with_environment_override "PGDATABASE" "ensoDB" <| - Postgres + Postgres.Server c1.host . should_equal "example.com" c1.port . should_equal 12345 @@ -877,7 +877,7 @@ add_connection_setup_specs suite_builder = suite_builder.group "[PostgreSQL] Con ## Currently we require the port to be numeric. When we support Unix-sockets, we may lift that restriction. c4 = Test_Environment.unsafe_with_environment_override "PGPORT" "foobar" <| - Postgres + Postgres.Server c4.host . should_equal "localhost" c4.port . should_equal 5432 c4.database . should_equal "postgres" @@ -885,12 +885,12 @@ add_connection_setup_specs suite_builder = suite_builder.group "[PostgreSQL] Con add_ssl props = props+[Pair.new 'sslmode' 'prefer'] group_builder.specify "should use the given credentials" <| - c = Postgres credentials=(Credentials.Username_And_Password "myuser" "mypass") + c = Postgres.Server credentials=(Credentials.Username_And_Password "myuser" "mypass") c.jdbc_url . should_equal "jdbc:postgresql://localhost:5432/postgres" c.jdbc_properties . should_equal <| add_ssl [Pair.new "user" "myuser", Pair.new "password" "mypass"] group_builder.specify "should fallback to environment variables and fill-out missing information based on the PGPASS file (if available)" <| - c1 = Postgres + c1 = Postgres.Server c1.jdbc_url . should_equal "jdbc:postgresql://localhost:5432/postgres" c1.jdbc_properties . should_equal <| add_ssl [] @@ -901,9 +901,9 @@ add_connection_setup_specs suite_builder = suite_builder.group "[PostgreSQL] Con Test_Environment.unsafe_with_environment_override "PGUSER" "someuser" <| c1.jdbc_properties . should_equal <| add_ssl [Pair.new "user" "someuser", Pair.new "password" "somepassword"] - c2 = Postgres "192.168.4.0" 1234 "foo" - c3 = Postgres "::1" 55999 "database_name" - c4 = Postgres "::1" 55999 "otherDB" + c2 = Postgres.Server "192.168.4.0" 1234 "foo" + c3 = Postgres.Server "::1" 55999 "database_name" + c4 = Postgres.Server "::1" 55999 "otherDB" c2.jdbc_properties . should_equal <| add_ssl [] c3.jdbc_properties . should_equal <| add_ssl [] c4.jdbc_properties . 
should_equal <| add_ssl [] From 53eec66edab5636ed6234c7450da284dee4e81d3 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Mon, 8 Jul 2024 11:05:11 +0200 Subject: [PATCH 02/11] Low-hanging perf improvements (#10462) - avoid loading shapeless for the sole purpose of having a compile-time type inequality - don't use `sys.env` to avoid some Scala conversions - lazy initialization of fields # Important Notes On a slow machine, so easier to spot. ![Screenshot from 2024-07-05 16-15-06](https://github.com/enso-org/enso/assets/292128/a07f42c5-9bee-492b-aad1-46fab7b6476f) ![Screenshot from 2024-07-05 18-14-53](https://github.com/enso-org/enso/assets/292128/694c4fb1-dfda-4629-8bd3-21c765612ec3) --- build.sbt | 2 -- .../enso/languageserver/boot/MainModule.scala | 4 ++-- .../scala/org/enso/compiler/pass/IRPass.scala | 19 ++++++++++++++++--- .../pass/optimise/LambdaConsolidate.scala | 15 ++++++++------- .../locking/ThreadSafeFileLockManager.scala | 2 +- 5 files changed, 27 insertions(+), 15 deletions(-) diff --git a/build.sbt b/build.sbt index aa1a8d8e4536..a7cdfa4f8526 100644 --- a/build.sbt +++ b/build.sbt @@ -548,7 +548,6 @@ val scalacticVersion = "3.3.0-SNAP4" val scalaLoggingVersion = "3.9.4" val scalameterVersion = "0.19" val scalatestVersion = "3.3.0-SNAP4" -val shapelessVersion = "2.3.10" val slf4jVersion = JPMSUtils.slf4jVersion val sqliteVersion = "3.42.0.0" val tikaVersion = "2.4.1" @@ -2226,7 +2225,6 @@ lazy val `runtime-compiler` = annotationProcSetting, (Test / fork) := true, libraryDependencies ++= Seq( - "com.chuusai" %% "shapeless" % shapelessVersion % "provided", "junit" % "junit" % junitVersion % Test, "com.github.sbt" % "junit-interface" % junitIfVersion % Test, "org.scalatest" %% "scalatest" % scalatestVersion % Test, diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/boot/MainModule.scala b/engine/language-server/src/main/scala/org/enso/languageserver/boot/MainModule.scala index 7de0fb427f6d..e180b34babdc 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/boot/MainModule.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/boot/MainModule.scala @@ -64,7 +64,7 @@ import java.net.URI import java.nio.charset.StandardCharsets import java.time.Clock -import scala.concurrent.duration._ +import scala.concurrent.duration.DurationInt /** A main module containing all components of the server. 
* @@ -91,7 +91,7 @@ class MainModule(serverConfig: LanguageServerConfig, logLevel: Level) { new File(serverConfig.contentRootPath) ) - private val openAiKey = sys.env.get("OPENAI_API_KEY") + private val openAiKey = Option(java.lang.System.getenv("OPENAI_API_KEY")) private val openAiCfg = openAiKey.map(AICompletionConfig) val languageServerConfig = Config( diff --git a/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/IRPass.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/IRPass.scala index 881a1d9dd63a..cb4a1c996d0b 100644 --- a/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/IRPass.scala +++ b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/IRPass.scala @@ -5,7 +5,6 @@ import org.enso.compiler.core.{CompilerError, IR, Identifier} import org.enso.compiler.core.ir.ProcessingPass import org.enso.compiler.core.ir.Module import org.enso.compiler.core.ir.Expression -import shapeless.=:!= import java.util.UUID import scala.annotation.unused @@ -129,7 +128,7 @@ object IRPass { * @return `ev`, cast to `T` if it is a `T` */ def as[T <: Metadata: ClassTag](implicit - @unused ev: T =:!= Metadata + @unused ev: T =!= Metadata ): Option[T] = { this match { case p: T => Some(p) @@ -146,7 +145,7 @@ object IRPass { */ @throws[CompilerError] def unsafeAs[T <: Metadata: ClassTag](implicit - @unused ev: T =:!= Metadata + @unused ev: T =!= Metadata ): T = { this .as[T] @@ -183,4 +182,18 @@ object IRPass { override def duplicate(): Option[Metadata] = Some(this) } } + + // https://stackoverflow.com/questions/6909053/enforce-type-difference + + sealed class =!=[A, B] + + trait LowerPriorityImplicits { + implicit def equal[A]: =!=[A, A] = sys.error("should not be called") + } + object =!= extends LowerPriorityImplicits { + implicit def nequal[A, B](implicit same: A =:= B = null): =!=[A, B] = + if (same != null) + sys.error("should not be called explicitly with same type") + else new =!=[A, B] + } } diff --git a/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/optimise/LambdaConsolidate.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/optimise/LambdaConsolidate.scala index 51e1fb813fe3..f80c55eea5bc 100644 --- a/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/optimise/LambdaConsolidate.scala +++ b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/optimise/LambdaConsolidate.scala @@ -418,20 +418,21 @@ case object LambdaConsolidate extends IRPass { ): List[DefinitionArgument] = { argsWithShadowed.map { case ( - spec @ DefinitionArgument.Specified(name, _, _, _, _, _, _), + spec: DefinitionArgument.Specified, isShadowed ) => + val oldName = spec.name val newName = if (isShadowed) { freshNameSupply - .newName(from = Some(name)) + .newName(from = Some(oldName)) .copy( - location = name.location, - passData = name.passData, - diagnostics = name.diagnostics, - id = name.getId + location = oldName.location, + passData = oldName.passData, + diagnostics = oldName.diagnostics, + id = oldName.getId ) - } else name + } else oldName spec.copy(name = newName) } diff --git a/lib/scala/distribution-manager/src/main/scala/org/enso/distribution/locking/ThreadSafeFileLockManager.scala b/lib/scala/distribution-manager/src/main/scala/org/enso/distribution/locking/ThreadSafeFileLockManager.scala index 6f42d20faffa..cc86996c540e 100644 --- a/lib/scala/distribution-manager/src/main/scala/org/enso/distribution/locking/ThreadSafeFileLockManager.scala +++ 
b/lib/scala/distribution-manager/src/main/scala/org/enso/distribution/locking/ThreadSafeFileLockManager.scala @@ -35,7 +35,7 @@ import java.nio.file.Path */ class ThreadSafeFileLockManager(locksRoot: Path) extends ThreadSafeLockManager { val fileLockManager = new FileLockManager(locksRoot) - val localLocks = + lazy val localLocks = collection.concurrent.TrieMap.empty[String, ThreadSafeLock] /** A thread-safe wrapper for a file lock - ensures that the process holds at From 4c0647ea2977dda1e4673635e4ddea24b92c35c9 Mon Sep 17 00:00:00 2001 From: James Dunkerley Date: Mon, 8 Jul 2024 11:26:30 +0100 Subject: [PATCH 03/11] Stop publishing `First`/`Last` as constructors and use auto-scoping for `take` and `drop`. (#10467) - Removes `First` and `Last` from the `Standard.Base` exports. - Enable auto-scoping for all `Index_Sub_Range` and `Text_Sub_Range`. - Update all use of those methods to use auto-scoping. --- CHANGELOG.md | 2 + .../AWS/0.0.0-dev/src/Internal/S3_Path.enso | 8 +- .../Base/0.0.0-dev/src/Data/Array.enso | 4 +- .../0.0.0-dev/src/Data/Index_Sub_Range.enso | 16 +- .../0.0.0-dev/src/Data/Json/Extensions.enso | 1 - .../0.0.0-dev/src/Data/Text/Extensions.enso | 88 +-- .../src/Data/Text/Text_Sub_Range.enso | 19 +- .../Base/0.0.0-dev/src/Data/Vector.enso | 4 +- .../0.0.0-dev/src/Enso_Cloud/Enso_File.enso | 1 - .../Internal/Enso_File_Helpers.enso | 3 +- .../src/Enso_Cloud/Internal/Enso_Path.enso | 3 +- .../Internal/Existing_Enso_Asset.enso | 3 +- .../src/Internal/Array_Like_Helpers.enso | 5 +- .../lib/Standard/Base/0.0.0-dev/src/Main.enso | 1 - .../0.0.0-dev/src/Network/HTTP/Response.enso | 3 +- .../Standard/Base/0.0.0-dev/src/Runtime.enso | 3 +- .../Base/0.0.0-dev/src/System/File.enso | 4 +- .../Standard/Base/0.0.0-dev/src/Warning.enso | 7 +- .../Database/0.0.0-dev/src/DB_Column.enso | 4 +- .../Database/0.0.0-dev/src/DB_Table.enso | 8 +- .../Database/0.0.0-dev/src/Errors.enso | 4 +- .../src/Internal/Aggregate_Helper.enso | 43 +- .../src/Internal/Base_Generator.enso | 6 +- .../Postgres/Postgres_Data_Link_Setup.enso | 2 +- .../Internal/Postgres/Postgres_Dialect.enso | 2 +- .../src/Internal/SQLite/SQLite_Dialect.enso | 2 +- .../Internal/SQLite/SQLite_Type_Mapping.enso | 4 +- .../0.0.0-dev/src/Internal/Upload_Table.enso | 4 +- .../0.0.0-dev/src/Take_Drop_Helpers.enso | 13 +- .../src/Internal/Snowflake_Dialect.enso | 2 +- .../Standard/Table/0.0.0-dev/src/Column.enso | 16 +- .../src/Extensions/Table_Conversions.enso | 2 +- .../src/Internal/Aggregate_Column_Helper.enso | 2 +- .../src/Internal/Column_Naming_Helper.enso | 2 +- .../src/Internal/Delimited_Reader.enso | 2 +- .../src/Internal/Display_Helpers.enso | 4 +- .../0.0.0-dev/src/Internal/Table_Helpers.enso | 4 +- .../Table/0.0.0-dev/src/Rows_To_Read.enso | 2 +- .../Standard/Table/0.0.0-dev/src/Table.enso | 22 +- .../Visualization/0.0.0-dev/src/Helpers.enso | 4 +- .../0.0.0-dev/src/Scatter_Plot.enso | 4 +- test/AWS_Tests/src/Redshift_Spec.enso | 2 +- test/Base_Tests/src/Data/Text/Utils_Spec.enso | 4 +- test/Base_Tests/src/Data/Text_Spec.enso | 530 +++++++++--------- .../Data/Time/Date_Time_Formatter_Spec.enso | 4 +- .../src/Data/Vector/Slicing_Helpers_Spec.enso | 23 +- test/Base_Tests/src/Data/Vector_Spec.enso | 179 +++--- .../src/Runtime/Stack_Traces_Spec.enso | 4 +- .../src/Semantic/Conversion_Spec.enso | 2 +- .../src/Semantic/Meta_Location_Spec.enso | 2 +- .../src/Semantic/Warnings_Spec.enso | 2 +- test/Base_Tests/src/System/File_Spec.enso | 8 +- test/Benchmarks/src/Table/Aggregate.enso | 2 +- test/Benchmarks/src/Table/Join.enso | 2 +- 
test/Benchmarks/src/Table/Sorting.enso | 2 +- test/Benchmarks/src/Time/Work_Days.enso | 2 +- test/Benchmarks/src/Vector/Operations.enso | 4 +- .../src/Python_Examples_Spec.enso | 2 +- test/Snowflake_Tests/src/Snowflake_Spec.enso | 2 +- .../Aggregate_Spec.enso | 2 +- .../Join/Join_Spec.enso | 2 +- .../Take_Drop_Spec.enso | 307 +++++----- .../src/Database/Postgres_Spec.enso | 2 +- .../Table_Tests/src/Database/SQLite_Spec.enso | 2 +- .../src/Helpers/Sorted_List_Index_Spec.enso | 2 +- .../src/In_Memory/Aggregate_Column_Spec.enso | 2 +- .../src/In_Memory/Column_Spec.enso | 12 +- .../Table_Tests/src/In_Memory/Table_Spec.enso | 34 +- .../src/Scatter_Plot_Spec.enso | 2 +- .../benchmark-analysis/src/Main.enso | 2 +- 70 files changed, 730 insertions(+), 748 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 18f20fe3e2f7..2737eabd722d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -13,10 +13,12 @@ - [Renamed `Location.Start` to `Location.Left` and `Location.End` to `Location.Right`.][10445] - [Renamed `Postgres_Details.Postgres` to `Postgres.Server`.][10466] +- [Remove `First` and `Last` from namespace, use auto-scoped.][10467] [10434]: https://github.com/enso-org/enso/pull/10434 [10445]: https://github.com/enso-org/enso/pull/10445 [10466]: https://github.com/enso-org/enso/pull/10466 +[10467]: https://github.com/enso-org/enso/pull/10467 # Enso 2024.2 diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/Internal/S3_Path.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Internal/S3_Path.enso index 628e1bead256..490fea997027 100644 --- a/distribution/lib/Standard/AWS/0.0.0-dev/src/Internal/S3_Path.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Internal/S3_Path.enso @@ -75,9 +75,9 @@ type S3_Path without the trailing delimiter. file_name self -> Text = if self.is_root then S3_Path.delimiter else - trimmed = if self.key.ends_with S3_Path.delimiter then self.key.drop (Last 1) else self.key + trimmed = if self.key.ends_with S3_Path.delimiter then self.key.drop (..Last 1) else self.key last_index = trimmed.last_index_of S3_Path.delimiter - if last_index == Nothing then trimmed else trimmed.drop (First last_index+1) + if last_index == Nothing then trimmed else trimmed.drop (..First last_index+1) ## PRIVATE Checks if the given other path is inside of this path. @@ -126,7 +126,7 @@ type Decomposed_S3_Path True -> parts.map Path_Entry.Directory False -> if parts.is_empty then [] else - (parts.drop (Last 1) . map Path_Entry.Directory) + [Path_Entry.File parts.last] + (parts.drop (..Last 1) . map Path_Entry.Directory) + [Path_Entry.File parts.last] Decomposed_S3_Path.Value entries has_root_prefix ## PRIVATE @@ -152,5 +152,5 @@ type Decomposed_S3_Path ## PRIVATE parent self -> Decomposed_S3_Path | Nothing = if self.parts.is_empty then Nothing else - new_parts = self.parts.drop (Last 1) + new_parts = self.parts.drop (..Last 1) Decomposed_S3_Path.Value new_parts self.go_to_root diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso index 6ede8c854250..160deadd8c34 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Array.enso @@ -162,7 +162,7 @@ type Array If a `Range`, the selection is specified by two indices, from and to. 
@range Index_Sub_Range.default_widget take : (Index_Sub_Range | Range | Integer) -> Vector Any - take self range=(Index_Sub_Range.First 1) = + take self range:(Index_Sub_Range | Range | Integer)=..First = Array_Like_Helpers.take self range ## ALIAS skip, remove @@ -177,7 +177,7 @@ type Array If a `Range`, the selection is specified by two indices, from and to. @range Index_Sub_Range.default_widget drop : (Index_Sub_Range | Range | Integer) -> Vector Any - drop self range=(Index_Sub_Range.First 1) = + drop self range:(Index_Sub_Range | Range | Integer)=..First = Array_Like_Helpers.drop self range ## GROUP Calculations diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso index 720b20437c9c..ee9705d3f5f8 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso @@ -81,12 +81,12 @@ type Index_Sub_Range `Range` too. default_options : Vector Option default_options = - o1 = Option "First" "(Index_Sub_Range.First 1)" - o2 = Option "Last" "(Index_Sub_Range.Last 1)" - o3 = Option "While" "(Index_Sub_Range.While (x-> False))" - o4 = Option "By_Index" "(Index_Sub_Range.By_Index [0])" - o5 = Option "Sample" "(Index_Sub_Range.Sample 10)" - o6 = Option "Every" "(Index_Sub_Range.Every 2)" + o1 = Option "First" "(..First 1)" + o2 = Option "Last" "(..Last 1)" + o3 = Option "While" "(..While (x-> False))" + o4 = Option "By_Index" "(..By_Index [0])" + o5 = Option "Sample" "(..Sample 10)" + o6 = Option "Every" "(..Every 2)" o7 = Option "Range" "(Range.new 0 100)" [o1, o2, o3, o4, o5, o6, o7] @@ -196,7 +196,7 @@ sort_and_merge_ranges ranges = normalization on its own. - range: The `Index_Sub_Range` to take from the collection. take_helper : Integer -> (Integer -> Any) -> (Integer -> Integer -> Any) -> (Vector (Integer | Range) -> Vector Any) -> (Index_Sub_Range | Range | Integer) -> Any -take_helper length at single_slice slice_ranges range = case range of +take_helper length at single_slice slice_ranges range:(Index_Sub_Range | Range | Integer) = case range of count : Integer -> take_helper length at single_slice slice_ranges (Index_Sub_Range.First count) _ : Range -> take_helper length at single_slice slice_ranges (Index_Sub_Range.By_Index range) Index_Sub_Range.First count -> single_slice 0 (length.min count) @@ -247,7 +247,7 @@ take_helper length at single_slice slice_ranges range = case range of normalized. - range: The `Index_Sub_Range` to drop from the collection. 
drop_helper : Integer -> (Integer -> Any) -> (Integer -> Integer -> Any) -> (Vector (Integer | Range) -> Vector Any) -> (Index_Sub_Range | Range | Integer) -> Any -drop_helper length at single_slice slice_ranges range = case range of +drop_helper length at single_slice slice_ranges range:(Index_Sub_Range | Range | Integer) = case range of _ : Integer -> single_slice range length _ : Range -> drop_helper length at single_slice slice_ranges (Index_Sub_Range.By_Index range) Index_Sub_Range.First count -> single_slice count length diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso index 10fcedd9e1bc..33a5497e31c0 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso @@ -11,7 +11,6 @@ import project.Data.Numbers.Integer import project.Data.Numbers.Number import project.Data.Numeric.Math_Context.Math_Context import project.Data.Text.Text -import project.Data.Text.Text_Sub_Range.Text_Sub_Range import project.Data.Vector.Vector import project.Error.Error import project.Errors.Deprecated.Deprecated diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso index c5b239a94694..6fa86156f24f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Extensions.enso @@ -1033,29 +1033,29 @@ Text.repeat self count=1 = > Example Various different ways to take part of "Hello World!" - "Hello World!".take First == "H" - "Hello World!".take (First 5) == "Hello" - "Hello World!".take (First 0) == "" - "Hello World!".take Last == "!" - "Hello World!".take (Last 6) == "World!" - "Hello World!".take (Before " ") == "Hello" - "Hello World!".take (Before_Last "o") == "Hello W" - "Hello World!".take (After " ") == "World!" - "Hello World!".take (After_Last "o") == "rld!" - "Hello World!".take (While c->c!=" ") == "Hello" - "Hello World!".take (Range 3 5) == "lo" - "Hello World!".take (Range 5 Nothing) == " World!" - "Hello World!".take (Range 5 12) == " World!" - "Hello World!".take (Range 6 12 2) == "Wrd" - "Hello World!".take (Every 2 first=6) == "Wrd" - "Hello World!".take (Every 3) == "Hl Wl" - "Hello World!".take (By_Index 0) == "H" - "Hello World!".take (By_Index [1, 0, 0, 6, 0]) == "eHHWH" - "Hello World!".take (By_Index [Range 0 3, 6, Range 6 12 2]) == "HelWWrd" - "Hello World!".take (Sample 3 seed=42) == "l d" + "Hello World!".take ..First == "H" + "Hello World!".take (..First 5) == "Hello" + "Hello World!".take (..First 0) == "" + "Hello World!".take ..Last == "!" + "Hello World!".take (..Last 6) == "World!" + "Hello World!".take (..Before " ") == "Hello" + "Hello World!".take (..Before_Last "o") == "Hello W" + "Hello World!".take (..After " ") == "World!" + "Hello World!".take (..After_Last "o") == "rld!" + "Hello World!".take (..While c->c!=" ") == "Hello" + "Hello World!".take (..Range 3 5) == "lo" + "Hello World!".take (..Range 5 Nothing) == " World!" + "Hello World!".take (..Range 5 12) == " World!" 
+ "Hello World!".take (..Range 6 12 2) == "Wrd" + "Hello World!".take (..Every 2 first=6) == "Wrd" + "Hello World!".take (..Every 3) == "Hl Wl" + "Hello World!".take (..By_Index 0) == "H" + "Hello World!".take (..By_Index [1, 0, 0, 6, 0]) == "eHHWH" + "Hello World!".take (..By_Index [Range 0 3, 6, Range 6 12 2]) == "HelWWrd" + "Hello World!".take (..Sample 3 seed=42) == "l d" @range Text_Sub_Range.default_widget Text.take : (Text_Sub_Range | Index_Sub_Range | Range | Integer) -> Text ! Index_Out_Of_Bounds -Text.take self range=(Index_Sub_Range.First 1) = +Text.take self range:(Text_Sub_Range | Index_Sub_Range | Range | Integer)=..First = ranges = Codepoint_Ranges.resolve self range case ranges of Range.Between start end 1 -> @@ -1082,29 +1082,29 @@ Text.take self range=(Index_Sub_Range.First 1) = > Example Various different ways to take part of "Hello World!" - "Hello World!".drop First == "ello World!" - "Hello World!".drop (First 5) == " World!" - "Hello World!".drop (First 0) == "Hello World!" - "Hello World!".drop Last == "Hello World" - "Hello World!".drop (Last 6) == "Hello " - "Hello World!".drop (Before " ") == " World!" - "Hello World!".drop (Before_Last "o") == "orld!" - "Hello World!".drop (After " ") == "Hello " - "Hello World!".drop (After_Last "o") == "Hello Wo" - "Hello World!".drop (While c->c!=" ") == " World!" - "Hello World!".drop (Range 3 5) == "Hel World!" - "Hello World!".drop (Range 5 Nothing) == "Hello" - "Hello World!".drop (Range 5 12) == "Hello" - "Hello World!".drop (Range 6 12 2) == "Hello ol!" - "Hello World!".drop (Every 2 first=6) == "Hello ol!" - "Hello World!".drop (Every 3) == "elo ord!" - "Hello World!".drop (By_Index 0) == "ello World!" - "Hello World!".drop (By_Index [1, 0, 0, 6, 0]) == "llo orld!" - "Hello World!".drop (By_Index [Range 0 3, 6, Range 6 12 2]) == "lo ol!" - "Hello World!".drop (Sample 3 seed=42) == "HeloWorl!" + "Hello World!".drop ..First == "ello World!" + "Hello World!".drop (..First 5) == " World!" + "Hello World!".drop (..First 0) == "Hello World!" + "Hello World!".drop ..Last == "Hello World" + "Hello World!".drop (..Last 6) == "Hello " + "Hello World!".drop (..Before " ") == " World!" + "Hello World!".drop (..Before_Last "o") == "orld!" + "Hello World!".drop (..After " ") == "Hello " + "Hello World!".drop (..After_Last "o") == "Hello Wo" + "Hello World!".drop (..While c->c!=" ") == " World!" + "Hello World!".drop (..Range 3 5) == "Hel World!" + "Hello World!".drop (..Range 5 Nothing) == "Hello" + "Hello World!".drop (..Range 5 12) == "Hello" + "Hello World!".drop (..Range 6 12 2) == "Hello ol!" + "Hello World!".drop (..Every 2 first=6) == "Hello ol!" + "Hello World!".drop (..Every 3) == "elo ord!" + "Hello World!".drop (..By_Index 0) == "ello World!" + "Hello World!".drop (..By_Index [1, 0, 0, 6, 0]) == "llo orld!" + "Hello World!".drop (..By_Index [Range 0 3, 6, Range 6 12 2]) == "lo ol!" + "Hello World!".drop (..Sample 3 seed=42) == "HeloWorl!" @range Text_Sub_Range.default_widget Text.drop : (Text_Sub_Range | Index_Sub_Range | Range) -> Text ! 
Index_Out_Of_Bounds -Text.drop self range=(Index_Sub_Range.First 1) = +Text.drop self range:(Text_Sub_Range | Index_Sub_Range | Range | Integer)=..First = ranges = Codepoint_Ranges.resolve self range case ranges of Range.Between start end 1 -> @@ -1197,9 +1197,9 @@ Text.pad self length:Integer=0 with_pad:Text=' ' at:Location=..Right = case at o remainder = pad_size % with_pad_length case at of Location.Left -> - with_pad.take (Index_Sub_Range.Last remainder) + with_pad.repeat full_repetitions + self + with_pad.take (..Last remainder) + with_pad.repeat full_repetitions + self Location.Right -> - self + with_pad.repeat full_repetitions + with_pad.take (Index_Sub_Range.First remainder) + self + with_pad.repeat full_repetitions + with_pad.take (..First remainder) ## GROUP Text ICON text diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso index 0fed425b4d5e..fe5020c9280b 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso @@ -1,4 +1,3 @@ -import project.Data.Index_Sub_Range as Index_Sub_Range_Module import project.Data.Index_Sub_Range.Index_Sub_Range import project.Data.Numbers.Integer import project.Data.Pair.Pair @@ -16,7 +15,7 @@ import project.Nothing.Nothing import project.Panic.Panic import project.Random.Random from project.Data.Boolean import Boolean, False, True -from project.Data.Index_Sub_Range import handle_unmatched_type +from project.Data.Index_Sub_Range import handle_unmatched_type, sort_and_merge_ranges from project.Data.Range.Extensions import all from project.Metadata.Choice import Option from project.Metadata.Widget import Single_Choice @@ -56,10 +55,10 @@ type Text_Sub_Range ## PRIVATE default_options : Vector Option default_options = - o1 = Option "Before" "(Text_Sub_Range.Before ' ')" - o2 = Option "Before_Last" "(Text_Sub_Range.Before_Last ' ')" - o3 = Option "After" "(Text_Sub_Range.After ' ')" - o4 = Option "After_Last" "(Text_Sub_Range.After_Last ' ')" + o1 = Option "Before" "(..Before ' ')" + o2 = Option "Before_Last" "(..Before_Last ' ')" + o3 = Option "After" "(..After ' ')" + o4 = Option "After_Last" "(..After_Last ' ')" [o1, o2, o3, o4] ## PRIVATE @@ -89,7 +88,7 @@ type Codepoint_Ranges Empty ranges are not discarded. sorted_and_distinct_ranges : Vector Range sorted_and_distinct_ranges self = if self.is_sorted_and_distinct then self.ranges else - Index_Sub_Range_Module.sort_and_merge_ranges self.ranges + sort_and_merge_ranges self.ranges ## PRIVATE Finds code-point indices corresponding to the part of the input matching the @@ -102,7 +101,7 @@ type Codepoint_Ranges in such a way that the ranges returned by this method always have a step equal to 1. 
resolve : Text -> (Text_Sub_Range | Index_Sub_Range | Range | Integer) -> (Range | Codepoint_Ranges) - resolve text range = + resolve text:Text range:(Text_Sub_Range | Index_Sub_Range | Range | Integer) = case range of Text_Sub_Range.Before delimiter -> if delimiter.is_empty then (0.up_to 0) else @@ -151,13 +150,13 @@ type Codepoint_Ranges Index_Sub_Range.Sample count seed -> rng = Random.new_generator seed indices = rng.indices text.length count - Codepoint_Ranges.resolve text (Index_Sub_Range.By_Index indices) + Codepoint_Ranges.resolve text (..By_Index indices) Index_Sub_Range.Every step start -> if step <= 0 then Error.throw (Illegal_Argument.Error "Step within Every must be positive.") else len = text.length if start >= len then 0.up_to 0 else simple_range = start.up_to text.length . with_step step - Codepoint_Ranges.resolve text (Index_Sub_Range.By_Index simple_range) + Codepoint_Ranges.resolve text (..By_Index simple_range) _ : Range -> Codepoint_Ranges.resolve text (Index_Sub_Range.By_Index range) _ : Integer -> diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso index a0d3aba3c3e2..93a13fd6717c 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso @@ -974,7 +974,7 @@ type Vector a If a `Range`, the selection is specified by two indices, from and to. @range Index_Sub_Range.default_widget take : (Index_Sub_Range | Range | Integer) -> Vector Any - take self range=(Index_Sub_Range.First 1) = + take self range:(Index_Sub_Range | Range | Integer)=..First = Array_Like_Helpers.take self range ## ALIAS skip, remove @@ -989,7 +989,7 @@ type Vector a If a `Range`, the selection is specified by two indices, from and to. 
@range Index_Sub_Range.default_widget drop : (Index_Sub_Range | Range | Integer) -> Vector Any - drop self range=(Index_Sub_Range.First 1) = + drop self range:(Index_Sub_Range | Range | Integer)=..First = Array_Like_Helpers.drop self range ## ALIAS combine, join by row position, merge diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso index 213bfac34e66..099125f4d5cb 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso @@ -39,7 +39,6 @@ import project.System.File_Format_Metadata.File_Format_Metadata import project.System.Input_Stream.Input_Stream import project.System.Output_Stream.Output_Stream from project.Data.Boolean import Boolean, False, True -from project.Data.Index_Sub_Range.Index_Sub_Range import Last from project.Data.Text.Extensions import all from project.Enso_Cloud.Internal.Enso_File_Helpers import all from project.Enso_Cloud.Public_Utils import get_required_field diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_File_Helpers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_File_Helpers.enso index 38243ac6dc00..d68ad9ffc603 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_File_Helpers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_File_Helpers.enso @@ -1,7 +1,6 @@ private import project.Any.Any -import project.Data.Index_Sub_Range.Index_Sub_Range import project.Data.Json.Invalid_JSON import project.Data.Json.JS_Object import project.Data.Map.Map @@ -115,7 +114,7 @@ create_datalink_from_stream_action (destination : Enso_File) (allow_existing : B if existing_asset.is_nothing.not && existing_asset.asset_type != Enso_Asset_Type.Data_Link then Error.throw (Illegal_Argument.Error "The destination must be a path to a Data Link, not "+existing_asset.asset_type.to_text+".") else file_name = destination.name if file_name.ends_with data_link_extension . 
not then Error.throw (Illegal_Argument.Error "A datalink must have a name ending with "+data_link_extension+", but the provided name was: "+file_name) else - title = file_name.drop (Index_Sub_Range.Last data_link_extension.length) + title = file_name.drop (..Last data_link_extension.length) stream_result = Output_Stream.with_memory_stream stream_action raw_bytes = stream_result.first action_result = stream_result.second diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_Path.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_Path.enso index fd69ca15c984..51f40e0f141f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_Path.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_Path.enso @@ -1,6 +1,5 @@ private -import project.Data.Index_Sub_Range.Index_Sub_Range import project.Data.Text.Text import project.Data.Vector.Vector import project.Enso_Cloud.Enso_File.Enso_File @@ -42,7 +41,7 @@ type Enso_Path ## PRIVATE parent self -> Enso_Path = if self.is_root then Error.throw (Illegal_Argument.Error "Cannot get parent of the root directory.") else - Enso_Path.Value self.organization_name (self.path_segments.drop (Index_Sub_Range.Last 1)) + Enso_Path.Value self.organization_name (self.path_segments.drop (..Last 1)) ## PRIVATE resolve self (subpath : Text) -> Enso_Path = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso index 1a5d2a233724..7e7971eaed5e 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso @@ -3,7 +3,6 @@ private import project.Data.Json.JS_Object import project.Data.Map.Map import project.Data.Text.Text -import project.Data.Text.Text_Sub_Range.Text_Sub_Range import project.Data.Time.Date_Time.Date_Time import project.Data.Time.Date_Time_Formatter.Date_Time_Formatter import project.Data.Vector.Vector @@ -112,7 +111,7 @@ type Existing_Enso_Asset ## PRIVATE from_id_and_title id:Text title:Text -> Existing_Enso_Asset = - asset_type = Enso_Asset_Type.from (id.take (Text_Sub_Range.Before "-")) + asset_type = Enso_Asset_Type.from (id.take (..Before "-")) Existing_Enso_Asset.Value title id asset_type diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso index 12f640ec5f18..1915c7428cfb 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso @@ -3,6 +3,7 @@ private import project.Any.Any import project.Data.Array.Array import project.Data.Array_Proxy.Array_Proxy +import project.Data.Index_Sub_Range.Index_Sub_Range import project.Data.List.List import project.Data.Map.Map import project.Data.Maybe.Maybe @@ -25,7 +26,7 @@ import project.Runtime.Ref.Ref import project.Warning.Warning from project.Data.Boolean import Boolean, False, True from project.Data.Filter_Condition import unify_condition_or_predicate, unify_condition_predicate_or_element -from project.Data.Index_Sub_Range import drop_helper, Index_Sub_Range, take_helper +from project.Data.Index_Sub_Range import drop_helper, take_helper from project.Data.Ordering import Comparable from 
project.Data.Range.Extensions import all @@ -310,7 +311,7 @@ to_list vector = short_display_text vector max_entries = if max_entries < 1 then Error.throw <| Illegal_Argument.Error "The `max_entries` parameter must be positive." else - prefix = vector.take (Index_Sub_Range.First max_entries) + prefix = vector.take (..First max_entries) if prefix.length == vector.length then vector.to_text else remaining_count = vector.length - prefix.length remaining_text = if remaining_count == 1 then "and 1 more element" else diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso index 6cef76e08b4e..afde3f395f5d 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso @@ -94,7 +94,6 @@ export project.System.Process export project.System.Process.Exit_Code.Exit_Code export project.Warning.Warning from project.Data.Boolean export Boolean, False, True -from project.Data.Index_Sub_Range.Index_Sub_Range export First, Last from project.Data.Json.Extensions export all from project.Data.Numbers export Float, Integer, Number from project.Data.Range.Extensions export all diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso index 6052f72e1983..696d54bb9f3c 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso @@ -5,7 +5,6 @@ import project.Data.Numbers.Integer import project.Data.Numbers.Number import project.Data.Text.Encoding.Encoding import project.Data.Text.Text -import project.Data.Text.Text_Sub_Range.Text_Sub_Range import project.Data.Vector.Vector import project.Error.Error import project.Errors.File_Error.File_Error @@ -226,7 +225,7 @@ resolve_file_metadata_for_response : Response -> File_Format_Metadata resolve_file_metadata_for_response response = uri_as_text = response.uri.to_text guessed_filename = filename_from_content_disposition (response.get_header "Content-Disposition") . if_nothing <| - last_path_segment = uri_as_text.take (Text_Sub_Range.After_Last "/") + last_path_segment = uri_as_text.take (..After_Last "/") ## Heuristic: If the last path segment contains a dot, it is likely a filename, otherwise it is too unlikely + lack of extension will not help our guessing anyway, so we can discard it. 
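For reference, the auto-scoped style that replaces the removed `First`/`Last` exports looks like this — a small sketch adapted from the documentation examples updated in this patch:

```
from Standard.Base import all

# `..First`, `..Last`, `..Before` etc. resolve against the expected
# `Index_Sub_Range` (or `Text_Sub_Range`) type, so the constructors no
# longer need to be imported or exported from `Standard.Base`:
[1, 2, 3, 4, 5].take (..First 2) == [1, 2]
[1, 2, 3, 4, 5].drop (..Last 2) == [1, 2, 3]
"Hello World!".take (..Before " ") == "Hello"
```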
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso index f4d15615ca7a..e2bc140b20a1 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Runtime.enso @@ -15,7 +15,6 @@ import project.Polyglot.Polyglot import project.Runtime.Source_Location.Source_Location import project.System from project.Data.Boolean import Boolean, False, True -from project.Data.Index_Sub_Range.Index_Sub_Range import First, Last from project.Data.Text.Extensions import all from project.Runtime.Context import Input, Output @@ -37,7 +36,7 @@ get_stack_trace : Vector Stack_Trace_Element get_stack_trace = prim_stack = primitive_get_stack_trace stack_with_own_frame = Vector.from_polyglot_array prim_stack - stack = stack_with_own_frame.drop (First 1) + stack = stack_with_own_frame.drop (..First 1) stack.map wrap_primitive_stack_trace_element ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso index edd5f2d5f35a..4415e21a2f9d 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/System/File.enso @@ -1,13 +1,11 @@ import project.Any.Any import project.Data.Array.Array -import project.Data.Index_Sub_Range.Index_Sub_Range import project.Data.Json.JS_Object import project.Data.Numbers.Integer import project.Data.Text.Encoding.Encoding import project.Data.Text.Extensions import project.Data.Text.Matching_Mode.Matching_Mode import project.Data.Text.Text -import project.Data.Text.Text_Sub_Range.Text_Sub_Range import project.Data.Time.Date_Time.Date_Time import project.Data.Vector.Vector import project.Enso_Cloud.Data_Link.Data_Link @@ -874,7 +872,7 @@ get_child_widget file = ## PRIVATE find_extension_from_name : Text -> Text find_extension_from_name name = - extension = name.drop (Text_Sub_Range.Before_Last ".") + extension = name.drop (..Before_Last ".") if extension == "." 
then "" else extension ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso index 0bdd5aac6931..f933851ddc71 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso @@ -1,6 +1,5 @@ import project.Any.Any import project.Data.Array.Array -import project.Data.Index_Sub_Range.Index_Sub_Range import project.Data.Maybe.Maybe import project.Data.Numbers.Integer import project.Data.Pair.Pair @@ -27,7 +26,7 @@ type Warning attach : Any -> Any -> Any attach warning value = origin = Runtime.get_stack_trace - attach_with_stacktrace value warning (origin.drop (Index_Sub_Range.First 1)) + attach_with_stacktrace value warning (origin.drop (..First 1)) ## PRIVATE ADVANCED @@ -332,8 +331,8 @@ map_attached_warnings_helper mapper value frames_to_drop = Maybe.Some new_payload -> self_call_name = "Warning.map_attached_warnings_helper" stack_trace = Runtime.get_stack_trace - stack_trace_up_to_this_function = stack_trace.drop (Index_Sub_Range.While element-> element.name != self_call_name) - new_origin = stack_trace_up_to_this_function.drop (Index_Sub_Range.First 1+frames_to_drop) + stack_trace_up_to_this_function = stack_trace.drop (..While element-> element.name != self_call_name) + new_origin = stack_trace_up_to_this_function.drop (..First 1+frames_to_drop) create new_payload new_origin ## If the mapper did not want to affect this warning, we return the original (unwrapped) warning instance. diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso index d3f6bad1a4ca..5c275909f85c 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Column.enso @@ -1208,7 +1208,7 @@ type DB_Column - range: The selection of rows from the table to return. @range Index_Sub_Range.default_widget take : (Index_Sub_Range | Range | Integer) -> DB_Column - take self range=(First 1) = self.to_table.take range . at 0 + take self range:(Index_Sub_Range | Range | Integer)=..First = self.to_table.take range . at 0 ## ALIAS remove, skip GROUP Standard.Base.Selections @@ -1220,7 +1220,7 @@ type DB_Column - range: The selection of rows from the table to remove. @range Index_Sub_Range.default_widget drop : (Index_Sub_Range | Range | Integer) -> DB_Column - drop self range=(First 1) = self.to_table.drop range . at 0 + drop self range:(Index_Sub_Range | Range | Integer)=..First = self.to_table.drop range . at 0 ## GROUP Standard.Base.Text ICON preparation diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso index 282ab7e42d57..f9c309a0945c 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso @@ -757,7 +757,7 @@ type DB_Table > Example Take first 10 rows of the table. - table.take (First 10) + table.take (..First 10) > Example Take rows from the top of the table as long as their values sum to 10. 
@@ -765,7 +765,7 @@ type DB_Table table.take (While row-> row.to_vector.compute Statistic.Sum == 10) @range Index_Sub_Range.default_widget take : (Index_Sub_Range | Range | Integer) -> DB_Table - take self range:(Index_Sub_Range | Range | Integer)=(..First 1) = + take self range:(Index_Sub_Range | Range | Integer)=..First = Take_Drop_Helpers.take_drop_helper Take_Drop.Take self range ## ALIAS remove, skip @@ -789,7 +789,7 @@ type DB_Table > Example Drop first 10 rows of the table. - table.drop (First 10) + table.drop (..First 10) > Example Drop rows from the top of the table as long as their values sum to 10. @@ -797,7 +797,7 @@ type DB_Table table.drop (While row-> row.to_vector.compute Statistic.Sum == 10) @range Index_Sub_Range.default_widget drop : (Index_Sub_Range | Range | Integer) -> DB_Table - drop self range:(Index_Sub_Range | Range | Integer)=(..First 1) = + drop self range:(Index_Sub_Range | Range | Integer)=..First = Take_Drop_Helpers.take_drop_helper Take_Drop.Drop self range ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso index 76242a284f4a..e9dc8426e5ca 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso @@ -41,12 +41,12 @@ type SQL_Error we don't shorten them too much. We impose an upper limit to avoid unbounded error message size. max_length = 1000 shortened_query_text = if query_text.length <= max_length then query_text else - query_text.take (Index_Sub_Range.First (max_length.div 2)) + " (...) " + query_text.take (Index_Sub_Range.Last (max_length.div 2)) + query_text.take (..First (max_length.div 2)) + " (...) " + query_text.take (..Last (max_length.div 2)) " [Query was: " + shortened_query_text + "]" message = self.java_exception.getMessage max_length = 300 short_message = if message.length < max_length then message else - message.take (Index_Sub_Range.First (max_length.div 2)) + " (...) " + message.take (Index_Sub_Range.Last (max_length.div 2)) + message.take (..First (max_length.div 2)) + " (...) " + message.take (..Last (max_length.div 2)) "There was an SQL error: " + short_message + "." + query ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso index 7712d8ae1c2a..99d94e26ca70 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso @@ -1,9 +1,8 @@ -from Standard.Base import all hiding First, Last +from Standard.Base import all import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Table.Internal.Problem_Builder.Problem_Builder from Standard.Table import Aggregate_Column -from Standard.Table.Aggregate_Column.Aggregate_Column import all from Standard.Table.Errors import Floating_Point_Equality import project.DB_Table.DB_Table @@ -42,49 +41,49 @@ make_aggregate_column table aggregate as dialect infer_return_type problem_build Internal_Column.Value as sql_type_ref expression dialect.check_aggregate_support aggregate . 
if_not_error <| case aggregate of - Group_By c _ -> + Aggregate_Column.Group_By c _ -> Internal_Column.Value as c.sql_type_reference c.expression - Count _ -> simple_aggregate "COUNT_ROWS" [] - Count_Distinct columns _ ignore_nothing -> if columns.is_empty then Error.throw (Illegal_Argument.Error "Count_Distinct must have at least one column.") else + Aggregate_Column.Count _ -> simple_aggregate "COUNT_ROWS" [] + Aggregate_Column.Count_Distinct columns _ ignore_nothing -> if columns.is_empty then Error.throw (Illegal_Argument.Error "Count_Distinct must have at least one column.") else case ignore_nothing of True -> simple_aggregate "COUNT_DISTINCT" columns False -> simple_aggregate "COUNT_DISTINCT_INCLUDE_NULL" columns - Count_Not_Nothing c _ -> simple_aggregate "COUNT" [c] - Count_Nothing c _ -> simple_aggregate "COUNT_IS_NULL" [c] - Count_Not_Empty c _ -> simple_aggregate "COUNT_NOT_EMPTY" [c] - Count_Empty c _ -> simple_aggregate "COUNT_EMPTY" [c] - Percentile p c _ -> + Aggregate_Column.Count_Not_Nothing c _ -> simple_aggregate "COUNT" [c] + Aggregate_Column.Count_Nothing c _ -> simple_aggregate "COUNT_IS_NULL" [c] + Aggregate_Column.Count_Not_Empty c _ -> simple_aggregate "COUNT_NOT_EMPTY" [c] + Aggregate_Column.Count_Empty c _ -> simple_aggregate "COUNT_EMPTY" [c] + Aggregate_Column.Percentile p c _ -> op_kind = "PERCENTILE" expression = SQL_Expression.Operation op_kind [SQL_Expression.Literal p.to_text, c.expression] sql_type_ref = infer_return_type op_kind [c] expression Internal_Column.Value as sql_type_ref expression - Mode c _ -> + Aggregate_Column.Mode c _ -> col = table.make_column c if col.value_type.is_floating_point then problem_builder.report_other_warning (Floating_Point_Equality.Error as) simple_aggregate "MODE" [c] - First c _ ignore_nothing order_by -> case is_non_empty_selector order_by of + Aggregate_Column.First c _ ignore_nothing order_by -> case is_non_empty_selector order_by of False -> Error.throw (Unsupported_Database_Operation.Error "`First` aggregation requires at least one `order_by` column.") True -> op = case ignore_nothing of False -> "FIRST" True -> "FIRST_NOT_NULL" aggregate_with_order_by op c order_by - Last c _ ignore_nothing order_by -> case is_non_empty_selector order_by of + Aggregate_Column.Last c _ ignore_nothing order_by -> case is_non_empty_selector order_by of False -> Error.throw (Unsupported_Database_Operation.Error "`Last` aggregation requires at least one `order_by` column.") True -> op = case ignore_nothing of False -> "LAST" True -> "LAST_NOT_NULL" aggregate_with_order_by op c order_by - Maximum c _ -> simple_aggregate "MAX" [c] - Minimum c _ -> simple_aggregate "MIN" [c] - Shortest c _ -> simple_aggregate "SHORTEST" [c] - Longest c _ -> simple_aggregate "LONGEST" [c] - Standard_Deviation c _ population -> case population of + Aggregate_Column.Maximum c _ -> simple_aggregate "MAX" [c] + Aggregate_Column.Minimum c _ -> simple_aggregate "MIN" [c] + Aggregate_Column.Shortest c _ -> simple_aggregate "SHORTEST" [c] + Aggregate_Column.Longest c _ -> simple_aggregate "LONGEST" [c] + Aggregate_Column.Standard_Deviation c _ population -> case population of True -> simple_aggregate "STDDEV_POP" [c] False -> simple_aggregate "STDDEV_SAMP" [c] - Concatenate c _ separator prefix suffix quote_char -> + Aggregate_Column.Concatenate c _ separator prefix suffix quote_char -> base_args = [c.expression, SQL_Expression.Constant separator, SQL_Expression.Constant prefix, SQL_Expression.Constant suffix] op_kind = case quote_char.is_empty of True -> "CONCAT" 
@@ -96,6 +95,6 @@ make_aggregate_column table aggregate as dialect infer_return_type problem_build expression = SQL_Expression.Operation op_kind effective_args sql_type_ref = infer_return_type op_kind [c] expression Internal_Column.Value as sql_type_ref expression - Sum c _ -> simple_aggregate "SUM" [c] - Average c _ -> simple_aggregate "AVG" [c] - Median c _ -> simple_aggregate "MEDIAN" [c] + Aggregate_Column.Sum c _ -> simple_aggregate "SUM" [c] + Aggregate_Column.Average c _ -> simple_aggregate "AVG" [c] + Aggregate_Column.Median c _ -> simple_aggregate "MEDIAN" [c] diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso index fbb5befcae7c..88814a6fdf74 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso @@ -227,7 +227,7 @@ case_when : Vector SQL_Builder -> SQL_Builder case_when arguments = if arguments.length < 3 then Error.throw (Illegal_State.Error "CASE_WHEN needs at least 3 arguments.") else fallback = arguments.last - cases = arguments.drop (Last 1) + cases = arguments.drop (..Last 1) if cases.length % 2 != 0 then Error.throw (Illegal_State.Error "CASE_WHEN expects an odd number of arguments (two arguments for each case and a fallback).") else n = cases.length . div 2 cases_exprs = 0.up_to n . map i-> @@ -279,8 +279,8 @@ make_row_number (arguments : Vector) (metadata : Row_Number_Metadata) = if argum step = arguments.at 1 ordering_and_grouping = arguments.drop 2 - ordering = ordering_and_grouping.drop (Last metadata.groupings_count) - grouping = ordering_and_grouping.take (Last metadata.groupings_count) + ordering = ordering_and_grouping.drop (..Last metadata.groupings_count) + grouping = ordering_and_grouping.take (..Last metadata.groupings_count) group_part = if grouping.length == 0 then "" else SQL_Builder.code "PARTITION BY " ++ SQL_Builder.join ", " grouping diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Data_Link_Setup.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Data_Link_Setup.enso index 4d1f67183968..9b0fd98d0c8e 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Data_Link_Setup.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Data_Link_Setup.enso @@ -54,7 +54,7 @@ prepare_credentials data_link_location:Enso_File details:Postgres -> JS_Object | secret : Enso_Secret -> secret plain_text_password : Text -> secret_location = data_link_location.parent.if_nothing Enso_File.root - location_name = if data_link_location.name.ends_with data_link_extension then data_link_location.name.drop (Index_Sub_Range.Last data_link_extension.length) else data_link_location.name + location_name = if data_link_location.name.ends_with data_link_extension then data_link_location.name.drop (..Last data_link_extension.length) else data_link_location.name create_fresh_secret ix = secret_name = location_name + "-password" + (if ix == 0 then "" else "-"+ix.to_text) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso index 47e1b5a93d33..454302fc8a63 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso +++ 
b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso @@ -1,4 +1,4 @@ -from Standard.Base import all hiding First, Last +from Standard.Base import all import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State import Standard.Base.Errors.Unimplemented.Unimplemented diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso index 4cca20c4df10..198e408a8913 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso @@ -1,4 +1,4 @@ -from Standard.Base import all hiding First, Last +from Standard.Base import all import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State import Standard.Base.Runtime.Ref.Ref diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso index de5770ab1875..4c0eb55ad84c 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso @@ -175,12 +175,12 @@ operations_map = handle_case arguments = fallback = arguments.last - cases = arguments.drop (Last 1) + cases = arguments.drop (..Last 1) if cases.length % 2 != 0 then Panic.throw (Illegal_State.Error "Impossible: constructed a CASE with an odd number of case arguments.") if cases.is_empty then Panic.throw (Illegal_State.Error "Impossible: too few cases provided for a CASE statement.") - case_results = cases.take (Index_Sub_Range.Every 2 first=1) + case_results = cases.take (..Every 2 first=1) possible_results = case_results + [fallback] find_a_common_type possible_results diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso index 84f10f1c6668..cec7fd97b0f7 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso @@ -238,7 +238,7 @@ raise_duplicated_primary_key_error source_table primary_key original_panic = False -> row = materialized.first_row.to_vector example_count = row.last - example_entry = row.drop (Last 1) + example_entry = row.drop (..Last 1) Error.throw (Non_Unique_Key.Error primary_key example_entry example_count) ## PRIVATE @@ -624,7 +624,7 @@ check_multiple_rows_match left_table right_table key_columns ~continuation = True -> continuation False -> row = example.first_row . 
to_vector - offending_key = row.drop (Last 1) + offending_key = row.drop (..Last 1) count = row.last Error.throw (Multiple_Target_Rows_Matched_For_Update.Error offending_key count) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Take_Drop_Helpers.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Take_Drop_Helpers.enso index 664ecef666f1..b03c943e3aa5 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Take_Drop_Helpers.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Take_Drop_Helpers.enso @@ -1,10 +1,9 @@ from Standard.Base import all -import Standard.Base.Data.Index_Sub_Range as Index_Sub_Range_Module import Standard.Base.Data.Vector.No_Wrap import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State -from Standard.Base.Data.Index_Sub_Range import normalize_ranges, resolve_ranges, sort_and_merge_ranges +from Standard.Base.Data.Index_Sub_Range import drop_helper, normalize_ranges, resolve_ranges, sort_and_merge_ranges, take_helper from Standard.Table import Set_Mode @@ -22,7 +21,7 @@ type Take_Drop ## PRIVATE Apply `take` or `drop` to a table, returning the rows specified by the selector. take_drop_helper : Take_Drop -> DB_Table -> (Index_Sub_Range | Range | Integer) -> DB_Table -take_drop_helper take_drop table selector = +take_drop_helper take_drop table selector:(Index_Sub_Range | Range | Integer) = check_supported selector <| length = table.row_count ranges = cleanup_ranges (collect_ranges take_drop length selector) @@ -40,7 +39,7 @@ take_drop_helper take_drop table selector = ## PRIVATE Turn the selector into a vector of ranges collect_ranges : Take_Drop -> Integer -> (Index_Sub_Range | Range | Integer) -> Vector Range -collect_ranges take_drop length selector = +collect_ranges take_drop length selector:(Index_Sub_Range | Range | Integer) = at _ = Panic.throw (Illegal_State.Error "Impossible: at called in Database take/drop. This is a bug in the Database library.") single_slice s e = [Range.new s e] slice_ranges selectors = @@ -49,14 +48,14 @@ collect_ranges take_drop length selector = r : Range -> r selectors.map slice_range helper = case take_drop of - Take_Drop.Take -> Index_Sub_Range_Module.take_helper - Take_Drop.Drop -> Index_Sub_Range_Module.drop_helper + Take_Drop.Take -> take_helper + Take_Drop.Drop -> drop_helper helper length at single_slice slice_ranges selector ## PRIVATE Throw Unsupported_Database_Operation for selectors that are not supported by database backends.
check_supported : (Index_Sub_Range | Range | Integer) -> Any -> Any | Unsupported_Database_Operation -check_supported selector ~cont = +check_supported selector:(Index_Sub_Range | Range | Integer) ~cont = err = msg = selector.to_display_text + " is not supported for database backends" Error.throw (Unsupported_Database_Operation.Error msg) diff --git a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso index a555dbeed7df..089a8572c3ba 100644 --- a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso +++ b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso @@ -1,6 +1,6 @@ private -from Standard.Base import all hiding First, Last +from Standard.Base import all import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State import Standard.Base.Errors.Unimplemented.Unimplemented diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso index 4b780357dd9b..8e88dd1f7060 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso @@ -1,6 +1,5 @@ from Standard.Base import all import Standard.Base.Data.Array_Proxy.Array_Proxy -import Standard.Base.Data.Index_Sub_Range as Index_Sub_Range_Module import Standard.Base.Data.Vector.No_Wrap import Standard.Base.Errors.Common.Arithmetic_Error import Standard.Base.Errors.Common.Incomparable_Values @@ -12,6 +11,7 @@ import Standard.Base.Errors.Illegal_State.Illegal_State import Standard.Base.Internal.Polyglot_Helpers import Standard.Base.Internal.Rounding_Helpers from Standard.Base.Metadata.Widget import Numeric_Input +from Standard.Base.Data.Index_Sub_Range import drop_helper, normalize_ranges, take_helper from Standard.Base.Widget_Helpers import make_data_cleanse_vector_selector, make_format_chooser, make_regex_text_widget import project.Constants.Previous_Value @@ -2404,7 +2404,7 @@ type Column table = Table.new [["Name", ["Alice", "Bob", "Charlie"]]] column = table.get "Name" ## The take returns "Alice" - first_row = column.take (First 1) + first_row = column.take (..First 1) > Example Select the last row from the "Name" Column. @@ -2412,11 +2412,11 @@ type Column table = Table.new [["Name", ["Alice", "Bob", "Charlie"]]] column = table.get "Name" ## The take returns "Charlie" - last_row = column.take (Last 1) + last_row = column.take (..Last 1) @range Index_Sub_Range.default_widget take : (Index_Sub_Range | Range | Integer) -> Column - take self range=(First 1) = - Index_Sub_Range_Module.take_helper self.length (self.at _) self.slice (slice_ranges self) range + take self range:(Index_Sub_Range | Range | Integer)=..First = + take_helper self.length (self.at _) self.slice (slice_ranges self) range ## ALIAS remove, skip GROUP Standard.Base.Selections @@ -2428,8 +2428,8 @@ type Column - range: The selection of rows from the table to remove. @range Index_Sub_Range.default_widget drop : (Index_Sub_Range | Range | Integer) -> Column - drop self range=(First 1) = - Index_Sub_Range_Module.drop_helper self.length (self.at _) self.slice (slice_ranges self) range + drop self range:(Index_Sub_Range | Range | Integer)=..First = + drop_helper self.length (self.at _) self.slice (slice_ranges self) range ## PRIVATE Returns a column with a continuous sub-range of rows taken. 
@@ -2710,7 +2710,7 @@ normalize_text_for_display text = ## PRIVATE A helper to create a new table consisting of slices of the original table. slice_ranges column ranges = - normalized = Index_Sub_Range_Module.normalize_ranges ranges + normalized = normalize_ranges ranges Column.Value (column.java_column.slice normalized) ## PRIVATE diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Table_Conversions.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Table_Conversions.enso index 43f0cc705d73..aa5f7e0f7953 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Table_Conversions.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Extensions/Table_Conversions.enso @@ -140,7 +140,7 @@ append_to_json_table file:File table on_problems:Problem_Behavior = case old_text.ends_with "]" && old_text.starts_with "[" of True -> ## Here we read in the whole table. We could actually just read from the back and rely on `seek` to be more efficient for large files. - new_text = old_text.drop (Last 1) + "," + table.to_json.drop (First 1) + new_text = old_text.drop (..Last 1) + "," + table.to_json.drop (..First 1) new_text.write file on_existing_file=Existing_File_Behavior.Overwrite on_problems=on_problems False -> Error.throw (Invalid_JSON_Format.Error old_text "File already exists and is not a JSON array.") diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Aggregate_Column_Helper.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Aggregate_Column_Helper.enso index 6595f77207be..382f662e9b56 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Aggregate_Column_Helper.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Aggregate_Column_Helper.enso @@ -1,4 +1,4 @@ -from Standard.Base import all hiding First, Last +from Standard.Base import all import Standard.Base.Data.Vector.No_Wrap import Standard.Base.Errors.Illegal_Argument.Illegal_Argument from Standard.Base.Runtime import assert diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Naming_Helper.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Naming_Helper.enso index 7beb43215af2..029175affd01 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Naming_Helper.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Column_Naming_Helper.enso @@ -137,7 +137,7 @@ type Column_Naming_Helper # If there is too little space, we will just pick a few parts: True -> mid = Math.min (texts.length-1) (remaining_space.div min_bytes_per_part) - texts.take (First 1+mid) + texts.take (Last 1) + texts.take (..First 1+mid) + texts.take (..Last 1) False -> texts new_remaining_space = max_size - (separator_size * (parts_to_include.length - 1)) initial_size_per_part = new_remaining_space.div parts_to_include.length diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso index c49909e65f1a..e3b2cdbf2bcd 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Delimited_Reader.enso @@ -212,7 +212,7 @@ newline_at_eof file encoding = file_last_bytes = file.read_last_bytes most_bytes result = newlines.zip newline_bytes . 
find if_missing=[Nothing] pair-> bytes = pair.second - bytes == (file_last_bytes.take (Last bytes.length)) + bytes == (file_last_bytes.take (..Last bytes.length)) result.first ## PRIVATE diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Display_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Display_Helpers.enso index 4421b9a2a913..4a3bd2a99b17 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Display_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Display_Helpers.enso @@ -58,8 +58,8 @@ print_table header rows indices_count format_term = divider = content_lengths . map (l -> "-".repeat l+2) . join '+' row_lines = rows.map r-> x = r.zip content_lengths pad - ixes = x.take (First indices_count) . map (ansi_bold format_term) - with_bold_ix = ixes + x.drop (First indices_count) + ixes = x.take (..First indices_count) . map (ansi_bold format_term) + with_bold_ix = ixes + x.drop (..First indices_count) y = with_bold_ix . join ' | ' " " + y ([" " + header_line, divider] + row_lines).join '\n' diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso index ba8eefa57c52..f97c754d241b 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso @@ -353,8 +353,8 @@ rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector) vec : Vector -> col_count = internal_columns.length good_names = if vec.length <= col_count then vec else - problem_builder.report_other_warning (Too_Many_Column_Names_Provided.Error (vec.drop (First col_count))) - vec.take (First col_count) + problem_builder.report_other_warning (Too_Many_Column_Names_Provided.Error (vec.drop (..First col_count))) + vec.take (..First col_count) internal_columns.take good_names.length . 
zip good_names _ : Map -> resolve_rename selector replacement = case selector of diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Rows_To_Read.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Rows_To_Read.enso index 4b5832348a21..913e635ec7c5 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Rows_To_Read.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Rows_To_Read.enso @@ -39,7 +39,7 @@ type Rows_To_Read ## PRIVATE attach_warning self input:Table -> Table = case self of Rows_To_Read.First_With_Warning rows -> if input.row_count <= rows then input else - Problem_Behavior.Report_Warning.attach_problem_after (input.take (First rows)) <| + Problem_Behavior.Report_Warning.attach_problem_after (input.take (..First rows)) <| Not_All_Rows_Downloaded.Warning rows _ -> input diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso index 9f4bc83f7283..ddd7f5ec1e98 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso @@ -1,7 +1,6 @@ from Standard.Base import all import Standard.Base.Data.Array_Proxy.Array_Proxy import Standard.Base.Data.Filter_Condition as Filter_Condition_Module -import Standard.Base.Data.Index_Sub_Range as Index_Sub_Range_Module import Standard.Base.Data.Time.Errors.Date_Time_Format_Parse_Error import Standard.Base.Data.Vector.No_Wrap import Standard.Base.Errors.Common.Additional_Warnings @@ -16,6 +15,7 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Unimplemented.Unimplemented import Standard.Base.Runtime.Context import Standard.Base.System.File.Generic.Writable_File.Writable_File +from Standard.Base.Data.Index_Sub_Range import drop_helper, normalize_ranges, take_helper from Standard.Base.Metadata import Display, make_single_choice, Widget from Standard.Base.Widget_Helpers import make_any_selector, make_data_cleanse_vector_selector, make_delimiter_selector, make_format_chooser @@ -746,7 +746,7 @@ type Table problem_builder = Problem_Builder.new problem_builder.report_unique_name_strategy unique problem_builder.attach_problems_before on_problems <| - self.drop (First 1) . rename_columns new_names_cleaned on_problems=on_problems + self.drop (..First 1) . rename_columns new_names_cleaned on_problems=on_problems ## ALIAS group by, summarize GROUP Standard.Base.Calculations @@ -1688,7 +1688,7 @@ type Table > Example Take first 10 rows of the table. - table.take (First 10) + table.take (..First 10) > Example Take rows from the top of the table as long as their values sum to 10. @@ -1696,8 +1696,8 @@ type Table table.take (While row-> row.to_vector.compute Statistic.Sum == 10) @range Index_Sub_Range.default_widget take : (Index_Sub_Range | Range | Integer) -> Table - take self range:(Index_Sub_Range | Range | Integer)=(..First 1) = - Index_Sub_Range_Module.take_helper self.row_count (self.rows.at _) self.slice (slice_ranges self) range + take self range:(Index_Sub_Range | Range | Integer)=..First = + take_helper self.row_count (self.rows.at _) self.slice (slice_ranges self) range ## ALIAS remove, skip GROUP Standard.Base.Selections @@ -1720,7 +1720,7 @@ type Table > Example Drop first 10 rows of the table. - table.drop (First 10) + table.drop (..First 10) > Example Drop rows from the top of the table as long as their values sum to 10. 
@@ -1728,8 +1728,8 @@ type Table table.drop (While row-> row.to_vector.compute Statistic.Sum == 10) @range Index_Sub_Range.default_widget drop : (Index_Sub_Range | Range | Integer) -> Table - drop self range:(Index_Sub_Range | Range | Integer)=(..First 1) = - Index_Sub_Range_Module.drop_helper self.row_count (self.rows.at _) self.slice (slice_ranges self) range + drop self range:(Index_Sub_Range | Range | Integer)=..First = + drop_helper self.row_count (self.rows.at _) self.slice (slice_ranges self) range ## PRIVATE Filter out all rows. @@ -2481,9 +2481,9 @@ type Table read : Rows_To_Read -> Table read self (max_rows : Rows_To_Read = ..All_Rows) = case max_rows of Rows_To_Read.All_Rows -> self - Rows_To_Read.First n -> self.take (First n) + Rows_To_Read.First n -> self.take (..First n) Rows_To_Read.First_With_Warning n -> - truncated = self.take (First n) + truncated = self.take (..First n) if self.row_count <= n then truncated else Problem_Behavior.Report_Warning.attach_problem_after truncated <| Not_All_Rows_Downloaded.Warning n @@ -3153,7 +3153,7 @@ type Table ## PRIVATE A helper to create a new table consisting of slices of the original table. slice_ranges table ranges = - normalized = Index_Sub_Range_Module.normalize_ranges ranges + normalized = normalize_ranges ranges Table.Value (table.java_table.slice normalized) ## PRIVATE diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso index 7700f197e96a..01380c3cae74 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Helpers.enso @@ -213,7 +213,7 @@ Table.to_default_visualization_data self = row_count = ['number_of_rows', self.row_count] cols = self.columns.map c-> name = c.name - items = c.to_vector.take (First max_size) + items = c.to_vector.take (..First max_size) JS_Object.from_pairs [['name', name], ['data', items]] JS_Object.from_pairs [row_count, ['columns', cols]] . to_text @@ -272,7 +272,7 @@ Column.to_default_visualization_data self = size = ['length', self.length] name = ['name', self.name] max_data = 100 - data = ['data', self.to_vector.take (First max_data)] + data = ['data', self.to_vector.take (..First max_data)] JS_Object.from_pairs [size, name, data] . to_text ## PRIVATE diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso index f1f65386a8e8..1ac1cc7bfc01 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso @@ -175,8 +175,8 @@ limit_data limit data = case limit of _ -> [] extreme = Map.from_vector bounds error_on_duplicates=False . 
values - if limit <= extreme.length then extreme.take (First limit) else - extreme + data.take (Index_Sub_Range.Sample (limit - extreme.length)) + if limit <= extreme.length then extreme.take (..First limit) else + extreme + data.take (..Sample (limit - extreme.length)) ## PRIVATE json_from_table : Table -> Vector Integer | Nothing -> Integer | Nothing -> Text diff --git a/test/AWS_Tests/src/Redshift_Spec.enso b/test/AWS_Tests/src/Redshift_Spec.enso index e6dba221d8e1..8af25f7005d3 100644 --- a/test/AWS_Tests/src/Redshift_Spec.enso +++ b/test/AWS_Tests/src/Redshift_Spec.enso @@ -82,7 +82,7 @@ add_database_specs suite_builder create_connection_fn = agg_in_memory_table.select_into_database_table default_connection.get (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True empty_agg_table_fn = _-> - (agg_in_memory_table.take (First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True + (agg_in_memory_table.take (..First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder Common_Table_Operations.Main.add_specs suite_builder setup diff --git a/test/Base_Tests/src/Data/Text/Utils_Spec.enso b/test/Base_Tests/src/Data/Text/Utils_Spec.enso index 1257e7c43e5f..d0b987bea246 100644 --- a/test/Base_Tests/src/Data/Text/Utils_Spec.enso +++ b/test/Base_Tests/src/Data/Text/Utils_Spec.enso @@ -101,8 +101,8 @@ add_specs suite_builder = long = "Hello World! ".repeat 1024 disp = long.to_display_text disp.length . should_equal 80 - disp.characters.take (First 5) . should_equal [ 'H', 'e', 'l', 'l', 'o' ] - disp.characters.take (Last 6) . should_equal ['l', 'd', '!', ' ', ' ', '…'] + disp.characters.take (..First 5) . should_equal [ 'H', 'e', 'l', 'l', 'o' ] + disp.characters.take (..Last 6) . should_equal ['l', 'd', '!', ' ', ' ', '…'] group_builder.specify "grapheme 1 conversion" <| txt = 'a\u0321\u0302'*100 diff --git a/test/Base_Tests/src/Data/Text_Spec.enso b/test/Base_Tests/src/Data/Text_Spec.enso index 3c38bb623ba5..a2aa195f650a 100644 --- a/test/Base_Tests/src/Data/Text_Spec.enso +++ b/test/Base_Tests/src/Data/Text_Spec.enso @@ -1,4 +1,4 @@ -from Standard.Base import all hiding First, Last +from Standard.Base import all import Standard.Base.Data.Text.Regex.No_Such_Group import Standard.Base.Data.Text.Regex.Regex_Syntax_Error import Standard.Base.Data.Text.Span.Span @@ -9,12 +9,8 @@ import Standard.Base.Errors.Common.Missing_Argument import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument -from Standard.Base.Data.Text.Text_Sub_Range.Text_Sub_Range import all -from Standard.Base.Data.Index_Sub_Range.Index_Sub_Range import all - from Standard.Test import all - type Auto Value a @@ -374,144 +370,144 @@ add_specs suite_builder = text_2.to_text.should_equal text_2 group_builder.specify "should allow taking or dropping every other character" <| - "ABCDE".take (Every 1) . should_equal "ABCDE" - "ABCDE".take (Every 2) . should_equal "ACE" - "ABCD".take (Every 2) . should_equal "AC" - "ABCD".take (Every 2 first=1) . should_equal "BD" - "ABCDE".take (Every 2 first=1) . 
should_equal "BD" - "ABCDE".take (Every 3) . should_equal "AD" - "ABCDEFG".take (Every 3) . should_equal "ADG" - "ABCDEFG".take (Every 3 first=1) . should_equal "BE" - "ABCDEFG".take (Every 3 first=6) . should_equal "G" - "ABCDEFG".take (Every 10) . should_equal "A" - - "ABCDE".drop (Every 1) . should_equal "" - "ABCDE".drop (Every 2) . should_equal "BD" - "ABCD".drop (Every 2) . should_equal "BD" - "ABCD".drop (Every 2 first=1) . should_equal "AC" - "ABCDE".drop (Every 2 first=1) . should_equal "ACE" - "ABCDE".drop (Every 3) . should_equal "BCE" - "ABCDEFG".drop (Every 3) . should_equal "BCEF" - "ABCDEFG".drop (Every 3 first=1) . should_equal "ACDFG" - "ABCDEFGH".drop (Every 3 first=1) . should_equal "ACDFG" - "ABCDEFGHI".drop (Every 3 first=1) . should_equal "ACDFGI" + "ABCDE".take (..Every 1) . should_equal "ABCDE" + "ABCDE".take (..Every 2) . should_equal "ACE" + "ABCD".take (..Every 2) . should_equal "AC" + "ABCD".take (..Every 2 first=1) . should_equal "BD" + "ABCDE".take (..Every 2 first=1) . should_equal "BD" + "ABCDE".take (..Every 3) . should_equal "AD" + "ABCDEFG".take (..Every 3) . should_equal "ADG" + "ABCDEFG".take (..Every 3 first=1) . should_equal "BE" + "ABCDEFG".take (..Every 3 first=6) . should_equal "G" + "ABCDEFG".take (..Every 10) . should_equal "A" + + "ABCDE".drop (..Every 1) . should_equal "" + "ABCDE".drop (..Every 2) . should_equal "BD" + "ABCD".drop (..Every 2) . should_equal "BD" + "ABCD".drop (..Every 2 first=1) . should_equal "AC" + "ABCDE".drop (..Every 2 first=1) . should_equal "ACE" + "ABCDE".drop (..Every 3) . should_equal "BCE" + "ABCDEFG".drop (..Every 3) . should_equal "BCEF" + "ABCDEFG".drop (..Every 3 first=1) . should_equal "ACDFG" + "ABCDEFGH".drop (..Every 3 first=1) . should_equal "ACDFG" + "ABCDEFGHI".drop (..Every 3 first=1) . should_equal "ACDFGI" group_builder.specify "should allow taking or dropping a random sample of a substring" <| - "AAAAA".take (Sample 3) . should_equal "AAA" - "AAAAA".drop (Sample 3) . should_equal "AA" + "AAAAA".take (..Sample 3) . should_equal "AAA" + "AAAAA".drop (..Sample 3) . should_equal "AA" ## These tests are very brittle and can be invalidated by a valid implementation modification, so they may need to be updated. - "ABCDEFGH".take (Sample 0) . should_equal "" - "ABCDEFGH".take (Sample 8 seed=42) . should_equal "FGCHABED" - "ABCDEFGH".take (Sample 4 seed=42) . should_equal "FGCH" - "ABCDEFGH".take (Sample 2 seed=42) . should_equal "FG" - "ABCDEFGH".take (Sample 1 seed=42) . should_equal "F" - "ABCDEFGH".take (Sample 100 seed=42) . should_equal "FGCHABED" + "ABCDEFGH".take (..Sample 0) . should_equal "" + "ABCDEFGH".take (..Sample 8 seed=42) . should_equal "FGCHABED" + "ABCDEFGH".take (..Sample 4 seed=42) . should_equal "FGCH" + "ABCDEFGH".take (..Sample 2 seed=42) . should_equal "FG" + "ABCDEFGH".take (..Sample 1 seed=42) . should_equal "F" + "ABCDEFGH".take (..Sample 100 seed=42) . should_equal "FGCHABED" samples_1 = 0.up_to 10000 . map seed-> - "ABCD".take (Sample 2 seed) + "ABCD".take (..Sample 2 seed) samples_1.should_contain_the_same_elements_as ["AB", "BA", "AC", "CA", "AD", "DA", "BC", "CB", "BD", "DB", "CD", "DC"] - "ABCDEFGH".drop (Sample 0) . should_equal "ABCDEFGH" - "ABCDEFGH".drop (Sample 1 seed=42) . should_equal "ABCDEGH" - "ABCDEFGH".drop (Sample 2 seed=42) . should_equal "ABCDEH" - "ABCDEFGH".drop (Sample 4 seed=42) . should_equal "ABDE" - "ABCDEFGH".drop (Sample 8 seed=42) . should_equal "" - "ABCDEFGH".drop (Sample 100 seed=42) . should_equal "" + "ABCDEFGH".drop (..Sample 0) . 
should_equal "ABCDEFGH" + "ABCDEFGH".drop (..Sample 1 seed=42) . should_equal "ABCDEGH" + "ABCDEFGH".drop (..Sample 2 seed=42) . should_equal "ABCDEH" + "ABCDEFGH".drop (..Sample 4 seed=42) . should_equal "ABDE" + "ABCDEFGH".drop (..Sample 8 seed=42) . should_equal "" + "ABCDEFGH".drop (..Sample 100 seed=42) . should_equal "" samples_2 = 0.up_to 10000 . map seed-> - "ABCD".drop (Sample 2 seed) + "ABCD".drop (..Sample 2 seed) samples_2.should_contain_the_same_elements_as ["AB", "AC", "AD", "BC", "CD", "BD"] group_builder.specify "should allow taking or dropping many indices or subranges (possibly overlapping)" <| - "123"*1000 . take (By_Index (Vector.new 3000 ix-> 2999-ix)) . should_equal "321"*1000 - "123"*1000 . take (By_Index (Vector.new 3000 _-> 0)) . should_equal "1"*3000 - "123456"*1000 . take (By_Index (Vector.new 100 ix-> Range.Between 6*ix+1 6*ix+3)) . should_equal "23"*100 - "AB"*1000 . take (By_Index (Vector.new 100 ix-> Range.Between ix+1 ix+5)) . should_equal "BABAABAB"*50 + "123"*1000 . take (..By_Index (Vector.new 3000 ix-> 2999-ix)) . should_equal "321"*1000 + "123"*1000 . take (..By_Index (Vector.new 3000 _-> 0)) . should_equal "1"*3000 + "123456"*1000 . take (..By_Index (Vector.new 100 ix-> Range.Between 6*ix+1 6*ix+3)) . should_equal "23"*100 + "AB"*1000 . take (..By_Index (Vector.new 100 ix-> Range.Between ix+1 ix+5)) . should_equal "BABAABAB"*50 - "123"*1000 . drop (By_Index (Vector.new 300 ix-> 2999-ix)) . should_equal "123"*900 - "123"*1000 . drop (By_Index (Vector.new 3000 _-> 0)) . should_equal "23"+"123"*999 - "123456"*1000 . drop (By_Index (Vector.new 1000 ix-> Range.Between 6*ix+1 6*ix+3)) . should_equal "1456"*1000 - "ABCD"*25 . drop (By_Index (Vector.new 90 ix-> Range.Between ix+1 ix+5)) . should_equal "ACDABCD" + "123"*1000 . drop (..By_Index (Vector.new 300 ix-> 2999-ix)) . should_equal "123"*900 + "123"*1000 . drop (..By_Index (Vector.new 3000 _-> 0)) . should_equal "23"+"123"*999 + "123456"*1000 . drop (..By_Index (Vector.new 1000 ix-> Range.Between 6*ix+1 6*ix+3)) . should_equal "1456"*1000 + "ABCD"*25 . drop (..By_Index (Vector.new 90 ix-> Range.Between ix+1 ix+5)) . should_equal "ACDABCD" "ABCD"*1000 . take (0.up_to 4000 . with_step 4) . should_equal "A"*1000 - "ABCD"*1000 . take (Every 4) . should_equal "A"*1000 - "ABCD"*1000 . take (By_Index [0.up_to 4000 . with_step 4, 1.up_to 4000 . with_step 4]) . should_equal ("A"*1000 + "B"*1000) - "ABCD"*1000 . take (By_Index [0.up_to 4000 . with_step 4, 2.up_to 4000 . with_step 4]) . should_equal ("A"*1000 + "C"*1000) + "ABCD"*1000 . take (..Every 4) . should_equal "A"*1000 + "ABCD"*1000 . take (..By_Index [0.up_to 4000 . with_step 4, 1.up_to 4000 . with_step 4]) . should_equal ("A"*1000 + "B"*1000) + "ABCD"*1000 . take (..By_Index [0.up_to 4000 . with_step 4, 2.up_to 4000 . with_step 4]) . should_equal ("A"*1000 + "C"*1000) "ABCD"*1000 . drop (0.up_to 4000 . with_step 4) . should_equal "BCD"*1000 - "ABCD"*1000 . drop (Every 4) . should_equal "BCD"*1000 - "ABCD"*1000 . drop (By_Index [0.up_to 4000 . with_step 4, 1.up_to 4000 . with_step 4]) . should_equal "CD"*1000 - "ABCD"*1000 . drop (By_Index [0.up_to 4000 . with_step 4, 2.up_to 4000 . with_step 4]) . should_equal "BD"*1000 + "ABCD"*1000 . drop (..Every 4) . should_equal "BCD"*1000 + "ABCD"*1000 . drop (..By_Index [0.up_to 4000 . with_step 4, 1.up_to 4000 . with_step 4]) . should_equal "CD"*1000 + "ABCD"*1000 . drop (..By_Index [0.up_to 4000 . with_step 4, 2.up_to 4000 . with_step 4]) . 
should_equal "BD"*1000 - "0123456789".take (By_Index [0.up_to 4, 4.up_to 6, 8.up_to 9]) . should_equal "0123458" - "0123456789".take (By_Index [4.up_to 6, 0.up_to 4, 0, 0]) . should_equal "45012300" - "0123456789".drop (By_Index [0.up_to 4, 4.up_to 6, 8.up_to 9]) . should_equal "679" - "0123456789".drop (By_Index [4.up_to 6, 0.up_to 4, 0, 0]) . should_equal "6789" - "0123456789".drop (By_Index [2.up_to 5, 0.up_to 3, 0, 0]) . should_equal "56789" + "0123456789".take (..By_Index [0.up_to 4, 4.up_to 6, 8.up_to 9]) . should_equal "0123458" + "0123456789".take (..By_Index [4.up_to 6, 0.up_to 4, 0, 0]) . should_equal "45012300" + "0123456789".drop (..By_Index [0.up_to 4, 4.up_to 6, 8.up_to 9]) . should_equal "679" + "0123456789".drop (..By_Index [4.up_to 6, 0.up_to 4, 0, 0]) . should_equal "6789" + "0123456789".drop (..By_Index [2.up_to 5, 0.up_to 3, 0, 0]) . should_equal "56789" group_builder.specify "should allow selecting substrings by characters" <| txt = kshi + facepalm + accent_1 + accent_2 - txt.take (First 2) . should_equal (kshi + facepalm) - txt.drop (First 2) . should_equal (accent_1 + accent_2) + txt.take (..First 2) . should_equal (kshi + facepalm) + txt.drop (..First 2) . should_equal (accent_1 + accent_2) txt.take 2 . should_equal (kshi + facepalm) txt.drop 2 . should_equal (accent_1 + accent_2) - txt.take (Last 2) . should_equal (accent_1 + accent_2) - txt.drop (Last 2) . should_equal (kshi + facepalm) + txt.take (..Last 2) . should_equal (accent_1 + accent_2) + txt.drop (..Last 2) . should_equal (kshi + facepalm) txt.take (0.up_to 2) . should_equal (kshi + facepalm) - txt.take (By_Index (0.up_to 2)) . should_equal (kshi + facepalm) + txt.take (..By_Index (0.up_to 2)) . should_equal (kshi + facepalm) txt.drop (0.up_to 2) . should_equal (accent_1 + accent_2) txt.take (2.up_to 4) . should_equal (accent_1 + accent_2) txt.drop (2.up_to 4) . should_equal (kshi + facepalm) - txt.take (Every 2) . should_equal (kshi + accent_1) - txt.take (Every 2 first=1) . should_equal (facepalm + accent_2) - txt.drop (Every 2) . should_equal (facepalm + accent_2) + txt.take (..Every 2) . should_equal (kshi + accent_1) + txt.take (..Every 2 first=1) . should_equal (facepalm + accent_2) + txt.drop (..Every 2) . should_equal (facepalm + accent_2) txt.take (0.up_to 4 . with_step 2) . should_equal (kshi + accent_1) - txt.take (By_Index [0, 3]) . should_equal (kshi + accent_2) - txt.take (By_Index 0) . should_equal kshi - txt.take (By_Index 1) . should_equal facepalm - txt.take (By_Index 2) . should_equal accent_1 - txt.take (By_Index 3) . should_equal accent_2 - txt.drop (By_Index [0, 3]) . should_equal (facepalm + accent_1) - txt.drop (By_Index [0, 3, 0]) . should_equal (facepalm + accent_1) - txt.drop (By_Index [0, 3, 0, 2, 1]) . should_equal "" - txt.take (By_Index [0, 3, 0, 2, 1]) . should_equal (kshi + accent_2 + kshi + accent_1 + facepalm) - txt.take (By_Index [0, 0, 0.up_to 2]) . should_equal (kshi + kshi + kshi + facepalm) - txt.drop (By_Index [2.up_to 4, 0.up_to 2]) . should_equal "" + txt.take (..By_Index [0, 3]) . should_equal (kshi + accent_2) + txt.take (..By_Index 0) . should_equal kshi + txt.take (..By_Index 1) . should_equal facepalm + txt.take (..By_Index 2) . should_equal accent_1 + txt.take (..By_Index 3) . should_equal accent_2 + txt.drop (..By_Index [0, 3]) . should_equal (facepalm + accent_1) + txt.drop (..By_Index [0, 3, 0]) . should_equal (facepalm + accent_1) + txt.drop (..By_Index [0, 3, 0, 2, 1]) . should_equal "" + txt.take (..By_Index [0, 3, 0, 2, 1]) . 
should_equal (kshi + accent_2 + kshi + accent_1 + facepalm) + txt.take (..By_Index [0, 0, 0.up_to 2]) . should_equal (kshi + kshi + kshi + facepalm) + txt.drop (..By_Index [2.up_to 4, 0.up_to 2]) . should_equal "" group_builder.specify "take should work as in the examples" <| - "Hello World!".take First . should_equal "H" - "Hello World!".take (First 5) . should_equal "Hello" - "Hello World!".take (First 100) . should_equal "Hello World!" - "Hello World!".take (First 0) . should_equal "" + "Hello World!".take ..First . should_equal "H" + "Hello World!".take (..First 5) . should_equal "Hello" + "Hello World!".take (..First 100) . should_equal "Hello World!" + "Hello World!".take (..First 0) . should_equal "" "Hello World!".take . should_equal "H" "Hello World!".take 5 . should_equal "Hello" "Hello World!".take 100 . should_equal "Hello World!" "Hello World!".take 0 . should_equal "" - "Hello World!".take Last . should_equal "!" - "Hello World!".take (Last 6) . should_equal "World!" - "Hello World!".take (Last 0) . should_equal "" - "Hello World!".take (Last 100) . should_equal "Hello World!" - "Hello World!".take (Before " ") . should_equal "Hello" - "Hello World!".take (Before "z") . should_equal "Hello World!" - "Hello World!".take (Before_Last "o") . should_equal "Hello W" - "Hello World!".take (Before_Last "z") . should_equal "Hello World!" - "Hello World!".take (After " ") . should_equal "World!" - "Hello World!".take (After "z") . should_equal "" - "Hello World!".take (After_Last "o") . should_equal "rld!" - "Hello World!".take (After_Last "z") . should_equal "" - "Hello World!".take (While c->c!=" ") . should_equal "Hello" - "Hello World!".take (While c->c!="z") . should_equal "Hello World!" + "Hello World!".take ..Last . should_equal "!" + "Hello World!".take (..Last 6) . should_equal "World!" + "Hello World!".take (..Last 0) . should_equal "" + "Hello World!".take (..Last 100) . should_equal "Hello World!" + "Hello World!".take (..Before " ") . should_equal "Hello" + "Hello World!".take (..Before "z") . should_equal "Hello World!" + "Hello World!".take (..Before_Last "o") . should_equal "Hello W" + "Hello World!".take (..Before_Last "z") . should_equal "Hello World!" + "Hello World!".take (..After " ") . should_equal "World!" + "Hello World!".take (..After "z") . should_equal "" + "Hello World!".take (..After_Last "o") . should_equal "rld!" + "Hello World!".take (..After_Last "z") . should_equal "" + "Hello World!".take (..While c->c!=" ") . should_equal "Hello" + "Hello World!".take (..While c->c!="z") . should_equal "Hello World!" "Hello World!".take (3.up_to 5) . should_equal "lo" "Hello World!".take (5.up_to 12) . should_equal " World!" "Hello World!".take (6.up_to 12 . with_step 2) . should_equal "Wrd" - "Hello World!".take (Every 2 first=6) . should_equal "Wrd" - "Hello World!".take (Every 3) . should_equal "HlWl" - "Hello World!".take (By_Index 0) . should_equal "H" - "Hello World!".take (By_Index [1, 0, 0, 6, 0]) . should_equal "eHHWH" - "Hello World!".take (By_Index [0.up_to 3, 6, 6.up_to 12 . with_step 2]) . should_equal "HelWWrd" - "Hello World!".take (Sample 3 seed=42) . should_equal "l d" + "Hello World!".take (..Every 2 first=6) . should_equal "Wrd" + "Hello World!".take (..Every 3) . should_equal "HlWl" + "Hello World!".take (..By_Index 0) . should_equal "H" + "Hello World!".take (..By_Index [1, 0, 0, 6, 0]) . should_equal "eHHWH" + "Hello World!".take (..By_Index [0.up_to 3, 6, 6.up_to 12 . with_step 2]) . 
should_equal "HelWWrd" + "Hello World!".take (..Sample 3 seed=42) . should_equal "l d" group_builder.specify "take should report errors for start indices out of bounds but just go till the end if the end index is OOB" <| txt = "Hello World!" @@ -521,248 +517,248 @@ add_specs suite_builder = txt.take (Range.Between txt.length txt.length) . should_fail_with Index_Out_Of_Bounds txt.take (Range.Between txt.length txt.length) . catch . should_equal (Index_Out_Of_Bounds.Error txt.length txt.length) txt.take (Range.Between txt.length 100) . should_fail_with Index_Out_Of_Bounds - txt.take (First 100) . should_equal txt + txt.take (..First 100) . should_equal txt txt.take 100 . should_equal txt - txt.take (Last 100) . should_equal txt - txt.take (By_Index 100) . should_fail_with Index_Out_Of_Bounds - txt.take (By_Index 13) . should_fail_with Index_Out_Of_Bounds - txt.take (By_Index [0, 1, 13]) . should_fail_with Index_Out_Of_Bounds - txt.take (By_Index [0, 14.up_to 15, 1]) . should_fail_with Index_Out_Of_Bounds - txt.take (By_Index [0, 1, 6.up_to 100]) . should_equal "HeWorld!" - txt.take (By_Index [0, 1, 6.up_to 100 . with_step 2]) . should_equal "HeWrd" + txt.take (..Last 100) . should_equal txt + txt.take (..By_Index 100) . should_fail_with Index_Out_Of_Bounds + txt.take (..By_Index 13) . should_fail_with Index_Out_Of_Bounds + txt.take (..By_Index [0, 1, 13]) . should_fail_with Index_Out_Of_Bounds + txt.take (..By_Index [0, 14.up_to 15, 1]) . should_fail_with Index_Out_Of_Bounds + txt.take (..By_Index [0, 1, 6.up_to 100]) . should_equal "HeWorld!" + txt.take (..By_Index [0, 1, 6.up_to 100 . with_step 2]) . should_equal "HeWrd" txt.take (13.up_to 12) . should_fail_with Index_Out_Of_Bounds "".take (0.up_to 0) . should_fail_with Index_Out_Of_Bounds "".take (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0) - "".take (By_Index 0) . should_fail_with Index_Out_Of_Bounds - "ABC".take (By_Index 3) . should_fail_with Index_Out_Of_Bounds + "".take (..By_Index 0) . should_fail_with Index_Out_Of_Bounds + "ABC".take (..By_Index 3) . should_fail_with Index_Out_Of_Bounds txt.take (13.up_to 20) . should_fail_with Index_Out_Of_Bounds txt.take (13.up_to 20 . with_step 2) . should_fail_with Index_Out_Of_Bounds - txt.take (By_Index [0.up_to 2, 13.up_to 20]) . should_fail_with Index_Out_Of_Bounds - txt.take (By_Index [0.up_to 0, 13.up_to 10, 2.up_to 2 . with_step 2]) . should_equal "" - txt.take (By_Index [0.up_to 2 . with_step 2, 13.up_to 20 . with_step 2]) . should_fail_with Index_Out_Of_Bounds - txt.take (By_Index [0.up_to 2 . with_step 2, 13.up_to 20 . with_step 2]) . catch . should_equal (Index_Out_Of_Bounds.Error 13 12) - txt.take (By_Index [0.up_to 2 . with_step 2, txt.length.up_to 100 . with_step 2]) . should_fail_with Index_Out_Of_Bounds - "".take (By_Index 0) . should_fail_with Index_Out_Of_Bounds + txt.take (..By_Index [0.up_to 2, 13.up_to 20]) . should_fail_with Index_Out_Of_Bounds + txt.take (..By_Index [0.up_to 0, 13.up_to 10, 2.up_to 2 . with_step 2]) . should_equal "" + txt.take (..By_Index [0.up_to 2 . with_step 2, 13.up_to 20 . with_step 2]) . should_fail_with Index_Out_Of_Bounds + txt.take (..By_Index [0.up_to 2 . with_step 2, 13.up_to 20 . with_step 2]) . catch . should_equal (Index_Out_Of_Bounds.Error 13 12) + txt.take (..By_Index [0.up_to 2 . with_step 2, txt.length.up_to 100 . with_step 2]) . should_fail_with Index_Out_Of_Bounds + "".take (..By_Index 0) . 
should_fail_with Index_Out_Of_Bounds group_builder.specify "take should work on grapheme clusters" <| txt_1 = 'He\u0302llo\u0308 Wo\u0301rld!' txt_2 = 'He\u0302llo\u0308 Wo\u0308rld!' - txt_1.take (Every 2) . should_equal 'Hlo\u0308Wrd' - txt_1.take (First 2) . should_equal 'He\u{302}' - txt_1.take (First 5) . should_equal 'He\u{302}llo\u{308}' + txt_1.take (..Every 2) . should_equal 'Hlo\u0308Wrd' + txt_1.take (..First 2) . should_equal 'He\u{302}' + txt_1.take (..First 5) . should_equal 'He\u{302}llo\u{308}' txt_1.take 2 . should_equal 'He\u{302}' txt_1.take 5 . should_equal 'He\u{302}llo\u{308}' - txt_1.take (Last 6) . should_equal 'Wo\u{301}rld!' - txt_1.take (Last 5) . should_equal 'o\u{301}rld!' - txt_1.take (Before 'e\u{302}') . should_equal 'H' - txt_1.take (Before 'ê') . should_equal 'H' - txt_1.take (Before 'e') . should_equal txt_1 - txt_2.take (Before_Last 'o\u{308}') . should_equal 'He\u{302}llo\u{308} W' - txt_2.take (Before_Last 'ö') . should_equal 'He\u{302}llo\u{308} W' - txt_2.take (Before_Last 'o') . should_equal txt_2 - txt_1.take (After 'e\u{302}') . should_equal 'llo\u{308} Wo\u{301}rld!' - txt_1.take (After 'ê') . should_equal 'llo\u{308} Wo\u{301}rld!' - txt_1.take (After 'e\u{308}') . should_equal '' - txt_1.take (After 'e') . should_equal '' - txt_2.take (After_Last 'o\u{308}') . should_equal 'rld!' - txt_2.take (After_Last 'ö') . should_equal 'rld!' - txt_2.take (After_Last 'o') . should_equal '' - txt_2.take (While c->c!='e\u{302}') . should_equal 'H' - txt_2.take (While c->c!='ê') . should_equal 'H' - txt_2.take (While c->c!='e') . should_equal txt_2 + txt_1.take (..Last 6) . should_equal 'Wo\u{301}rld!' + txt_1.take (..Last 5) . should_equal 'o\u{301}rld!' + txt_1.take (..Before 'e\u{302}') . should_equal 'H' + txt_1.take (..Before 'ê') . should_equal 'H' + txt_1.take (..Before 'e') . should_equal txt_1 + txt_2.take (..Before_Last 'o\u{308}') . should_equal 'He\u{302}llo\u{308} W' + txt_2.take (..Before_Last 'ö') . should_equal 'He\u{302}llo\u{308} W' + txt_2.take (..Before_Last 'o') . should_equal txt_2 + txt_1.take (..After 'e\u{302}') . should_equal 'llo\u{308} Wo\u{301}rld!' + txt_1.take (..After 'ê') . should_equal 'llo\u{308} Wo\u{301}rld!' + txt_1.take (..After 'e\u{308}') . should_equal '' + txt_1.take (..After 'e') . should_equal '' + txt_2.take (..After_Last 'o\u{308}') . should_equal 'rld!' + txt_2.take (..After_Last 'ö') . should_equal 'rld!' + txt_2.take (..After_Last 'o') . should_equal '' + txt_2.take (..While c->c!='e\u{302}') . should_equal 'H' + txt_2.take (..While c->c!='ê') . should_equal 'H' + txt_2.take (..While c->c!='e') . should_equal txt_2 txt_2.take (3.up_to 5) . should_equal 'lo\u{308}' txt_2.take (5.up_to 12) . should_equal ' Wo\u{308}rld!' group_builder.specify "take should work on emojis" <| - '✨🚀🚧😍😃😎😙😉☺'.take First . should_equal '✨' - '✨🚀🚧😍😃😎😙😉☺'.take (First 2) . should_equal '✨🚀' + '✨🚀🚧😍😃😎😙😉☺'.take ..First . should_equal '✨' + '✨🚀🚧😍😃😎😙😉☺'.take (..First 2) . should_equal '✨🚀' '✨🚀🚧😍😃😎😙😉☺'.take . should_equal '✨' '✨🚀🚧😍😃😎😙😉☺'.take 2 . should_equal '✨🚀' - '✨🚀🚧😍😃😎😙😉☺'.take Last . should_equal '☺' - '✨🚀🚧😍😃😎😙😉☺'.take (Last 0) . should_equal '' - '✨🚀🚧😍😃😎😙😉☺'.take (Last 3) . should_equal '😙😉☺' - '✨🚀🚧😍😃😍😎😙😉☺'.take (Before '😍') . should_equal '✨🚀🚧' - '✨🚀🚧😍😃😍😎😙😉☺'.take (Before_Last '😍') . should_equal '✨🚀🚧😍😃' - '✨🚀🚧😍😃😍😎😙😉☺'.take (After '😍') . should_equal '😃😍😎😙😉☺' - '✨🚀🚧😍😃😍😎😙😉☺'.take (After_Last '😍') . should_equal '😎😙😉☺' - '✨🚀🚧😍😃😍😎😙😉☺'.take (While c->c!="😃") . should_equal '✨🚀🚧😍' + '✨🚀🚧😍😃😎😙😉☺'.take ..Last . 
+        '✨🚀🚧😍😃😎😙😉☺'.take (..Last 0) . should_equal ''
+        '✨🚀🚧😍😃😎😙😉☺'.take (..Last 3) . should_equal '😙😉☺'
+        '✨🚀🚧😍😃😍😎😙😉☺'.take (..Before '😍') . should_equal '✨🚀🚧'
+        '✨🚀🚧😍😃😍😎😙😉☺'.take (..Before_Last '😍') . should_equal '✨🚀🚧😍😃'
+        '✨🚀🚧😍😃😍😎😙😉☺'.take (..After '😍') . should_equal '😃😍😎😙😉☺'
+        '✨🚀🚧😍😃😍😎😙😉☺'.take (..After_Last '😍') . should_equal '😎😙😉☺'
+        '✨🚀🚧😍😃😍😎😙😉☺'.take (..While c->c!="😃") . should_equal '✨🚀🚧😍'
         '✨🚀🚧😍😃😍😎😙😉☺'.take (3.up_to 6) . should_equal '😍😃😍'
 
     group_builder.specify "take should correctly handle edge cases" <|
         "ABC".take . should_equal "A"
-        "".take First . should_equal ""
-        "".take Last . should_equal ""
+        "".take ..First . should_equal ""
+        "".take ..Last . should_equal ""
 
-        "".take (After "a") . should_equal ""
-        "".take (After_Last "a") . should_equal ""
-        "".take (Before "a") . should_equal ""
-        "".take (Before_Last "a") . should_equal ""
+        "".take (..After "a") . should_equal ""
+        "".take (..After_Last "a") . should_equal ""
+        "".take (..Before "a") . should_equal ""
+        "".take (..Before_Last "a") . should_equal ""
 
-        "".take (After "") . should_equal ""
-        "".take (After_Last "") . should_equal ""
-        "".take (Before "") . should_equal ""
-        "".take (Before_Last "") . should_equal ""
+        "".take (..After "") . should_equal ""
+        "".take (..After_Last "") . should_equal ""
+        "".take (..Before "") . should_equal ""
+        "".take (..Before_Last "") . should_equal ""
 
-        "".take (While _->True) . should_equal ""
+        "".take (..While _->True) . should_equal ""
         'ABC\u{301}'.take (0.up_to 0) . should_equal ""
-        'ABC\u{301}'.take (After "") . should_equal 'ABC\u{301}'
-        'ABC\u{301}'.take (After_Last "") . should_equal ""
-        'ABC\u{301}'.take (Before "") . should_equal ""
-        'ABC\u{301}'.take (Before_Last "") . should_equal 'ABC\u{301}'
-
-        "ABC".take (By_Index -1) . should_equal "C"
-        "ABC".take (By_Index [-1, -1, -1, -3, 2]) . should_equal "CCCAC"
-        "ABC".take (By_Index []) . should_equal ""
-        "ABC".take (By_Index ((-2).up_to -1)) . should_fail_with Illegal_Argument
-        "".take (Every 2) . should_equal ""
-        "".take (Every 2 first=1) . should_equal ""
-        "ABC".take (Every 5) . should_equal "A"
-        "A".take (Every 5) . should_equal "A"
-        "ABC".take (Every 5 first=4) . should_equal ""
-        "".take (Sample 0) . should_equal ""
-        "".take (Sample 100) . should_equal ""
+        'ABC\u{301}'.take (..After "") . should_equal 'ABC\u{301}'
+        'ABC\u{301}'.take (..After_Last "") . should_equal ""
+        'ABC\u{301}'.take (..Before "") . should_equal ""
+        'ABC\u{301}'.take (..Before_Last "") . should_equal 'ABC\u{301}'
+
+        "ABC".take (..By_Index -1) . should_equal "C"
+        "ABC".take (..By_Index [-1, -1, -1, -3, 2]) . should_equal "CCCAC"
+        "ABC".take (..By_Index []) . should_equal ""
+        "ABC".take (..By_Index ((-2).up_to -1)) . should_fail_with Illegal_Argument
+        "".take (..Every 2) . should_equal ""
+        "".take (..Every 2 first=1) . should_equal ""
+        "ABC".take (..Every 5) . should_equal "A"
+        "A".take (..Every 5) . should_equal "A"
+        "ABC".take (..Every 5 first=4) . should_equal ""
+        "".take (..Sample 0) . should_equal ""
+        "".take (..Sample 100) . should_equal ""
 
     group_builder.specify "drop should work as in the examples" <|
-        "Hello World!".drop First . should_equal "ello World!"
-        "Hello World!".drop (First 5) . should_equal " World!"
-        "Hello World!".drop (First 100) . should_equal ""
-        "Hello World!".drop (First 0) . should_equal "Hello World!"
+        "Hello World!".drop ..First . should_equal "ello World!"
+        "Hello World!".drop (..First 5) . should_equal " World!"
+        "Hello World!".drop (..First 100) . should_equal ""
+        "Hello World!".drop (..First 0) . should_equal "Hello World!"
         "Hello World!".drop . should_equal "ello World!"
         "Hello World!".drop 5 . should_equal " World!"
         "Hello World!".drop 100 . should_equal ""
         "Hello World!".drop 0 . should_equal "Hello World!"
-        "Hello World!".drop Last . should_equal "Hello World"
-        "Hello World!".drop (Last 6) . should_equal "Hello "
-        "Hello World!".drop (Last 100) . should_equal ""
-        "Hello World!".drop (Before " ") . should_equal " World!"
-        "Hello World!".drop (Before "z") . should_equal ""
-        "Hello World!".drop (Before_Last "o") . should_equal "orld!"
-        "Hello World!".drop (Before_Last "z") . should_equal ""
-        "Hello World!".drop (After " ") . should_equal "Hello "
-        "Hello World!".drop (After "z") . should_equal "Hello World!"
-        "Hello World!".drop (After_Last "o") . should_equal "Hello Wo"
-        "Hello World!".drop (After_Last "z") . should_equal "Hello World!"
-        "Hello World!".drop (While c->c!=" ") . should_equal " World!"
-        "Hello World!".drop (While c->c!="z") . should_equal ""
+        "Hello World!".drop ..Last . should_equal "Hello World"
+        "Hello World!".drop (..Last 6) . should_equal "Hello "
+        "Hello World!".drop (..Last 100) . should_equal ""
+        "Hello World!".drop (..Before " ") . should_equal " World!"
+        "Hello World!".drop (..Before "z") . should_equal ""
+        "Hello World!".drop (..Before_Last "o") . should_equal "orld!"
+        "Hello World!".drop (..Before_Last "z") . should_equal ""
+        "Hello World!".drop (..After " ") . should_equal "Hello "
+        "Hello World!".drop (..After "z") . should_equal "Hello World!"
+        "Hello World!".drop (..After_Last "o") . should_equal "Hello Wo"
+        "Hello World!".drop (..After_Last "z") . should_equal "Hello World!"
+        "Hello World!".drop (..While c->c!=" ") . should_equal " World!"
+        "Hello World!".drop (..While c->c!="z") . should_equal ""
         "Hello World!".drop (3.up_to 5) . should_equal "Hel World!"
         "Hello World!".drop (5.up_to 12) . should_equal "Hello"
         "Hello World!".drop (6.up_to 12 . with_step 2) . should_equal "Hello ol!"
-        "Hello World!".drop (Every 2 first=6) . should_equal "Hello ol!"
-        "Hello World!".drop (Every 3) . should_equal "elo ord!"
-        "Hello World!".drop (By_Index 0) . should_equal "ello World!"
-        "Hello World!".drop (By_Index [1, 0, 0, 6, 0]) . should_equal "llo orld!"
-        "Hello World!".drop (By_Index [0.up_to 3, 6, 6.up_to 12 . with_step 2]) . should_equal "lo ol!"
-        "Hello World!".drop (Sample 3 seed=42) . should_equal "HeloWorl!"
+        "Hello World!".drop (..Every 2 first=6) . should_equal "Hello ol!"
+        "Hello World!".drop (..Every 3) . should_equal "elo ord!"
+        "Hello World!".drop (..By_Index 0) . should_equal "ello World!"
+        "Hello World!".drop (..By_Index [1, 0, 0, 6, 0]) . should_equal "llo orld!"
+        "Hello World!".drop (..By_Index [0.up_to 3, 6, 6.up_to 12 . with_step 2]) . should_equal "lo ol!"
+        "Hello World!".drop (..Sample 3 seed=42) . should_equal "HeloWorl!"
 
     group_builder.specify "drop should report errors for start indices out of bounds but just go till the end if the end index is OOB" <|
         txt = "Hello World!"
         txt.drop (0.up_to 14) . should_equal ""
-        txt.drop (First 100) . should_equal ""
+        txt.drop (..First 100) . should_equal ""
         txt.drop 100 . should_equal ""
-        txt.drop (Last 100) . should_equal ""
-        txt.drop (By_Index 100) . should_fail_with Index_Out_Of_Bounds
-        txt.drop (By_Index 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 12)
-        txt.drop (By_Index 13) . should_fail_with Index_Out_Of_Bounds
-        txt.drop (By_Index [0, 1, 13]) . should_fail_with Index_Out_Of_Bounds
-        txt.drop (By_Index [0, 14.up_to 15, 1]) . should_fail_with Index_Out_Of_Bounds
-        txt.drop (By_Index [0, 1, 6.up_to 100]) . should_equal "llo "
+        txt.drop (..Last 100) . should_equal ""
+        txt.drop (..By_Index 100) . should_fail_with Index_Out_Of_Bounds
+        txt.drop (..By_Index 100) . catch . should_equal (Index_Out_Of_Bounds.Error 100 12)
+        txt.drop (..By_Index 13) . should_fail_with Index_Out_Of_Bounds
+        txt.drop (..By_Index [0, 1, 13]) . should_fail_with Index_Out_Of_Bounds
+        txt.drop (..By_Index [0, 14.up_to 15, 1]) . should_fail_with Index_Out_Of_Bounds
+        txt.drop (..By_Index [0, 1, 6.up_to 100]) . should_equal "llo "
         txt.drop (13.up_to 12) . should_fail_with Index_Out_Of_Bounds
         txt.drop (14.up_to 15) . should_fail_with Index_Out_Of_Bounds
-        "".drop (By_Index 0) . should_fail_with Index_Out_Of_Bounds
+        "".drop (..By_Index 0) . should_fail_with Index_Out_Of_Bounds
         "".drop (0.up_to 0) . should_fail_with Index_Out_Of_Bounds
         "".drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
         txt.drop (0.up_to 0) . should_equal txt
         txt.drop (5.up_to 100) . should_equal "Hello"
         txt.drop (5.up_to 100 . with_step 2) . should_equal "HelloWrd"
-        txt.drop (By_Index [0, 1, 0, 5.up_to 100 . with_step 2]) . should_equal "lloWrd"
+        txt.drop (..By_Index [0, 1, 0, 5.up_to 100 . with_step 2]) . should_equal "lloWrd"
 
    group_builder.specify "drop should work on grapheme clusters" <|
        txt_1 = 'He\u0302llo\u0308 Wo\u0301rld!'
        txt_2 = 'He\u0302llo\u0308 Wo\u0308rld!'
-        txt_1.drop (Every 2) . should_equal 'e\u0302l o\u0301l!'
-        txt_1.drop (First 2) . should_equal 'llo\u{308} Wo\u{301}rld!'
-        txt_1.drop (First 5) . should_equal ' Wo\u{301}rld!'
+        txt_1.drop (..Every 2) . should_equal 'e\u0302l o\u0301l!'
+        txt_1.drop (..First 2) . should_equal 'llo\u{308} Wo\u{301}rld!'
+        txt_1.drop (..First 5) . should_equal ' Wo\u{301}rld!'
         txt_1.drop 2 . should_equal 'llo\u{308} Wo\u{301}rld!'
         txt_1.drop 5 . should_equal ' Wo\u{301}rld!'
-        txt_1.drop (Last 6) . should_equal 'He\u{302}llo\u{308} '
-        txt_1.drop (Last 5) . should_equal 'He\u{302}llo\u{308} W'
-        txt_1.drop (Before 'e\u{302}') . should_equal 'e\u{302}llo\u{308} Wo\u{301}rld!'
-        txt_1.drop (Before 'ê') . should_equal 'e\u{302}llo\u{308} Wo\u{301}rld!'
-        txt_1.drop (Before 'e') . should_equal ''
-        txt_2.drop (Before_Last 'o\u{308}') . should_equal 'o\u{308}rld!'
-        txt_2.drop (Before_Last 'ö') . should_equal 'o\u{308}rld!'
-        txt_2.drop (Before_Last 'o') . should_equal ''
-        txt_1.drop (After 'e\u{302}') . should_equal 'He\u{302}'
-        txt_1.drop (After 'ê') . should_equal 'He\u{302}'
-        txt_1.drop (After 'e\u{308}') . should_equal txt_1
-        txt_1.drop (After 'e') . should_equal txt_1
-        txt_2.drop (After_Last 'o\u{308}') . should_equal 'He\u{302}llo\u{308} Wo\u{308}'
-        txt_2.drop (After_Last 'ö') . should_equal 'He\u{302}llo\u{308} Wo\u{308}'
-        txt_2.drop (After_Last 'o') . should_equal txt_2
-        txt_2.drop (While c->c!='e\u{302}') . should_equal 'e\u{302}llo\u{308} Wo\u{308}rld!'
-        txt_2.drop (While c->c!='ê') . should_equal 'e\u{302}llo\u{308} Wo\u{308}rld!'
-        txt_2.drop (While c->c!='e') . should_equal ''
+        txt_1.drop (..Last 6) . should_equal 'He\u{302}llo\u{308} '
+        txt_1.drop (..Last 5) . should_equal 'He\u{302}llo\u{308} W'
+        txt_1.drop (..Before 'e\u{302}') . should_equal 'e\u{302}llo\u{308} Wo\u{301}rld!'
+        txt_1.drop (..Before 'ê') . should_equal 'e\u{302}llo\u{308} Wo\u{301}rld!'
+        txt_1.drop (..Before 'e') . should_equal ''
+        txt_2.drop (..Before_Last 'o\u{308}') . should_equal 'o\u{308}rld!'
+        txt_2.drop (..Before_Last 'ö') . should_equal 'o\u{308}rld!'
+        txt_2.drop (..Before_Last 'o') . should_equal ''
+        txt_1.drop (..After 'e\u{302}') . should_equal 'He\u{302}'
+        txt_1.drop (..After 'ê') . should_equal 'He\u{302}'
+        txt_1.drop (..After 'e\u{308}') . should_equal txt_1
+        txt_1.drop (..After 'e') . should_equal txt_1
+        txt_2.drop (..After_Last 'o\u{308}') . should_equal 'He\u{302}llo\u{308} Wo\u{308}'
+        txt_2.drop (..After_Last 'ö') . should_equal 'He\u{302}llo\u{308} Wo\u{308}'
+        txt_2.drop (..After_Last 'o') . should_equal txt_2
+        txt_2.drop (..While c->c!='e\u{302}') . should_equal 'e\u{302}llo\u{308} Wo\u{308}rld!'
+        txt_2.drop (..While c->c!='ê') . should_equal 'e\u{302}llo\u{308} Wo\u{308}rld!'
+        txt_2.drop (..While c->c!='e') . should_equal ''
         txt_2.drop (3.up_to 5) . should_equal 'He\u{302}l Wo\u{308}rld!'
         txt_2.drop (5.up_to 12) . should_equal 'He\u{302}llo\u{308}'
 
     group_builder.specify "drop should work on emojis" <|
-        '✨🚀🚧😍😃😎😙😉☺'.drop First . should_equal '🚀🚧😍😃😎😙😉☺'
-        '✨🚀🚧😍😃😎😙😉☺'.drop (First 2) . should_equal '🚧😍😃😎😙😉☺'
+        '✨🚀🚧😍😃😎😙😉☺'.drop ..First . should_equal '🚀🚧😍😃😎😙😉☺'
+        '✨🚀🚧😍😃😎😙😉☺'.drop (..First 2) . should_equal '🚧😍😃😎😙😉☺'
         '✨🚀🚧😍😃😎😙😉☺'.drop . should_equal '🚀🚧😍😃😎😙😉☺'
         '✨🚀🚧😍😃😎😙😉☺'.drop 2 . should_equal '🚧😍😃😎😙😉☺'
-        '✨🚀🚧😍😃😎😙😉☺'.drop Last . should_equal '✨🚀🚧😍😃😎😙😉'
-        '✨🚀🚧😍😃😎😙😉☺'.drop (Last 3) . should_equal '✨🚀🚧😍😃😎'
-        '✨🚀🚧😍😃😍😎😙😉☺'.drop (Before '😍') . should_equal '😍😃😍😎😙😉☺'
-        '✨🚀🚧😍😃😍😎😙😉☺'.drop (Before_Last '😍') . should_equal '😍😎😙😉☺'
-        '✨🚀🚧😍😃😍😎😙😉☺'.drop (After '😍') . should_equal '✨🚀🚧😍'
-        '✨🚀🚧😍😃😍😎😙😉☺'.drop (After_Last '😍') . should_equal '✨🚀🚧😍😃😍'
-        '✨🚀🚧😍😃😍😎😙😉☺'.drop (While c->c!="😃") . should_equal '😃😍😎😙😉☺'
+        '✨🚀🚧😍😃😎😙😉☺'.drop ..Last . should_equal '✨🚀🚧😍😃😎😙😉'
+        '✨🚀🚧😍😃😎😙😉☺'.drop (..Last 3) . should_equal '✨🚀🚧😍😃😎'
+        '✨🚀🚧😍😃😍😎😙😉☺'.drop (..Before '😍') . should_equal '😍😃😍😎😙😉☺'
+        '✨🚀🚧😍😃😍😎😙😉☺'.drop (..Before_Last '😍') . should_equal '😍😎😙😉☺'
+        '✨🚀🚧😍😃😍😎😙😉☺'.drop (..After '😍') . should_equal '✨🚀🚧😍'
+        '✨🚀🚧😍😃😍😎😙😉☺'.drop (..After_Last '😍') . should_equal '✨🚀🚧😍😃😍'
+        '✨🚀🚧😍😃😍😎😙😉☺'.drop (..While c->c!="😃") . should_equal '😃😍😎😙😉☺'
         '✨🚀🚧😍😃😍😎😙😉☺'.drop (3.up_to 6) . should_equal '✨🚀🚧😎😙😉☺'
 
     group_builder.specify "drop should correctly handle edge cases" <|
         "ABC".drop . should_equal "BC"
-        "".drop First . should_equal ""
-        "".drop Last . should_equal ""
+        "".drop ..First . should_equal ""
+        "".drop ..Last . should_equal ""
 
-        "".drop (After "a") . should_equal ""
-        "".drop (After_Last "a") . should_equal ""
-        "".drop (Before "a") . should_equal ""
-        "".drop (Before_Last "a") . should_equal ""
+        "".drop (..After "a") . should_equal ""
+        "".drop (..After_Last "a") . should_equal ""
+        "".drop (..Before "a") . should_equal ""
+        "".drop (..Before_Last "a") . should_equal ""
 
-        "".drop (After "") . should_equal ""
-        "".drop (After_Last "") . should_equal ""
-        "".drop (Before "") . should_equal ""
-        "".drop (Before_Last "") . should_equal ""
+        "".drop (..After "") . should_equal ""
+        "".drop (..After_Last "") . should_equal ""
+        "".drop (..Before "") . should_equal ""
+        "".drop (..Before_Last "") . should_equal ""
 
-        "".drop (While _->True) . should_equal ""
+        "".drop (..While _->True) . should_equal ""
         "".drop (0.up_to 0) . should_fail_with Index_Out_Of_Bounds
         'ABC\u{301}'.drop (0.up_to 0) . should_equal 'ABC\u{301}'
-        'ABC\u{301}'.drop (After "") . should_equal ''
-        'ABC\u{301}'.drop (After_Last "") . should_equal 'ABC\u{301}'
-        'ABC\u{301}'.drop (Before "") . should_equal 'ABC\u{301}'
-        'ABC\u{301}'.drop (Before_Last "") . should_equal ''
-
-        "ABC".drop (By_Index -1) . should_equal "AB"
should_equal "AB" - "ABC".drop (By_Index [-1, -1, -1, -3, 2]) . should_equal "B" - "ABC".drop (By_Index []) . should_equal "ABC" - "".drop (Every 2) . should_equal "" - "".drop (Every 2 first=1) . should_equal "" - "ABC".drop (Every 5) . should_equal "BC" - "ABC".drop (Every 5 first=4) . should_equal "ABC" - "".drop (Sample 0) . should_equal "" - "".drop (Sample 100) . should_equal "" + 'ABC\u{301}'.drop (..After "") . should_equal '' + 'ABC\u{301}'.drop (..After_Last "") . should_equal 'ABC\u{301}' + 'ABC\u{301}'.drop (..Before "") . should_equal 'ABC\u{301}' + 'ABC\u{301}'.drop (..Before_Last "") . should_equal '' + + "ABC".drop (..By_Index -1) . should_equal "AB" + "ABC".drop (..By_Index [-1, -1, -1, -3, 2]) . should_equal "B" + "ABC".drop (..By_Index []) . should_equal "ABC" + "".drop (..Every 2) . should_equal "" + "".drop (..Every 2 first=1) . should_equal "" + "ABC".drop (..Every 5) . should_equal "BC" + "ABC".drop (..Every 5 first=4) . should_equal "ABC" + "".drop (..Sample 0) . should_equal "" + "".drop (..Sample 100) . should_equal "" group_builder.specify "take and drop should gracefully handle missing constructor arguments" <| Test.expect_panic Type_Error <| "".take "FOO" Test.expect_panic Type_Error <| "".drop "FOO" - r1 = "".take (Index_Sub_Range.While) + r1 = "".take (..While) r1.should_fail_with Missing_Argument r1.catch.to_display_text . should_contain "Provide a value for the argument `predicate`." diff --git a/test/Base_Tests/src/Data/Time/Date_Time_Formatter_Spec.enso b/test/Base_Tests/src/Data/Time/Date_Time_Formatter_Spec.enso index 70eb5dae7b86..fed89970d41b 100644 --- a/test/Base_Tests/src/Data/Time/Date_Time_Formatter_Spec.enso +++ b/test/Base_Tests/src/Data/Time/Date_Time_Formatter_Spec.enso @@ -181,7 +181,7 @@ add_specs suite_builder = time . millisecond . should_equal 0 time . microsecond . should_equal 0 time . nanosecond . should_equal 0 - time.zone.zone_id . take (Last 6) . should_equal "+01:00" + time.zone.zone_id . take (..Last 6) . should_equal "+01:00" group_builder.specify "should parse time with id-based zone" <| time = Date_Time.parse "1970-01-01T00:00:01+01:00[Europe/Paris]" @@ -217,7 +217,7 @@ add_specs suite_builder = time . millisecond . should_equal 0 time . microsecond . should_equal 0 time . nanosecond . should_equal 0 - (time.zone.zone_id . take (Last 3) . to_case Case.Upper) . should_equal "UTC" + (time.zone.zone_id . take (..Last 3) . to_case Case.Upper) . should_equal "UTC" group_builder.specify "should parse custom format of local time" <| time = Date_Time.parse "06 of May 2020 at 04:30AM" "dd 'of' MMMM yyyy 'at' hh:mma" diff --git a/test/Base_Tests/src/Data/Vector/Slicing_Helpers_Spec.enso b/test/Base_Tests/src/Data/Vector/Slicing_Helpers_Spec.enso index 8a69cc7164f7..94232ab5965b 100644 --- a/test/Base_Tests/src/Data/Vector/Slicing_Helpers_Spec.enso +++ b/test/Base_Tests/src/Data/Vector/Slicing_Helpers_Spec.enso @@ -6,18 +6,17 @@ from Standard.Test import all add_specs suite_builder = suite_builder.group "Vector Slicing Helpers" group_builder-> group_builder.specify "should be able to sort correctly merge neighboring sequences" <| - merge = sort_and_merge_ranges - merge [] . should_equal [] - merge [0.up_to 0] . should_equal [] - merge [0.up_to 10] . should_equal [0.up_to 10] - merge [0.up_to 10, 2.up_to 4] . should_equal [0.up_to 10] - merge [0.up_to 5, 5.up_to 10] . should_equal [0.up_to 10] - merge [5.up_to 10, 0.up_to 0, 0.up_to 1, 1.up_to 5] . should_equal [0.up_to 10] - merge [0.up_to 1, 1.up_to 2] . 
-            merge [6.up_to 7, 7.up_to 8, 5.up_to 5, 0.up_to 1, 2.up_to 3] . should_equal [0.up_to 1, 2.up_to 3, 6.up_to 8]
-            merge [5.up_to 10, 3.up_to 6, 3.up_to 6, 3.up_to 5, 3.up_to 7, 0.up_to 1] . should_equal [0.up_to 1, 3.up_to 10]
-            merge [0.up_to 1, 0.up_to 1] . should_equal [0.up_to 1]
-            merge [0.up_to 1, 1.up_to 2] . should_equal [0.up_to 2]
+            sort_and_merge_ranges [] . should_equal []
+            sort_and_merge_ranges [0.up_to 0] . should_equal []
+            sort_and_merge_ranges [0.up_to 10] . should_equal [0.up_to 10]
+            sort_and_merge_ranges [0.up_to 10, 2.up_to 4] . should_equal [0.up_to 10]
+            sort_and_merge_ranges [0.up_to 5, 5.up_to 10] . should_equal [0.up_to 10]
+            sort_and_merge_ranges [5.up_to 10, 0.up_to 0, 0.up_to 1, 1.up_to 5] . should_equal [0.up_to 10]
+            sort_and_merge_ranges [0.up_to 1, 1.up_to 2] . should_equal [0.up_to 2]
+            sort_and_merge_ranges [6.up_to 7, 7.up_to 8, 5.up_to 5, 0.up_to 1, 2.up_to 3] . should_equal [0.up_to 1, 2.up_to 3, 6.up_to 8]
+            sort_and_merge_ranges [5.up_to 10, 3.up_to 6, 3.up_to 6, 3.up_to 5, 3.up_to 7, 0.up_to 1] . should_equal [0.up_to 1, 3.up_to 10]
+            sort_and_merge_ranges [0.up_to 1, 0.up_to 1] . should_equal [0.up_to 1]
+            sort_and_merge_ranges [0.up_to 1, 1.up_to 2] . should_equal [0.up_to 2]
 
 main filter=Nothing =
     suite = Test.build suite_builder->
diff --git a/test/Base_Tests/src/Data/Vector_Spec.enso b/test/Base_Tests/src/Data/Vector_Spec.enso
index 407d18961383..573c310a1bd7 100644
--- a/test/Base_Tests/src/Data/Vector_Spec.enso
+++ b/test/Base_Tests/src/Data/Vector_Spec.enso
@@ -15,7 +15,6 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument
 import Standard.Base.Errors.Unimplemented.Unimplemented
 import Standard.Base.Runtime.Ref.Ref
 import Standard.Base.Runtime.State
-from Standard.Base.Data.Index_Sub_Range.Index_Sub_Range import While, By_Index, Sample, Every
 from Standard.Base.Panic import Wrapped_Dataflow_Error
 
 from Standard.Test import all
@@ -576,15 +575,15 @@ type_spec suite_builder name alter = suite_builder.group name group_builder->
         [].drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
         vec.drop (100.up_to 99) . should_fail_with Index_Out_Of_Bounds
 
-        vec.take (First 4) . should_equal first_four
-        vec.take (First 0) . should_equal []
-        vec.take (First -1) . should_equal []
-        vec.take (First 100) . should_equal vec
+        vec.take (..First 4) . should_equal first_four
+        vec.take (..First 0) . should_equal []
+        vec.take (..First -1) . should_equal []
+        vec.take (..First 100) . should_equal vec
 
-        vec.drop (First 2) . should_equal last_four
-        vec.drop (First 0) . should_equal vec
-        vec.drop (First -1) . should_equal vec
-        vec.drop (First 100) . should_equal []
+        vec.drop (..First 2) . should_equal last_four
+        vec.drop (..First 0) . should_equal vec
+        vec.drop (..First -1) . should_equal vec
+        vec.drop (..First 100) . should_equal []
 
         vec.take 4 . should_equal first_four
         vec.take 0 . should_equal []
@@ -596,106 +595,102 @@ type_spec suite_builder name alter = suite_builder.group name group_builder->
         vec.drop -1 . should_equal vec
         vec.drop 100 . should_equal []
 
-        vec.take (Last 4) . should_equal last_four
-        vec.take (Last 0) . should_equal []
-        vec.take (Last -1) . should_equal []
-        vec.take (Last 100) . should_equal vec
-
-        vec.drop (Last 2) . should_equal first_four
-        vec.drop (Last 0) . should_equal vec
-        vec.drop (Last -1) . should_equal vec
-        vec.drop (Last 100) . should_equal []
-
-        vec.take (Every 1) . should_equal vec
-        vec.take (Every 3) . should_equal [1, 4]
-        vec.take (Every 3 first=1) . should_equal [2, 5]
-        vec.take (Every 2 first=1) . should_equal [2, 4, 6]
-        vec.take (Every 2 first=100) . should_equal []
-        vec.take (Every 200) . should_equal [1]
-        [].take (Every 2) . should_equal []
-        vec.take (Every 0) . should_fail_with Illegal_Argument
-        [].take (Every 0) . should_fail_with Illegal_Argument
-
-        vec.drop (Every 1) . should_equal []
-        vec.drop (Every 3) . should_equal [2, 3, 5, 6]
-        vec.drop (Every 3 first=1) . should_equal [1, 3, 4, 6]
-        vec.drop (Every 2 first=1) . should_equal [1, 3, 5]
-        vec.drop (Every 2 first=100) . should_equal vec
-        vec.drop (Every 200) . should_equal [2, 3, 4, 5, 6]
-        [].drop (Every 2) . should_equal []
-        vec.drop (Every 0) . should_fail_with Illegal_Argument
-        [].drop (Every 0) . should_fail_with Illegal_Argument
-
-        vec.take (By_Index 0) . should_equal [1]
-        [].take (By_Index 0) . should_fail_with Index_Out_Of_Bounds
-        vec.take (By_Index []) . should_equal []
-        vec.take (By_Index [-1, -1]) . should_equal [6, 6]
-        vec.take (By_Index [0, 0, 3.up_to 100]) . should_equal [1, 1, 4, 5, 6]
+        vec.take (..Last 4) . should_equal last_four
+        vec.take (..Last 0) . should_equal []
+        vec.take (..Last -1) . should_equal []
+        vec.take (..Last 100) . should_equal vec
+
+        vec.drop (..Last 2) . should_equal first_four
+        vec.drop (..Last 0) . should_equal vec
+        vec.drop (..Last -1) . should_equal vec
+        vec.drop (..Last 100) . should_equal []
+
+        vec.take (..Every 1) . should_equal vec
+        vec.take (..Every 3) . should_equal [1, 4]
+        vec.take (..Every 3 first=1) . should_equal [2, 5]
+        vec.take (..Every 2 first=1) . should_equal [2, 4, 6]
+        vec.take (..Every 2 first=100) . should_equal []
+        vec.take (..Every 200) . should_equal [1]
+        [].take (..Every 2) . should_equal []
+        vec.take (..Every 0) . should_fail_with Illegal_Argument
+        [].take (..Every 0) . should_fail_with Illegal_Argument
+
+        vec.drop (..Every 1) . should_equal []
+        vec.drop (..Every 3) . should_equal [2, 3, 5, 6]
+        vec.drop (..Every 3 first=1) . should_equal [1, 3, 4, 6]
+        vec.drop (..Every 2 first=1) . should_equal [1, 3, 5]
+        vec.drop (..Every 2 first=100) . should_equal vec
+        vec.drop (..Every 200) . should_equal [2, 3, 4, 5, 6]
+        [].drop (..Every 2) . should_equal []
+        vec.drop (..Every 0) . should_fail_with Illegal_Argument
+        [].drop (..Every 0) . should_fail_with Illegal_Argument
+
+        vec.take (..By_Index 0) . should_equal [1]
+        [].take (..By_Index 0) . should_fail_with Index_Out_Of_Bounds
+        vec.take (..By_Index []) . should_equal []
+        vec.take (..By_Index [-1, -1]) . should_equal [6, 6]
+        vec.take (..By_Index [0, 0, 3.up_to 100]) . should_equal [1, 1, 4, 5, 6]
         vec.take (0.up_to 100 . with_step 2) . should_equal [1, 3, 5]
-        vec.take (By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . should_equal [1, 3, 5, 2, 4, 6]
-        vec.take (By_Index [1.up_to 3, 2.up_to 5]) . should_equal [2, 3, 3, 4, 5]
-        vec.take (By_Index [2.up_to 5, 1.up_to 3]) . should_equal [3, 4, 5, 2, 3]
-        vec.take (By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
-        vec.take (By_Index 100) . should_fail_with Index_Out_Of_Bounds
-
-        vec.drop (By_Index 0) . should_equal [2, 3, 4, 5, 6]
-        vec.drop (By_Index []) . should_equal vec
-        vec.drop (By_Index [-1, -1]) . should_equal [1, 2, 3, 4, 5]
-        vec.drop (By_Index [0, 0, 3.up_to 100]) . should_equal [2, 3]
+        vec.take (..By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . should_equal [1, 3, 5, 2, 4, 6]
+        vec.take (..By_Index [1.up_to 3, 2.up_to 5]) . should_equal [2, 3, 3, 4, 5]
+        vec.take (..By_Index [2.up_to 5, 1.up_to 3]) . should_equal [3, 4, 5, 2, 3]
+        vec.take (..By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
+        vec.take (..By_Index 100) . should_fail_with Index_Out_Of_Bounds
+
+        vec.drop (..By_Index 0) . should_equal [2, 3, 4, 5, 6]
+        vec.drop (..By_Index []) . should_equal vec
+        vec.drop (..By_Index [-1, -1]) . should_equal [1, 2, 3, 4, 5]
+        vec.drop (..By_Index [0, 0, 3.up_to 100]) . should_equal [2, 3]
         vec.drop (0.up_to 100 . with_step 2) . should_equal [2, 4, 6]
-        vec.drop (By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . should_equal []
-        vec.drop (By_Index [1.up_to 3, 2.up_to 5]) . should_equal [1, 6]
-        vec.drop (By_Index [2.up_to 5, 1.up_to 3]) . should_equal [1, 6]
-        vec.drop (By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
-        vec.drop (By_Index 100) . should_fail_with Index_Out_Of_Bounds
-
-        [1, 3, 5, 6, 8, 9, 10, 11, 13].take (While (x-> x%2 == 1)) . should_equal [1, 3, 5]
-        alter [1, 2, 3] . take (While (_ > 10)) . should_equal []
-        alter [1, 2, 3] . take (While (_ < 10)) . should_equal [1, 2, 3]
-
-        [1, 3, 5, 6, 8, 9, 10, 11, 13].drop (While (x-> x%2 == 1)) . should_equal [6, 8, 9, 10, 11, 13]
-        alter [1, 2, 3] . drop (While (_ > 10)) . should_equal [1, 2, 3]
-        alter [1, 2, 3] . drop (While (_ < 10)) . should_equal []
-
-        vec.take (Sample 0) . should_equal []
-        alter [] . take (Sample 0) . should_equal []
-        alter [] . take (Sample 1) . should_fail_with Illegal_Argument
-        alter ["a"] . take (Sample 1) . should_equal ["a"]
-        alter ["a", "a", "a"] . take (Sample 1) . should_equal ["a"]
-        alter ["a", "a", "a"] . take (Sample 3) . should_equal ["a", "a", "a"]
-        alter ["a", "a", "a"] . take (Sample 100) . should_fail_with Illegal_Argument
-
-        vec.drop (Sample 0) . should_equal vec
-        alter [] . drop (Sample 0) . should_equal []
-        alter [] . drop (Sample 1) . should_equal []
-        alter ["a"] . drop (Sample 1) . should_equal []
-        alter ["a", "a", "a"] . drop (Sample 1) . should_equal ["a", "a"]
-        alter ["a", "a", "a"] . drop (Sample 100) . should_equal []
+        vec.drop (..By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . should_equal []
+        vec.drop (..By_Index [1.up_to 3, 2.up_to 5]) . should_equal [1, 6]
+        vec.drop (..By_Index [2.up_to 5, 1.up_to 3]) . should_equal [1, 6]
+        vec.drop (..By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
+        vec.drop (..By_Index 100) . should_fail_with Index_Out_Of_Bounds
+
+        [1, 3, 5, 6, 8, 9, 10, 11, 13].take (..While (x-> x%2 == 1)) . should_equal [1, 3, 5]
+        alter [1, 2, 3] . take (..While (_ > 10)) . should_equal []
+        alter [1, 2, 3] . take (..While (_ < 10)) . should_equal [1, 2, 3]
+
+        [1, 3, 5, 6, 8, 9, 10, 11, 13].drop (..While (x-> x%2 == 1)) . should_equal [6, 8, 9, 10, 11, 13]
+        alter [1, 2, 3] . drop (..While (_ > 10)) . should_equal [1, 2, 3]
+        alter [1, 2, 3] . drop (..While (_ < 10)) . should_equal []
+
+        vec.take (..Sample 0) . should_equal []
+        alter [] . take (..Sample 0) . should_equal []
+        alter [] . take (..Sample 1) . should_fail_with Illegal_Argument
+        alter ["a"] . take (..Sample 1) . should_equal ["a"]
+        alter ["a", "a", "a"] . take (..Sample 1) . should_equal ["a"]
+        alter ["a", "a", "a"] . take (..Sample 3) . should_equal ["a", "a", "a"]
+        alter ["a", "a", "a"] . take (..Sample 100) . should_fail_with Illegal_Argument
+
+        vec.drop (..Sample 0) . should_equal vec
+        alter [] . drop (..Sample 0) . should_equal []
+        alter [] . drop (..Sample 1) . should_equal []
+        alter ["a"] . drop (..Sample 1) . should_equal []
+        alter ["a", "a", "a"] . drop (..Sample 1) . should_equal ["a", "a"]
should_equal ["a", "a"] + alter ["a", "a", "a"] . drop (..Sample 100) . should_equal [] suite_builder.group "take/drop Sample non-determinism" group_builder-> v = 0.up_to 60 . to_vector group_builder.specify "sampling should be deterministic when a seed is supplied" <| - v.take (Sample 5 seed=4200000) . should_equal (v.take (Sample 5 seed=4200000)) + v.take (..Sample 5 seed=4200000) . should_equal (v.take (..Sample 5 seed=4200000)) group_builder.specify "sampling should be non-deterministic when a seed is not supplied" <| - v.take (Sample 5) . should_not_equal (v.take (Sample 5)) + v.take (..Sample 5) . should_not_equal (v.take (..Sample 5)) group_builder.specify "take/drop should gracefully handle missing constructor arguments" <| Test.expect_panic Type_Error <| [].take "FOO" Test.expect_panic Type_Error <| [].drop "FOO" - r1 = [].take (Index_Sub_Range.While) + r1 = [].take (..While) r1.should_fail_with Missing_Argument r1.catch.to_display_text . should_contain "Provide a value for the argument `predicate`." - r2 = [].drop (Index_Sub_Range.Every ...) - r2.should_fail_with Illegal_Argument - r2.catch.to_display_text . should_contain "The constructor Every is missing some arguments" - - r3 = [].take (Index_Sub_Range.First _) - r3.should_fail_with Illegal_Argument - r3.catch.to_display_text . should_contain "Got a Function instead of a range, is a constructor argument missing?" + r2 = [].take (..By_Index) + r2.should_fail_with Missing_Argument + r2.catch.to_display_text . should_contain "Provide a value for the argument `indexes`." group_builder.specify "should allow getting the last element of the vector" <| non_empty_vec = alter [1, 2, 3, 4, 5] diff --git a/test/Base_Tests/src/Runtime/Stack_Traces_Spec.enso b/test/Base_Tests/src/Runtime/Stack_Traces_Spec.enso index 117fdfb1af67..398c04571931 100644 --- a/test/Base_Tests/src/Runtime/Stack_Traces_Spec.enso +++ b/test/Base_Tests/src/Runtime/Stack_Traces_Spec.enso @@ -16,6 +16,6 @@ add_specs suite_builder = suite_builder.group "Stack traces" group_builder-> modname = Meta.get_simple_type_name Stack_Traces_Spec stack = My_Type.foo names = [modname + ".bar", modname + ".baz", "Number.foo", modname + ".foo", "My_Type.foo"] - stack.take (First 5) . map .name . should_equal names + stack.take (..First 5) . map .name . should_equal names file = enso_project.root / 'src' / 'Runtime' / 'Stack_Traces_Spec.enso' - stack.take (First 5) . map (.source_location >> .file) . each (_.should_equal file) + stack.take (..First 5) . map (.source_location >> .file) . each (_.should_equal file) diff --git a/test/Base_Tests/src/Semantic/Conversion_Spec.enso b/test/Base_Tests/src/Semantic/Conversion_Spec.enso index 7ec836d3984b..c0d7a560ec9e 100644 --- a/test/Base_Tests/src/Semantic/Conversion_Spec.enso +++ b/test/Base_Tests/src/Semantic/Conversion_Spec.enso @@ -265,7 +265,7 @@ add_specs suite_builder = group_builder.specify "Requesting Text & Foo" <| check a (n : Text & Foo) = case a of 0 -> n.foo - 1 -> n.take (First 3) + 1 -> n.take (..First 3) check 0 "Ahoj" . should_equal 4 check 1 "Ahoj" . 
should_equal "Aho" diff --git a/test/Base_Tests/src/Semantic/Meta_Location_Spec.enso b/test/Base_Tests/src/Semantic/Meta_Location_Spec.enso index 6b1c5227e524..c293c7ffac23 100644 --- a/test/Base_Tests/src/Semantic/Meta_Location_Spec.enso +++ b/test/Base_Tests/src/Semantic/Meta_Location_Spec.enso @@ -17,7 +17,7 @@ add_specs suite_builder = suite_builder.group "Meta-Value Inspection" group_buil group_builder.specify "should allow to get the source location of a frame" pending=location_pending <| src = Meta.get_source_location 0 loc = "Meta_Location_Spec.enso:18:15-40" - src.take (Last loc.length) . should_equal loc + src.take (..Last loc.length) . should_equal loc group_builder.specify "should allow to get qualified type names of values" <| x = 42 diff --git a/test/Base_Tests/src/Semantic/Warnings_Spec.enso b/test/Base_Tests/src/Semantic/Warnings_Spec.enso index 0caf697108dd..4ca0b0daec3a 100644 --- a/test/Base_Tests/src/Semantic/Warnings_Spec.enso +++ b/test/Base_Tests/src/Semantic/Warnings_Spec.enso @@ -162,7 +162,7 @@ add_specs suite_builder = suite_builder.group "Dataflow Warnings" group_builder- current = Runtime.get_stack_trace warned = foo "value" warning_stack = Warning.get_all warned . first . origin - relevant = warning_stack . drop (Last current.length) + relevant = warning_stack . drop (..Last current.length) relevant.map .name . should_equal (['baz', 'bar', 'foo'].map ('Warnings_Spec.'+)) group_builder.specify "should attach reassignment info in the last-reassigned-first order" <| diff --git a/test/Base_Tests/src/System/File_Spec.enso b/test/Base_Tests/src/System/File_Spec.enso index a85231799b0c..15f229fdbfc8 100644 --- a/test/Base_Tests/src/System/File_Spec.enso +++ b/test/Base_Tests/src/System/File_Spec.enso @@ -283,12 +283,12 @@ add_specs suite_builder = suite_builder.group "read_bytes" group_builder-> group_builder.specify "should allow reading a file to byte vector" <| contents = sample_file.read_bytes - contents.take (First 6) . should_equal [67, 117, 112, 99, 97, 107] + contents.take (..First 6) . should_equal [67, 117, 112, 99, 97, 107] group_builder.specify "should allow reading a file to byte vector via path" <| full_path = sample_file . path contents = File.new full_path . read_bytes - contents.take (First 6) . should_equal [67, 117, 112, 99, 97, 107] + contents.take (..First 6) . should_equal [67, 117, 112, 99, 97, 107] group_builder.specify "should allow to read last n bytes from a file" <| file = enso_project.data / "transient" / "bytes.txt" @@ -312,10 +312,10 @@ add_specs suite_builder = group_builder.specify "should open and read the file in one shot" <| path_name = sample_file.path contents = File.new path_name . read_bytes - contents.take (First 6) . should_equal [67, 117, 112, 99, 97, 107] + contents.take (..First 6) . should_equal [67, 117, 112, 99, 97, 107] file = sample_file contents_2 = File.new file . read_bytes - contents_2.take (First 6) . should_equal [67, 117, 112, 99, 97, 107] + contents_2.take (..First 6) . 
should_equal [67, 117, 112, 99, 97, 107] suite_builder.group "Path Operations" group_builder-> group_builder.specify "should allow going above the first part of a relative directory by resolving it to absolute" <| diff --git a/test/Benchmarks/src/Table/Aggregate.enso b/test/Benchmarks/src/Table/Aggregate.enso index 56aa180fefd8..1c7da8013b10 100644 --- a/test/Benchmarks/src/Table/Aggregate.enso +++ b/test/Benchmarks/src/Table/Aggregate.enso @@ -1,4 +1,4 @@ -from Standard.Base import all hiding First, Last +from Standard.Base import all from Standard.Table import Table, Aggregate_Column diff --git a/test/Benchmarks/src/Table/Join.enso b/test/Benchmarks/src/Table/Join.enso index 994d8824ac30..af2dede22b63 100644 --- a/test/Benchmarks/src/Table/Join.enso +++ b/test/Benchmarks/src/Table/Join.enso @@ -13,7 +13,7 @@ type Scenario Value table1 table2 shuffle vec = - vec.take (Index_Sub_Range.Sample vec.length seed=42) + vec.take (..Sample vec.length seed=42) create_scenario_equals num_rows = xs = (0.up_to num_rows).to_vector diff --git a/test/Benchmarks/src/Table/Sorting.enso b/test/Benchmarks/src/Table/Sorting.enso index d73a20cbc39d..ccb9b8c647cc 100644 --- a/test/Benchmarks/src/Table/Sorting.enso +++ b/test/Benchmarks/src/Table/Sorting.enso @@ -21,7 +21,7 @@ Comparable.from (_:My) = My_Comparator vector_size = 100000 -create_ints = (0.up_to vector_size).to_vector.take (Index_Sub_Range.Sample vector_size 42) +create_ints = (0.up_to vector_size).to_vector.take (..Sample vector_size 42) create_dates = create_ints.map x-> (Date_Time.new 1990 1 1) + (Duration.new seconds=x) diff --git a/test/Benchmarks/src/Time/Work_Days.enso b/test/Benchmarks/src/Time/Work_Days.enso index 072e273a2cce..8c77f209f07a 100644 --- a/test/Benchmarks/src/Time/Work_Days.enso +++ b/test/Benchmarks/src/Time/Work_Days.enso @@ -19,7 +19,7 @@ create_dates = create_holidays = dates = create_dates - dates.take (Index_Sub_Range.Sample 100 100) + dates.take (..Sample 100 100) create_shifted_dates shift = diff --git a/test/Benchmarks/src/Vector/Operations.enso b/test/Benchmarks/src/Vector/Operations.enso index 4995c3f9b065..59bb54e87bd2 100644 --- a/test/Benchmarks/src/Vector/Operations.enso +++ b/test/Benchmarks/src/Vector/Operations.enso @@ -50,10 +50,10 @@ collect_benches = Bench.build builder-> random_vec.reduce (+) group_builder.specify "Drop_First_20_and_Sum" <| - (random_vec.drop (First 20)).reduce (+) + (random_vec.drop (..First 20)).reduce (+) group_builder.specify "Drop_Last_20_and_Sum" <| - (random_vec.drop (Last 20)).reduce (+) + (random_vec.drop (..Last 20)).reduce (+) group_builder.specify "Filter" <| random_vec.filter (x -> x % 3 == 1) diff --git a/test/Examples_Tests/src/Python_Examples_Spec.enso b/test/Examples_Tests/src/Python_Examples_Spec.enso index d9ac28b89828..149b0a29236a 100644 --- a/test/Examples_Tests/src/Python_Examples_Spec.enso +++ b/test/Examples_Tests/src/Python_Examples_Spec.enso @@ -29,7 +29,7 @@ pending = if should_run_test then Nothing else """ bin_dir = p = Java_System.getProperty "jdk.module.path" s = p.split Java_File.separator - paths = s.take (Index_Sub_Range.While _!="..") + paths = s.take (..While _!="..") j = paths . 
    File.new j
diff --git a/test/Snowflake_Tests/src/Snowflake_Spec.enso b/test/Snowflake_Tests/src/Snowflake_Spec.enso
index 30b5873a2214..1a6591feea03 100644
--- a/test/Snowflake_Tests/src/Snowflake_Spec.enso
+++ b/test/Snowflake_Tests/src/Snowflake_Spec.enso
@@ -575,7 +575,7 @@ add_snowflake_specs suite_builder create_connection_fn db_name =
         agg_in_memory_table.select_into_database_table default_connection.get (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True
 
     empty_agg_table_fn = _->
-        (agg_in_memory_table.take (First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
+        (agg_in_memory_table.take (..First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True
 
     setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder
diff --git a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso
index 08da210f1ca2..923e61a50af6 100644
--- a/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso
+++ b/test/Table_Tests/src/Common_Table_Operations/Aggregate_Spec.enso
@@ -1,4 +1,4 @@
-from Standard.Base import all hiding First, Last
+from Standard.Base import all
 
 from Standard.Table import Table, Sort_Column, expr
 from Standard.Table.Aggregate_Column.Aggregate_Column import all
diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso
index 0fde4d58ad9c..b7c5c9d623f5 100644
--- a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso
+++ b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso
@@ -257,7 +257,7 @@ add_specs suite_builder setup =
             # 6. multiple Between conditions
             xs = [0, 0, 1, 1, 1, 1, 2, 2, 3, 3, 3, 4, 4, 4]
             ys = [1, 2, 3, 1, 9, 2, 3, 2, 4, 2, 1, 1, 1, 2]
-            pts = xs.zip ys . take (Index_Sub_Range.Sample xs.length seed=42)
+            pts = xs.zip ys . take (..Sample xs.length seed=42)
 
             t1 = table_builder [["X", pts.map .first], ["Y", pts.map .second]]
             t2 = table_builder [["lx", [1]], ["ux", [3]], ["ly", [1]], ["uy", [2]]]
diff --git a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso
index c76606a19303..36b6202e87f6 100644
--- a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso
+++ b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso
@@ -1,5 +1,4 @@
 from Standard.Base import all
-from Standard.Base.Data.Index_Sub_Range.Index_Sub_Range import While, Sample, Every
 import Standard.Base.Errors.Common.Index_Out_Of_Bounds
 import Standard.Base.Errors.Common.Missing_Argument
 import Standard.Base.Errors.Common.Type_Error
@@ -82,15 +81,15 @@ add_specs suite_builder setup =
            data.table.take.at "beta" . to_vector . should_equal ["A"]
            data.table.drop.at "alpha" . to_vector . should_equal [2,3,4,5,6,7,8]
 
-            data.table.take (First 4) . at "alpha" . to_vector . should_equal [1,2,3,4]
-            data.table.take (First 0) . at "alpha" . to_vector . should_equal []
-            data.table.take (First -1) . at "alpha" . to_vector . should_equal []
-            data.table.take (First 100) . should_equal data.table
+            data.table.take (..First 4) . at "alpha" . to_vector . should_equal [1,2,3,4]
+            data.table.take (..First 0) . at "alpha" . to_vector . should_equal []
+            data.table.take (..First -1) . at "alpha" . to_vector . should_equal []
+            data.table.take (..First 100) . should_equal data.table
 
-            data.table.drop (First 2) . at "beta" . to_vector . should_equal ["C","D","E","F","G","H"]
-            data.table.drop (First 0) . should_equal data.table
-            data.table.drop (First -1) . should_equal data.table
-            data.table.drop (First 100) . should_equal data.empty
+            data.table.drop (..First 2) . at "beta" . to_vector . should_equal ["C","D","E","F","G","H"]
+            data.table.drop (..First 0) . should_equal data.table
+            data.table.drop (..First -1) . should_equal data.table
+            data.table.drop (..First 100) . should_equal data.empty
 
            data.table.take 4 . at "alpha" . to_vector . should_equal [1,2,3,4]
            data.table.take 0 . at "alpha" . to_vector . should_equal []
@@ -102,15 +101,15 @@ add_specs suite_builder setup =
            data.table.drop -1 . should_equal data.table
            data.table.drop 100 . should_equal data.empty
 
-            data.table.take (Last 4) . at "beta" . to_vector . should_equal ["E","F","G","H"]
-            data.table.take (Last 0) . should_equal data.empty
-            data.table.take (Last -1) . should_equal data.empty
-            data.table.take (Last 100) . should_equal data.table
+            data.table.take (..Last 4) . at "beta" . to_vector . should_equal ["E","F","G","H"]
+            data.table.take (..Last 0) . should_equal data.empty
+            data.table.take (..Last -1) . should_equal data.empty
+            data.table.take (..Last 100) . should_equal data.table
 
-            data.table.drop (Last 2) . at "alpha" . to_vector . should_equal [1,2,3,4,5,6]
-            data.table.drop (Last 0) . should_equal data.table
-            data.table.drop (Last -1) . should_equal data.table
-            data.table.drop (Last 100) . should_equal data.empty
+            data.table.drop (..Last 2) . at "alpha" . to_vector . should_equal [1,2,3,4,5,6]
+            data.table.drop (..Last 0) . should_equal data.table
+            data.table.drop (..Last -1) . should_equal data.table
+            data.table.drop (..Last 100) . should_equal data.empty
 
        group_builder.specify "should handle consecutive take/drops" <|
            data.table.take 5 . sort "alpha" . take 3 . at "alpha" . to_vector . should_equal [1, 2, 3]
@@ -141,49 +140,49 @@ add_specs suite_builder setup =
            data.empty.drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
            data.table.drop (100.up_to 99) . should_fail_with Index_Out_Of_Bounds
 
-            data.table.take (Index_Sub_Range.By_Index 0) . at "beta" . to_vector . should_equal ["A"]
-            data.empty.take (Index_Sub_Range.By_Index 0) . should_fail_with Index_Out_Of_Bounds
-            data.table.take (Index_Sub_Range.By_Index []) . should_equal data.empty
-            data.table.take (Index_Sub_Range.By_Index [-1, -1]) . at "beta" . to_vector . should_equal ["H", "H"]
-            data.table.take (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8]
+            data.table.take (..By_Index 0) . at "beta" . to_vector . should_equal ["A"]
+            data.empty.take (..By_Index 0) . should_fail_with Index_Out_Of_Bounds
+            data.table.take (..By_Index []) . should_equal data.empty
+            data.table.take (..By_Index [-1, -1]) . at "beta" . to_vector . should_equal ["H", "H"]
+            data.table.take (..By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8]
            data.table.take (0.up_to 100 . with_step 2) . at "alpha" . to_vector . should_equal [1, 3, 5, 7]
-            data.table.take (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6]
-            data.table.take (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [2, 3, 3, 4, 5]
-            data.table.take (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . to_vector . should_equal [3, 4, 5, 2, 3]
-            data.table.take (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
-            data.table.take (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
-
-            data.table.drop (Index_Sub_Range.By_Index 0) . at "alpha" . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8]
-            data.table.drop (Index_Sub_Range.By_Index []) . should_equal data.table
-            data.table.drop (Index_Sub_Range.By_Index [-1, -1]) . at "alpha" . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7]
-            data.table.drop (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [2, 3]
+            data.table.take (..By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6]
+            data.table.take (..By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [2, 3, 3, 4, 5]
+            data.table.take (..By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . to_vector . should_equal [3, 4, 5, 2, 3]
+            data.table.take (..By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
+            data.table.take (..By_Index 100) . should_fail_with Index_Out_Of_Bounds
+
+            data.table.drop (..By_Index 0) . at "alpha" . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8]
+            data.table.drop (..By_Index []) . should_equal data.table
+            data.table.drop (..By_Index [-1, -1]) . at "alpha" . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7]
+            data.table.drop (..By_Index [0, 0, 3.up_to 100]) . at "alpha" . to_vector . should_equal [2, 3]
            data.table.drop (0.up_to 100 . with_step 2) . at "alpha" . to_vector . should_equal [2, 4, 6, 8]
-            data.table.drop (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [8]
-            data.table.drop (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8]
-            data.table.drop (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8]
-            data.table.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
-            data.table.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
+            data.table.drop (..By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . at "alpha" . to_vector . should_equal [8]
+            data.table.drop (..By_Index [1.up_to 3, 2.up_to 5]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8]
+            data.table.drop (..By_Index [2.up_to 5, 1.up_to 3]) . at "alpha" . to_vector . should_equal [1, 6, 7, 8]
+            data.table.drop (..By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
+            data.table.drop (..By_Index 100) . should_fail_with Index_Out_Of_Bounds
 
        group_builder.specify "should allow selecting every Nth row" <|
-            data.table.take (Every 1) . should_equal data.table
-            data.table.take (Every 3) . at "alpha" . to_vector . should_equal [1, 4, 7]
-            data.table.take (Every 3 first=1) . at "alpha" . to_vector . should_equal [2, 5, 8]
-            data.table.take (Every 2 first=1) . at "beta" . to_vector . should_equal ["B", "D", "F", "H"]
-            data.table.take (Every 2 first=100) . at "alpha" . to_vector . should_equal []
-            data.table.take (Every 200) . at "alpha" . to_vector . should_equal [1]
-            data.empty.take (Every 2) . should_equal data.empty
-            data.table.take (Every 0) . should_fail_with Illegal_Argument
-            data.empty.take (Every 0) . should_fail_with Illegal_Argument
-
-            data.table.drop (Every 1) . should_equal data.empty
-            data.table.drop (Every 3) . at "alpha" . to_vector . should_equal [2, 3, 5, 6, 8]
-            data.table.drop (Every 3 first=1) . at "alpha" . to_vector . should_equal [1, 3, 4, 6, 7]
-            data.table.drop (Every 2 first=1) . at "alpha" . to_vector . should_equal [1, 3, 5, 7]
-            data.table.drop (Every 2 first=100) . should_equal data.table
-            data.table.drop (Every 200) . at "beta" . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"]
-            data.empty.drop (Every 2) . should_equal data.empty
-            data.table.drop (Every 0) . should_fail_with Illegal_Argument
-            data.empty.drop (Every 0) . should_fail_with Illegal_Argument
+            data.table.take (..Every 1) . should_equal data.table
+            data.table.take (..Every 3) . at "alpha" . to_vector . should_equal [1, 4, 7]
+            data.table.take (..Every 3 first=1) . at "alpha" . to_vector . should_equal [2, 5, 8]
+            data.table.take (..Every 2 first=1) . at "beta" . to_vector . should_equal ["B", "D", "F", "H"]
+            data.table.take (..Every 2 first=100) . at "alpha" . to_vector . should_equal []
+            data.table.take (..Every 200) . at "alpha" . to_vector . should_equal [1]
+            data.empty.take (..Every 2) . should_equal data.empty
+            data.table.take (..Every 0) . should_fail_with Illegal_Argument
+            data.empty.take (..Every 0) . should_fail_with Illegal_Argument
+
+            data.table.drop (..Every 1) . should_equal data.empty
+            data.table.drop (..Every 3) . at "alpha" . to_vector . should_equal [2, 3, 5, 6, 8]
+            data.table.drop (..Every 3 first=1) . at "alpha" . to_vector . should_equal [1, 3, 4, 6, 7]
+            data.table.drop (..Every 2 first=1) . at "alpha" . to_vector . should_equal [1, 3, 5, 7]
+            data.table.drop (..Every 2 first=100) . should_equal data.table
+            data.table.drop (..Every 200) . at "beta" . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"]
+            data.empty.drop (..Every 2) . should_equal data.empty
+            data.table.drop (..Every 0) . should_fail_with Illegal_Argument
+            data.empty.drop (..Every 0) . should_fail_with Illegal_Argument
 
        if setup.is_database.not then
            group_builder.specify "should allow sampling rows" <|
@@ -191,20 +190,20 @@ add_specs suite_builder setup =
                two = table_builder [["X", ["a", "a"]]] . sort "X"
                three = table_builder [["X", ["a", "a", "a"]]] . sort "X"
                empty = one.remove_all_rows
-                three.take (Sample 0) . should_equal empty
-                empty.take (Sample 0) . should_equal empty
-                empty.take (Sample 1) . should_equal empty
-                three.take (Sample 1) . should_equal one
-                three.take (Sample 100) . should_equal three
-
-                three.drop (Sample 0) . should_equal three
-                empty.drop (Sample 0) . should_equal empty
-                empty.drop (Sample 1) . should_equal empty
-                one.drop (Sample 1) . should_equal empty
-                three.drop (Sample 1) . should_equal two
-                three.drop (Sample 100) . should_equal empty
-
-                rnd = data.table.take (Sample 3 seed=42)
+                three.take (..Sample 0) . should_equal empty
+                empty.take (..Sample 0) . should_equal empty
+                empty.take (..Sample 1) . should_equal empty
+                three.take (..Sample 1) . should_equal one
+                three.take (..Sample 100) . should_equal three
+
+                three.drop (..Sample 0) . should_equal three
+                empty.drop (..Sample 0) . should_equal empty
+                empty.drop (..Sample 1) . should_equal empty
+                one.drop (..Sample 1) . should_equal empty
+                three.drop (..Sample 1) . should_equal two
+                three.drop (..Sample 100) . should_equal empty
+
+                rnd = data.table.take (..Sample 3 seed=42)
                random_indices = [5, 6, 2]
                alpha_sample = random_indices.map (data.table.at "alpha" . to_vector . at _)
                beta_sample = random_indices.map (data.table.at "beta" . to_vector . at _)
@@ -212,17 +211,17 @@ add_specs suite_builder setup =
                rnd.at "beta" . to_vector . should_equal beta_sample
 
        group_builder.specify "sampling should be deterministic when a seed is supplied" <|
-            data.table.take (Sample 3 seed=4200000) . should_equal (data.table.take (Sample 3 seed=4200000))
+            data.table.take (..Sample 3 seed=4200000) . should_equal (data.table.take (..Sample 3 seed=4200000))
 
        group_builder.specify "sampling should be non-deterministic when a seed is not supplied" <|
            0.up_to 3 . map _->
-                data.table.take (Sample 3) . should_not_equal (data.table.take (Sample 3))
+                data.table.take (..Sample 3) . should_not_equal (data.table.take (..Sample 3))
 
        if setup.is_database.not then
            group_builder.specify "should allow selecting rows as long as they satisfy a predicate" <|
                t = table_builder [["a", [1, 2, 3, 4]], ["b", [5, 6, 7, 8]]]
-                t2 = t.take (While (row -> row.at "a" < 3))
+                t2 = t.take (..While (row -> row.at "a" < 3))
                t2.row_count . should_equal 2
                t2.at "a" . to_vector . should_equal [1, 2]
                t2.at "b" . to_vector . should_equal [5, 6]
@@ -232,7 +231,7 @@ add_specs suite_builder setup =
            Test.expect_panic Type_Error <| t.take "FOO"
            Test.expect_panic Type_Error <| t.drop "FOO"
 
-            r1 = t.take (Index_Sub_Range.While)
+            r1 = t.take (..While)
            r1.should_fail_with Missing_Argument
            r1.catch.to_display_text . should_contain "Provide a value for the argument `predicate`."
 
@@ -271,35 +270,35 @@ add_specs suite_builder setup =
            data.beta.take.to_vector . should_equal ["A"]
            data.alpha.drop.to_vector . should_equal [2,3,4,5,6,7,8]
 
-            data.alpha.take (First 4) . to_vector . should_equal [1,2,3,4]
-            data.alpha.take (First 0) . should_equal data.empty_alpha
-            data.alpha.take (First -1) . should_equal data.empty_alpha
-            data.alpha.take (First 100) . should_equal data.alpha
+            data.alpha.take (..First 4) . to_vector . should_equal [1,2,3,4]
+            data.alpha.take (..First 0) . should_equal data.empty_alpha
+            data.alpha.take (..First -1) . should_equal data.empty_alpha
+            data.alpha.take (..First 100) . should_equal data.alpha
 
            data.alpha.take 4 . to_vector . should_equal [1,2,3,4]
            data.alpha.take 0 . should_equal data.empty_alpha
            data.alpha.take -1 . should_equal data.empty_alpha
            data.alpha.take 100 . should_equal data.alpha
 
-            data.beta.drop (First 2) . to_vector . should_equal ["C","D","E","F","G","H"]
-            data.alpha.drop (First 0) . should_equal data.alpha
-            data.alpha.drop (First -1) . should_equal data.alpha
-            data.alpha.drop (First 100) . should_equal data.empty_alpha
+            data.beta.drop (..First 2) . to_vector . should_equal ["C","D","E","F","G","H"]
+            data.alpha.drop (..First 0) . should_equal data.alpha
+            data.alpha.drop (..First -1) . should_equal data.alpha
+            data.alpha.drop (..First 100) . should_equal data.empty_alpha
 
            data.beta.drop 2 . to_vector . should_equal ["C","D","E","F","G","H"]
            data.alpha.drop 0 . should_equal data.alpha
            data.alpha.drop -1 . should_equal data.alpha
            data.alpha.drop 100 . should_equal data.empty_alpha
 
-            data.beta.take (Last 4) . to_vector . should_equal ["E","F","G","H"]
-            data.beta.take (Last 0) . should_equal data.empty_beta
-            data.beta.take (Last -1) . should_equal data.empty_beta
-            data.beta.take (Last 100) . should_equal data.beta
+            data.beta.take (..Last 4) . to_vector . should_equal ["E","F","G","H"]
+            data.beta.take (..Last 0) . should_equal data.empty_beta
+            data.beta.take (..Last -1) . should_equal data.empty_beta
+            data.beta.take (..Last 100) . should_equal data.beta
 
-            data.alpha.drop (Last 2) . to_vector . should_equal [1,2,3,4,5,6]
-            data.alpha.drop (Last 0) . should_equal data.alpha
-            data.alpha.drop (Last -1) . should_equal data.alpha
-            data.alpha.drop (Last 100) . should_equal data.empty_alpha
+            data.alpha.drop (..Last 2) . to_vector . should_equal [1,2,3,4,5,6]
+            data.alpha.drop (..Last 0) . should_equal data.alpha
+            data.alpha.drop (..Last -1) . should_equal data.alpha
+            data.alpha.drop (..Last 100) . should_equal data.empty_alpha
 
        group_builder.specify "should handle consecutive take/drops" <|
            data.alpha.take 5 . sort . take 3 . to_vector . should_equal [1, 2, 3]
@@ -330,49 +329,49 @@ add_specs suite_builder setup =
            data.empty_alpha.drop (0.up_to 0) . catch . should_equal (Index_Out_Of_Bounds.Error 0 0)
            data.alpha.drop (100.up_to 99) . should_fail_with Index_Out_Of_Bounds
 
-            data.beta.take (Index_Sub_Range.By_Index 0) . to_vector . should_equal ["A"]
-            data.empty_beta.take (Index_Sub_Range.By_Index 0) . should_fail_with Index_Out_Of_Bounds
-            data.beta.take (Index_Sub_Range.By_Index []) . should_equal data.empty_beta
-            data.beta.take (Index_Sub_Range.By_Index [-1, -1]) . to_vector . should_equal ["H", "H"]
-            data.alpha.take (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8]
+            data.beta.take (..By_Index 0) . to_vector . should_equal ["A"]
+            data.empty_beta.take (..By_Index 0) . should_fail_with Index_Out_Of_Bounds
+            data.beta.take (..By_Index []) . should_equal data.empty_beta
+            data.beta.take (..By_Index [-1, -1]) . to_vector . should_equal ["H", "H"]
+            data.alpha.take (..By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [1, 1, 4, 5, 6, 7, 8]
            data.alpha.take (0.up_to 100 . with_step 2) . to_vector . should_equal [1, 3, 5, 7]
-            data.alpha.take (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6]
-            data.alpha.take (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [2, 3, 3, 4, 5]
-            data.alpha.take (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [3, 4, 5, 2, 3]
-            data.alpha.take (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
-            data.alpha.take (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
-
-            data.alpha.drop (Index_Sub_Range.By_Index 0) . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8]
-            data.alpha.drop (Index_Sub_Range.By_Index []) . should_equal data.alpha
-            data.alpha.drop (Index_Sub_Range.By_Index [-1, -1]) . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7]
-            data.alpha.drop (Index_Sub_Range.By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [2, 3]
+            data.alpha.take (..By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [1, 3, 5, 7, 2, 4, 6]
+            data.alpha.take (..By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [2, 3, 3, 4, 5]
+            data.alpha.take (..By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [3, 4, 5, 2, 3]
+            data.alpha.take (..By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
+            data.alpha.take (..By_Index 100) . should_fail_with Index_Out_Of_Bounds
+
+            data.alpha.drop (..By_Index 0) . to_vector . should_equal [2, 3, 4, 5, 6, 7, 8]
+            data.alpha.drop (..By_Index []) . should_equal data.alpha
+            data.alpha.drop (..By_Index [-1, -1]) . to_vector . should_equal [1, 2, 3, 4, 5, 6, 7]
+            data.alpha.drop (..By_Index [0, 0, 3.up_to 100]) . to_vector . should_equal [2, 3]
            data.alpha.drop (0.up_to 100 . with_step 2) . to_vector . should_equal [2, 4, 6, 8]
-            data.alpha.drop (Index_Sub_Range.By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [8]
-            data.alpha.drop (Index_Sub_Range.By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [1, 6, 7, 8]
-            data.alpha.drop (Index_Sub_Range.By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [1, 6, 7, 8]
-            data.alpha.drop (Index_Sub_Range.By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
-            data.alpha.drop (Index_Sub_Range.By_Index 100) . should_fail_with Index_Out_Of_Bounds
+            data.alpha.drop (..By_Index [0.up_to 100 . with_step 2, 1.up_to 6 . with_step 2]) . to_vector . should_equal [8]
+            data.alpha.drop (..By_Index [1.up_to 3, 2.up_to 5]) . to_vector . should_equal [1, 6, 7, 8]
+            data.alpha.drop (..By_Index [2.up_to 5, 1.up_to 3]) . to_vector . should_equal [1, 6, 7, 8]
+            data.alpha.drop (..By_Index [0, 1, 100.up_to 200]) . should_fail_with Index_Out_Of_Bounds
+            data.alpha.drop (..By_Index 100) . should_fail_with Index_Out_Of_Bounds
 
        group_builder.specify "should allow selecting every Nth row" <|
-            data.alpha.take (Every 1) . should_equal data.alpha
-            data.alpha.take (Every 3) . to_vector . should_equal [1, 4, 7]
-            data.alpha.take (Every 3 first=1) . to_vector . should_equal [2, 5, 8]
-            data.beta.take (Every 2 first=1) . to_vector . should_equal ["B", "D", "F", "H"]
-            data.alpha.take (Every 2 first=100) . to_vector . should_equal []
-            data.alpha.take (Every 200) . to_vector . should_equal [1]
-            data.empty_beta.take (Every 2) . should_equal data.empty_beta
-            data.beta.take (Every 0) . should_fail_with Illegal_Argument
-            data.empty_beta.take (Every 0) . should_fail_with Illegal_Argument
-
-            data.alpha.drop (Every 1) . should_equal data.empty_alpha
-            data.alpha.drop (Every 3) . to_vector . should_equal [2, 3, 5, 6, 8]
-            data.alpha.drop (Every 3 first=1) . to_vector . should_equal [1, 3, 4, 6, 7]
-            data.alpha.drop (Every 2 first=1) . to_vector . should_equal [1, 3, 5, 7]
-            data.alpha.drop (Every 2 first=100) . should_equal data.alpha
-            data.beta.drop (Every 200) . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"]
-            data.empty_beta.drop (Every 2) . should_equal data.empty_beta
-            data.beta.drop (Every 0) . should_fail_with Illegal_Argument
-            data.empty_beta.drop (Every 0) . should_fail_with Illegal_Argument
+            data.alpha.take (..Every 1) . should_equal data.alpha
+            data.alpha.take (..Every 3) . to_vector . should_equal [1, 4, 7]
+            data.alpha.take (..Every 3 first=1) . to_vector . should_equal [2, 5, 8]
+            data.beta.take (..Every 2 first=1) . to_vector . should_equal ["B", "D", "F", "H"]
+            data.alpha.take (..Every 2 first=100) . to_vector . should_equal []
+            data.alpha.take (..Every 200) . to_vector . should_equal [1]
+            data.empty_beta.take (..Every 2) . should_equal data.empty_beta
+            data.beta.take (..Every 0) . should_fail_with Illegal_Argument
+            data.empty_beta.take (..Every 0) . should_fail_with Illegal_Argument
+
+            data.alpha.drop (..Every 1) . should_equal data.empty_alpha
+            data.alpha.drop (..Every 3) . to_vector . should_equal [2, 3, 5, 6, 8]
+            data.alpha.drop (..Every 3 first=1) . to_vector . should_equal [1, 3, 4, 6, 7]
+            data.alpha.drop (..Every 2 first=1) . to_vector . should_equal [1, 3, 5, 7]
+            data.alpha.drop (..Every 2 first=100) . should_equal data.alpha
+            data.beta.drop (..Every 200) . to_vector . should_equal ["B", "C", "D", "E", "F", "G", "H"]
+            data.empty_beta.drop (..Every 2) . should_equal data.empty_beta
+            data.beta.drop (..Every 0) . should_fail_with Illegal_Argument
+            data.empty_beta.drop (..Every 0) . should_fail_with Illegal_Argument
 
        if setup.is_database.not then
            group_builder.specify "should allow sampling rows" <|
@@ -382,56 +381,60 @@ add_specs suite_builder setup =
                one = one_table . at "X"
                empty = one_table.remove_all_rows . at "X"
 
-                three.take (First 2) . should_equal two
-                three.take First . should_equal one
-                three.take (First 0) . should_equal empty
+                three.take (..First 2) . should_equal two
+                three.take ..First . should_equal one
+                three.take (..First 0) . should_equal empty
 
                three.take 2 . should_equal two
                three.take . should_equal one
                three.take 0 . should_equal empty
 
-                three.take (Sample 0) . should_equal empty
-                empty.take (Sample 0) . should_equal empty
-                empty.take (Sample 1) . should_equal empty
-                three.take (Sample 1) . should_equal one
-                three.take (Sample 100) . should_equal three
+                three.take (..Sample 0) . should_equal empty
+                empty.take (..Sample 0) . should_equal empty
+                empty.take (..Sample 1) . should_equal empty
+                three.take (..Sample 1) . should_equal one
+                three.take (..Sample 100) . should_equal three
 
-                three.drop (Sample 0) . should_equal three
-                empty.drop (Sample 0) . should_equal empty
-                empty.drop (Sample 1) . should_equal empty
-                one.drop (Sample 1) . should_equal empty
-                three.drop (Sample 1) . should_equal two
-                three.drop (Sample 100) . should_equal empty
+                three.drop (..Sample 0) . should_equal three
+                empty.drop (..Sample 0) . should_equal empty
+                empty.drop (..Sample 1) . should_equal empty
+                one.drop (..Sample 1) . should_equal empty
+                three.drop (..Sample 1) . should_equal two
+                three.drop (..Sample 100) . should_equal empty
 
-                rnd = data.alpha.take (Sample 3 seed=42)
+                rnd = data.alpha.take (..Sample 3 seed=42)
                random_indices = [5, 6, 2]
-                sample = data.alpha.take (Index_Sub_Range.By_Index random_indices)
+                sample = data.alpha.take (..By_Index random_indices)
                rnd.should_equal sample
 
        if setup.is_database.not then
            group_builder.specify "should allow selecting rows as long as they satisfy a predicate" <|
                col = table_builder [["X", [1, 3, 5, 6, 8, 9, 10, 11, 13]]] . at "X"
-                col.take (While (x-> x%2 == 1)) . to_vector . should_equal [1, 3, 5]
-                col.drop (While (x-> x%2 == 1)) . to_vector . should_equal [6, 8, 9, 10, 11, 13]
+                col.take (..While (x-> x%2 == 1)) . to_vector . should_equal [1, 3, 5]
+                col.drop (..While (x-> x%2 == 1)) . to_vector . should_equal [6, 8, 9, 10, 11, 13]
 
                three_table = table_builder [["X", [1, 2, 3]]]
                three = three_table . at "X"
                empty = three_table.remove_all_rows . at "X"
 
-                three.take (While (_ > 10)) . should_equal empty
-                three.take (While (_ < 10)) . should_equal three
+                three.take (..While (_ > 10)) . should_equal empty
+                three.take (..While (_ < 10)) . should_equal three
 
-                three.drop (While (_ > 10)) . should_equal three
-                three.drop (While (_ < 10)) . should_equal empty
+                three.drop (..While (_ > 10)) . should_equal three
+                three.drop (..While (_ < 10)) . should_equal empty
 
        group_builder.specify "should gracefully handle missing constructor arguments" <|
            c = table_builder [["X", [1, 2, 3]]] . at "X"
            Test.expect_panic Type_Error <| c.take "FOO"
            Test.expect_panic Type_Error <| c.drop "FOO"
 
-            r1 = c.take (Index_Sub_Range.While)
+            r1 = c.take (..While)
            r1.should_fail_with Missing_Argument
            r1.catch.to_display_text . should_contain "Provide a value for the argument `predicate`."
+ r1b = c.take (..By_Index) + r1b.should_fail_with Missing_Argument + r1b.catch.to_display_text . should_contain "Provide a value for the argument `indexes`." + group_builder.specify "unordered table" <| unordered_table = col1 = ["alpha", [1,2,3,4,5,6,7,8]] diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso index 84356596f1ec..2f00c1e34f13 100644 --- a/test/Table_Tests/src/Database/Postgres_Spec.enso +++ b/test/Table_Tests/src/Database/Postgres_Spec.enso @@ -698,7 +698,7 @@ add_postgres_specs suite_builder create_connection_fn db_name = agg_in_memory_table.select_into_database_table default_connection.get (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True empty_agg_table_fn = _-> - (agg_in_memory_table.take (First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True + (agg_in_memory_table.take (..First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_fn light_table_builder=light_table_builder diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 3aeabbe1d902..4999f99fc354 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -348,7 +348,7 @@ sqlite_spec suite_builder prefix create_connection_func = agg_in_memory_table.select_into_database_table default_connection.get (Name_Generator.random_name "Agg1") primary_key=Nothing temporary=True empty_agg_table_fn = _ -> - (agg_in_memory_table.take (First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True + (agg_in_memory_table.take (..First 0)).select_into_database_table default_connection.get (Name_Generator.random_name "Agg_Empty") primary_key=Nothing temporary=True setup = Common_Table_Operations.Main.Test_Setup.Config prefix agg_table_fn empty_agg_table_fn table_builder materialize is_database=True test_selection=common_selection aggregate_test_selection=aggregate_selection create_connection_func=create_connection_func light_table_builder=light_table_builder sqlite_specific_spec suite_builder prefix create_connection_func setup diff --git a/test/Table_Tests/src/Helpers/Sorted_List_Index_Spec.enso b/test/Table_Tests/src/Helpers/Sorted_List_Index_Spec.enso index c27d72c78fce..85a3ebccc6ab 100644 --- a/test/Table_Tests/src/Helpers/Sorted_List_Index_Spec.enso +++ b/test/Table_Tests/src/Helpers/Sorted_List_Index_Spec.enso @@ -21,7 +21,7 @@ type Data setup = v1 = [0, 0, 1, 1, 1, 1, 2, 2, 2, 3, 3, 4, 5, 6, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 8, 10, 10, 10, 10, 11, 14, 17, 19] - v1_shuffled = v1.take (Index_Sub_Range.Sample v1.length) + v1_shuffled = v1.take (..Sample v1.length) Data.Value (make_index v1_shuffled) diff --git a/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso b/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso index d70afbaf9770..99e51abe26e7 100644 --- a/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Aggregate_Column_Spec.enso @@ -1,4 +1,4 @@ -from Standard.Base import all hiding 
First, Last +from Standard.Base import all import Standard.Base.Errors.Illegal_Argument.Illegal_Argument from Standard.Table import Table diff --git a/test/Table_Tests/src/In_Memory/Column_Spec.enso b/test/Table_Tests/src/In_Memory/Column_Spec.enso index 23838d89ff7b..4b7687bcb9cd 100644 --- a/test/Table_Tests/src/In_Memory/Column_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Column_Spec.enso @@ -43,9 +43,9 @@ add_specs suite_builder = expected_1 = Column.from_vector "Test" [1, 3, 5] expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] expected_3 = Column.from_vector "Test" [] - test_column.take (First 3) . to_vector . should_equal expected_1.to_vector - test_column.take (First 7) . to_vector . should_equal expected_2.to_vector - test_column.take (First 0) . to_vector . should_equal expected_3.to_vector + test_column.take (..First 3) . to_vector . should_equal expected_1.to_vector + test_column.take (..First 7) . to_vector . should_equal expected_2.to_vector + test_column.take (..First 0) . to_vector . should_equal expected_3.to_vector group_builder.specify "should be able to take the first n elements by Integer" <| expected_1 = Column.from_vector "Test" [1, 3, 5] @@ -59,9 +59,9 @@ add_specs suite_builder = expected_1 = Column.from_vector "Test" [2, 4, 6] expected_2 = Column.from_vector "Test" [1, 3, 5, 2, 4, 6] expected_3 = Column.from_vector "Test" [] - test_column.take (Last 3) . to_vector . should_equal expected_1.to_vector - test_column.take (Last 7) . to_vector . should_equal expected_2.to_vector - test_column.take (Last 0) . to_vector . should_equal expected_3.to_vector + test_column.take (..Last 3) . to_vector . should_equal expected_1.to_vector + test_column.take (..Last 7) . to_vector . should_equal expected_2.to_vector + test_column.take (..Last 0) . to_vector . should_equal expected_3.to_vector group_builder.specify "should be able to get the first element" <| test_column.first . should_equal 1 diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index e69c5a631b0c..da67d628ecec 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -464,20 +464,20 @@ add_specs suite_builder = c_3 = ['col3', [False, True, Nothing]] t_1 = Table.new [i_1, c_1, c_2, c_3] - t_1.take (First 10) . at 'col' . to_vector . should_equal (t_1.at 'col' . to_vector) + t_1.take (..First 10) . at 'col' . to_vector . should_equal (t_1.at 'col' . to_vector) t_1.take 10 . at 'col' . to_vector . should_equal (t_1.at 'col' . to_vector) - t_2 = t_1.take (First 2) - t_2.at 'col' . to_vector . should_equal (t_1.at 'col' . to_vector . take (First 2)) - t_2.at 'col2' . to_vector . should_equal (t_1.at 'col2' . to_vector . take (First 2)) - t_2.at 'col3' . to_vector . should_equal (t_1.at 'col3' . to_vector . take (First 2)) + t_2 = t_1.take (..First 2) + t_2.at 'col' . to_vector . should_equal (t_1.at 'col' . to_vector . take (..First 2)) + t_2.at 'col2' . to_vector . should_equal (t_1.at 'col2' . to_vector . take (..First 2)) + t_2.at 'col3' . to_vector . should_equal (t_1.at 'col3' . to_vector . take (..First 2)) t_3 = t_1.take 2 t_3.at 'col' . to_vector . should_equal (t_1.at 'col' . to_vector . take 2) t_3.at 'col2' . to_vector . should_equal (t_1.at 'col2' . to_vector . take 2) t_3.at 'col3' . to_vector . should_equal (t_1.at 'col3' . to_vector . take 2) - t_1.at 'col' . take (First 2) . to_vector . should_equal (t_1.at 'col' . to_vector . take (First 2)) + t_1.at 'col' . take (..First 2) . 
to_vector . should_equal (t_1.at 'col' . to_vector . take (..First 2)) t_1.at 'col' . take 2 . to_vector . should_equal (t_1.at 'col' . to_vector . take 2) group_builder.specify "should allow taking the last n rows" <| @@ -487,35 +487,35 @@ add_specs suite_builder = c_3 = ['col3', [False, True, Nothing]] t_1 = Table.new [i_1, c_1, c_2, c_3] - t_1.take (Last 10) . at 'col1' . to_vector . should_equal (t_1.at 'col1' . to_vector) + t_1.take (..Last 10) . at 'col1' . to_vector . should_equal (t_1.at 'col1' . to_vector) - t_2 = t_1.take (Last 2) - t_2.at 'col1' . to_vector . should_equal (t_1.at 'col1' . to_vector . take (Last 2)) - t_2.at 'col2' . to_vector . should_equal (t_1.at 'col2' . to_vector . take (Last 2)) - t_2.at 'col3' . to_vector . should_equal (t_1.at 'col3' . to_vector . take (Last 2)) + t_2 = t_1.take (..Last 2) + t_2.at 'col1' . to_vector . should_equal (t_1.at 'col1' . to_vector . take (..Last 2)) + t_2.at 'col2' . to_vector . should_equal (t_1.at 'col2' . to_vector . take (..Last 2)) + t_2.at 'col3' . to_vector . should_equal (t_1.at 'col3' . to_vector . take (..Last 2)) - t_1.at 'col1' . take (Last 2) . to_vector . should_equal (t_1.at 'col1' . to_vector . take (Last 2)) + t_1.at 'col1' . take (..Last 2) . to_vector . should_equal (t_1.at 'col1' . to_vector . take (..Last 2)) group_builder.specify "should allow taking/dropping a prefix of rows that satisfy a predicate" <| t1 = Table.new [["X", [1, 2, 3, 4, 5, 5]], ["Y", [9, 8, 7, 2, 10, 5]]] - t2 = t1.take (Index_Sub_Range.While row-> row.to_vector.compute Statistic.Sum == 10) + t2 = t1.take (..While row-> row.to_vector.compute Statistic.Sum == 10) t2.at "X" . to_vector . should_equal [1, 2, 3] t2.at "Y" . to_vector . should_equal [9, 8, 7] - t3 = t1.drop (Index_Sub_Range.While row-> row.to_vector.compute Statistic.Sum == 10) + t3 = t1.drop (..While row-> row.to_vector.compute Statistic.Sum == 10) t3.at "X" . to_vector . should_equal [4, 5, 5] t3.at "Y" . to_vector . should_equal [2, 10, 5] - t4 = t1.take (Index_Sub_Range.While row-> row.at "X" < 3) + t4 = t1.take (..While row-> row.at "X" < 3) t4.at "X" . to_vector . should_equal [1, 2] t4.at "Y" . to_vector . should_equal [9, 8] - t5 = t1.drop (Index_Sub_Range.While row-> row.at 1 > 3) + t5 = t1.drop (..While row-> row.at 1 > 3) t5.at "X" . to_vector . should_equal [4, 5, 5] t5.at "Y" . to_vector . should_equal [2, 10, 5] - t6 = t1.take (Index_Sub_Range.While row-> row.at "X" > 1) + t6 = t1.take (..While row-> row.at "X" > 1) t6.at "X" . to_vector . should_equal [] t6.at "Y" . to_vector . should_equal [] diff --git a/test/Visualization_Tests/src/Scatter_Plot_Spec.enso b/test/Visualization_Tests/src/Scatter_Plot_Spec.enso index d304db56a09d..7ab673d92b31 100644 --- a/test/Visualization_Tests/src/Scatter_Plot_Spec.enso +++ b/test/Visualization_Tests/src/Scatter_Plot_Spec.enso @@ -107,7 +107,7 @@ add_specs suite_builder = data = json.get 'data' data.should_be_a Vector data.length . should_equal 10 - (data.take (First 3) . sort on=(_.get "x")).to_text . should_equal '[{"x":0,"y":225}, {"x":15,"y":0}, {"x":29,"y":196}]' + (data.take (..First 3) . sort on=(_.get "x")).to_text . 
should_equal '[{"x":0,"y":225}, {"x":15,"y":0}, {"x":29,"y":196}]' group_builder.specify "filter the elements" <| vector = [0,10,20,30] diff --git a/tools/performance/benchmark-analysis/src/Main.enso b/tools/performance/benchmark-analysis/src/Main.enso index eda93bd6194d..9807313c900c 100644 --- a/tools/performance/benchmark-analysis/src/Main.enso +++ b/tools/performance/benchmark-analysis/src/Main.enso @@ -28,7 +28,7 @@ main = operator19 = operator17.remove_columns ['Approach'] operator21 = operator19.set '"circle"' "Shape" operator20 = operator21.rename_columns ['x' ] - operator22 = operator18.drop (First 40) + operator22 = operator18.drop (..First 40) operator23 = operator22.select_columns ['Approach'] operator24 = operator23.distinct ['Approach'] operator26 = operator24.at 0 From 30d97750bf51e64649d0d2b0f3579d4904d3e701 Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Tue, 9 Jul 2024 00:32:24 +1000 Subject: [PATCH 04/11] Replace `Button variant="custom"` with appropriate variant whenever possible (#10315) Buttons were switched to `variant="custom"` as low hanging fruit in order to remove `UnstyledButton` (which was strictly inferior UX wise). - Switch buttons to use `variant="submit"` or `variant="cancel"` as appropriate - Replace both `ButtonBar` and `HorizontalMenuBar` with `ButtonGroup` # Important Notes None --- .../src/components/styled/ButtonRow.tsx | 39 ---- .../components/styled/HorizontalMenuBar.tsx | 43 ---- .../src/components/styled/SettingsInput.tsx | 2 +- .../layouts/AssetVersions/AssetVersion.tsx | 5 +- .../lib/dashboard/src/layouts/Drive.tsx | 3 +- .../lib/dashboard/src/layouts/DriveBar.tsx | 201 +++++++++--------- .../layouts/Settings/ChangePasswordForm.tsx | 5 +- .../KeyboardShortcutsSettingsSection.tsx | 5 +- .../Settings/MembersSettingsSection.tsx | 5 +- .../Settings/UserGroupsSettingsSection.tsx | 73 +++---- .../src/layouts/Settings/UserRow.tsx | 2 +- .../lib/dashboard/src/layouts/UserBar.tsx | 3 +- .../modals/CaptureKeyboardShortcutModal.tsx | 16 +- .../src/modals/ConfirmDeleteModal.tsx | 20 +- .../src/modals/ConfirmDeleteUserModal.tsx | 5 +- .../src/modals/DuplicateAssetsModal.tsx | 16 +- .../src/modals/ManageLabelsModal.tsx | 8 +- .../dashboard/src/modals/NewLabelModal.tsx | 20 +- .../src/modals/NewUserGroupModal.tsx | 5 +- .../src/modals/UpsertDatalinkModal.tsx | 20 +- .../src/modals/UpsertSecretModal.tsx | 14 +- .../lib/dashboard/tailwind.config.js | 3 - 22 files changed, 174 insertions(+), 339 deletions(-) delete mode 100644 app/ide-desktop/lib/dashboard/src/components/styled/ButtonRow.tsx delete mode 100644 app/ide-desktop/lib/dashboard/src/components/styled/HorizontalMenuBar.tsx diff --git a/app/ide-desktop/lib/dashboard/src/components/styled/ButtonRow.tsx b/app/ide-desktop/lib/dashboard/src/components/styled/ButtonRow.tsx deleted file mode 100644 index 7a072dccaa71..000000000000 --- a/app/ide-desktop/lib/dashboard/src/components/styled/ButtonRow.tsx +++ /dev/null @@ -1,39 +0,0 @@ -/** @file A styled horizontal button row. Does not have padding; does not have a background. */ -import * as React from 'react' - -import FocusArea from '#/components/styled/FocusArea' - -import * as tailwindMerge from '#/utilities/tailwindMerge' - -// ================= -// === ButtonRow === -// ================= - -/** The flex `align-self` of a {@link ButtonRow}. */ -export type ButtonRowPosition = 'center' | 'end' | 'start' - -/** Props for a {@link ButtonRow}. */ -export interface ButtonRowProps extends Readonly { - /** The flex `align-self` of this element. 
Defaults to `start`. */ - readonly position?: ButtonRowPosition -} - -/** A styled horizontal button row. Does not have padding; does not have a background. */ -export default function ButtonRow(props: ButtonRowProps) { - const { children, position = 'start' } = props - const positionClass = - position === 'start' ? 'self-start' : position === 'center' ? 'self-center' : 'self-end' - - return ( - - {innerProps => ( -
- {children} -
- )} -
- ) -} diff --git a/app/ide-desktop/lib/dashboard/src/components/styled/HorizontalMenuBar.tsx b/app/ide-desktop/lib/dashboard/src/components/styled/HorizontalMenuBar.tsx deleted file mode 100644 index cc49824acbe5..000000000000 --- a/app/ide-desktop/lib/dashboard/src/components/styled/HorizontalMenuBar.tsx +++ /dev/null @@ -1,43 +0,0 @@ -/** @file A styled horizontal menu bar. */ -import * as React from 'react' - -import FocusArea from '#/components/styled/FocusArea' - -import * as tailwindVariants from '#/utilities/tailwindVariants' - -// ================= -// === Constants === -// ================= - -const HORIZONTAL_MENU_BAR_VARIANTS = tailwindVariants.tv({ - base: 'flex items-center h-row gap-drive-bar', - variants: { - grow: { true: 'grow' }, - }, -}) - -// ========================= -// === HorizontalMenuBar === -// ========================= - -/** Props for a {@link HorizontalMenuBar}. */ -export interface HorizontalMenuBarProps - extends Readonly, - Readonly> { - readonly className?: string -} - -/** A styled horizontal menu bar. */ -export default function HorizontalMenuBar(props: HorizontalMenuBarProps) { - const { children, ...variantProps } = props - - return ( - - {innerProps => ( -
- {children} -
- )} -
- ) -} diff --git a/app/ide-desktop/lib/dashboard/src/components/styled/SettingsInput.tsx b/app/ide-desktop/lib/dashboard/src/components/styled/SettingsInput.tsx index 8e568a80770e..b901c8e84394 100644 --- a/app/ide-desktop/lib/dashboard/src/components/styled/SettingsInput.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/styled/SettingsInput.tsx @@ -62,7 +62,7 @@ function SettingsInput(props: SettingsInputProps, ref: React.ForwardedRef {opts => (
- + - + { const downloadUrl = await github.getDownloadUrl() if (downloadUrl == null) { diff --git a/app/ide-desktop/lib/dashboard/src/layouts/DriveBar.tsx b/app/ide-desktop/lib/dashboard/src/layouts/DriveBar.tsx index 3c60b28f87ef..dad302532093 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/DriveBar.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/DriveBar.tsx @@ -25,7 +25,6 @@ import StartModal from '#/layouts/StartModal' import * as aria from '#/components/aria' import * as ariaComponents from '#/components/AriaComponents' -import HorizontalMenuBar from '#/components/styled/HorizontalMenuBar' import ConfirmDeleteModal from '#/modals/ConfirmDeleteModal' import UpsertDatalinkModal from '#/modals/UpsertDatalinkModal' @@ -131,139 +130,131 @@ export default function DriveBar(props: DriveBarProps) { switch (category) { case Category.recent: { return ( -
- - {searchBar} - {assetPanelToggle} - -
+ + {searchBar} + {assetPanelToggle} + ) } case Category.trash: { return ( -
- - { - setModal( - - ) - }} - > - {getText('clearTrash')} - - {searchBar} - {assetPanelToggle} - -
+ + { + setModal( + + ) + }} + > + {getText('clearTrash')} + + {searchBar} + {assetPanelToggle} + ) } case Category.cloud: case Category.local: { return ( -
- - - - {getText('startWithATemplate')} - + + + + {getText('startWithATemplate')} + - - + + + { + doCreateProject() + }} + > + {getText('newEmptyProject')} + +
{ - doCreateProject() + doCreateDirectory() }} - > - {getText('newEmptyProject')} - -
+ /> + {isCloud && ( { - doCreateDirectory() - }} - /> - {isCloud && ( - { - setModal() - }} - /> - )} - {isCloud && ( - { - setModal() - }} - /> - )} - { - if (event.currentTarget.files != null) { - doUploadFiles(Array.from(event.currentTarget.files)) - } - // Clear the list of selected files. Otherwise, `onInput` will not be - // dispatched again if the same file is selected. - event.currentTarget.value = '' + setModal() }} /> + )} + {isCloud && ( { - unsetModal() - uploadFilesRef.current?.click() + setModal() }} /> - { - unsetModal() - dispatchAssetEvent({ type: AssetEventType.downloadSelected }) - }} - /> -
- {searchBar} - {assetPanelToggle} - -
+ )} + { + if (event.currentTarget.files != null) { + doUploadFiles(Array.from(event.currentTarget.files)) + } + // Clear the list of selected files. Otherwise, `onInput` will not be + // dispatched again if the same file is selected. + event.currentTarget.value = '' + }} + /> + { + unsetModal() + uploadFilesRef.current?.click() + }} + /> + { + unsetModal() + dispatchAssetEvent({ type: AssetEventType.downloadSelected }) + }} + /> +
+ {searchBar} + {assetPanelToggle} + ) } } diff --git a/app/ide-desktop/lib/dashboard/src/layouts/Settings/ChangePasswordForm.tsx b/app/ide-desktop/lib/dashboard/src/layouts/Settings/ChangePasswordForm.tsx index 746a805c8d3b..78b68f030d08 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/Settings/ChangePasswordForm.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/Settings/ChangePasswordForm.tsx @@ -6,7 +6,6 @@ import * as textProvider from '#/providers/TextProvider' import * as aria from '#/components/aria' import * as ariaComponents from '#/components/AriaComponents' -import ButtonRow from '#/components/styled/ButtonRow' import SettingsInput from '#/components/styled/SettingsInput' import * as eventModule from '#/utilities/event' @@ -94,7 +93,7 @@ export default function ChangePasswordForm() { autoComplete="new-password" /> - + {getText('cancel')} - + ) } diff --git a/app/ide-desktop/lib/dashboard/src/layouts/Settings/KeyboardShortcutsSettingsSection.tsx b/app/ide-desktop/lib/dashboard/src/layouts/Settings/KeyboardShortcutsSettingsSection.tsx index f2b58866394f..ee4be22adace 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/Settings/KeyboardShortcutsSettingsSection.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/Settings/KeyboardShortcutsSettingsSection.tsx @@ -19,7 +19,6 @@ import * as aria from '#/components/aria' import * as ariaComponents from '#/components/AriaComponents' import KeyboardShortcut from '#/components/dashboard/KeyboardShortcut' import FocusArea from '#/components/styled/FocusArea' -import HorizontalMenuBar from '#/components/styled/HorizontalMenuBar' import SvgMask from '#/components/SvgMask' import CaptureKeyboardShortcutModal from '#/modals/CaptureKeyboardShortcutModal' @@ -54,7 +53,7 @@ export default function KeyboardShortcutsSettingsSection() { return ( <> - + { @@ -75,7 +74,7 @@ export default function KeyboardShortcutsSettingsSection() { > {getText('resetAll')} - + {innerProps => (
- + {getText('inviteMembers')} @@ -82,7 +81,7 @@ export default function MembersSettingsSection() {
)} - + diff --git a/app/ide-desktop/lib/dashboard/src/layouts/Settings/UserGroupsSettingsSection.tsx b/app/ide-desktop/lib/dashboard/src/layouts/Settings/UserGroupsSettingsSection.tsx index 2239aa224309..ea93aed78f6f 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/Settings/UserGroupsSettingsSection.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/Settings/UserGroupsSettingsSection.tsx @@ -19,7 +19,6 @@ import * as aria from '#/components/aria' import * as ariaComponents from '#/components/AriaComponents' import * as paywallComponents from '#/components/Paywall' import StatelessSpinner, * as statelessSpinner from '#/components/StatelessSpinner' -import HorizontalMenuBar from '#/components/styled/HorizontalMenuBar' import NewUserGroupModal from '#/modals/NewUserGroupModal' @@ -131,43 +130,41 @@ export default function UserGroupsSettingsSection(props: UserGroupsSettingsSecti return ( <> - -
- {shouldDisplayPaywall && ( - - {getText('newUserGroup')} - - )} - {!shouldDisplayPaywall && ( - { - const rect = event.target.getBoundingClientRect() - const position = { pageX: rect.left, pageY: rect.top } - setModal() - }} - > - {getText('newUserGroup')} - - )} - - {isUnderPaywall && ( - - {userGroupsLeft <= 0 - ? getText('userGroupsPaywallMessage') - : getText('userGroupsLimitMessage', userGroupsLeft)} - - )} -
-
+ + {shouldDisplayPaywall && ( + + {getText('newUserGroup')} + + )} + {!shouldDisplayPaywall && ( + { + const rect = event.target.getBoundingClientRect() + const position = { pageX: rect.left, pageY: rect.top } + setModal() + }} + > + {getText('newUserGroup')} + + )} + + {isUnderPaywall && ( + + {userGroupsLeft <= 0 + ? getText('userGroupsPaywallMessage') + : getText('userGroupsLimitMessage', userGroupsLeft)} + + )} +
{ const rect = event.target.getBoundingClientRect() const position = { pageX: rect.left, pageY: rect.top } @@ -110,7 +111,6 @@ export default function UserRow(props: UserRowProps) { /> ) }} - className="absolute right-full mr-4 size-4 -translate-y-1/2" > diff --git a/app/ide-desktop/lib/dashboard/src/layouts/UserBar.tsx b/app/ide-desktop/lib/dashboard/src/layouts/UserBar.tsx index a9b9b822f30c..16121eef7555 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/UserBar.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/UserBar.tsx @@ -14,7 +14,6 @@ import * as textProvider from '#/providers/TextProvider' import UserMenu from '#/layouts/UserMenu' -import * as aria from '#/components/aria' import * as ariaComponents from '#/components/AriaComponents' import * as paywall from '#/components/Paywall' import Button from '#/components/styled/Button' @@ -124,7 +123,7 @@ export default function UserBar(props: UserBarProps) { ) }} > - {getText('share')} + {getText('share')} )}
{count > 1 && ( - + { doUpdate([firstConflict]) @@ -239,7 +236,6 @@ export default function DuplicateAssetsModal(props: DuplicateAssetsModalProps) { {getText('update')} { doRename([firstConflict]) @@ -259,7 +255,7 @@ export default function DuplicateAssetsModal(props: DuplicateAssetsModalProps) { ? getText('renameNewFile') : getText('renameNewProject')} - + )} )} @@ -277,9 +273,8 @@ export default function DuplicateAssetsModal(props: DuplicateAssetsModalProps) { : getText('andOtherProjects', otherProjectsCount)} )} - + { unsetModal() @@ -290,7 +285,6 @@ export default function DuplicateAssetsModal(props: DuplicateAssetsModalProps) { {count === 1 ? getText('update') : getText('updateAll')} { unsetModal() @@ -306,10 +300,10 @@ export default function DuplicateAssetsModal(props: DuplicateAssetsModalProps) { ? getText('renameNewFiles') : getText('renameNewProjects')} - + {getText('cancel')} - + ) diff --git a/app/ide-desktop/lib/dashboard/src/modals/ManageLabelsModal.tsx b/app/ide-desktop/lib/dashboard/src/modals/ManageLabelsModal.tsx index f18680e2c88f..e2a3659fba99 100644 --- a/app/ide-desktop/lib/dashboard/src/modals/ManageLabelsModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/modals/ManageLabelsModal.tsx @@ -193,15 +193,11 @@ export default function ManageLabelsModal< - - {getText('create')} - + {getText('create')} )} diff --git a/app/ide-desktop/lib/dashboard/src/modals/NewLabelModal.tsx b/app/ide-desktop/lib/dashboard/src/modals/NewLabelModal.tsx index 6efc19077b5b..84dc761d9126 100644 --- a/app/ide-desktop/lib/dashboard/src/modals/NewLabelModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/modals/NewLabelModal.tsx @@ -11,7 +11,6 @@ import * as aria from '#/components/aria' import * as ariaComponents from '#/components/AriaComponents' import ColorPicker from '#/components/ColorPicker' import Modal from '#/components/Modal' -import ButtonRow from '#/components/styled/ButtonRow' import FocusArea from '#/components/styled/FocusArea' import FocusRing from '#/components/styled/FocusRing' @@ -131,25 +130,14 @@ export default function NewLabelModal(props: NewLabelModalProps) { )} - - + + {getText('create')} - + {getText('cancel')} - + ) diff --git a/app/ide-desktop/lib/dashboard/src/modals/NewUserGroupModal.tsx b/app/ide-desktop/lib/dashboard/src/modals/NewUserGroupModal.tsx index a96eba2348fb..a61c73fc98ca 100644 --- a/app/ide-desktop/lib/dashboard/src/modals/NewUserGroupModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/modals/NewUserGroupModal.tsx @@ -10,7 +10,6 @@ import * as textProvider from '#/providers/TextProvider' import * as aria from '#/components/aria' import * as ariaComponents from '#/components/AriaComponents' import Modal from '#/components/Modal' -import ButtonRow from '#/components/styled/ButtonRow' import type Backend from '#/services/Backend' @@ -108,7 +107,7 @@ export default function NewUserGroupModal(props: NewUserGroupModalProps) { {nameError} - + {getText('cancel')} - + ) diff --git a/app/ide-desktop/lib/dashboard/src/modals/UpsertDatalinkModal.tsx b/app/ide-desktop/lib/dashboard/src/modals/UpsertDatalinkModal.tsx index 2cf16f28b83a..e369d8c6e064 100644 --- a/app/ide-desktop/lib/dashboard/src/modals/UpsertDatalinkModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/modals/UpsertDatalinkModal.tsx @@ -11,7 +11,6 @@ import * as aria from '#/components/aria' import * as ariaComponents from '#/components/AriaComponents' import DatalinkInput from '#/components/dashboard/DatalinkInput' import Modal from '#/components/Modal' -import ButtonRow from '#/components/styled/ButtonRow' 
import FocusArea from '#/components/styled/FocusArea' import FocusRing from '#/components/styled/FocusRing' @@ -93,25 +92,14 @@ export default function UpsertDatalinkModal(props: UpsertDatalinkModalProps) {
- - + + {getText('create')} - + {getText('cancel')} - + ) diff --git a/app/ide-desktop/lib/dashboard/src/modals/UpsertSecretModal.tsx b/app/ide-desktop/lib/dashboard/src/modals/UpsertSecretModal.tsx index 073bdd49c887..9b3c9f8c1604 100644 --- a/app/ide-desktop/lib/dashboard/src/modals/UpsertSecretModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/modals/UpsertSecretModal.tsx @@ -13,7 +13,6 @@ import * as aria from '#/components/aria' import * as ariaComponents from '#/components/AriaComponents' import Modal from '#/components/Modal' import Button from '#/components/styled/Button' -import ButtonRow from '#/components/styled/ButtonRow' import FocusArea from '#/components/styled/FocusArea' import FocusRing from '#/components/styled/FocusRing' @@ -123,19 +122,14 @@ export default function UpsertSecretModal(props: UpsertSecretModalProps) { )} - - + + {isCreatingSecret ? getText('create') : getText('update')} - + {getText('cancel')} - + ) diff --git a/app/ide-desktop/lib/dashboard/tailwind.config.js b/app/ide-desktop/lib/dashboard/tailwind.config.js index b69be92fb4fb..7871af9d2b05 100644 --- a/app/ide-desktop/lib/dashboard/tailwind.config.js +++ b/app/ide-desktop/lib/dashboard/tailwind.config.js @@ -598,9 +598,6 @@ inset 0 -36px 51px -51px #00000014`, '.text-subheading': { '@apply text-xl leading-snug py-0.5': '', }, - '.settings-value': { - '@apply leading-cozy h-text py-px px-2': '', - }, }, { respectPrefix: true, From b2c455967838a8ce179c68eb2769f96977389f4f Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Mon, 8 Jul 2024 20:59:50 +0300 Subject: [PATCH 05/11] Persist a subset of IdMap (#10347) close #9257 Changelog: - update: Store in the file only the subset of IdMap containing the IDs used in the metadata section on the 2nd line. The full IdMap is transmitted as a parameter of the `text/applyEdit` request. 
--- app/gui2/shared/languageServer.ts | 10 +++++-- app/gui2/shared/languageServerTypes.ts | 9 ++++++ app/gui2/ydoc-server/languageServerSession.ts | 30 +++++++++++++++---- app/gui2/ydoc-server/serialization.ts | 2 +- 4 files changed, 42 insertions(+), 9 deletions(-) diff --git a/app/gui2/shared/languageServer.ts b/app/gui2/shared/languageServer.ts index 935f94f5d59b..9fd73ae6c62d 100644 --- a/app/gui2/shared/languageServer.ts +++ b/app/gui2/shared/languageServer.ts @@ -14,6 +14,8 @@ import type { ExpressionId, FileEdit, FileSystemObject, + IdMapTriple, + IdMapTuple, Notifications, Path, RegisterOptions, @@ -288,8 +290,12 @@ export class LanguageServer extends ObservableV2> { - return this.request('text/applyEdit', { edit, execute }) + applyEdit( + edit: FileEdit, + execute: boolean, + idMap?: IdMapTriple[] | IdMapTuple[], + ): Promise> { + return this.request('text/applyEdit', { edit, execute, idMap }) } /** [Documentation](https://github.com/enso-org/enso/blob/develop/docs/language-server/protocol-language-server.md#filewrite) */ diff --git a/app/gui2/shared/languageServerTypes.ts b/app/gui2/shared/languageServerTypes.ts index 9aac47a2fb13..c72db55f3b1e 100644 --- a/app/gui2/shared/languageServerTypes.ts +++ b/app/gui2/shared/languageServerTypes.ts @@ -74,6 +74,15 @@ export interface Position { character: number } +interface IdMapSpan { + index: { value: number } + size: { value: number } +} + +export type IdMapTuple = [IdMapSpan, string] + +export type IdMapTriple = [number, number, string] + export type RegisterOptions = { path: Path } | { contextId: ContextId } | {} export interface CapabilityRegistration { diff --git a/app/gui2/ydoc-server/languageServerSession.ts b/app/gui2/ydoc-server/languageServerSession.ts index 6e5a265084f3..d7d5d10997bf 100644 --- a/app/gui2/ydoc-server/languageServerSession.ts +++ b/app/gui2/ydoc-server/languageServerSession.ts @@ -34,7 +34,7 @@ import { translateVisualizationFromFile, } from './edits' import * as fileFormat from './fileFormat' -import { deserializeIdMap, serializeIdMap } from './serialization' +import { deserializeIdMap, idMapToArray, serializeIdMap } from './serialization' import { WSSharedDoc } from './ydoc' const SOURCE_DIR = 'src' @@ -457,6 +457,18 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { } } + private static getIdMapToPersist( + idMap: IdMap | undefined, + metadata: fileFormat.IdeMetadata['node'], + ): IdMap | undefined { + if (idMap === undefined) { + return + } else { + const entriesIntersection = idMap.entries().filter(([, id]) => id in metadata) + return new IdMap(entriesIntersection) + } + } + private sendLsUpdate( synced: EnsoFileParts, newCode: string | undefined, @@ -468,11 +480,17 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> { const code = newCode ?? synced.code const newMetadataJson = newMetadata && - json.stringify({ ...this.syncedMeta, ide: { ...this.syncedMeta.ide, node: newMetadata } }) - const newIdMapJson = newIdMap && serializeIdMap(newIdMap) + json.stringify({ + ...this.syncedMeta, + ide: { ...this.syncedMeta.ide, node: newMetadata }, + }) + const idMapToPersist = + (newIdMap || newMetadata) && + ModulePersistence.getIdMapToPersist(newIdMap, newMetadata ?? this.syncedMeta.ide.node) + const newIdMapToPersistJson = idMapToPersist && serializeIdMap(idMapToPersist) const newContent = combineFileParts({ code, - idMapJson: newIdMapJson ?? synced.idMapJson ?? '[]', + idMapJson: newIdMapToPersistJson ?? synced.idMapJson ?? '[]', metadataJson: newMetadataJson ?? 
synced.metadataJson ?? '{}',
     })
 
@@ -502,7 +520,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> {
 
     const execute = newCode != null || newIdMap != null
     const edit: FileEdit = { path: this.path, edits, oldVersion: this.syncedVersion, newVersion }
-    const apply = this.ls.applyEdit(edit, execute)
+    const apply = this.ls.applyEdit(edit, execute, newIdMap && idMapToArray(newIdMap))
     const handleError = (error: unknown) => {
       console.error('Could not apply edit:', error)
       // Try to recover by reloading the file.
@@ -521,7 +539,7 @@ class ModulePersistence extends ObservableV2<{ removed: () => void }> {
       this.syncedVersion = newVersion
       if (newMetadata) this.syncedMeta.ide.node = newMetadata
       if (newCode) this.syncedCode = newCode
-      if (newIdMapJson) this.syncedIdMap = newIdMapJson
+      if (newIdMapToPersistJson) this.syncedIdMap = newIdMapToPersistJson
       if (newMetadataJson) this.syncedMetaJson = newMetadataJson
       this.setState(LsSyncState.Synchronized)
     }, handleError)
diff --git a/app/gui2/ydoc-server/serialization.ts b/app/gui2/ydoc-server/serialization.ts
index 913c6e3acfc6..f00bb47351ed 100644
--- a/app/gui2/ydoc-server/serialization.ts
+++ b/app/gui2/ydoc-server/serialization.ts
@@ -23,7 +23,7 @@ export function serializeIdMap(map: IdMap): string {
   return json.stringify(idMapToArray(map))
 }
 
-function idMapToArray(map: IdMap): fileFormat.IdMapEntry[] {
+export function idMapToArray(map: IdMap): fileFormat.IdMapEntry[] {
   const entries: fileFormat.IdMapEntry[] = []
   map.entries().forEach(([rangeBuffer, id]) => {
     const decoded = sourceRangeFromKey(rangeBuffer)

From 4b3e4ae15e8e66faf27a810337bfe6d65f8a2827 Mon Sep 17 00:00:00 2001
From: James Dunkerley
Date: Tue, 9 Jul 2024 10:12:23 +0100
Subject: [PATCH 06/11] Rename `Map` to `Dictionary` and `Set` to `Hashset`.
 (#10474)

- Rename `Map` to `Dictionary`.
- Rename `Set` to `Hashset`.
- Add a deprecated placeholder for the static method of `Map`.
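For user code the migration is mechanical; a hypothetical before/after (the `Map` and `Set` spellings are the ones this change deprecates):

```enso
# Before this change:
m = Map.from_vector [["A", 1], ["B", 2]]
s = Set.from_vector [1, 2, 3]

# After this change:
d = Dictionary.from_vector [["A", 1], ["B", 2]]
h = Hashset.from_vector [1, 2, 3]
```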
--- CHANGELOG.md | 2 + .../lib/Standard/AWS/0.0.0-dev/src/S3/S3.enso | 6 +- .../lib/Standard/Base/0.0.0-dev/src/Any.enso | 4 +- .../lib/Standard/Base/0.0.0-dev/src/Data.enso | 4 +- .../Base/0.0.0-dev/src/Data/Dictionary.enso | 425 ++++++++++++ .../0.0.0-dev/src/Data/Filter_Condition.enso | 6 +- .../src/Data/{Set.enso => Hashset.enso} | 70 +- .../Base/0.0.0-dev/src/Data/Json.enso | 14 +- .../0.0.0-dev/src/Data/Json/Extensions.enso | 10 +- .../Standard/Base/0.0.0-dev/src/Data/Map.enso | 443 +----------- .../Base/0.0.0-dev/src/Data/Text/Regex.enso | 27 +- .../0.0.0-dev/src/Data/Text/Regex/Match.enso | 13 +- .../Standard/Base/0.0.0-dev/src/Data/XML.enso | 16 +- .../0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso | 4 +- .../Internal/Enso_File_Helpers.enso | 8 +- .../Internal/Existing_Enso_Asset.enso | 4 +- .../src/Enso_Cloud/Internal/Utils.enso | 6 +- .../src/Internal/Array_Like_Helpers.enso | 6 +- .../lib/Standard/Base/0.0.0-dev/src/Main.enso | 3 +- .../Base/0.0.0-dev/src/Network/HTTP.enso | 18 +- .../0.0.0-dev/src/Network/HTTP/Request.enso | 8 +- .../src/Network/HTTP/Request_Body.enso | 4 +- .../0.0.0-dev/src/Network/HTTP/Response.enso | 2 +- .../Standard/Base/0.0.0-dev/src/Random.enso | 2 - .../Standard/Base/0.0.0-dev/src/Warning.enso | 2 +- .../0.0.0-dev/src/Connection/Connection.enso | 4 +- .../Database/0.0.0-dev/src/DB_Table.enso | 44 +- .../src/Internal/Base_Generator.enso | 21 +- .../Internal/Common/Database_Join_Helper.enso | 7 +- .../Internal/Postgres/Postgres_Dialect.enso | 6 +- .../Postgres/Postgres_Type_Mapping.enso | 4 +- .../src/Internal/SQLite/SQLite_Dialect.enso | 6 +- .../Internal/SQLite/SQLite_Type_Mapping.enso | 12 +- .../0.0.0-dev/src/Internal/Upload_Table.enso | 16 +- .../Standard/Examples/0.0.0-dev/src/Main.enso | 6 +- .../src/Internal/Snowflake_Dialect.enso | 6 +- .../src/Internal/Snowflake_Type_Mapping.enso | 4 +- .../Standard/Table/0.0.0-dev/src/Column.enso | 2 +- .../Conversions/Convertible_To_Columns.enso | 12 +- .../src/Conversions/Convertible_To_Rows.enso | 2 +- .../Standard/Table/0.0.0-dev/src/Errors.enso | 2 +- .../src/Internal/Expand_Objects_Helpers.enso | 29 +- .../Table/0.0.0-dev/src/Internal/Fan_Out.enso | 10 +- .../src/Internal/Lookup_Helpers.enso | 4 +- .../src/Internal/Replace_Helpers.enso | 10 +- .../src/Internal/Split_Tokenize.enso | 2 +- .../0.0.0-dev/src/Internal/Table_Helpers.enso | 36 +- .../src/Internal/Unique_Name_Strategy.enso | 4 +- .../Table/0.0.0-dev/src/Match_Columns.enso | 4 +- .../lib/Standard/Table/0.0.0-dev/src/Row.enso | 8 +- .../Standard/Table/0.0.0-dev/src/Table.enso | 55 +- .../Test/0.0.0-dev/src/Test_Reporter.enso | 2 +- .../0.0.0-dev/src/Scatter_Plot.enso | 2 +- .../0.0.0-dev/src/Table/Visualization.enso | 20 +- .../test/ConversionMethodTests.java | 8 +- .../enso/interpreter/test/MetaObjectTest.java | 132 ++-- .../interpreter/test/ValuesGenerator.java | 44 +- .../node/callable/InvokeConversionNode.java | 5 +- .../node/callable/InvokeMethodNode.java | 2 +- .../interpreter/runtime/builtin/Builtins.java | 8 +- .../runtime/data/hash/EnsoHashMap.java | 6 +- .../data/hash/HashMapContainsKeyNode.java | 2 +- .../runtime/data/hash/HashMapGetNode.java | 4 +- .../runtime/data/hash/HashMapInsertNode.java | 2 +- .../runtime/data/hash/HashMapRemoveNode.java | 2 +- .../runtime/data/hash/HashMapSizeNode.java | 2 +- .../runtime/data/hash/HashMapToTextNode.java | 2 +- .../data/hash/HashMapToVectorNode.java | 2 +- .../runtime/library/dispatch/TypeOfNode.java | 2 +- test/AWS_Tests/src/S3_Spec.enso | 2 +- test/Base_Tests/src/Data/Dictionary_Spec.enso | 633 
+++++++++++++++++ .../Data/{Set_Spec.enso => Hashset_Spec.enso} | 49 +- test/Base_Tests/src/Data/Json_Spec.enso | 8 +- test/Base_Tests/src/Data/Map_Spec.enso | 637 ------------------ test/Base_Tests/src/Data/Text/Regex_Spec.enso | 2 +- test/Base_Tests/src/Data/XML/XML_Spec.enso | 4 +- test/Base_Tests/src/Main.enso | 8 +- .../src/Network/Http/Request_Spec.enso | 2 +- test/Base_Tests/src/Network/Http_Spec.enso | 4 +- test/Benchmarks/src/Collections.enso | 2 +- test/Benchmarks/src/Map/Hash_Map.enso | 4 +- test/Examples_Tests/src/Examples_Spec.enso | 4 +- test/Snowflake_Tests/src/Snowflake_Spec.enso | 4 +- .../Column_Operations_Spec.enso | 2 +- .../Cross_Tab_Spec.enso | 2 +- .../Join/Replace_Spec.enso | 10 +- .../Map_To_Table_Spec.enso | 20 +- .../Select_Columns_Spec.enso | 34 +- .../src/Database/Postgres_Spec.enso | 6 +- .../Table_Tests/src/Database/SQLite_Spec.enso | 4 +- .../Types/SQLite_Type_Mapping_Spec.enso | 4 +- .../Table_Tests/src/Database/Upload_Spec.enso | 6 +- .../Helpers/Unique_Naming_Strategy_Spec.enso | 2 +- .../src/In_Memory/Table_Conversion_Spec.enso | 12 +- .../Table_Tests/src/In_Memory/Table_Spec.enso | 2 +- 95 files changed, 1585 insertions(+), 1552 deletions(-) create mode 100644 distribution/lib/Standard/Base/0.0.0-dev/src/Data/Dictionary.enso rename distribution/lib/Standard/Base/0.0.0-dev/src/Data/{Set.enso => Hashset.enso} (67%) create mode 100644 test/Base_Tests/src/Data/Dictionary_Spec.enso rename test/Base_Tests/src/Data/{Set_Spec.enso => Hashset_Spec.enso} (55%) delete mode 100644 test/Base_Tests/src/Data/Map_Spec.enso diff --git a/CHANGELOG.md b/CHANGELOG.md index 2737eabd722d..73e8f9102e10 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -14,11 +14,13 @@ `Location.Right`.][10445] - [Renamed `Postgres_Details.Postgres` to `Postgres.Server`.][10466] - [Remove `First` and `Last` from namespace, use auto-scoped.][10467] +- [Rename `Map` to `Dictionary` and `Set` to `Hashset`.][10474] [10434]: https://github.com/enso-org/enso/pull/10434 [10445]: https://github.com/enso-org/enso/pull/10445 [10466]: https://github.com/enso-org/enso/pull/10466 [10467]: https://github.com/enso-org/enso/pull/10467 +[10474]: https://github.com/enso-org/enso/pull/10474 # Enso 2024.2 diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3.enso index c0661cefba83..cf57e3339160 100644 --- a/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/S3/S3.enso @@ -96,11 +96,11 @@ read_bucket bucket prefix="" credentials:AWS_Credential=AWS_Credential.Default d - key: the key of the object. - credentials: AWS credentials. If not provided, the default credentials will be used. -head : Text -> Text -> AWS_Credential -> Map Text Any ! S3_Error +head : Text -> Text -> AWS_Credential -> Dictionary Text Any ! S3_Error head bucket key="" credentials:AWS_Credential=AWS_Credential.Default = response = raw_head bucket key credentials pairs = response.sdkFields.map f-> [f.memberName, f.getValueOrDefault response] - Map.from_vector pairs + Dictionary.from_vector pairs ## PRIVATE Gets the raw metadata of a bucket or object. @@ -109,7 +109,7 @@ head bucket key="" credentials:AWS_Credential=AWS_Credential.Default = - bucket: the name of the bucket. - key: the key of the object. - credentials: AWS credentials. -raw_head : Text -> Text -> AWS_Credential -> Map Text Any ! S3_Error +raw_head : Text -> Text -> AWS_Credential -> Dictionary Text Any ! 
S3_Error raw_head bucket key credentials = client = make_client_for_bucket bucket credentials case key == "" of diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso index 101ac05ab2f7..d03f77d955cf 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Any.enso @@ -392,8 +392,8 @@ type Any from Standard.Examples import Example_Error_Type example_map_error = - my_map = Map.empty - error = my_map.at "x" + my_dictionary = Dictionary.empty + error = my_dictionary.at "x" error.map_error (_ -> Example_Error_Type "x is missing") map_error : (Error -> Error) -> Any map_error self ~f = diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso index bcd2d0f41d09..e17fb60fa865 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data.enso @@ -310,7 +310,7 @@ fetch (uri:(URI | Text)) (method:HTTP_Method=HTTP_Method.Get) (headers:(Vector ( import Standard.Base.Data test_file = enso_project.data / "sample.png" - form_data = Map.from_vector [["key", "val"], ["a_file", test_file]] + form_data = Dictionary.from_vector [["key", "val"], ["a_file", test_file]] response = Data.post url_post (Request_Body.Form_Data form_data) > Example @@ -318,7 +318,7 @@ fetch (uri:(URI | Text)) (method:HTTP_Method=HTTP_Method.Get) (headers:(Vector ( import Standard.Base.Data test_file = enso_project.data / "sample.txt" - form_data = Map.from_vector [["key", "val"], ["a_file", test_file]] + form_data = Dictionary.from_vector [["key", "val"], ["a_file", test_file]] response = Data.post url_post (Request_Body.Form_Data form_data url_encoded=True) @uri Text_Input @response_format Data_Read_Helpers.format_widget_with_raw_response diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Dictionary.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Dictionary.enso new file mode 100644 index 000000000000..14fadddf3746 --- /dev/null +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Dictionary.enso @@ -0,0 +1,425 @@ +import project.Any.Any +import project.Data.Numbers.Integer +import project.Data.Pair.Pair +import project.Data.Text.Text +import project.Data.Vector.Vector +import project.Error.Error +import project.Errors.Illegal_Argument.Illegal_Argument +import project.Errors.No_Such_Key.No_Such_Key +import project.Nothing.Nothing +import project.Panic.Panic +from project.Data.Boolean import Boolean, False, True +from project.Data.Text.Extensions import all + +## A key-value store. It is possible to use any type as keys and values and mix + them in one Dictionary. Keys are checked for equality based on their hash + code and `==` operator, which is both an internal part of Enso. Enso is + capable of computing a hash code, and checking for equality any objects that + can appear in Enso - primitives, Atoms, values coming from different + languages, etc. + + For keys that are not reflexive, like `Number.nan`, + [Same Value equality specification](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Equality_comparisons_and_sameness#same-value-zero_equality) + is used. This means that both `Number.nan` and types with comparators that + violate reflexivity (e.g. their `compare` method always returns `Nothing`) + can be used as keys in the Dictionary. + + A single key-value pair is called an *entry*. 
+
+   It is possible to pass a Dictionary created in Enso to foreign functions,
+   where it will be treated as appropriate map structures - in Python that is a
+   dictionary, and in JavaScript, it is a `Map`. And likewise, it is possible
+   to pass a foreign map into Enso, where it will be treated as a Dictionary.
+@Builtin_Type
+type Dictionary key value
+    ## PRIVATE
+       ADVANCED
+       Returns an empty dictionary.
+    empty : Dictionary
+    empty = @Builtin_Method "Dictionary.empty"
+
+    ## PRIVATE
+       ADVANCED
+       Returns a single-element dictionary with the given key and value.
+       A call to `Dictionary.singleton key value` is the same as a call to
+       `Dictionary.empty.insert key value`.
+
+       Arguments:
+       - key: The key to use for `value` in the dictionary.
+       - value: The value to store under `key` in the dictionary.
+
+       > Example
+         Create a single element dictionary storing the key "my_key" and the
+         value 2.
+
+             example_singleton = Dictionary.singleton "my_key" 2
+    singleton : Any -> Any -> Dictionary
+    singleton key value = Dictionary.empty.insert key value
+
+    ## ALIAS dictionary, lookup table
+       GROUP Constants
+       ICON convert
+       Builds a dictionary from two Vectors. The first vector contains the keys,
+       and the second vector contains the values. The two vectors must be of the
+       same length.
+
+       Arguments:
+       - keys: A vector of keys.
+       - values: A vector of values.
+       - error_on_duplicates: A flag which specifies if duplicate keys on the
+         input vector should result in an error. By default, set to `True`,
+         meaning that if two entries in the vector share the same key, an
+         `Illegal_Argument` error is raised. If set to `False`, the last entry
+         with a given key will be kept.
+    from_keys_and_values : Vector Any -> Vector Any -> Boolean -> Dictionary ! Illegal_Argument
+    from_keys_and_values keys:Vector values:Vector error_on_duplicates:Boolean=True =
+        if keys.length != values.length then Error.throw (Illegal_Argument.Error "`Dictionary.from_keys_and_values` encountered two vectors of different lengths.") else
+            keys.fold_with_index Dictionary.empty current-> idx-> key->
+                if error_on_duplicates.not || (current.contains_key key . not) then current.insert key (values.at idx) else
+                    Error.throw (Illegal_Argument.Error "`Dictionary.from_keys_and_values` encountered duplicate key: "+key.to_display_text)
+
+    ## ALIAS dictionary, lookup table
+       GROUP Constants
+       ICON convert
+       Builds a dictionary from a vector of key-value pairs, with each key-value
+       pair represented as a 2 element vector.
+
+       Arguments:
+       - vec: A vector of key-value pairs (2 element vectors).
+       - error_on_duplicates: A flag which specifies if duplicate keys on the
+         input vector should result in an error. By default, set to `True`,
+         meaning that if two entries in the vector share the same key, an
+         `Illegal_Argument` error is raised. If set to `False`, the last entry
+         with a given key will be kept.
+
+       > Example
+         Building a dictionary containing two key-value pairs.
+
+             example_from_vector = Dictionary.from_vector [["A", 1], ["B", 2]]
+    from_vector : Vector Any -> Boolean -> Dictionary ! Illegal_Argument
+    from_vector vec error_on_duplicates=True =
+        vec.fold Dictionary.empty m-> el-> if el.length != 2 then Error.throw (Illegal_Argument.Error "`Dictionary.from_vector` encountered an invalid value. Each value in the vector has to be a key-value pair - it must have exactly 2 elements.") else
+            key = el.at 0
+            value = el.at 1
+            if error_on_duplicates.not || (m.contains_key key . 
not) then m.insert key value else + Error.throw (Illegal_Argument.Error "`Dictionary.from_vector` encountered duplicate key: "+key.to_display_text) + + ## GROUP Logical + ICON metadata + Returns True if the Dictionary is empty, i.e. does not have any entries. + is_empty : Boolean + is_empty self = self.size == 0 + + ## GROUP Logical + ICON metadata + Returns True if the Dictionary is not empty, i.e. has at least one entry. + not_empty : Boolean + not_empty self = self.is_empty.not + + ## GROUP Metadata + ICON metadata + Returns the number of entries in this dictionary. + size : Integer + size self = @Builtin_Method "Dictionary.size" + + ## GROUP Metadata + ICON metadata + Returns the number of entries in this dictionary. + length : Integer + length self = self.size + + ## GROUP Calculations + ICON row_add + Inserts a key-value mapping into this dictionary, overriding any existing + instance of `key` with the new `value`. + + Note that since the return type is also a `Dictionary`, multiple `insert` + calls can be chained, e.g., `dictionary.insert "A" 1 . insert "B" 2`. + + Due to the limitation of the current implementation, inserts with a + key that is already contained in the dictionary, or insert on a + dictionary instance that is re-used in other computations, have a linear + time complexity. For all the other cases, the time complexity of this + method is constant. + + Arguments: + - key: The key to insert the value for. + - value: The value to associate with the `key`. + + > Example + Insert the value "seven" into the dictionary for the key 7. + + import Standard.Examples + + example_insert = Examples.dictionary.insert 7 "seven" + insert : Any -> Any -> Dictionary + insert self key value = @Builtin_Method "Dictionary.insert" + + ## GROUP Selections + ICON table_clean + Removes an entry specified by the given key from this dictionary, and + returns a new dictionary without this entry. Throw `No_Such_Key.Error` if + `key` is not present. + + Arguments: + - key: The key to look up in the dictionary. + + > Example + Remove key "A" from a dictionary + + import Standard.Examples + + Examples.dictionary.remove "A" + remove : Any -> Dictionary ! No_Such_Key + remove self key = + Panic.catch Any (self.remove_builtin key) _-> + Error.throw (No_Such_Key.Error self key) + + ## GROUP Selections + ICON parse3 + Gets the value associated with `key` in this dictionary, or throws a + `No_Such_Key.Error` if `key` is not present. + + This method has a constant time complexity. + + Arguments: + - key: The key to look up in the dictionary. + + > Example + Looks up the value for the key "A" in a dictionary. + + import Standard.Examples + + example_at = Examples.dictionary.at "A" + at : Any -> Any ! No_Such_Key + at self key = self.get key (Error.throw (No_Such_Key.Error self key)) + + ## ICON parse3 + Gets the value associated with `key` in this dictionary, or returns + `if_missing` if it isn't present. + + This method has a constant time complexity. + + Arguments: + - key: The key to look up in the dictionary. + - if_missing: The value to use if the key isn't present. + + > Example + Get the value for the key 2 in a dictionary or instead return "zero" if it + isn't present. + + import Standard.Examples + + example_get = Examples.dictionary.get 2 "zero" + get : Any -> Any -> Any + get self key ~if_missing=Nothing = self.get_builtin key if_missing + + ## GROUP Logical + ICON preparation + Returns True iff the Dictionary contains the given `key`. 
+    contains_key : Any -> Boolean
+    contains_key self key = @Builtin_Method "Dictionary.contains_key"
+
+    ## GROUP Selections
+       ICON select_column
+       Returns an unsorted vector of all the keys in this Dictionary.
+    keys : Vector Any
+    keys self = self.to_vector.map pair-> pair.at 0
+
+    ## GROUP Selections
+       ICON select_column
+       Returns an unsorted vector of all the values in this Dictionary.
+    values : Vector Any
+    values self = self.to_vector.map pair-> pair.at 1
+
+    ## ICON column_add
+       Maps a function over each value in this dictionary.
+
+       Arguments:
+       - function: The function to apply to each value in the dictionary, taking
+         a value and returning a value.
+
+       > Example
+         Append "_word" to all values in the dictionary.
+
+             import Standard.Examples
+
+             example_map = Examples.dictionary.map (+ "_word")
+    map : (Any -> Any) -> Dictionary
+    map self function =
+        kv_func = _ -> function
+        self.map_with_key kv_func
+
+    ## ICON column_add
+       Maps a function over each key-value pair in the dictionary, transforming
+       the value.
+
+       Arguments:
+       - function: Function to apply to each key and value in the dictionary,
+         taking a key and a value and returning a value.
+
+       > Example
+         Prepend the keys to the values in the dictionary.
+
+             import Standard.Examples
+
+             example_map_with_key =
+                Examples.dictionary.map_with_key (k -> v -> k.to_text + "-" + v)
+    map_with_key : (Any -> Any -> Any) -> Dictionary
+    map_with_key self function =
+        Dictionary.from_vector <| self.to_vector.map pair->
+            key = pair.first
+            value = pair.last
+            [key, (function key value)]
+
+    ## ICON column_add
+       Maps a function over each key in this dictionary.
+
+       Arguments:
+       - function: The function to apply to each key in the dictionary, taking a
+         key and returning a new key.
+
+       > Example
+         Doubling all keys in the dictionary.
+
+             import Standard.Examples
+
+             example_map_keys = Examples.dictionary.map_keys *2
+    map_keys : (Any -> Any) -> Dictionary
+    map_keys self function =
+        trans_function = k -> v -> [function k, v]
+        self.transform trans_function
+
+    ## ICON column_add
+       Transforms the dictionary's keys and values to create a new dictionary.
+
+       Arguments:
+       - function: The function used to transform the dictionary, taking a key
+         and a value and returning a pair of `[key, value]`.
+
+       ! Error Conditions
+         - If multiple dictionary entries end up with duplicate keys after the
+           transformation, an `Illegal_Argument.Error` is thrown.
+
+       > Example
+         Turn all keys into `Text` and append "_word" to the values in the
+         dictionary.
+
+             import Standard.Examples
+
+             example_transform =
+                Examples.dictionary.transform (k -> v -> [k.to_text, v + "_word"])
+    transform : (Any -> Any -> [Any, Any]) -> Dictionary
+    transform self function =
+        func_pairs = p -> function (p.at 0) (p.at 1)
+        vec_transformed = self.to_vector.map func_pairs
+        new_dictionary = Dictionary.from_vector vec_transformed error_on_duplicates=True
+        new_dictionary.catch Illegal_Argument error->
+            case error.message.starts_with "`Dictionary.from_vector` encountered duplicate key" of
+                True ->
+                    new_message = error.message.replace "from_vector" "transform"
+                    Error.throw (Illegal_Argument.Error new_message error.cause)
+                False -> new_dictionary
+
+    ## ICON transform4
+       Combines the values in the dictionary.
+
+       Arguments:
+       - init: The initial value for the fold.
+       - function: A binary function to apply to pairs of values.
+
+       > Example
+         Find the length of the longest word in the dictionary. 
+
+             import Standard.Examples
+
+             example_fold = Examples.dictionary.fold 0 (l -> r -> l.max r.length)
+    fold : Any -> (Any -> Any -> Any) -> Any
+    fold self init function = self.values.fold init function
+
+    ## ICON transform4
+       Combines the key-value pairs in the dictionary.
+
+       Arguments:
+       - init: The initial value for the fold.
+       - function: A function taking the left value, the current key, and the
+         current value, and combining them to yield a single value.
+
+       > Example
+         Glue the values in the dictionary together with the keys.
+
+             import Standard.Examples
+
+             example_fold_with_key =
+                Examples.dictionary.fold_with_key "" (l -> k -> v -> l + k.to_text + v)
+    fold_with_key : Any -> (Any -> Any -> Any -> Any) -> Any
+    fold_with_key self init function =
+        self.to_vector.fold init acc-> pair->
+            function acc pair.first pair.last
+
+    ## PRIVATE
+       ADVANCED
+       Applies a function to each value in the dictionary.
+
+       Arguments:
+       - function: The function to apply to each value in the dictionary, taking
+         a value and returning anything.
+
+       This method does not return the results, so it is only useful for
+       performing computations with side-effects.
+
+       If the function returns a dataflow error, the error is converted to a
+       panic and thrown immediately, stopping further processing.
+
+       > Example
+         Printing each value in the dictionary.
+
+             import Standard.Examples
+
+             example_each = Examples.dictionary.each IO.println
+    each : (Any -> Any) -> Nothing
+    each self function =
+        kv_func = _ -> function
+        self.each_with_key kv_func
+
+    ## PRIVATE
+       ADVANCED
+       Applies a function to each key-value pair in the dictionary.
+
+       Arguments:
+       - function: The function to apply to each key-value pair in the
+         dictionary, taking a key and a value and returning anything.
+
+       This method does not return the results, so it is only useful for
+       performing computations with side-effects.
+
+       > Example
+         Printing each key and value in the dictionary.
+
+             import Standard.Examples
+
+             example_each_with_key = Examples.dictionary.each_with_key k->v->
+                 IO.println k
+                 IO.println v
+    each_with_key : (Any -> Any -> Any) -> Nothing
+    each_with_key self function =
+        self.to_vector.each pair->
+            function pair.first pair.last
+
+    ## GROUP Conversions
+       ICON convert
+       Returns an unsorted vector of key-value pairs (nested 2 element vectors).
+       The `Dictionary.from_vector` method is the inverse of this method, so the
+       following expression is true for all dictionaries:
+       `Dictionary.from_vector dictionary.to_vector == dictionary`.
+    to_vector : Vector Any
+    to_vector self = @Builtin_Method "Dictionary.to_vector"
+
+    ## PRIVATE
+       Returns a text representation of this Dictionary. 
+ to_text : Text + to_text self = @Builtin_Method "Dictionary.to_text" + + ## PRIVATE + get_builtin : Any -> Any -> Any + get_builtin self key ~if_missing = @Builtin_Method "Dictionary.get_builtin" diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso index 528025b48441..2a637f9ff329 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Filter_Condition.enso @@ -1,7 +1,7 @@ import project.Any.Any +import project.Data.Hashset.Hashset import project.Data.Locale.Locale import project.Data.Numbers.Number -import project.Data.Set.Set import project.Data.Text.Case_Sensitivity.Case_Sensitivity import project.Data.Text.Regex.Regex import project.Data.Text.Text @@ -198,9 +198,7 @@ type Filter_Condition Like sql_pattern _ -> regex = sql_like_to_regex sql_pattern handle_nothing <| regex.matches - Is_In values _ -> - set = Set.from_vector values - set.contains + Is_In values _ -> Hashset.from_vector values . contains if self.action == Filter_Action.Keep then base else v -> (base v).not ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Set.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Hashset.enso similarity index 67% rename from distribution/lib/Standard/Base/0.0.0-dev/src/Data/Set.enso rename to distribution/lib/Standard/Base/0.0.0-dev/src/Data/Hashset.enso index 2fcd474f2cf2..f73dffc68981 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Set.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Hashset.enso @@ -1,6 +1,6 @@ import project.Any.Any import project.Data.Array_Proxy.Array_Proxy -import project.Data.Map.Map +import project.Data.Dictionary.Dictionary import project.Data.Numbers.Integer import project.Data.Ordering.Comparable import project.Data.Ordering.Ordering @@ -13,9 +13,9 @@ from project.Data.Boolean import Boolean, False, True from project.Data.Text.Extensions import all ## An unordered collection of unique values. -type Set +type Hashset ## PRIVATE - Value (underlying_map : Map Any Nothing) + Value (underlying_dictionary : Dictionary Any Nothing) ## PRIVATE ADVANCED @@ -28,30 +28,30 @@ type Set occurrence of each duplicated element is retained in the set. If set to `True` it will raise an `Illegal_Argument` if duplicate elements are encountered. - from_vector : Vector Any -> Boolean -> Set ! Illegal_Argument + from_vector : Vector Any -> Boolean -> Hashset ! Illegal_Argument from_vector (vector : Vector) (error_on_duplicates : Boolean = False) = pairs_array = Array_Proxy.new vector.length (i-> [vector.at i, Nothing]) pairs = Vector.from_polyglot_array pairs_array - map = Map.from_vector pairs error_on_duplicates=error_on_duplicates - Set.Value map + dictionary = Dictionary.from_vector pairs error_on_duplicates=error_on_duplicates + Hashset.Value dictionary ## PRIVATE ADVANCED Constructs an empty set. - empty : Set - empty = Set.Value Map.empty + empty : Hashset + empty = Hashset.Value Dictionary.empty ## GROUP Conversions ICON convert Returns a vector containing all elements of this set. to_vector : Vector - to_vector self = self.underlying_map.keys + to_vector self = self.underlying_dictionary.keys ## GROUP Metadata ICON metadata Returns the number of elements in this set. 
size : Integer - size self = self.underlying_map.size + size self = self.underlying_dictionary.size ## GROUP Metadata ICON metadata @@ -63,19 +63,19 @@ type Set ICON metadata Checks if the set is empty. is_empty : Boolean - is_empty self = self.underlying_map.is_empty + is_empty self = self.underlying_dictionary.is_empty ## GROUP Logical ICON metadata Checks if the set is not empty. not_empty : Boolean - not_empty self = self.underlying_map.not_empty + not_empty self = self.underlying_dictionary.not_empty ## GROUP Logical ICON preparation Checks if this set contains a given value. contains : Any -> Boolean - contains self value = self.underlying_map.contains_key value + contains self value = self.underlying_dictionary.contains_key value ## GROUP Logical ICON preparation @@ -103,48 +103,48 @@ type Set GROUP Calculations ICON row_add Adds a value to this set. - insert : Any -> Set + insert : Any -> Hashset insert self value = - new_map = self.underlying_map.insert value Nothing - Set.Value new_map + dictionary = self.underlying_dictionary.insert value Nothing + Hashset.Value dictionary ## GROUP Calculations ICON union Creates a union of the two sets. - union : Set -> Set - union self (other : Set) = - start_map = self.underlying_map - new_map = other.to_vector.fold start_map m-> el-> m.insert el Nothing - Set.Value new_map + union : Hashset -> Hashset + union self (other : Hashset) = + start_dictionary = self.underlying_dictionary + dictionary = other.to_vector.fold start_dictionary m-> el-> m.insert el Nothing + Hashset.Value dictionary ## GROUP Calculations ICON join Creates an intersection of the two sets. - intersection : Set -> Set - intersection self (other : Set) = - other_map = other.underlying_map - new_map = self.underlying_map.keys.fold Map.empty m-> el-> - if other_map.contains_key el then m.insert el Nothing else m - Set.Value new_map + intersection : Hashset -> Hashset + intersection self (other : Hashset) = + other_dictionary = other.underlying_dictionary + dictionary = self.underlying_dictionary.keys.fold Dictionary.empty m-> el-> + if other_dictionary.contains_key el then m.insert el Nothing else m + Hashset.Value dictionary ## ICON join Computes a set difference. Returns the set that contains all elements of this set that are not in the other set. - difference : Set -> Set - difference self (other : Set) = - other_map = other.underlying_map - new_map = self.underlying_map.keys.fold Map.empty m-> el-> - if other_map.contains_key el then m else m.insert el Nothing - Set.Value new_map + difference : Hashset -> Hashset + difference self (other : Hashset) = + other_dictionary = other.underlying_dictionary + dictionary = self.underlying_dictionary.keys.fold Dictionary.empty m-> el-> + if other_dictionary.contains_key el then m else m.insert el Nothing + Hashset.Value dictionary ## PRIVATE to_text : Text - to_text self = self.to_vector.map .pretty . join ", " "Set{" "}" + to_text self = self.to_vector.map .pretty . 
join ", " "Hashset{" "}" ## PRIVATE -type Set_Comparator +type Hashset_Comparator ## PRIVATE compare x y = if x.size != y.size then Nothing else diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json.enso index c9c689ca2e70..07fd19969dd6 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json.enso @@ -2,7 +2,7 @@ import project.Any.Any import project.Data.Array.Array import project.Data.Array_Proxy.Array_Proxy import project.Data.Decimal.Decimal -import project.Data.Map.Map +import project.Data.Dictionary.Dictionary import project.Data.Numbers.Float import project.Data.Numbers.Integer import project.Data.Numbers.Number @@ -252,13 +252,13 @@ type JS_Object ## GROUP Logical ICON metadata - Returns True iff the Map is empty, i.e., does not have any entries. + Returns True if the JS_Object is empty, i.e., does not have any entries. is_empty : Boolean is_empty self = self.length == 0 ## GROUP Logical ICON metadata - Returns True iff the Map is not empty, i.e., has at least one entry. + Returns True if the JS_Object is not empty, i.e., has at least one entry. not_empty : Boolean not_empty self = self.is_empty.not @@ -304,10 +304,10 @@ type JS_Object Creates an Enso object from the JS_Object. into : Any -> Any into self target_type = case target_type of - JS_Object -> self - Vector -> self.to_vector - Map -> Map.from_vector self.to_vector - _ -> + JS_Object -> self + Vector -> self.to_vector + Dictionary -> Dictionary.from_vector self.to_vector + _ -> ## First try a conversion Panic.catch No_Such_Conversion (self.to target_type) _-> ## If that fails, try to construct the type diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso index 33a5497e31c0..53aa4bd0a584 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Json/Extensions.enso @@ -2,10 +2,10 @@ import project.Any.Any import project.Data.Array.Array import project.Data.Array_Proxy.Array_Proxy import project.Data.Decimal.Decimal +import project.Data.Dictionary.Dictionary import project.Data.Json.JS_Object import project.Data.Json.Json import project.Data.Locale.Locale -import project.Data.Map.Map import project.Data.Numbers.Float import project.Data.Numbers.Integer import project.Data.Numbers.Number @@ -182,10 +182,10 @@ Locale.to_js_object self = For Map, this is serialized as a Vector of Key-Value pairs. Enso Maps support arbitrary types as map keys, so we cannot serialize them into JS Objects because there only strings are accepted as keys. 
-Map.to_js_object : JS_Object
-Map.to_js_object self =
-    map_vector = self.to_vector
-    map_vector.map p-> [p.first.to_js_object, p.second.to_js_object]
+Dictionary.to_js_object : JS_Object
+Dictionary.to_js_object self =
+    as_vector = self.to_vector
+    as_vector.map p-> [p.first.to_js_object, p.second.to_js_object]
 
 ## PRIVATE
    ICON convert
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso
index e9075f8901ff..f6508fc67a88 100644
--- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso
+++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Map.enso
@@ -1,436 +1,37 @@
 import project.Any.Any
-import project.Data.Numbers.Integer
-import project.Data.Pair.Pair
-import project.Data.Text.Text
 import project.Data.Vector.Vector
 import project.Error.Error
-import project.Errors.Illegal_Argument.Illegal_Argument
-import project.Errors.No_Such_Key.No_Such_Key
-import project.Nothing.Nothing
-import project.Panic.Panic
+import project.Errors.Deprecated.Deprecated
 from project.Data.Boolean import Boolean, False, True
-from project.Data.Text.Extensions import all
 
-## A key-value store. It is possible to use any type as keys and values and mix them in
-   one Map. Keys are checked for equality based on their hash code and `==` operator, which
-   is both an internal part of Enso. Enso is capable of computing a hash code, and checking
-   for equality any objects that can appear in Enso - primitives, Atoms, values coming from
-   different languages, etc.
-
-   For keys that are not reflexive, like `Number.nan`,
-   [Same Value equality specification](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Equality_comparisons_and_sameness#same-value-zero_equality)
-   is used. This means that both `Number.nan` and types with comparators that violate
-   reflexivity (e.g. their `compare` method always returns `Nothing`) can be used as keys
-   in the Map.
-
-   A single key-value pair is called an *entry*.
-
-   It is possible to pass a Map created in Enso to foreign functions, where it will be treated
-   as appropriate map structures - in Python that is a dictionary, and in JavaScript, it is
-   a `Map`. And likewise, it is possible to pass a foreign map into Enso, where it will be
-   treated as a Map.
-@Builtin_Type
+## PRIVATE
+   Deprecated placeholder for the Map type.
 type Map key value
     ## PRIVATE
-       ADVANCED
-       Returns an empty map.
-    empty : Map
-    empty = @Builtin_Method "Map.empty"
+       DEPRECATED Use Dictionary.empty instead.
+    empty : Any ! Deprecated
+    empty =
+        Error.throw (Deprecated.Warning "Standard.Base.Data.Map.Map" "empty" "Deprecated: `Map.empty` has been replaced by `Dictionary.empty`.")
 
     ## PRIVATE
-       ADVANCED
-       Returns a single-element map with the given key and value.
-       A Call to `Map.singleton key value` is the same as a call to
-       `Map.empty.insert key value`.
-
-       Arguments:
-       - key: The key to to use for `value` in the map.
-       - value: The value to store under 'key' in the map.
-
-       > Example
-         Create a single element map storing the key "my_key" and the value 2.
-
-             import Standard.Base.Data.Map.Map
-
-             example_singleton = Map.singleton "my_key" 2
-    singleton : Any -> Any -> Map
-    singleton key value = Map.empty.insert key value
+       DEPRECATED Use Dictionary.singleton instead.
+    singleton : Any -> Any -> Any ! 
Deprecated + singleton key value = + _ = [key, value] + Error.throw (Deprecated.Warning "Standard.Base.Data.Map.Map" "singleton" "Deprecated: `Map.singleton` has been replaced by `Dictionary.singleton`.") - ## ALIAS dictionary, lookup table - GROUP Constants + ## PRIVATE ICON convert - Builds a map from two Vectors. The first vector contains the keys, and - the second vector contains the values. The two vectors must be of the - same length. - - Arguments: - - keys: A vector of keys. - - values: A vector of values. - - error_on_duplicates: A flag which specifies if duplicate keys on the - input vector should result in an error. By default, set to `True`, - meaning that if two entries in the vector share the same key, an - `Illegal_Argument` error is raised. If set to `False`, the last entry - with a given key will be kept. - from_keys_and_values : Vector Any -> Vector Any -> Boolean -> Map ! Illegal_Argument + DEPRECATED Use Dictionary.from_keys_and_values instead. + from_keys_and_values : Vector Any -> Vector Any -> Boolean -> Any ! Deprecated from_keys_and_values keys:Vector values:Vector error_on_duplicates:Boolean=True = - if keys.length != values.length then Error.throw (Illegal_Argument.Error "`Map.from_keys_and_values` encountered two vectors of different lengths.") else - keys.fold_with_index Map.empty current-> idx-> key-> - if error_on_duplicates.not || (current.contains_key key . not) then current.insert key (values.at idx) else - Error.throw (Illegal_Argument.Error "`Map.from_keys_and_values` encountered duplicate key: "+key.to_display_text) - - ## ALIAS dictionary, lookup table - GROUP Constants - ICON convert - Builds a map from a vector of key-value pairs, with each key-value pair - represented as a 2 element vector. - - Arguments: - - vec: A vector of key-value pairs (2 element vectors). - - error_on_duplicates: A flag which specifies if duplicate keys on the - input vector should result in an error. By default, set to `True`, - meaning that if two entries in the vector share the same key, an - `Illegal_Argument` error is raised. If set to `False`, the last entry - with a given key will be kept. - - > Example - Building a map containing two key-value pairs. - - import Standard.Base.Data.Map.Map - - example_from_vector = Map.from_vector [["A", 1], ["B", 2]] - from_vector : Vector Any -> Boolean -> Map ! Illegal_Argument - from_vector vec error_on_duplicates=True = - vec.fold Map.empty m-> el-> if el.length != 2 then Error.throw (Illegal_Argument.Error "`Map.from_vector` encountered an invalid value. Each value in the vector has to be a key-value pair - it must have exactly 2 elements.") else - key = el.at 0 - value = el.at 1 - if error_on_duplicates.not || (m.contains_key key . not) then m.insert key value else - Error.throw (Illegal_Argument.Error "`Map.from_vector` encountered duplicate key: "+key.to_display_text) - - ## GROUP Logical - ICON metadata - Returns True iff the Map is empty, i.e., does not have any entries. - is_empty : Boolean - is_empty self = self.size == 0 - - ## GROUP Logical - ICON metadata - Returns True iff the Map is not empty, i.e., has at least one entry. - not_empty : Boolean - not_empty self = self.is_empty.not - - ## GROUP Metadata - ICON metadata - Returns the number of entries in this map. - size : Integer - size self = @Builtin_Method "Map.size" - - ## GROUP Metadata - ICON metadata - Returns the number of entries in this map. 
- length : Integer - length self = self.size - - ## GROUP Calculations - ICON row_add - Inserts a key-value mapping into this map, overriding any existing - instance of `key` with the new `value`. - - Note that since the return type is also a `Map`, multiple `insert` - calls can be chained, e.g., `map.insert "A" 1 . insert "B" 2`. - - Due to the limitation of the current implementation, inserts with a - key that is already contained in the map, or insert on a map instance that - is re-used in other computations, have a linear time complexity. - For all the other cases, the time complexity of this method is constant. - - Arguments: - - key: The key to insert the value for. - - value: The value to associate with the `key`. - - > Example - Insert the value "seven" into the map for the key 7. - - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_insert = Examples.map.insert 7 "seven" - insert : Any -> Any -> Map - insert self key value = @Builtin_Method "Map.insert" - - ## GROUP Selections - ICON table_clean - Removes an entry specified by the given key from this map, and - returns a new map without this entry. Throw `No_Such_Key.Error` - if `key` is not present. - - Arguments: - - key: The key to look up in the map. - - > Example - Remove key "A" from a map - - import Standard.Data.Map.Map - - Examples.map.remove "A" - - remove : Any -> Map ! No_Such_Key - remove self key = - Panic.catch Any (self.remove_builtin key) _-> - Error.throw (No_Such_Key.Error self key) - - ## GROUP Selections - ICON parse3 - Gets the value associated with `key` in this map, or throws a - `No_Such_Key.Error` if `key` is not present. - - This method has a constant time complexity. - - Arguments: - - key: The key to look up in the map. - - > Example - Looks up the value for the key "A" in a map. - - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_at = Examples.map.at "A" - at : Any -> Any ! No_Such_Key - at self key = self.get key (Error.throw (No_Such_Key.Error self key)) - - ## ICON parse3 - Gets the value associated with `key` in this map, or returns - `if_missing` if it isn't present. - - This method has a constant time complexity. - - Arguments: - - key: The key to look up in the map. - - if_missing: The value to use if the key isn't present. - - > Example - Get the value for the key 2 in a map or instead return "zero" if it - isn't present. - - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_get = Examples.map.get 2 "zero" - get : Any -> Any -> Any - get self key ~if_missing=Nothing = self.get_builtin key if_missing - - ## GROUP Logical - ICON preparation - Returns True iff the Map contains the given `key`. - contains_key : Any -> Boolean - contains_key self key = @Builtin_Method "Map.contains_key" - - ## GROUP Selections - ICON select_column - Returns an unsorted vector of all the keys in this Map. - keys : Vector Any - keys self = self.to_vector.map pair-> pair.at 0 - - ## GROUP Selections - ICON select_column - Returns an unsorted vector of all the values in this Map. - values : Vector Any - values self = self.to_vector.map pair-> pair.at 1 - - ## ICON column_add - Maps a function over each value in this map. - - Arguments: - - function: The function to apply to each value in the map, taking a - value and returning a value. - - > Example - Append "_word" to all values in the map. 
- - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_map = Examples.map.map (+ "_word") - map : (Any -> Any) -> Map - map self function = - kv_func = _ -> function - self.map_with_key kv_func - - ## ICON column_add - Maps a function over each key-value pair in the map, transforming the - value. - - Arguments: - - function: The function to apply to each key and value in the map, - taking a key and a value and returning a value. - - > Example - Prepend the keys to the values in the map. - - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_map_with_key = - Examples.map.map_with_key (k -> v -> k.to_text + "-" + v) - map_with_key : (Any -> Any -> Any) -> Map - map_with_key self function = - Map.from_vector <| self.to_vector.map pair-> - key = pair.first - value = pair.last - [key, (function key value)] - - ## ICON column_add - Maps a function over each key in this map. - - Arguments: - - function: The function to apply to each key in the map, taking a key - and returning a key. - - > Example - Doubling all keys in the map. - - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_map_keys = Examples.map.map_keys *2 - map_keys : (Any -> Any) -> Map - map_keys self function = - trans_function = k -> v -> [function k, v] - self.transform trans_function - - ## ICON column_add - Transforms the map's keys and values to create a new map. - - Arguments: - - function: The function used to transform the map, taking a key and a - value and returning a pair of `[key, value]`. - - ! Error Conditions - - If multiple map entries end up with duplicate keys after the - transformation, an `Illegal_Argument.Error` is thrown. - - > Example - Turn all keys into `Text` and append "_word" to the values in the map. - - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_transform = - Examples.map.transform (k -> v -> [k.to_text, v + "_word"]) - transform : (Any -> Any -> [Any, Any]) -> Map - transform self function = - func_pairs = p -> function (p.at 0) (p.at 1) - vec_transformed = self.to_vector.map func_pairs - new_map = Map.from_vector vec_transformed error_on_duplicates=True - new_map.catch Illegal_Argument error-> - case error.message.starts_with "`Map.from_vector` encountered duplicate key" of - True -> - new_message = error.message.replace "from_vector" "transform" - Error.throw (Illegal_Argument.Error new_message error.cause) - False -> new_map - - ## ICON transform4 - Combines the values in the map. - - Arguments: - - init: The initial value for the fold. - - function: A binary function to apply to pairs of values in the map. - - > Example - Find the length of the longest word in the map. - - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_fold = Examples.map.fold 0 (l -> r -> l.max r.length) - fold : Any -> (Any -> Any -> Any) -> Any - fold self init function = self.values.fold init function - - ## ICON transform4 - Combines the key-value pairs in the map. - - Arguments: - - init: The initial value for the fold. - - function: A function taking the left value, the current key, and the - current value, and combining them to yield a single value. - - > Example - Glue the values in the map together with the keys. 
- - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_fold_with_key = - Examples.map.fold_with_key "" (l -> k -> v -> l + k.to_text + v) - fold_with_key : Any -> (Any -> Any -> Any -> Any) -> Any - fold_with_key self init function = - self.to_vector.fold init acc-> pair-> - function acc pair.first pair.last - - ## PRIVATE - ADVANCED - Applies a function to each value in the map. - - Arguments: - - function: The function to apply to each value in the map, taking a - value and returning anything. - - This method does not return the results, so it is only useful for performing - computations with side-effects. - - If the function returns a dataflow error, the error is converted to a - panic and thrown immediately stopping further processing. - - > Example - Printing each value in the map. - - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_each = Examples.map.each IO.println - each : (Any -> Any) -> Nothing - each self function = - kv_func = _ -> function - self.each_with_key kv_func + _ = [keys, values, error_on_duplicates] + Error.throw (Deprecated.Warning "Standard.Base.Data.Map.Map" "from_keys_and_values" "Deprecated: `Map.from_keys_and_values` has been replaced by `Dictionary.from_keys_and_values`.") ## PRIVATE - ADVANCED - Applies a function to each key-value pair in the map. - - Arguments: - - function: The function to apply to each key-value pair in the map, - taking a key and a value and returning anything. - - This method does not return the results, so it is only useful for performing - computations with side-effects. - - > Example - Printing each key and value in the map. - - import Standard.Base.Data.Map.Map - import Standard.Examples - - example_each_with_key = Examples.map.each_with_key k->v-> - IO.println k - IO.println v - each_with_key : (Any -> Any -> Any) -> Nothing - each_with_key self function = - self.to_vector.each pair-> - function pair.first pair.last - - ## GROUP Conversions ICON convert - Returns an unsorted vector of key-value pairs (nested 2 element vectors). - `Map.from_vector` method is an inverse method, so the following expression - is true for all maps: `Map.from_vector map.to_vector == map`. - to_vector : Vector Any - to_vector self = @Builtin_Method "Map.to_vector" - - ## PRIVATE - Returns a text representation of this Map. - to_text : Text - to_text self = @Builtin_Method "Map.to_text" - - ## PRIVATE - get_builtin : Any -> Any -> Any - get_builtin self key ~if_missing = @Builtin_Method "Map.get_builtin" + DEPRECATED Use Dictionary.from_vector instead. + from_vector : Vector Any -> Boolean -> Any ! 
Deprecated + from_vector vec error_on_duplicates=True = + _ = [vec, error_on_duplicates] + Error.throw (Deprecated.Warning "Standard.Base.Data.Map.Map" "from_vector" "Deprecated: `Map.from_vector` has been replaced by `Dictionary.from_vector`.") diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex.enso index 6b0179c258b4..e35b6762e198 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex.enso @@ -1,8 +1,8 @@ import project.Any.Any import project.Data.Array.Array +import project.Data.Dictionary.Dictionary import project.Data.Filter_Condition.Filter_Condition import project.Data.Json.JS_Object -import project.Data.Map.Map import project.Data.Numbers.Integer import project.Data.Range.Range import project.Data.Text.Case_Sensitivity.Case_Sensitivity @@ -379,15 +379,16 @@ type Regex Return a vector of all named group names. named_groups : Vector Text named_groups self = - map = polyglot_map_to_map self.internal_regex_object.groups - map.keys + dictionary = polyglot_map_to_dictionary self.internal_regex_object.groups + dictionary.keys ## ICON metadata - Return a map from group number to group name. Only includes named groups. - group_nums_to_names : Map Integer Text + Return a Dictionary from group number to group name. Only includes named + groups. + group_nums_to_names : Dictionary Integer Text group_nums_to_names self = - map = polyglot_map_to_map self.internal_regex_object.groups - map.transform k-> v-> [v.at 0, k] + dictionary = polyglot_map_to_dictionary self.internal_regex_object.groups + dictionary.transform k-> v-> [v.at 0, k] ## ICON text Escape the special characters in `expression` such that the result is a @@ -419,20 +420,20 @@ type Regex Regex.compile self.pattern case_insensitive ## PRIVATE - Convert the polyglot map to a Map. -polyglot_map_to_map : Any -> Map Any Any -polyglot_map_to_map map = + Convert the polyglot map to a Dictionary. +polyglot_map_to_dictionary : Any -> Dictionary Any Any +polyglot_map_to_dictionary map = polyglot_keys = Polyglot.get_members map keys = Vector.from_polyglot_array polyglot_keys pairs = keys.map key-> [key, Polyglot.get_member map key] - Map.from_vector pairs + Dictionary.from_vector pairs ## PRIVATE Get the named group from the polyglot map. 
 read_group_map : Any -> Text -> Integer | Nothing
 read_group_map polyglot_map name =
-    map = polyglot_map_to_map polyglot_map
-    map.get name
+    dictionary = polyglot_map_to_dictionary polyglot_map
+    dictionary.get name
 
 ## PRIVATE
 match_to_group_maybe : Match | Nothing -> Text | Nothing
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Match.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Match.enso
index 50a566862e07..7ced03144b65 100644
--- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Match.enso
+++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Regex/Match.enso
@@ -1,6 +1,6 @@
 import project.Any.Any
+import project.Data.Dictionary.Dictionary
 import project.Data.Json.JS_Object
-import project.Data.Map.Map
 import project.Data.Numbers.Integer
 import project.Data.Range.Range
 import project.Data.Text.Regex.No_Such_Group
@@ -260,7 +260,7 @@ type Match
 
     ## GROUP Metadata
        ICON metadata
-       Gets a map containing the named capturing groups for the pattern,
+       Gets a Dictionary containing the named capturing groups for the pattern,
        replacing the value for groups that did not participate in the match with
        `default`.
 
@@ -279,17 +279,18 @@ type Match
        a named group that does not participate to the default value.
 
        > Example
-         Get the map of all of the named groups in this match, replacing the
-         value for groups that didn't participate in the match with "UNMATCHED".
+         Get the Dictionary of all of the named groups in this match, replacing
+         the value for groups that didn't participate in the match with
+         "UNMATCHED".
 
             pattern = Regex.compile "(.. .. )(?<letters>.+)()??(?<empty>)??"
            input = "aa ab abc a bc bcd"
            match = pattern.match input
            ## match.named_groups.keys.sort == ["empty", "letters"]
-    named_groups : Any -> Map Text (Text | Any)
+    named_groups : Any -> Dictionary Text (Text | Any)
     named_groups self default=Nothing =
        pattern_named_groups = self.pattern.named_groups
-        Map.from_vector <|
+        Dictionary.from_vector <|
            pattern_named_groups.map name->
                [name, self.text name default=default]
 
 ## ICON split
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/XML.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/XML.enso
index c6222d977fb9..cea7a99f7d8d 100644
--- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/XML.enso
+++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/XML.enso
@@ -1,7 +1,7 @@
 import project.Any.Any
+import project.Data.Dictionary.Dictionary
 import project.Data.Json.Extensions
 import project.Data.Json.JS_Object
-import project.Data.Map.Map
 import project.Data.Numbers.Integer
 import project.Data.Text.Encoding.Encoding
 import project.Data.Text.Text
@@ -305,9 +305,9 @@ type XML_Document
 
     ## GROUP Selections
        ICON array_new
-       Gets a map containing of the attributes of an XML document.
-    attributes : Map Text Text ! XML_Error
-    attributes self = Map.empty
+       Gets a Dictionary containing the attributes of an XML document.
+    attributes : Dictionary Text Text ! XML_Error
+    attributes self = Dictionary.empty
 
     ## GROUP Selections
        ICON metadata
@@ -508,19 +508,19 @@ type XML_Element
 
     ## GROUP Selections
        ICON array_new
-       Gets a map containing of the attributes of an XML element.
+       Gets a Dictionary containing the attributes of an XML element.
 
        > Example
         XML_Document.from_text '<foo bar="one">hello</foo>' . root_element . attributes
-         # => Map.from_vector [["bar", "one"]]
-    attributes : Map Text Text ! XML_Error
+         # => Dictionary.from_vector [["bar", "one"]]
+    attributes : Dictionary Text Text ! 
XML_Error attributes self = XML_Error.handle_java_exceptions <| named_node_map = self.java_element.getAttributes keys_and_values = 0.up_to named_node_map.getLength . map i-> node = named_node_map.item i [node.getNodeName, node.getNodeValue] - Map.from_vector keys_and_values + Dictionary.from_vector keys_and_values ## GROUP Selections ICON metadata diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso index e58b05e77ddb..79b7e8521892 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_Secret.enso @@ -1,6 +1,6 @@ import project.Data.Base_64.Base_64 +import project.Data.Dictionary.Dictionary import project.Data.Json.JS_Object -import project.Data.Map.Map import project.Data.Text.Text import project.Data.Vector.Vector import project.Enso_Cloud.Enso_File.Enso_Asset_Type @@ -56,7 +56,7 @@ type Enso_Secret handle_already_exists _ = message = "A secret with the name " + name.pretty + " already exists inside of directory " + parent_dir.name + "." Error.throw (Illegal_Argument.Error message) - error_handlers = Map.from_vector [["resource_already_exists", handle_already_exists]] + error_handlers = Dictionary.from_vector [["resource_already_exists", handle_already_exists]] id = Utils.http_request_as_json HTTP_Method.Post Utils.secrets_api body error_handlers=error_handlers Enso_Secret.Value name id path diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_File_Helpers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_File_Helpers.enso index d68ad9ffc603..3952e82a5d5c 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_File_Helpers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Enso_File_Helpers.enso @@ -1,9 +1,9 @@ private import project.Any.Any +import project.Data.Dictionary.Dictionary import project.Data.Json.Invalid_JSON import project.Data.Json.JS_Object -import project.Data.Map.Map import project.Data.Text.Text import project.Enso_Cloud.Enso_File.Enso_Asset_Type import project.Enso_Cloud.Enso_File.Enso_File @@ -44,7 +44,7 @@ upload_file (local_file : File) (destination : Enso_File) (replace_existing : Bo The `create_action` function is called with the existing asset for the parent directory and for the file, if it already exists, or `Nothing` otherwise, and with a mapping of error handlers that may be added to the request. 
-generic_create_asset (destination : Enso_File) (allow_existing : Boolean) (create_action : Existing_Enso_Asset -> (Existing_Enso_Asset | Nothing) -> Map -> Any) -> Any = +generic_create_asset (destination : Enso_File) (allow_existing : Boolean) (create_action : Existing_Enso_Asset -> (Existing_Enso_Asset | Nothing) -> Dictionary -> Any) -> Any = parent_directory = destination.parent if parent_directory.is_nothing then Error.throw (Illegal_Argument.Error "Please provide an asset name inside of the root directory.") else parent_directory_asset = Existing_Enso_Asset.get_asset_reference_for parent_directory @@ -55,7 +55,7 @@ generic_create_asset (destination : Enso_File) (allow_existing : Boolean) (creat File_Error.Not_Found _ -> Nothing _ -> Error.throw error if existing_asset.is_nothing.not && allow_existing.not then Error.throw (File_Error.Already_Exists destination) else - error_handlers = if existing_asset.is_nothing.not then Map.empty else + error_handlers = if existing_asset.is_nothing.not then Dictionary.empty else ## Currently we just report the race condition and request the user to re-run. We don't retry automatically because it is harder than it seems - the `create_action` usually depends on some user code that is writing to a stream (the callback given to `with_output_stream`). @@ -64,7 +64,7 @@ generic_create_asset (destination : Enso_File) (allow_existing : Boolean) (creat into memory or a temporary file and relies on that for the retry. For now, reporting the race condition in a sane way seemed like the simplest choice. This situation should be very rare. - Map.from_vector [["resource_already_exists", Error.throw (Illegal_State.Error "A race-condition has been encountered - another process has created a colliding resource at "+destination.path+". Please try re-running the operation.")]] + Dictionary.from_vector [["resource_already_exists", Error.throw (Illegal_State.Error "A race-condition has been encountered - another process has created a colliding resource at "+destination.path+". Please try re-running the operation.")]] create_action parent_directory_asset existing_asset error_handlers ## PRIVATE diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso index 7e7971eaed5e..8c786aa0434b 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso @@ -1,7 +1,7 @@ private +import project.Data.Dictionary.Dictionary import project.Data.Json.JS_Object -import project.Data.Map.Map import project.Data.Text.Text import project.Data.Time.Date_Time.Date_Time import project.Data.Time.Date_Time_Formatter.Date_Time_Formatter @@ -83,7 +83,7 @@ type Existing_Enso_Asset Resolves a path to an existing asset in the cloud. resolve_path (path : Text) ~if_not_found = path.if_not_error <| handle_not_found _ = Error.throw Not_Found - error_handlers = Map.from_vector [["resource_missing", handle_not_found]] + error_handlers = Dictionary.from_vector [["resource_missing", handle_not_found]] uri = ((URI.from Utils.cloud_root_uri) / "path/resolve") . 
add_query_argument "path" path response = Utils.http_request_as_json HTTP_Method.Get uri error_handlers=error_handlers diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso index 32636acaaa63..676846590108 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Utils.enso @@ -1,8 +1,8 @@ private import project.Any.Any +import project.Data.Dictionary.Dictionary import project.Data.Json.Invalid_JSON -import project.Data.Map.Map import project.Data.Numbers.Integer import project.Data.Text.Text import project.Data.Time.Duration.Duration @@ -72,7 +72,7 @@ flush_caches = CloudAPI.flushCloudCaches ## PRIVATE Performs a standard request to the Enso Cloud API, parsing the result as JSON. -http_request_as_json (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Map Text (Any -> Any) = Map.empty) (retries : Integer = 3) -> Any ! Enso_Cloud_Error = +http_request_as_json (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Dictionary Text (Any -> Any) = Dictionary.empty) (retries : Integer = 3) -> Any ! Enso_Cloud_Error = response = http_request method url body additional_headers error_handlers retries response.decode_as_json.catch Invalid_JSON error-> Error.throw (Enso_Cloud_Error.Invalid_Response_Payload error) @@ -87,7 +87,7 @@ http_request_as_json (method : HTTP_Method) (url : URI) (body : Request_Body = R Custom error handlers can be provided as a mapping from error codes (defined in the cloud project) to functions that take the full JSON payload and return a custom error. -http_request (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Map Text (Any -> Any) = Map.empty) (retries : Integer = 3) -> Response ! Enso_Cloud_Error = method.if_not_error <| url.if_not_error <| body.if_not_error <| additional_headers.if_not_error <| +http_request (method : HTTP_Method) (url : URI) (body : Request_Body = Request_Body.Empty) (additional_headers : Vector = []) (error_handlers : Dictionary Text (Any -> Any) = Dictionary.empty) (retries : Integer = 3) -> Response ! 
Enso_Cloud_Error = method.if_not_error <| url.if_not_error <| body.if_not_error <| additional_headers.if_not_error <| all_headers = [authorization_header] + additional_headers as_connection_error err = Error.throw (Enso_Cloud_Error.Connection_Error err) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso index 1915c7428cfb..e08a08fef8be 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Internal/Array_Like_Helpers.enso @@ -3,9 +3,9 @@ private import project.Any.Any import project.Data.Array.Array import project.Data.Array_Proxy.Array_Proxy +import project.Data.Dictionary.Dictionary import project.Data.Index_Sub_Range.Index_Sub_Range import project.Data.List.List -import project.Data.Map.Map import project.Data.Maybe.Maybe import project.Data.Numbers.Integer import project.Data.Pair.Pair @@ -166,7 +166,7 @@ private on_problems_to_number on_problems:Problem_Behavior = case on_problems of distinct vector on = Vector.build builder-> - vector.fold Map.empty existing-> + vector.fold Dictionary.empty existing-> item-> key = on item if (existing.get key False) then existing else @@ -174,7 +174,7 @@ distinct vector on = existing.insert key True duplicates vector on = Vector.build builder-> - counts = vector.fold Map.empty current-> item-> + counts = vector.fold Dictionary.empty current-> item-> key = on item count = current.get key 0 current.insert key count+1 diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso index afde3f395f5d..fa1434f556e7 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso @@ -2,8 +2,10 @@ export project.Any.Any export project.Data export project.Data.Array.Array export project.Data.Decimal.Decimal +export project.Data.Dictionary.Dictionary export project.Data.Filter_Condition.Filter_Action export project.Data.Filter_Condition.Filter_Condition +export project.Data.Hashset.Hashset export project.Data.Index_Sub_Range.Index_Sub_Range export project.Data.Interval.Bound export project.Data.Interval.Interval @@ -25,7 +27,6 @@ export project.Data.Pair.Pair export project.Data.Range.Range export project.Data.Raw_Response export project.Data.Regression -export project.Data.Set.Set export project.Data.Sort_Direction.Sort_Direction export project.Data.Statistics.Rank_Method export project.Data.Statistics.Statistic diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso index ead500bc394f..62b19da77954 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP.enso @@ -1,7 +1,7 @@ import project.Any.Any -import project.Data.Map.Map +import project.Data.Dictionary.Dictionary +import project.Data.Hashset.Hashset import project.Data.Pair.Pair -import project.Data.Set.Set import project.Data.Text.Encoding.Encoding import project.Data.Text.Text import project.Data.Time.Duration.Duration @@ -261,8 +261,8 @@ resolve_body_to_publisher_and_boundary body:Request_Body = Build a BodyPublisher from the given form data. 
The pair's second value is a content boundary in the case of a `multipart/form-data` form; otherwise, Nothing -build_form_body_publisher : Map Text (Text | File) -> Boolean -> Pair BodyPublisher Text -build_form_body_publisher (form_data:(Map Text (Text | File))) (url_encoded:Boolean=False) = case url_encoded of +build_form_body_publisher : Dictionary Text (Text | File) -> Boolean -> Pair BodyPublisher Text +build_form_body_publisher (form_data:(Dictionary Text (Text | File))) (url_encoded:Boolean=False) = case url_encoded of True -> body_builder = UrlencodedBodyBuilder.new form_data.map_with_key key-> value-> @@ -280,15 +280,15 @@ build_form_body_publisher (form_data:(Map Text (Text | File))) (url_encoded:Bool Pair.new body_builder.build boundary ## PRIVATE -fetch_methods : Set HTTP_Method -fetch_methods = Set.from_vector [HTTP_Method.Get, HTTP_Method.Head, HTTP_Method.Options] +fetch_methods : Hashset HTTP_Method +fetch_methods = Hashset.from_vector [HTTP_Method.Get, HTTP_Method.Head, HTTP_Method.Options] ## PRIVATE -post_methods : Set HTTP_Method -post_methods = Set.from_vector [HTTP_Method.Post, HTTP_Method.Put, HTTP_Method.Patch, HTTP_Method.Delete] +post_methods : Hashset HTTP_Method +post_methods = Hashset.from_vector [HTTP_Method.Post, HTTP_Method.Put, HTTP_Method.Patch, HTTP_Method.Delete] ## PRIVATE -check_method : Set HTTP_Method -> Any -> Any -> Any ! Illegal_Argument +check_method : Hashset HTTP_Method -> Any -> Any -> Any ! Illegal_Argument check_method allowed_methods method ~action = if allowed_methods.contains method then action else Error.throw (Illegal_Argument.Error ("Unsupported method " + method.to_display_text)) diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request.enso index d873de1bb18b..c282eef39b7f 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request.enso @@ -1,5 +1,5 @@ import project.Any.Any -import project.Data.Map.Map +import project.Data.Dictionary.Dictionary import project.Data.Pair.Pair import project.Data.Text.Text import project.Data.Vector.Vector @@ -249,10 +249,10 @@ type Request example_delete = Request.delete (URI.parse "http://example.com") . with_form [] - with_form : (Vector | Map) -> Request + with_form : (Vector | Dictionary) -> Request with_form self parts = form_data = case parts of - _ : Vector -> Map.from_vector parts - _ : Map -> parts + _ : Vector -> Dictionary.from_vector parts + _ : Dictionary -> parts new_body = Request_Body.Form_Data form_data Request.Value self.method self.uri self.headers new_body . with_headers [Header.application_x_www_form_urlencoded] diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request_Body.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request_Body.enso index cdbdeb45cfb4..84a42d25d9a6 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request_Body.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Request_Body.enso @@ -1,5 +1,5 @@ import project.Any.Any -import project.Data.Map.Map +import project.Data.Dictionary.Dictionary import project.Data.Text.Encoding.Encoding import project.Data.Text.Text import project.Network.HTTP.Header.Header @@ -36,7 +36,7 @@ type Request_Body - form_data: the form fields (text or file) to be sent - url_encoded: if true, use a URL-encoded form; otherwise, use a multi-part encoding. 
- Form_Data (form_data:(Map Text (Text | File))) (url_encoded:Boolean=False) + Form_Data (form_data:(Dictionary Text (Text | File))) (url_encoded:Boolean=False) ## Empty request body; used for GET Empty diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso index 696d54bb9f3c..77760bb6b664 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Network/HTTP/Response.enso @@ -75,7 +75,7 @@ type Response import Standard.Examples - example_headers = Map.from_vector error_on_duplicates=True (Examples.get_response.headers.map h-> [h.name, h.value]) + example_headers = Dictionary.from_vector error_on_duplicates=True (Examples.get_response.headers.map h-> [h.name, h.value]) headers : Vector Header headers self = # This is a mapping that maps a header name to a list of values (since headers may be duplicated). diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso index b17182ff52cb..857c9b3f765c 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso @@ -1,8 +1,6 @@ import project.Any.Any import project.Data.Array.Array -import project.Data.Map.Map import project.Data.Range.Range -import project.Data.Set.Set import project.Data.Text.Text import project.Data.Time.Date.Date import project.Data.Time.Date_Range.Date_Range diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso index f933851ddc71..b4bae445f414 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Warning.enso @@ -1,9 +1,9 @@ import project.Any.Any import project.Data.Array.Array +import project.Data.Hashset.Hashset import project.Data.Maybe.Maybe import project.Data.Numbers.Integer import project.Data.Pair.Pair -import project.Data.Set.Set import project.Data.Vector.Map_Error import project.Data.Vector.No_Wrap import project.Data.Vector.Vector diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso index c30cbf551549..7343d5a1cdca 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso @@ -168,11 +168,11 @@ type Connection Nothing -> Nothing _ : Vector -> types _ -> [types] - name_map = Map.from_vector [["TABLE_CAT", "Database"], ["TABLE_SCHEM", "Schema"], ["TABLE_NAME", "Name"], ["TABLE_TYPE", "Type"], ["REMARKS", "Description"], ["TYPE_CAT", "Type Database"], ["TYPE_SCHEM", "Type Schema"], ["TYPE_NAME", "Type Name"]] + name_dict = Dictionary.from_vector [["TABLE_CAT", "Database"], ["TABLE_SCHEM", "Schema"], ["TABLE_NAME", "Name"], ["TABLE_TYPE", "Type"], ["REMARKS", "Description"], ["TYPE_CAT", "Type Database"], ["TYPE_SCHEM", "Type Schema"], ["TYPE_NAME", "Type Name"]] result = self.jdbc_connection.with_metadata metadata-> table = Managed_Resource.bracket (metadata.getTables database schema name_like types_vector) .close result_set-> result_set_to_table result_set self.dialect.get_type_mapping.make_column_fetcher - renamed = table.rename_columns name_map + renamed = table.rename_columns name_dict if all_fields then renamed else 
renamed.select_columns ["Database", "Schema", "Name", "Type", "Description"] case include_hidden of diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso index f9c309a0945c..9a03f1715f61 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/DB_Table.enso @@ -544,17 +544,17 @@ type DB_Table > Example Rename the "Alpha" column to "Beta" - table.rename_columns (Map.from_vector [["Alpha", "Beta"]]) + table.rename_columns (Dictionary.from_vector [["Alpha", "Beta"]]) > Example Rename the last column to "LastColumn" - table.rename_columns (Map.from_vector [[-1, "LastColumn"]]) + table.rename_columns (Dictionary.from_vector [[-1, "LastColumn"]]) > Example Rename the "Alpha" column to "Beta" and last column to "LastColumn" - table.rename_columns (Map.from_vector [["Alpha", "Beta"], [-1, "LastColumn"]]) + table.rename_columns (Dictionary.from_vector [["Alpha", "Beta"], [-1, "LastColumn"]]) > Example Rename the first column to "FirstColumn" @@ -569,12 +569,12 @@ type DB_Table > Example For all columns starting with the prefix `name=`, replace it with `key:`. - table.rename_columns (Map.from_vector [["name=(.*)".to_regex, "key:$1"]]) + table.rename_columns (Dictionary.from_vector [["name=(.*)".to_regex, "key:$1"]]) @column_map Widget_Helpers.make_rename_name_vector_selector - rename_columns : Map (Text | Integer | Regex) Text | Vector Text | Vector Vector -> Case_Sensitivity -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Column_Names | Duplicate_Output_Column_Names - rename_columns self (column_map:(Table | Map | Vector)=["Column"]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) = case column_map of + rename_columns : Table | Dictionary (Text | Integer | Regex) Text | Vector Text | Vector Vector -> Case_Sensitivity -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Column_Names | Duplicate_Output_Column_Names + rename_columns self (column_map:(Table | Dictionary | Vector)=["Column"]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) = case column_map of _ : Table -> - resolved = Table_Helpers.read_name_map_from_table column_map + resolved = Table_Helpers.read_name_mapping_from_table column_map self.rename_columns resolved case_sensitivity error_on_missing_columns on_problems _ -> new_names = Table_Helpers.rename_columns self.column_naming_helper self.internal_columns column_map case_sensitivity error_on_missing_columns on_problems @@ -1035,26 +1035,26 @@ type DB_Table Warning.set result [] ## PRIVATE - A helper that creates a two-column table from a Map. + A helper that creates a two-column table from a Dictionary. - The keys of the `Map` become the first column, with name - `key_column_name`, and the values of the `Map` become the second column, - with name `value_column_name`. + The keys of the `Dictionary` become the first column, with name + `key_column_name`, and the values become the second column, with name + `value_column_name`. - For the in-memory database, the `Map` can be empty. For the database - backends, it must not be empty. + For the in-memory database, the `Dictionary` can be empty. 
For the + database backends, it must not be empty. Arguments: - - map: The `Map` to create the table from. + - dict: The `Dictionary` to create the table from. - key_column_name: The name to use for the first column. - value_column_name: The name to use for the second column. - make_table_from_map : Map Any Any -> Text -> Text -> Table - make_table_from_map self map key_column_name value_column_name = - total_size = map.size * 2 + make_table_from_dictionary : Dictionary Any Any -> Text -> Text -> Table + make_table_from_dictionary self dict key_column_name value_column_name = + total_size = dict.size * 2 - if map.is_empty then Error.throw (Illegal_Argument.Error "Map argument cannot be empty") else - if total_size > MAX_LITERAL_ELEMENT_COUNT then Error.throw (Illegal_Argument.Error "Map argument is too large ("+map.size.to_text+" entries): materialize a table into the database instead") else - keys_and_values = map.to_vector + if dict.is_empty then Error.throw (Illegal_Argument.Error "Dictionary cannot be empty") else + if total_size > MAX_LITERAL_ELEMENT_COUNT then Error.throw (Illegal_Argument.Error "Dictionary is too large ("+dict.size.to_text+" entries): materialize a table into the database instead") else + keys_and_values = dict.to_vector self.make_table_from_vectors [keys_and_values.map .first, keys_and_values.map .second] [key_column_name, value_column_name] ## PRIVATE @@ -1683,8 +1683,8 @@ type DB_Table @columns (Widget_Helpers.make_column_name_multi_selector add_regex=True add_by_type=True) @from_column Widget.Text_Input @to_column Widget.Text_Input - replace : (DB_Table | Map) -> Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup - replace self lookup_table:(DB_Table | Map) columns:(Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning = + replace : (DB_Table | Dictionary) -> Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> DB_Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup + replace self lookup_table:(DB_Table | Dictionary) columns:(Vector (Integer | Text | Regex | By_Type) | Text | Integer | Regex | By_Type) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning = Replace_Helpers.replace self lookup_table columns from_column to_column allow_unmatched_rows on_problems ## ALIAS join by row position diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso index 88814a6fdf74..427b22ce8678 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso @@ -17,33 +17,32 @@ from project.Internal.IR.Operation_Metadata import Row_Number_Metadata type Dialect_Operations ## PRIVATE - - Operations supported by a particular SQL dialect and how they are implemeneted. 
+ Operations supported by a particular SQL dialect and how they are + implemented. Arguments: - - operation_map: The mapping which maps operation names to their + - operations_dict: Dictionary mapping operation names to their implementations; each implementation is a function which takes SQL builders for the arguments, and optionally an additional metadata argument, and should return a SQL builder yielding code for the whole operation. - Value (operation_map:(Map Text (Vector (SQL_Builder->SQL_Builder)))) + Value (operations_dict:(Dictionary Text (Vector (SQL_Builder->SQL_Builder)))) ## PRIVATE - Creates a copy of the dialect that supports additional operations or overrides existing ones. # extend_with : Vector [Text, Vector SQL_Builder -> SQL_Builder] -> Dialect_Operations extend_with : Vector Any -> Dialect_Operations extend_with self mappings = - new_map = mappings.fold self.operation_map (m -> el -> m.insert (el.at 0) (el.at 1)) - Dialect_Operations.Value new_map + new_dict = mappings.fold self.operations_dict (m -> el -> m.insert (el.at 0) (el.at 1)) + Dialect_Operations.Value new_dict ## PRIVATE Checks if an operation is supported by the dialect. is_supported : Text -> Boolean is_supported self operation = - self.operation_map.contains_key operation + self.operations_dict.contains_key operation ## PRIVATE @@ -200,8 +199,8 @@ base_dialect_operations = contains = [["IS_IN", make_is_in], ["IS_IN_COLUMN", make_is_in_column]] types = [simple_cast] windows = [["ROW_NUMBER", make_row_number], ["ROW_NUMBER_IN_GROUP", make_row_number_in_group]] - base_map = Map.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains + types + windows) - Dialect_Operations.Value base_map + base_dict = Dictionary.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains + types + windows) + Dialect_Operations.Value base_dict ## PRIVATE is_empty = lift_unary_op "IS_EMPTY" arg-> @@ -311,7 +310,7 @@ generate_expression dialect expr = case expr of escaped = value.replace "'" "''" SQL_Builder.code ("'" + escaped + "'") SQL_Expression.Operation kind arguments metadata -> - op = dialect.dialect_operations.operation_map.get kind (Error.throw <| Unsupported_Database_Operation.Error kind) + op = dialect.dialect_operations.operations_dict.get kind (Error.throw <| Unsupported_Database_Operation.Error kind) parsed_args = arguments.map (generate_expression dialect) result = op parsed_args # If the function expects more arguments, we pass the metadata as the last argument. diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Database_Join_Helper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Database_Join_Helper.enso index 4b5b19d30f1b..78ca8456285b 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Database_Join_Helper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Database_Join_Helper.enso @@ -70,11 +70,10 @@ type Join_Subquery_Setup ## PRIVATE Creates a mapping from names of columns in the original table to corresponding columns in the created subquery. 
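# Editor's note: a minimal, hypothetical sketch of the zip-into-Dictionary
# pattern that `column_mapping` below relies on; the names are made up.
old_names = ["a", "b"]
new_names = ["a_1", "b_1"]
name_mapping = Dictionary.from_vector <| old_names.zip new_names old-> new-> [old, new]
name_mapping.at "a"    # "a_1"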
- column_mapping : Map Text Internal_Column + column_mapping : Dictionary Text Internal_Column column_mapping self = - Map.from_vector <| - self.old_columns.zip self.new_columns old-> new-> - [old.name, new] + Dictionary.from_vector <| + self.old_columns.zip self.new_columns old->new->[old.name, new] ## PRIVATE prepare_subqueries : Connection -> DB_Table -> DB_Table -> Boolean -> Boolean -> Pair Join_Subquery_Setup diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso index 454302fc8a63..f89a2780f44c 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso @@ -116,7 +116,7 @@ type Postgres_Dialect inner_table_alias = table_name_deduplicator.make_unique table.name+"_inner" setup = table.context.as_subquery inner_table_alias [table.internal_columns] new_columns = setup.new_columns.first - column_mapping = Map.from_vector <| new_columns.map c-> [c.name, c] + column_mapping = Dictionary.from_vector <| new_columns.map c-> [c.name, c] new_key_columns = key_columns.map c-> column_mapping.at c.name type_mapping = self.get_type_mapping distinct_expressions = new_key_columns.map column-> @@ -563,14 +563,14 @@ decimal_mod = Base_Generator.lift_binary_op "DECIMAL_MOD" x-> y-> x ++ " - FLOOR(CAST(" ++ x ++ " AS decimal) / CAST(" ++ y ++ " AS decimal)) * " ++ y ## PRIVATE -supported_replace_params : Set Replace_Params +supported_replace_params : Hashset Replace_Params supported_replace_params = e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False] e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True] e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False] e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True] e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False] - Set.from_vector <| e0 + e1 + e2 + e3 + e4 + Hashset.from_vector <| e0 + e1 + e2 + e3 + e4 ## PRIVATE replace : Vector SQL_Builder -> Any -> SQL_Builder diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Type_Mapping.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Type_Mapping.enso index 88deded846d0..138133cd5a0f 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Type_Mapping.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Type_Mapping.enso @@ -126,14 +126,14 @@ type Postgres_Type_Mapping Column_Fetcher_Module.default_fetcher_for_value_type value_type ## PRIVATE -simple_types_map = Map.from_vector <| +simple_types_map = Dictionary.from_vector <| ints = [[Types.SMALLINT, Value_Type.Integer Bits.Bits_16], [Types.BIGINT, Value_Type.Integer Bits.Bits_64], [Types.INTEGER, Value_Type.Integer Bits.Bits_32]] floats = [[Types.DOUBLE, 
Value_Type.Float Bits.Bits_64], [Types.REAL, Value_Type.Float Bits.Bits_32]] other = [[Types.DATE, Value_Type.Date], [Types.TIME, Value_Type.Time]] ints + floats + other ## PRIVATE -complex_types_map = Map.from_vector <| +complex_types_map = Dictionary.from_vector <| make_decimal sql_type = Value_Type.Decimal sql_type.precision sql_type.scale make_varchar sql_type = diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso index 198e408a8913..0facd5fea189 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso @@ -125,7 +125,7 @@ type SQLite_Dialect inner_table_alias = table_name_deduplicator.make_unique table.name+"_inner" setup = table.context.as_subquery inner_table_alias [table.internal_columns] new_columns = setup.new_columns.first - column_mapping = Map.from_vector <| new_columns.map c-> [c.name, c] + column_mapping = Dictionary.from_vector <| new_columns.map c-> [c.name, c] new_key_columns = key_columns.map c-> column_mapping.at c.name type_mapping = self.get_type_mapping distinct_expressions = new_key_columns.map column-> @@ -447,10 +447,10 @@ mod_op = Base_Generator.lift_binary_op "MOD" x-> y-> x ++ " - FLOOR(CAST(" ++ x ++ " AS REAL) / CAST(" ++ y ++ " AS REAL)) * " ++ y ## PRIVATE -supported_replace_params : Set Replace_Params +supported_replace_params : Hashset Replace_Params supported_replace_params = e = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Sensitive False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive True] - Set.from_vector e + Hashset.from_vector e ## PRIVATE replace : Vector SQL_Builder -> Any -> SQL_Builder diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso index 4c0eb55ad84c..935383fc7d46 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso @@ -96,7 +96,7 @@ type SQLite_Type_Mapping infer_return_type : (SQL_Expression -> SQL_Type_Reference) -> Text -> Vector -> SQL_Expression -> SQL_Type_Reference infer_return_type infer_from_database_callback op_name arguments expression = _ = [infer_from_database_callback, expression] - handler = operations_map.get op_name (_ -> Error.throw (Illegal_State.Error "Impossible: Unknown operation "+op_name+". This is a bug in the Database library.")) + handler = operations_dict.get op_name (_ -> Error.throw (Illegal_State.Error "Impossible: Unknown operation "+op_name+". This is a bug in the Database library.")) sql_type = handler arguments SQL_Type_Reference.from_constant sql_type @@ -127,7 +127,7 @@ type SQLite_Type_Mapping For types like dates - we map them to unsupported type, because date operations in SQLite are currently not supported due to their weird storage. -simple_types_map = Map.from_vector <| +simple_types_map = Dictionary.from_vector <| ints = [Types.TINYINT, Types.SMALLINT, Types.BIGINT, Types.INTEGER] . map x-> [x, default_integer] floats = [Types.DOUBLE, Types.REAL, Types.FLOAT] . 
map x-> [x, default_float] # We treat numeric as a float, since that is what really sits in SQLite under the hood. @@ -142,13 +142,13 @@ simple_types_map = Map.from_vector <| https://www.sqlite.org/datatype3.html#affinity_name_examples However, with this the date-time columns will be mapped to the numeric type. Instead, we want to treat such columns as Text, so we override the mapping. -name_based_workarounds = Map.from_vector <| +name_based_workarounds = Dictionary.from_vector <| ["TIME", "DATE", "DATETIME", "TIMESTAMP"] . map x-> [x, default_text] ## PRIVATE Maps operation names to functions that infer its result type. -operations_map : Map Text (Vector -> SQL_Type) -operations_map = +operations_dict : Dictionary Text (Vector -> SQL_Type) +operations_dict = find_type arg = case arg of column : DB_Column -> column.value_type internal_column : Internal_Column -> @@ -198,7 +198,7 @@ operations_map = arithmetic_ops = ["ADD_NUMBER", "-", "*", "^", "%", "SUM"] merge_input_types_ops = ["ROW_MAX", "ROW_MIN", "MAX", "MIN", "FILL_NULL", "COALESCE"] others = [["IIF", handle_iif], ["CAST", handle_cast], ["CASE", handle_case], ["RUNTIME_ERROR", handle_runtime_error]] - Map.from_vector <| + Dictionary.from_vector <| v1 = always_boolean_ops.map [_, const SQLite_Types.boolean] v2 = always_floating_ops.map [_, const SQLite_Types.real] v3 = always_integer_ops.map [_, const SQLite_Types.integer] diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso index cec7fd97b0f7..2887d9318b67 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Upload_Table.enso @@ -190,8 +190,8 @@ resolve_primary_key structure primary_key = case primary_key of if key.is_a Text then key else Error.throw (Illegal_Argument.Error ("Primary key must be a vector of column names, instead got a " + (Meta.type_of key . 
to_display_text))) validated.if_not_error <| - column_names = Set.from_vector (structure.map .name) - missing_columns = (Set.from_vector primary_key).difference column_names + column_names = Hashset.from_vector (structure.map .name) + missing_columns = (Hashset.from_vector primary_key).difference column_names if missing_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_columns.to_vector) else primary_key @@ -470,13 +470,13 @@ check_update_arguments_structure_match source_table target_table key_columns upd if source_type.can_be_widened_to target_type then [Inexact_Type_Coercion.Warning source_type target_type unavailable=False] else Error.throw (Column_Type_Mismatch.Error source_column.name target_type source_type) - source_columns = Set.from_vector source_table.column_names - target_columns = Set.from_vector target_table.column_names + source_columns = Hashset.from_vector source_table.column_names + target_columns = Hashset.from_vector target_table.column_names extra_columns = source_columns.difference target_columns if extra_columns.not_empty then Error.throw (Unmatched_Columns.Error extra_columns.to_vector) else missing_columns = target_columns.difference source_columns if missing_columns.not_empty && error_on_missing_columns then Error.throw (Missing_Input_Columns.Error missing_columns.to_vector "the source table") else - key_set = Set.from_vector key_columns + key_set = Hashset.from_vector key_columns missing_source_key_columns = key_set.difference source_columns missing_target_key_columns = key_set.difference target_columns if missing_source_key_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_source_key_columns.to_vector "the source table") else @@ -600,10 +600,10 @@ type Delete_Rows_Source check_delete_rows_arguments target_table key_values_to_delete key_columns ~continuation = check_target_table_for_update target_table <| if key_columns.is_empty then Error.throw (Illegal_Argument.Error "One or more key columns must be provided to correlate the rows to be deleted.") else - key_set = Set.from_vector key_columns - missing_target_key_columns = key_set . difference (Set.from_vector target_table.column_names) + key_set = Hashset.from_vector key_columns + missing_target_key_columns = key_set . difference (Hashset.from_vector target_table.column_names) if missing_target_key_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_target_key_columns.to_vector "the target table") else - missing_source_key_columns = key_set . difference (Set.from_vector key_values_to_delete.column_names) + missing_source_key_columns = key_set . difference (Hashset.from_vector key_values_to_delete.column_names) if missing_source_key_columns.not_empty then Error.throw (Missing_Input_Columns.Error missing_source_key_columns.to_vector "the key values to delete table") else continuation diff --git a/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso index 3878bdc74d34..d72fe11d854b 100644 --- a/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Examples/0.0.0-dev/src/Main.enso @@ -112,9 +112,9 @@ json_object = json.first list : List list = List.Cons 1 (List.Cons 2 (List.Cons 3 List.Nil)) -## A simple map that contains some numbers mapped to their word equivalents. -map : Map -map = Map.empty . insert 1 "one" . insert 3 "three" . insert 5 "five" +## A simple dictionary that contains some numbers mapped to their word equivalents. 
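# Editor's note: a hedged sketch of the `Hashset` validation pattern used by
# `resolve_primary_key` in Upload_Table above; the column and key names are
# invented for illustration.
column_names = Hashset.from_vector ["id", "name", "value"]
primary_key = ["id", "missing"]
missing_columns = (Hashset.from_vector primary_key).difference column_names
missing_columns.to_vector    # ["missing"]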
+dictionary : Dictionary +dictionary = Dictionary.empty . insert 1 "one" . insert 3 "three" . insert 5 "five" ## A dummy type that is used for example purposes. type No_Methods diff --git a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso index 089a8572c3ba..6b350302457a 100644 --- a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso +++ b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Dialect.enso @@ -119,7 +119,7 @@ type Snowflake_Dialect inner_table_alias = table_name_deduplicator.make_unique table.name+"_inner" setup = (Internals_Access.get_context table).as_subquery inner_table_alias [Internals_Access.internal_columns table] new_columns = setup.new_columns.first - column_mapping = Map.from_vector <| new_columns.map c-> [c.name, c] + column_mapping = Dictionary.from_vector <| new_columns.map c-> [c.name, c] new_key_columns = key_columns.map c-> column_mapping.at c.name type_mapping = self.get_type_mapping distinct_expressions = new_key_columns.map column-> @@ -464,14 +464,14 @@ decimal_mod = Base_Generator.lift_binary_op "DECIMAL_MOD" x-> y-> x ++ " - FLOOR(CAST(" ++ x ++ " AS decimal) / CAST(" ++ y ++ " AS decimal)) * " ++ y ## PRIVATE -supported_replace_params : Set Replace_Params +supported_replace_params : Hashset Replace_Params supported_replace_params = e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False] e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True] e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False] e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True] e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False] - Set.from_vector <| e0 + e1 + e2 + e3 + e4 + Hashset.from_vector <| e0 + e1 + e2 + e3 + e4 ## PRIVATE replace : Vector SQL_Builder -> Any -> SQL_Builder diff --git a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Type_Mapping.enso b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Type_Mapping.enso index 2cae993f4871..e472e489adf6 100644 --- a/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Type_Mapping.enso +++ b/distribution/lib/Standard/Snowflake/0.0.0-dev/src/Internal/Snowflake_Type_Mapping.enso @@ -118,14 +118,14 @@ type Snowflake_Type_Mapping _ -> Column_Fetcher_Module.default_fetcher_for_value_type value_type ## PRIVATE -simple_types_map = Map.from_vector <| +simple_types_map = Dictionary.from_vector <| ints = [[Types.TINYINT, Value_Type.Byte], [Types.SMALLINT, Value_Type.Integer Bits.Bits_16], [Types.BIGINT, Value_Type.Integer Bits.Bits_64], [Types.INTEGER, Value_Type.Integer Bits.Bits_32]] floats = [[Types.DOUBLE, Value_Type.Float Bits.Bits_64], [Types.REAL, Value_Type.Float Bits.Bits_32]] other = [[Types.DATE, Value_Type.Date], [Types.TIME, Value_Type.Time], [Types.BOOLEAN, Value_Type.Boolean]] ints + floats + other ## PRIVATE 
-complex_types_map = Map.from_vector <| +complex_types_map = Dictionary.from_vector <| make_decimal sql_type = Value_Type.Decimal sql_type.precision sql_type.scale make_varchar sql_type = diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso index 8e88dd1f7060..0c944905eedc 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Column.enso @@ -1716,7 +1716,7 @@ type Column True -> run_vectorized_binary_op self op_name as_vector expected_result_type=Value_Type.Boolean skip_nulls=False new_name=result_name False -> - set = Set.from_vector as_vector error_on_duplicates=False + set = Hashset.from_vector as_vector error_on_duplicates=False apply_unary_map self result_name set.contains_relational Value_Type.Boolean nothing_unchanged=False ## GROUP Standard.Base.Conversions diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Conversions/Convertible_To_Columns.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Conversions/Convertible_To_Columns.enso index 75cc10f2bc8c..bbe233f7aaea 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Conversions/Convertible_To_Columns.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Conversions/Convertible_To_Columns.enso @@ -20,12 +20,12 @@ Convertible_To_Columns.from (that:JS_Object) = Convertible_To_Columns.Value that.field_names (that.get _) ## PRIVATE -Convertible_To_Columns.from (that:Map) = +Convertible_To_Columns.from (that:Dictionary) = pairs = that.keys.map k-> [k.to_text, k] - field_map = pairs.if_not_error <| - Map.from_vector pairs error_on_duplicates=True . catch Illegal_Argument _-> + field_dict = pairs.if_not_error <| + Dictionary.from_vector pairs error_on_duplicates=True . 
catch Illegal_Argument _-> Error.throw (Illegal_Argument.Error "Cannot convert "+that.to_display_text+" to a set of columns, because its keys are duplicated when converted to text.") - Convertible_To_Columns.Value field_map.keys (k-> that.get (field_map.get k)) + Convertible_To_Columns.Value field_dict.keys (k-> that.get (field_dict.get k)) ## PRIVATE Convertible_To_Columns.from (that:Pair) = @@ -67,5 +67,5 @@ Convertible_To_Columns.from (that:XML_Element) = has_child_nodes = that_children.any (_.is_a XML_Element) children = if that_children.is_empty.not && has_child_nodes then [["Children", that_children]] else [] value = if that_children.is_empty.not && has_child_nodes.not then [["Value", that.text]] else [] - as_map = Map.from_vector (name + tags + children + value) - Convertible_To_Columns.from as_map + as_dict = Dictionary.from_vector (name + tags + children + value) + Convertible_To_Columns.from as_dict diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Conversions/Convertible_To_Rows.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Conversions/Convertible_To_Rows.enso index 8ac248100849..eae50d1d01a1 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Conversions/Convertible_To_Rows.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Conversions/Convertible_To_Rows.enso @@ -54,7 +54,7 @@ Convertible_To_Rows.from that:Pair = Convertible_To_Rows.Value that.length (that Convertible_To_Rows.from that:Date_Range = Convertible_To_Rows.Value that.length (that.get _) ## PRIVATE -Convertible_To_Rows.from that:Map = +Convertible_To_Rows.from that:Dictionary = vals = that.to_vector.map p-> Key_Value.Pair p.first p.second Convertible_To_Rows.Value vals.length (vals.get _) ["Key", "Value"] diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso index 84cfa30e6a73..c32b7fbeaf66 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Errors.enso @@ -755,7 +755,7 @@ type Truncated_Column_Names ## PRIVATE Indicates that some column names were truncated to fit the maximum length supported by the backend. - Warning (names_map : Map Text Text) + Warning (names_map : Dictionary Text Text) ## PRIVATE Get the original column names. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Expand_Objects_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Expand_Objects_Helpers.enso index d9e6743d7d39..775facd9f130 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Expand_Objects_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Expand_Objects_Helpers.enso @@ -97,47 +97,46 @@ create_table_from_objects (value : Convertible_To_Rows) (fields : Vector | Nothi Java_Problems.with_problem_aggregator Problem_Behavior.Report_Warning java_problem_aggregator-> preset_fields = fields.is_nothing.not - initial_map = if preset_fields.not then Map.empty else - Map.from_vector (fields.map field_name-> [field_name, Java_Exports.make_inferred_builder len java_problem_aggregator]) error_on_duplicates=False + initial_dict = if preset_fields.not then Dictionary.empty else + Dictionary.from_vector (fields.map field_name-> [field_name, Java_Exports.make_inferred_builder len java_problem_aggregator]) error_on_duplicates=False # This is used to ensure that field names in the resulting table are in the order they were encountered. 
discovered_field_names = Builder.new - builder_map = case preset_fields of + builder_dict = case preset_fields of # Just get the queried keys from each object. True -> 0.up_to len . each idx-> v = (value.getter idx).to Convertible_To_Columns - initial_map.each_with_key field_name-> builder-> + initial_dict.each_with_key field_name-> builder-> builder.append (v.getter field_name) - initial_map + initial_dict # In this mode we are discovering the key set as we go. False -> - 0.up_to len . fold initial_map current_map-> idx-> + 0.up_to len . fold initial_dict current_dict-> idx-> v = (value.getter idx).to Convertible_To_Columns - v.field_names.fold current_map inner_current_map-> f-> - existing_builder = inner_current_map.get f Nothing + v.field_names.fold current_dict inner_current_dict-> f-> + existing_builder = inner_current_dict.get f Nothing builder = existing_builder.if_nothing <| discovered_field_names.append f Java_Exports.make_inferred_builder len java_problem_aggregator builder.fillUpToSize idx builder.append (v.getter f) - new_map = if existing_builder.is_nothing.not then inner_current_map else - inner_current_map.insert f builder - new_map + if existing_builder.is_nothing.not then inner_current_dict else + inner_current_dict.insert f builder # Seal all builders and create columns - column_map = builder_map.map_with_key name-> builder-> + column_dict = builder_dict.map_with_key name-> builder-> builder.fillUpToSize len Column.from_storage name builder.seal - column_map.if_not_error <| + column_dict.if_not_error <| # Create a vector of columns, preserving the original order if it was specified. columns = case preset_fields of - True -> fields.distinct.map column_map.get + True -> fields.distinct.map column_dict.get False -> if discovered_field_names.is_empty then Error.throw (Illegal_Argument.Error "Unable to generate column names as all inputs had no fields.") else - discovered_field_names.to_vector.map column_map.get + discovered_field_names.to_vector.map column_dict.get Table.new columns diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Fan_Out.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Fan_Out.enso index 05a98904a69c..8664ce9d600b 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Fan_Out.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Fan_Out.enso @@ -172,7 +172,7 @@ fan_out_to_rows_and_columns_fixed input_storage function at_least_one_row:Boolea fan_out_to_rows_and_columns_dynamic : Any -> (Any -> Vector (Vector Any)) -> Boolean -> (Any -> Text) -> (Integer -> Any) -> Problem_Builder -> Vector fan_out_to_rows_and_columns_dynamic input_storage function at_least_one_row column_names_for_row column_builder problem_builder = # Accumulates the outputs of the function. - column_map = Ref.new Map.empty + column_dict = Ref.new Dictionary.empty output_column_builders = Builder.new # Guess that most of the time, we'll get at least one value for each input. @@ -180,7 +180,7 @@ fan_out_to_rows_and_columns_dynamic input_storage function at_least_one_row colu # Column Builder add function add_column n current_length = - column_map.put (column_map.get.insert n output_column_builders.length) + column_dict.put (column_dict.get.insert n output_column_builders.length) builder = column_builder num_input_rows builder.appendNulls current_length output_column_builders.append builder @@ -200,11 +200,11 @@ fan_out_to_rows_and_columns_dynamic input_storage function at_least_one_row colu # Add any missing columns. 
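# Editor's note: the discover-as-you-go fold that builds `builder_dict` above,
# reduced to a minimal hypothetical sketch: keep only the first value seen for
# each key, inserting when the key is new.
add_if_new dict key value =
    if dict.contains_key key then dict else dict.insert key value
names = ["a", "b", "a"]
first_seen = names.fold Dictionary.empty acc-> n-> add_if_new acc n n
first_seen.size    # 2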
row_column_names.each n-> - if column_map.get.contains_key n . not then + if column_dict.get.contains_key n . not then add_column n order_mask_positions.length # Append each group of values to the builder. - current_columns = column_map.get + current_columns = column_dict.get output_values.each row_unchecked-> row = uniform_length row_column_names.length row_unchecked problem_builder row_column_names.each_with_index i->n-> @@ -220,7 +220,7 @@ fan_out_to_rows_and_columns_dynamic input_storage function at_least_one_row colu repeat_each output_values.length <| order_mask_positions.append i # Build the output column - output_columns = column_map.get.to_vector.sort on=_.second . map pair-> + output_columns = column_dict.get.to_vector.sort on=_.second . map pair-> Column.from_storage pair.first (output_column_builders.at pair.second . seal) [output_columns, order_mask_positions] diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso index 9c8eef4fcb63..1f09b59273ed 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso @@ -50,7 +50,7 @@ prepare_columns_for_lookup base_table lookup_table key_columns_selector add_new_ problems_to_add = Builder.new key_columns.if_not_error <| lookup_table_key_columns.if_not_error <| - key_set = Set.from_vector key_columns + key_set = Hashset.from_vector key_columns my_updated_columns = base_table.columns.map on_problems=No_Wrap base_column-> base_column_name = base_column.name is_key = key_set.contains base_column_name @@ -67,7 +67,7 @@ prepare_columns_for_lookup base_table lookup_table key_columns_selector add_new_ Nothing -> Lookup_Column.Keep_Column base_column _ -> merge_columns base_column lookup_column allow_unmatched_rows - my_column_set = Set.from_vector base_table.column_names + my_column_set = Hashset.from_vector base_table.column_names extra_columns = lookup_table.columns.filter col-> is_new = my_column_set.contains col.name . not is_new diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Replace_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Replace_Helpers.enso index 6fb9591be9c3..8adeffa0313b 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Replace_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Replace_Helpers.enso @@ -10,7 +10,7 @@ import project.Value_Type.By_Type from project.Errors import Missing_Input_Columns, No_Such_Column, Non_Unique_Key, Unmatched_Rows_In_Lookup ## PRIVATE -replace : Table -> (Table | Map) -> (Text | Integer | By_Type | Vector (Text | Integer | By_Type)) -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup +replace : Table -> (Table | Dictionary) -> (Text | Integer | By_Type | Vector (Text | Integer | By_Type)) -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> Table ! 
Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text | Integer | By_Type)) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning = case columns of _ : Vector -> @@ -20,8 +20,8 @@ replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text _ -> column = columns case lookup_table of - _ : Map -> - if from_column.is_nothing.not || to_column.is_nothing.not then Error.throw (Illegal_Argument.Error "If a Map is provided as the lookup_table, then from_column and to_column should not also be specified.") else + _ : Dictionary -> + if from_column.is_nothing.not || to_column.is_nothing.not then Error.throw (Illegal_Argument.Error "If a Dictionary is provided as the lookup_table, then from_column and to_column should not also be specified.") else handle_empty_lookup_table ~action = if lookup_table.is_empty.not then action else ## If the lookup table is empty but the base table is @@ -33,7 +33,7 @@ replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text a_key_value = base_table.at column . at 0 Error.throw (Unmatched_Rows_In_Lookup.Error [a_key_value]) handle_empty_lookup_table <| - base_table.replace (base_table.make_table_from_map lookup_table 'from' 'to') column 'from' 'to' allow_unmatched_rows=allow_unmatched_rows on_problems=on_problems + base_table.replace (base_table.make_table_from_dictionary lookup_table 'from' 'to') column 'from' 'to' allow_unmatched_rows=allow_unmatched_rows on_problems=on_problems _ -> from_column_resolved = from_column.if_nothing 0 to_column_resolved = to_column.if_nothing 1 @@ -52,7 +52,7 @@ replace base_table lookup_table columns:(Text | Integer | By_Type | Vector (Text ## Create a lookup table with just `to_column` and `from_column`, renamed to match the base table's `column` and its duplicate, respectively. - lookup_table_renamed = selected_lookup_columns . rename_columns (Map.from_vector [[from_column_resolved, duplicate_key_column_name], [to_column_resolved, column]]) + lookup_table_renamed = selected_lookup_columns . rename_columns (Dictionary.from_vector [[from_column_resolved, duplicate_key_column_name], [to_column_resolved, column]]) warn_if_empty result_table = if lookup_table_renamed.row_count != 0 then result_table else Warning.attach (Empty_Error.Error "lookup_table") result_table diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso index 7665a8336d59..08059b39a9c1 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Split_Tokenize.enso @@ -91,7 +91,7 @@ regex_to_column_names pattern original_column_name = group_nums_to_names = pattern.group_nums_to_names unnamed_group_numbers = 1.up_to pattern.group_count . filter i-> group_nums_to_names.contains_key i . 
not - group_number_to_column_name_suffix = Map.from_vector <| unnamed_group_numbers.zip (0.up_to unnamed_group_numbers.length) + group_number_to_column_name_suffix = Dictionary.from_vector <| unnamed_group_numbers.zip (0.up_to unnamed_group_numbers.length) Vector.new (pattern.group_count-1) i-> # explicit groups start at 1 diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso index f97c754d241b..8331544f576d 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso @@ -112,7 +112,7 @@ type Table_Column_Helper remove_columns self (selectors:(Text | Integer | Regex | Vector)) (case_sensitivity:Case_Sensitivity) (error_on_missing_columns:Boolean) (on_problems:Problem_Behavior) = problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns selection = self.select_columns_helper selectors case_sensitivity False problem_builder - selected_names = Map.from_vector (selection.map column-> [column.name, True]) + selected_names = Dictionary.from_vector (selection.map column-> [column.name, True]) result = self.columns.filter column-> should_be_removed = selected_names.get column.name False should_be_removed.not @@ -160,7 +160,7 @@ type Table_Column_Helper problem_builder = Problem_Builder.new error_on_missing_columns=error_on_missing_columns selection = self.select_columns_helper selectors case_sensitivity True problem_builder problem_builder.attach_problems_before on_problems <| - selected_names = Map.from_vector (selection.map column-> [column.name, True]) + selected_names = Dictionary.from_vector (selection.map column-> [column.name, True]) other_columns = self.columns.filter column-> is_selected = selected_names.get column.name False is_selected.not @@ -203,8 +203,8 @@ type Table_Column_Helper _ -> [selectors] selected_columns = vector.map resolve_selector . flatten if reorder then selected_columns.distinct on=_.name else - map = Map.from_vector (selected_columns.map column-> [column.name, True]) error_on_duplicates=False - self.columns.filter column-> map.contains_key column.name + dict = Dictionary.from_vector (selected_columns.map column-> [column.name, True]) error_on_duplicates=False + self.columns.filter column-> dict.contains_key column.name ## PRIVATE A helper function which selects a single column from the table. @@ -289,14 +289,14 @@ type Table_Column_Helper A helper function which takes a Table like object and a Table for a name mapping and returns a new Table with the columns renamed according to the mapping. -read_name_map_from_table : Table -> Vector | Map ! Illegal_Argument -read_name_map_from_table column_map:Table = case column_map.column_count of +read_name_mapping_from_table : Table -> Vector | Dictionary ! 
Illegal_Argument +read_name_mapping_from_table column_map:Table = case column_map.column_count of 1 -> col = column_map.first_column if col.value_type.is_text then col.to_vector else Error.throw (Illegal_Argument.Error "Expected a table with one or two columns of text values.") 2 -> - if column_map.first_column.value_type.is_text && (column_map.at 1).value_type.is_text then Map.from_vector column_map.rows else + if column_map.first_column.value_type.is_text && (column_map.at 1).value_type.is_text then Dictionary.from_vector column_map.rows else Error.throw (Illegal_Argument.Error "Expected a table with one or two columns of text values.") _ -> Error.throw (Illegal_Argument.Error "Expected a table with one or two columns of text values.") @@ -322,9 +322,9 @@ read_name_map_from_table column_map:Table = case column_map.column_count of operation. By default, a warning is issued, but the operation proceeds. If set to `Report_Error`, the operation fails with a dataflow error. If set to `Ignore`, the operation proceeds without errors or warnings. -rename_columns : Column_Naming_Helper -> Vector -> Map (Text | Integer | Regex) Text | Vector Text -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Map Text Text -rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector) (mapping:(Map | Vector)) (case_sensitivity:Case_Sensitivity) (error_on_missing_columns:Boolean) (on_problems:Problem_Behavior) = - ## Convert Vector of Pairs to Map +rename_columns : Column_Naming_Helper -> Vector -> Dictionary (Text | Integer | Regex) Text | Vector Text -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Dictionary Text Text +rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector) (mapping:(Dictionary | Vector)) (case_sensitivity:Case_Sensitivity) (error_on_missing_columns:Boolean) (on_problems:Problem_Behavior) = + ## Convert Vector of Pairs to Dictionary is_vec_pairs = mapping.is_a Vector && mapping.length > 0 && (mapping.first.is_a Text . not) case is_vec_pairs of True -> @@ -333,8 +333,8 @@ rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector) is_valid_key k = k.is_a Integer || k.is_a Text || k.is_a Regex all_pairs = mapping.all p-> (is_valid_row p) && p.length == 2 && (is_valid_key p.first) && p.second.is_a Text if all_pairs.not then Error.throw (Illegal_Argument.Error "mapping is not a Vector of old name to new name.") else - ## Attempt to treat as Map - map = Map.from_vector mapping error_on_duplicates=False + ## Attempt to treat as Dictionary + map = Dictionary.from_vector mapping error_on_duplicates=False if map.length == mapping.length then rename_columns naming_helper internal_columns map case_sensitivity error_on_missing_columns on_problems else duplicates = mapping.duplicates on=_.first . map p->p.first.to_text . distinct duplicate_text = if duplicates.length < 5 then duplicates.to_vector . join ", " else @@ -356,7 +356,7 @@ rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector) problem_builder.report_other_warning (Too_Many_Column_Names_Provided.Error (vec.drop (..First col_count))) vec.take (..First col_count) internal_columns.take good_names.length . 
zip good_names - _ : Map -> + _ : Dictionary -> resolve_rename selector replacement = case selector of ix : Integer -> if is_index_valid internal_columns.length ix then [Pair.new (internal_columns.at ix) replacement] else problem_builder.report_oob_indices [ix] @@ -378,7 +378,7 @@ rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector) naming_helper.validate_many_column_names all_new_names <| ## Resolves actual new names for renamed columns. If a column received ambiguous new names, an error is raised. - name_map = columns_with_new_names.fold Map.empty current-> pair-> + name_dict = columns_with_new_names.fold Dictionary.empty current-> pair-> old_name = pair.first.name case current.contains_key old_name of False -> current.insert old_name pair.second @@ -387,12 +387,12 @@ rename_columns (naming_helper : Column_Naming_Helper) (internal_columns:Vector) Error.throw (Ambiguous_Column_Rename.Error old_name new_names) ## Renamed names take precedence, so we first deduplicate those. - resolved_name_map = name_map.map unique.make_unique + resolved_name_dict = name_dict.map unique.make_unique ## Then we ensure that the names of not-renamed columns are also unique and return the effective column names for each column. - not_renamed = internal_columns.filter c-> resolved_name_map.contains_key c.name . not - new_column_names = not_renamed.fold resolved_name_map current-> column-> + not_renamed = internal_columns.filter c-> resolved_name_dict.contains_key c.name . not + new_column_names = not_renamed.fold resolved_name_dict current-> column-> new_name = unique.make_unique column.name current.insert column.name new_name @@ -443,7 +443,7 @@ select_indices_reordering vector indices = The elements are returned in the same order as they appeared in the original vector. select_indices_preserving_order vector indices = - indices_to_keep = Map.from_vector (indices.map i-> [i, True]) + indices_to_keep = Dictionary.from_vector (indices.map i-> [i, True]) vector.filter_with_index ix-> _-> indices_to_keep.get ix False diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Unique_Name_Strategy.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Unique_Name_Strategy.enso index 13f2320a539b..16ac675242e8 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Unique_Name_Strategy.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Unique_Name_Strategy.enso @@ -98,10 +98,10 @@ type Unique_Name_Strategy ## PRIVATE A mapping of original names to their truncated counterpart. - truncated_names : Map Text Text + truncated_names : Dictionary Text Text truncated_names self = entries = Vector.from_polyglot_array self.deduplicator.getTruncatedNames - Map.from_vector <| entries.map e-> [e.getLeft, e.getRight] + Dictionary.from_vector <| entries.map e-> [e.getLeft, e.getRight] ## PRIVATE ADVANCED diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Match_Columns.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Match_Columns.enso index b240105bf792..7cab62bcdcb2 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Match_Columns.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Match_Columns.enso @@ -142,7 +142,7 @@ type Column_Set ## PRIVATE Returns a map indicating in how many tables did a column with a given name appear. 
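# Editor's note: a minimal sketch of the counting fold that
# `find_column_counts` below performs over tables, reduced to plain names.
count_occurrences names = names.fold Dictionary.empty counts-> name->
    counts.insert name (counts.get name 0 + 1)
count_occurrences ["a", "b", "a"] . get "a" 0    # 2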
find_column_counts tables = - tables.fold Map.empty current->table-> + tables.fold Dictionary.empty current->table-> table.columns.fold current counts-> column-> name=column.name new_count = counts.get name 0 + 1 @@ -153,7 +153,7 @@ find_column_counts tables = starting from the first table. distinct_columns_in_appearance_order tables = Vector.build names_builder-> - tables.fold Map.empty current-> table-> + tables.fold Dictionary.empty current-> table-> table.columns.fold current seen_names-> column-> name = column.name if seen_names.contains_key name then seen_names else diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Row.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Row.enso index 041b8ab03f5a..1fec02f336c6 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Row.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Row.enso @@ -53,11 +53,11 @@ type Row ## GROUP Standard.Base.Conversions ICON convert - Gets the row as a Map. - to_map : Map - to_map self = + Gets the row as a Dictionary. + to_dictionary : Dictionary + to_dictionary self = pairs = self.table.column_names.map n-> [n, self.get n] - Map.from_vector pairs + Dictionary.from_vector pairs ## PRIVATE Converts this row into a JS_Object. diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso index ddd7f5ec1e98..e3df2077656f 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Table.enso @@ -680,17 +680,17 @@ type Table > Example Rename the "Alpha" column to "Beta" - table.rename_columns (Map.from_vector [["Alpha", "Beta"]]) + table.rename_columns (Dictionary.from_vector [["Alpha", "Beta"]]) > Example Rename the last column to "LastColumn" - table.rename_columns (Map.from_vector [[-1, "LastColumn"]]) + table.rename_columns (Dictionary.from_vector [[-1, "LastColumn"]]) > Example Rename the "Alpha" column to "Beta" and last column to "LastColumn" - table.rename_columns (Map.from_vector [["Alpha", "Beta"], [-1, "LastColumn"]]) + table.rename_columns (Dictionary.from_vector [["Alpha", "Beta"], [-1, "LastColumn"]]) > Example Rename the first column to "FirstColumn" @@ -705,12 +705,12 @@ type Table > Example For all columns starting with the prefix `name=`, replace it with `key:`. - table.rename_columns (Map.from_vector [["name=(.*)".to_regex, "key:$1"]]) + table.rename_columns (Dictionary.from_vector [["name=(.*)".to_regex, "key:$1"]]) @column_map Widget_Helpers.make_rename_name_vector_selector - rename_columns : Map (Text | Integer | Regex) Text | Vector Text | Vector Vector -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Column_Names | Duplicate_Output_Column_Names - rename_columns self (column_map:(Table | Map | Vector)=["Column"]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) = case column_map of + rename_columns : Table | Dictionary (Text | Integer | Regex) Text | Vector Text | Vector Vector -> Case_Sensitivity -> Boolean -> Problem_Behavior -> Table ! 
Missing_Input_Columns | Ambiguous_Column_Rename | Too_Many_Column_Names_Provided | Invalid_Column_Names | Duplicate_Output_Column_Names + rename_columns self (column_map:(Table | Dictionary | Vector)=["Column"]) (case_sensitivity:Case_Sensitivity=..Default) (error_on_missing_columns:Boolean=True) (on_problems:Problem_Behavior=..Report_Warning) = case column_map of _ : Table -> - resolved = Table_Helpers.read_name_map_from_table column_map + resolved = Table_Helpers.read_name_mapping_from_table column_map self.rename_columns resolved case_sensitivity error_on_missing_columns on_problems _ -> new_names = Table_Helpers.rename_columns self.column_naming_helper self.columns column_map case_sensitivity error_on_missing_columns on_problems @@ -1123,9 +1123,9 @@ type Table no_columns_problem_behavior = case error_on_missing_columns of True -> Problem_Behavior.Report_Error False -> on_problems - no_columns_problem_behavior.attach_problem_before No_Input_Columns_Selected Map.empty + no_columns_problem_behavior.attach_problem_before No_Input_Columns_Selected Dictionary.empty False -> - Map.from_vector <| selected_columns.map c-> [c.name, True] + Dictionary.from_vector <| selected_columns.map c-> [c.name, True] new_columns = self.columns.map on_problems=No_Wrap column-> if selected_column_names.contains_key column.name . not then column else Value_Type.expect_text column <| @@ -1222,9 +1222,9 @@ type Table no_columns_problem_behavior = case error_on_missing_columns of True -> Problem_Behavior.Report_Error False -> on_problems - no_columns_problem_behavior.attach_problem_before No_Input_Columns_Selected Map.empty + no_columns_problem_behavior.attach_problem_before No_Input_Columns_Selected Dictionary.empty False -> - Map.from_vector <| selected_columns.map c-> [c.name, True] + Dictionary.from_vector <| selected_columns.map c-> [c.name, True] new_columns = self.columns.map column-> if selected_column_names.contains_key column.name . not then column else column.format format locale @@ -1887,22 +1887,22 @@ type Table Warning.set result [] ## PRIVATE - A helper that creates a two-column table from a Map. + A helper that creates a two-column table from a Dictionary. - The keys of the `Map` become the first column, with name - `key_column_name`, and the values of the `Map` become the second column, - with name `value_column_name`. + The keys of the `Dictionary` become the first column, with name + `key_column_name`, and the values become the second column, with name + `value_column_name`. - For the in-memory database, the `Map` can be empty. For the database - backends, it must not be empty. + For the in-memory database, the `Dictionary` can be empty. For the + database backends, it must not be empty. Arguments: - - map: The `Map` to create the table from. + - dict: The `Dictionary` to create the table from. - key_column_name: The name to use for the first column. - value_column_name: The name to use for the second column. 
- make_table_from_map : Map Any Any -> Text -> Text -> Table - make_table_from_map self map key_column_name value_column_name = - keys_and_values = map.to_vector + make_table_from_dictionary : Dictionary Any Any -> Text -> Text -> Table + make_table_from_dictionary self dict key_column_name value_column_name = + keys_and_values = dict.to_vector self.make_table_from_vectors [keys_and_values.map .first, keys_and_values.map .second] [key_column_name, value_column_name] ## PRIVATE @@ -2283,8 +2283,8 @@ type Table @columns (Widget_Helpers.make_column_name_multi_selector add_regex=True add_by_type=True) @from_column Widget.Text_Input @to_column Widget.Text_Input - replace : (Table | Map) -> (Text | Integer | Regex | By_Type | Vector (Text | Integer | Regex | By_Type)) -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup - replace self lookup_table:(Table | Map) columns:(Text | Integer | Regex | By_Type | Vector (Text | Integer | Regex | By_Type)) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning = + replace : (Table | Dictionary) -> (Text | Integer | Regex | By_Type | Vector (Text | Integer | Regex | By_Type)) -> (Text | Integer | Nothing) -> (Text | Integer | Nothing) -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup + replace self lookup_table:(Table | Dictionary) columns:(Text | Integer | Regex | By_Type | Vector (Text | Integer | Regex | By_Type)) from_column:(Text | Integer | Nothing)=Nothing to_column:(Text | Integer | Nothing)=Nothing allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=..Report_Warning = Replace_Helpers.replace self lookup_table columns from_column to_column allow_unmatched_rows on_problems ## ALIAS join by row position @@ -2566,7 +2566,7 @@ type Table id_columns = columns_helper.select_columns_helper key_columns Case_Sensitivity.Default False problem_builder - selected_names = Map.from_vector (id_columns.map column-> [column.name, True]) + selected_names = Dictionary.from_vector (id_columns.map column-> [column.name, True]) data = columns_helper.internal_columns.filter column->(selected_names.get column.name False . not) java_data = data.map c->c.java_column @@ -3183,13 +3183,6 @@ concat_columns column_set all_tables result_type result_row_count needs_cast on_ sealed_storage = storage_builder.seal Column.from_storage column_set.name sealed_storage -## PRIVATE - A helper that creates a two-column table from a map. -map_to_lookup_table : Map Any Any -> Text -> Text -> Table -map_to_lookup_table map key_column value_column = - keys_and_values = map.to_vector - Table.new [[key_column, keys_and_values.map .first], [value_column, keys_and_values.map .second]] - ## PRIVATE Conversion method to a Table from a Column. 
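# Editor's note: a hedged sketch of the new Dictionary-accepting `replace`
# signature above; the table contents and lookup values are made up.
from Standard.Table import Table
codes = Table.new [["code", ["a", "b", "a"]]]
lookup = Dictionary.from_vector [["a", "Apple"], ["b", "Banana"]]
replaced = codes.replace lookup "code"    # "code" column now holds fruit names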
Table.from (that:Column) = that.to_table diff --git a/distribution/lib/Standard/Test/0.0.0-dev/src/Test_Reporter.enso b/distribution/lib/Standard/Test/0.0.0-dev/src/Test_Reporter.enso index 13f31c6cfc4e..732b4d283351 100644 --- a/distribution/lib/Standard/Test/0.0.0-dev/src/Test_Reporter.enso +++ b/distribution/lib/Standard/Test/0.0.0-dev/src/Test_Reporter.enso @@ -98,7 +98,7 @@ print_single_result (test_result : Test_Result) (config : Suite_Config) = print_report : Vector Test_Result -> Suite_Config -> (StringBuilder | Nothing) -> Nothing print_report (test_results : Vector Test_Result) (config : Suite_Config) (builder : (StringBuilder | Nothing)) = distinct_group_names = test_results.map (_.group_name) . distinct - results_per_group = distinct_group_names.fold Map.empty acc-> group_name-> + results_per_group = distinct_group_names.fold Dictionary.empty acc-> group_name-> group_results = test_results.filter res-> res.group_name == group_name assert (group_results.length > 0) diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso index 1ac1cc7bfc01..b3d05fa64891 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Scatter_Plot.enso @@ -173,7 +173,7 @@ limit_data limit data = case limit of bounds = case data.fold_with_index (Extreme.Value first first first first) update_extreme of Extreme.Value min_x max_x min_y max_y -> [min_x, max_x, min_y, max_y] _ -> [] - extreme = Map.from_vector bounds error_on_duplicates=False . values + extreme = Dictionary.from_vector bounds error_on_duplicates=False . values if limit <= extreme.length then extreme.take (..First limit) else extreme + data.take (..Sample (limit - extreme.length)) diff --git a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso index fa07daa5bb87..0e82180fc97d 100644 --- a/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso +++ b/distribution/lib/Standard/Visualization/0.0.0-dev/src/Table/Visualization.enso @@ -27,9 +27,9 @@ prepare_visualization y max_rows=1000 = result = case x of _ : Vector -> make_json_for_vector x max_rows _ : Array -> prepare_visualization x.to_vector max_rows - _ : Map -> make_json_for_map x max_rows + _ : Dictionary -> make_json_for_dictionary x max_rows _ : JS_Object -> make_json_for_js_object x max_rows - _ : Row -> make_json_for_map x.to_map max_rows "column" + _ : Row -> make_json_for_dictionary x.to_dictionary max_rows "column" _ : Column -> prepare_visualization x.to_table max_rows _ : Table -> dataframe = x.take max_rows @@ -98,7 +98,7 @@ make_json_for_object_matrix current vector idx=0 = if idx == vector.length then _ : Date_Time -> False _ : Duration -> False _ : Period -> False - _ : Map -> + _ : Dictionary -> pairs = row.keys.map k-> [k.to_text, make_json_for_value (row.get k)] JS_Object.from_pairs pairs _ : Row -> @@ -115,13 +115,13 @@ make_json_for_object_matrix current vector idx=0 = if idx == vector.length then @Tail_Call make_json_for_object_matrix current vector idx+1 ## PRIVATE - Render Map to JSON -make_json_for_map : Map -> Integer -> Text -> JS_Object -make_json_for_map map max_items key_name="key" = + Render Dictionary to JSON +make_json_for_dictionary : Dictionary -> Integer -> Text -> JS_Object +make_json_for_dictionary dict max_items key_name="key" = header = 
["header", [key_name, "value"]] - all_rows = ["all_rows_count", map.size] - map_vector = Warning.clear (map.to_vector.take max_items) - mapped = map_vector . map p-> [p.first.to_text, make_json_for_value p.second] + all_rows = ["all_rows_count", dict.size] + as_vector = Warning.clear (dict.to_vector.take max_items) + mapped = as_vector . map p-> [p.first.to_text, make_json_for_value p.second] data = ["data", [mapped.map .first, mapped.map .second]] JS_Object.from_pairs [header, data, all_rows, ["type", "Map"]] @@ -187,7 +187,7 @@ make_json_for_value val level=0 = case val of prepared = if val.length > 5 then truncated + ["… " + (val.length - 5).to_text+ " items"] else truncated "[" + (prepared.join ", ") + "]" _ : Array -> make_json_for_value val.to_vector level - _ : Map -> + _ : Dictionary -> if level != 0 then "{… "+val.size.to_text+" items}" else truncated = val.keys.take 5 . map k-> k.to_text + ": " + (make_json_for_value (val.get k) level+1).to_text prepared = if val.length > 5 then truncated + ["… " + (val.length - 5).to_text+ " items"] else truncated diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ConversionMethodTests.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ConversionMethodTests.java index 2f560f39850f..ca317701e941 100644 --- a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ConversionMethodTests.java +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ConversionMethodTests.java @@ -64,12 +64,12 @@ public void testDispatchOnHostMap() { String src = """ polyglot java import java.util.Map as Java_Map - import Standard.Base.Data.Map.Map + import Standard.Base.Data.Dictionary.Dictionary type Foo Mk_Foo data - Foo.from (that:Map) = Foo.Mk_Foo that + Foo.from (that:Dictionary) = Foo.Mk_Foo that main = jmap = Java_Map.of "A" 1 "B" 2 "C" 3 @@ -83,7 +83,7 @@ public void testDispatchOnHostMap() { public void testDispatchOnJSMap() { String src = """ - import Standard.Base.Data.Map.Map + import Standard.Base.Data.Dictionary.Dictionary foreign js js_map = ''' let m = new Map() @@ -94,7 +94,7 @@ public void testDispatchOnJSMap() { type Foo Mk_Foo data - Foo.from (that:Map) = Foo.Mk_Foo that + Foo.from (that:Dictionary) = Foo.Mk_Foo that main = Foo.from js_map . data . size diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/MetaObjectTest.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/MetaObjectTest.java index 95243782081b..621dce9a01b7 100644 --- a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/MetaObjectTest.java +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/MetaObjectTest.java @@ -285,66 +285,7 @@ public void compareQualifiedAndSimpleTypeName() throws Exception { .invokeMember(MethodNames.Module.EVAL_EXPRESSION, "sn"); var sb = new StringBuilder(); for (var v : g.allValues()) { - var simpleName = sn.execute(v).asString(); - if (v.isNumber()) { - var ok = - switch (simpleName) { - case "Integer", "Float" -> true; - default -> false; - }; - assertTrue("Unexpected simple name for number: " + simpleName, ok); - continue; - } - var meta = v.getMetaObject(); - var metaName = meta != null ? 
meta.getMetaSimpleName() : "null"; - if (!simpleName.equals(metaName)) { - if (v.isHostObject()) { - if (v.hasArrayElements()) { - assertEquals("Array", simpleName); - continue; - } - if (v.hasHashEntries()) { - assertEquals("Map", simpleName); - continue; - } - } - if (v.isString()) { - assertEquals("Text", simpleName); - continue; - } - if (v.isDuration()) { - assertEquals("Duration", simpleName); - continue; - } - if (v.isDate() && v.isTime()) { - assertEquals("Date_Time", simpleName); - continue; - } - if (v.isTimeZone()) { - assertEquals("Time_Zone", simpleName); - continue; - } - if (v.isDate()) { - assertEquals("Date", simpleName); - continue; - } - if (v.isTime()) { - assertEquals("Time_Of_Day", simpleName); - continue; - } - if (v.isNull()) { - assertEquals("Nothing", simpleName); - continue; - } - - sb.append("\n") - .append("Simple names shall be the same for ") - .append(v) - .append(" get_simple_type_name: ") - .append(simpleName) - .append(" getMetaSimpleName: ") - .append(metaName); - } + compareQualifiedNameOfValue(sn, v, sb); } if (!sb.isEmpty()) { var lines = sb.toString().lines().count() - 1; @@ -353,6 +294,77 @@ public void compareQualifiedAndSimpleTypeName() throws Exception { } } + private boolean compareQualifiedNameOfValue(Value sn, Value v, StringBuilder sb) { + var simpleName = sn.execute(v).asString(); + if (v.isNumber()) { + var ok = + switch (simpleName) { + case "Integer", "Float" -> true; + default -> false; + }; + assertTrue("Unexpected simple name for number: " + simpleName, ok); + return true; + } + var meta = v.getMetaObject(); + var metaName = meta != null ? meta.getMetaSimpleName() : "null"; + if (!simpleName.equals(metaName)) { + if (v.isHostObject()) { + if (v.hasArrayElements()) { + assertEquals("Array", simpleName); + return true; + } + if (v.hasHashEntries()) { + assertEquals("Dictionary", simpleName); + return true; + } + } + if (v.hasMembers() && v.getMember("__proto__") != null) { + if (v.hasHashEntries()) { + assertEquals("JavaScript hash map is called Map", "Map", metaName); + assertEquals( + "JavaScript hash map is seen as Dictionary by Enso", "Dictionary", simpleName); + return true; + } + } + if (v.isString()) { + assertEquals("Text", simpleName); + return true; + } + if (v.isDuration()) { + assertEquals("Duration", simpleName); + return true; + } + if (v.isDate() && v.isTime()) { + assertEquals("Date_Time", simpleName); + return true; + } + if (v.isTimeZone()) { + assertEquals("Time_Zone", simpleName); + return true; + } + if (v.isDate()) { + assertEquals("Date", simpleName); + return true; + } + if (v.isTime()) { + assertEquals("Time_Of_Day", simpleName); + return true; + } + if (v.isNull()) { + assertEquals("Nothing", simpleName); + return true; + } + sb.append("\n") + .append("Simple names shall be the same for ") + .append(v) + .append(" get_simple_type_name: ") + .append(simpleName) + .append(" getMetaSimpleName: ") + .append(metaName); + } + return false; + } + @Test public void compareQualifiedAndSimpleTypeNameForTypes() throws Exception { var g = generator(); diff --git a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ValuesGenerator.java b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ValuesGenerator.java index d457271924e8..1c41d6ddff5a 100644 --- a/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ValuesGenerator.java +++ b/engine/runtime-integration-tests/src/test/java/org/enso/interpreter/test/ValuesGenerator.java @@ -303,9 +303,13 @@ public Value 
typeVector() { } public Value typeMap() { - return v("typeMap", """ - import Standard.Base.Data.Map.Map - """, "Map").type(); + return v( + "typeMap", + """ + import Standard.Base.Data.Dictionary.Dictionary + """, + "Dictionary") + .type(); } public Value typeWarning() { @@ -679,28 +683,28 @@ public List maps() { if (languages.contains(Language.ENSO)) { var imports = """ - import Standard.Base.Data.Map.Map + import Standard.Base.Data.Dictionary.Dictionary import Standard.Base.Nothing.Nothing """; for (var expr : List.of( - "Map.empty", - "Map.singleton Nothing Nothing", - "Map.singleton Nothing 'my_value'", - "Map.singleton 'my_value' Nothing", - "Map.singleton 1 1", - "Map.singleton 'C' 3", - "Map.singleton 'C' 43", - "Map.empty.insert 'A' 10 . insert 'B' 20", + "Dictionary.empty", + "Dictionary.singleton Nothing Nothing", + "Dictionary.singleton Nothing 'my_value'", + "Dictionary.singleton 'my_value' Nothing", + "Dictionary.singleton 1 1", + "Dictionary.singleton 'C' 3", + "Dictionary.singleton 'C' 43", + "Dictionary.empty.insert 'A' 10 . insert 'B' 20", // ((int) 'A') + ((int) 'B') = 131 ; codePoint(131) = \203 - "Map.singleton '\203' 30", - "Map.singleton Map.empty 1", - "Map.singleton Map.empty Map.empty", - "Map.empty.insert 1 1 . insert 2 2", - "Map.empty.insert Nothing 'val' . insert 'key' 42", - "Map.empty.insert 'A' 1 . insert 'B' 2 . insert 'C' 3", - "Map.empty.insert 'C' 3 . insert 'B' 2 . insert 'A' 1")) { - collect.add(v("maps-" + expr, imports, expr, "Map").type()); + "Dictionary.singleton '\203' 30", + "Dictionary.singleton Dictionary.empty 1", + "Dictionary.singleton Dictionary.empty Dictionary.empty", + "Dictionary.empty.insert 1 1 . insert 2 2", + "Dictionary.empty.insert Nothing 'val' . insert 'key' 42", + "Dictionary.empty.insert 'A' 1 . insert 'B' 2 . insert 'C' 3", + "Dictionary.empty.insert 'C' 3 . insert 'B' 2 . 
insert 'A' 1")) { + collect.add(v("maps-" + expr, imports, expr, "Dictionary").type()); } } if (languages.contains(Language.JAVA)) { diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java index ba6a633674eb..dfcf357a1c3a 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeConversionNode.java @@ -358,7 +358,10 @@ Object doConvertMap( @Shared("conversionResolverNode") @Cached ConversionResolverNode conversionResolverNode) { Function function = conversionResolverNode.expectNonNull( - thatMap, extractType(self), EnsoContext.get(this).getBuiltins().map(), conversion); + thatMap, + extractType(self), + EnsoContext.get(this).getBuiltins().dictionary(), + conversion); return invokeFunctionNode.execute(function, frame, state, arguments); } diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java index 57e4b2ba9bb3..ead711103a77 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/InvokeMethodNode.java @@ -637,7 +637,7 @@ Object doConvertHashMap( @Shared("warnings") @CachedLibrary(limit = "10") WarningsLibrary warnings, @Shared("methodResolverNode") @Cached MethodResolverNode methodResolverNode) { var ctx = EnsoContext.get(this); - var hashMapType = ctx.getBuiltins().map(); + var hashMapType = ctx.getBuiltins().dictionary(); var function = methodResolverNode.expectNonNull(self, hashMapType, symbol); arguments[0] = self; return invokeFunctionNode.execute(function, frame, state, arguments); diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/builtin/Builtins.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/builtin/Builtins.java index 4955e2457869..7224e103e624 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/builtin/Builtins.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/builtin/Builtins.java @@ -104,7 +104,7 @@ public static class Debug { private final Builtin text; private final Builtin array; private final Builtin vector; - private final Builtin map; + private final Builtin dictionary; private final Builtin dataflowError; private final Builtin ref; private final Builtin managedResource; @@ -155,7 +155,7 @@ public Builtins(EnsoContext context) { text = builtins.get(Text.class); array = builtins.get(Array.class); vector = builtins.get(Vector.class); - map = builtins.get(org.enso.interpreter.node.expression.builtin.Map.class); + dictionary = builtins.get(org.enso.interpreter.node.expression.builtin.Dictionary.class); dataflowError = builtins.get(org.enso.interpreter.node.expression.builtin.Error.class); ref = builtins.get(Ref.class); managedResource = builtins.get(ManagedResource.class); @@ -691,8 +691,8 @@ public Type vector() { return vector.getType(); } - public Type map() { - return map.getType(); + public Type dictionary() { + return dictionary.getType(); } /** diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/EnsoHashMap.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/EnsoHashMap.java index 3598a6795dbc..076dbbebb0bf 100644 --- 
a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/EnsoHashMap.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/EnsoHashMap.java @@ -35,7 +35,7 @@ */ @ExportLibrary(TypesLibrary.class) @ExportLibrary(InteropLibrary.class) -@Builtin(stdlibName = "Standard.Base.Data.Map.Map", name = "Map") +@Builtin(stdlibName = "Standard.Base.Data.Dictionary.Dictionary", name = "Dictionary") public final class EnsoHashMap implements EnsoObject { private final EnsoHashMapBuilder mapBuilder; private final int generation; @@ -150,7 +150,7 @@ boolean hasType() { @ExportMessage(library = TypesLibrary.class) Type getType(@Bind("$node") Node node) { - return EnsoContext.get(node).getBuiltins().map(); + return EnsoContext.get(node).getBuiltins().dictionary(); } @ExportMessage @@ -160,7 +160,7 @@ boolean hasMetaObject() { @ExportMessage Type getMetaObject(@Bind("$node") Node node) { - return EnsoContext.get(node).getBuiltins().map(); + return EnsoContext.get(node).getBuiltins().dictionary(); } @ExportMessage diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapContainsKeyNode.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapContainsKeyNode.java index 05212ca524b6..98a9fe395163 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapContainsKeyNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapContainsKeyNode.java @@ -9,7 +9,7 @@ import org.enso.interpreter.dsl.BuiltinMethod; @BuiltinMethod( - type = "Map", + type = "Dictionary", name = "contains_key", description = """ diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapGetNode.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapGetNode.java index 1c72fa9f7930..414bdae224c4 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapGetNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapGetNode.java @@ -19,11 +19,11 @@ import org.enso.interpreter.runtime.state.State; @BuiltinMethod( - type = "Map", + type = "Dictionary", name = "get_builtin", description = """ - Gets a value from the map on the specified key, or the given default. + Gets a value from the dictionary on the specified key, or the given default. 
""", autoRegister = false, inlineable = true) diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapInsertNode.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapInsertNode.java index 6f64ed98b2cb..4111414eec02 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapInsertNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapInsertNode.java @@ -18,7 +18,7 @@ import org.enso.interpreter.runtime.error.PanicException; @BuiltinMethod( - type = "Map", + type = "Dictionary", name = "insert", description = """ diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapRemoveNode.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapRemoveNode.java index 53a311a09ac9..17d7705b5988 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapRemoveNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapRemoveNode.java @@ -20,7 +20,7 @@ import org.enso.interpreter.runtime.error.DataflowError; @BuiltinMethod( - type = "Map", + type = "Dictionary", name = "remove_builtin", description = """ Removes an entry from this map specified with the key. diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapSizeNode.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapSizeNode.java index bfed0dca68cf..4cf6f794d8a3 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapSizeNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapSizeNode.java @@ -12,7 +12,7 @@ import org.enso.interpreter.runtime.error.PanicException; @BuiltinMethod( - type = "Map", + type = "Dictionary", name = "size", description = "Returns the number of entries in this hash map", autoRegister = false) diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapToTextNode.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapToTextNode.java index ec00f3487691..694554819ceb 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapToTextNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapToTextNode.java @@ -13,7 +13,7 @@ import org.enso.interpreter.runtime.EnsoContext; @BuiltinMethod( - type = "Map", + type = "Dictionary", name = "to_text", description = """ Returns text representation of this hash map diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapToVectorNode.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapToVectorNode.java index 4c3c7bd57c3a..993f1739493f 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapToVectorNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/data/hash/HashMapToVectorNode.java @@ -18,7 +18,7 @@ import org.enso.interpreter.runtime.error.PanicException; @BuiltinMethod( - type = "Map", + type = "Dictionary", name = "to_vector", description = """ diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/library/dispatch/TypeOfNode.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/library/dispatch/TypeOfNode.java index b3d57a56aa62..a8393c9a279c 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/library/dispatch/TypeOfNode.java +++ 
b/engine/runtime/src/main/java/org/enso/interpreter/runtime/library/dispatch/TypeOfNode.java @@ -114,7 +114,7 @@ Type doPolyglotArray(Interop type, Object value) { @Specialization(guards = {"type.isMap()"}) Type doPolygotMap(Interop type, Object value) { - return EnsoContext.get(this).getBuiltins().map(); + return EnsoContext.get(this).getBuiltins().dictionary(); } @Specialization(guards = {"type.isString()"}) diff --git a/test/AWS_Tests/src/S3_Spec.enso b/test/AWS_Tests/src/S3_Spec.enso index 5c771c8d1141..da1369bac9e9 100644 --- a/test/AWS_Tests/src/S3_Spec.enso +++ b/test/AWS_Tests/src/S3_Spec.enso @@ -169,7 +169,7 @@ add_specs suite_builder = suite_builder.group "S3.head (bucket)" pending=api_pending group_builder-> group_builder.specify "should be able to head a bucket" <| - S3.head bucket_name credentials=test_credentials . should_be_a Map + S3.head bucket_name credentials=test_credentials . should_be_a Dictionary S3.head not_a_bucket_name credentials=test_credentials . should_fail_with S3_Bucket_Not_Found suite_builder.group "S3.read_bucket" pending=api_pending group_builder-> diff --git a/test/Base_Tests/src/Data/Dictionary_Spec.enso b/test/Base_Tests/src/Data/Dictionary_Spec.enso new file mode 100644 index 000000000000..a5bc9fae4fdd --- /dev/null +++ b/test/Base_Tests/src/Data/Dictionary_Spec.enso @@ -0,0 +1,633 @@ +from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Errors.No_Such_Key.No_Such_Key + +from Standard.Test import all + +polyglot java import java.util.Map as JavaMap +polyglot java import org.enso.base.file_system.File_Utils + +## Type that violates reflexivity +type My_Nan + Value comment:Text + +type My_Nan_Comparator + compare _ _ = Nothing + hash _ = 0 + +Comparable.from (_:My_Nan) = My_Nan_Comparator + +type My_Key + Value hash_code:Integer value:Text idx:Integer + +type My_Key_Comparator + # Comparison ignores idx field + compare x y = + if x.hash_code != y.hash_code then Nothing else + if x.value == y.value then Ordering.Equal else Nothing + + hash x = x.hash_code + +Comparable.from (_:My_Key) = My_Key_Comparator + + +foreign js js_str str = """ + return new String(str) + +foreign js js_null = """ + return null + +foreign js js_empty_dict = """ + return new Map() + +foreign python py_empty_dict = """ + return {} + +foreign js js_dict_from_vec vec = """ + dict = new Map() + for (let i = 0; i < vec.length; i += 2) { + dict.set(vec[i], vec[i+1]) + } + return dict + +foreign python py_none = """ + return None + +foreign python py_dict_from_vec vec = """ + d = {} + for i in range(0, len(vec), 2): + d[vec[i]] = vec[i + 1] + return d + +foreign python py_dict_from_map map = """ + d = dict() + for key in map.__iter__(): + d[key] = map[key] + return d + +foreign python py_vec_from_map map = """ + vec = [] + for key in map.__iter__(): + value = map[key] + vec.append([key, value]) + return vec + +# Should throw error - updating immutable map from Enso +foreign python py_update_dict map key val = """ + map[key] = val + +foreign python py_wrapper obj = """ + class MyClass: + def __init__(self, obj): + self.data = obj + return MyClass(obj) + +pending_python_missing = if Polyglot.is_language_installed "python" then Nothing else "Can't run Python tests, Python is not installed." + +type Child + Value data + +type Parent + Value child + +type GrandParent + Value parent + +add_specs suite_builder = + languages = Vector.build builder-> + builder . append ["Enso", _-> Dictionary.empty, Nothing] + builder . 
append ["Java", _-> JavaMap.of, Nothing] + builder . append ["JavaScript", _-> js_empty_dict, Nothing] + builder . append ["Python", _-> py_empty_dict, pending_python_missing] + languages.each entry-> + lang = entry.get 0 + empty_dict_fn = entry.get 1 + pending = entry.get 2 + add_common_specs suite_builder lang pending empty_dict_fn + + suite_builder.group "Enso Dictionaries" group_builder-> + group_builder.specify "should use proper hash code for keys" <| + single_key_dict key = Dictionary.singleton key 42 + grand_parent_1 = GrandParent.Value (Parent.Value (Child.Value 2)) + grand_parent_2 = GrandParent.Value (Parent.Value (Child.Value 2.0)) + + (single_key_dict 2 . at 2.0) . should_equal 42 + (single_key_dict -2 . at -2.0) . should_equal 42 + (single_key_dict 'ś' . at 's\u0301') . should_equal 42 + (single_key_dict 's\u0301' . at 'ś') . should_equal 42 + (single_key_dict 'éabc' . at 'e\u0301abc') . should_equal 42 + (single_key_dict 'e\u0301abc' . at 'éabc') . should_equal 42 + (single_key_dict grand_parent_1 . at grand_parent_2) . should_equal 42 + (single_key_dict (Json.parse '{"a": 1}') . at (Json.parse '{"a": 1}')) . should_equal 42 + (single_key_dict (Child.Value 1) . at (Child.Value 1.0)) . should_equal 42 + + + group_builder.specify "should support another Dictionary with NaN keys as key" <| + Dictionary.singleton (Dictionary.singleton Number.nan 1) 42 . size . should_equal 1 + Dictionary.singleton (Dictionary.singleton Number.nan 1) 42 . keys . at 0 . keys . to_text . should_equal "[NaN]" + Dictionary.singleton (Dictionary.singleton Number.nan 1) 42 . keys . at 0 . get Number.nan . should_equal 1 + Dictionary.singleton (Dictionary.singleton Number.nan 1) 42 . at (Dictionary.singleton Number.nan 1) . should_equal 42 + + group_builder.specify "should support atoms with custom comparators that violate reflexivity as keys" <| + k = My_Nan.Value "foo" + k2 = My_Nan.Value "foo" + (k==k).should_be_true + (k==k2).should_be_false + Meta.is_same_object k k2 . should_be_false + Meta.is_same_object k k . should_be_true + m = Dictionary.singleton k 10 + m.contains_key k . should_be_true + m.get k . should_equal 10 + m.contains_key k2 . should_be_false + + m2 = m.insert k2 20 + m2.get k . should_equal 10 + m2.get k2 . should_equal 20 + m2.size . should_equal 2 + + m3 = m2.insert k 30 + m3.size . should_equal 2 + m3.get k . should_equal 30 + + group_builder.specify "should support atom with custom comparators with complicated hash method" <| + keys = 0.up_to 500 . map ix-> + value = ["A", "B", "C", "D", "E"].at (ix % 5) + hash_code = Comparable.from value . hash value + My_Key.Value hash_code value ix + distinct_keys = keys.fold Dictionary.empty acc_dict-> + item-> + acc_dict.insert item True + distinct_keys.size . should_equal 5 + distinct_key_values = keys.map (_.value) . fold Dictionary.empty acc_dict-> + item-> + acc_dict.insert item True + distinct_key_values.size . should_equal 5 + + group_builder.specify "should not drop warnings from keys" <| + key = Warning.attach "my_warn" "my_key" + dict = Dictionary.singleton key 42 + (Warning.get_all (dict.keys.at 0)).length . should_equal 1 + + group_builder.specify "should not drop warnings from values" <| + val = Warning.attach "my_warn" "my_val" + dict = Dictionary.singleton 42 val + (Warning.get_all (dict.values.at 0)).length . should_equal 1 + + group_builder.specify "should convert the whole Dictionary to a vector" <| + m = Dictionary.empty . insert 0 0 . insert 3 -5 . insert 1 2 + m.to_vector.sort on=_.first . 
should_equal [[0, 0], [1, 2], [3, -5]]
+
+        group_builder.specify "should allow building the Dictionary from two vectors" <|
+            expected = Dictionary.empty . insert 0 0 . insert 3 -5 . insert 1 2
+            Dictionary.from_keys_and_values [0, 3, 1] [0, -5, 2] . should_equal expected
+
+        group_builder.specify "should allow building the Dictionary from vector-like things" <|
+            expected = Dictionary.empty . insert 0 0 . insert 1 -5 . insert 2 2
+            Dictionary.from_keys_and_values (0.up_to 3) [0, -5, 2] . should_equal expected
+
+        group_builder.specify "should not allow building with duplicate keys unless explicitly allowed" <|
+            expected = Dictionary.empty . insert 0 0 . insert 3 -5 . insert 1 2
+            Dictionary.from_keys_and_values [0, 3, 1, 0] [3, -5, 2, 0] . should_fail_with Illegal_Argument
+            Dictionary.from_keys_and_values [0, 3, 1, 0] [3, -5, 2, 0] error_on_duplicates=False . should_equal expected
+
+        group_builder.specify "should not allow different length vectors when building" <|
+            Dictionary.from_keys_and_values [0, 3, 1] [3, -5, 2, 0] . should_fail_with Illegal_Argument
+
+        group_builder.specify "should allow building the Dictionary from a vector" <|
+            expected = Dictionary.empty . insert 0 0 . insert 3 -5 . insert 1 2
+            vec = [[0, 0], [3, -5], [1, 2]]
+            Dictionary.from_vector vec . should_equal expected
+
+        group_builder.specify "should fail when building the Dictionary from wrong vector" <|
+            Dictionary.from_vector [["A", 1, "B", 2]] . should_fail_with Illegal_Argument
+
+        group_builder.specify "should not allow duplicates when building the Dictionary from a vector, unless explicitly allowed" <|
+            vec = [[0, 0], [3, -5], [1, 2], [0, 1]]
+            d1 = Dictionary.from_vector vec
+            d1.should_fail_with Illegal_Argument
+            d1.catch.message . should_equal "`Dictionary.from_vector` encountered duplicate key: 0"
+
+            d2 = Dictionary.from_vector vec error_on_duplicates=False
+            Problems.assume_no_problems d2
+            d2.get 0 . should_equal 1
+            d2.get 3 . should_equal -5
+
+        group_builder.specify "should disallow duplicate keys when transforming the Dictionary" <|
+            d = Dictionary.from_vector [[1, 2], [11, 3]]
+            d2 = d.transform (k -> v -> [k % 10, v*2])
+            d2.should_fail_with Illegal_Argument
+            d2.catch.message . should_equal "`Dictionary.transform` encountered duplicate key: 1"
+
+        group_builder.specify "should allow mapping over values" <|
+            d = Dictionary.empty . insert 1 2 . insert 2 4
+            expected = Dictionary.empty . insert 1 4 . insert 2 8
+            d.map (v -> v*2) . should_equal expected
+
+        group_builder.specify "should allow mapping over keys" <|
+            d = Dictionary.empty . insert 1 2 . insert 2 4
+            expected = Dictionary.empty . insert 2 2 . insert 4 4
+            d.map_keys (k -> k*2) . should_equal expected
+
+        group_builder.specify "should allow mapping with keys" <|
+            d = Dictionary.empty . insert 1 2 . insert 2 4
+            expected = Dictionary.empty . insert 1 3 . insert 2 6
+            d.map_with_key (k -> v -> k + v) . should_equal expected
+
+        group_builder.specify "should allow iterating over each value" <|
+            d = Dictionary.empty . insert 1 2 . insert 2 4
+            expected_vec = [2, 4]
+            vec = Vector.build builder->
+                d.each (v -> builder.append v)
+            vec . should_equal expected_vec
+
+        group_builder.specify "should allow iterating over each key-value pair" <|
+            d = Dictionary.empty . insert 1 2 . insert 2 4
+            expected_vec = [3, 6]
+            vec = Vector.build builder->
+                d.each_with_key (k -> v -> builder.append (k+v))
+            vec . should_equal expected_vec
+
+        group_builder.specify "should allow folding over the values" <|
+            d = Dictionary.empty . insert 1 2 . 
insert 2 4
+            d.fold 0 (+) . should_equal 6
+
+        group_builder.specify "should allow folding over the key-value pairs" <|
+            d = Dictionary.empty . insert 1 2 . insert 2 4
+            d.fold_with_key 0 (l -> k -> v -> l + k + v) . should_equal 9
+
+        group_builder.specify "should be able to add a Nothing key to a Dictionary of Text" <|
+            m = Dictionary.empty . insert "A" 2 . insert Nothing 1 . insert "B" 3
+            m.at "A" . should_equal 2
+            m.at "B" . should_equal 3
+            m.at Nothing . should_equal 1
+
+        group_builder.specify "should be able to add a Nothing key to a Dictionary of Integer" <|
+            m = Dictionary.empty . insert 100 2 . insert Nothing 1 . insert 200 3
+            m.at 100 . should_equal 2
+            m.at 200 . should_equal 3
+            m.at Nothing . should_equal 1
+
+    suite_builder.group "Polyglot keys and values" group_builder->
+        group_builder.specify "should support polyglot keys" <|
+            dict = Dictionary.singleton (js_str "A") 42
+            dict.size.should_equal 1
+            dict.get "A" . should_equal 42
+            dict.get (js_str "A") . should_equal 42
+
+        group_builder.specify "should support host objects as keys" <|
+            # java.nio.file.Path has a proper implementation of hashCode
+            dict = Dictionary.singleton (File_Utils.toPath "/home/user/file.txt") 42
+            dict.get "X" . should_equal Nothing
+            dict.get "A" . should_equal Nothing
+            dict.get (File_Utils.toPath "/home/user/file.txt") . should_equal 42
+
+        group_builder.specify "should support Python objects as keys" pending=pending_python_missing <|
+            py_obj = py_wrapper 42
+            dict = Dictionary.singleton py_obj "Value"
+            dict.get py_obj . should_equal "Value"
+
+        group_builder.specify "should support Python objects as values" pending=pending_python_missing <|
+            dict = Dictionary.singleton "A" (py_wrapper 42)
+            dict.get "A" . data . should_equal 42
+
+        group_builder.specify "should insert entries to a polyglot map" pending=pending_python_missing <|
+            dict = py_dict_from_vec ["A", 1, "B", 2]
+            dict.insert "C" 3 . keys . sort . should_equal ["A", "B", "C"]
+
+        group_builder.specify "should remove entries from a polyglot map" pending=pending_python_missing <|
+            dict = py_dict_from_vec ["A", 1, "B", 2]
+            dict.remove "B" . to_vector . should_equal [["A", 1]]
+
+    suite_builder.group "non-linear inserts" group_builder->
+        group_builder.specify "should handle inserts with different keys" <|
+            d1 = Dictionary.singleton "A" 1
+            d2 = d1.insert "B" 2
+            d3 = d1.insert "C" 3
+            d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]]
+            d3.to_vector.sort on=_.first . should_equal [["A", 1], ["C", 3]]
+
+        group_builder.specify "should handle inserts with same keys (1)" <|
+            d1 = Dictionary.singleton "A" 1
+            d2 = d1.insert "A" 2
+            d3 = d1.insert "A" 3
+            d4 = d1.insert "B" 4
+            d2.to_vector.sort on=_.first . should_equal [["A", 2]]
+            d3.to_vector.sort on=_.first . should_equal [["A", 3]]
+            d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 4]]
+
+        group_builder.specify "should handle inserts with same keys (2)" <|
+            d1 = Dictionary.singleton "foo" 1
+            d2 = d1.insert "baz" 2
+            d3 = d2.insert "foo" 3
+            d1.to_vector.sort on=_.first . should_equal [['foo', 1]]
+            d2.to_vector.sort on=_.first . should_equal [['baz', 2], ['foo', 1]]
+            d3.to_vector.sort on=_.first . should_equal [['baz', 2], ['foo', 3]]
+
+        group_builder.specify "should handle inserts with same keys (3)" <|
+            d1 = Dictionary.singleton "A" 1
+            d2 = d1.insert "B" 2
+            d3 = d2.insert "A" 3
+            d4 = d2.insert "C" 4
+            d1.to_vector.sort on=_.first . should_equal [["A", 1]]
+            d2.to_vector.sort on=_.first . 
should_equal [["A", 1], ["B", 2]] + d3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]] + d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 4]] + + group_builder.specify "should handle inserts with same keys (4)" <| + d1 = Dictionary.singleton "A" 1 + d2 = d1.insert "B" 2 + d3 = d2.insert "C" 3 + d4 = d2.insert "D" 4 + d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] + d3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] + d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["D", 4]] + + group_builder.specify "should handle inserts with same keys (5)" <| + d1 = Dictionary.singleton "A" 1 + d2 = d1.insert "B" 2 + d3 = d2.insert "A" 3 + d4 = d2.insert "A" 4 + d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] + d3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]] + d4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2]] + + group_builder.specify "should handle inserts with same keys (6)" <| + d1 = Dictionary.singleton "A" 1 + d2 = d1.insert "B" 2 + d3 = d2.insert "C" 3 + d4 = d2.insert "A" 4 + d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] + d3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] + d4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2]] + + group_builder.specify "should handle inserts with same keys (7)" <| + d1 = Dictionary.singleton "A" 1 + d2 = d1.insert "B" 2 + d3 = d2.insert "C" 3 + d4 = d3.insert "D" 4 + d5 = d2.insert "A" 5 + d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] + d3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] + d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3], ["D", 4]] + d5.to_vector.sort on=_.first . should_equal [["A", 5], ["B", 2]] + + group_builder.specify "should handle inserts with same keys (8)" <| + d1 = Dictionary.singleton "A" 1 + d2 = d1.insert "B" 2 + d3 = d2.insert "C" 3 + d4 = d3.insert "A" 4 + d5 = d2.insert "A" 5 + d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] + d3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] + d4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2], ["C", 3]] + d5.to_vector.sort on=_.first . should_equal [["A", 5], ["B", 2]] + + group_builder.specify "should handle inserts with same keys (9)" <| + d1 = Dictionary.singleton "A" 1 + d2 = d1.insert "B" 2 + d3 = d2.insert "A" 3 + d4 = d2.insert "B" 4 + d5 = d2.insert "C" 5 + d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] + d3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]] + d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 4]] + d5.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 5]] + + group_builder.specify "should handle inserts with same keys (10)" <| + d1 = Dictionary.singleton "A" 1 + d2 = d1.insert "B" 2 + d3 = d2.insert "C" 3 + d4 = d2.insert "D" 4 + d5 = d2.insert "E" 5 + d2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] + d3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] + d4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["D", 4]] + d5.to_vector.sort on=_.first . 
should_equal [["A", 1], ["B", 2], ["E", 5]] + + suite_builder.group "Polyglot hash maps" group_builder-> + group_builder.specify "should pass Dictionaries as immutable maps to other languages" pending=pending_python_missing <| + dict = Dictionary.singleton "A" 1 + # Python's KeyError should be raised + Test.expect_panic_with (py_update_dict dict "A" 2) Any + dict.get "A" . should_equal 1 + + group_builder.specify "should treat JavaScript maps as Enso Dictionaries" <| + js_dict = js_dict_from_vec ["A", 1, "B", 2] + dict = js_dict.insert "C" 3 + js_dict.to_vector.should_equal [["A", 1], ["B", 2]] + dict.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] + + group_builder.specify "should treat Java Map as Enso Dictionary" <| + sort_by_keys vec = vec.sort by=x-> y-> Ordering.compare x.first y.first + dict = JavaMap.of "A" 1 "B" 2 + (sort_by_keys dict.to_vector) . should_equal [["A", 1], ["B", 2]] + (sort_by_keys (dict.insert "C" 3 . to_vector)) . should_equal [["A", 1], ["B", 2], ["C", 3]] + + group_builder.specify "should treat Python dicts as Enso Dictionaries" pending=pending_python_missing <| + py_dict = py_dict_from_vec ["A", 1, "B", 2] + dict = py_dict.insert "C" 3 + py_dict.not_empty . should_be_true + py_dict.to_vector . should_contain_the_same_elements_as [["A", 1], ["B", 2]] + dict.to_vector . should_contain_the_same_elements_as [["A", 1], ["B", 2], ["C", 3]] + py_empty_dict.is_empty.should_be_true + py_empty_dict.insert "A" 1 . insert "A" 2 . get "A" . should_equal 2 + + group_builder.specify "should be able to remove entries" pending=pending_python_missing <| + py_dict_from_vec ["A", 1, "B", 2] . remove "A" . size . should_equal 1 + py_dict_from_vec ["A", 1, "B", 2] . remove "A" . get "B" . should_equal 2 + + group_builder.specify "should be able to remove NaN keys" pending=pending_python_missing <| + py_dict_from_vec [Number.nan, 1] . remove Number.nan . size . should_equal 0 + + group_builder.specify "should pass Dictionaries with null keys to Python and back" pending=pending_python_missing <| + # Python supports None as keys, Enso support Nothing as keys + py_dict = py_dict_from_map (Dictionary.singleton Nothing 42) + py_dict.get Nothing . should_equal 42 + py_dict.insert "A" 23 . get Nothing . should_equal 42 + py_dict.insert Nothing 23 . get Nothing . should_equal 23 + + group_builder.specify "should treat Enso Dictionaries as Python dicts when passed to Python" pending=pending_python_missing <| + dict1 = Dictionary.singleton "A" 1 . insert "B" 2 + py_vec_from_map dict1 . should_contain_the_same_elements_as [["A", 1], ["B", 2]] + dict2 = Dictionary.singleton "A" 1 . insert Nothing 2 + py_vec_from_map dict2 . should_contain_the_same_elements_as [["A", 1], [Nothing, 2]] + + +add_common_specs suite_builder prefix:Text (pending : (Text | Nothing)) (empty_dict_fn : (Nothing -> Dictionary)) = + # Not on a single line - empty_dict is a method, not a variable + empty_dict = + empty_dict_fn Nothing + + suite_builder.group prefix+": Common polyglot Dictionary operations" pending=pending group_builder-> + group_builder.specify "should get the default comparator for polyglot maps" <| + Comparable.from empty_dict . should_equal Default_Comparator + + group_builder.specify "should compare two hash maps" <| + (empty_dict.insert "a" 1).should_equal (empty_dict.insert "a" 1) + (empty_dict.insert "b" 2).should_not_equal (empty_dict.insert "a" 1) + empty_dict.should_equal empty_dict + empty_dict.should_not_equal (empty_dict.insert "a" 1) + (empty_dict.insert "a" 1 . 
insert "b" 2).should_equal (empty_dict.insert "b" 2 . insert "a" 1) + + group_builder.specify "should allow checking for non emptiness" <| + non_empty = empty_dict . insert "foo" 1234 + empty_dict.not_empty . should_be_false + non_empty.not_empty . should_be_true + + group_builder.specify "should allow checking its size" <| + non_empty = empty_dict.insert "a" "b" . insert "x" "y" + empty_dict.size . should_equal 0 + non_empty.size . should_equal 2 + + group_builder.specify "should allow checking for emptiness" <| + non_empty = empty_dict . insert "foo" 1234 + empty_dict.is_empty . should_be_true + non_empty.is_empty . should_be_false + + group_builder.specify "should handle incomparable values as keys" <| + empty_dict.insert Number.nan 1 . insert Number.nan 2 . get Number.nan . should_equal 2 + + group_builder.specify "should handle Nothing as values" <| + empty_dict.insert 1 Nothing . at 1 . should_equal Nothing + empty_dict.insert Nothing Nothing . at Nothing . should_equal Nothing + + group_builder.specify "should support rewriting values with same keys" <| + dict = Dictionary.singleton "a" 1 . insert "a" 42 + dict.size.should_equal 1 + dict.get "a" . should_equal 42 + + group_builder.specify "should allow storing atoms as values" <| + json = Json.parse '{"a": 1}' + pair = Pair.new "first" "second" + dict = Dictionary.singleton 0 json . insert 1 pair + dict.get 0 . should_equal json + dict.get 1 . should_equal pair + + group_builder.specify "should support NaN as keys" <| + empty_dict.insert Number.nan 1 . contains_key Number.nan . should_be_true + empty_dict.insert Number.nan 1 . values . should_equal [1] + empty_dict.insert Number.nan 1 . insert Number.nan 2 . contains_key Number.nan . should_be_true + empty_dict.insert Number.nan 1 . insert Number.nan 2 . values . should_equal [2] + empty_dict.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . contains_key Number.nan . should_be_true + empty_dict.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . contains_key "key" . should_be_true + empty_dict.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . at Number.nan . should_equal 3 + empty_dict.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . at "key" . should_equal 2 + empty_dict.insert Number.nan 1 . insert Number.nan Number.nan . at Number.nan . to_text . should_equal "NaN" + empty_dict.insert Number.nan 1 . insert Number.nan Number.nan . remove Number.nan . size . should_equal 0 + + group_builder.specify "should support arbitrary atoms as keys" <| + dict = empty_dict . insert (Pair.new "one" "two") 42 + (dict.get (Pair.new "one" "two")).should_equal 42 + (dict.get (Pair.new "A" "B")).should_equal Nothing + (dict.get (Pair.new "two" "two")).should_equal Nothing + + group_builder.specify "should support vectors as keys" <| + dict = empty_dict . insert [1, "a", 2] "Value" + dict.size.should_equal 1 + dict.get [1, "a", 2] . should_equal "Value" + + group_builder.specify "should support dates as keys" <| + dict = empty_dict.insert (Date.new 1993) 1 . insert (Date.new 1993 2 5) 2 . insert (Date_Time.new 1993 2 5 13 45) 3 + dict.size.should_equal 3 + dict.get (Date.new 1993 6 7) . should_equal Nothing + dict.get (Date.new 1993) . should_equal 1 + dict.get (Date_Time.new 1993) . should_equal Nothing + dict.get (Date.new 1993 2 5) . should_equal 2 + dict.get (Date_Time.new 1993 2 5) . should_equal Nothing + dict.get (Date_Time.new 1993 2 5 13 45) . 
should_equal 3 + + group_builder.specify "should support another hash map as key" <| + keys = empty_dict.insert (Pair.new "one" "two") 42 + dict = empty_dict.insert keys 23 + dict.size.should_equal 1 + (dict.get "A").should_equal Nothing + (dict.get keys).should_equal 23 + (dict.get dict).should_equal Nothing + + group_builder.specify "should handle keys with standard equality semantics" <| + dict = empty_dict.insert 2 "Hello" + (dict.get 2).should_equal "Hello" + (dict.get 2.0).should_equal "Hello" + (empty_dict.insert 2 "Hello").should_equal (empty_dict.insert 2.0 "Hello") + + group_builder.specify "should handle Nothing as keys" <| + empty_dict.insert Nothing 3 . get Nothing . should_equal 3 + empty_dict.insert Nothing 1 . insert Nothing 2 . get Nothing . should_equal 2 + empty_dict.insert Nothing 1 . should_equal (empty_dict.insert Nothing 1) + empty_dict.insert Nothing 1 . insert Nothing 2 . at Nothing . should_equal 2 + + group_builder.specify "should handle JavaScript null as keys" <| + empty_dict.insert js_null 1 . at Nothing . should_equal 1 + + group_builder.specify "should handle Python None as keys" pending=pending_python_missing <| + empty_dict.insert py_none 1 . at Nothing . should_equal 1 + + group_builder.specify "should define a well-defined text conversion" <| + d = empty_dict . insert 0 0 . insert 3 -5 . insert 1 2 + d.to_text . should_contain "0=0" + d.to_text . should_contain "3=-5" + d.to_text . should_contain "1=2" + + group_builder.specify "should define structural equality" <| + dict_1 = empty_dict . insert "1" 2 . insert "2" "1" + dict_2 = empty_dict . insert "1" 2 . insert "2" "1" + dict_3 = empty_dict + dict_1==dict_2 . should_be_true + dict_1==dict_3 . should_be_false + dict_2==dict_3 . should_be_false + + group_builder.specify "should allow inserting and looking up values" <| + m = empty_dict . insert "foo" 134 . insert "bar" 654 . insert "baz" "spam" + m.at "foo" . should_equal 134 + m.at "bar" . should_equal 654 + m.at "baz" . should_equal "spam" + (m.at "nope").should_fail_with No_Such_Key + + group_builder.specify "should support get" <| + m = empty_dict . insert 2 3 + m.get 2 0 . should_equal 3 + m.get 1 10 . should_equal 10 + m.get 2 (Panic.throw "missing") . should_equal 3 + + group_builder.specify "should allow getting a vector of the keys" <| + m = empty_dict . insert 1 2 . insert 2 4 + m.keys . should_equal [1, 2] + + group_builder.specify "should allow getting a vector of the values" <| + m = empty_dict . insert 1 2 . insert 2 4 + m.values . should_equal [2, 4] + + group_builder.specify "should support contains_key" <| + m = empty_dict . insert 2 3 + m.contains_key 2 . should_be_true + m.contains_key 1 . should_be_false + + group_builder.specify "should allow transforming the dictionary" <| + m = empty_dict . insert 1 2 . insert 2 4 + expected = empty_dict . insert "1" 4 . insert "2" 8 + m.transform (k -> v -> [k.to_text, v*2]) . should_equal expected + + group_builder.specify "should be able to remove entries (1)" <| + m1 = empty_dict.insert "A" 1 . insert "B" 2 + m2 = m1.remove "B" + m2.get "A" . should_equal 1 + m2.remove "A" . should_equal empty_dict + m1.remove "foo" . should_fail_with No_Such_Key + + group_builder.specify "should be able to remove entries (2)" <| + m1 = empty_dict.insert "A" 1 + m2 = m1.insert "B" 2 + m3 = m1.insert "C" 3 + m2.remove "A" . to_vector . should_equal [["B", 2]] + m2.remove "B" . to_vector . should_equal [["A", 1]] + m3.remove "A" . to_vector . should_equal [["C", 3]] + m3.remove "C" . to_vector . 
should_equal [["A", 1]] + + group_builder.specify "should be able to remove entries (3)" <| + m = empty_dict.insert "A" 1 . insert "B" 2 . insert "C" 3 + m.remove "B" . should_equal (empty_dict.insert "A" 1 . insert "C" 3) + +main filter=Nothing = + suite = Test.build suite_builder-> + add_specs suite_builder + suite.run_with_filter filter diff --git a/test/Base_Tests/src/Data/Set_Spec.enso b/test/Base_Tests/src/Data/Hashset_Spec.enso similarity index 55% rename from test/Base_Tests/src/Data/Set_Spec.enso rename to test/Base_Tests/src/Data/Hashset_Spec.enso index 28de9a0d8d5d..d333bc6fcd05 100644 --- a/test/Base_Tests/src/Data/Set_Spec.enso +++ b/test/Base_Tests/src/Data/Hashset_Spec.enso @@ -5,10 +5,11 @@ from Standard.Test import all add_specs suite_builder = - suite_builder.group "Enso Set" group_builder-> + suite_builder.group "Enso Hashset" group_builder-> group_builder.specify "should allow checking for emptiness" <| - empty_map = Set.empty - non_empty = Set.empty . insert "foo" + empty_map = + Hashset.empty + non_empty = Hashset.empty . insert "foo" empty_map.is_empty . should_be_true non_empty.is_empty . should_be_false @@ -16,34 +17,34 @@ add_specs suite_builder = non_empty.not_empty . should_be_true group_builder.specify "should be constructed from a vector" <| - s1 = Set.from_vector [1, 2, 3, 2] + s1 = Hashset.from_vector [1, 2, 3, 2] s1.size . should_equal 3 s1.to_vector.sort . should_equal [1, 2, 3] - r2 = Set.from_vector [1, 2, 2] error_on_duplicates=True + r2 = Hashset.from_vector [1, 2, 2] error_on_duplicates=True r2.should_fail_with Illegal_Argument group_builder.specify "should allow checking contains" <| - s1 = Set.from_vector [1, 2, 3, 2] + s1 = Hashset.from_vector [1, 2, 3, 2] s1.contains 1 . should_be_true s1.contains 2 . should_be_true s1.contains 3 . should_be_true s1.contains 4 . should_be_false group_builder.specify "should allow checking contains with relational NULL logic" <| - Set.from_vector [1, 2] . contains_relational 1 . should_be_true - Set.from_vector [1, 2] . contains_relational 3 . should_be_false - Set.from_vector [1, 2, Nothing] . contains_relational 1 . should_be_true - Set.from_vector [1, 2, Nothing] . contains_relational 3 . should_equal Nothing - Set.from_vector [1, 2, Nothing] . contains_relational Nothing . should_equal Nothing - Set.from_vector [1, 2] . contains_relational Nothing . should_equal Nothing - Set.from_vector [Nothing] . contains_relational Nothing . should_equal Nothing - Set.from_vector [] . contains_relational Nothing . should_be_false + Hashset.from_vector [1, 2] . contains_relational 1 . should_be_true + Hashset.from_vector [1, 2] . contains_relational 3 . should_be_false + Hashset.from_vector [1, 2, Nothing] . contains_relational 1 . should_be_true + Hashset.from_vector [1, 2, Nothing] . contains_relational 3 . should_equal Nothing + Hashset.from_vector [1, 2, Nothing] . contains_relational Nothing . should_equal Nothing + Hashset.from_vector [1, 2] . contains_relational Nothing . should_equal Nothing + Hashset.from_vector [Nothing] . contains_relational Nothing . should_equal Nothing + Hashset.from_vector [] . contains_relational Nothing . should_be_false group_builder.specify "should allow to compute a union, intersection and difference" <| - s1 = Set.from_vector [1, 2] - s2 = Set.from_vector [2, 3] - s3 = Set.from_vector [3, 4] + s1 = Hashset.from_vector [1, 2] + s2 = Hashset.from_vector [2, 3] + s3 = Hashset.from_vector [3, 4] (s1.union s2).to_vector.sort . should_equal [1, 2, 3] (s1.union s3).to_vector.sort . 
should_equal [1, 2, 3, 4] @@ -54,19 +55,19 @@ add_specs suite_builder = (s1.difference s1).to_vector . should_equal [] group_builder.specify "should allow to check for equality of two sets" <| - s1 = Set.from_vector [1, 2] - s2 = Set.from_vector [2, 1, 1] - s3 = Set.from_vector [1, 2, 3] + s1 = Hashset.from_vector [1, 2] + s2 = Hashset.from_vector [2, 1, 1] + s3 = Hashset.from_vector [1, 2, 3] (s1 == s2) . should_be_true (s1 == s1) . should_be_true (s1 == s3) . should_be_false group_builder.specify "should be able to convert to text" <| - s1 = Set.from_vector ["1", "2", "3"] - s2 = Set.from_vector [1, 2, 3] - s1.to_text.should_equal "Set{'1', '2', '3'}" - s2.to_text.should_equal "Set{1, 2, 3}" + s1 = Hashset.from_vector ["1", "2", "3"] + s2 = Hashset.from_vector [1, 2, 3] + s1.to_text.should_equal "Hashset{'1', '2', '3'}" + s2.to_text.should_equal "Hashset{1, 2, 3}" main filter=Nothing = suite = Test.build suite_builder-> diff --git a/test/Base_Tests/src/Data/Json_Spec.enso b/test/Base_Tests/src/Data/Json_Spec.enso index 799d50625a59..2dfd4b1c2b6c 100644 --- a/test/Base_Tests/src/Data/Json_Spec.enso +++ b/test/Base_Tests/src/Data/Json_Spec.enso @@ -85,13 +85,13 @@ add_specs suite_builder = Json.parse '{"constructor": "Skew", "population": true}' . into Statistic . should_equal (Statistic.Skew True) Json.parse '{"constructor": "NotARealOne", "population": true}' . into Statistic . should_fail_with Illegal_Argument - group_builder.specify "should be able to convert a JS_Object into a Map using into" <| - Json.parse '{"a": 15, "b": 20, "c": "X", "d": null}' . into Map . should_equal (Map.from_vector [["a", 15], ["b", 20], ["c", "X"], ["d", Nothing]]) - Json.parse '{}' . into Map . should_equal Map.empty + group_builder.specify "should be able to convert a JS_Object into a Dictionary using into" <| + Json.parse '{"a": 15, "b": 20, "c": "X", "d": null}' . into Dictionary . should_equal (Dictionary.from_vector [["a", 15], ["b", 20], ["c", "X"], ["d", Nothing]]) + Json.parse '{}' . into Dictionary . should_equal Dictionary.empty # [] parses as a vector/array which does not have the `into` method, that only works for {} objects: Test.expect_panic No_Such_Method <| - Json.parse '[]' . into Map + Json.parse '[]' . 
into Dictionary group_builder.specify "should be able to deserialize Date" <| '{"type": "Date", "constructor": "new", "year": 2018, "month": 7, "day": 3}'.should_parse_as (Date.new 2018 7 3) diff --git a/test/Base_Tests/src/Data/Map_Spec.enso b/test/Base_Tests/src/Data/Map_Spec.enso deleted file mode 100644 index 975b8f7f88af..000000000000 --- a/test/Base_Tests/src/Data/Map_Spec.enso +++ /dev/null @@ -1,637 +0,0 @@ -from Standard.Base import all -import Standard.Base.Errors.Illegal_Argument.Illegal_Argument -import Standard.Base.Errors.No_Such_Key.No_Such_Key - -from Standard.Test import all - - -polyglot java import java.util.Map as JavaMap -polyglot java import org.enso.base.file_system.File_Utils - -## Type that violates reflexivity -type My_Nan - Value comment:Text - -type My_Nan_Comparator - compare _ _ = Nothing - hash _ = 0 - -Comparable.from (_:My_Nan) = My_Nan_Comparator - -type My_Key - Value hash_code:Integer value:Text idx:Integer - -type My_Key_Comparator - # Comparison ignores idx field - compare x y = - if x.hash_code != y.hash_code then Nothing else - if x.value == y.value then Ordering.Equal else Nothing - - hash x = x.hash_code - -Comparable.from (_:My_Key) = My_Key_Comparator - - -foreign js js_str str = """ - return new String(str) - -foreign js js_null = """ - return null - -foreign js js_empty_dict = """ - return new Map() - -foreign python py_empty_dict = """ - return {} - -foreign js js_dict_from_vec vec = """ - dict = new Map() - for (let i = 0; i < vec.length; i += 2) { - dict.set(vec[i], vec[i+1]) - } - return dict - -foreign python py_none = """ - return None - -foreign python py_dict_from_vec vec = """ - d = {} - for i in range(0, len(vec), 2): - d[vec[i]] = vec[i + 1] - return d - -foreign python py_dict_from_map map = """ - d = dict() - for key in map.__iter__(): - d[key] = map[key] - return d - -foreign python py_vec_from_map map = """ - vec = [] - for key in map.__iter__(): - value = map[key] - vec.append([key, value]) - return vec - -# Should throw error - updating immutable map from Enso -foreign python py_update_dict map key val = """ - map[key] = val - -foreign python py_wrapper obj = """ - class MyClass: - def __init__(self, obj): - self.data = obj - return MyClass(obj) - -pending_python_missing = if Polyglot.is_language_installed "python" then Nothing else "Can't run Python tests, Python is not installed." - -type Child - Value data - -type Parent - Value child - -type GrandParent - Value parent - -add_specs suite_builder = - languages = Vector.build builder-> - builder . append ["Enso", _-> Map.empty, Nothing] - builder . append ["Java", _-> JavaMap.of, Nothing] - builder . append ["JavaScript", _-> js_empty_dict, Nothing] - builder . append ["Python", _-> py_empty_dict, pending_python_missing] - languages.each entry-> - lang = entry.get 0 - empty_map_fn = entry.get 1 - pending = entry.get 2 - add_common_specs suite_builder lang pending empty_map_fn - - suite_builder.group "Enso maps" group_builder-> - - - group_builder.specify "should use proper hash code for keys" <| - single_key_map key = Map.singleton key 42 - grand_parent_1 = GrandParent.Value (Parent.Value (Child.Value 2)) - grand_parent_2 = GrandParent.Value (Parent.Value (Child.Value 2.0)) - - (single_key_map 2 . at 2.0) . should_equal 42 - (single_key_map -2 . at -2.0) . should_equal 42 - (single_key_map 'ś' . at 's\u0301') . should_equal 42 - (single_key_map 's\u0301' . at 'ś') . should_equal 42 - (single_key_map 'éabc' . at 'e\u0301abc') . should_equal 42 - (single_key_map 'e\u0301abc' . 
at 'éabc') . should_equal 42 - (single_key_map grand_parent_1 . at grand_parent_2) . should_equal 42 - (single_key_map (Json.parse '{"a": 1}') . at (Json.parse '{"a": 1}')) . should_equal 42 - (single_key_map (Child.Value 1) . at (Child.Value 1.0)) . should_equal 42 - - - group_builder.specify "should support another hash map with NaN keys as key" <| - Map.singleton (Map.singleton Number.nan 1) 42 . size . should_equal 1 - Map.singleton (Map.singleton Number.nan 1) 42 . keys . at 0 . keys . to_text . should_equal "[NaN]" - Map.singleton (Map.singleton Number.nan 1) 42 . keys . at 0 . get Number.nan . should_equal 1 - Map.singleton (Map.singleton Number.nan 1) 42 . at (Map.singleton Number.nan 1) . should_equal 42 - - group_builder.specify "should support atoms with custom comparators that violate reflexivity as keys" <| - k = My_Nan.Value "foo" - k2 = My_Nan.Value "foo" - (k==k).should_be_true - (k==k2).should_be_false - Meta.is_same_object k k2 . should_be_false - Meta.is_same_object k k . should_be_true - m = Map.empty.insert k 10 - m.contains_key k . should_be_true - m.get k . should_equal 10 - m.contains_key k2 . should_be_false - - m2 = m.insert k2 20 - m2.get k . should_equal 10 - m2.get k2 . should_equal 20 - m2.size . should_equal 2 - - m3 = m2.insert k 30 - m3.size . should_equal 2 - m3.get k . should_equal 30 - - group_builder.specify "should support atom with custom comparators with complicated hash method" <| - keys = 0.up_to 500 . map ix-> - value = ["A", "B", "C", "D", "E"].at (ix % 5) - hash_code = Comparable.from value . hash value - My_Key.Value hash_code value ix - distinct_keys = keys.fold Map.empty acc_map-> - item-> - acc_map.insert item True - distinct_keys.size . should_equal 5 - distinct_key_values = keys.map (_.value) . fold Map.empty acc_map-> - item-> - acc_map.insert item True - distinct_key_values.size . should_equal 5 - - group_builder.specify "should not drop warnings from keys" <| - key = Warning.attach "my_warn" "my_key" - map = Map.singleton key 42 - (Warning.get_all (map.keys.at 0)).length . should_equal 1 - - group_builder.specify "should not drop warnings from values" <| - val = Warning.attach "my_warn" "my_val" - map = Map.singleton 42 val - (Warning.get_all (map.values.at 0)).length . should_equal 1 - - group_builder.specify "should convert the whole map to a vector" <| - m = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2 - m.to_vector.sort on=_.first . should_equal [[0, 0], [1, 2], [3, -5]] - - group_builder.specify "should allow building the map from two vectors" <| - expected = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2 - Map.from_keys_and_values [0, 3, 1] [0, -5, 2] . should_equal expected - - group_builder.specify "should allow building the map from vector like things" <| - expected = Map.empty . insert 0 0 . insert 1 -5 . insert 2 2 - Map.from_keys_and_values (0.up_to 3) [0, -5, 2] . should_equal expected - - group_builder.specify "should not allow building with duplicate keys unless explicitly allowed" <| - expected = Map.empty . insert 0 0 . insert 3 -5 . insert 1 2 - Map.from_keys_and_values [0, 3, 1, 0] [3, -5, 2, 0] . should_fail_with Illegal_Argument - Map.from_keys_and_values [0, 3, 1, 0] [3, -5, 2, 0] error_on_duplicates=False . should_equal expected - - group_builder.specify "should not allow different length vectors when building" <| - Map.from_keys_and_values [0, 3, 1] [3, -5, 2, 0] . should_fail_with Illegal_Argument - - group_builder.specify "should allow building the map from a vector" <| - expected = Map.empty . 
insert 0 0 . insert 3 -5 . insert 1 2 - vec = [[0, 0], [3, -5], [1, 2]] - Map.from_vector vec . should_equal expected - - group_builder.specify "should fail when building the map from wrong vector" <| - Map.from_vector [["A", 1, "B", 2]] . should_fail_with Illegal_Argument - - group_builder.specify "should not allow duplicates when building the map from a vector, unless explicitly allowed" <| - vec = [[0, 0], [3, -5], [1, 2], [0, 1]] - m1 = Map.from_vector vec - m1.should_fail_with Illegal_Argument - m1.catch.message . should_equal "`Map.from_vector` encountered duplicate key: 0" - - m2 = Map.from_vector vec error_on_duplicates=False - Problems.assume_no_problems m2 - m2.get 0 . should_equal 1 - m2.get 3 . should_equal -5 - - group_builder.specify "should disallow duplicate keys when transforming the map" <| - m = Map.from_vector [[1, 2], [11, 3]] - m2 = m.transform (k -> v -> [k % 10, v*2]) - m2.should_fail_with Illegal_Argument - m2.catch.message . should_equal "`Map.transform` encountered duplicate key: 1" - - group_builder.specify "should allow mapping over values" <| - m = Map.empty . insert 1 2 . insert 2 4 - expected = Map.empty . insert 1 4 . insert 2 8 - m.map (v -> v*2) . should_equal expected - - group_builder.specify "should allow mapping over keys" <| - m = Map.empty . insert 1 2 . insert 2 4 - expected = Map.empty . insert 2 2 . insert 4 4 - m.map_keys (k -> k*2) . should_equal expected - - group_builder.specify "should allow mapping with keys" <| - m = Map.empty . insert 1 2 . insert 2 4 - expected = Map.empty . insert 1 3 . insert 2 6 - m.map_with_key (k -> v -> k + v) . should_equal expected - - group_builder.specify "should allow iterating over each value" <| - m = Map.empty . insert 1 2 . insert 2 4 - expected_vec = [2, 4] - vec = Vector.build builder-> - m.each (v -> builder.append v) - vec . should_equal expected_vec - - group_builder.specify "should allow iterating over each key-value pair" <| - m = Map.empty . insert 1 2 . insert 2 4 - expected_vec = [3, 6] - vec = Vector.build builder-> - m.each_with_key (k -> v -> builder.append (k+v)) - vec . should_equal expected_vec - - group_builder.specify "should allow folding over the values" <| - m = Map.empty . insert 1 2 . insert 2 4 - m.fold 0 (+) . should_equal 6 - - group_builder.specify "should allow folding over the key-value pairs" <| - m = Map.empty . insert 1 2 . insert 2 4 - m.fold_with_key 0 (l -> k -> v -> l + k + v) . should_equal 9 - - group_builder.specify "should be able to add a Nothing key to the map of Text" <| - m = Map.empty . insert "A" 2 . insert Nothing 1 . insert "B" 3 - m.at "A" . should_equal 2 - m.at "B" . should_equal 3 - m.at Nothing . should_equal 1 - - group_builder.specify "should be able to add a Nothing key to the map of Integer" <| - m = Map.empty . insert 100 2 . insert Nothing 1 . insert 200 3 - m.at 100 . should_equal 2 - m.at 200 . should_equal 3 - m.at Nothing . should_equal 1 - - suite_builder.group "Polyglot keys and values" group_builder-> - group_builder.specify "should support polyglot keys" <| - map = Map.singleton (js_str "A") 42 - map.size.should_equal 1 - map.get "A" . should_equal 42 - map.get (js_str "A") . should_equal 42 - - group_builder.specify "should support host objects as keys" <| - # java.nio.path.Path has proper implementation of hashCode - map = Map.singleton (File_Utils.toPath "/home/user/file.txt") 42 - map.get "X" . should_equal Nothing - map.get "A" . should_equal Nothing - map.get (File_Utils.toPath "/home/user/file.txt") . 
should_equal 42 - - group_builder.specify "should support Python objects as keys" pending=pending_python_missing <| - py_obj = py_wrapper 42 - map = Map.singleton py_obj "Value" - map.get py_obj . should_equal "Value" - - group_builder.specify "should support Python objects as values" pending=pending_python_missing <| - map = Map.singleton "A" (py_wrapper 42) - map.get "A" . data . should_equal 42 - - group_builder.specify "should insert entries to a polyglot map" pending=pending_python_missing <| - dict = py_dict_from_vec ["A", 1, "B", 2] - dict.insert "C" 3 . keys . sort . should_equal ["A", "B", "C"] - - group_builder.specify "should remove entries from a polyglot map" pending=pending_python_missing <| - dict = py_dict_from_vec ["A", 1, "B", 2] - dict.remove "B" . to_vector . should_equal [["A", 1]] - - suite_builder.group "non-linear inserts" group_builder-> - group_builder.specify "should handle inserts with different keys" <| - m1 = Map.singleton "A" 1 - m2 = m1.insert "B" 2 - m3 = m1.insert "C" 3 - m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] - m3.to_vector.sort on=_.first . should_equal [["A", 1], ["C", 3]] - - group_builder.specify "should handle inserts with same keys (1)" <| - m1 = Map.singleton "A" 1 - m2 = m1.insert "A" 2 - m3 = m1.insert "A" 3 - m4 = m1.insert "B" 4 - m2.to_vector.sort on=_.first . should_equal [["A", 2]] - m3.to_vector.sort on=_.first . should_equal [["A", 3]] - m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 4]] - - group_builder.specify "should handle inserts with same keys (2)" <| - m1 = Map.singleton "foo" 1 - m2 = m1.insert "baz" 2 - m3 = m2.insert "foo" 3 - m1.to_vector.sort on=_.first . should_equal [['foo', 1]] - m2.to_vector.sort on=_.first . should_equal [['baz', 2], ['foo', 1]] - m3.to_vector.sort on=_.first . should_equal [['baz', 2], ['foo', 3]] - - group_builder.specify "should handle inserts with same keys (3)" <| - m1 = Map.singleton "A" 1 - m2 = m1.insert "B" 2 - m3 = m2.insert "A" 3 - m4 = m2.insert "C" 4 - m1.to_vector.sort on=_.first . should_equal [["A", 1]] - m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] - m3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]] - m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 4]] - - group_builder.specify "should handle inserts with same keys (4)" <| - m1 = Map.singleton "A" 1 - m2 = m1.insert "B" 2 - m3 = m2.insert "C" 3 - m4 = m2.insert "D" 4 - m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] - m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] - m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["D", 4]] - - group_builder.specify "should handle inserts with same keys (5)" <| - m1 = Map.singleton "A" 1 - m2 = m1.insert "B" 2 - m3 = m2.insert "A" 3 - m4 = m2.insert "A" 4 - m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] - m3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]] - m4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2]] - - group_builder.specify "should handle inserts with same keys (6)" <| - m1 = Map.singleton "A" 1 - m2 = m1.insert "B" 2 - m3 = m2.insert "C" 3 - m4 = m2.insert "A" 4 - m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] - m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] - m4.to_vector.sort on=_.first . 
should_equal [["A", 4], ["B", 2]] - - group_builder.specify "should handle inserts with same keys (7)" <| - m1 = Map.singleton "A" 1 - m2 = m1.insert "B" 2 - m3 = m2.insert "C" 3 - m4 = m3.insert "D" 4 - m5 = m2.insert "A" 5 - m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] - m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] - m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3], ["D", 4]] - m5.to_vector.sort on=_.first . should_equal [["A", 5], ["B", 2]] - - group_builder.specify "should handle inserts with same keys (8)" <| - m1 = Map.singleton "A" 1 - m2 = m1.insert "B" 2 - m3 = m2.insert "C" 3 - m4 = m3.insert "A" 4 - m5 = m2.insert "A" 5 - m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] - m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] - m4.to_vector.sort on=_.first . should_equal [["A", 4], ["B", 2], ["C", 3]] - m5.to_vector.sort on=_.first . should_equal [["A", 5], ["B", 2]] - - group_builder.specify "should handle inserts with same keys (9)" <| - m1 = Map.singleton "A" 1 - m2 = m1.insert "B" 2 - m3 = m2.insert "A" 3 - m4 = m2.insert "B" 4 - m5 = m2.insert "C" 5 - m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] - m3.to_vector.sort on=_.first . should_equal [["A", 3], ["B", 2]] - m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 4]] - m5.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 5]] - - group_builder.specify "should handle inserts with same keys (10)" <| - m1 = Map.singleton "A" 1 - m2 = m1.insert "B" 2 - m3 = m2.insert "C" 3 - m4 = m2.insert "D" 4 - m5 = m2.insert "E" 5 - m2.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2]] - m3.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] - m4.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["D", 4]] - m5.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["E", 5]] - - suite_builder.group "Polyglot hash maps" group_builder-> - group_builder.specify "should pass maps as immutable maps to other langs" pending=pending_python_missing <| - map = Map.singleton "A" 1 - # Python's KeyError should be raised - Test.expect_panic_with (py_update_dict map "A" 2) Any - map.get "A" . should_equal 1 - - group_builder.specify "should treat JavaScript maps as Enso maps" <| - js_dict = js_dict_from_vec ["A", 1, "B", 2] - map = js_dict.insert "C" 3 - js_dict.to_vector.should_equal [["A", 1], ["B", 2]] - map.to_vector.sort on=_.first . should_equal [["A", 1], ["B", 2], ["C", 3]] - - group_builder.specify "should treat Java Map as Enso map" <| - sort_by_keys vec = vec.sort by=x-> y-> Ordering.compare x.first y.first - jmap = JavaMap.of "A" 1 "B" 2 - (sort_by_keys jmap.to_vector) . should_equal [["A", 1], ["B", 2]] - (sort_by_keys (jmap.insert "C" 3 . to_vector)) . should_equal [["A", 1], ["B", 2], ["C", 3]] - - group_builder.specify "should treat Python dicts as Enso maps" pending=pending_python_missing <| - py_dict = py_dict_from_vec ["A", 1, "B", 2] - map = py_dict.insert "C" 3 - py_dict.not_empty . should_be_true - py_dict.to_vector . should_contain_the_same_elements_as [["A", 1], ["B", 2]] - map.to_vector . should_contain_the_same_elements_as [["A", 1], ["B", 2], ["C", 3]] - py_empty_dict.is_empty.should_be_true - py_empty_dict.insert "A" 1 . insert "A" 2 . get "A" . should_equal 2 - - group_builder.specify "should be able to remove entries" pending=pending_python_missing <| - py_dict_from_vec ["A", 1, "B", 2] . remove "A" . size . 
should_equal 1 - py_dict_from_vec ["A", 1, "B", 2] . remove "A" . get "B" . should_equal 2 - - group_builder.specify "should be able to remove NaN keys" pending=pending_python_missing <| - py_dict_from_vec [Number.nan, 1] . remove Number.nan . size . should_equal 0 - - group_builder.specify "should pass maps with null keys to Python and back" pending=pending_python_missing <| - # Python supports None as keys, Enso support Nothing as keys - py_dict = py_dict_from_map (Map.singleton Nothing 42) - py_dict.get Nothing . should_equal 42 - py_dict.insert "A" 23 . get Nothing . should_equal 42 - py_dict.insert Nothing 23 . get Nothing . should_equal 23 - - group_builder.specify "should treat Enso maps as Python dicts when passed to Python" pending=pending_python_missing <| - map1 = Map.empty.insert "A" 1 . insert "B" 2 - py_vec_from_map map1 . should_contain_the_same_elements_as [["A", 1], ["B", 2]] - map2 = Map.empty.insert "A" 1 . insert Nothing 2 - py_vec_from_map map2 . should_contain_the_same_elements_as [["A", 1], [Nothing, 2]] - - -add_common_specs suite_builder prefix:Text (pending : (Text | Nothing)) (empty_map_fn : (Nothing -> Map)) = - # Not on a single line - empty_map is a method, not a variable - empty_map = - empty_map_fn Nothing - - suite_builder.group prefix+": Common polyglot Map operations" pending=pending group_builder-> - group_builder.specify "should get the default comparator for polyglot maps" <| - Comparable.from empty_map . should_equal Default_Comparator - - group_builder.specify "should compare two hash maps" <| - (empty_map.insert "a" 1).should_equal (empty_map.insert "a" 1) - (empty_map.insert "b" 2).should_not_equal (empty_map.insert "a" 1) - empty_map.should_equal empty_map - empty_map.should_not_equal (empty_map.insert "a" 1) - (empty_map.insert "a" 1 . insert "b" 2).should_equal (empty_map.insert "b" 2 . insert "a" 1) - - group_builder.specify "should allow checking for non emptiness" <| - non_empty = empty_map . insert "foo" 1234 - empty_map.not_empty . should_be_false - non_empty.not_empty . should_be_true - - group_builder.specify "should allow checking its size" <| - non_empty = empty_map.insert "a" "b" . insert "x" "y" - empty_map.size . should_equal 0 - non_empty.size . should_equal 2 - - group_builder.specify "should allow checking for emptiness" <| - non_empty = empty_map . insert "foo" 1234 - empty_map.is_empty . should_be_true - non_empty.is_empty . should_be_false - - group_builder.specify "should handle incomparable values as keys" <| - empty_map.insert Number.nan 1 . insert Number.nan 2 . get Number.nan . should_equal 2 - - group_builder.specify "should handle Nothing as values" <| - empty_map.insert 1 Nothing . at 1 . should_equal Nothing - empty_map.insert Nothing Nothing . at Nothing . should_equal Nothing - - group_builder.specify "should support rewriting values with same keys" <| - map = Map.empty.insert "a" 1 . insert "a" 42 - map.size.should_equal 1 - map.get "a" . should_equal 42 - - group_builder.specify "should allow storing atoms as values" <| - json = Json.parse '{"a": 1}' - pair = Pair.new "first" "second" - map = Map.empty.insert 0 json . insert 1 pair - map.get 0 . should_equal json - map.get 1 . should_equal pair - - group_builder.specify "should support NaN as keys" <| - empty_map.insert Number.nan 1 . contains_key Number.nan . should_be_true - empty_map.insert Number.nan 1 . values . should_equal [1] - empty_map.insert Number.nan 1 . insert Number.nan 2 . contains_key Number.nan . should_be_true - empty_map.insert Number.nan 1 . 
insert Number.nan 2 . values . should_equal [2] - empty_map.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . contains_key Number.nan . should_be_true - empty_map.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . contains_key "key" . should_be_true - empty_map.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . at Number.nan . should_equal 3 - empty_map.insert Number.nan 1 . insert "key" 2 . insert Number.nan 3 . at "key" . should_equal 2 - empty_map.insert Number.nan 1 . insert Number.nan Number.nan . at Number.nan . to_text . should_equal "NaN" - empty_map.insert Number.nan 1 . insert Number.nan Number.nan . remove Number.nan . size . should_equal 0 - - group_builder.specify "should support arbitrary atoms as keys" <| - map = empty_map . insert (Pair.new "one" "two") 42 - (map.get (Pair.new "one" "two")).should_equal 42 - (map.get (Pair.new "A" "B")).should_equal Nothing - (map.get (Pair.new "two" "two")).should_equal Nothing - - group_builder.specify "should support vectors as keys" <| - map = empty_map . insert [1, "a", 2] "Value" - map.size.should_equal 1 - map.get [1, "a", 2] . should_equal "Value" - - group_builder.specify "should support dates as keys" <| - map = empty_map.insert (Date.new 1993) 1 . insert (Date.new 1993 2 5) 2 . insert (Date_Time.new 1993 2 5 13 45) 3 - map.size.should_equal 3 - map.get (Date.new 1993 6 7) . should_equal Nothing - map.get (Date.new 1993) . should_equal 1 - map.get (Date_Time.new 1993) . should_equal Nothing - map.get (Date.new 1993 2 5) . should_equal 2 - map.get (Date_Time.new 1993 2 5) . should_equal Nothing - map.get (Date_Time.new 1993 2 5 13 45) . should_equal 3 - - group_builder.specify "should support another hash map as key" <| - key_map = empty_map.insert (Pair.new "one" "two") 42 - map = empty_map.insert key_map 23 - map.size.should_equal 1 - (map.get "A").should_equal Nothing - (map.get key_map).should_equal 23 - (map.get map).should_equal Nothing - - group_builder.specify "should handle keys with standard equality semantics" <| - map = empty_map.insert 2 "Hello" - (map.get 2).should_equal "Hello" - (map.get 2.0).should_equal "Hello" - (empty_map.insert 2 "Hello").should_equal (empty_map.insert 2.0 "Hello") - - group_builder.specify "should handle Nothing as keys" <| - empty_map.insert Nothing 3 . get Nothing . should_equal 3 - empty_map.insert Nothing 1 . insert Nothing 2 . get Nothing . should_equal 2 - empty_map.insert Nothing 1 . should_equal (empty_map.insert Nothing 1) - empty_map.insert Nothing 1 . insert Nothing 2 . at Nothing . should_equal 2 - - group_builder.specify "should handle JavaScript null as keys" <| - empty_map.insert js_null 1 . at Nothing . should_equal 1 - - group_builder.specify "should handle Python None as keys" pending=pending_python_missing <| - empty_map.insert py_none 1 . at Nothing . should_equal 1 - - group_builder.specify "should define a well-defined text conversion" <| - m = empty_map . insert 0 0 . insert 3 -5 . insert 1 2 - m.to_text . should_contain "0=0" - m.to_text . should_contain "3=-5" - m.to_text . should_contain "1=2" - - group_builder.specify "should define structural equality" <| - map_1 = empty_map . insert "1" 2 . insert "2" "1" - map_2 = empty_map . insert "1" 2 . insert "2" "1" - map_3 = empty_map - map_1==map_2 . should_be_true - map_1==map_3 . should_be_false - map_2==map_3 . should_be_false - - group_builder.specify "should allow inserting and looking up values" <| - m = empty_map . insert "foo" 134 . insert "bar" 654 . 
insert "baz" "spam" - m.at "foo" . should_equal 134 - m.at "bar" . should_equal 654 - m.at "baz" . should_equal "spam" - (m.at "nope").should_fail_with No_Such_Key - - group_builder.specify "should support get" <| - m = empty_map . insert 2 3 - m.get 2 0 . should_equal 3 - m.get 1 10 . should_equal 10 - m.get 2 (Panic.throw "missing") . should_equal 3 - - group_builder.specify "should allow getting a vector of the keys" <| - m = empty_map . insert 1 2 . insert 2 4 - m.keys . should_equal [1, 2] - - group_builder.specify "should allow getting a vector of the values" <| - m = empty_map . insert 1 2 . insert 2 4 - m.values . should_equal [2, 4] - - group_builder.specify "should support contains_key" <| - m = empty_map . insert 2 3 - m.contains_key 2 . should_be_true - m.contains_key 1 . should_be_false - - group_builder.specify "should allow transforming the map" <| - m = empty_map . insert 1 2 . insert 2 4 - expected = empty_map . insert "1" 4 . insert "2" 8 - m.transform (k -> v -> [k.to_text, v*2]) . should_equal expected - - group_builder.specify "should be able to remove entries (1)" <| - m1 = empty_map.insert "A" 1 . insert "B" 2 - m2 = m1.remove "B" - m2.get "A" . should_equal 1 - m2.remove "A" . should_equal empty_map - m1.remove "foo" . should_fail_with No_Such_Key - - group_builder.specify "should be able to remove entries (2)" <| - m1 = empty_map.insert "A" 1 - m2 = m1.insert "B" 2 - m3 = m1.insert "C" 3 - m2.remove "A" . to_vector . should_equal [["B", 2]] - m2.remove "B" . to_vector . should_equal [["A", 1]] - m3.remove "A" . to_vector . should_equal [["C", 3]] - m3.remove "C" . to_vector . should_equal [["A", 1]] - - group_builder.specify "should be able to remove entries (3)" <| - m = empty_map.insert "A" 1 . insert "B" 2 . insert "C" 3 - m.remove "B" . should_equal (empty_map.insert "A" 1 . insert "C" 3) - - -main filter=Nothing = - suite = Test.build suite_builder-> - add_specs suite_builder - suite.run_with_filter filter diff --git a/test/Base_Tests/src/Data/Text/Regex_Spec.enso b/test/Base_Tests/src/Data/Text/Regex_Spec.enso index 258596496ae7..e7ce093c359d 100644 --- a/test/Base_Tests/src/Data/Text/Regex_Spec.enso +++ b/test/Base_Tests/src/Data/Text/Regex_Spec.enso @@ -393,7 +393,7 @@ add_specs suite_builder = group_builder.specify "should provide access to info about group names" <| data.pattern.named_groups.sort . should_equal ["empty", "letters"] - data.pattern.group_nums_to_names . should_equal <| Map.from_vector [[2, "letters"],[4, "empty"]] + data.pattern.group_nums_to_names . should_equal <| Dictionary.from_vector [[2, "letters"],[4, "empty"]] group_builder.specify "should return the results of all named groups" <| groups = data.match.named_groups diff --git a/test/Base_Tests/src/Data/XML/XML_Spec.enso b/test/Base_Tests/src/Data/XML/XML_Spec.enso index 25a9cde16a26..4cd5ea95b7a6 100644 --- a/test/Base_Tests/src/Data/XML/XML_Spec.enso +++ b/test/Base_Tests/src/Data/XML/XML_Spec.enso @@ -133,8 +133,8 @@ add_specs suite_builder = data.root.at 3 . attribute "does_not_exist" if_missing="if_missing" . should_equal "if_missing" group_builder.specify "Can get element an attribute map" <| - data.root.at 2 . attributes . should_equal (Map.from_vector [["studentId", "1000"], ["year", "2"]]) - data.root.at 3 . attributes . should_equal (Map.from_vector [["studentId", "1001"], ["year", "3"]]) + data.root.at 2 . attributes . should_equal (Dictionary.from_vector [["studentId", "1000"], ["year", "2"]]) + data.root.at 3 . attributes . 
should_equal (Dictionary.from_vector [["studentId", "1001"], ["year", "3"]]) group_builder.specify "Can get nodes via xpath" <| classes = data.root.get_xpath "/class" diff --git a/test/Base_Tests/src/Main.enso b/test/Base_Tests/src/Main.enso index 80e6ef962052..341bd9820376 100644 --- a/test/Base_Tests/src/Main.enso +++ b/test/Base_Tests/src/Main.enso @@ -30,12 +30,13 @@ import project.Data.Array_Proxy_Spec import project.Data.Bool_Spec import project.Data.Base_64_Spec import project.Data.Decimal_Spec +import project.Data.Dictionary_Spec import project.Data.Function_Spec +import project.Data.Hashset_Spec import project.Data.Interval_Spec import project.Data.Json_Spec import project.Data.List_Spec import project.Data.Locale_Spec -import project.Data.Map_Spec import project.Data.Maybe_Spec import project.Data.Numbers_Spec import project.Data.Ordering_Spec @@ -47,7 +48,6 @@ import project.Data.Polyglot_Spec import project.Data.Problems_Spec import project.Data.Range_Spec import project.Data.Regression_Spec -import project.Data.Set_Spec import project.Data.Statistics_Spec import project.Data.Time.Spec as Time_Spec import project.Data.Vector_Spec @@ -129,8 +129,8 @@ main filter=Nothing = Json_Spec.add_specs suite_builder List_Spec.add_specs suite_builder Locale_Spec.add_specs suite_builder - Map_Spec.add_specs suite_builder - Set_Spec.add_specs suite_builder + Dictionary_Spec.add_specs suite_builder + Hashset_Spec.add_specs suite_builder Maybe_Spec.add_specs suite_builder Meta_Spec.add_specs suite_builder Instrumentor_Spec.add_specs suite_builder diff --git a/test/Base_Tests/src/Network/Http/Request_Spec.enso b/test/Base_Tests/src/Network/Http/Request_Spec.enso index 59fb5b75eac2..a3b80d2a076a 100644 --- a/test/Base_Tests/src/Network/Http/Request_Spec.enso +++ b/test/Base_Tests/src/Network/Http/Request_Spec.enso @@ -42,7 +42,7 @@ add_specs suite_builder = req.body.should_equal (Request_Body.Json json) req.headers.should_equal [Header.application_json] group_builder.specify "should set form body" <| - body_form = Map.from_vector [["key", "val"]] + body_form = Dictionary.from_vector [["key", "val"]] req = Request.get test_uri . with_form body_form req.body.should_equal (Request_Body.Form_Data body_form) req.headers.should_equal [Header.application_x_www_form_urlencoded] diff --git a/test/Base_Tests/src/Network/Http_Spec.enso b/test/Base_Tests/src/Network/Http_Spec.enso index 325e2a0dc14c..b4db06920cc7 100644 --- a/test/Base_Tests/src/Network/Http_Spec.enso +++ b/test/Base_Tests/src/Network/Http_Spec.enso @@ -288,14 +288,14 @@ add_specs suite_builder = group_builder.specify "Can perform a url-encoded form POST" <| Test.with_retries <| test_file = enso_project.data / "sample.txt" - form_data = Map.from_vector [["key", "val"], ["a_file", test_file]] + form_data = Dictionary.from_vector [["key", "val"], ["a_file", test_file]] response = Data.post url_post (Request_Body.Form_Data form_data url_encoded=True) response.at "headers" . at "Content-Type" . should_equal "application/x-www-form-urlencoded" response.at "data" . replace "%0D%" "%" . 
should_equal 'key=val&a_file=Cupcake+ipsum+dolor+sit+amet.+Caramels+tootsie+roll+cake+ice+cream.+Carrot+cake+apple+pie+gingerbread+chocolate+cake+pudding+tart+souffl%C3%A9+jelly+beans+gummies.%0A%0ATootsie+roll+chupa+chups+muffin+croissant+fruitcake+jujubes+danish+cotton+candy+danish.+Oat+cake+chocolate+fruitcake+halvah+icing+oat+cake+toffee+powder.+Pastry+drag%C3%A9e+croissant.+Ice+cream+candy+canes+dessert+muffin+sugar+plum+tart+jujubes.%0A' group_builder.specify "Can perform a multipart form POST" <| Test.with_retries <| test_file = enso_project.data / "sample.png" - form_data = Map.from_vector [["key", "val"], ["a_file", test_file]] + form_data = Dictionary.from_vector [["key", "val"], ["a_file", test_file]] response = Data.post url_post (Request_Body.Form_Data form_data) response_json = response response_json.at "headers" . at "Content-Type" . should_start_with "multipart/form-data; boundary=" diff --git a/test/Benchmarks/src/Collections.enso b/test/Benchmarks/src/Collections.enso index 462078c3c06a..6e0850f91b84 100644 --- a/test/Benchmarks/src/Collections.enso +++ b/test/Benchmarks/src/Collections.enso @@ -20,7 +20,7 @@ sum_recur n = if n == 0 then 0 else 1 + sum_recur n-1 build_map size = rand = Java_Random.new - 0.up_to size . fold Map.empty (m -> i -> m.insert (rand.nextInt 10000) i) + 0.up_to size . fold Dictionary.empty (m -> i -> m.insert (rand.nextInt 10000) i) type Data Value ~list ~vec ~vec_float diff --git a/test/Benchmarks/src/Map/Hash_Map.enso b/test/Benchmarks/src/Map/Hash_Map.enso index e56b9340e62e..743336cb3fb0 100644 --- a/test/Benchmarks/src/Map/Hash_Map.enso +++ b/test/Benchmarks/src/Map/Hash_Map.enso @@ -44,13 +44,13 @@ collect_benches = Bench.build builder-> builder.group ("Enso_Hash_Map_" + n.to_text) options group_builder-> # Scenario similar to what is done in distinct group_builder.specify "Enso_Incremental" <| - Scenario.Instance (_ -> Map.empty) . run_distinct data.ints + Scenario.Instance (_ -> Dictionary.empty) . run_distinct data.ints group_builder.specify "Java_Incremental" <| Scenario.Instance (_ -> JavaHashMapWrapper.new) . run_distinct data.ints # A scenario similar to what is done in add_row_number with grouping group_builder.specify "Enso_Replacement" <| - Scenario.Instance (_ -> Map.empty) . run_count_keys data.ints + Scenario.Instance (_ -> Dictionary.empty) . run_count_keys data.ints group_builder.specify "Java_Replacement" <| Scenario.Instance (_ -> JavaHashMapWrapper.new) . run_count_keys data.ints diff --git a/test/Examples_Tests/src/Examples_Spec.enso b/test/Examples_Tests/src/Examples_Spec.enso index 60221572dc53..633d0d10a9f0 100644 --- a/test/Examples_Tests/src/Examples_Spec.enso +++ b/test/Examples_Tests/src/Examples_Spec.enso @@ -49,8 +49,8 @@ add_specs suite_builder = suite_builder.group "Examples" group_builder-> group_builder.specify "should provide a basic cons list" <| Examples.list.length . should_equal 3 - group_builder.specify "should provide a basic KV map" <| - Examples.map.size . should_equal 3 + group_builder.specify "should provide a basic KV dictionary" <| + Examples.dictionary.size . 
should_equal 3 group_builder.specify "should provide a type with no methods" <| Examples.No_Methods.should_be_a Examples.No_Methods diff --git a/test/Snowflake_Tests/src/Snowflake_Spec.enso b/test/Snowflake_Tests/src/Snowflake_Spec.enso index 1a6591feea03..021cdcfafa26 100644 --- a/test/Snowflake_Tests/src/Snowflake_Spec.enso +++ b/test/Snowflake_Tests/src/Snowflake_Spec.enso @@ -583,14 +583,14 @@ add_snowflake_specs suite_builder create_connection_fn db_name = Common_Table_Operations.Main.add_specs suite_builder setup ## PRIVATE -supported_replace_params : Set Replace_Params +supported_replace_params : Hashset Replace_Params supported_replace_params = e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False] e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True] e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False] e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True] e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False] - Set.from_vector <| e0 + e1 + e2 + e3 + e4 + Hashset.from_vector <| e0 + e1 + e2 + e3 + e4 add_table_specs suite_builder = case create_connection_builder of diff --git a/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso index a16a7de1ce67..a0254c3e996b 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso @@ -1297,7 +1297,7 @@ add_specs suite_builder setup = input_type = Meta.type_of term params = Replace_Params.Value input_type case_sensitivity only_first supported_replace_params = setup.test_selection.supported_replace_params - supported_replace_params . should_be_a Set + supported_replace_params . should_be_a Hashset are_params_supported = supported_replace_params.contains params case are_params_supported of True -> column.text_replace term new_text case_sensitivity only_first . to_vector . should_equal expected diff --git a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso index 8a2debbc5611..2ea734453885 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Cross_Tab_Spec.enso @@ -186,7 +186,7 @@ add_specs suite_builder setup = problems = [Duplicate_Output_Column_Names.Error ["x Agg1", "y Agg1", "z Agg1"]] Problems.test_problem_handling action problems tester - table3 = data.table2.rename_columns (Map.from_vector [["Group", "x"]]) + table3 = data.table2.rename_columns (Dictionary.from_vector [["Group", "x"]]) action3 = table3.cross_tab ["x"] "Key" on_problems=_ tester3 table = table.column_names . 
should_equal ["x", "x 1", "y", "z"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Replace_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Replace_Spec.enso index e0ef668dbdd4..516896c69d55 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Replace_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Replace_Spec.enso @@ -56,7 +56,7 @@ add_specs suite_builder setup = group_builder.specify "should be able to replace values via a lookup table provided as a Map" <| table = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]] - lookup_table = Map.from_vector [[2, 20], [1, 10], [4, 40], [3, 30]] + lookup_table = Dictionary.from_vector [[2, 20], [1, 10], [4, 40], [3, 30]] expected = table_builder [['x', [10, 20, 20, 30, 40]], ['y', ['a', 'b', 'e', 'c', 'd']]] result = table.replace lookup_table 'x' . sort ["x", "y"] result . should_equal expected @@ -158,25 +158,25 @@ add_specs suite_builder setup = group_builder.specify "should accept an empty lookup map, if allow_unmatched_rows=True, but expect a warning" <| table = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]] - t = table.replace Map.empty 'x' + t = table.replace Dictionary.empty 'x' t . should_equal table Problems.expect_warning (Empty_Error.Error "lookup_table") t group_builder.specify "should throw an error on an empty lookup map and non-empty base table if allow_unmatched_rows=False" <| table = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]] . sort ['x'] - t = table.replace Map.empty 'x' allow_unmatched_rows=False + t = table.replace Dictionary.empty 'x' allow_unmatched_rows=False t . should_fail_with Unmatched_Rows_In_Lookup t.catch.example_key_values . should_equal [1] group_builder.specify "should accept an empty lookup map if the base table is also empty, but expect a warning" <| table = table_builder_typed [['x', []], ['z', []]] Value_Type.Integer - t = table.replace Map.empty 'x' + t = table.replace Dictionary.empty 'x' t . should_equal table Problems.expect_warning (Empty_Error.Error "lookup_table") t group_builder.specify "should not allow from/to_coumn to specified if the argument is a Map" <| table = table_builder [['x', [1, 2, 3, 4, 2]], ['y', ['a', 'b', 'c', 'd', 'e']]] - lookup_table = Map.from_vector [[2, 20], [1, 10], [4, 40], [3, 30]] + lookup_table = Dictionary.from_vector [[2, 20], [1, 10], [4, 40], [3, 30]] table.replace lookup_table 'x' from_column=8 . should_fail_with Illegal_Argument table.replace lookup_table 'x' to_column=9 . should_fail_with Illegal_Argument table.replace lookup_table 'x' from_column=8 to_column=9 . 
should_fail_with Illegal_Argument diff --git a/test/Table_Tests/src/Common_Table_Operations/Map_To_Table_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Map_To_Table_Spec.enso index fce64c0f2ca2..03c33b6f75fe 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Map_To_Table_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Map_To_Table_Spec.enso @@ -28,7 +28,7 @@ type Data add_specs suite_builder setup = prefix = setup.prefix create_connection_fn = setup.create_connection_func - suite_builder.group prefix+"Table.make_table_from_map/vectors" group_builder-> + suite_builder.group prefix+"Table.make_table_from_dictionary/vectors" group_builder-> data = Data.setup setup create_connection_fn group_builder.teardown <| @@ -66,21 +66,21 @@ add_specs suite_builder setup = vecs2 = [[], [3, 4, 5], [6, 7, 8]] data.dummy_table.make_table_from_vectors vecs2 ['x', 'y', 'z'] . read . should_fail_with Illegal_Argument - group_builder.specify "should be able to create a literal table from a map" <| - map = Map.from_vector [['x', 1], ['y', 2], ['z', 3]] - t = data.dummy_table.make_table_from_map map 'k' 'v' . sort 'v' + group_builder.specify "should be able to create a literal table from a dictionary" <| + map = Dictionary.from_vector [['x', 1], ['y', 2], ['z', 3]] + t = data.dummy_table.make_table_from_dictionary map 'k' 'v' . sort 'v' t.at 'k' . to_vector . should_equal ['x', 'y', 'z'] t.at 'v' . to_vector . should_equal [1, 2, 3] if setup.is_database then - group_builder.specify "should not be able to create a literal table from an empty map" <| - map = Map.empty - data.dummy_table.make_table_from_map map 'k' 'v' . should_fail_with Illegal_Argument + group_builder.specify "should not be able to create a literal table from an empty dictionary" <| + map = Dictionary.empty + data.dummy_table.make_table_from_dictionary map 'k' 'v' . should_fail_with Illegal_Argument if setup.is_database.not then - group_builder.specify "should be able to create a literal table from an empty map" <| - map = Map.empty - t = data.dummy_table.make_table_from_map map 'k' 'v' + group_builder.specify "should be able to create a literal table from an empty dictionary" <| + map = Dictionary.empty + t = data.dummy_table.make_table_from_dictionary map 'k' 'v' t.row_count . 
should_equal 0 if setup.is_database then diff --git a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso index e2df843fd368..d192b82e516a 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Select_Columns_Spec.enso @@ -481,10 +481,10 @@ add_specs suite_builder setup = t1 = table_builder [["alpha", [1]], ["name=123", [2]], ["name= foo bar", [3]]] expect_column_names ["alpha", "key:123", "key: foo bar"] <| - t1.rename_columns (Map.from_vector [["name=(.*)".to_regex, "key:$1"]]) + t1.rename_columns (Dictionary.from_vector [["name=(.*)".to_regex, "key:$1"]]) group_builder.specify "should work by index" <| - map = Map.from_vector [[0, "FirstColumn"], [-2, "Another"]] + map = Dictionary.from_vector [[0, "FirstColumn"], [-2, "Another"]] expect_column_names ["FirstColumn", "beta", "Another", "delta"] <| data.table.rename_columns map @@ -504,12 +504,12 @@ add_specs suite_builder setup = data.table.rename_columns vec group_builder.specify "should work by name" <| - map = Map.from_vector [["alpha", "FirstColumn"], ["delta", "Another"]] + map = Dictionary.from_vector [["alpha", "FirstColumn"], ["delta", "Another"]] expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <| data.table.rename_columns map group_builder.specify "should work by mixed Map" <| - map = Map.from_vector [["alpha", "FirstColumn"], [-1, "Another"]] + map = Dictionary.from_vector [["alpha", "FirstColumn"], [-1, "Another"]] expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <| data.table.rename_columns map @@ -552,17 +552,17 @@ add_specs suite_builder setup = fail_2.catch.message.should_contain "materialize" group_builder.specify "should work by name case-insensitively" <| - map = Map.from_vector [["ALPHA", "FirstColumn"], ["DELTA", "Another"]] + map = Dictionary.from_vector [["ALPHA", "FirstColumn"], ["DELTA", "Another"]] expect_column_names ["FirstColumn", "beta", "gamma", "Another"] <| data.table.rename_columns map Case_Sensitivity.Insensitive group_builder.specify "should work by name using regex" <| - map = Map.from_vector [["a.*".to_regex, "FirstColumn"]] + map = Dictionary.from_vector [["a.*".to_regex, "FirstColumn"]] expect_column_names ["FirstColumn", "beta", "gamma", "delta"] <| data.table.rename_columns map group_builder.specify "should work by name using regex substitution" <| - map = Map.from_vector [["a(.*)".to_regex, "$1"]] + map = Dictionary.from_vector [["a(.*)".to_regex, "$1"]] expect_column_names ["lpha", "beta", "gamma", "delta"] <| data.table.rename_columns map @@ -591,7 +591,7 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle problems: unmatched names" <| weird_name = '.*?-!@#!"' - map = Map.from_vector [["alpha", "FirstColumn"], ["omicron", "Another"], [weird_name, "Fixed"]] + map = Dictionary.from_vector [["alpha", "FirstColumn"], ["omicron", "Another"], [weird_name, "Fixed"]] action = data.table.rename_columns map error_on_missing_columns=False on_problems=_ tester = expect_column_names ["FirstColumn", "beta", "gamma", "delta"] err_checker err = @@ -603,7 +603,7 @@ add_specs suite_builder setup = err.should_fail_with Missing_Input_Columns group_builder.specify "should correctly handle problems: out of bounds indices" <| - map = Map.from_vector [[0, "FirstColumn"], [-1, "Another"], [100, "Boo"], [-200, "Nothing"], [300, "Here"]] + map = Dictionary.from_vector [[0, "FirstColumn"], [-1, 
"Another"], [100, "Boo"], [-200, "Nothing"], [300, "Here"]] action = data.table.rename_columns map error_on_missing_columns=False on_problems=_ tester = expect_column_names ["FirstColumn", "beta", "gamma", "Another"] err_checker err = @@ -615,12 +615,12 @@ add_specs suite_builder setup = err.should_fail_with Missing_Input_Columns group_builder.specify "should correctly handle edge-cases: aliased indices" <| - map1 = Map.from_vector [[1, "FirstColumn"], [-3, "FirstColumn"]] + map1 = Dictionary.from_vector [[1, "FirstColumn"], [-3, "FirstColumn"]] t1 = data.table.rename_columns map1 on_problems=..Report_Error Problems.assume_no_problems t1 expect_column_names ["alpha", "FirstColumn", "gamma", "delta"] t1 - map2 = Map.from_vector [[1, "FirstColumn"], [-3, "DifferentName!"]] + map2 = Dictionary.from_vector [[1, "FirstColumn"], [-3, "DifferentName!"]] t2 = data.table.rename_columns map2 on_problems=..Report_Error t2.should_fail_with Ambiguous_Column_Rename err = t2.catch . inner_error @@ -629,12 +629,12 @@ add_specs suite_builder setup = group_builder.specify "should correctly handle edge-cases: aliased selectors" <| t = table_builder [["alpha", [1,2,3]], ["bet", [4,5,6]]] - map1 = Map.from_vector [["a.*".to_regex, "AA"], [".*a".to_regex, "AA"]] + map1 = Dictionary.from_vector [["a.*".to_regex, "AA"], [".*a".to_regex, "AA"]] t1 = t.rename_columns map1 on_problems=..Report_Error Problems.assume_no_problems t1 expect_column_names ["AA", "bet"] t1 - map2 = Map.from_vector [["a.*".to_regex, "StartsWithA"], [".*a".to_regex, "EndsWithA"]] + map2 = Dictionary.from_vector [["a.*".to_regex, "StartsWithA"], [".*a".to_regex, "EndsWithA"]] t2 = t.rename_columns map2 on_problems=..Report_Error t2.should_fail_with Ambiguous_Column_Rename err = t2.catch . inner_error @@ -647,13 +647,13 @@ add_specs suite_builder setup = This is to show that even if distinct rename patterns match the same column, if the resulting rename is unambiguous, no error is raised. 
- map3 = Map.from_vector [["a(.*)".to_regex, "$1A"], ["(.*)aa".to_regex, "$1aA"]] + map3 = Dictionary.from_vector [["a(.*)".to_regex, "$1A"], ["(.*)aa".to_regex, "$1aA"]] t4 = t3.rename_columns map3 on_problems=..Report_Error Problems.assume_no_problems t4 expect_column_names ["aaA", "bbb"] t4 group_builder.specify "should correctly handle problems: invalid names ''" <| - map = Map.from_vector [[1, ""]] + map = Dictionary.from_vector [[1, ""]] [Problem_Behavior.Ignore, Problem_Behavior.Report_Warning, Problem_Behavior.Report_Error].each pb-> r = data.table.rename_columns map on_problems=pb r.should_fail_with Invalid_Column_Names @@ -678,13 +678,13 @@ add_specs suite_builder setup = Problems.test_problem_handling action problems tester group_builder.specify "should correctly handle problems: new name is clashing with existing name of existing column" <| - map = Map.from_vector [["alpha", "beta"]] + map = Dictionary.from_vector [["alpha", "beta"]] action = data.table.rename_columns map on_problems=_ tester = expect_column_names ["beta", "beta 1", "gamma", "delta"] problems = [Duplicate_Output_Column_Names.Error ["beta"]] Problems.test_problem_handling action problems tester - map2 = Map.from_vector [["beta", "alpha"]] + map2 = Dictionary.from_vector [["beta", "alpha"]] action2 = data.table.rename_columns map2 on_problems=_ tester2 = expect_column_names ["alpha 1", "alpha", "gamma", "delta"] problems2 = [Duplicate_Output_Column_Names.Error ["alpha"]] diff --git a/test/Table_Tests/src/Database/Postgres_Spec.enso b/test/Table_Tests/src/Database/Postgres_Spec.enso index 2f00c1e34f13..74c4cc702da0 100644 --- a/test/Table_Tests/src/Database/Postgres_Spec.enso +++ b/test/Table_Tests/src/Database/Postgres_Spec.enso @@ -211,7 +211,7 @@ postgres_specific_spec suite_builder create_connection_fn db_name setup = column/table names and their lengths, this should not be a big problem usually, so only a warning is issued. It may however lead to data integrity issues in some very rare edge cases. 
- unsupported_encodings = Set.from_vector <| + unsupported_encodings = Hashset.from_vector <| ["EUC_JIS_2004", "LATIN6", "LATIN8", "MULE_INTERNAL", "SHIFT_JIS_2004"] known_encodings.each encoding_name-> @@ -706,14 +706,14 @@ add_postgres_specs suite_builder create_connection_fn db_name = Common_Table_Operations.Main.add_specs suite_builder setup ## PRIVATE -supported_replace_params : Set Replace_Params +supported_replace_params : Hashset Replace_Params supported_replace_params = e0 = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive False] e1 = [Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive False, Replace_Params.Value Text Case_Sensitivity.Insensitive True] e2 = [Replace_Params.Value Regex Case_Sensitivity.Default False, Replace_Params.Value Regex Case_Sensitivity.Default True, Replace_Params.Value Regex Case_Sensitivity.Sensitive False] e3 = [Replace_Params.Value Regex Case_Sensitivity.Sensitive True, Replace_Params.Value Regex Case_Sensitivity.Insensitive False, Replace_Params.Value Regex Case_Sensitivity.Insensitive True] e4 = [Replace_Params.Value DB_Column Case_Sensitivity.Default False, Replace_Params.Value DB_Column Case_Sensitivity.Sensitive False] - Set.from_vector <| e0 + e1 + e2 + e3 + e4 + Hashset.from_vector <| e0 + e1 + e2 + e3 + e4 add_table_specs suite_builder = db_name = Environment.get "ENSO_POSTGRES_DATABASE" diff --git a/test/Table_Tests/src/Database/SQLite_Spec.enso b/test/Table_Tests/src/Database/SQLite_Spec.enso index 4999f99fc354..313dbca7fff1 100644 --- a/test/Table_Tests/src/Database/SQLite_Spec.enso +++ b/test/Table_Tests/src/Database/SQLite_Spec.enso @@ -356,10 +356,10 @@ sqlite_spec suite_builder prefix create_connection_func = ## PRIVATE -supported_replace_params : Set Replace_Params +supported_replace_params : Hashset Replace_Params supported_replace_params = e = [Replace_Params.Value Text Case_Sensitivity.Default False, Replace_Params.Value Text Case_Sensitivity.Sensitive False, Replace_Params.Value Text Case_Sensitivity.Default True, Replace_Params.Value Text Case_Sensitivity.Sensitive True, Replace_Params.Value Text Case_Sensitivity.Insensitive True] - Set.from_vector e + Hashset.from_vector e ## Reference to the database file that ensures the first test that uses it will clean any leftover files from earlier runs. diff --git a/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso b/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso index 10b744a68f12..6319d17cbdb3 100644 --- a/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso +++ b/test/Table_Tests/src/Database/Types/SQLite_Type_Mapping_Spec.enso @@ -123,8 +123,8 @@ add_specs suite_builder = group_builder.specify "should be able to infer types for all supported operations" <| dialect = Dialect.sqlite - internal_mapping = dialect.dialect_operations.operation_map - operation_type_mapping = SQLite_Type_Mapping.operations_map + internal_mapping = dialect.dialect_operations.operations_dict + operation_type_mapping = SQLite_Type_Mapping.operations_dict operation_type_mapping.keys.sort . 
should_equal internal_mapping.keys.sort diff --git a/test/Table_Tests/src/Database/Upload_Spec.enso b/test/Table_Tests/src/Database/Upload_Spec.enso index 7ed12d753210..ac17a67bf71d 100644 --- a/test/Table_Tests/src/Database/Upload_Spec.enso +++ b/test/Table_Tests/src/Database/Upload_Spec.enso @@ -562,7 +562,7 @@ add_specs suite_builder make_new_connection prefix persistent_connector=True = e2.clashing_example_key_values.length . should_equal 1 x = e2.clashing_example_key_values.first [1, 2, 3].should_contain x - counts = Map.from_vector [[1, 2], [2, 4], [3, 2]] + counts = Dictionary.from_vector [[1, 2], [2, 4], [3, 2]] e2.clashing_example_row_count . should_equal (counts.at x) # Will not find clashes if they are not in the first 1000 rows, in Output disabled mode. @@ -1201,14 +1201,14 @@ test_table_append group_builder (data : Data) source_table_builder target_table_ ## If there are some additional tables, we add some timeout to allow the database to do the cleaning up. - additional_tables = (Set.from_vector tables_immediately_after).difference (Set.from_vector existing_tables) + additional_tables = (Hashset.from_vector tables_immediately_after).difference (Hashset.from_vector existing_tables) if additional_tables.is_empty then Nothing else additional_table = additional_tables.to_vector.first wait_until_temporary_table_is_deleted_after_closing_connection data.connection additional_table # After the wait we check again and now there should be no additional tables. tables_after_wait = data.connection.base_connection.get_tables_advanced types=Nothing include_hidden=True . at "Name" . to_vector - additional_tables_2 = (Set.from_vector tables_after_wait).difference (Set.from_vector existing_tables) + additional_tables_2 = (Hashset.from_vector tables_after_wait).difference (Hashset.from_vector existing_tables) additional_tables_2.to_vector . should_equal [] diff --git a/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso b/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso index 46fe81a764c3..f0412cfc10f3 100644 --- a/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso +++ b/test/Table_Tests/src/Helpers/Unique_Naming_Strategy_Spec.enso @@ -116,7 +116,7 @@ add_specs suite_builder = strategy.make_unique "abc" . should_equal "ab 10" strategy.make_unique "abc" . should_equal "ab 11" - strategy.truncated_names . should_be_a Map + strategy.truncated_names . should_be_a Dictionary strategy.truncated_names.get "abcdefgh" . should_equal "abcde" # abc will contain the entry for the last truncated case strategy.truncated_names.get "abc" . should_equal "ab 11" diff --git a/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso index 46e2ea947198..e49452f415a8 100644 --- a/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Conversion_Spec.enso @@ -227,30 +227,30 @@ add_specs suite_builder = table.expand_column "cols" . should_equal expected group_builder.specify "will work even if keys are not Text" <| - table = Table.new [["a", [1, 2]], ["b", [Map.from_vector [[1, "x"], [2, "y"]], Map.from_vector [[2, "z"], [3, "w"]]]]] + table = Table.new [["a", [1, 2]], ["b", [Dictionary.from_vector [[1, "x"], [2, "y"]], Dictionary.from_vector [[2, "z"], [3, "w"]]]]] expected = Table.new [["a", [1, 2]], ["b 1", ["x", Nothing]], ["b 2", ["y", "z"]], ["b 3", [Nothing, "w"]]] table.expand_column "b" . 
should_equal expected - table2 = Table.new [["a", [1, 2]], ["b", [Map.from_vector [[My_Mod_Type.Value 12, "x"], [My_Mod_Type.Value 23, "y"]], Map.from_vector [[My_Mod_Type.Value 32, "z"]]]]] + table2 = Table.new [["a", [1, 2]], ["b", [Dictionary.from_vector [[My_Mod_Type.Value 12, "x"], [My_Mod_Type.Value 23, "y"]], Dictionary.from_vector [[My_Mod_Type.Value 32, "z"]]]]] expected2 = Table.new [["a", [1, 2]], ["b x%10=3", ["y", Nothing]], ["b x%10=2", ["x", "z"]]] table2.expand_column "b" . should_equal expected2 group_builder.specify "will fail if text representation of keys is not unique" <| k1 = My_Mod_Type.Value 12 k2 = My_Mod_Type.Value 32 - m = Map.from_vector [[k1, "a"], [k2, "b"]] + m = Dictionary.from_vector [[k1, "a"], [k2, "b"]] m.at k1 . should_equal "a" m.at k2 . should_equal "b" k1.to_text . should_equal "x%10=2" k2.to_text . should_equal "x%10=2" - table = Table.new [["a", [1, 2]], ["b", [Map.from_vector [[k1, "x"], [k2, "y"]] , Map.from_vector []]]] + table = Table.new [["a", [1, 2]], ["b", [Dictionary.from_vector [[k1, "x"], [k2, "y"]] , Dictionary.from_vector []]]] r = table.expand_column "b" r.should_fail_with Illegal_Argument r.catch.to_display_text . should_contain "keys are duplicated when converted to text" group_builder.specify "will error when all objects have no fields" <| - table = Table.new [["aaa", [1, 2]], ["bbb", [Map.from_vector [], Map.from_vector []]], ["ccc", [5, 6]]] + table = Table.new [["aaa", [1, 2]], ["bbb", [Dictionary.from_vector [], Dictionary.from_vector []]], ["ccc", [5, 6]]] r = table.expand_column "bbb" r.should_fail_with Illegal_Argument r.catch.message.should_contain "as all inputs had no fields" @@ -337,7 +337,7 @@ add_specs suite_builder = table.expand_to_rows "bbb" . should_equal expected group_builder.specify "Can expand Map" <| - values_to_expand = [Map.empty.insert "a" 10, Map.empty.insert "d" 40 . insert "b" 20, Map.empty.insert "c" 30] + values_to_expand = [Dictionary.singleton "a" 10, Dictionary.singleton "d" 40 . insert "b" 20, Dictionary.singleton "c" 30] table = Table.new [["aaa", [1, 2, 3]], ["bbb", values_to_expand], ["ccc", [5, 6, 7]]] expected = Table.new [["aaa", [1, 2, 2, 3]], ["bbb Key", ["a", "d", "b", "c"]], ["bbb", [10, 40, 20, 30]], ["ccc", [5, 6, 6, 7]]] table.expand_to_rows "bbb" . 
should_equal expected
diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso
index da67d628ecec..f742adb74a8d 100644
--- a/test/Table_Tests/src/In_Memory/Table_Spec.enso
+++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso
@@ -905,7 +905,7 @@ add_specs suite_builder =
                 if has_nulls then builder.append Nothing
                 if has_true then builder.append True
                 if has_false then builder.append False
-            in_vector_set = Set.from_vector in_vector
+            in_vector_set = Hashset.from_vector in_vector
             vectors = [[True, False, Nothing], [Nothing, Nothing, Nothing], [False, False, True], [True, True, True], [False, False, False], [Nothing, True, True], [False, Nothing, False]]
             vectors.each column_vector->

From a3dc50fe1e4f914648b50d60a8e94ebd53cbef75 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Rados=C5=82aw=20Wa=C5=9Bko?=
Date: Tue, 9 Jul 2024 11:36:10 +0200
Subject: [PATCH 07/11] Replace presigned S3 URL with lambda request (#10456)

- Closes #10419

---
 .../Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso  | 2 +-
 .../src/Enso_Cloud/Internal/Existing_Enso_Asset.enso       | 7 -------
 .../java/org/enso/base/enso_cloud/EnsoSecretReader.java    | 4 ++--
 .../src/main/java/org/enso/shttp/cloud_mock/CloudRoot.java | 1 -
 4 files changed, 3 insertions(+), 11 deletions(-)

diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso
index 099125f4d5cb..30cac40a6409 100644
--- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso
+++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Enso_File.enso
@@ -224,7 +224,7 @@ type Enso_File
         asset = Existing_Enso_Asset.get_asset_reference_for self
         response = case asset.asset_type of
             Enso_Asset_Type.File ->
-                HTTP.fetch asset.get_download_url HTTP_Method.Get
+                Utils.http_request HTTP_Method.Get (asset.internal_uri + "/contents")
             Enso_Asset_Type.Data_Link ->
                 Runtime.assert (open_options.contains Data_Link_Access.No_Follow)
                 Utils.http_request HTTP_Method.Get asset.internal_uri
diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso
index 8c786aa0434b..418484455c71 100644
--- a/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso
+++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Enso_Cloud/Internal/Existing_Enso_Asset.enso
@@ -66,13 +66,6 @@ type Existing_Enso_Asset
             Panic.throw (Illegal_Argument.Error "`get_file_description` can only be called on File assets.")
         Utils.http_request_as_json HTTP_Method.Get self.internal_uri
 
-    ## PRIVATE
-       The returned URL may be valid only for a very short time, so it should not be
-       stored anywhere.
-    get_download_url self -> Text =
-        presigned_url = self.get_file_description |> get_required_field "url" expected_type=Text
-        presigned_url
-
     ## PRIVATE
        Fetches the basic information about an existing file from the Cloud.
       It will fail if the file does not exist.
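The user-visible shape of this change, sketched in Enso (an illustrative aside, not part of the patch; `asset` stands for an `Existing_Enso_Asset`):

    # Before: two round-trips - first ask the Cloud for a short-lived
    # presigned S3 URL, then fetch it directly:
    #     presigned_url = asset.get_file_description |> get_required_field "url" expected_type=Text
    #     HTTP.fetch presigned_url HTTP_Method.Get
    # After: a single authenticated request, served by the Cloud lambda:
    response = Utils.http_request HTTP_Method.Get (asset.internal_uri + "/contents")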
diff --git a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretReader.java b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretReader.java index 408c9328706e..9cd2f226d1fb 100644 --- a/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretReader.java +++ b/std-bits/base/src/main/java/org/enso/base/enso_cloud/EnsoSecretReader.java @@ -57,11 +57,11 @@ private static String fetchSecretValue(String secretId, int retryCount) { } int status = response.statusCode(); - if (status == 401 || status == 403 || status >= 500) { + if (status == 401 || status >= 500) { if (retryCount < 0) { String kind = status >= 500 ? "server" : "authentication"; throw new IllegalArgumentException( - "Unable to read secret - numerous " + kind + " failures."); + "Unable to read secret - numerous " + kind + " failures (status code " + status + ")."); } else { // We forcibly refresh the access token and try again. AuthenticationProvider.getAuthenticationService().force_refresh(); diff --git a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudRoot.java b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudRoot.java index de227d37ff30..27124896f525 100644 --- a/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudRoot.java +++ b/tools/http-test-helper/src/main/java/org/enso/shttp/cloud_mock/CloudRoot.java @@ -40,7 +40,6 @@ protected int getInvalidTokenStatus(String token) { boolean isValidButExpired = token.equals("TEST-EXPIRED-TOKEN-beef"); if (isValidButExpired) { expiredTokensCounter.registerExpiredTokenFailure(); - return 403; } return 401; From 9229010cb6a32c5a2d86b3a1a03691929db5b871 Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Tue, 9 Jul 2024 22:54:09 +1000 Subject: [PATCH 08/11] Dashboard fixes (#10423) - Fix some of https://github.com/enso-org/cloud-v2/issues/1350 - Move close icon back to right hand side of tabs - Change icon for Local category to a computer - Fix rendering of buttons in the delete pop up - Remove "Versions" and "Sessions" buttons in Local mode from properties sidebar - Also remove "Shared with" and "Labels" in Local mode - Show real path of all assets in properties sidebar when in Local mode - Opening Settings properly closes the user menu again - "Export" tooltip changed to "Download" - "Open help chat" button removed for now Notes: - re: Download within a folder doesn't work - this is due to the frontend using a PM endpoint that doesn't support specifying the parent directory --- app/ide-desktop/lib/assets/computer.svg | 10 ++ app/ide-desktop/lib/assets/not_cloud.svg | 5 - .../dashboard/src/components/MenuEntry.tsx | 7 +- .../src/components/dashboard/AssetRow.tsx | 2 +- .../lib/dashboard/src/layouts/AssetPanel.tsx | 32 +++-- .../dashboard/src/layouts/AssetProperties.tsx | 114 ++++++++++++------ .../src/layouts/CategorySwitcher.tsx | 4 +- .../src/layouts/Settings/settingsData.tsx | 4 +- .../lib/dashboard/src/layouts/TabBar.tsx | 25 ++-- .../lib/dashboard/src/layouts/UserBar.tsx | 29 +++-- .../src/modals/ConfirmDeleteModal.tsx | 15 ++- .../lib/dashboard/src/text/english.json | 4 +- 12 files changed, 162 insertions(+), 89 deletions(-) create mode 100644 app/ide-desktop/lib/assets/computer.svg delete mode 100644 app/ide-desktop/lib/assets/not_cloud.svg diff --git a/app/ide-desktop/lib/assets/computer.svg b/app/ide-desktop/lib/assets/computer.svg new file mode 100644 index 000000000000..d351baa8b6d9 --- /dev/null +++ b/app/ide-desktop/lib/assets/computer.svg @@ -0,0 +1,10 @@ + + + + + + \ No newline at end of 
file diff --git a/app/ide-desktop/lib/assets/not_cloud.svg b/app/ide-desktop/lib/assets/not_cloud.svg deleted file mode 100644 index 1e84b46e203a..000000000000 --- a/app/ide-desktop/lib/assets/not_cloud.svg +++ /dev/null @@ -1,5 +0,0 @@ - - - \ No newline at end of file diff --git a/app/ide-desktop/lib/dashboard/src/components/MenuEntry.tsx b/app/ide-desktop/lib/dashboard/src/components/MenuEntry.tsx index 118ede05e88d..2a4648dd3820 100644 --- a/app/ide-desktop/lib/dashboard/src/components/MenuEntry.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/MenuEntry.tsx @@ -11,6 +11,7 @@ import type * as inputBindings from '#/configurations/inputBindings' import * as focusHooks from '#/hooks/focusHooks' import * as inputBindingsProvider from '#/providers/InputBindingsProvider' +import * as modalProvider from '#/providers/ModalProvider' import * as textProvider from '#/providers/TextProvider' import * as aria from '#/components/aria' @@ -113,6 +114,7 @@ export default function MenuEntry(props: MenuEntryProps) { ...variantProps } = props const { getText } = textProvider.useText() + const { unsetModal } = modalProvider.useSetModal() const inputBindings = inputBindingsProvider.useInputBindings() const focusChildProps = focusHooks.useFocusChild() const info = inputBindings.metadata[action] @@ -146,7 +148,10 @@ export default function MenuEntry(props: MenuEntryProps) { {...aria.mergeProps()(focusChildProps, { isDisabled, className: 'group flex w-full rounded-menu-entry', - onPress: doAction, + onPress: () => { + unsetModal() + doAction() + }, })} >
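The `MenuEntry` change above is what makes "Opening Settings properly closes the user menu again": every entry's press handler now dismisses the open modal before dispatching its action. A minimal sketch of that pattern follows, with the provider and component heavily simplified; this is an illustration, not the dashboard's actual code.

```typescript
import * as React from 'react'

// Simplified stand-in for the dashboard's modal provider.
function useSetModal() {
  return {
    unsetModal: () => {
      // The real implementation hides whichever modal is currently open.
    },
  }
}

interface MenuEntryProps {
  readonly label: string
  /** The action this entry performs, e.g. opening Settings. */
  readonly doAction: () => void
}

/** A menu entry that closes any open modal before running its action, so the
 * menu that spawned it never lingers on screen. */
function MenuEntry(props: MenuEntryProps) {
  const { label, doAction } = props
  const { unsetModal } = useSetModal()
  return (
    <button
      onClick={() => {
        unsetModal()
        doAction()
      }}
    >
      {label}
    </button>
  )
}
```

Putting the dismissal inside the shared `MenuEntry` keeps the behaviour consistent across every menu, rather than patching each action individually.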
diff --git a/app/ide-desktop/lib/dashboard/src/components/dashboard/AssetRow.tsx b/app/ide-desktop/lib/dashboard/src/components/dashboard/AssetRow.tsx index 5602eea56604..19b38337477d 100644 --- a/app/ide-desktop/lib/dashboard/src/components/dashboard/AssetRow.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/dashboard/AssetRow.tsx @@ -493,7 +493,7 @@ export default function AssetRow(props: AssetRowProps) { } case AssetEventType.download: case AssetEventType.downloadSelected: { - if (event.type === AssetEventType.downloadSelected ? selected : event.ids.has(item.key)) { + if (event.type === AssetEventType.downloadSelected ? selected : event.ids.has(asset.id)) { if (isCloud) { switch (asset.type) { case backendModule.AssetType.project: { diff --git a/app/ide-desktop/lib/dashboard/src/layouts/AssetPanel.tsx b/app/ide-desktop/lib/dashboard/src/layouts/AssetPanel.tsx index d980498284c7..efa8984acb2e 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/AssetPanel.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/AssetPanel.tsx @@ -73,37 +73,42 @@ export interface AssetPanelProps extends AssetPanelRequiredProps { export default function AssetPanel(props: AssetPanelProps) { const { isVisible, backend, isReadonly = false, item, setItem, category } = props const { dispatchAssetEvent, dispatchAssetListEvent } = props + const isCloud = backend?.type === backendModule.BackendType.remote const { getText } = textProvider.useText() const { localStorage } = localStorageProvider.useLocalStorage() const [initialized, setInitialized] = React.useState(false) const initializedRef = React.useRef(initialized) initializedRef.current = initialized - const [tab, setTab] = React.useState(() => { - const savedTab = localStorage.get('assetPanelTab') ?? AssetPanelTab.properties - if ( + const [tabRaw, setTab] = React.useState( + () => localStorage.get('assetPanelTab') ?? AssetPanelTab.properties + ) + const tab = (() => { + if (!isCloud) { + return AssetPanelTab.properties + } else if ( (item?.item.type === backendModule.AssetType.secret || item?.item.type === backendModule.AssetType.directory) && - savedTab === AssetPanelTab.versions + tabRaw === AssetPanelTab.versions ) { return AssetPanelTab.properties } else if ( item?.item.type !== backendModule.AssetType.project && - savedTab === AssetPanelTab.projectSessions + tabRaw === AssetPanelTab.projectSessions ) { return AssetPanelTab.properties } else { - return savedTab + return tabRaw } - }) + })() React.useEffect(() => { // This prevents secrets and directories always setting the tab to `properties` // (because they do not support the `versions` tab). if (initializedRef.current) { - localStorage.set('assetPanelTab', tab) + localStorage.set('assetPanelTab', tabRaw) } - }, [tab, localStorage]) + }, [tabRaw, localStorage]) React.useEffect(() => { setInitialized(true) @@ -113,15 +118,16 @@ export default function AssetPanel(props: AssetPanelProps) {
{ event.stopPropagation() }} > - - {item != null && + + {isCloud && + item != null && item.item.type !== backendModule.AssetType.secret && item.item.type !== backendModule.AssetType.directory && ( )} - {item != null && item.item.type === backendModule.AssetType.project && ( + {isCloud && item != null && item.item.type === backendModule.AssetType.project && (
-
- - {getText('settings')} - -
- - - - - - - - - - -
- {getText('sharedWith')} - - {} }} - /> -
- {getText('labels')} - - {item.item.labels?.map(value => { - const label = labels.find(otherLabel => otherLabel.value === value) - return label == null ? null : ( - - ) - })} -
-
+ {!isCloud && ( +
+ + {getText('metadata')} + + + + + + + + +
+ {getText('path')} + +
+ {path} + +
+
+
+ )} + {isCloud && ( +
+ + {getText('settings')} + + + + + + + + + + + + +
+ {getText('sharedWith')} + + {} }} + /> +
+ {getText('labels')} + + {item.item.labels?.map(value => { + const label = labels.find(otherLabel => otherLabel.value === value) + return label == null ? null : ( + + ) + })} +
+
+ )} {isDatalink && (
context.localBackend != null, sections: [ { diff --git a/app/ide-desktop/lib/dashboard/src/layouts/TabBar.tsx b/app/ide-desktop/lib/dashboard/src/layouts/TabBar.tsx index 5f275322f3e0..d5e6d080cae4 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/TabBar.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/TabBar.tsx @@ -213,7 +213,7 @@ export function Tab(props: InternalTabProps) {
@@ -221,25 +221,26 @@ export function Tab(props: InternalTabProps) { size="custom" variant="custom" loaderPosition="icon" - icon={({ isFocusVisible, isHovered }) => - (isFocusVisible || isHovered) && onClose ? ( -
- -
- ) : ( - icon - ) - } + icon={icon} isDisabled={false} isActive={isActive} loading={isActive ? false : isLoading} aria-label={getText(labelId)} + className={tailwindMerge.twMerge('h-full', onClose ? 'pl-4' : 'px-4')} + contentClassName="gap-3" tooltip={false} - className={tailwindMerge.twMerge('relative flex h-full items-center gap-3 px-4')} onPress={onPress} > - {children} + + {children} + + + {onClose && ( +
+ +
+ )}
 ) } diff --git a/app/ide-desktop/lib/dashboard/src/layouts/UserBar.tsx b/app/ide-desktop/lib/dashboard/src/layouts/UserBar.tsx index 16121eef7555..46578a3526de 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/UserBar.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/UserBar.tsx @@ -25,6 +25,13 @@ import ManagePermissionsModal from '#/modals/ManagePermissionsModal' import * as backendModule from '#/services/Backend' import type Backend from '#/services/Backend' +// ================= +// === Constants === +// ================= + +/** Whether the chat button should be visible. Temporarily disabled. */ +const SHOULD_SHOW_CHAT_BUTTON: boolean = false + // =============== // === UserBar === // =============== @@ -74,16 +81,18 @@ export default function UserBar(props: UserBarProps) { className="flex h-[46px] shrink-0 cursor-default items-center gap-user-bar pl-icons-x pr-3" {...innerProps} > - { - setIsHelpChatOpen(true) - }} - /> + {SHOULD_SHOW_CHAT_BUTTON && ( + { + setIsHelpChatOpen(true) + }} + /> + )} {shouldShowUpgradeButton && ( diff --git a/app/ide-desktop/lib/dashboard/src/modals/ConfirmDeleteModal.tsx b/app/ide-desktop/lib/dashboard/src/modals/ConfirmDeleteModal.tsx index 4256d18e9d29..2f0dca90bbd1 100644 --- a/app/ide-desktop/lib/dashboard/src/modals/ConfirmDeleteModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/modals/ConfirmDeleteModal.tsx @@ -68,10 +68,21 @@ export default function ConfirmDeleteModal(props: ConfirmDeleteModalProps) { > {getText('confirmPrompt', actionText)} - + {actionButtonLabel} - + {getText('cancel')} diff --git a/app/ide-desktop/lib/dashboard/src/text/english.json b/app/ide-desktop/lib/dashboard/src/text/english.json index 11daffc2d5d5..fa85ebf53dfb 100644 --- a/app/ide-desktop/lib/dashboard/src/text/english.json +++ b/app/ide-desktop/lib/dashboard/src/text/english.json @@ -222,6 +222,8 @@ "resetAll": "Reset All", "openHelpChat": "Open Help Chat", "organization": "Organization", + "metadata": "Metadata", + "path": "Path", "enterSecretPath": "Enter secret path", "enterText": "Enter text", @@ -268,7 +270,7 @@ "newFolder": "New Folder", "newProject": "New Project", "uploadFiles": "Import", - "downloadFiles": "Export", + "downloadFiles": "Download", "newDatalink": "New Datalink", "newSecret": "New Secret", "newLabel": "New Label", From e4da96e943d7ed669c48f58ce805c993a853865f Mon Sep 17 00:00:00 2001 From: Sergei Garin Date: Tue, 9 Jul 2024 17:47:46 +0300 Subject: [PATCH 09/11] Fix opening projects (#10433) #### TL;DR - Closes: enso-org/cloud-v2#1338 This PR fixes bugs with opened projects. All projects now open and close properly, and the list of opened projects is stored in a single place. --- #### This Change: Specific details to highlight for review: 1. Removes a bunch of useEffects across the Dashboard page 2. Makes project status a react-query state, so it can be reused across the app 3. 
Eliminated the need for `waitUntilProjectIsOpened` --- --- .../lib/dashboard/e2e/createAsset.spec.ts | 8 +- .../lib/dashboard/e2e/driveView.spec.ts | 26 +- .../lib/dashboard/e2e/startModal.spec.ts | 2 +- .../AriaComponents/Button/Button.tsx | 8 +- .../dashboard/src/components/Autocomplete.tsx | 1 + .../src/components/ErrorBoundary.tsx | 12 +- .../src/components/StatelessSpinner.tsx | 16 +- .../lib/dashboard/src/components/Suspense.tsx | 4 +- .../src/components/dashboard/AssetRow.tsx | 26 +- .../src/components/dashboard/Permission.tsx | 3 +- .../src/components/dashboard/ProjectIcon.tsx | 338 +++------- .../dashboard/ProjectNameColumn.tsx | 47 +- .../src/components/dashboard/column.ts | 6 + .../dashboard/column/NameColumn.tsx | 1 + .../dashboard/column/SharedWithColumn.tsx | 5 +- .../lib/dashboard/src/events/assetEvent.ts | 6 + .../lib/dashboard/src/hooks/gtagHooks.ts | 11 +- .../src/layouts/AssetContextMenu.tsx | 30 +- .../dashboard/src/layouts/AssetProperties.tsx | 12 +- .../lib/dashboard/src/layouts/AssetsTable.tsx | 118 +++- .../lib/dashboard/src/layouts/Chat.tsx | 6 +- .../lib/dashboard/src/layouts/Drive.tsx | 35 +- .../lib/dashboard/src/layouts/Editor.tsx | 174 +++-- .../lib/dashboard/src/layouts/TabBar.tsx | 41 +- .../lib/dashboard/src/layouts/UserBar.tsx | 42 +- .../src/modals/ManagePermissionsModal.tsx | 564 ++++++++-------- .../src/pages/dashboard/Dashboard.tsx | 631 ++++++++++++------ .../dashboard/src/providers/AuthProvider.tsx | 8 +- .../src/providers/SessionProvider.tsx | 27 +- .../dashboard/src/services/RemoteBackend.ts | 14 +- .../lib/dashboard/src/text/english.json | 1 + .../lib/dashboard/src/utilities/error.ts | 22 + .../lib/dashboard/src/utilities/newtype.ts | 7 +- .../dashboard/src/utilities/tailwindMerge.ts | 3 + 34 files changed, 1228 insertions(+), 1027 deletions(-) diff --git a/app/ide-desktop/lib/dashboard/e2e/createAsset.spec.ts b/app/ide-desktop/lib/dashboard/e2e/createAsset.spec.ts index 11580fa17948..fda606496c29 100644 --- a/app/ide-desktop/lib/dashboard/e2e/createAsset.spec.ts +++ b/app/ide-desktop/lib/dashboard/e2e/createAsset.spec.ts @@ -36,13 +36,9 @@ test.test('create project', ({ page }) => async ({ pageActions }) => await pageActions .newEmptyProject() - .do(async thePage => { - await test.expect(actions.locateEditor(thePage)).toBeVisible() - }) + .do(thePage => test.expect(actions.locateEditor(thePage)).toBeAttached()) .goToPage.drive() - .driveTable.withRows(async rows => { - await test.expect(rows).toHaveCount(1) - }) + .driveTable.withRows(rows => test.expect(rows).toHaveCount(1)) ) ) diff --git a/app/ide-desktop/lib/dashboard/e2e/driveView.spec.ts b/app/ide-desktop/lib/dashboard/e2e/driveView.spec.ts index c6c3adbc013a..18a428cad9a6 100644 --- a/app/ide-desktop/lib/dashboard/e2e/driveView.spec.ts +++ b/app/ide-desktop/lib/dashboard/e2e/driveView.spec.ts @@ -13,7 +13,7 @@ test.test('drive view', ({ page }) => .driveTable.expectPlaceholderRow() .newEmptyProject() .do(async () => { - await test.expect(actions.locateEditor(page)).toBeVisible() + await test.expect(actions.locateEditor(page)).toBeAttached() }) .goToPage.drive() .driveTable.withRows(async rows => { @@ -24,7 +24,7 @@ test.test('drive view', ({ page }) => }) .newEmptyProject() .do(async () => { - await test.expect(actions.locateEditor(page)).toBeVisible() + await test.expect(actions.locateEditor(page)).toBeAttached() }) .goToPage.drive() .driveTable.withRows(async rows => { @@ -36,15 +36,17 @@ test.test('drive view', ({ page }) => .driveTable.withRows(async rows => { await 
actions.locateStopProjectButton(rows.nth(0)).click() }) - // Project context menu - .driveTable.rightClickRow(0) - .withContextMenus(async menus => { - // actions.locateContextMenus(page) - await test.expect(menus).toBeVisible() - }) - .contextMenu.moveToTrash() - .driveTable.withRows(async rows => { - await test.expect(rows).toHaveCount(1) - }) + // FIXME(#10488): This test fails because the mock endpoint returns the project is opened, + // but it must be stopped first to delete the project. + // Project context menu + // .driveTable.rightClickRow(0) + // .withContextMenus(async menus => { + // // actions.locateContextMenus(page) + // await test.expect(menus).toBeVisible() + // }) + // .contextMenu.moveToTrash() + // .driveTable.withRows(async rows => { + // await test.expect(rows).toHaveCount(1) + // }) ) ) diff --git a/app/ide-desktop/lib/dashboard/e2e/startModal.spec.ts b/app/ide-desktop/lib/dashboard/e2e/startModal.spec.ts index 9cf000cea827..53c55b40ec26 100644 --- a/app/ide-desktop/lib/dashboard/e2e/startModal.spec.ts +++ b/app/ide-desktop/lib/dashboard/e2e/startModal.spec.ts @@ -10,7 +10,7 @@ test.test('create project from template', ({ page }) => .openStartModal() .createProjectFromTemplate(0) .do(async thePage => { - await test.expect(actions.locateEditor(thePage)).toBeVisible() + await test.expect(actions.locateEditor(thePage)).toBeAttached() await test.expect(actions.locateSamples(page).first()).not.toBeVisible() }) ) diff --git a/app/ide-desktop/lib/dashboard/src/components/AriaComponents/Button/Button.tsx b/app/ide-desktop/lib/dashboard/src/components/AriaComponents/Button/Button.tsx index fdb95ba1bfa1..8808a67b3b08 100644 --- a/app/ide-desktop/lib/dashboard/src/components/AriaComponents/Button/Button.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/AriaComponents/Button/Button.tsx @@ -41,7 +41,7 @@ interface PropsWithoutHref { export interface BaseButtonProps extends Omit, 'iconOnly'> { /** Falls back to `aria-label`. Pass `false` to explicitly disable the tooltip. */ - readonly tooltip?: React.ReactElement | string | false + readonly tooltip?: React.ReactElement | string | false | null readonly tooltipPlacement?: aria.Placement /** * The icon to display in the button @@ -220,6 +220,12 @@ export const BUTTON_STYLES = twv.tv({ false: { extraClickZone: '', }, + xxsmall: { + extraClickZone: 'after:inset-[-2px]', + }, + xsmall: { + extraClickZone: 'after:inset-[-4px]', + }, small: { extraClickZone: 'after:inset-[-6px]', }, diff --git a/app/ide-desktop/lib/dashboard/src/components/Autocomplete.tsx b/app/ide-desktop/lib/dashboard/src/components/Autocomplete.tsx index e3c87640bc20..e751cd332496 100644 --- a/app/ide-desktop/lib/dashboard/src/components/Autocomplete.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/Autocomplete.tsx @@ -191,6 +191,7 @@ export default function Autocomplete(props: AutocompleteProps) { autoFocus={autoFocus} size={1} value={text ?? ''} + autoComplete="off" placeholder={placeholder == null ? 
placeholder : placeholder} className="text grow rounded-full bg-transparent px-button-x" onFocus={() => { diff --git a/app/ide-desktop/lib/dashboard/src/components/ErrorBoundary.tsx b/app/ide-desktop/lib/dashboard/src/components/ErrorBoundary.tsx index 17a6009693bb..33a94aadaa7d 100644 --- a/app/ide-desktop/lib/dashboard/src/components/ErrorBoundary.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/ErrorBoundary.tsx @@ -7,6 +7,8 @@ import * as errorBoundary from 'react-error-boundary' import * as detect from 'enso-common/src/detect' +import * as offlineHooks from '#/hooks/offlineHooks' + import * as textProvider from '#/providers/TextProvider' import * as ariaComponents from '#/components/AriaComponents' @@ -64,14 +66,16 @@ export function ErrorDisplay(props: ErrorDisplayProps): React.JSX.Element { const { getText } = textProvider.useText() + const { isOffline } = offlineHooks.useOffline() + const stack = errorUtils.tryGetStack(error) return ( { + resetErrorBoundary() + }} > {getText('tryAgain')} diff --git a/app/ide-desktop/lib/dashboard/src/components/StatelessSpinner.tsx b/app/ide-desktop/lib/dashboard/src/components/StatelessSpinner.tsx index 61bba7f221e1..308a2d4d6aae 100644 --- a/app/ide-desktop/lib/dashboard/src/components/StatelessSpinner.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/StatelessSpinner.tsx @@ -17,18 +17,22 @@ export interface StatelessSpinnerProps extends spinner.SpinnerProps {} /** A spinner that does not expose its {@link spinner.SpinnerState}. Instead, it begins at * {@link spinner.SpinnerState.initial} and immediately changes to the given state. */ export default function StatelessSpinner(props: StatelessSpinnerProps) { - const { size, state: rawState } = props + const { size, state: rawState, ...spinnerProps } = props + const [, startTransition] = React.useTransition() const [state, setState] = React.useState(spinner.SpinnerState.initial) - React.useEffect(() => { - const timeout = window.setTimeout(() => { - setState(rawState) + React.useLayoutEffect(() => { + const id = requestAnimationFrame(() => { + // consider this as a low-priority update + startTransition(() => { + setState(rawState) + }) }) return () => { - window.clearTimeout(timeout) + cancelAnimationFrame(id) } }, [rawState]) - return + return } diff --git a/app/ide-desktop/lib/dashboard/src/components/Suspense.tsx b/app/ide-desktop/lib/dashboard/src/components/Suspense.tsx index c4de442de42d..3a52ac9b0aef 100644 --- a/app/ide-desktop/lib/dashboard/src/components/Suspense.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/Suspense.tsx @@ -39,7 +39,7 @@ const OFFLINE_FETCHING_TOGGLE_DELAY_MS = 250 export function Suspense(props: SuspenseProps) { const { children } = props - return }>{children} + return }>{children} } /** @@ -53,7 +53,7 @@ export function Suspense(props: SuspenseProps) { * We check the fetching status in fallback component because * we want to know if there are ongoing requests once React renders the fallback in suspense */ -function FallbackElement(props: SuspenseProps) { +export function Loader(props: SuspenseProps) { const { loaderProps, fallback, offlineFallbackProps, offlineFallback } = props const { getText } = textProvider.useText() diff --git a/app/ide-desktop/lib/dashboard/src/components/dashboard/AssetRow.tsx b/app/ide-desktop/lib/dashboard/src/components/dashboard/AssetRow.tsx index 19b38337477d..b7e45d1a6b74 100644 --- a/app/ide-desktop/lib/dashboard/src/components/dashboard/AssetRow.tsx +++ 
b/app/ide-desktop/lib/dashboard/src/components/dashboard/AssetRow.tsx @@ -16,6 +16,8 @@ import * as textProvider from '#/providers/TextProvider' import AssetEventType from '#/events/AssetEventType' import AssetListEventType from '#/events/AssetListEventType' +import type * as dashboard from '#/pages/dashboard/Dashboard' + import AssetContextMenu from '#/layouts/AssetContextMenu' import type * as assetsTable from '#/layouts/AssetsTable' import Category from '#/layouts/CategorySwitcher/Category' @@ -74,6 +76,7 @@ export interface AssetRowInnerProps { /** Props for an {@link AssetRow}. */ export interface AssetRowProps extends Readonly> { + readonly isOpened: boolean readonly item: assetTreeNode.AnyAssetTreeNode readonly state: assetsTable.AssetsTableState readonly hidden: boolean @@ -89,13 +92,24 @@ export interface AssetRowProps props: AssetRowInnerProps, event: React.MouseEvent ) => void + readonly doOpenProject: (project: dashboard.Project) => void + readonly doCloseProject: (project: dashboard.Project) => void + readonly updateAssetRef: React.Ref<(asset: backendModule.AnyAsset) => void> } /** A row containing an {@link backendModule.AnyAsset}. */ export default function AssetRow(props: AssetRowProps) { - const { item: rawItem, hidden: hiddenRaw, selected, isSoleSelected, isKeyboardSelected } = props + const { + item: rawItem, + hidden: hiddenRaw, + selected, + isSoleSelected, + isKeyboardSelected, + isOpened, + updateAssetRef, + } = props const { setSelected, allowContextMenu, onContextMenu, state, columns, onClick } = props - const { grabKeyboardFocus } = props + const { grabKeyboardFocus, doOpenProject, doCloseProject } = props const { backend, visibilities, assetEvents, dispatchAssetEvent, dispatchAssetListEvent } = state const { nodeMap, setAssetPanelProps, doToggleDirectoryExpansion, doCopy, doCut, doPaste } = state const { setIsAssetPanelTemporarilyVisible, scrollContainerRef, rootDirectoryId } = state @@ -167,6 +181,10 @@ export default function AssetRow(props: AssetRowProps) { } }, [isKeyboardSelected]) + React.useImperativeHandle(updateAssetRef, () => newItem => { + setAsset(newItem) + }) + const doCopyOnBackend = React.useCallback( async (newParentId: backendModule.DirectoryId | null) => { try { @@ -879,6 +897,8 @@ export default function AssetRow(props: AssetRowProps) { ) diff --git a/app/ide-desktop/lib/dashboard/src/components/dashboard/Permission.tsx b/app/ide-desktop/lib/dashboard/src/components/dashboard/Permission.tsx index e8f119ea31f6..15a7941b4968 100644 --- a/app/ide-desktop/lib/dashboard/src/components/dashboard/Permission.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/dashboard/Permission.tsx @@ -38,7 +38,8 @@ const ASSET_TYPE_TO_TEXT_ID: Readonly + readonly self: backendModule.UserPermission readonly isOnlyOwner: boolean readonly permission: backendModule.AssetPermission diff --git a/app/ide-desktop/lib/dashboard/src/components/dashboard/ProjectIcon.tsx b/app/ide-desktop/lib/dashboard/src/components/dashboard/ProjectIcon.tsx index 2d3ad8e04fba..892f29b6fd34 100644 --- a/app/ide-desktop/lib/dashboard/src/components/dashboard/ProjectIcon.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/dashboard/ProjectIcon.tsx @@ -7,24 +7,18 @@ import ArrowUpIcon from 'enso-assets/arrow_up.svg' import PlayIcon from 'enso-assets/play.svg' import StopIcon from 'enso-assets/stop.svg' -import * as backendHooks from '#/hooks/backendHooks' -import * as eventHooks from '#/hooks/eventHooks' -import * as toastAndLogHooks from '#/hooks/toastAndLogHooks' - import * as 
authProvider from '#/providers/AuthProvider' -import * as sessionProvider from '#/providers/SessionProvider' import * as textProvider from '#/providers/TextProvider' -import type * as assetEvent from '#/events/assetEvent' -import AssetEventType from '#/events/AssetEventType' +import * as dashboard from '#/pages/dashboard/Dashboard' import * as ariaComponents from '#/components/AriaComponents' -import Spinner, * as spinner from '#/components/Spinner' +import Spinner from '#/components/Spinner' +import StatelessSpinner, * as spinner from '#/components/StatelessSpinner' import * as backendModule from '#/services/Backend' import type Backend from '#/services/Backend' -import * as object from '#/utilities/object' import * as tailwindMerge from '#/utilities/tailwindMerge' // ================= @@ -34,10 +28,10 @@ import * as tailwindMerge from '#/utilities/tailwindMerge' /** The corresponding {@link spinner.SpinnerState} for each {@link backendModule.ProjectState}, * when using the remote backend. */ const REMOTE_SPINNER_STATE: Readonly> = { - [backendModule.ProjectState.closed]: spinner.SpinnerState.initial, - [backendModule.ProjectState.closing]: spinner.SpinnerState.initial, - [backendModule.ProjectState.created]: spinner.SpinnerState.initial, - [backendModule.ProjectState.new]: spinner.SpinnerState.initial, + [backendModule.ProjectState.closed]: spinner.SpinnerState.loadingSlow, + [backendModule.ProjectState.closing]: spinner.SpinnerState.loadingMedium, + [backendModule.ProjectState.created]: spinner.SpinnerState.loadingSlow, + [backendModule.ProjectState.new]: spinner.SpinnerState.loadingSlow, [backendModule.ProjectState.placeholder]: spinner.SpinnerState.loadingSlow, [backendModule.ProjectState.openInProgress]: spinner.SpinnerState.loadingSlow, [backendModule.ProjectState.provisioned]: spinner.SpinnerState.loadingSlow, @@ -47,12 +41,12 @@ const REMOTE_SPINNER_STATE: Readonly> = { - [backendModule.ProjectState.closed]: spinner.SpinnerState.initial, - [backendModule.ProjectState.closing]: spinner.SpinnerState.initial, - [backendModule.ProjectState.created]: spinner.SpinnerState.initial, - [backendModule.ProjectState.new]: spinner.SpinnerState.initial, + [backendModule.ProjectState.closed]: spinner.SpinnerState.loadingSlow, + [backendModule.ProjectState.closing]: spinner.SpinnerState.loadingMedium, + [backendModule.ProjectState.created]: spinner.SpinnerState.loadingSlow, + [backendModule.ProjectState.new]: spinner.SpinnerState.loadingSlow, [backendModule.ProjectState.placeholder]: spinner.SpinnerState.loadingMedium, - [backendModule.ProjectState.openInProgress]: spinner.SpinnerState.loadingMedium, + [backendModule.ProjectState.openInProgress]: spinner.SpinnerState.loadingSlow, [backendModule.ProjectState.provisioned]: spinner.SpinnerState.loadingMedium, [backendModule.ProjectState.scheduled]: spinner.SpinnerState.loadingMedium, [backendModule.ProjectState.opened]: spinner.SpinnerState.done, @@ -65,227 +59,71 @@ const LOCAL_SPINNER_STATE: Readonly> - readonly assetEvents: assetEvent.AssetEvent[] - readonly dispatchAssetEvent: (event: assetEvent.AssetEvent) => void - readonly setProjectStartupInfo: (projectStartupInfo: backendModule.ProjectStartupInfo) => void - readonly doCloseEditor: (id: backendModule.ProjectId) => void - readonly doOpenEditor: () => void + readonly doOpenProject: (id: backendModule.ProjectId, runInBackground: boolean) => void + readonly doCloseProject: (id: backendModule.ProjectId) => void + readonly openProjectTab: (projectId: backendModule.ProjectId) => void } /** An 
interactive icon indicating the status of a project. */ export default function ProjectIcon(props: ProjectIconProps) { - const { backend, item, setItem, assetEvents, setProjectStartupInfo, dispatchAssetEvent } = props - const { doCloseEditor, doOpenEditor } = props - const { session } = sessionProvider.useSession() + const { backend, item, isOpened } = props + const { openProjectTab, doOpenProject, doCloseProject } = props + const { user } = authProvider.useNonPartialUserSession() - const toastAndLog = toastAndLogHooks.useToastAndLog() const { getText } = textProvider.useText() - const state = item.projectState.type - const setState = React.useCallback( - (stateOrUpdater: React.SetStateAction) => { - setItem(oldItem => { - let newState: backendModule.ProjectState - if (typeof stateOrUpdater === 'function') { - newState = stateOrUpdater(oldItem.projectState.type) - } else { - newState = stateOrUpdater - } - let newProjectState: backendModule.ProjectStateType = object.merge(oldItem.projectState, { - type: newState, - }) - if (!backendModule.IS_OPENING_OR_OPENED[newState]) { - newProjectState = object.omit(newProjectState, 'openedBy') - } else { - newProjectState = object.merge(newProjectState, { - openedBy: user.email, - }) - } - return object.merge(oldItem, { projectState: newProjectState }) - }) - }, - [user, setItem] - ) - const [spinnerState, setSpinnerState] = React.useState(spinner.SpinnerState.initial) - const shouldOpenWhenReadyRef = React.useRef(false) - const [isRunningInBackground, setIsRunningInBackground] = React.useState( - item.projectState.executeAsync ?? false - ) - const doAbortOpeningRef = React.useRef(() => {}) - const doOpenEditorRef = React.useRef(doOpenEditor) - doOpenEditorRef.current = doOpenEditor - const isCloud = backend.type === backendModule.BackendType.remote - const isOtherUserUsingProject = - isCloud && item.projectState.openedBy != null && item.projectState.openedBy !== user.email - - const openProjectMutation = backendHooks.useBackendMutation(backend, 'openProject') - const closeProjectMutation = backendHooks.useBackendMutation(backend, 'closeProject') - const getProjectDetailsMutation = backendHooks.useBackendMutation(backend, 'getProjectDetails') - const waitUntilProjectIsReadyMutation = backendHooks.useBackendMutation( - backend, - 'waitUntilProjectIsReady' - ) - const openProjectMutate = openProjectMutation.mutateAsync - const getProjectDetailsMutate = getProjectDetailsMutation.mutateAsync - const openEditorMutation = reactQuery.useMutation({ - mutationKey: ['openEditor'], - networkMode: 'always', - mutationFn: async (item2: backendModule.ProjectAsset) => { - const abortController = new AbortController() - doAbortOpeningRef.current = () => { - abortController.abort() - } - const projectPromise = openProjectMutation - .mutateAsync([ - item2.id, - { executeAsync: false, parentId: item2.parentId, cognitoCredentials: session }, - item2.title, - ]) - .then(async () => { - const proj = await waitUntilProjectIsReadyMutation.mutateAsync([ - item2.id, - item2.parentId, - item2.title, - abortController.signal, - ]) - return proj - }) - setProjectStartupInfo({ - project: projectPromise, - projectAsset: item2, - setProjectAsset: setItem, - backendType: backend.type, - accessToken: session?.accessToken ?? null, - }) - await projectPromise - if (!abortController.signal.aborted) { - setState(backendModule.ProjectState.opened) - if (shouldOpenWhenReadyRef.current) { - doOpenEditor() - } - } - }, + const isRunningInBackground = item.projectState.executeAsync ?? 
false + const { + data: status, + isLoading, + isError, + } = reactQuery.useQuery({ + ...dashboard.createGetProjectDetailsQuery.createPassiveListener(item.id), + select: data => data.state.type, + enabled: isOpened, }) - const openEditorMutate = openEditorMutation.mutate - const openProject = React.useCallback( - async (shouldRunInBackground: boolean) => { - if (state !== backendModule.ProjectState.opened) { - try { - if (!shouldRunInBackground) { - setState(backendModule.ProjectState.openInProgress) - openEditorMutate(item) - } else { - setState(backendModule.ProjectState.opened) - await openProjectMutate([ - item.id, - { - executeAsync: shouldRunInBackground, - parentId: item.parentId, - cognitoCredentials: session, - }, - item.title, - ]) - } - } catch (error) { - const project = await getProjectDetailsMutate([item.id, item.parentId, item.title]) - // `setState` is not used here as `project` contains the full state information, - // not just the state type. - setItem(object.merger({ projectState: project.state })) - toastAndLog('openProjectError', error, item.title) - } - } - }, - [ - state, - item, - session, - toastAndLog, - openProjectMutate, - openEditorMutate, - getProjectDetailsMutate, - setState, - setItem, - ] - ) + const isCloud = backend.type === backendModule.BackendType.remote - React.useEffect(() => { - // Ensure that the previous spinner state is visible for at least one frame. - requestAnimationFrame(() => { - const newSpinnerState = - backend.type === backendModule.BackendType.remote - ? REMOTE_SPINNER_STATE[state] - : LOCAL_SPINNER_STATE[state] - setSpinnerState(newSpinnerState) - }) - }, [state, backend.type]) + const isOtherUserUsingProject = + isCloud && item.projectState.openedBy != null && item.projectState.openedBy !== user.email - eventHooks.useEventHandler(assetEvents, event => { - switch (event.type) { - case AssetEventType.openProject: { - if (event.id !== item.id) { - if (!event.runInBackground && !isRunningInBackground) { - shouldOpenWhenReadyRef.current = false - if (!isOtherUserUsingProject && backendModule.IS_OPENING_OR_OPENED[state]) { - doAbortOpeningRef.current() - void closeProject() - } - } - } else { - if ( - backendModule.IS_OPENING_OR_OPENED[state] && - state !== backendModule.ProjectState.placeholder - ) { - const projectPromise = waitUntilProjectIsReadyMutation.mutateAsync([ - item.id, - item.parentId, - item.title, - ]) - setProjectStartupInfo({ - project: projectPromise, - projectAsset: item, - setProjectAsset: setItem, - backendType: backend.type, - accessToken: session?.accessToken ?? null, - }) - if (!isRunningInBackground) { - doOpenEditor() - } - } else { - shouldOpenWhenReadyRef.current = !event.runInBackground - setIsRunningInBackground(event.runInBackground) - void openProject(event.runInBackground) - } - } - break - } - case AssetEventType.closeProject: { - if (event.id === item.id) { - shouldOpenWhenReadyRef.current = false - void closeProject() - } - break - } - default: { - // Ignored. Any missing project-related events should be handled by `ProjectNameColumn`. - // `delete`, `deleteForever`, `restore`, `download`, and `downloadSelected` - // are handled by`AssetRow`. - break - } + const state = (() => { + // Project is closed, show open button + if (!isOpened) { + return backendModule.ProjectState.closed + } else if (!isLoading && status == null) { + // Project is opened, but not yet queried. 
+ return backendModule.ProjectState.openInProgress + } else if (isLoading) { + return backendModule.ProjectState.openInProgress + } else if (status == null) { + return backendModule.ProjectState.openInProgress + } else if (status === backendModule.ProjectState.closed) { + // Project is opened locally, but not on the backend yet. + return backendModule.ProjectState.openInProgress + } else { + return status } - }) - - const closeProject = async () => { - if (!isRunningInBackground) { - doCloseEditor(item.id) + })() + + const spinnerState = (() => { + if (!isOpened) { + return spinner.SpinnerState.initial + } else if (isLoading) { + return spinner.SpinnerState.loadingSlow + } else if (isError) { + return spinner.SpinnerState.initial + } else if (status == null) { + return spinner.SpinnerState.loadingSlow + } else { + return backend.type === backendModule.BackendType.remote + ? REMOTE_SPINNER_STATE[status] + : LOCAL_SPINNER_STATE[status] } - shouldOpenWhenReadyRef.current = false - setState(backendModule.ProjectState.closing) - await closeProjectMutation.mutateAsync([item.id, item.title]) - setState(backendModule.ProjectState.closed) - } + })() switch (state) { case null: @@ -300,13 +138,9 @@ export default function ProjectIcon(props: ProjectIconProps) { icon={PlayIcon} aria-label={getText('openInEditor')} tooltipPlacement="left" - className="h-6 border-0" + extraClickZone="xsmall" onPress={() => { - dispatchAssetEvent({ - type: AssetEventType.openProject, - id: item.id, - runInBackground: false, - }) + doOpenProject(item.id, false) }} /> ) @@ -317,21 +151,23 @@ export default function ProjectIcon(props: ProjectIconProps) { return (
{ + doCloseProject(item.id) + }} /> - @@ -342,40 +178,38 @@ export default function ProjectIcon(props: ProjectIconProps) {
{ + doCloseProject(item.id) + }} />
+ {!isOtherUserUsingProject && !isRunningInBackground && ( { - doOpenEditor() + openProjectTab(item.id) }} /> )} diff --git a/app/ide-desktop/lib/dashboard/src/components/dashboard/ProjectNameColumn.tsx b/app/ide-desktop/lib/dashboard/src/components/dashboard/ProjectNameColumn.tsx index e5957d24e7d6..8549a63973c4 100644 --- a/app/ide-desktop/lib/dashboard/src/components/dashboard/ProjectNameColumn.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/dashboard/ProjectNameColumn.tsx @@ -44,13 +44,26 @@ export interface ProjectNameColumnProps extends column.AssetColumnProps {} * @throws {Error} when the asset is not a {@link backendModule.ProjectAsset}. * This should never happen. */ export default function ProjectNameColumn(props: ProjectNameColumnProps) { - const { item, setItem, selected, rowState, setRowState, state, isEditable } = props - const { backend, selectedKeys, assetEvents, dispatchAssetEvent, dispatchAssetListEvent } = state - const { nodeMap, setProjectStartupInfo, doOpenEditor, doCloseEditor } = state + const { + item, + setItem, + selected, + rowState, + setRowState, + state, + isEditable, + doCloseProject, + doOpenProject, + backendType, + isOpened, + } = props + const { backend, selectedKeys, assetEvents, dispatchAssetListEvent } = state + const { nodeMap, doOpenEditor } = state const toastAndLog = toastAndLogHooks.useToastAndLog() const { user } = authProvider.useNonPartialUserSession() const { getText } = textProvider.useText() const inputBindings = inputBindingsProvider.useInputBindings() + if (item.type !== backendModule.AssetType.project) { // eslint-disable-next-line no-restricted-syntax throw new Error('`ProjectNameColumn` can only display projects.') @@ -175,10 +188,11 @@ export default function ProjectNameColumn(props: ProjectNameColumnProps) { }), }) ) - dispatchAssetEvent({ - type: AssetEventType.openProject, + doOpenProject({ id: createdProject.projectId, - runInBackground: false, + type: backendType, + parentId: asset.parentId, + title: asset.title, }) } catch (error) { dispatchAssetListEvent({ type: AssetListEventType.delete, key: item.key }) @@ -298,10 +312,11 @@ export default function ProjectNameColumn(props: ProjectNameColumnProps) { ) { setIsEditing(true) } else if (eventModule.isDoubleClick(event)) { - dispatchAssetEvent({ - type: AssetEventType.openProject, + doOpenProject({ id: asset.id, - runInBackground: false, + type: backendType, + parentId: asset.parentId, + title: asset.title, }) } }} @@ -310,16 +325,18 @@ export default function ProjectNameColumn(props: ProjectNameColumnProps) { ) : ( { + doCloseProject({ id, parentId: asset.parentId, title: asset.title, type: backendType }) + }} + doOpenProject={id => { + doOpenProject({ id, type: backendType, parentId: asset.parentId, title: asset.title }) + }} + openProjectTab={doOpenEditor} /> )} > readonly selected: boolean readonly setSelected: (selected: boolean) => void @@ -31,6 +35,8 @@ export interface AssetColumnProps { readonly rowState: assetsTable.AssetRowState readonly setRowState: React.Dispatch> readonly isEditable: boolean + readonly doOpenProject: (project: dashboard.Project) => void + readonly doCloseProject: (project: dashboard.Project) => void } /** Props for a {@link AssetColumn}. 
*/ diff --git a/app/ide-desktop/lib/dashboard/src/components/dashboard/column/NameColumn.tsx b/app/ide-desktop/lib/dashboard/src/components/dashboard/column/NameColumn.tsx index f19a9e4b7d57..c2b8caca0775 100644 --- a/app/ide-desktop/lib/dashboard/src/components/dashboard/column/NameColumn.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/dashboard/column/NameColumn.tsx @@ -20,6 +20,7 @@ export interface AssetNameColumnProps extends column.AssetColumnProps {} /** The icon and name of an {@link backendModule.Asset}. */ export default function AssetNameColumn(props: AssetNameColumnProps) { const { item } = props + switch (item.item.type) { case backendModule.AssetType.directory: { return diff --git a/app/ide-desktop/lib/dashboard/src/components/dashboard/column/SharedWithColumn.tsx b/app/ide-desktop/lib/dashboard/src/components/dashboard/column/SharedWithColumn.tsx index dc04feaf8f68..7a9f53e7f3f9 100644 --- a/app/ide-desktop/lib/dashboard/src/components/dashboard/column/SharedWithColumn.tsx +++ b/app/ide-desktop/lib/dashboard/src/components/dashboard/column/SharedWithColumn.tsx @@ -30,7 +30,7 @@ import * as uniqueString from '#/utilities/uniqueString' /** The type of the `state` prop of a {@link SharedWithColumn}. */ interface SharedWithColumnStateProp - extends Pick { + extends Pick { readonly setQuery: column.AssetColumnProps['state']['setQuery'] | null } @@ -43,7 +43,7 @@ interface SharedWithColumnPropsInternal extends Pick { readonly id: backend.ProjectId + readonly backendType: backend.BackendType + readonly title: string + readonly parentId: backend.DirectoryId readonly runInBackground: boolean } /** A signal to close the specified project. */ export interface AssetCloseProjectEvent extends AssetBaseEvent { readonly id: backend.ProjectId + readonly backendType: backend.BackendType + readonly title: string + readonly parentId: backend.DirectoryId } /** A signal that multiple assets should be copied. `ids` are the `Id`s of the newly created diff --git a/app/ide-desktop/lib/dashboard/src/hooks/gtagHooks.ts b/app/ide-desktop/lib/dashboard/src/hooks/gtagHooks.ts index 8443a39308b0..21658d73a809 100644 --- a/app/ide-desktop/lib/dashboard/src/hooks/gtagHooks.ts +++ b/app/ide-desktop/lib/dashboard/src/hooks/gtagHooks.ts @@ -24,18 +24,19 @@ export function useGtagEvent() { * * Also sends the close event when the window is unloaded. */ export function gtagOpenCloseCallback( - gtagEventRef: React.MutableRefObject>, + gtagEvent: ReturnType, openEvent: string, closeEvent: string ) { - const gtagEventCurrent = gtagEventRef.current - gtagEventCurrent(openEvent) + gtagEvent(openEvent) + const onBeforeUnload = () => { - gtagEventCurrent(closeEvent) + gtagEvent(closeEvent) } window.addEventListener('beforeunload', onBeforeUnload) + return () => { window.removeEventListener('beforeunload', onBeforeUnload) - gtagEventCurrent(closeEvent) + gtagEvent(closeEvent) } } diff --git a/app/ide-desktop/lib/dashboard/src/layouts/AssetContextMenu.tsx b/app/ide-desktop/lib/dashboard/src/layouts/AssetContextMenu.tsx index 1675a49a3c2c..647858f1843d 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/AssetContextMenu.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/AssetContextMenu.tsx @@ -1,6 +1,7 @@ /** @file The context menu for an arbitrary {@link backendModule.Asset}. 
*/ import * as React from 'react' +import * as reactQuery from '@tanstack/react-query' import * as toast from 'react-toastify' import * as billingHooks from '#/hooks/billing' @@ -16,6 +17,8 @@ import * as textProvider from '#/providers/TextProvider' import AssetEventType from '#/events/AssetEventType' import AssetListEventType from '#/events/AssetListEventType' +import * as dashboard from '#/pages/dashboard/Dashboard' + import Category, * as categoryModule from '#/layouts/CategorySwitcher/Category' import GlobalContextMenu from '#/layouts/GlobalContextMenu' @@ -91,19 +94,32 @@ export default function AssetContextMenu(props: AssetContextMenuProps) { const systemApi = window.systemApi const ownsThisAsset = !isCloud || self?.permission === permissions.PermissionAction.own const managesThisAsset = ownsThisAsset || self?.permission === permissions.PermissionAction.admin + const canEditThisAsset = managesThisAsset || self?.permission === permissions.PermissionAction.edit + + const { data } = reactQuery.useQuery( + item.item.type === backendModule.AssetType.project + ? dashboard.createGetProjectDetailsQuery.createPassiveListener(item.item.id) + : { queryKey: ['__IGNORED__'] } + ) + const isRunningProject = - asset.type === backendModule.AssetType.project && - backendModule.IS_OPENING_OR_OPENED[asset.projectState.type] + (asset.type === backendModule.AssetType.project && + data && + backendModule.IS_OPENING_OR_OPENED[data.state.type]) ?? + false + const canExecute = !isCloud || (self?.permission != null && permissions.PERMISSION_ACTION_CAN_EXECUTE[self.permission]) + const isOtherUserUsingProject = isCloud && backendModule.assetIsProject(asset) && asset.projectState.openedBy != null && asset.projectState.openedBy !== user.email + const setAsset = setAssetHooks.useSetAsset(asset, setItem) return category === Category.trash ? ( @@ -170,6 +186,9 @@ export default function AssetContextMenu(props: AssetContextMenuProps) { dispatchAssetEvent({ type: AssetEventType.openProject, id: asset.id, + title: asset.title, + parentId: item.directoryId, + backendType: state.backend.type, runInBackground: false, }) }} @@ -184,6 +203,9 @@ export default function AssetContextMenu(props: AssetContextMenuProps) { dispatchAssetEvent({ type: AssetEventType.openProject, id: asset.id, + title: asset.title, + parentId: item.directoryId, + backendType: state.backend.type, runInBackground: true, }) }} @@ -211,6 +233,9 @@ export default function AssetContextMenu(props: AssetContextMenuProps) { dispatchAssetEvent({ type: AssetEventType.closeProject, id: asset.id, + title: asset.title, + parentId: item.directoryId, + backendType: state.backend.type, }) }} /> @@ -343,7 +368,6 @@ export default function AssetContextMenu(props: AssetContextMenuProps) { doAction={() => { setModal( )}
-
+
{' '} {!isCloud && (
- + {getText('sharedWith')} - + {} }} + state={{ category, dispatchAssetEvent, setQuery: () => {} }} /> - + {getText('labels')} - + {item.item.labels?.map(value => { const label = labels.find(otherLabel => otherLabel.value === value) return label == null ? null : ( diff --git a/app/ide-desktop/lib/dashboard/src/layouts/AssetsTable.tsx b/app/ide-desktop/lib/dashboard/src/layouts/AssetsTable.tsx index 7e47cf576da6..32f044cb296b 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/AssetsTable.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/AssetsTable.tsx @@ -26,6 +26,8 @@ import AssetEventType from '#/events/AssetEventType' import type * as assetListEvent from '#/events/assetListEvent' import AssetListEventType from '#/events/AssetListEventType' +import type * as dashboard from '#/pages/dashboard/Dashboard' + import type * as assetPanel from '#/layouts/AssetPanel' import type * as assetSearchBar from '#/layouts/AssetSearchBar' import AssetsTableContextMenu from '#/layouts/AssetsTableContextMenu' @@ -313,7 +315,6 @@ export interface AssetsTableState { readonly setSortInfo: (sortInfo: sorting.SortInfo | null) => void readonly query: AssetQuery readonly setQuery: React.Dispatch> - readonly setProjectStartupInfo: (projectStartupInfo: backendModule.ProjectStartupInfo) => void readonly dispatchAssetListEvent: (event: assetListEvent.AssetListEvent) => void readonly assetEvents: assetEvent.AssetEvent[] readonly dispatchAssetEvent: (event: assetEvent.AssetEvent) => void @@ -329,8 +330,7 @@ export interface AssetsTableState { title?: string | null, override?: boolean ) => void - readonly doOpenEditor: () => void - readonly doCloseEditor: (projectId: backendModule.ProjectId) => void + readonly doOpenEditor: (id: backendModule.ProjectId) => void readonly doCopy: () => void readonly doCut: () => void readonly doPaste: ( @@ -349,13 +349,13 @@ export interface AssetRowState { /** Props for a {@link AssetsTable}. */ export interface AssetsTableProps { + readonly openedProjects: dashboard.Project[] readonly hidden: boolean readonly query: AssetQuery readonly setQuery: React.Dispatch> readonly setSuggestions: React.Dispatch< React.SetStateAction > - readonly setProjectStartupInfo: (projectStartupInfo: backendModule.ProjectStartupInfo) => void readonly setCanDownload: (canDownload: boolean) => void readonly category: Category readonly initialProjectName: string | null @@ -366,16 +366,37 @@ export interface AssetsTableProps { readonly setAssetPanelProps: (props: assetPanel.AssetPanelRequiredProps | null) => void readonly setIsAssetPanelTemporarilyVisible: (visible: boolean) => void readonly targetDirectoryNodeRef: React.MutableRefObject | null> - readonly doOpenEditor: () => void - readonly doCloseEditor: (projectId: backendModule.ProjectId) => void + readonly doOpenEditor: (id: dashboard.ProjectId) => void + readonly doOpenProject: ( + project: dashboard.Project, + options?: dashboard.OpenProjectOptions + ) => void + readonly doCloseProject: (project: dashboard.Project) => void + readonly assetManagementApiRef: React.Ref +} + +/** + * The API for managing assets in the table. + */ +export interface AssetManagementApi { + readonly getAsset: (id: backendModule.AssetId) => backendModule.AnyAsset | null + readonly setAsset: (id: backendModule.AssetId, asset: backendModule.AnyAsset) => void } /** The table of project assets. 
*/ export default function AssetsTable(props: AssetsTableProps) { - const { hidden, query, setQuery, setProjectStartupInfo, setCanDownload, category } = props + const { + hidden, + query, + setQuery, + setCanDownload, + category, + openedProjects, + assetManagementApiRef, + } = props const { setSuggestions, initialProjectName } = props const { assetListEvents, dispatchAssetListEvent, assetEvents, dispatchAssetEvent } = props - const { doOpenEditor, doCloseEditor } = props + const { doOpenEditor, doOpenProject, doCloseProject } = props const { setAssetPanelProps, targetDirectoryNodeRef, setIsAssetPanelTemporarilyVisible } = props const { user } = authProvider.useNonPartialUserSession() @@ -398,6 +419,9 @@ export default function AssetsTable(props: AssetsTableProps) { () => new Set() ) const selectedKeysRef = React.useRef(selectedKeys) + const updateAssetRef = React.useRef< + Record void> + >({}) const [pasteData, setPasteData] = React.useState > | null>(null) @@ -882,12 +906,11 @@ export default function AssetsTable(props: AssetsTableProps) { .filter(backendModule.assetIsProject) .find(isInitialProject) if (projectToLoad != null) { - window.setTimeout(() => { - dispatchAssetEvent({ - type: AssetEventType.openProject, - id: projectToLoad.id, - runInBackground: false, - }) + doOpenProject({ + type: backendModule.BackendType.local, + id: projectToLoad.id, + title: projectToLoad.title, + parentId: projectToLoad.parentId, }) } else if (initialProjectName != null) { toastAndLog('findProjectError', null, initialProjectName) @@ -969,13 +992,15 @@ export default function AssetsTable(props: AssetsTableProps) { .filter(backendModule.assetIsProject) .find(isInitialProject) if (projectToLoad != null) { - window.setTimeout(() => { - dispatchAssetEvent({ - type: AssetEventType.openProject, + doOpenProject( + { + type: backendModule.BackendType.local, id: projectToLoad.id, - runInBackground: false, - }) - }) + title: projectToLoad.title, + parentId: projectToLoad.parentId, + }, + { openInBackground: false } + ) } else { toastAndLog('findProjectError', null, oldNameOfProjectToImmediatelyOpen) } @@ -993,7 +1018,7 @@ export default function AssetsTable(props: AssetsTableProps) { return null }) }, - [rootDirectoryId, backend.rootPath, dispatchAssetEvent, toastAndLog] + [doOpenProject, rootDirectoryId, backend.rootPath, dispatchAssetEvent, toastAndLog] ) const overwriteNodesRef = React.useRef(overwriteNodes) overwriteNodesRef.current = overwriteNodes @@ -1220,11 +1245,14 @@ export default function AssetsTable(props: AssetsTableProps) { case backendModule.AssetType.project: { event.preventDefault() event.stopPropagation() - dispatchAssetEvent({ - type: AssetEventType.openProject, + + doOpenProject({ + type: backend.type, id: item.item.id, - runInBackground: false, + title: item.item.title, + parentId: item.item.parentId, }) + break } case backendModule.AssetType.datalink: { @@ -1918,7 +1946,6 @@ export default function AssetsTable(props: AssetsTableProps) { setSortInfo, query, setQuery, - setProjectStartupInfo, assetEvents, dispatchAssetEvent, dispatchAssetListEvent, @@ -1928,7 +1955,6 @@ export default function AssetsTable(props: AssetsTableProps) { hideColumn, doToggleDirectoryExpansion, doOpenEditor, - doCloseEditor, doCopy, doCut, doPaste, @@ -1944,7 +1970,6 @@ export default function AssetsTable(props: AssetsTableProps) { query, doToggleDirectoryExpansion, doOpenEditor, - doCloseEditor, doCopy, doCut, doPaste, @@ -1952,7 +1977,6 @@ export default function AssetsTable(props: AssetsTableProps) { 
setAssetPanelProps, setIsAssetPanelTemporarilyVisible, setQuery, - setProjectStartupInfo, dispatchAssetEvent, dispatchAssetListEvent, ] @@ -2180,6 +2204,26 @@ export default function AssetsTable(props: AssetsTableProps) { [visibleItems, calculateNewKeys, setSelectedKeys, setMostRecentlySelectedIndex] ) + const getAsset = React.useCallback( + (key: backendModule.AssetId) => nodeMapRef.current.get(key)?.item ?? null, + [nodeMapRef] + ) + + const setAsset = React.useCallback( + (key: backendModule.AssetId, asset: backendModule.AnyAsset) => { + setAssetTree(oldAssetTree => + oldAssetTree.map(item => (item.key === key ? item.with({ item: asset }) : item)) + ) + updateAssetRef.current[asset.id]?.(asset) + }, + [] + ) + + React.useImperativeHandle(assetManagementApiRef, () => ({ + getAsset, + setAsset, + })) + const columns = columnUtils.getColumnList(backend.type, enabledColumns) const headerRow = ( @@ -2210,13 +2254,27 @@ export default function AssetsTable(props: AssetsTableProps) { const key = AssetTreeNode.getKey(item) const isSelected = (visuallySelectedKeysOverride ?? selectedKeys).has(key) const isSoleSelected = selectedKeys.size === 1 && isSelected + return ( { + if (instance != null) { + updateAssetRef.current[item.item.id] = instance + } else { + // Hacky way to clear the reference to the asset on unmount. + // eventually once we pull the assets up in the tree, we can remove this. + // eslint-disable-next-line @typescript-eslint/no-dynamic-delete + delete updateAssetRef.current[item.item.id] + } + }} + isOpened={openedProjects.some(({ id }) => item.item.id === id)} columns={columns} item={item} state={state} hidden={hidden || visibilities.get(item.key) === Visibility.hidden} + doOpenProject={doOpenProject} + doCloseProject={doCloseProject} selected={isSelected} setSelected={selected => { setSelectedKeys(set.withPresence(selectedKeysRef.current, key, selected)) @@ -2272,8 +2330,10 @@ export default function AssetsTable(props: AssetsTableProps) { {nodes.map(node => ( {}} setRowState={() => {}} isEditable={false} + doCloseProject={doCloseProject} + doOpenProject={doOpenProject} /> ))} diff --git a/app/ide-desktop/lib/dashboard/src/layouts/Chat.tsx b/app/ide-desktop/lib/dashboard/src/layouts/Chat.tsx index 37ad3c4ca3d7..74ecd0e9f820 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/Chat.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/Chat.tsx @@ -412,16 +412,14 @@ export default function Chat(props: ChatProps) { }, }) const gtagEvent = gtagHooks.useGtagEvent() - const gtagEventRef = React.useRef(gtagEvent) - gtagEventRef.current = gtagEvent React.useEffect(() => { if (!isOpen) { return } else { - return gtagHooks.gtagOpenCloseCallback(gtagEventRef, 'cloud_open_chat', 'cloud_close_chat') + return gtagHooks.gtagOpenCloseCallback(gtagEvent, 'cloud_open_chat', 'cloud_close_chat') } - }, [isOpen]) + }, [isOpen, gtagEvent]) /** This is SAFE, because this component is only rendered when `accessToken` is present. * See `dashboard.tsx` for its sole usage. 
*/ diff --git a/app/ide-desktop/lib/dashboard/src/layouts/Drive.tsx b/app/ide-desktop/lib/dashboard/src/layouts/Drive.tsx index 7f6df43d872f..37bd9b13878a 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/Drive.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/Drive.tsx @@ -15,9 +15,12 @@ import type * as assetEvent from '#/events/assetEvent' import type * as assetListEvent from '#/events/assetListEvent' import AssetListEventType from '#/events/AssetListEventType' +import type * as dashboard from '#/pages/dashboard/Dashboard' + import type * as assetPanel from '#/layouts/AssetPanel' import AssetPanel from '#/layouts/AssetPanel' import type * as assetSearchBar from '#/layouts/AssetSearchBar' +import type * as assetsTable from '#/layouts/AssetsTable' import AssetsTable from '#/layouts/AssetsTable' import CategorySwitcher from '#/layouts/CategorySwitcher' import Category, * as categoryModule from '#/layouts/CategorySwitcher/Category' @@ -60,6 +63,7 @@ enum DriveStatus { /** Props for a {@link Drive}. */ export interface DriveProps { + readonly openedProjects: dashboard.Project[] readonly category: Category readonly setCategory: (category: Category) => void readonly hidden: boolean @@ -68,16 +72,29 @@ export interface DriveProps { readonly dispatchAssetListEvent: (directoryEvent: assetListEvent.AssetListEvent) => void readonly assetEvents: assetEvent.AssetEvent[] readonly dispatchAssetEvent: (directoryEvent: assetEvent.AssetEvent) => void - readonly setProjectStartupInfo: (projectStartupInfo: backendModule.ProjectStartupInfo) => void - readonly doOpenEditor: () => void - readonly doCloseEditor: (projectId: backendModule.ProjectId) => void + readonly doOpenEditor: (id: dashboard.ProjectId) => void + readonly doOpenProject: (project: dashboard.Project) => void + readonly doCloseProject: (project: dashboard.Project) => void + readonly assetsManagementApiRef: React.Ref } /** Contains directory path and directory contents (projects, folders, secrets and files). */ export default function Drive(props: DriveProps) { - const { hidden, initialProjectName } = props - const { assetListEvents, dispatchAssetListEvent, assetEvents, dispatchAssetEvent } = props - const { setProjectStartupInfo, doOpenEditor, doCloseEditor, category, setCategory } = props + const { + openedProjects, + doOpenEditor, + doCloseProject, + category, + setCategory, + hidden, + initialProjectName, + doOpenProject, + assetListEvents, + dispatchAssetListEvent, + assetEvents, + dispatchAssetEvent, + assetsManagementApiRef, + } = props const { isOffline } = offlineHooks.useOffline() const { localStorage } = localStorageProvider.useLocalStorage() @@ -321,11 +338,12 @@ export default function Drive(props: DriveProps) { ) : (
diff --git a/app/ide-desktop/lib/dashboard/src/layouts/Editor.tsx b/app/ide-desktop/lib/dashboard/src/layouts/Editor.tsx index a1031a3ccbc9..0041f4577b36 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/Editor.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/Editor.tsx @@ -6,18 +6,18 @@ import * as reactQuery from '@tanstack/react-query' import * as appUtils from '#/appUtils' import * as gtagHooks from '#/hooks/gtagHooks' -import * as toastAndLogHooks from '#/hooks/toastAndLogHooks' import * as backendProvider from '#/providers/BackendProvider' import * as textProvider from '#/providers/TextProvider' +import * as dashboard from '#/pages/dashboard/Dashboard' + import * as errorBoundary from '#/components/ErrorBoundary' -import * as loader from '#/components/Loader' +import * as suspense from '#/components/Suspense' -import type Backend from '#/services/Backend' import * as backendModule from '#/services/Backend' -import * as object from '#/utilities/object' +import * as twMerge from '#/utilities/tailwindMerge' import type * as types from '../../../types/types' @@ -33,27 +33,69 @@ const IGNORE_PARAMS_REGEX = new RegExp(`^${appUtils.SEARCH_PARAMS_PREFIX}(.+)$`) /** Props for an {@link Editor}. */ export interface EditorProps { + readonly isOpening: boolean + readonly startProject: (project: dashboard.Project) => void + readonly project: dashboard.Project readonly hidden: boolean readonly ydocUrl: string | null - readonly projectStartupInfo: backendModule.ProjectStartupInfo | null readonly appRunner: types.EditorRunner | null + readonly renameProject: (newName: string) => void + readonly projectId: backendModule.ProjectAsset['id'] } /** The container that launches the IDE. */ export default function Editor(props: EditorProps) { - const { hidden, projectStartupInfo } = props + const { project, hidden, isOpening, startProject } = props - const editor = projectStartupInfo && ( - - ) + const remoteBackend = backendProvider.useRemoteBackendStrict() + const localBackend = backendProvider.useLocalBackend() + + const projectStatusQuery = dashboard.createGetProjectDetailsQuery({ + type: project.type, + assetId: project.id, + parentId: project.parentId, + title: project.title, + remoteBackend, + localBackend, + }) + + const projectQuery = reactQuery.useQuery({ + ...projectStatusQuery, + networkMode: project.type === backendModule.BackendType.remote ? 'online' : 'always', + }) + + if (!isOpening && projectQuery.data?.state.type === backendModule.ProjectState.closed) { + startProject(project) + } return ( - }> - {/* eslint-disable-next-line @typescript-eslint/naming-convention */} - null } : {})}> - {editor} - - + ) } @@ -62,30 +104,18 @@ export default function Editor(props: EditorProps) { // ====================== /** Props for an {@link EditorInternal}. */ -interface EditorInternalProps extends EditorProps { - readonly projectStartupInfo: backendModule.ProjectStartupInfo +interface EditorInternalProps extends Omit { + readonly openedProject: backendModule.Project } /** An internal editor. 
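For context on the `Editor` rewrite above: instead of receiving a ready-made `projectStartupInfo`, the editor now derives project state from a react-query query and kicks off the open itself when the backend reports the project closed. A reduced sketch of that shape, with stand-in types (`fetchProjectState` and `startProject` are hypothetical):

```ts
import { useQuery, queryOptions } from '@tanstack/react-query'

type ProjectState = 'closed' | 'openInProgress' | 'opened'

interface Project {
  readonly id: string
  readonly isLocal: boolean
}

// Hypothetical stand-ins for the real backend calls.
declare function fetchProjectState(id: string): Promise<{ state: ProjectState }>
declare function startProject(project: Project): void

function useEnsureProjectOpen(project: Project, isOpening: boolean) {
  const query = useQuery(
    queryOptions({
      queryKey: ['project', project.id] as const,
      queryFn: () => fetchProjectState(project.id),
      // Local backends work offline, so their queries must never pause.
      networkMode: project.isLocal ? 'always' : 'online',
    })
  )
  // Mirrors the hunk above: if the backend says the project is closed and
  // nobody is opening it yet, request the open during render.
  if (!isOpening && query.data?.state === 'closed') {
    startProject(project)
  }
  return query
}
```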
*/ function EditorInternal(props: EditorInternalProps) { - const { hidden, ydocUrl, projectStartupInfo, appRunner: AppRunner } = props - const toastAndLog = toastAndLogHooks.useToastAndLog() + const { hidden, ydocUrl, appRunner: AppRunner, renameProject, openedProject } = props + const { getText } = textProvider.useText() const gtagEvent = gtagHooks.useGtagEvent() - const gtagEventRef = React.useRef(gtagEvent) - gtagEventRef.current = gtagEvent - const remoteBackend = backendProvider.useRemoteBackend() - const localBackend = backendProvider.useLocalBackend() - const projectQuery = reactQuery.useSuspenseQuery({ - queryKey: ['editorProject', projectStartupInfo.projectAsset.id], - // Wrap in an unresolved promise, otherwise React Suspense breaks. - queryFn: () => Promise.resolve(projectStartupInfo.project), - staleTime: 0, - gcTime: 0, - meta: { persist: false }, - }) - const project = projectQuery.data + const remoteBackend = backendProvider.useRemoteBackend() const logEvent = React.useCallback( (message: string, projectId?: string | null, metadata?: object | null) => { @@ -96,47 +126,19 @@ function EditorInternal(props: EditorInternalProps) { [remoteBackend] ) - const renameProject = React.useCallback( - (newName: string) => { - let backend: Backend | null - switch (projectStartupInfo.backendType) { - case backendModule.BackendType.local: - backend = localBackend - break - case backendModule.BackendType.remote: - backend = remoteBackend - break - } - const { id: projectId, parentId, title } = projectStartupInfo.projectAsset - backend - ?.updateProject( - projectId, - { projectName: newName, ami: null, ideVersion: null, parentId }, - title - ) - .then( - () => { - projectStartupInfo.setProjectAsset?.(object.merger({ title: newName })) - }, - e => toastAndLog('renameProjectError', e) - ) - }, - [remoteBackend, localBackend, projectStartupInfo, toastAndLog] - ) - React.useEffect(() => { if (hidden) { return } else { - return gtagHooks.gtagOpenCloseCallback(gtagEventRef, 'open_workflow', 'close_workflow') + return gtagHooks.gtagOpenCloseCallback(gtagEvent, 'open_workflow', 'close_workflow') } - }, [projectStartupInfo, hidden]) + }, [hidden, gtagEvent]) const appProps: types.EditorProps | null = React.useMemo(() => { - const projectId = project.projectId - const jsonAddress = project.jsonAddress - const binaryAddress = project.binaryAddress + const jsonAddress = openedProject.jsonAddress + const binaryAddress = openedProject.binaryAddress const ydocAddress = ydocUrl ?? 
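A note on the `gtagOpenCloseCallback` change above: the ref indirection is gone because the callback value is now simply listed as an effect dependency. The helper itself is not shown in this patch; a plausible reading of its contract (this is an assumption, the real signature may differ) is:

```ts
type GtagEvent = (name: string) => void

// Assumed contract: fire the "open" event immediately, return a cleanup that
// fires the "close" event, so the pair maps 1:1 onto a React effect.
function gtagOpenCloseCallback(
  gtagEvent: GtagEvent,
  openEvent: string,
  closeEvent: string
): () => void {
  gtagEvent(openEvent)
  return () => {
    gtagEvent(closeEvent)
  }
}

// Usage, as in the effect above:
//   React.useEffect(() => {
//     if (hidden) {
//       return
//     } else {
//       return gtagOpenCloseCallback(gtagEvent, 'open_workflow', 'close_workflow')
//     }
//   }, [hidden, gtagEvent])
```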
'' + if (jsonAddress == null) { throw new Error(getText('noJSONEndpointError')) } else if (binaryAddress == null) { @@ -144,44 +146,20 @@ function EditorInternal(props: EditorInternalProps) { } else { return { config: { - engine: { - rpcUrl: jsonAddress, - dataUrl: binaryAddress, - ydocUrl: ydocAddress, - }, - startup: { - project: project.packageName, - displayedProjectName: project.name, - }, - window: { - topBarOffset: '0', - }, + engine: { rpcUrl: jsonAddress, dataUrl: binaryAddress, ydocUrl: ydocAddress }, + startup: { project: openedProject.packageName, displayedProjectName: openedProject.name }, + window: { topBarOffset: '0' }, }, - projectId, + projectId: openedProject.projectId, hidden, ignoreParamsRegex: IGNORE_PARAMS_REGEX, logEvent, renameProject, } } - }, [ - project.projectId, - project.jsonAddress, - project.binaryAddress, - project.packageName, - project.name, - ydocUrl, - getText, - hidden, - logEvent, - renameProject, - ]) - - if (AppRunner == null) { - return null - } else { - // Currently the GUI component needs to be fully rerendered whenever the project is changed. Once - // this is no longer necessary, the `key` could be removed. - return - } + }, [openedProject, ydocUrl, getText, hidden, logEvent, renameProject]) + + // Currently the GUI component needs to be fully rerendered whenever the project is changed. Once + // this is no longer necessary, the `key` could be removed. + return AppRunner == null ? null : } diff --git a/app/ide-desktop/lib/dashboard/src/layouts/TabBar.tsx b/app/ide-desktop/lib/dashboard/src/layouts/TabBar.tsx index d5e6d080cae4..c5f64ab4fdfb 100644 --- a/app/ide-desktop/lib/dashboard/src/layouts/TabBar.tsx +++ b/app/ide-desktop/lib/dashboard/src/layouts/TabBar.tsx @@ -1,16 +1,21 @@ /** @file Switcher to choose the currently visible full-screen page. */ import * as React from 'react' +import * as reactQuery from '@tanstack/react-query' import invariant from 'tiny-invariant' import type * as text from '#/text' import * as textProvider from '#/providers/TextProvider' +import * as dashboard from '#/pages/dashboard/Dashboard' + import * as aria from '#/components/aria' import * as ariaComponents from '#/components/AriaComponents' import FocusArea from '#/components/styled/FocusArea' +import * as backend from '#/services/Backend' + import * as tailwindMerge from '#/utilities/tailwindMerge' // ================= @@ -162,22 +167,22 @@ const Tabs = React.forwardRef(TabsInternal) /** Props for a {@link Tab}. */ interface InternalTabProps extends Readonly { + readonly project?: dashboard.Project readonly isActive: boolean readonly icon: string readonly labelId: text.TextId - /** When the promise is in flight, the tab icon will instead be a loading spinner. */ - readonly loadingPromise?: Promise readonly onPress: () => void readonly onClose?: () => void + readonly onLoadEnd?: () => void } /** A tab in a {@link TabBar}. 
*/ export function Tab(props: InternalTabProps) { - const { isActive, icon, labelId, loadingPromise, children, onPress, onClose } = props + const { isActive, icon, labelId, children, onPress, onClose, project, onLoadEnd } = props const { updateClipPath, observeElement } = useTabBarContext() const ref = React.useRef(null) + const isLoadingRef = React.useRef(true) const { getText } = textProvider.useText() - const [isLoading, setIsLoading] = React.useState(loadingPromise != null) React.useLayoutEffect(() => { if (isActive) { @@ -193,21 +198,21 @@ export function Tab(props: InternalTabProps) { } }, [observeElement]) + const { isLoading, data } = reactQuery.useQuery( + project?.id + ? dashboard.createGetProjectDetailsQuery.createPassiveListener(project.id) + : { queryKey: ['__IGNORE__'], queryFn: reactQuery.skipToken } + ) + + const isFetching = + (isLoading || (data && data.state.type !== backend.ProjectState.opened)) ?? false + React.useEffect(() => { - if (loadingPromise) { - setIsLoading(true) - loadingPromise.then( - () => { - setIsLoading(false) - }, - () => { - setIsLoading(false) - } - ) - } else { - setIsLoading(false) + if (!isFetching && isLoadingRef.current) { + isLoadingRef.current = false + onLoadEnd?.() } - }, [loadingPromise]) + }, [isFetching, onLoadEnd]) return (
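The `Tab` above watches project readiness with a query that has a key but no fetcher, so it never fetches on its own; it only re-renders when the editor's poll updates the shared cache entry. The trick in isolation (keys and types are illustrative, and it relies on something else populating the cache):

```ts
import { useQuery, queryOptions, skipToken } from '@tanstack/react-query'

const projectStatusQuery = (id: string) =>
  // Key only, no `queryFn`: subscribing to this never triggers a fetch; the
  // hook just re-renders whenever another component writes this cache entry.
  queryOptions({ queryKey: ['project', id] as const })

function useProjectStatus(projectId: string | null) {
  return useQuery(
    projectId != null
      ? projectStatusQuery(projectId)
      : // A dummy key plus `skipToken` keeps the hook call unconditional.
        { queryKey: ['__IGNORE__'] as const, queryFn: skipToken }
  )
}
```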
void - readonly projectAsset: backendModule.ProjectAsset | null - readonly setProjectAsset: React.Dispatch> | null - readonly doRemoveSelf: () => void readonly goToSettingsPage: () => void readonly onSignOut: () => void + readonly onShareClick?: (() => void) | null | undefined } /** A toolbar containing chat and the user menu. */ export default function UserBar(props: UserBarProps) { - const { backend, invisible = false, isOnEditorPage, setIsHelpChatOpen } = props - const { projectAsset, setProjectAsset, doRemoveSelf, goToSettingsPage, onSignOut } = props + const { invisible = false, setIsHelpChatOpen, onShareClick, goToSettingsPage, onSignOut } = props + const { user } = authProvider.useNonPartialUserSession() const { setModal } = modalProvider.useSetModal() const { getText } = textProvider.useText() const { isFeatureUnderPaywall } = billing.usePaywall({ plan: user.plan }) - const self = - projectAsset?.permissions?.find( - backendModule.isUserPermissionAnd(permissions => permissions.user.userId === user.userId) - ) ?? null - const shouldShowShareButton = - backend?.type === backendModule.BackendType.remote && - isOnEditorPage && - projectAsset != null && - setProjectAsset != null && - self != null + const shouldShowUpgradeButton = isFeatureUnderPaywall('inviteUser') - const shouldShowInviteButton = - backend != null && !shouldShowShareButton && !shouldShowUpgradeButton + const shouldShowShareButton = onShareClick != null + const shouldShowInviteButton = !shouldShowShareButton && !shouldShowUpgradeButton return ( @@ -119,18 +102,7 @@ export default function UserBar(props: UserBarProps) { size="medium" variant="tertiary" aria-label={getText('shareButtonAltText')} - onPress={() => { - setModal( - - ) - }} + onPress={onShareClick} > {getText('share')} diff --git a/app/ide-desktop/lib/dashboard/src/modals/ManagePermissionsModal.tsx b/app/ide-desktop/lib/dashboard/src/modals/ManagePermissionsModal.tsx index b90c405f382e..321fe0378528 100644 --- a/app/ide-desktop/lib/dashboard/src/modals/ManagePermissionsModal.tsx +++ b/app/ide-desktop/lib/dashboard/src/modals/ManagePermissionsModal.tsx @@ -10,6 +10,7 @@ import * as billingHooks from '#/hooks/billing' import * as toastAndLogHooks from '#/hooks/toastAndLogHooks' import * as authProvider from '#/providers/AuthProvider' +import * as backendProvider from '#/providers/BackendProvider' import * as modalProvider from '#/providers/ModalProvider' import * as textProvider from '#/providers/TextProvider' @@ -23,7 +24,6 @@ import * as paywall from '#/components/Paywall' import FocusArea from '#/components/styled/FocusArea' import * as backendModule from '#/services/Backend' -import type Backend from '#/services/Backend' import * as object from '#/utilities/object' import * as permissionsModule from '#/utilities/permissions' @@ -44,8 +44,7 @@ const TYPE_SELECTOR_Y_OFFSET_PX = 32 export interface ManagePermissionsModalProps< Asset extends backendModule.AnyAsset = backendModule.AnyAsset, > { - readonly backend: Backend - readonly item: Asset + readonly item: Pick readonly setItem: React.Dispatch> readonly self: backendModule.UserPermission /** Remove the current user's permissions from this asset. 
This MUST be a prop because it should @@ -61,7 +60,8 @@ export interface ManagePermissionsModalProps< export default function ManagePermissionsModal< Asset extends backendModule.AnyAsset = backendModule.AnyAsset, >(props: ManagePermissionsModalProps) { - const { backend, item, setItem, self, doRemoveSelf, eventTarget } = props + const { item, setItem, self, doRemoveSelf, eventTarget } = props + const remoteBackend = backendProvider.useRemoteBackendStrict() const { user } = authProvider.useFullUserSession() const { unsetModal } = modalProvider.useSetModal() const toastAndLog = toastAndLogHooks.useToastAndLog() @@ -72,14 +72,14 @@ export default function ManagePermissionsModal< const listedUsers = reactQuery.useQuery({ queryKey: ['listUsers'], - queryFn: () => backend.listUsers(), + queryFn: () => remoteBackend.listUsers(), enabled: !isUnderPaywall, select: data => (isUnderPaywall ? [] : data), }) const listedUserGroups = reactQuery.useQuery({ queryKey: ['listUserGroups'], - queryFn: () => backend.listUserGroups(), + queryFn: () => remoteBackend.listUserGroups(), }) const [permissions, setPermissions] = React.useState(item.permissions ?? []) @@ -122,8 +122,11 @@ export default function ManagePermissionsModal< [user.userId, permissions, self.permission] ) - const inviteUserMutation = backendHooks.useBackendMutation(backend, 'inviteUser') - const createPermissionMutation = backendHooks.useBackendMutation(backend, 'createPermission') + const inviteUserMutation = backendHooks.useBackendMutation(remoteBackend, 'inviteUser') + const createPermissionMutation = backendHooks.useBackendMutation( + remoteBackend, + 'createPermission' + ) React.useEffect(() => { // This is SAFE, as the type of asset is not being changed. @@ -131,308 +134,297 @@ export default function ManagePermissionsModal< setItem(object.merger({ permissions } as Partial)) }, [permissions, setItem]) - if (backend.type === backendModule.BackendType.local) { - // This should never happen - the local backend does not have the "shared with" column, - // and `organization` is absent only when offline - in which case the user should only - // be able to access the local backend. - // This MUST be an error, otherwise the hooks below are considered as conditionally called. - throw new Error('Cannot share assets on the local backend.') - } else { - const canAdd = React.useMemo( - () => [ - ...(listedUsers.data ?? []).filter( - listedUser => - !permissionsHoldersNames.has(listedUser.name) && - !emailsOfUsersWithPermission.has(listedUser.email) - ), - ...(listedUserGroups.data ?? []).filter( - userGroup => !permissionsHoldersNames.has(userGroup.groupName) - ), - ], - [emailsOfUsersWithPermission, permissionsHoldersNames, listedUsers, listedUserGroups] - ) - const willInviteNewUser = React.useMemo(() => { - if (usersAndUserGroups.length !== 0 || email == null || email === '') { - return false - } else { - const lowercase = email.toLowerCase() - return ( - lowercase !== '' && - !permissionsHoldersNames.has(lowercase) && - !emailsOfUsersWithPermission.has(lowercase) && - !canAdd.some( - userOrGroup => - ('name' in userOrGroup && userOrGroup.name.toLowerCase() === lowercase) || - ('email' in userOrGroup && userOrGroup.email.toLowerCase() === lowercase) || - ('groupName' in userOrGroup && userOrGroup.groupName.toLowerCase() === lowercase) - ) + const canAdd = React.useMemo( + () => [ + ...(listedUsers.data ?? 
[]).filter( + listedUser => + !permissionsHoldersNames.has(listedUser.name) && + !emailsOfUsersWithPermission.has(listedUser.email) + ), + ...(listedUserGroups.data ?? []).filter( + userGroup => !permissionsHoldersNames.has(userGroup.groupName) + ), + ], + [emailsOfUsersWithPermission, permissionsHoldersNames, listedUsers, listedUserGroups] + ) + const willInviteNewUser = React.useMemo(() => { + if (usersAndUserGroups.length !== 0 || email == null || email === '') { + return false + } else { + const lowercase = email.toLowerCase() + return ( + lowercase !== '' && + !permissionsHoldersNames.has(lowercase) && + !emailsOfUsersWithPermission.has(lowercase) && + !canAdd.some( + userOrGroup => + ('name' in userOrGroup && userOrGroup.name.toLowerCase() === lowercase) || + ('email' in userOrGroup && userOrGroup.email.toLowerCase() === lowercase) || + ('groupName' in userOrGroup && userOrGroup.groupName.toLowerCase() === lowercase) ) - } - }, [ - usersAndUserGroups.length, - email, - emailsOfUsersWithPermission, - permissionsHoldersNames, - canAdd, - ]) + ) + } + }, [ + usersAndUserGroups.length, + email, + emailsOfUsersWithPermission, + permissionsHoldersNames, + canAdd, + ]) - const doSubmit = async () => { - if (willInviteNewUser) { - try { - setUserAndUserGroups([]) - setEmail('') - if (email != null) { - await inviteUserMutation.mutateAsync([ - { - organizationId: user.organizationId, - userEmail: backendModule.EmailAddress(email), - }, - ]) - toast.toast.success(getText('inviteSuccess', email)) - } - } catch (error) { - toastAndLog('couldNotInviteUser', error, email ?? '(unknown)') - } - } else { + const doSubmit = async () => { + if (willInviteNewUser) { + try { setUserAndUserGroups([]) - const addedPermissions = usersAndUserGroups.map( - newUserOrUserGroup => - 'userId' in newUserOrUserGroup - ? { user: newUserOrUserGroup, permission: action } - : { userGroup: newUserOrUserGroup, permission: action } - ) - const addedUsersIds = new Set( - addedPermissions.flatMap(permission => - backendModule.isUserPermission(permission) ? [permission.user.userId] : [] - ) + setEmail('') + if (email != null) { + await inviteUserMutation.mutateAsync([ + { + organizationId: user.organizationId, + userEmail: backendModule.EmailAddress(email), + }, + ]) + toast.toast.success(getText('inviteSuccess', email)) + } + } catch (error) { + toastAndLog('couldNotInviteUser', error, email ?? '(unknown)') + } + } else { + setUserAndUserGroups([]) + const addedPermissions = usersAndUserGroups.map( + newUserOrUserGroup => + 'userId' in newUserOrUserGroup + ? { user: newUserOrUserGroup, permission: action } + : { userGroup: newUserOrUserGroup, permission: action } + ) + const addedUsersIds = new Set( + addedPermissions.flatMap(permission => + backendModule.isUserPermission(permission) ? [permission.user.userId] : [] ) - const addedUserGroupsIds = new Set( - addedPermissions.flatMap(permission => - backendModule.isUserGroupPermission(permission) ? [permission.userGroup.id] : [] - ) + ) + const addedUserGroupsIds = new Set( + addedPermissions.flatMap(permission => + backendModule.isUserGroupPermission(permission) ? [permission.userGroup.id] : [] ) - const isPermissionNotBeingOverwritten = (permission: backendModule.AssetPermission) => - backendModule.isUserPermission(permission) - ? !addedUsersIds.has(permission.user.userId) - : !addedUserGroupsIds.has(permission.userGroup.id) + ) + const isPermissionNotBeingOverwritten = (permission: backendModule.AssetPermission) => + backendModule.isUserPermission(permission) + ? 
!addedUsersIds.has(permission.user.userId) + : !addedUserGroupsIds.has(permission.userGroup.id) - try { - setPermissions(oldPermissions => - [...oldPermissions.filter(isPermissionNotBeingOverwritten), ...addedPermissions].sort( - backendModule.compareAssetPermissions - ) + try { + setPermissions(oldPermissions => + [...oldPermissions.filter(isPermissionNotBeingOverwritten), ...addedPermissions].sort( + backendModule.compareAssetPermissions ) - await createPermissionMutation.mutateAsync([ - { - actorsIds: addedPermissions.map(permission => - backendModule.isUserPermission(permission) - ? permission.user.userId - : permission.userGroup.id - ), - resourceId: item.id, - action: action, - }, - ]) - } catch (error) { - setPermissions(oldPermissions => - [...oldPermissions.filter(isPermissionNotBeingOverwritten), ...oldPermissions].sort( - backendModule.compareAssetPermissions - ) + ) + await createPermissionMutation.mutateAsync([ + { + actorsIds: addedPermissions.map(permission => + backendModule.isUserPermission(permission) + ? permission.user.userId + : permission.userGroup.id + ), + resourceId: item.id, + action: action, + }, + ]) + } catch (error) { + setPermissions(oldPermissions => + [...oldPermissions.filter(isPermissionNotBeingOverwritten), ...oldPermissions].sort( + backendModule.compareAssetPermissions ) - toastAndLog('setPermissionsError', error) - } + ) + toastAndLog('setPermissionsError', error) } } + } - const doDelete = async (permissionId: backendModule.UserPermissionIdentifier) => { - if (permissionId === self.user.userId) { - doRemoveSelf() - } else { - const oldPermission = permissions.find( - permission => backendModule.getAssetPermissionId(permission) === permissionId + const doDelete = async (permissionId: backendModule.UserPermissionIdentifier) => { + if (permissionId === self.user.userId) { + doRemoveSelf() + } else { + const oldPermission = permissions.find( + permission => backendModule.getAssetPermissionId(permission) === permissionId + ) + try { + setPermissions(oldPermissions => + oldPermissions.filter( + permission => backendModule.getAssetPermissionId(permission) !== permissionId + ) ) - try { + await createPermissionMutation.mutateAsync([ + { + actorsIds: [permissionId], + resourceId: item.id, + action: null, + }, + ]) + } catch (error) { + if (oldPermission != null) { setPermissions(oldPermissions => - oldPermissions.filter( - permission => backendModule.getAssetPermissionId(permission) !== permissionId - ) + [...oldPermissions, oldPermission].sort(backendModule.compareAssetPermissions) ) - await createPermissionMutation.mutateAsync([ - { - actorsIds: [permissionId], - resourceId: item.id, - action: null, - }, - ]) - } catch (error) { - if (oldPermission != null) { - setPermissions(oldPermissions => - [...oldPermissions, oldPermission].sort(backendModule.compareAssetPermissions) - ) - } - toastAndLog('setPermissionsError', error) } + toastAndLog('setPermissionsError', error) } } + } - return ( - +
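The permission edits above all follow one optimistic shape: mutate local state first, call the backend second, restore the saved entry if the call fails. Boiled down, with a hypothetical `sendDelete` standing in for the permission mutation:

```ts
interface Permission {
  readonly id: string
}

// Hypothetical backend call.
declare function sendDelete(id: string): Promise<void>

async function deleteOptimistically(
  current: readonly Permission[],
  setPermissions: (update: (old: Permission[]) => Permission[]) => void,
  id: string
) {
  const saved = current.find(permission => permission.id === id)
  // 1. Update the UI immediately.
  setPermissions(old => old.filter(permission => permission.id !== id))
  try {
    // 2. Then tell the server.
    await sendDelete(id)
  } catch {
    // 3. On failure, put the saved entry back.
    if (saved != null) {
      setPermissions(old => [...old, saved])
    }
  }
}
```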
{ + mouseEvent.stopPropagation() + }} + onContextMenu={mouseEvent => { + mouseEvent.stopPropagation() + mouseEvent.preventDefault() + }} > -
{ - mouseEvent.stopPropagation() - }} - onContextMenu={mouseEvent => { - mouseEvent.stopPropagation() - mouseEvent.preventDefault() - }} - > -
-
- - {getText('invite')} - - {/* Space reserved for other tabs. */} -
- - {innerProps => ( -
{ - event.preventDefault() - void doSubmit() - }} - {...innerProps} - > -
- +
+ + {getText('invite')} + + {/* Space reserved for other tabs. */} +
+ + {innerProps => ( + { + event.preventDefault() + void doSubmit() + }} + {...innerProps} + > +
+ +
+ 1 + ? getText('inviteUserPlaceholder') + : getText('inviteFirstUserPlaceholder') + } + type="text" + itemsToString={items => + items.length === 1 && items[0] != null + ? 'email' in items[0] + ? items[0].email + : items[0].groupName + : getText('xUsersAndGroupsSelected', items.length) + } + values={usersAndUserGroups} + setValues={setUserAndUserGroups} + items={canAdd} + itemToKey={userOrGroup => + 'userId' in userOrGroup ? userOrGroup.userId : userOrGroup.id + } + itemToString={userOrGroup => + 'name' in userOrGroup + ? `${userOrGroup.name} (${userOrGroup.email})` + : userOrGroup.groupName + } + matches={(userOrGroup, text) => + ('email' in userOrGroup && + userOrGroup.email.toLowerCase().includes(text.toLowerCase())) || + ('name' in userOrGroup && + userOrGroup.name.toLowerCase().includes(text.toLowerCase())) || + ('groupName' in userOrGroup && + userOrGroup.groupName.toLowerCase().includes(text.toLowerCase())) + } + text={email} + setText={setEmail} /> -
- 1 - ? getText('inviteUserPlaceholder') - : getText('inviteFirstUserPlaceholder') - } - type="text" - itemsToString={items => - items.length === 1 && items[0] != null - ? 'email' in items[0] - ? items[0].email - : items[0].groupName - : getText('xUsersAndGroupsSelected', items.length) - } - values={usersAndUserGroups} - setValues={setUserAndUserGroups} - items={canAdd} - itemToKey={userOrGroup => - 'userId' in userOrGroup ? userOrGroup.userId : userOrGroup.id - } - itemToString={userOrGroup => - 'name' in userOrGroup - ? `${userOrGroup.name} (${userOrGroup.email})` - : userOrGroup.groupName - } - matches={(userOrGroup, text) => - ('email' in userOrGroup && - userOrGroup.email.toLowerCase().includes(text.toLowerCase())) || - ('name' in userOrGroup && - userOrGroup.name.toLowerCase().includes(text.toLowerCase())) || - ('groupName' in userOrGroup && - userOrGroup.groupName.toLowerCase().includes(text.toLowerCase())) - } - text={email} - setText={setEmail} - /> -
- - {willInviteNewUser ? getText('invite') : getText('share')} - - - )} - -
- {editablePermissions.map(permission => ( -
+ - { - const permissionId = backendModule.getAssetPermissionId(newPermission) - setPermissions(oldPermissions => - oldPermissions.map(oldPermission => - backendModule.getAssetPermissionId(oldPermission) === permissionId - ? newPermission - : oldPermission - ) + {willInviteNewUser ? getText('invite') : getText('share')} + + + )} + +
+ {editablePermissions.map(permission => ( +
+ { + const permissionId = backendModule.getAssetPermissionId(newPermission) + setPermissions(oldPermissions => + oldPermissions.map(oldPermission => + backendModule.getAssetPermissionId(oldPermission) === permissionId + ? newPermission + : oldPermission ) - if (permissionId === self.user.userId) { - // This must run only after the permissions have - // been updated through `setItem`. - setTimeout(() => { - unsetModal() - }, 0) - } - }} - doDelete={id => { - if (id === self.user.userId) { + ) + if (permissionId === self.user.userId) { + // This must run only after the permissions have + // been updated through `setItem`. + setTimeout(() => { unsetModal() - } - void doDelete(id) - }} - /> -
- ))} -
- - {isUnderPaywall && ( - - )} + }, 0) + } + }} + doDelete={id => { + if (id === self.user.userId) { + unsetModal() + } + void doDelete(id) + }} + /> +
+ ))}
+ + {isUnderPaywall && ( + + )}
- - ) - } +
+ + ) } diff --git a/app/ide-desktop/lib/dashboard/src/pages/dashboard/Dashboard.tsx b/app/ide-desktop/lib/dashboard/src/pages/dashboard/Dashboard.tsx index cf1e04aaefa1..5000efc5a9a9 100644 --- a/app/ide-desktop/lib/dashboard/src/pages/dashboard/Dashboard.tsx +++ b/app/ide-desktop/lib/dashboard/src/pages/dashboard/Dashboard.tsx @@ -2,13 +2,17 @@ * interactive components. */ import * as React from 'react' +import * as reactQuery from '@tanstack/react-query' +import invariant from 'tiny-invariant' import * as validator from 'validator' +import * as z from 'zod' import DriveIcon from 'enso-assets/drive.svg' import EditorIcon from 'enso-assets/network.svg' import SettingsIcon from 'enso-assets/settings.svg' import * as detect from 'enso-common/src/detect' +import * as eventCallbacks from '#/hooks/eventCallbackHooks' import * as eventHooks from '#/hooks/eventHooks' import * as searchParamsState from '#/hooks/searchParamsStateHooks' @@ -24,6 +28,7 @@ import AssetEventType from '#/events/AssetEventType' import type * as assetListEvent from '#/events/assetListEvent' import AssetListEventType from '#/events/AssetListEventType' +import type * as assetTable from '#/layouts/AssetsTable' import Category, * as categoryModule from '#/layouts/CategorySwitcher/Category' import Chat from '#/layouts/Chat' import ChatPlaceholder from '#/layouts/ChatPlaceholder' @@ -36,9 +41,13 @@ import UserBar from '#/layouts/UserBar' import Page from '#/components/Page' +import ManagePermissionsModal from '#/modals/ManagePermissionsModal' + import * as backendModule from '#/services/Backend' +import type LocalBackend from '#/services/LocalBackend' import * as localBackendModule from '#/services/LocalBackend' import * as projectManager from '#/services/ProjectManager' +import type RemoteBackend from '#/services/RemoteBackend' import * as array from '#/utilities/array' import LocalStorage from '#/utilities/LocalStorage' @@ -53,7 +62,6 @@ import type * as types from '../../../../types/types' /** Main content of the screen. Only one should be visible at a time. */ enum TabType { drive = 'drive', - editor = 'editor', settings = 'settings', } @@ -61,46 +69,39 @@ declare module '#/utilities/LocalStorage' { /** */ interface LocalStorageData { readonly isAssetPanelVisible: boolean - readonly page: TabType - readonly projectStartupInfo: Omit + readonly page: z.infer + readonly launchedProjects: z.infer } } -LocalStorage.registerKey('isAssetPanelVisible', { - tryParse: value => (value === true ? value : null), -}) +LocalStorage.registerKey('isAssetPanelVisible', { schema: z.boolean() }) -const PAGES = Object.values(TabType) -LocalStorage.registerKey('page', { - tryParse: value => (array.includes(PAGES, value) ? value : null), +const PROJECT_SCHEMA = z.object({ + id: z.custom(), + parentId: z.custom(), + title: z.string(), + type: z.nativeEnum(backendModule.BackendType), }) - -const BACKEND_TYPES = Object.values(backendModule.BackendType) -LocalStorage.registerKey('projectStartupInfo', { +const LAUNCHED_PROJECT_SCHEMA = z.array(PROJECT_SCHEMA) + +/** + * Launched project information. + */ +export type Project = z.infer +/** + * Launched project ID. 
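The `z.object`/`registerKey` pairing above replaces the hand-rolled `tryParse` validators. The read side of such a schema-validated key, sketched against plain `localStorage` (the real `LocalStorage` class does more than this):

```ts
import * as z from 'zod'

const PROJECT_SCHEMA = z.object({
  id: z.string(),
  parentId: z.string(),
  title: z.string(),
})
const LAUNCHED_PROJECTS_SCHEMA = z.array(PROJECT_SCHEMA)

type LaunchedProjects = z.infer<typeof LAUNCHED_PROJECTS_SCHEMA>

function readLaunchedProjects(): LaunchedProjects {
  let raw: unknown
  try {
    raw = JSON.parse(localStorage.getItem('launchedProjects') ?? 'null')
  } catch {
    return []
  }
  // `safeParse` never throws; malformed persisted data degrades to a default
  // instead of crashing the dashboard on startup.
  const parsed = LAUNCHED_PROJECTS_SCHEMA.safeParse(raw)
  return parsed.success ? parsed.data : []
}
```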
+ */ +export type ProjectId = Project['id'] + +LocalStorage.registerKey('launchedProjects', { isUserSpecific: true, - tryParse: value => { - if (typeof value !== 'object' || value == null) { - return null - } else if ( - !('accessToken' in value) || - (typeof value.accessToken !== 'string' && value.accessToken != null) - ) { - return null - } else if (!('backendType' in value) || !array.includes(BACKEND_TYPES, value.backendType)) { - return null - } else if (!('projectAsset' in value)) { - return null - } else { - return { - // These type assertions are UNSAFE, however correctly type-checking these - // would be very complicated. - // eslint-disable-next-line no-restricted-syntax - projectAsset: value.projectAsset as backendModule.ProjectAsset, - backendType: value.backendType, - accessToken: value.accessToken ?? null, - } - } - }, + schema: LAUNCHED_PROJECT_SCHEMA, +}) + +const PAGES_SCHEMA = z.nativeEnum(TabType).or(z.custom()) + +LocalStorage.registerKey('page', { + schema: PAGES_SCHEMA, }) // ================= @@ -116,41 +117,114 @@ export interface DashboardProps { readonly ydocUrl: string | null } +/** + * + */ +export interface OpenProjectOptions { + /** + * Whether to open the project in the background. + * Set to `false` to navigate to the project tab. + * @default true + */ + readonly openInBackground?: boolean +} + +/** + * + */ +export interface CreateOpenedProjectQueryOptions { + readonly type: backendModule.BackendType + readonly assetId: backendModule.Asset['id'] + readonly parentId: backendModule.Asset['parentId'] + readonly title: backendModule.Asset['title'] + readonly remoteBackend: RemoteBackend + readonly localBackend: LocalBackend | null +} + +/** + * Project status query. + */ +export function createGetProjectDetailsQuery(options: CreateOpenedProjectQueryOptions) { + const { assetId, parentId, title, remoteBackend, localBackend, type } = options + + const backend = type === backendModule.BackendType.remote ? remoteBackend : localBackend + const isLocal = type === backendModule.BackendType.local + + return reactQuery.queryOptions({ + queryKey: createGetProjectDetailsQuery.getQueryKey(assetId), + meta: { persist: false }, + refetchInterval: ({ state }) => { + /** + * Default interval for refetching project status when the project is opened. + */ + const openedIntervalMS = 30_000 + /** + * Interval when we open a cloud project. + * Since opening a cloud project is a long operation, we want to check the status less often. + */ + const cloudOpeningIntervalMS = 5_000 + /** + * Interval when we open a local project or when we want to sync the project status as soon as possible. 
+ */ + const activeSyncIntervalMS = 100 + const states = [backendModule.ProjectState.opened, backendModule.ProjectState.closed] + + if (isLocal) { + if (state.data?.state.type === backendModule.ProjectState.opened) { + return openedIntervalMS + } else { + return activeSyncIntervalMS + } + } else if (state.data == null) { + return activeSyncIntervalMS + } else if (states.includes(state.data.state.type)) { + return openedIntervalMS + } else { + return cloudOpeningIntervalMS + } + }, + refetchIntervalInBackground: true, + refetchOnWindowFocus: true, + refetchOnMount: true, + gcTime: 0, + queryFn: () => { + invariant(backend != null, 'Backend is null') + + return backend.getProjectDetails(assetId, parentId, title) + }, + }) +} +createGetProjectDetailsQuery.getQueryKey = (id: Project['id']) => ['project', id] as const +createGetProjectDetailsQuery.createPassiveListener = (id: Project['id']) => + reactQuery.queryOptions({ + queryKey: createGetProjectDetailsQuery.getQueryKey(id), + }) + /** The component that contains the entire UI. */ export default function Dashboard(props: DashboardProps) { - const { appRunner, ydocUrl, initialProjectName: initialProjectNameRaw } = props - const session = authProvider.useNonPartialUserSession() - const remoteBackend = backendProvider.useRemoteBackend() + const { appRunner, initialProjectName: initialProjectNameRaw, ydocUrl } = props + + const { user, ...session } = authProvider.useFullUserSession() + + const remoteBackend = backendProvider.useRemoteBackendStrict() const localBackend = backendProvider.useLocalBackend() const { getText } = textProvider.useText() const { modalRef } = modalProvider.useModalRef() - const { updateModal, unsetModal } = modalProvider.useSetModal() + const { updateModal, unsetModal, setModal } = modalProvider.useSetModal() const { localStorage } = localStorageProvider.useLocalStorage() const inputBindings = inputBindingsProvider.useInputBindings() - const [initialized, setInitialized] = React.useState(false) - const initializedRef = React.useRef(initialized) - initializedRef.current = initialized const [isHelpChatOpen, setIsHelpChatOpen] = React.useState(false) - // These pages MUST be ROUTER PAGES. - const [page, setPage] = searchParamsState.useSearchParamsState( - 'page', - () => localStorage.get('page') ?? TabType.drive, - (value: unknown): value is TabType => array.includes(Object.values(TabType), value) - ) - const [projectStartupInfo, setProjectStartupInfo] = - React.useState(null) - const openProjectAbortControllerRef = React.useRef(null) - const [assetListEvents, dispatchAssetListEvent] = - eventHooks.useEvent() - const [assetEvents, dispatchAssetEvent] = eventHooks.useEvent() + const assetManagementApiRef = React.useRef(null) + const initialLocalProjectId = initialProjectNameRaw != null && validator.isUUID(initialProjectNameRaw) ? localBackendModule.newProjectId(projectManager.UUID(initialProjectNameRaw)) : null const initialProjectName = initialLocalProjectId ?? initialProjectNameRaw - const defaultCategory = - remoteBackend != null && initialLocalProjectId == null ? Category.cloud : Category.local + + const defaultCategory = initialLocalProjectId == null ? Category.cloud : Category.local + const [category, setCategory] = searchParamsState.useSearchParamsState( 'driveCategory', () => defaultCategory, @@ -163,8 +237,58 @@ export default function Dashboard(props: DashboardProps) { } ) + const [launchedProjects, privateSetLaunchedProjects] = React.useState( + () => localStorage.get('launchedProjects') ?? 
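The `refetchInterval` callback completed above implements a tiered polling strategy: near-instant while a state change is in flight, relaxed once the project has settled, and a middle tier for slow cloud opens. The same shape in a self-contained query (constants and types are stand-ins):

```ts
import { queryOptions } from '@tanstack/react-query'

type ProjectState = 'closed' | 'openInProgress' | 'opened'

declare function fetchProjectState(id: string): Promise<{ state: ProjectState }>

const projectStatusQuery = (id: string, isLocal: boolean) =>
  queryOptions({
    queryKey: ['project', id] as const,
    queryFn: () => fetchProjectState(id),
    refetchInterval: ({ state }) => {
      const settledMs = 30_000 // opened or closed: poll lazily
      const cloudOpeningMs = 5_000 // cloud opens are slow: check occasionally
      const activeSyncMs = 100 // a change is in flight: sync aggressively
      const settled: ProjectState[] = ['opened', 'closed']
      if (state.data == null) return activeSyncMs
      if (settled.includes(state.data.state)) return settledMs
      return isLocal ? activeSyncMs : cloudOpeningMs
    },
    refetchIntervalInBackground: true,
  })
```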
[] + ) + + // These pages MUST be ROUTER PAGES. + const [page, privateSetPage] = searchParamsState.useSearchParamsState( + 'page', + () => localStorage.get('page') ?? TabType.drive, + (value: unknown): value is Project['id'] | TabType => { + return ( + array.includes(Object.values(TabType), value) || launchedProjects.some(p => p.id === value) + ) + } + ) + + const setLaunchedProjects = eventCallbacks.useEventCallback( + (fn: (currentState: Project[]) => Project[]) => { + React.startTransition(() => { + privateSetLaunchedProjects(currentState => { + const nextState = fn(currentState) + localStorage.set('launchedProjects', nextState) + return nextState + }) + }) + } + ) + + const addLaunchedProject = eventCallbacks.useEventCallback((project: Project) => { + setLaunchedProjects(currentState => [...currentState, project]) + }) + + const removeLaunchedProject = eventCallbacks.useEventCallback((projectId: Project['id']) => { + setLaunchedProjects(currentState => currentState.filter(({ id }) => id !== projectId)) + }) + + const clearLaunchedProjects = eventCallbacks.useEventCallback(() => { + setLaunchedProjects(() => []) + }) + + const setPage = eventCallbacks.useEventCallback((nextPage: Project['id'] | TabType) => { + privateSetPage(nextPage) + localStorage.set('page', nextPage) + }) + + const [assetListEvents, dispatchAssetListEvent] = + eventHooks.useEvent() + const [assetEvents, dispatchAssetEvent] = eventHooks.useEvent() + const isCloud = categoryModule.isCloud(category) - const isUserEnabled = session.user.isEnabled + const isUserEnabled = user.isEnabled + + const selectedProject = launchedProjects.find(p => p.id === page) ?? null if (isCloud && !isUserEnabled && localBackend != null) { setTimeout(() => { @@ -173,115 +297,111 @@ export default function Dashboard(props: DashboardProps) { }) } - React.useEffect(() => { - setInitialized(true) - }, []) + const openProjectMutation = reactQuery.useMutation({ + mutationKey: ['openProject'], + networkMode: 'always', + mutationFn: ({ title, id, type, parentId }: Project) => { + const backend = type === backendModule.BackendType.remote ? 
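`setLaunchedProjects` above pairs every state transition with a `localStorage` write inside the updater (wrapped in `React.startTransition` in the real code), so persisted and in-memory state cannot drift apart. Reduced to its essentials:

```ts
import * as React from 'react'

function usePersistedList<T>(key: string, initial: T[]) {
  const [list, setList] = React.useState<T[]>(initial)
  const update = React.useCallback(
    (fn: (current: T[]) => T[]) => {
      setList(current => {
        const next = fn(current)
        // Persist inside the updater so every transition is recorded exactly
        // where it happens, not in a follow-up effect that could be skipped.
        localStorage.setItem(key, JSON.stringify(next))
        return next
      })
    },
    [key]
  )
  return [list, update] as const
}
```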
remoteBackend : localBackend + + invariant(backend != null, 'Backend is null') + + return backend.openProject( + id, + { + executeAsync: false, + cognitoCredentials: { + accessToken: session.accessToken, + refreshToken: session.accessToken, + clientId: session.clientId, + expireAt: session.expireAt, + refreshUrl: session.refreshUrl, + }, + parentId, + }, + title + ) + }, + onMutate: ({ id }) => { + const queryKey = createGetProjectDetailsQuery.getQueryKey(id) - React.useEffect(() => { - const savedProjectStartupInfo = localStorage.get('projectStartupInfo') - if (initialProjectName != null) { - if (page === TabType.editor) { - setPage(TabType.drive) - } - } else if (savedProjectStartupInfo != null) { - switch (savedProjectStartupInfo.backendType) { - case backendModule.BackendType.remote: { - if (remoteBackend != null) { - setPage(TabType.drive) - void (async () => { - const abortController = new AbortController() - openProjectAbortControllerRef.current = abortController - try { - const oldProject = await remoteBackend.getProjectDetails( - savedProjectStartupInfo.projectAsset.id, - savedProjectStartupInfo.projectAsset.parentId, - savedProjectStartupInfo.projectAsset.title - ) - if (backendModule.IS_OPENING_OR_OPENED[oldProject.state.type]) { - const project = remoteBackend.waitUntilProjectIsReady( - savedProjectStartupInfo.projectAsset.id, - savedProjectStartupInfo.projectAsset.parentId, - savedProjectStartupInfo.projectAsset.title, - abortController.signal - ) - setProjectStartupInfo({ ...savedProjectStartupInfo, project }) - if (page === TabType.editor) { - setPage(page) - } - } - } catch { - setProjectStartupInfo(null) - } - })() - } - break - } - case backendModule.BackendType.local: { - if (localBackend != null) { - const project = localBackend - .openProject( - savedProjectStartupInfo.projectAsset.id, - { - executeAsync: false, - cognitoCredentials: null, - parentId: savedProjectStartupInfo.projectAsset.parentId, - }, - savedProjectStartupInfo.projectAsset.title - ) - .then(() => - localBackend.getProjectDetails( - savedProjectStartupInfo.projectAsset.id, - savedProjectStartupInfo.projectAsset.parentId, - savedProjectStartupInfo.projectAsset.title - ) - ) - .catch(error => { - setProjectStartupInfo(null) - throw error - }) - setProjectStartupInfo({ ...savedProjectStartupInfo, project }) - if (page === TabType.editor) { - setPage(page) - } - } - } - } - } - // This MUST only run when the component is mounted. - // eslint-disable-next-line react-hooks/exhaustive-deps - }, []) + client.setQueryData(queryKey, { state: { type: backendModule.ProjectState.openInProgress } }) + + void client.cancelQueries({ queryKey }) + void client.invalidateQueries({ queryKey }) + }, + onError: async (_, { id }) => { + await client.invalidateQueries({ queryKey: createGetProjectDetailsQuery.getQueryKey(id) }) + }, + }) + + const closeProjectMutation = reactQuery.useMutation({ + mutationKey: ['closeProject'], + mutationFn: async ({ type, id, title }: Project) => { + const backend = type === backendModule.BackendType.remote ? 
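Both project mutations above seed the status cache in `onMutate`, so tabs flip to "opening"/"closing" before the request settles, and they cancel in-flight polls so a stale response cannot overwrite the optimistic value. In sketch form:

```ts
import { useMutation, useQueryClient } from '@tanstack/react-query'

// Hypothetical backend call.
declare function openProjectRequest(id: string): Promise<void>

function useOpenProject() {
  const client = useQueryClient()
  return useMutation({
    mutationKey: ['openProject'],
    mutationFn: ({ id }: { id: string }) => openProjectRequest(id),
    onMutate: ({ id }) => {
      const queryKey = ['project', id] as const
      // Flip the cached status to "opening" before the request settles...
      client.setQueryData(queryKey, { state: 'openInProgress' })
      // ...and cancel in-flight polls so they cannot overwrite it.
      void client.cancelQueries({ queryKey })
    },
    onError: (_error, { id }) =>
      client.invalidateQueries({ queryKey: ['project', id] }),
  })
}
```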
remoteBackend : localBackend + + invariant(backend != null, 'Backend is null') + + return backend.closeProject(id, title) + }, + onMutate: ({ id }) => { + const queryKey = createGetProjectDetailsQuery.getQueryKey(id) + + client.setQueryData(queryKey, { state: { type: backendModule.ProjectState.closing } }) + + void client.cancelQueries({ queryKey }) + void client.invalidateQueries({ queryKey }) + }, + onSuccess: (_, { id }) => + client.resetQueries({ queryKey: createGetProjectDetailsQuery.getQueryKey(id) }), + onError: (_, { id }) => + client.invalidateQueries({ queryKey: createGetProjectDetailsQuery.getQueryKey(id) }), + }) + + const client = reactQuery.useQueryClient() + + const renameProjectMutation = reactQuery.useMutation({ + mutationFn: ({ newName, project }: { newName: string; project: Project }) => { + const { parentId, type, id, title } = project + const backend = type === backendModule.BackendType.remote ? remoteBackend : localBackend + + invariant(backend != null, 'Backend is null') + + return backend.updateProject( + id, + { projectName: newName, ami: null, ideVersion: null, parentId }, + title + ) + }, + onSuccess: (_, { project }) => + client.invalidateQueries({ + queryKey: createGetProjectDetailsQuery.getQueryKey(project.id), + }), + }) eventHooks.useEventHandler(assetEvents, event => { switch (event.type) { case AssetEventType.openProject: { - openProjectAbortControllerRef.current?.abort() - openProjectAbortControllerRef.current = null + const { title, parentId, backendType, id, runInBackground } = event + doOpenProject( + { title, parentId, type: backendType, id }, + { openInBackground: runInBackground } + ) + break + } + case AssetEventType.closeProject: { + const { title, parentId, backendType, id } = event + doCloseProject({ title, parentId, type: backendType, id }) break } default: { - // Ignored. + // Ignored. Any missing project-related events should be handled by `ProjectNameColumn`. + // `delete`, `deleteForever`, `restore`, `download`, and `downloadSelected` + // are handled by`AssetRow`. break } } }) - React.useEffect(() => { - if (initializedRef.current) { - if (projectStartupInfo != null) { - // This is INTENTIONAL - `project` is intentionally omitted from this object. 
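`renameProjectMutation` above takes the simpler invalidation route: no cache surgery, just mark the details query stale on success and let the status poll pick up the new title. Sketched with a hypothetical `renameRequest`:

```ts
import { useMutation, useQueryClient } from '@tanstack/react-query'

declare function renameRequest(id: string, newName: string): Promise<void>

function useRenameProject() {
  const client = useQueryClient()
  return useMutation({
    mutationFn: ({ id, newName }: { id: string; newName: string }) =>
      renameRequest(id, newName),
    // No optimistic write here: invalidate and let the poll refetch the name.
    onSuccess: (_data, { id }) =>
      client.invalidateQueries({ queryKey: ['project', id] }),
  })
}
```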
- // eslint-disable-next-line @typescript-eslint/no-unused-vars - const { project, ...rest } = projectStartupInfo - localStorage.set('projectStartupInfo', rest) - } else { - localStorage.delete('projectStartupInfo') - } - } - }, [projectStartupInfo, localStorage]) - - React.useEffect(() => { - localStorage.set('page', page) - }, [page, localStorage]) - React.useEffect( () => inputBindings.attach(sanitizedEventTargets.document.body, 'keydown', { @@ -320,40 +440,125 @@ export default function Dashboard(props: DashboardProps) { } }, [inputBindings]) - const doOpenEditor = React.useCallback(() => { - setPage(TabType.editor) - }, [setPage]) - - const doCloseEditor = React.useCallback( - (id: backendModule.ProjectId) => { - if (id === projectStartupInfo?.projectAsset.id) { - setProjectStartupInfo(currentInfo => { - if (id === currentInfo?.projectAsset.id) { - setPage(TabType.drive) - return null - } else { - return currentInfo - } + const doOpenProject = eventCallbacks.useEventCallback( + (project: Project, options: OpenProjectOptions = {}) => { + const { openInBackground = true } = options + + // since we don't support multitabs, we need to close opened project first + if (launchedProjects.length > 0) { + doCloseAllProjects() + } + + const isOpeningTheSameProject = + client.getMutationCache().find({ + mutationKey: ['openProject'], + predicate: mutation => mutation.options.scope?.id === project.id, + })?.state.status === 'pending' + + if (!isOpeningTheSameProject) { + openProjectMutation.mutate(project) + + const openingProjectMutation = client.getMutationCache().find({ + mutationKey: ['openProject'], + // this is unsafe, but we can't do anything about it + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + predicate: mutation => mutation.state.variables?.id === project.id, + }) + + openingProjectMutation?.setOptions({ + ...openingProjectMutation.options, + scope: { id: project.id }, }) + + addLaunchedProject(project) + + if (!openInBackground) { + doOpenEditor(project.id) + } } - }, - [projectStartupInfo?.projectAsset.id, setPage] + } ) - const doRemoveSelf = React.useCallback(() => { - if (projectStartupInfo?.projectAsset != null) { - const id = projectStartupInfo.projectAsset.id - dispatchAssetListEvent({ type: AssetListEventType.removeSelf, id }) - setProjectStartupInfo(null) + const doOpenEditor = eventCallbacks.useEventCallback((projectId: Project['id']) => { + React.startTransition(() => { + setPage(projectId) + }) + }) + + const doCloseProject = eventCallbacks.useEventCallback((project: Project) => { + client + .getMutationCache() + .findAll({ + mutationKey: ['openProject'], + predicate: mutation => mutation.options.scope?.id === project.id, + }) + .forEach(mutation => { + mutation.setOptions({ ...mutation.options, retry: false }) + mutation.destroy() + }) + + closeProjectMutation.mutate(project) + + client + .getMutationCache() + .findAll({ + mutationKey: ['closeProject'], + // this is unsafe, but we can't do anything about it + // eslint-disable-next-line @typescript-eslint/no-unsafe-member-access + predicate: mutation => mutation.state.variables?.id === project.id, + }) + .forEach(mutation => { + mutation.setOptions({ ...mutation.options, scope: { id: project.id } }) + }) + + removeLaunchedProject(project.id) + + setPage(TabType.drive) + }) + + const doCloseAllProjects = eventCallbacks.useEventCallback(() => { + for (const launchedProject of launchedProjects) { + doCloseProject(launchedProject) } - }, [projectStartupInfo?.projectAsset, dispatchAssetListEvent]) + 
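`doOpenProject` above dedupes concurrent opens through the mutation cache: the freshly started mutation is tagged with `scope: { id: project.id }`, and a later call treats any pending mutation with the same scope as "already opening". The lookup half of that trick, in isolation:

```ts
import { useQueryClient } from '@tanstack/react-query'

function useIsAlreadyOpening() {
  const client = useQueryClient()
  return (projectId: string) =>
    // A pending mutation tagged with this project's scope means an open
    // request is already in flight, so a second one would be redundant.
    client.getMutationCache().find({
      mutationKey: ['openProject'],
      predicate: mutation => mutation.options.scope?.id === projectId,
    })?.state.status === 'pending'
}
```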
}) - const onSignOut = React.useCallback(() => { - if (page === TabType.editor) { - setPage(TabType.drive) + const doRemoveSelf = eventCallbacks.useEventCallback((project: Project) => { + dispatchAssetListEvent({ type: AssetListEventType.removeSelf, id: project.id }) + doCloseProject(project) + }) + + const onSignOut = eventCallbacks.useEventCallback(() => { + setPage(TabType.drive) + doCloseAllProjects() + clearLaunchedProjects() + }) + + const doOpenShareModal = eventCallbacks.useEventCallback(() => { + if (assetManagementApiRef.current != null && selectedProject != null) { + const asset = assetManagementApiRef.current.getAsset(selectedProject.id) + const self = + asset?.permissions?.find( + backendModule.isUserPermissionAnd(permissions => permissions.user.userId === user.userId) + ) ?? null + + if (asset != null && self != null) { + setModal( + { + const nextAsset = updater instanceof Function ? updater(asset) : updater + assetManagementApiRef.current?.setAsset(asset.id, nextAsset) + }} + self={self} + doRemoveSelf={() => { + doRemoveSelf(selectedProject) + }} + eventTarget={null} + /> + ) + } } - setProjectStartupInfo(null) - }, [page, setPage]) + }) return ( @@ -377,27 +582,28 @@ export default function Dashboard(props: DashboardProps) { > {getText('drivePageName')} - {projectStartupInfo != null && ( + + {launchedProjects.map(project => ( { - setPage(TabType.editor) + setPage(project.id) }} onClose={() => { - dispatchAssetEvent({ - type: AssetEventType.closeProject, - id: projectStartupInfo.projectAsset.id, - }) - setProjectStartupInfo(null) - setPage(TabType.drive) + doCloseProject(project) + }} + onLoadEnd={() => { + doOpenEditor(project.id) }} > - {projectStartupInfo.projectAsset.title} + {project.title} - )} + ))} + {page === TabType.settings && ( )} + { setPage(TabType.settings) }} onSignOut={onSignOut} />
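`doOpenShareModal` above is where the pieces meet: the `Dashboard` asks the `AssetsTable` (through the imperative handle registered earlier) for the asset behind the active tab, locates the caller's own permission, and only then mounts the modal. The control flow, reduced to stand-in types:

```ts
interface Asset {
  readonly id: string
  readonly permissions?: readonly { readonly userId: string }[]
}

interface AssetManagementApi {
  readonly getAsset: (id: string) => Asset | null
  readonly setAsset: (id: string, asset: Asset) => void
}

function openShareModal(
  api: AssetManagementApi | null,
  projectId: string | null,
  currentUserId: string,
  showModal: (asset: Asset) => void
) {
  if (api == null || projectId == null) {
    return
  }
  const asset = api.getAsset(projectId)
  const self = asset?.permissions?.find(p => p.userId === currentUserId) ?? null
  // Without the asset and the caller's own permission entry there is
  // nothing that can be shared.
  if (asset != null && self != null) {
    showModal(asset)
  }
}
```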
+