From f2cfd7f86cb9853f523e4a703adbbbcb3defa168 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Tue, 31 Oct 2023 11:03:37 +0100 Subject: [PATCH 01/12] Allow reassigning of BindingsMap (#8190) Fixes #8186 by turning `IllegalStateException` into log message. Re-assigning of `BindingsMap` can happen in the IDE where evaluation of modules is repeated again and again. In addition to that avoid dropping errors in compiler without them being noticed. --- .../instrument/job/EnsureCompiledJob.scala | 36 ++++++++++++------- .../enso/compiler/PackageRepositoryUtils.java | 4 +-- .../compiler/context/CompilerContext.java | 2 -- .../pass/analyse/ExportSymbolAnalysis.java | 5 +-- .../pass/analyse/PrivateModuleAnalysis.java | 6 ++-- .../runtime/TruffleCompilerContext.java | 7 +--- .../interpreter/runtime/IrToTruffle.scala | 2 +- 7 files changed, 34 insertions(+), 28 deletions(-) diff --git a/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/job/EnsureCompiledJob.scala b/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/job/EnsureCompiledJob.scala index 5789daa9bd48..577d4126ad07 100644 --- a/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/job/EnsureCompiledJob.scala +++ b/engine/runtime-instrument-common/src/main/scala/org/enso/interpreter/instrument/job/EnsureCompiledJob.scala @@ -99,19 +99,31 @@ final class EnsureCompiledJob( ctx: RuntimeContext, logger: TruffleLogger ): Option[CompilationStatus] = { - compile(module) - applyEdits(new File(module.getPath)).map { changeset => - compile(module) - .map { _ => - invalidateCaches(module, changeset) - if (module.isIndexed) { - ctx.jobProcessor.runBackground(AnalyzeModuleJob(module, changeset)) - } else { - AnalyzeModuleJob.analyzeModule(module, changeset) - } - runCompilationDiagnostics(module) + val result = compile(module) + result match { + case Left(ex) => + logger.log( + Level.WARNING, + s"Error while ensureCompiledModule 
${module.getName}", + ex + ) + Some(CompilationStatus.Failure) + case _ => + applyEdits(new File(module.getPath)).map { changeset => + compile(module) + .map { _ => + invalidateCaches(module, changeset) + if (module.isIndexed) { + ctx.jobProcessor.runBackground( + AnalyzeModuleJob(module, changeset) + ) + } else { + AnalyzeModuleJob.analyzeModule(module, changeset) + } + runCompilationDiagnostics(module) + } + .getOrElse(CompilationStatus.Failure) } - .getOrElse(CompilationStatus.Failure) } } diff --git a/engine/runtime/src/main/java/org/enso/compiler/PackageRepositoryUtils.java b/engine/runtime/src/main/java/org/enso/compiler/PackageRepositoryUtils.java index 877d782bfdc9..38ec718eb5cc 100644 --- a/engine/runtime/src/main/java/org/enso/compiler/PackageRepositoryUtils.java +++ b/engine/runtime/src/main/java/org/enso/compiler/PackageRepositoryUtils.java @@ -3,7 +3,6 @@ import com.oracle.truffle.api.TruffleFile; import java.util.Optional; import java.util.stream.StreamSupport; -import org.enso.interpreter.util.ScalaConversions; import org.enso.pkg.Package; import org.enso.pkg.QualifiedName; @@ -20,7 +19,8 @@ private PackageRepositoryUtils() {} */ public static Optional getModuleNameForFile( PackageRepository packageRepository, TruffleFile file) { - return ScalaConversions.asJava(packageRepository.getLoadedPackages()).stream() + return scala.jdk.javaapi.CollectionConverters.asJava(packageRepository.getLoadedPackages()) + .stream() .filter(pkg -> file.startsWith(pkg.sourceDir())) .map(pkg -> pkg.moduleNameForFile(file)) .findFirst(); diff --git a/engine/runtime/src/main/java/org/enso/compiler/context/CompilerContext.java b/engine/runtime/src/main/java/org/enso/compiler/context/CompilerContext.java index 24fafdc5e603..6ad07e3db611 100644 --- a/engine/runtime/src/main/java/org/enso/compiler/context/CompilerContext.java +++ b/engine/runtime/src/main/java/org/enso/compiler/context/CompilerContext.java @@ -119,8 +119,6 @@ public abstract static class Module { public 
abstract Package getPackage(); - public abstract boolean isSameAs(org.enso.interpreter.runtime.Module m); - public abstract QualifiedName getName(); public abstract BindingsMap getBindingsMap(); diff --git a/engine/runtime/src/main/java/org/enso/compiler/pass/analyse/ExportSymbolAnalysis.java b/engine/runtime/src/main/java/org/enso/compiler/pass/analyse/ExportSymbolAnalysis.java index b99998d56633..dbee20a15782 100644 --- a/engine/runtime/src/main/java/org/enso/compiler/pass/analyse/ExportSymbolAnalysis.java +++ b/engine/runtime/src/main/java/org/enso/compiler/pass/analyse/ExportSymbolAnalysis.java @@ -12,7 +12,6 @@ import org.enso.compiler.core.ir.module.scope.Export; import org.enso.compiler.data.BindingsMap; import org.enso.compiler.pass.IRPass; -import org.enso.interpreter.util.ScalaConversions; import scala.collection.immutable.Seq; import scala.jdk.javaapi.CollectionConverters; @@ -49,8 +48,10 @@ public Seq precursorPasses() { } @Override + @SuppressWarnings("unchecked") public Seq invalidatedPasses() { - return ScalaConversions.nil(); + Object obj = scala.collection.immutable.Nil$.MODULE$; + return (scala.collection.immutable.List) obj; } @Override diff --git a/engine/runtime/src/main/java/org/enso/compiler/pass/analyse/PrivateModuleAnalysis.java b/engine/runtime/src/main/java/org/enso/compiler/pass/analyse/PrivateModuleAnalysis.java index 98c27baaab21..6e5ecbeade71 100644 --- a/engine/runtime/src/main/java/org/enso/compiler/pass/analyse/PrivateModuleAnalysis.java +++ b/engine/runtime/src/main/java/org/enso/compiler/pass/analyse/PrivateModuleAnalysis.java @@ -2,7 +2,6 @@ import java.util.ArrayList; import java.util.List; -import java.util.Objects; import java.util.UUID; import org.enso.compiler.context.InlineContext; import org.enso.compiler.context.ModuleContext; @@ -14,7 +13,6 @@ import org.enso.compiler.core.ir.module.scope.Import; import org.enso.compiler.data.BindingsMap; import org.enso.compiler.pass.IRPass; -import 
org.enso.interpreter.util.ScalaConversions; import org.enso.pkg.QualifiedName; import scala.Option; import scala.collection.immutable.Seq; @@ -55,8 +53,10 @@ public Seq precursorPasses() { } @Override + @SuppressWarnings("unchecked") public Seq invalidatedPasses() { - return ScalaConversions.nil(); + Object obj = scala.collection.immutable.Nil$.MODULE$; + return (scala.collection.immutable.List) obj; } @Override diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/TruffleCompilerContext.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/TruffleCompilerContext.java index 00a42ede29b2..2bcc3677c18c 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/TruffleCompilerContext.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/TruffleCompilerContext.java @@ -330,7 +330,7 @@ public void invalidateCache() { public void close() { if (map != null) { if (module.bindings != null) { - throw new IllegalStateException("Reassigining bindings to " + module); + loggerCompiler.log(Level.FINE, "Reassigining bindings to {0}", module); } module.bindings = map; } @@ -384,11 +384,6 @@ final org.enso.interpreter.runtime.Module unsafeModule() { return module; } - @Override - public boolean isSameAs(org.enso.interpreter.runtime.Module m) { - return module == m; - } - @Override public QualifiedName getName() { return module.getName(); diff --git a/engine/runtime/src/main/scala/org/enso/interpreter/runtime/IrToTruffle.scala b/engine/runtime/src/main/scala/org/enso/interpreter/runtime/IrToTruffle.scala index b7c3a2f9fe99..ed27f4ca48d7 100644 --- a/engine/runtime/src/main/scala/org/enso/interpreter/runtime/IrToTruffle.scala +++ b/engine/runtime/src/main/scala/org/enso/interpreter/runtime/IrToTruffle.scala @@ -907,7 +907,7 @@ class IrToTruffle( if ( resolution.isInstanceOf[ResolvedConstructor] || !resolution.module .unsafeAsModule() - .isSameAs(moduleScope.getModule) + .equals(moduleScope.getModule.asCompilerModule) ) { 
resolution match { case BindingsMap.ResolvedType(module, tp) => From 660f4b35cebdff5c3e0e8e5e3ea2195b164115f5 Mon Sep 17 00:00:00 2001 From: Ilya Bogdanov Date: Tue, 31 Oct 2023 19:01:54 +0400 Subject: [PATCH 02/12] Preserve error code in the error message from RPC (#8191) Needed for #8158 --- app/gui2/shared/languageServer.ts | 75 +++++++++++++++++++++++++++++-- 1 file changed, 72 insertions(+), 3 deletions(-) diff --git a/app/gui2/shared/languageServer.ts b/app/gui2/shared/languageServer.ts index 2daf6f80fb6c..f88284095f43 100644 --- a/app/gui2/shared/languageServer.ts +++ b/app/gui2/shared/languageServer.ts @@ -2,6 +2,7 @@ import { Client } from '@open-rpc/client-js' import { ObservableV2 } from 'lib0/observable' import { uuidv4 } from 'lib0/random' import { SHA3 } from 'sha3' +import { z } from 'zod' import type { Checksum, ContextId, @@ -22,11 +23,76 @@ import type { Uuid } from './yjsModel' const DEBUG_LOG_RPC = false const RPC_TIMEOUT_MS = 15000 +export enum ErrorCode { + ACCESS_DENIED = 100, + FILE_SYSTEM_ERROR = 1000, + CONTENT_ROOT_NOT_FOUND = 1001, + FILE_NOT_FOUND = 1003, + FILE_EXISTS = 1004, + OPERATION_TIMEOUT = 1005, + NOT_DIRECTORY = 1006, + NOT_FILE = 1007, + CANNOT_OVERWRITE = 1008, + READ_OUT_OF_BOUNDS = 1009, + CANNOT_DECODE = 1010, + STACK_ITEM_NOT_FOUND = 2001, + CONTEXT_NOT_FOUND = 2002, + EMPTY_STACK = 2003, + INVALID_STACK_ITEM = 2004, + MODULE_NOT_FOUND = 2005, + VISUALIZATION_NOT_FOUND = 2006, + VISUALIZATION_EXPRESSION_ERROR = 2007, + FILE_NOT_OPENED = 3001, + TEXT_EDIT_VALIDATION_ERROR = 3002, + INVALID_VERSION = 3003, + WRITE_DENIED = 3004, + CAPABILITY_NOT_ACQUIRED = 5001, + SESSION_NOT_INITIALIZED = 6001, + SESSION_ALREADY_INITIALIZED = 6002, + RESOURCES_INITIALIZATION_ERROR = 6003, + SUGGESTION_DATABASE_ERROR = 7001, + PROJECT_NOT_FOUND = 7002, + MODULE_NAME_NOT_RESOLVED = 7003, + SUGGESTION_NOT_FOUND = 7004, + EDITION_NOT_FOUND = 8001, + LIBRARY_ALREADY_EXISTS = 8002, + LIBRARY_REPOSITORY_AUTHENTICATION_ERROR = 8003, + 
LIBRARY_PUBLISH_ERROR = 8004, + LIBRARY_UPLOAD_ERROR = 8005, + LIBRARY_DOWNLOAD_ERROR = 8006, + LOCAL_LIBRARY_NOT_FOUND = 8007, + LIBRARY_NOT_RESOLVED = 8008, + INVALID_LIBRARY_NAME = 8009, + DEPENDENCY_DISCOVERY_ERROR = 8010, + INVALID_SEMVER_VERSION = 8011, + EXPRESSION_NOT_FOUND = 9001, + FAILED_TO_APPLY_EDITS = 9002, + REFACTORING_NOT_SUPPORTED = 9003, +} + +const RemoteRpcErrorSchema = z.object({ + code: z.nativeEnum(ErrorCode), + message: z.string(), + data: z.optional(z.any()), +}) +type RemoteRpcErrorParsed = z.infer + +export class RemoteRpcError { + code: ErrorCode + message: string + data?: any + constructor(error: RemoteRpcErrorParsed) { + this.code = error.code + this.message = error.message + this.data = error.data + } +} + export class LsRpcError extends Error { - cause: Error + cause: RemoteRpcError | Error request: string params: object - constructor(cause: Error, request: string, params: object) { + constructor(cause: RemoteRpcError | Error, request: string, params: object) { super(`Language server request '${request}' failed.`) this.cause = cause this.request = request @@ -64,7 +130,10 @@ export class LanguageServer extends ObservableV2 { } return await this.client.request({ method, params }, RPC_TIMEOUT_MS) } catch (e) { - if (e instanceof Error) { + const remoteError = RemoteRpcErrorSchema.safeParse(e) + if (remoteError.success) { + throw new LsRpcError(new RemoteRpcError(remoteError.data), method, params) + } else if (e instanceof Error) { throw new LsRpcError(e, method, params) } throw e From 79011bd550fc7cc26fcbe6882a8e07ae033879a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Rados=C5=82aw=20Wa=C5=9Bko?= Date: Tue, 31 Oct 2023 16:19:55 +0100 Subject: [PATCH 03/12] Implement `Table.lookup_and_replace` in Database (#8146) - Closes #7981 - Adds a `RUNTIME_ERROR` operation into the DB dialect, that may be used to 'crash' a query if a condition is met - used to validate if `lookup_and_replace` invariants are still satisfied when the query is 
materialized. - Removes old `Table_Helpers.is_table` and `same_backend` checks, in favour of the new way of checking this that relies on `Table.from` conversions, and is much simpler to use and also more robust. --- .../Internal/Redshift_Error_Mapper.enso | 4 + .../Database/0.0.0-dev/src/Data/Table.enso | 68 ++--- .../Database/0.0.0-dev/src/Errors.enso | 24 ++ .../src/Internal/Base_Generator.enso | 15 +- .../Internal/Common/Lookup_Query_Helper.enso | 239 ++++++++++++++++++ .../0.0.0-dev/src/Internal/Error_Mapper.enso | 8 + .../0.0.0-dev/src/Internal/Helpers.enso | 14 +- .../src/Internal/IR/SQL_Expression.enso | 12 + .../Internal/Postgres/Postgres_Dialect.enso | 26 +- .../Postgres/Postgres_Error_Mapper.enso | 11 +- .../src/Internal/SQLite/SQLite_Dialect.enso | 21 +- .../Internal/SQLite/SQLite_Error_Mapper.enso | 12 +- .../Internal/SQLite/SQLite_Type_Mapping.enso | 7 +- .../Table/0.0.0-dev/src/Data/Table.enso | 109 ++++---- .../src/Internal/Lookup_Helpers.enso | 18 +- .../0.0.0-dev/src/Internal/Table_Helpers.enso | 9 - .../table/data/table/join/LookupJoin.java | 23 +- .../Common_Table_Operations/Core_Spec.enso | 16 +- .../Join/Cross_Join_Spec.enso | 20 +- .../Join/Join_Spec.enso | 13 +- .../Join/Lookup_Spec.enso | 210 ++++++++++++++- .../Join/Union_Spec.enso | 13 +- .../Join/Zip_Spec.enso | 15 +- .../src/Database/Common/Common_Spec.enso | 48 ++++ 24 files changed, 793 insertions(+), 162 deletions(-) create mode 100644 distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso diff --git a/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Error_Mapper.enso b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Error_Mapper.enso index 98f06d3652e6..8b31faf072d5 100644 --- a/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Error_Mapper.enso +++ b/distribution/lib/Standard/AWS/0.0.0-dev/src/Database/Redshift/Internal/Redshift_Error_Mapper.enso @@ -11,3 
+11,7 @@ type Redshift_Error_Mapper # Currently not implemented, skipping the error recognition. _ = error False + + ## PRIVATE + transform_custom_errors : SQL_Error -> Any + transform_custom_errors error = error diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index 41c7a41103c0..ccebfecdf22e 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -29,7 +29,6 @@ import Standard.Table.Internal.Add_Row_Number import Standard.Table.Internal.Aggregate_Column_Helper import Standard.Table.Internal.Column_Naming_Helper.Column_Naming_Helper import Standard.Table.Internal.Constant_Column.Constant_Column -import Standard.Table.Internal.Lookup_Helpers import Standard.Table.Internal.Problem_Builder.Problem_Builder import Standard.Table.Internal.Table_Helpers import Standard.Table.Internal.Table_Helpers.Table_Column_Helper @@ -50,6 +49,7 @@ import project.Data.Take_Drop_Helpers import project.Internal.Aggregate_Helper import project.Internal.Base_Generator import project.Internal.Common.Database_Join_Helper +import project.Internal.Common.Lookup_Query_Helper import project.Internal.Helpers import project.Internal.IR.Context.Context import project.Internal.IR.From_Spec.From_Spec @@ -61,7 +61,7 @@ import project.Internal.IR.SQL_Expression.SQL_Expression import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind import project.Internal.SQL_Type_Reference.SQL_Type_Reference from project.Data.Take_Drop_Helpers import Take_Drop -from project.Errors import Integrity_Error, Table_Not_Found, Unsupported_Database_Operation +from project.Errors import Integrity_Error, Table_Not_Found, Unsupported_Database_Operation, SQL_Error polyglot java import java.sql.JDBCType polyglot java import java.util.UUID @@ -1212,16 +1212,12 @@ type Table ## PRIVATE Implementation of both `join` and `cross_join`. 
join_or_cross_join : Table -> Join_Kind | Join_Kind_Cross -> Vector (Join_Condition | Text) | Text -> Text -> Problem_Behavior -> Table - join_or_cross_join self right join_kind on right_prefix on_problems = - can_proceed = if Table_Helpers.is_table right . not then Error.throw (Type_Error.Error Table right "right") else - same_backend = case right of - _ : Table -> True - _ -> False + join_or_cross_join self right:Table join_kind on right_prefix on_problems = + can_proceed = Helpers.ensure_same_connection "table" [self, right] <| join_conditions_ok = join_kind != Join_Kind_Cross.Cross || on == [] - if same_backend . not then Error.throw (Illegal_Argument.Error "Currently cross-backend joins are not supported. You need to upload the in-memory table before joining it with a database one, or materialize this table.") else - if join_conditions_ok . not then Error.throw (Illegal_Argument.Error "Cross join does not allow join conditions") else - True - if can_proceed then + if join_conditions_ok . not then Error.throw (Illegal_Argument.Error "Cross join does not allow join conditions") else + True + can_proceed.if_not_error <| left = self table_name_deduplicator = self.connection.base_connection.table_naming_helper.create_unique_name_strategy table_name_deduplicator.mark_used [left.name, right.name] @@ -1314,14 +1310,13 @@ type Table The ordering of rows in the resulting table is not specified. 
cross_join : Table -> Integer | Nothing -> Text -> Problem_Behavior -> Table - cross_join self right right_row_limit=100 right_prefix="Right " on_problems=Report_Warning = - if check_db_table "right" right then - limit_problems = case right_row_limit.is_nothing.not && (right.row_count > right_row_limit) of - True -> - [Cross_Join_Row_Limit_Exceeded.Error right_row_limit right.row_count] - False -> [] - on_problems.attach_problems_before limit_problems <| - self.join_or_cross_join right join_kind=Join_Kind_Cross.Cross on=[] right_prefix on_problems + cross_join self right:Table right_row_limit=100 right_prefix="Right " on_problems=Report_Warning = + limit_problems = case right_row_limit.is_nothing.not && (right.row_count > right_row_limit) of + True -> + [Cross_Join_Row_Limit_Exceeded.Error right_row_limit right.row_count] + False -> [] + on_problems.attach_problems_before limit_problems <| + self.join_or_cross_join right join_kind=Join_Kind_Cross.Cross on=[] right_prefix on_problems ## Replaces values in this table by values from a lookup table. New values are looked up in the lookup table based on the `key_columns`. @@ -1358,8 +1353,8 @@ type Table specified in `key_columns`, a `Missing_Input_Columns` error is raised. - If an empty vector is provided for `key_columns`, a `No_Input_Columns_Selected` error is raised. - - If the lookup table contains multiple rows with the same values in - the `key_columns`, an `Non_Unique_Key` error is raised. + - If a single row is matched by multiple entries in the lookup table, + a `Non_Unique_Key` error is raised. - If a column that is being updated from the lookup table has a type that is not compatible with the type of the corresponding column in this table, a `No_Common_Type` error is raised. @@ -1377,8 +1372,8 @@ type Table @key_columns Widget_Helpers.make_column_name_vector_selector lookup_and_replace : Table -> (Vector (Integer | Text | Regex) | Text | Integer | Regex) -> Boolean -> Boolean -> Problem_Behavior -> Table ! 
Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup lookup_and_replace self lookup_table:Table key_columns:(Vector (Integer | Text | Regex) | Text | Integer | Regex) add_new_columns:Boolean=True allow_unmatched_rows:Boolean=True on_problems:Problem_Behavior=Problem_Behavior.Report_Warning = - _ = [lookup_table, key_columns, add_new_columns, allow_unmatched_rows, on_problems] - Error.throw (Unsupported_Database_Operation.Error "Table.lookup_and_replace is not implemented yet for the Database backends.") + Helpers.ensure_same_connection "table" [self, lookup_table] <| + Lookup_Query_Helper.build_lookup_query self lookup_table key_columns add_new_columns allow_unmatched_rows on_problems ## ALIAS join by row position GROUP Standard.Base.Calculations @@ -1522,11 +1517,11 @@ type Table retyped to the `Mixed` type to indicate that intention. Note that the `Mixed` type may not be supported by most Database backends. union : (Table | Vector Table) -> Match_Columns -> Boolean | Report_Unmatched -> Boolean -> Problem_Behavior -> Table - union self tables match_columns=Match_Columns.By_Name keep_unmatched_columns=Report_Unmatched allow_type_widening=True on_problems=Report_Warning = + union self tables:Vector|Table match_columns=Match_Columns.By_Name keep_unmatched_columns=Report_Unmatched allow_type_widening=True on_problems=Report_Warning = all_tables = case tables of - v : Vector -> [self] + v + v : Vector -> [self] + (v.map t-> Table.from t) single_table -> [self, single_table] - all_tables.all (check_db_table "tables") . 
if_not_error <| + Helpers.ensure_same_connection "table" all_tables <| problem_builder = Problem_Builder.new matched_column_sets = Match_Columns_Helpers.match_columns all_tables match_columns keep_unmatched_columns problem_builder dialect = self.connection.dialect @@ -2154,7 +2149,8 @@ type Table False -> sql = preprocessed.to_sql column_type_suggestions = preprocessed.internal_columns.map .sql_type_reference - materialized_table = self.connection.read_statement sql column_type_suggestions + materialized_table = self.connection.read_statement sql column_type_suggestions . catch SQL_Error sql_error-> + Error.throw (self.connection.dialect.get_error_mapper.transform_custom_errors sql_error) expected_types = self.columns.map .value_type actual_types = materialized_table.columns.map .value_type @@ -2527,19 +2523,6 @@ type Wrapped_Error ## PRIVATE Value value -## PRIVATE - Checks if the argument is a proper table and comes from the current backend. - It returns True or throws a dataflow error explaining the issue. - - TODO [RW]: this should check that the tables are on the same connection -check_db_table arg_name table = - if Table_Helpers.is_table table . not then Error.throw (Type_Error.Error Table table arg_name) else - same_backend = table.is_a Table - case same_backend of - False -> - Error.throw (Illegal_Argument.Error "Currently cross-backend operations are not supported. Materialize the table using `.read` before mixing it with an in-memory Table.") - True -> True - ## PRIVATE By default, join on the first column, unless it's a cross join, in which case there are no join conditions. @@ -2553,5 +2536,10 @@ Materialized_Table.from (that:Table) = _ = [that] Error.throw (Illegal_Argument.Error "Currently cross-backend operations are not supported. 
Materialize the table using `.read` before mixing it with an in-memory Table.") +## PRIVATE +Table.from (that:Materialized_Table) = + _ = [that] + Error.throw (Illegal_Argument.Error "Currently cross-backend operations are not supported. Either materialize the other table using `.read` or upload the table into the database using `.select_into_database_table`.") + ## PRIVATE Table_Ref.from (that:Table) = Table_Ref.Value that diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso index 24b0ab6bd4e2..f833454482dd 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Errors.enso @@ -203,3 +203,27 @@ type Unsupported_Database_Encoding Pretty print the unsupported database encoding warning. to_display_text : Text to_display_text self = self.message + +## Indicates that the database has been modified between the time the query was + prepared and the time it was executed, breaking an expected invariant and + potentially causing data corruption. + + The query needs to be rebuilt again. +type Invariant_Violation + ## PRIVATE + Indicates that the database has been modified between the time the query + was prepared and the time it was executed, breaking an expected invariant + and potentially causing data corruption. + + Arguments: + - message: A message describing the broken invariant, if available. + It may be set to `Nothing` if the backend cannot decode the invariant + message from the SQL error. + - original_cause: The original SQL error that this error has been + translated from. + Error (message:Text|Nothing) (original_cause:SQL_Error) + + ## PRIVATE + to_display_text : Text + to_display_text self = + "The database has been modified between the time the query was prepared and executed, leading to possibly incorrect results. Please re-run the workflow to retry." 
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso index c9e30199c009..2262da5ec63b 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso @@ -193,7 +193,7 @@ base_dialect = nulls = [["IS_NULL", make_right_unary_op "IS NULL"], ["FILL_NULL", make_function "COALESCE"]] contains = [["IS_IN", make_is_in], ["IS_IN_COLUMN", make_is_in_column]] types = [simple_cast] - windows = [["ROW_NUMBER", make_row_number]] + windows = [["ROW_NUMBER", make_row_number], ["ROW_NUMBER_IN_GROUP", make_row_number_in_group]] base_map = Map.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains + types + windows) Internal_Dialect.Value base_map wrap_in_quotes @@ -219,7 +219,7 @@ make_iif arguments = case arguments.length of An IR expression for constructing SQL `CASE` expressions. case_when : Vector Builder -> Builder case_when arguments = - if arguments.length < 4 then Error.throw (Illegal_State.Error "CASE_WHEN needs at least 3 arguments.") else + if arguments.length < 3 then Error.throw (Illegal_State.Error "CASE_WHEN needs at least 3 arguments.") else fallback = arguments.last cases = arguments.drop (Last 1) if cases.length % 2 != 0 then Error.throw (Illegal_State.Error "CASE_WHEN expects an odd number of arguments (two arguments for each case and a fallback).") else @@ -286,6 +286,14 @@ make_row_number (arguments : Vector) (metadata : Row_Number_Metadata) = if argum Builder.code "PARTITION BY " ++ Builder.join ", " grouping Builder.code "(row_number() OVER (" ++ group_part ++ " ORDER BY " ++ Builder.join ", " ordering ++ ") * " ++ step.paren ++ " + " ++ offset.paren ++ ")" +## PRIVATE + A helper for `lookup_and_replace`, and perhaps other operation. + It creates an expression that returns a row number within a group. 
+make_row_number_in_group arguments = + if arguments.length == 0 then + Panic.throw <| Illegal_State.Error "The operation ROW_NUMBER_IN_GROUP requires at least one argument." + Builder.code "row_number() OVER (PARTITION BY " ++ (Builder.join ", " arguments) ++ ")" + ## PRIVATE Builds code for an expression. @@ -299,6 +307,9 @@ generate_expression dialect expr = case expr of dialect.wrap_identifier origin ++ '.' ++ dialect.wrap_identifier name SQL_Expression.Constant value -> Builder.interpolation value SQL_Expression.Literal value -> Builder.code value + SQL_Expression.Text_Literal value -> + escaped = value.replace "'" "''" + Builder.code ("'" + escaped + "'") SQL_Expression.Operation kind arguments metadata -> op = dialect.operation_map.get kind (Error.throw <| Unsupported_Database_Operation.Error kind) parsed_args = arguments.map (generate_expression dialect) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso new file mode 100644 index 000000000000..b61d8e8da793 --- /dev/null +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Common/Lookup_Query_Helper.enso @@ -0,0 +1,239 @@ +from Standard.Base import all +import Standard.Base.Errors.Illegal_State.Illegal_State +from Standard.Base.Runtime import assert + +import Standard.Table.Internal.Lookup_Helpers +import Standard.Table.Internal.Lookup_Helpers.Lookup_Column +from Standard.Table import Join_Kind, Value_Type +from Standard.Table.Errors import all + +import project.Data.Table.Table +import project.Internal.IR.Context.Context +import project.Internal.IR.From_Spec.From_Spec +import project.Internal.IR.Internal_Column.Internal_Column +import project.Internal.IR.SQL_Expression.SQL_Expression +import project.Internal.IR.SQL_Join_Kind.SQL_Join_Kind +import project.Internal.SQL_Type_Reference.SQL_Type_Reference +from project.Internal.Upload_Table import 
check_for_null_keys + +## PRIVATE + Implementation of `lookup_and_replace` for Database backend. + See `Table.lookup_and_replace` for more details. +build_lookup_query : Table -> Table -> (Vector (Integer | Text | Regex) | Text | Integer | Regex) -> Boolean -> Boolean -> Problem_Behavior -> Table ! Missing_Input_Columns | Non_Unique_Key | Unmatched_Rows_In_Lookup +build_lookup_query base_table lookup_table key_columns add_new_columns allow_unmatched_rows on_problems = + lookup_columns = Lookup_Helpers.prepare_columns_for_lookup base_table lookup_table key_columns add_new_columns allow_unmatched_rows on_problems + lookup_columns.if_not_error <| check_initial_invariants base_table lookup_table lookup_columns allow_unmatched_rows <| + column_naming_helper = base_table.connection.base_connection.column_naming_helper + unique_name_strategy = column_naming_helper.create_unique_name_strategy + unique_name_strategy.mark_used base_table.column_names + unique_name_strategy.mark_used lookup_table.column_names + + subquery_setup = prepare_subqueries base_table lookup_table lookup_columns unique_name_strategy + + # We need to construct the context _before_ constructing the columns, to be able to use it in column construction (for inferring types). + new_ctx = make_context_for_lookup_join lookup_columns subquery_setup + infer_type_in_result expr = + SQL_Type_Reference.new base_table.connection new_ctx expr + + ## TODO [RW] here we will perform as many fetches as there are + Replace_Column instances, but technically we could perform just + one fetch fetching all column types - TODO we should do that. + See #6118. 
+ new_columns = lookup_columns.map_with_index ix-> c-> case c of + Lookup_Column.Key_Column _ _ -> subquery_setup.get_self_column ix + Lookup_Column.Keep_Column _ -> subquery_setup.get_self_column ix + Lookup_Column.Replace_Column _ _ expected_type -> + dialect = base_table.connection.dialect + subquery_setup.create_merged_column ix expected_type dialect infer_type_in_result allow_unmatched_rows + Lookup_Column.Add_Column _ -> subquery_setup.get_lookup_column ix + + ## Originally, I wanted to add invariant checks to all columns (or some of them), + but storing it in WHERE has 2 benefits: + 1. The query is actually _simpler_ - instead of having the check in the expression for _every_ column + or multiple columns, we have it only once. + 2. We have a guarantee that the query optimizer will not remove it (which could happen if it was a separate + column, or was added only to some column and these columns got removed in subsequent steps) - which is + paramount to ensuring that our rows will not get duplicated in case the key uniqueness gets violated. + new_ctx_with_invariant_check = new_ctx.add_where_filters [make_invariant_check subquery_setup.lookup_counter allow_unmatched_rows] + + precheck_for_duplicate_matches lookup_columns subquery_setup base_table.connection new_ctx <| + Table.Value subquery_setup.new_table_name base_table.connection new_columns new_ctx_with_invariant_check + +## PRIVATE + Checks if they key contains NULL values or if there would be unmatched rows + (if `allow_unmatched_rows` is `False`), and reports any errors. +check_initial_invariants : Table -> Table -> Vector Lookup_Column -> Boolean -> Any -> Any +check_initial_invariants base_table lookup_table lookup_columns allow_unmatched_rows ~continuation = + key_column_names = lookup_columns.flat_map c-> case c of + Lookup_Column.Key_Column base_column lookup_column -> + assert (base_column.name == lookup_column.name) "Columns should have equal names." 
+ [lookup_column.name] + _ -> [] + check_for_null_keys lookup_table key_column_names <| + if allow_unmatched_rows then continuation else + unmatched_rows = base_table.join lookup_table on=key_column_names join_kind=Join_Kind.Left_Exclusive . select_columns key_column_names + unmatched_example = unmatched_rows.read max_rows=1 + if unmatched_example.row_count == 0 then continuation else + first_row = unmatched_example.rows.first + Error.throw (Unmatched_Rows_In_Lookup.Error first_row.to_vector) + +## PRIVATE + Prepares the lookup table counter that is used for two things: + 1. It allows us to check if a given row had found a match in the lookup row + and decide which value to use in `Replace_Column` case (the counter will + be `1` if there is a match and `NULL` if there is not). + 2. It allows us to find duplicate matches - if a row with counter >1 is found, + that means that a single row has matched multiple rows in the lookup table + and we should report an error. +make_lookup_counter_column dialect lookup_columns unique_name_strategy = + grouping_expressions = lookup_columns.flat_map c-> case c of + Lookup_Column.Key_Column _ lookup_column -> [lookup_column.expression] + _ -> [] + row_number_expression = SQL_Expression.Operation "ROW_NUMBER_IN_GROUP" grouping_expressions + sql_type = dialect.get_type_mapping.value_type_to_sql Value_Type.Integer Problem_Behavior.Ignore + Internal_Column.Value (unique_name_strategy.make_unique "lookup_counter") (SQL_Type_Reference.from_constant sql_type) row_number_expression + +## PRIVATE + Gathers information about prepared subqueries in one place - to make it + easier to pass this context between various helper functions. + + The setup is prepared in such a way, that for each `Lookup_Column` we have a + vector of base table columns and lookup table columns corresponding to it. + Depending on the type of the `Lookup_Column`, one of these vectors may be + empty. 
The helper functions `get_self_column` and `get_lookup_column` allow + easy access of the corresponding columns for the given index - this should be + used with `lookup_columns.map_with_index`. +type Lookup_Subquery_Setup + ## PRIVATE + Value self_sub lookup_sub lookup_counter new_table_name + + ## PRIVATE + get_self_column self ix = + self.self_sub.new_columns.at ix . first + + ## PRIVATE + get_lookup_column self ix = + self.lookup_sub.new_columns.at ix . first + + ## PRIVATE + Creates an `Internal_Column` representing the `Replace_Column` scenario: + taking data from lookup table if it was matched, and from base table + otherwise. + This method also ensures that the column has the expected type, unifying + types of the two sources. + create_merged_column self ix expected_type dialect infer_type_in_result allow_unmatched_rows = + self_col = self.get_self_column ix + lookup_col = self.get_lookup_column ix + is_lookup_found = SQL_Expression.Operation "==" [self.lookup_counter.expression, SQL_Expression.Literal "1"] + expression = case allow_unmatched_rows of + True -> SQL_Expression.Operation "CASE" [is_lookup_found, lookup_col.expression, self_col.expression] + False -> Error.throw (Illegal_State.Error "Assumed that prepare_columns_for_lookup never returns Replace_Column if allow_unmatched_rows=False.
This is a bug in the Database library.") + input_column = Internal_Column.Value self_col.name (infer_type_in_result expression) expression + adapted = dialect.adapt_unified_column input_column expected_type infer_type_in_result + Internal_Column.Value self_col.name adapted.sql_type_reference adapted.expression + +## PRIVATE + Wraps the two source tables into subqueries and creates a set of new columns + (encapsulated as `Lookup_Subquery_Setup`), together with the `lookup_counter` + column (see `make_lookup_counter_column`), that are valid in the context of a + lookup join query (it translates the source columns valid in the input + contexts, to the external join context). +prepare_subqueries base_table lookup_table lookup_columns unique_name_strategy = + table_name_deduplicator = base_table.connection.base_connection.table_naming_helper.create_unique_name_strategy + self_alias = table_name_deduplicator.make_unique base_table.name + lookup_alias = table_name_deduplicator.make_unique lookup_table.name + new_table_name = table_name_deduplicator.make_unique <| + base_table.name + "_" + lookup_table.name + + lookup_counter_base = make_lookup_counter_column lookup_table.connection.dialect lookup_columns unique_name_strategy + + self_requested_columns = Vector.new_builder + lookup_requested_columns = Vector.new_builder + + lookup_columns.each c-> case c of + Lookup_Column.Key_Column base_column key_column -> + self_requested_columns.append [base_column] + lookup_requested_columns.append [key_column] + Lookup_Column.Keep_Column base_column -> + self_requested_columns.append [base_column] + lookup_requested_columns.append [] + Lookup_Column.Replace_Column base_column lookup_column _ -> + self_requested_columns.append [base_column] + lookup_requested_columns.append [lookup_column] + Lookup_Column.Add_Column lookup_column -> + self_requested_columns.append [] + lookup_requested_columns.append [lookup_column] + + self_sub = base_table.context.as_subquery self_alias 
self_requested_columns.to_vector + lookup_sub = lookup_table.context.as_subquery lookup_alias lookup_requested_columns.to_vector+[[lookup_counter_base]] + lookup_counter = lookup_sub.new_columns.at -1 . first + + Lookup_Subquery_Setup.Value self_sub lookup_sub lookup_counter new_table_name + +## PRIVATE + Creates a context for the lookup join query. +make_context_for_lookup_join lookup_columns subquery_setup = + on_expressions = (_.flatten) <| lookup_columns.map_with_index ix-> c-> case c of + Lookup_Column.Key_Column _ _ -> + self_col = subquery_setup.get_self_column ix + lookup_col = subquery_setup.get_lookup_column ix + [SQL_Expression.Operation "==" [self_col.expression, lookup_col.expression]] + _ -> [] + + new_from = From_Spec.Join SQL_Join_Kind.Left subquery_setup.self_sub.subquery subquery_setup.lookup_sub.subquery on_expressions + Context.for_subquery new_from + +## PRIVATE + Runs a query that checks if there are any duplicate matches in the lookup + result. The query tries to minimize the amount of work and data that is + transferred, but it may not be cheap. It is however needed to be able to + report errors early. +precheck_for_duplicate_matches lookup_columns subquery_setup connection new_ctx ~continuation = + key_columns_for_duplicate_check = (_.flatten) <| lookup_columns.map_with_index ix-> c-> case c of + Lookup_Column.Key_Column _ _ -> [subquery_setup.get_self_column ix] + _ -> [] + table_for_duplicate_check = Table.Value subquery_setup.new_table_name connection [subquery_setup.lookup_counter]+key_columns_for_duplicate_check new_ctx + duplicate_lookup_matches = table_for_duplicate_check.filter 0 (Filter_Condition.Greater than=1) . 
read max_rows=1 + case duplicate_lookup_matches.row_count > 0 of + True -> + first_example_row = duplicate_lookup_matches.read.rows.first.to_vector + duplicate_example_count = first_example_row.first + duplicate_example_key = first_example_row.drop 1 + Error.throw (Non_Unique_Key.Error (key_columns_for_duplicate_check.map .name) duplicate_example_key duplicate_example_count) + False -> continuation + +## PRIVATE + Creates an expression that can be baked into the query (preferably in such + a way that it will not get removed after query transformations), that will + fail if the 'key uniqueness' invariant and the 'all rows matching' + (if `allow_unmatched_rows=False`) are violated when the query is being + materialized. + + This is needed, because the initial checks are run when the query is built, + but there is no guarantee that the table will not be changed before the query + is actually materialized. If in the meantime, the data is modified and + invariants are broken, the query may return completely unexpected results + (e.g. rows of the source table could get duplicated, which should never + happen with `lookup_and_replace`). + + This additional check ensures that if the invariants are broken, the query + will fail instead of returning corrupted data. Its error reporting may be + limited in comparison to the initial checks which may compute more + information.
+make_invariant_check lookup_counter allow_unmatched_rows = + make_error message = + SQL_Expression.Operation "RUNTIME_ERROR" [SQL_Expression.Text_Literal "lookup_and_replace: "+message, lookup_counter.expression] + + exact_match = + is_matched = SQL_Expression.Operation "==" [lookup_counter.expression, SQL_Expression.Literal "1"] + [is_matched, SQL_Expression.Literal "TRUE"] + unmatched = + is_unmatched = SQL_Expression.Operation "IS_NULL" [lookup_counter.expression] + case allow_unmatched_rows of + True -> [is_unmatched, SQL_Expression.Literal "TRUE"] + False -> [is_unmatched, make_error "Some rows have no matches in the lookup table."] + + # This will be matched if the first branches do not match - this should happen if there is more than 1 match for a row. + fallback = [make_error "Duplicate matches found in the lookup table."] + + SQL_Expression.Operation "CASE" unmatched+exact_match+fallback diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Error_Mapper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Error_Mapper.enso index 393d1800ae82..15a103b48c5f 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Error_Mapper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Error_Mapper.enso @@ -15,3 +15,11 @@ type Error_Mapper is_primary_key_violation error = _ = error Unimplemented.throw "This is an interface only." + + ## PRIVATE + Called by `Table.read`, allowing the dialect to transform a generic + `SQL_Error` into a more specific error type, if applicable. + transform_custom_errors : SQL_Error -> Any + transform_custom_errors error = + _ = error + Unimplemented.throw "This is an interface only." 
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Helpers.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Helpers.enso index 58c601c8de93..4c894621f25c 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Helpers.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Helpers.enso @@ -1,4 +1,6 @@ from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +from Standard.Base.Runtime import assert import project.Data.Column.Column import project.Data.Table.Table @@ -32,7 +34,17 @@ check_integrity entity1 entity2 = - entity2: The entity to check against the first. check_connection : (Table | Column) -> (Table | Column) -> Boolean check_connection entity1 entity2 = - Meta.is_same_object entity1.connection entity2.connection + # The `if_not_error` is needed `Meta.is_same_object` does not forward dataflow errors. + entity1.if_not_error <| entity2.if_not_error <| + Meta.is_same_object entity1.connection entity2.connection + +## PRIVATE +ensure_same_connection : Text -> Vector -> Any -> Any ! Illegal_Argument +ensure_same_connection name entities ~continuation = + assert entities.not_empty + all_same = entities.all entity-> check_connection entity entities.first + if all_same then continuation else + Error.throw (Illegal_Argument.Error "The input "+name+"s come from different connections. Cross-connection operations are not supported.") ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/SQL_Expression.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/SQL_Expression.enso index 4605fa842da9..6c9210577117 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/SQL_Expression.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/SQL_Expression.enso @@ -36,6 +36,18 @@ type SQL_Expression as-is into a query. 
Literal (value : Text) + ## PRIVATE + A text literal that should be inserted into a query, wrapped in quotes + with any quotes it may contain escaped. + + This is useful to avoid requiring unnecessary interpolations for text + constants in queries. + + User-provided values should always be passed as interpolated `Constant`s. + This should only be used for 'statically' known constants - ones baked + into the library source code. + Text_Literal (value : Text) + ## PRIVATE The internal representation of an SQL expression built from an operation diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso index 818284970ae6..a6a68f494b23 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso @@ -297,7 +297,8 @@ make_internal_generator_dialect = stats = [agg_median, agg_mode, agg_percentile, stddev_pop, stddev_samp] date_ops = [make_extract_as_int "year", make_extract_as_int "quarter", make_extract_as_int "month", make_extract_as_int "week", make_extract_as_int "day", make_extract_as_int "hour", make_extract_as_int "minute", make_extract_fractional_as_int "second", make_extract_fractional_as_int "millisecond" modulus=1000, make_extract_fractional_as_int "microsecond" modulus=1000, ["date_add", make_date_add], ["date_diff", make_date_diff]] special_overrides = [is_null, is_empty] - my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + date_ops + special_overrides + other = [["RUNTIME_ERROR", make_runtime_error_op]] + my_mappings = text + counts + stats + first_last_aggregators + arith_extensions + bool + date_ops + special_overrides + other Base_Generator.base_dialect . 
extend_with my_mappings ## PRIVATE @@ -727,3 +728,26 @@ as_int32 expr = ## PRIVATE postgres_statement_setter = Statement_Setter.default + +## PRIVATE + The RUNTIME_ERROR operation should allow the query to compile fine and it + will not prevent it from running if the branch including this operation is + not taken. But if the branch is computed, it should ensure the query fails. + + This query never returns a value, so its type should be polymorphic. However, + that is not possible - so currently it just 'pretends' that it would return a + Boolean - because that is the type we expect in the use-case. This can be + altered if needed. + + It takes a variable as the second argument. It can be any value that is not + statically known - this ensure that the optimizer will not be able to + pre-compute the expression too early (which could make the query fail + spuriously). See `make_invariant_check` in `Lookup_Query_Helper` for an + example. +make_runtime_error_op arguments = + if arguments.length != 2 then + Panic.throw (Illegal_Argument.Error "RUNTIME_ERROR takes exactly 2 arguments (error message and a variable to ensure deferred execution).") + error_message = arguments.at 0 + variable_to_defer = arguments.at 1 + + Builder.code "CAST('[ENSO INVARIANT VIOLATED: '||" ++ error_message ++ "||'] '||COALESCE(" ++ variable_to_defer ++ "::TEXT,'NULL') AS BOOLEAN)" diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Error_Mapper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Error_Mapper.enso index 72f29b81a651..6d8fc474ebcf 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Error_Mapper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Error_Mapper.enso @@ -1,6 +1,6 @@ from Standard.Base import all -from project.Errors import SQL_Error +from project.Errors import SQL_Error, Invariant_Violation ## PRIVATE type Postgres_Error_Mapper 
@@ -9,3 +9,12 @@ type Postgres_Error_Mapper is_primary_key_violation : SQL_Error -> Boolean is_primary_key_violation error = error.java_exception.getMessage.contains "duplicate key value violates unique constraint" + + ## PRIVATE + transform_custom_errors : SQL_Error -> Any + transform_custom_errors error = + message = error.java_exception.getMessage + if message.contains "ENSO INVARIANT VIOLATED" . not then error else + payloads = message.tokenize "\[ENSO INVARIANT VIOLATED: (.*)\]" + if payloads.length != 1 then error else + Invariant_Violation.Error payloads.first error diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso index 2ea4f01f6c11..02ac7adad4fb 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Dialect.enso @@ -286,9 +286,10 @@ make_internal_generator_dialect = counts = [agg_count_is_null, agg_count_empty, agg_count_not_empty, ["COUNT_DISTINCT", agg_count_distinct], ["COUNT_DISTINCT_INCLUDE_NULL", agg_count_distinct_include_null]] stats = [agg_stddev_pop, agg_stddev_samp] arith_extensions = [is_inf, floating_point_div, mod_op] + other = [["RUNTIME_ERROR", make_runtime_error_op]] bool = [bool_or] - my_mappings = text + counts + stats + arith_extensions + bool + my_mappings = text + counts + stats + arith_extensions + bool + other Base_Generator.base_dialect . extend_with my_mappings ## PRIVATE @@ -481,3 +482,21 @@ make_custom_cast column target_value_type type_mapping = result.put expr result.get + +## PRIVATE + The RUNTIME_ERROR operation should allow the query to compile fine and it + will not prevent it from running if the branch including this operation is + not taken. But if the branch is computed, it should ensure the query fails. 
+ + This query never returns a value, so its type should be polymorphic. However, + that is not possible - so currently the SQLite dialect just does not handle + inferring a type for it. Thus, it should only be used in places that will not + need client-side type inference (e.g. WHERE clause is ok). + This can be changed in the future, if needed. +make_runtime_error_op arguments = + if arguments.length != 2 then + Panic.throw (Illegal_Argument.Error "RUNTIME_ERROR takes exactly 2 arguments (error message and a variable to ensure deferred execution).") + error_message = arguments.at 0 + variable_to_defer = arguments.at 1 + # We have to ensure that the implementation of SQLite that we use does not have a MATCH function defined which would make the code below succeed. + Builder.code "match('[ENSO INVARIANT VIOLATED: '||" ++ error_message ++ "||'] ', " ++ variable_to_defer ++ ")" diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Error_Mapper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Error_Mapper.enso index d0f53be87dd8..b481b4e05f68 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Error_Mapper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Error_Mapper.enso @@ -1,6 +1,6 @@ from Standard.Base import all -from project.Errors import SQL_Error +from project.Errors import SQL_Error, Invariant_Violation polyglot java import org.sqlite.SQLiteErrorCode polyglot java import org.sqlite.SQLiteException @@ -14,3 +14,13 @@ type SQLite_Error_Mapper case error.java_exception of sqlite_exception : SQLiteException -> sqlite_exception.getResultCode == SQLiteErrorCode.SQLITE_CONSTRAINT_PRIMARYKEY + + ## PRIVATE + transform_custom_errors : SQL_Error -> Any + transform_custom_errors error = + invariant_failure_message = "unable to use function MATCH in the requested context" + message = error.java_exception.getMessage + if message.contains 
invariant_failure_message . not then error else + query_contains_invariant_check = error.related_query.if_nothing "" . contains "ENSO INVARIANT VIOLATED" + if query_contains_invariant_check . not then error else + Invariant_Violation.Error Nothing error diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso index a87f5c199eef..7e65abd0dbdc 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/SQLite/SQLite_Type_Mapping.enso @@ -179,14 +179,17 @@ operations_map = handle_cast _ = Panic.throw (Illegal_State.Error "Cast relies on its own type inference logic, so this code should never be reached. This is a bug in the Database library.") + handle_runtime_error _ = + Panic.throw (Illegal_State.Error "RUNTIME_ERROR should not be part of direct type inference, so this code should never be reached. 
This is a bug in the Database library.") + always_boolean_ops = ["==", "!=", "equals_ignore_case", ">=", "<=", "<", ">", "BETWEEN", "AND", "OR", "NOT", "IS_NULL", "IS_EMPTY", "LIKE", "IS_IN", "IS_IN_COLUMN", "starts_with", "ends_with", "contains", "BOOL_OR", "IS_INF"] always_floating_ops = ["/", "mod", "AVG", "STDDEV_POP", "STDDEV_SAMP", "ROUND"] always_text_ops = ["ADD_TEXT", "CONCAT", "CONCAT_QUOTE_IF_NEEDED", "MAKE_CASE_SENSITIVE", "FOLD_CASE", "TRIM", "LTRIM", "RTRIM", "REPLACE"] - always_integer_ops = ["COUNT", "COUNT_IS_NULL", "COUNT_DISTINCT", "COUNT_DISTINCT_INCLUDE_NULL", "COUNT_EMPTY", "COUNT_NOT_EMPTY", "COUNT_ROWS", "ROW_NUMBER"] + always_integer_ops = ["COUNT", "COUNT_IS_NULL", "COUNT_DISTINCT", "COUNT_DISTINCT_INCLUDE_NULL", "COUNT_EMPTY", "COUNT_NOT_EMPTY", "COUNT_ROWS", "ROW_NUMBER", "ROW_NUMBER_IN_GROUP"] same_as_first = ["TRUNCATE", "CEIL", "FLOOR"] arithmetic_ops = ["ADD_NUMBER", "-", "*", "^", "%", "SUM"] merge_input_types_ops = ["ROW_MAX", "ROW_MIN", "MAX", "MIN", "FILL_NULL", "COALESCE"] - others = [["IIF", handle_iif], ["CAST", handle_cast], ["CASE", handle_case]] + others = [["IIF", handle_iif], ["CAST", handle_cast], ["CASE", handle_case], ["RUNTIME_ERROR", handle_runtime_error]] Map.from_vector <| v1 = always_boolean_ops.map [_, const SQLite_Types.boolean] v2 = always_floating_ops.map [_, const SQLite_Types.real] diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index 229495bf2da1..f2bd8c35691a 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -1666,28 +1666,27 @@ type Table @on Widget_Helpers.make_join_condition_selector join : Table -> Join_Kind -> Vector (Join_Condition | Text) | Text -> Text -> Problem_Behavior -> Table join self right:Table (join_kind : Join_Kind = Join_Kind.Left_Outer) on=[Join_Condition.Equals self.column_names.first] 
right_prefix="Right " on_problems=Report_Warning = Out_Of_Memory.handle_java_exception "join" <| - if check_table "right" right then - # [left_unmatched, matched, right_unmatched] - rows_to_keep = case join_kind of - Join_Kind.Inner -> [False, True, False] - Join_Kind.Left_Outer -> [True, True, False] - Join_Kind.Right_Outer -> [False, True, True] - Join_Kind.Full -> [True, True, True] - Join_Kind.Left_Exclusive -> [True, False, False] - Join_Kind.Right_Exclusive -> [False, False, True] - - columns_to_keep = case join_kind of - Join_Kind.Left_Exclusive -> [True, False] - Join_Kind.Right_Exclusive -> [False, True] - _ -> [True, True] - - join_resolution = make_join_helpers self right . resolve on on_problems - right_columns_to_drop = if join_kind == Join_Kind.Inner then join_resolution.redundant_column_names else [] - - java_conditions = join_resolution.conditions - new_java_table = Java_Problems.with_problem_aggregator on_problems java_aggregator-> - self.java_table.join right.java_table java_conditions (rows_to_keep.at 0) (rows_to_keep.at 1) (rows_to_keep.at 2) (columns_to_keep.at 0) (columns_to_keep.at 1) right_columns_to_drop right_prefix java_aggregator - Table.Value new_java_table + # [left_unmatched, matched, right_unmatched] + rows_to_keep = case join_kind of + Join_Kind.Inner -> [False, True, False] + Join_Kind.Left_Outer -> [True, True, False] + Join_Kind.Right_Outer -> [False, True, True] + Join_Kind.Full -> [True, True, True] + Join_Kind.Left_Exclusive -> [True, False, False] + Join_Kind.Right_Exclusive -> [False, False, True] + + columns_to_keep = case join_kind of + Join_Kind.Left_Exclusive -> [True, False] + Join_Kind.Right_Exclusive -> [False, True] + _ -> [True, True] + + join_resolution = make_join_helpers self right . 
resolve on on_problems + right_columns_to_drop = if join_kind == Join_Kind.Inner then join_resolution.redundant_column_names else [] + + java_conditions = join_resolution.conditions + new_java_table = Java_Problems.with_problem_aggregator on_problems java_aggregator-> + self.java_table.join right.java_table java_conditions (rows_to_keep.at 0) (rows_to_keep.at 1) (rows_to_keep.at 2) (columns_to_keep.at 0) (columns_to_keep.at 1) right_columns_to_drop right_prefix java_aggregator + Table.Value new_java_table ## ALIAS cartesian join GROUP Standard.Base.Calculations @@ -1726,15 +1725,14 @@ type Table layout order while for database tables the order may be unspecified). cross_join : Table -> Integer | Nothing -> Text -> Problem_Behavior -> Table cross_join self right:Table right_row_limit=100 right_prefix="Right " on_problems=Report_Warning = Out_Of_Memory.handle_java_exception "cross_join" <| - if check_table "right" right then - limit_problems = case right_row_limit.is_nothing.not && (right.row_count > right_row_limit) of - True -> - [Cross_Join_Row_Limit_Exceeded.Error right_row_limit right.row_count] - False -> [] - on_problems.attach_problems_before limit_problems <| - new_java_table = Java_Problems.with_problem_aggregator on_problems java_aggregator-> - self.java_table.crossJoin right.java_table right_prefix java_aggregator - Table.Value new_java_table + limit_problems = case right_row_limit.is_nothing.not && (right.row_count > right_row_limit) of + True -> + [Cross_Join_Row_Limit_Exceeded.Error right_row_limit right.row_count] + False -> [] + on_problems.attach_problems_before limit_problems <| + new_java_table = Java_Problems.with_problem_aggregator on_problems java_aggregator-> + self.java_table.crossJoin right.java_table right_prefix java_aggregator + Table.Value new_java_table ## Replaces values in this table by values from a lookup table. New values are looked up in the lookup table based on the `key_columns`. 
@@ -1771,8 +1769,8 @@ type Table specified in `key_columns`, a `Missing_Input_Columns` error is raised. - If an empty vector is provided for `key_columns`, a `No_Input_Columns_Selected` error is raised. - - If the lookup table contains multiple rows with the same values in - the `key_columns`, an `Non_Unique_Key` error is raised. + - If a single row is matched by multiple entries in the lookup table, + a `Non_Unique_Key` error is raised. - If a column that is being updated from the lookup table has a type that is not compatible with the type of the corresponding column in this table, a `No_Common_Type` error is raised. @@ -1874,20 +1872,19 @@ type Table @keep_unmatched (make_single_choice [["True", "Boolean.True"], ["False", "Boolean.False"], ["Report", Meta.get_qualified_type_name Report_Unmatched]]) zip : Table -> Boolean | Report_Unmatched -> Text -> Problem_Behavior -> Table zip self right:Table keep_unmatched=Report_Unmatched right_prefix="Right " on_problems=Report_Warning = - if check_table "right" right then - keep_unmatched_bool = case keep_unmatched of - Report_Unmatched -> True - b : Boolean -> b - report_mismatch = keep_unmatched == Report_Unmatched - - left_row_count = self.row_count - right_row_count = right.row_count - problems = if (left_row_count == right_row_count) || report_mismatch.not then [] else - [Row_Count_Mismatch.Error left_row_count right_row_count] - on_problems.attach_problems_before problems <| - new_java_table = Java_Problems.with_problem_aggregator on_problems java_aggregator-> - self.java_table.zip right.java_table keep_unmatched_bool right_prefix java_aggregator - Table.Value new_java_table + keep_unmatched_bool = case keep_unmatched of + Report_Unmatched -> True + b : Boolean -> b + report_mismatch = keep_unmatched == Report_Unmatched + + left_row_count = self.row_count + right_row_count = right.row_count + problems = if (left_row_count == right_row_count) || report_mismatch.not then [] else + [Row_Count_Mismatch.Error 
left_row_count right_row_count] + on_problems.attach_problems_before problems <| + new_java_table = Java_Problems.with_problem_aggregator on_problems java_aggregator-> + self.java_table.zip right.java_table keep_unmatched_bool right_prefix java_aggregator + Table.Value new_java_table ## ALIAS append, concat GROUP Standard.Base.Calculations @@ -1980,12 +1977,11 @@ type Table retyped to the `Mixed` type to indicate that intention. Note that the `Mixed` type may not be supported by most Database backends. union : (Table | Vector Table) -> Match_Columns -> Boolean | Report_Unmatched -> Boolean -> Problem_Behavior -> Table - union self tables match_columns=Match_Columns.By_Name keep_unmatched_columns=Report_Unmatched allow_type_widening=True on_problems=Report_Warning = + union self tables:Vector|Table match_columns=Match_Columns.By_Name keep_unmatched_columns=Report_Unmatched allow_type_widening=True on_problems=Report_Warning = all_tables = case tables of - v : Vector -> [self] + v - single_column : Column -> [self, single_column.to_table] + v : Vector -> [self] + (v.map t-> Table.from t) single_table -> [self, single_table] - all_tables.all (check_table "tables") . if_not_error <| + all_tables.if_not_error <| problem_builder = Problem_Builder.new matched_column_sets = Match_Columns_Helpers.match_columns all_tables match_columns keep_unmatched_columns problem_builder result_row_count = all_tables.fold 0 c-> t-> c + t.row_count @@ -2531,17 +2527,6 @@ make_join_helpers left_table right_table = Java_Join_Between.new left.java_column right_lower.java_column right_upper.java_column Join_Helpers.Join_Condition_Resolver.Value (left_table.at _) (right_table.at _) make_equals make_equals_ignore_case make_between -## PRIVATE - Checks if the argument is a proper table and comes from the current backend. - It returns True or throws a dataflow error explaining the issue. -check_table arg_name table = - if Table_Helpers.is_table table . 
not then Error.throw (Type_Error.Error Table table arg_name) else - same_backend = table.is_a Table - case same_backend of - False -> - Error.throw (Illegal_Argument.Error "Currently cross-backend operations are not supported. Materialize the table using `.read` before mixing it with an in-memory Table.") - True -> True - ## PRIVATE A helper that efficiently concatenates storages of in-memory columns. concat_columns column_set all_tables result_type result_row_count on_problems = diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso index 7bf5753b57e6..0ecb2af58efb 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Lookup_Helpers.enso @@ -1,29 +1,32 @@ from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import project.Data.Type.Storage import project.Data.Type.Value_Type.Value_Type import project.Data.Type.Value_Type_Helpers -from project.Errors import Missing_Input_Columns, Unexpected_Extra_Columns, Floating_Point_Equality, No_Common_Type +from project.Errors import Missing_Input_Columns, Unexpected_Extra_Columns, Floating_Point_Equality, No_Common_Type, No_Output_Columns polyglot java import org.enso.table.data.table.join.LookupColumnDescription -## PRIVATE -type Lookup_Settings - ## PRIVATE - Value (key_columns : Vector Text) (columns_to_update : Set Text) (columns_to_add : Vector Text) - ## PRIVATE type Lookup_Column ## PRIVATE + A key column used to correlate rows between the two tables. Key_Column base_column lookup_column ## PRIVATE + A column from the base table that is kept as-is. Keep_Column base_column ## PRIVATE + A column that is present in both tables. + If the row is matched with a row from the lookup table, the value from + the lookup table is used; otherwise the value from the base table is kept. 
Replace_Column base_column lookup_column (common_type : Value_Type) ## PRIVATE + A new column added from the lookup table. + If the row has no match, it will be `Nothing`. Add_Column lookup_column ## PRIVATE @@ -38,7 +41,8 @@ type Lookup_Column It also reports any errors or warnings related to selecting these columns. prepare_columns_for_lookup base_table lookup_table key_columns_selector add_new_columns allow_unmatched_rows on_problems = - key_columns = base_table.select_columns key_columns_selector . column_names + key_columns = base_table.select_columns key_columns_selector . column_names . catch No_Output_Columns _-> + Error.throw (Illegal_Argument.Error "At least one key column must be provided for `lookup_and_replace`.") lookup_table_key_columns = lookup_table.select_columns key_columns . catch Missing_Input_Columns error-> Error.throw (Missing_Input_Columns.Error error.criteria "the lookup table") diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso index 85980850954a..10930b43d4e8 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso @@ -452,15 +452,6 @@ filter_blank_rows table when_any treat_nans_as_blank = table.filter non_missing_mask Filter_Condition.Is_True False -> table -## PRIVATE - Checks if the given object implements a Table interface. - - Currently, it relies on a hack - it checks if the object has a method - `is_table` and if it returns `True`. -is_table obj = - known_types = ["Standard.Table.Data.Table.Table", "Standard.Database.Data.Table.Table"] - known_types.contains (Meta.get_qualified_type_name obj) - ## PRIVATE Checks if the given object implements a Table interface. 
diff --git a/std-bits/table/src/main/java/org/enso/table/data/table/join/LookupJoin.java b/std-bits/table/src/main/java/org/enso/table/data/table/join/LookupJoin.java index 44fde8643fb4..77ca5cbdcadb 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/table/join/LookupJoin.java +++ b/std-bits/table/src/main/java/org/enso/table/data/table/join/LookupJoin.java @@ -35,7 +35,6 @@ public static Table lookupAndReplace( LookupJoin joiner = new LookupJoin(keys, columnDescriptions, allowUnmatchedRows, problemAggregator); joiner.checkNullsInKey(); - joiner.verifyLookupUniqueness(); return joiner.join(); } @@ -78,22 +77,6 @@ private void checkNullsInKey() { } } - private void verifyLookupUniqueness() { - if (!lookupIndex.isUnique()) { - // Find the duplicated key - for (Map.Entry> group : lookupIndex.mapping().entrySet()) { - int groupSize = group.getValue().size(); - if (groupSize > 1) { - UnorderedMultiValueKey key = group.getKey(); - List exampleValues = IntStream.range(0, keyColumnNames.size()).mapToObj(key::get).toList(); - throw new NonUniqueLookupKey(keyColumnNames, exampleValues, groupSize); - } - } - - assert false : "isUnique returned false, but no duplicated key was found."; - } - } - private Table join() { List outputColumns = columnDescriptions.stream().map(this::prepareOutputColumn).toList(); List columnsToMerge = @@ -145,8 +128,12 @@ private int findLookupRow(int baseRowIx) { } } + if (lookupRowIndices.size() > 1) { + List exampleValues = IntStream.range(0, keyColumnNames.size()).mapToObj(key::get).toList(); + throw new NonUniqueLookupKey(keyColumnNames, exampleValues, lookupRowIndices.size()); + } + assert !lookupRowIndices.isEmpty() : "No Index group should be empty."; - assert lookupRowIndices.size() == 1 : "This should have been checked in verifyLookupUniqueness()"; return lookupRowIndices.get(0); } diff --git a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso index 
b0c25350396e..706c6d03953c 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Core_Spec.enso @@ -3,10 +3,13 @@ import Standard.Base.Errors.Common.Arithmetic_Error import Standard.Base.Errors.Common.Index_Out_Of_Bounds import Standard.Base.Errors.Illegal_Argument.Illegal_Argument -from Standard.Table import Set_Mode +from Standard.Table import all from Standard.Table.Errors import all import Standard.Table.Data.Expression.Expression_Error +from Standard.Database import all +from Standard.Database.Errors import Integrity_Error + from Standard.Test import Test, Problems import Standard.Test.Extensions @@ -163,6 +166,17 @@ spec setup = problems2 = [Arithmetic_Error.Error "Division by zero (at rows [2])."] Problems.test_problem_handling action2 problems2 tester2 + Test.specify "should gracefully handle columns from different backends" <| + t1 = table_builder [["A", [1, 2, 3]]] + alternative_connection = Database.connect (SQLite In_Memory) + t0 = (Table.new [["X", [3, 2, 1]]]).select_into_database_table alternative_connection "T0" temporary=True + c0 = t0.at "X" + + r1 = t1.set c0 + case setup.is_database of + True -> r1.should_fail_with Integrity_Error + False -> r1.should_fail_with Illegal_Argument + Test.group prefix+"Table.column_names" <| Test.specify "should return the names of all columns" <| table.column_names . 
should_equal ["foo", "bar", "Baz", "foo 1", "foo 2", "ab.+123", "abcd123"] diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso index f5310ab225c7..08280a532163 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Cross_Join_Spec.enso @@ -1,15 +1,17 @@ from Standard.Base import all - import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State -import Standard.Table.Data.Join_Kind_Cross.Join_Kind_Cross -import Standard.Test.Extensions -from Standard.Database.Errors import Unsupported_Database_Operation -from Standard.Table import all hiding Table +from Standard.Table import all from Standard.Table.Errors import all +import Standard.Table.Data.Join_Kind_Cross.Join_Kind_Cross + +from Standard.Database import all +from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error + from Standard.Test import Test, Problems +import Standard.Test.Extensions from project.Common_Table_Operations.Util import expect_column_names, run_default_backend @@ -159,3 +161,11 @@ spec setup = t1 = table_builder [["X", [1, 2]], ["Y", [4, 5]]] t2 = table_builder [["Z", ['a', 'b']], ["W", ['c', 'd']]] Test.expect_panic_with (t1.join t2 join_kind=Join_Kind_Cross.Cross on=[]) Type_Error + + Test.specify "should gracefully handle tables from different backends" <| + t1 = table_builder [["A", ["a", "b"]]] + alternative_connection = Database.connect (SQLite In_Memory) + t0 = (Table.new [["X", [1, 2, 4]], ["Z", [10, 20, 30]]]).select_into_database_table alternative_connection "T0" temporary=True + + r1 = t1.cross_join t0 + r1.should_fail_with Illegal_Argument diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso 
b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso index 9b5d85ecc35c..fcade1cf466e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Join_Spec.enso @@ -1,10 +1,12 @@ from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State -from Standard.Table import all hiding Table +from Standard.Table import all from Standard.Table.Errors import all -from Standard.Database.Errors import Unsupported_Database_Operation +from Standard.Database import all +from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error from Standard.Test import Test, Problems import Standard.Test.Extensions @@ -647,3 +649,10 @@ spec setup = r3.at 2 . should_equal [Nothing, 10, 7, 7, 200] r3.at 3 . should_equal [2, 20, 2, Nothing, Nothing] r3.at 4 . should_equal [3, 30, 3, Nothing, Nothing] + + Test.specify "should gracefully handle tables from different backends" <| + alternative_connection = Database.connect (SQLite In_Memory) + t0 = (Table.new [["X", [1, 2, 4]], ["Z", [10, 20, 30]]]).select_into_database_table alternative_connection "T0" temporary=True + + r1 = t1.join t0 + r1.should_fail_with Illegal_Argument diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso index 6b7e5b0b521f..7c5080951e9f 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Lookup_Spec.enso @@ -1,9 +1,11 @@ from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument from Standard.Table import all from Standard.Table.Errors import all -from Standard.Database.Errors import Unsupported_Database_Operation +from Standard.Database import all +from Standard.Database.Errors import Unsupported_Database_Operation, 
Invariant_Violation, Integrity_Error from Standard.Test import Test, Problems import Standard.Test.Extensions @@ -17,8 +19,7 @@ spec setup = prefix = setup.prefix table_builder = setup.table_builder materialize = setup.materialize - db_pending = if setup.is_database then "Not implemented yet. TODO: https://github.com/enso-org/enso/issues/7981" - Test.group prefix+"Table.lookup_and_replace" pending=db_pending <| + Test.group prefix+"Table.lookup_and_replace" <| Test.specify "should allow to simply update columns based on a lookup table" <| lookup = table_builder [["Y", ["A", "B", "A"]], ["X", [1, 2, 3]]] my_table = table_builder [["X", [1, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] @@ -149,17 +150,42 @@ spec setup = m2.at "Y" . to_vector . should_equal ["A", "B", "A", "B"] m2.at "Z" . to_vector . should_equal [100, 200, 100, 100] - Test.specify "will fail on duplicate keys in the lookup table" <| + Test.specify "will fail on duplicate matches in the lookup table" <| lookup = table_builder [["X", [1, 2, 1]], ["Y", ["A", "B", "C"]]] my_table = table_builder [["X", [4, 2, 3, 2]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] - r2 = my_table.lookup_and_replace lookup key_columns=["X"] + # If the duplicates do not show up in result - it is accepted. + t2 = my_table.lookup_and_replace lookup key_columns=["X"] + t2.column_names . should_equal ["X", "Y", "Z"] + m2 = t2 |> materialize |> _.order_by "Z" + m2.at "Z" . to_vector . should_equal [10, 20, 30, 40] + m2.at "X" . to_vector . should_equal [4, 2, 3, 2] + m2.at "Y" . to_vector . should_equal ["Z", "B", "ZZZ", "B"] + + my_table2 = table_builder [["X", [1, 2]], ["Y", ["Z", "ZZ"]], ["Z", [10, 20]]] + r2 = my_table2.lookup_and_replace lookup key_columns=["X"] r2.should_fail_with Non_Unique_Key + r2.catch.key_column_names . should_equal ["X"] + r2.catch.clashing_example_key_values . should_equal [1] + r2.catch.clashing_example_row_count . 
should_equal 2 lookup2 = table_builder [["X", [1, 1]], ["Y", ["A", "A"]], ["Z", [100, 100]]] - r3 = my_table.lookup_and_replace lookup2 key_columns=["X", "Y"] + Problems.assume_no_problems <| my_table.lookup_and_replace lookup2 key_columns=["X", "Y"] + r3 = my_table2.lookup_and_replace lookup2 key_columns=["X"] r3.should_fail_with Non_Unique_Key + m3 = my_table2.lookup_and_replace lookup2 key_columns=["X", "Y"] |> materialize |> _.order_by "Z" + m3.at "Z" . to_vector . should_equal [10, 20] + m3.at "X" . to_vector . should_equal [1, 2] + m3.at "Y" . to_vector . should_equal ["Z", "ZZ"] + + my_table3 = table_builder [["X", [1, 1, 2]], ["Y", ["A", "Z", "ZZ"]], ["Z", [10, 20, 30]]] + r4 = my_table3.lookup_and_replace lookup2 key_columns=["X", "Y"] + r4.should_fail_with Non_Unique_Key + r4.catch.key_column_names . should_equal ["X", "Y"] + r4.catch.clashing_example_key_values . should_equal [1, "A"] + r4.catch.clashing_example_row_count . should_equal 2 + Test.specify "will preserve count of rows, even if there are duplicates" <| lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] my_table = table_builder [["X", [1, 2, 2, 2, 1]], ["Z", [10, 20, 20, 20, 50]]] @@ -171,6 +197,64 @@ spec setup = m2.at "Y" . to_vector . should_equal ["A", "B", "B", "B", "A"] m2.at "Z" . to_vector . 
should_equal [10, 20, 20, 20, 50] + Test.specify "should correctly preserve types of original, merged and added columns" <| + table = table_builder [["key1", [0, 1]], ["key2", ["o", "?"]], ["X", [1, 10]], ["Y", ["A", "E"]], ["Z", [1.5, 2.0]], ["W", [True, False]], ["A", [2, 22]], ["B", ["1", "2"]], ["C", [2.0, 2.5]], ["D", [False, False]]] + lookup = table_builder [["key1", [0, 2]], ["key2", ["o", "?"]], ["X2", [100, 1000]], ["Y2", ["foo", "bar"]], ["Z2", [0.5, 4.0]], ["W2", [False, True]], ["A", [3, 55]], ["B", ["F", "F"]], ["C", [3.0, 10.5]], ["D", [True, False]]] + + [True, False].each allow_unmatched_rows-> + table_prepared = if allow_unmatched_rows then table else + # If we don't allow unmatched rows, we filter the input to only contain the matching row - this test is not about matching, but about the types. + table.filter "key1" (Filter_Condition.Equal to=0) + t2 = table_prepared.lookup_and_replace lookup key_columns=["key1", "key2"] allow_unmatched_rows=allow_unmatched_rows + t2.column_names . should_equal ["key1", "key2", "X", "Y", "Z", "W", "A", "B", "C", "D", "X2", "Y2", "Z2", "W2"] + + t2.at "key1" . value_type . should_be_a (Value_Type.Integer ...) + t2.at "key2" . value_type . should_equal Value_Type.Char + + t2.at "X" . value_type . should_be_a (Value_Type.Integer ...) + t2.at "Y" . value_type . should_equal Value_Type.Char + t2.at "Z" . value_type . should_equal Value_Type.Float + t2.at "W" . value_type . should_equal Value_Type.Boolean + + t2.at "A" . value_type . should_be_a (Value_Type.Integer ...) + t2.at "B" . value_type . should_equal Value_Type.Char + t2.at "C" . value_type . should_equal Value_Type.Float + t2.at "D" . value_type . should_equal Value_Type.Boolean + + t2.at "X2" . value_type . should_be_a (Value_Type.Integer ...) + t2.at "Y2" . value_type . should_equal Value_Type.Char + t2.at "Z2" . value_type . should_equal Value_Type.Float + t2.at "W2" . value_type . 
should_equal Value_Type.Boolean + + if setup.test_selection.fixed_length_text_columns then Test.specify "should correctly preserve types of original, merged and added columns (various Char types test case)" <| + table2 = table_builder [["key", ["0"]], ["X", ["a"]], ["A", ["bbbbb"]]] . cast "key" (Value_Type.Char size=50) . cast "X" (Value_Type.Char size=1) . cast "A" (Value_Type.Char size=5 variable_length=False) + lookup2 = table_builder [["key", ["0"]], ["X2", ["ccc"]], ["A", ["dddd"]]] . cast "key" (Value_Type.Char size=100) . cast "X2" (Value_Type.Char size=3 variable_length=False) . cast "A" (Value_Type.Char size=4 variable_length=False) + + table2.at "key" . value_type . should_equal (Value_Type.Char size=50 variable_length=True) + table2.at "X" . value_type . should_equal (Value_Type.Char size=1 variable_length=True) + table2.at "A" . value_type . should_equal (Value_Type.Char size=5 variable_length=False) + + lookup2.at "key" . value_type . should_equal (Value_Type.Char size=100 variable_length=True) + lookup2.at "X2" . value_type . should_equal (Value_Type.Char size=3 variable_length=False) + lookup2.at "A" . value_type . should_equal (Value_Type.Char size=4 variable_length=False) + + [True, False].each allow_unmatched_rows-> + t3 = table2.lookup_and_replace lookup2 key_columns=["key"] allow_unmatched_rows=allow_unmatched_rows + t3.column_names . should_equal ["key", "X", "A", "X2"] + + # The key column keeps its original type + t3.at "key" . value_type . should_equal (Value_Type.Char size=50 variable_length=True) + t3.at "X" . value_type . should_equal (Value_Type.Char size=1 variable_length=True) + t3.at "X2" . value_type . should_equal (Value_Type.Char size=3 variable_length=False) + + case allow_unmatched_rows of + True -> + # The merged column gets a merged type (max size in that case, but gets variable length due to differences), because it may contain both old and new values. + t3.at "A" . value_type . 
should_equal (Value_Type.Char size=5 variable_length=True) + False -> + # If unmatched rows are not allowed, we can guarantee only _new_ values (from the lookup table) will be in the result, so instead of merging the type we inherit the type from the lookup table. + t3.at "A" . value_type . should_equal (Value_Type.Char size=4 variable_length=False) + Test.specify "will report Floating_Point_Equality if floating-point columns are used as key" <| lookup = table_builder [["X", [1.0, 2.0, 3.0]], ["Y", ["A", "B", "C"]]] my_table = table_builder [["X", [2.0, 3.0, 2.0, 3.0]], ["Y", ["Z", "ZZ", "ZZZ", "ZZZZ"]], ["Z", [10, 20, 30, 40]]] @@ -224,6 +308,17 @@ spec setup = m2.at "Y" . to_vector . should_equal ["B", Nothing, Nothing, Nothing] m2.at "Z" . to_vector . should_equal [10, 20, 30, 40] + # But NULLs in source are ok, but mean such rows will never match: + r3 = my_table2.lookup_and_replace lookup2 key_columns="X" allow_unmatched_rows=False + r3.should_fail_with Unmatched_Rows_In_Lookup + + Test.specify "will not allow providing no key_columns" <| + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + my_table = table_builder [["X", [2, 1]], ["Z", [10, 20]]] + + r2 = my_table.lookup_and_replace lookup key_columns=[] + r2.should_fail_with Illegal_Argument + if setup.is_database.not then Test.specify "(in-memory only) will preserve the order of rows from the original table" <| lookup = table_builder [["Y", [1, 0]], ["V", ["TRUE", "FALSE"]]] xs = 0.up_to 50 . to_vector @@ -237,3 +332,106 @@ spec setup = t2.at "Y" . to_vector . should_equal ys vs = xs.map x-> if (x%2) == 1 then "TRUE" else "FALSE" t2.at "V" . to_vector . 
should_equal vs + + if setup.is_database then Test.specify "(database-only) will fail if pre-checked invariants get invalidated between the query is constructed and then materialized" <| + Test.with_clue "(lookup is unique check) " <| + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] + + r1 = table.lookup_and_replace lookup key_columns="X" + # Immediately, the query is all good. + Problems.assume_no_problems r1 + + m1 = r1 |> materialize |> _.order_by "Z" + m1.at "X" . to_vector . should_equal [1, 2, 2] + m1.at "Y" . to_vector . should_equal ["A", "B", "B"] + m1.at "Z" . to_vector . should_equal [10, 20, 30] + + # Now, we change the table, so that the lookup is no longer unique - even if the values are the same - we care about row uniqueness. + Problems.assume_no_problems <| + lookup.update_rows (Table.new [["X", [2]], ["Y", ["B"]]]) key_columns=[] update_action=Update_Action.Insert + lookup.filter "X" (Filter_Condition.Equal to=2) . row_count . should_equal 2 + + # Now, the query is no longer valid: + m2 = r1 |> materialize + m2.should_fail_with Invariant_Violation + m2.catch.to_display_text . should_contain "database has been modified" + m2.catch.to_display_text . should_contain "re-run the workflow" + + ## Now, even if we drop the lookup-related columns, we still should get an error + (otherwise we would risk duplicating records due to the non-uniqueness!) + r3 = r1.select_columns ["Z"] + Problems.assume_no_problems r3 + + m3 = r3 |> materialize + ## Alternatively, theoretically, this query could succeed + (knowing that it no longer depends on the lookup at all). + But if it does so, we need to guarantee that we do not get duplicated rows in the result: + + r3.row_count . should_equal 3 + r3.at "Z" . to_vector . length . should_equal 3 + r3.at "Z" . to_vector . 
should_contain_the_same_elements_as [10, 20, 30] + m3.should_fail_with Invariant_Violation + r3.at "Z" . to_vector . should_fail_with Invariant_Violation + + Test.with_clue "(no unmatched rows check - added a row in source) " <| + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] + + r1 = table.lookup_and_replace lookup key_columns="X" allow_unmatched_rows=False + # Immediately, the query is all good. + Problems.assume_no_problems r1 + + m1 = r1 |> materialize |> _.order_by "Z" + m1.at "X" . to_vector . should_equal [1, 2, 2] + m1.at "Y" . to_vector . should_equal ["A", "B", "B"] + m1.at "Z" . to_vector . should_equal [10, 20, 30] + + # Now, we change the source table, so that there are unmatched rows: + Problems.assume_no_problems <| + table.update_rows (Table.new [["X", [33]], ["Y", ["F"]], ["Z", [40]]]) key_columns=[] update_action=Update_Action.Insert + table.row_count . should_equal 4 + + # Now, the query is no longer valid: + m2 = r1 |> materialize + m2.should_fail_with Invariant_Violation + + Test.with_clue "(no unmatched rows check - removed a row in lookup) " <| + lookup = table_builder [["X", [1, 2]], ["Y", ["A", "B"]]] + table = table_builder [["X", [1, 2, 2]], ["Y", ["Z", "ZZ", "ZZZ"]], ["Z", [10, 20, 30]]] + + r1 = table.lookup_and_replace lookup key_columns="X" allow_unmatched_rows=False + # Immediately, the query is all good. + Problems.assume_no_problems r1 + + m1 = r1 |> materialize |> _.order_by "Z" + m1.at "X" . to_vector . should_equal [1, 2, 2] + m1.at "Y" . to_vector . should_equal ["A", "B", "B"] + m1.at "Z" . to_vector . should_equal [10, 20, 30] + + # Now, we change the lookup table, so that there are unmatched rows: + Problems.assume_no_problems <| + lookup.delete_rows (Table.new [["X", [1]]]) key_columns=["X"] + lookup.row_count . 
should_equal 1 + + # Now, the query is no longer valid: + m2 = r1 |> materialize + m2.should_fail_with Invariant_Violation + + # This does not seem useful really, but there is no reason to disallow it, so we should ensure it does not crash. + Test.specify "(edge-case) should allow lookup with itself" <| + table = table_builder [["X", [1, 2, 3]], ["Y", ["A", "B", "C"]]] + t2 = table.lookup_and_replace table key_columns="X" + t2.column_names . should_equal ["X", "Y"] + + m2 = t2 |> materialize |> _.order_by "X" + m2.at "X" . to_vector . should_equal [1, 2, 3] + m2.at "Y" . to_vector . should_equal ["A", "B", "C"] + + Test.specify "should gracefully handle tables from different backends" <| + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + alternative_connection = Database.connect (SQLite In_Memory) + t0 = (Table.new [["A", [3, 2, 1]], ["B", ["x", "y", "z"]]]).select_into_database_table alternative_connection "T0" temporary=True + + r1 = t1.lookup_and_replace t0 key_columns="A" + r1.should_fail_with Illegal_Argument diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso index 79ac41a5c2eb..87fd037b5d0d 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Union_Spec.enso @@ -1,10 +1,12 @@ from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Table.Data.Type.Value_Type.Bits from Standard.Table import all from Standard.Table.Errors import all -from Standard.Database.Errors import Unsupported_Database_Operation +from Standard.Database import all +from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error from Standard.Test import Test, Problems import Standard.Test.Extensions @@ -393,3 +395,12 @@ spec setup = # And this should report Column_Type_Mismatch as the more important error too. 
#t1.union t2 allow_type_widening=False on_problems=Problem_Behavior.Report_Error . should_fail_with Column_Type_Mismatch t1.union t2 allow_type_widening=False on_problems=Problem_Behavior.Report_Error . should_fail_with No_Output_Columns + + Test.specify "should gracefully handle tables from different backends" <| + t1 = table_builder [["A", [1, 2, 3]], ["B", ["a", "b", "c"]]] + + alternative_connection = Database.connect (SQLite In_Memory) + t0 = (Table.new [["A", [1, 2, 4]], ["B", ["10", "20", "30"]]]).select_into_database_table alternative_connection "T0" temporary=True + + r1 = t1.union t0 + r1.should_fail_with Illegal_Argument diff --git a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso index 5011cf4095c3..ebe88b0ecdfd 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Join/Zip_Spec.enso @@ -1,10 +1,12 @@ from Standard.Base import all +import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State -from Standard.Table import all hiding Table +from Standard.Table import all from Standard.Table.Errors import all -from Standard.Database.Errors import Unsupported_Database_Operation +from Standard.Database import all +from Standard.Database.Errors import Unsupported_Database_Operation, Integrity_Error from Standard.Test import Test, Problems import Standard.Test.Extensions @@ -236,3 +238,12 @@ spec setup = padded.at "times" . value_type . should_equal Value_Type.Time padded.at "datetimes" . value_type . should_equal Value_Type.Date_Time padded.at "mixed" . value_type . 
should_equal Value_Type.Mixed + + Test.specify "should gracefully handle tables from different backends" <| + t1 = table_builder [["X", [1, 2, 3]], ["Y", [4, 5, 6]]] + + alternative_connection = Database.connect (SQLite In_Memory) + t0 = (Table.new [["W", [1, 2, 4]]]).select_into_database_table alternative_connection "T0" temporary=True + + r1 = t1.zip t0 + r1.should_fail_with Illegal_Argument diff --git a/test/Table_Tests/src/Database/Common/Common_Spec.enso b/test/Table_Tests/src/Database/Common/Common_Spec.enso index 51ddddb553cb..27d5beadf57d 100644 --- a/test/Table_Tests/src/Database/Common/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common/Common_Spec.enso @@ -135,6 +135,54 @@ run_tests prefix connection upload = r3 = connection.query "SELECT * FROM ........" r3.should_fail_with SQL_Error + Test.specify "will fail if the table is modified and a column gets removed" <| + name = Name_Generator.random_name "removing-column" + Problems.assume_no_problems <| + (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table connection name temporary=True + + t1 = connection.query name + m1 = t1.read + Problems.assume_no_problems m1 + m1.at "a" . to_vector . should_equal [1, 2, 3] + m1.at "b" . to_vector . should_equal [4, 5, 6] + + Problems.assume_no_problems <| connection.drop_table name + Problems.assume_no_problems <| + (Table.new [["a", [100, 200]]]).select_into_database_table connection name temporary=True + + # Reading a column that was kept will work OK + t1.at "a" . to_vector . 
should_equal [100, 200] + + # But reading the whole table will fail on the missing column: + m2 = t1.read + m2.should_fail_with SQL_Error + + Test.specify "will not fail if the table is modified and a column gets added" <| + name = Name_Generator.random_name "adding-column" + Problems.assume_no_problems <| + (Table.new [["a", [1, 2, 3]], ["b", [4, 5, 6]]]).select_into_database_table connection name temporary=True + + t1 = connection.query name + m1 = t1.read + Problems.assume_no_problems m1 + m1.at "a" . to_vector . should_equal [1, 2, 3] + m1.at "b" . to_vector . should_equal [4, 5, 6] + + Problems.assume_no_problems <| connection.drop_table name + Problems.assume_no_problems <| + (Table.new [["a", [100, 200]], ["b", [300, 400]], ["c", [500, 600]]]).select_into_database_table connection name temporary=True + + m2 = t1.read + Problems.assume_no_problems m2 + m2.column_names . should_equal ["a", "b"] + m2.at "a" . to_vector . should_equal [100, 200] + m2.at "b" . to_vector . should_equal [300, 400] + + t1.at "c" . should_fail_with No_Such_Column + + t2 = connection.query name + t2.column_names . should_equal ["a", "b", "c"] + Test.group prefix+"Masking Tables" <| Test.specify "should allow to select rows from a table or column based on an expression" <| t2 = t1.filter (t1.at "a" == 1) From 1480f502076d8787e8dcb9c313403ae8239cc781 Mon Sep 17 00:00:00 2001 From: GregoryTravis Date: Tue, 31 Oct 2023 11:25:37 -0400 Subject: [PATCH 04/12] Overhaul the random number and item generation code (#8127) Rewrite most of Random.enso. 
--- .../0.0.0-dev/src/Data/Index_Sub_Range.enso | 13 +- .../Base/0.0.0-dev/src/Data/Noise.enso | 58 --- .../src/Data/Text/Text_Sub_Range.enso | 6 +- .../Base/0.0.0-dev/src/Data/Vector.enso | 7 +- .../lib/Standard/Base/0.0.0-dev/src/Main.enso | 6 +- .../Standard/Base/0.0.0-dev/src/Random.enso | 369 +++++++++++++++--- .../src/Internal/Table_Naming_Helper.enso | 2 +- .../Test/0.0.0-dev/src/Extensions.enso | 58 +++ .../AvoidIdInstrumentationTagTest.java | 6 +- .../org/enso/compiler/ExecCompilerTest.java | 2 +- .../base/random/RandomInstanceHolder.java | 38 ++ .../enso/base/{ => random}/Random_Utils.java | 7 +- test/Benchmarks/src/Map/Hash_Map.enso | 3 +- test/Benchmarks/src/Table/Add_Row_Number.enso | 2 +- .../src/Table/Internal/Multi_Value_Key.enso | 6 +- .../Take_Drop_Spec.enso | 7 + .../src/Database/Helpers/Name_Generator.enso | 3 +- test/Tests/src/Data/Noise/Generator_Spec.enso | 20 - test/Tests/src/Data/Noise_Spec.enso | 21 - test/Tests/src/Data/Vector_Spec.enso | 17 +- test/Tests/src/Main.enso | 4 - test/Tests/src/Random_Spec.enso | 170 +++++--- test/Tests/src/Semantic/Equals_Spec.enso | 2 +- 23 files changed, 583 insertions(+), 244 deletions(-) create mode 100644 std-bits/base/src/main/java/org/enso/base/random/RandomInstanceHolder.java rename std-bits/base/src/main/java/org/enso/base/{ => random}/Random_Utils.java (87%) delete mode 100644 test/Tests/src/Data/Noise/Generator_Spec.enso delete mode 100644 test/Tests/src/Data/Noise_Spec.enso diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso index 7c48d030b2c6..f0bde74827e9 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Index_Sub_Range.enso @@ -12,13 +12,14 @@ import project.Math import project.Meta import project.Metadata.Display import project.Metadata.Widget +import project.Nothing.Nothing import project.Panic.Panic 
-import project.Random import project.Runtime.Ref.Ref from project.Data.Boolean import Boolean, False, True from project.Data.Range.Extensions import all from project.Metadata.Choice import Option from project.Metadata.Widget import Single_Choice +from project.Random import Random, get_default_seed type Index_Sub_Range ## Select the first `count` items. @@ -52,7 +53,7 @@ type Index_Sub_Range If `count` is greater than the length of the input, a random permutation of all elements from the input is selected. - Sample (count:Integer) (seed:Integer=Random.get_default_seed) + Sample (count:Integer) (seed:Integer|Nothing=Nothing) ## Gets every Nth entry. @@ -211,8 +212,8 @@ take_helper length at single_slice slice_ranges range = case range of trimmed = resolve_ranges indices length slice_ranges trimmed Index_Sub_Range.Sample count seed -> - rng = Random.new seed - indices_to_take = Random.random_indices length count rng + rng = Random.new_generator seed + indices_to_take = rng.indices length count take_helper length at single_slice slice_ranges (Index_Sub_Range.By_Index indices_to_take) Index_Sub_Range.Every step start -> if step <= 0 then Error.throw (Illegal_Argument.Error "Step within Every must be positive.") else @@ -264,8 +265,8 @@ drop_helper length at single_slice slice_ranges range = case range of inverted = invert_range_selection normalized length needs_sorting=True slice_ranges inverted Index_Sub_Range.Sample count seed -> - rng = Random.new seed - indices_to_drop = Random.random_indices length count rng + rng = Random.new_generator seed + indices_to_drop = rng.indices length count drop_helper length at single_slice slice_ranges (Index_Sub_Range.By_Index indices_to_drop) Index_Sub_Range.Every step start -> if step <= 0 then Error.throw (Illegal_Argument.Error "Step within Every must be positive.") else diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Noise.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Noise.enso index 
bd65c3cd48b4..e69de29bb2d1 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Noise.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Noise.enso @@ -1,58 +0,0 @@ -import project.Any.Any -import project.Data.Interval.Interval -import project.Data.Interval.Interval_Type -import project.Data.Numbers.Number - -polyglot java import java.lang.Long -polyglot java import java.util.Random - -## A noise generator that implements a seeded deterministic random perturbation - of the input. - - It produces what is commonly termed "white" noise, where any value in the - range has an equal chance of occurring. -type Deterministic_Random - ## GROUP Random - Step the generator to produce the next value. - - Arguments: - - input: The seed number to perturb. - - interval: The interval over which the noise should be generated. - - > Example - Step the generator with the input 1 and range 0 to 1 - - from Standard.Base.Data.Noise.Generator import Deterministic_Random - - example_det_random = Deterministic_Random.step 1 (Interval.new 0 1 Interval_Type.Inclusive) - step : Number -> Interval -> Number - step self input interval = - max_long = Long.MAX_VALUE - seed = input.floor % max_long - gen = Random.new seed - value_range = (interval.end.n - interval.start.n).abs - offset = (interval.start.n) - gen.nextDouble - val = gen.nextDouble - (val * value_range) + offset - -## GROUP Random - Generate noise based on the input number. - - Arguments: - - interval: An interval in which the noise should be generated. - - gen: The generator to use for generating noise. - - The output of the noise generator will depend on the input and the range over - which the noise is being generated. - - By default, this uses a seeded deterministic generator that will always - return the same input for the same output. In addition, it will, by default, - generate values in the exclusive range 0 to 1. - - > Example - Deterministically perturb the input number 1. 
- 1.noise -Number.noise : Interval -> Deterministic_Random -> Any -Number.noise self (interval = Interval.new 0 1 Interval_Type.Exclusive) gen=Deterministic_Random = - gen.step self interval diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso index 9106bda27b88..5596a882ccc2 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Text/Text_Sub_Range.enso @@ -14,7 +14,7 @@ import project.Metadata.Display import project.Metadata.Widget import project.Nothing.Nothing import project.Panic.Panic -import project.Random +import project.Random.Random from project.Data.Boolean import Boolean, False, True from project.Data.Index_Sub_Range import handle_unmatched_type from project.Data.Range.Extensions import all @@ -149,8 +149,8 @@ type Codepoint_Ranges batch_resolve_indices_or_ranges text indices _ -> resolve_index_or_range text indices Index_Sub_Range.Sample count seed -> - rng = Random.new seed - indices = Random.random_indices text.length count rng + rng = Random.new_generator seed + indices = rng.indices text.length count Codepoint_Ranges.resolve text (Index_Sub_Range.By_Index indices) Index_Sub_Range.Every step start -> if step <= 0 then Error.throw (Illegal_Argument.Error "Step within Every must be positive.") else diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso index 3a1c620f3fa8..8f349aba5861 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Data/Vector.enso @@ -22,7 +22,7 @@ import project.Math import project.Meta import project.Nothing.Nothing import project.Panic.Panic -import project.Random +import project.Random.Random import project.Warning.Warning from project.Data.Boolean import Boolean, False, 
True from project.Data.Filter_Condition import unify_condition_or_predicate, unify_condition_predicate_or_element @@ -89,6 +89,7 @@ type Vector a > Example Turn a list into a vector. Vector.collect (List.Cons 1 <| List.Cons 2 <| List.Nil) .x .xs stop_at=(_==List.Nil) + collect : Any -> (Any -> Any) -> (Any -> Any) -> Integer | Nothing -> ((Any -> Boolean) | Nothing) -> Vector Any collect seq element:(Any -> Any) next:(Any->Any) limit:(Integer|Nothing)=Nothing stop_at:(Any->Boolean)=(_==Nothing) = b = Vector.new_builder (if limit.is_nothing then 10 else limit) iterate item remaining = if remaining == 0 || (stop_at item) then b.to_vector else @@ -824,8 +825,8 @@ type Vector a collection) generates a random set of indices and then selects these indices, but we can sample the vector directly. Index_Sub_Range.Sample count seed -> - rng = Random.new seed - Random.sample self count rng + rng = Random.new_generator seed + rng.items self count _ -> take_helper self.length (self.at _) self.slice (slice_ranges self) range diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso index 283880e82bc7..af9ebc105879 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Main.enso @@ -13,7 +13,6 @@ import project.Data.List.List import project.Data.Locale.Locale import project.Data.Map.Map import project.Data.Maybe.Maybe -import project.Data.Noise import project.Data.Numbers import project.Data.Ordering.Comparable import project.Data.Ordering.Default_Comparator @@ -70,7 +69,7 @@ import project.Nothing.Nothing import project.Panic.Panic import project.Polyglot.Java import project.Polyglot.Polyglot -import project.Random +import project.Random.Random import project.Runtime import project.System import project.System.Environment @@ -161,7 +160,7 @@ export project.Nothing.Nothing export project.Panic.Panic export project.Polyglot.Java export 
project.Polyglot.Polyglot -export project.Random +export project.Random.Random export project.Runtime export project.System export project.System.Environment @@ -178,7 +177,6 @@ export project.Warning.Warning from project.Data.Boolean export Boolean, False, True from project.Data.Index_Sub_Range.Index_Sub_Range export First, Last from project.Data.Json.Extensions export all -from project.Data.Noise export all hiding Deterministic_Random, Generator, Long, Noise, Random from project.Data.Numbers export Float, Integer, Number from project.Data.Range.Extensions export all from project.Data.Statistics export all hiding to_moment_statistic, wrap_java_call, calculate_correlation_statistics, calculate_spearman_rank, calculate_correlation_statistics_matrix, compute_fold, empty_value, is_valid diff --git a/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso b/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso index aba492d82deb..1cecad82063b 100644 --- a/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso +++ b/distribution/lib/Standard/Base/0.0.0-dev/src/Random.enso @@ -1,99 +1,340 @@ import project.Any.Any -import project.Data.Boolean.Boolean -import project.Data.Json.JS_Object -import project.Data.Numbers.Float -import project.Data.Numbers.Integer +import project.Data.Array.Array +import project.Data.Range.Range import project.Data.Text.Text +import project.Data.Time.Date.Date +import project.Data.Time.Date_Range.Date_Range +import project.Data.Time.Period.Period +import project.Data.Time.Time_Of_Day.Time_Of_Day +import project.Data.Time.Time_Period.Time_Period +import project.Data.Map.Map +import project.Data.Set.Set import project.Data.Vector.Vector import project.Error.Error import project.Errors.Illegal_Argument.Illegal_Argument +import project.Nothing.Nothing +import project.Runtime.Ref.Ref import project.System +from project.Data.Boolean import Boolean, False, True +from project.Data.Numbers import Float, Integer +from project.Data.Range.Extensions 
import all polyglot java import java.lang.Integer as Java_Integer polyglot java import java.util.Random as Java_Random polyglot java import java.util.UUID -polyglot java import org.enso.base.Random_Utils +polyglot java import org.enso.base.random.Random_Utils +polyglot java import org.enso.base.random.RandomInstanceHolder -## PRIVATE - UNSTABLE - Returns a default seed to use for random number generation. +type Random + ## PRIVATE + GROUP Random - The returned seed may differ between each call to this method. -get_default_seed : Integer -get_default_seed = System.nano_time + Create a new `Random_Generator` with the specified seed. + + Arguments: + - seed: The seed used to initialze the `Random_Generator`. + + > Example + Create a new `Random_Generator` and use it. + + import Standard.Base.Random.Random + + rng = Random.oew_generator 42 + i = rng.integer 0 10 + new_generator : Integer -> Random_Generator + new_generator (seed:Integer|Nothing=Nothing) = Random_Generator.new seed + + ## TEXT_ONLY + GROUP Random + + Set the seed of the default `Random_Generator` instance. + + Arguments: + - seed: The seed used to initialze the default `Random_Generator`. + + > Example + Set the seed of the default `Random_Generator` to 42. -## Constructs a new random number generator. -new : Integer -> Random_Number_Generator -new seed=get_default_seed = - Random_Number_Generator.Value (Java_Random.new seed) + import Standard.Base.Random.Random -type Random_Number_Generator - ## A random number generator. - Value java_random + Random.set_seed 42 + set_seed : Integer -> Nothing + set_seed seed:Integer = Random_Generator.global_random_generator.set_seed seed ## GROUP Random - Gets the next random Boolean. - boolean : Float - boolean self = - self.java_random.nextBoolean + Return a random `Integer` between `min` (inclusive) and `max` + (exclusive). + + Arguments: + - min: The minimum value of the range to pick from (inclusive). 
+ - max: The maximum value of the range to pick from (exclusive). + + > Example + Generate an integer between 1 and 10. + + import Standard.Base.Random.Random + + i = Random.integer 1 11 + integer : Integer -> Integer -> Integer ! Illegal_Argument + integer (min:Integer) (max:Integer) = Random_Generator.global_random_generator.integer min max + + ## GROUP Random + Return a random `Float` in the specified range. + + Arguments: + - min: The minimum value of the range to pick from (inclusive). + - max: The maximum value of the range to pick from (exclusive). + + > Example + Generate a float between 2.3 and 3.4. + + import Standard.Base.Random.Random + + i = Random.float 2.3 3.4 + float : Float -> Float -> Float + float (min:Float=0.0) (max:Float=1.0) = Random_Generator.global_random_generator.float min max + + ## GROUP Random + Return a random `Boolean` using the default instance of + `Random_Generator`. + + > Example + Generate a random boolean. + + import Standard.Base.Random.Random + + b = Random.boolean + boolean : Boolean + boolean = Random_Generator.global_random_generator.boolean + + ## GROUP Random + Return a random Gaussian-distributed `Float` with the specified mean and + standard deviation. + + Arguments: + - standard_deviation: The standard deviation of the sampled distribution. + - mean: The mean of the sampled distribution. + + > Example + Generate an gaussian float. + + import Standard.Base.Random.Random + + f = Random.gaussian + gaussian : Float -> Float -> Float + gaussian (standard_deviation:Float=1.0) (mean:Float=0.0) = Random_Generator.global_random_generator.gaussian standard_deviation mean + + ## GROUP Random + Returns `count` indices sampled from the range 0 (inclusive) to n + (exclusive) without replacement. + + Arguments: + - n: The end of the range to sample from (exclusive). + - count: The number of samples to take. + + > Example + Generate 10 random indices between 0 and 100. 
+ + import Standard.Base.Random.Random + + indices = Random.indices 100 10 + indices : Integer -> Integer -> Vector Integer + indices (n:Integer) (count:Integer=1) = Random_Generator.global_random_generator.indices n count + + ## GROUP Random + Return a `Date` within the specified `Date_Range`. + + Arguments: + - start_date: The lower bound of the range to pick from (inclusive). + - end_date: The upper bound of the range to pick from (exclusive). + + > Example + Generate a random date. + + import Standard.Base.Random.Random + + d = Random.date (Date.new 2023 03 01) (Date.new 2023 10 15) + date : Date -> Date -> Boolean -> Date + date start_date:Date end_date:Date end_exclusive=True = Random_Generator.global_random_generator.date start_date end_date end_exclusive ## GROUP Random - Gets the next random Float between 0 and 1. - float : Float - float self = - self.java_random.nextDouble + Return a `Time_Of_Day` between `start_time` and `end_time` (inclusive). + + Arguments: + - start_time: The lower bound of the range to pick from (inclusive). + - end_time: The upper bound of the range to pick from (inclusive). + + The generated time is at a granularity of 1 second. Random values are + generated for hours, minutes and seconds. The `nanoseconds` field is + always 0. + + Note: the time range end is inclusive because it is impossible to specify + an end time after the last second of the day. + + > Example + Generate a random time. + + import Standard.Base.Random.Random + + start = Time_Of_Day.new 8 12 15 + end = Time_Of_Day.new 9 40 2 + t = Random.time start end + time : Time_Of_Day -> Time_Of_Day -> Time_Of_Day + time (start_time:Time_Of_Day=(Time_Of_Day.new 0 0 0)) (end_time:Time_Of_Day=(Time_Of_Day.new 23 59 59)) = + Random_Generator.global_random_generator.time start_time end_time ## GROUP Random - Gets the next random Float from a normal distribution with mean 0 and std-dev 1. - gaussian : Float - gaussian self = - self.java_random.nextGaussian + Return a new UUID. 
+ + ! Random Seed + `uuid` does not use the underlying `java.util.Random` instance, and is + therefore not affected by `set_seed`. + + > Example + Generate a uuid. + + import Standard.Base.Random.Random + + uuid = Random.uuid + uuid : Text + uuid = Random_Generator.global_random_generator.uuid ## GROUP Random - Gets the next random integer between min (inclusive) and max (exclusive). + Returns a new `Vector` containing a random sample of the input vector, + with or without replacement. Arguments: - - min: the minimum value (inclusive) of the random integer. - - max: the maximum value (exclusive) of the random integer. - integer : Integer -> Integer -> Integer - integer self min=0 max=100 = + - vector: The `Vector` to take items from. + - count: The number of items to return. + - with_replacement: If false, each value in the `Vector` can only be + selected once during a single call; the returned values will contain no + duplicates. If true, values from the `Vector` can be returned multiple + times and duplicates are possible. + + ? Selection without replacement + + If `with_replacement` is false, and `count` is equal to the size of the + collection, a random permutation of the collection is returned. + + If `with_replacement` is false, and `count` is greater than the size of + the collection, an `Illegal_Argument` error is thrown. (If `count` is + greater than the size of the collection, `items` will run out of values + to choose from, and will not be able to return enough values.) + + > Example + Sample a vector. + + import Standard.Base.Random.Random + + items = Random.items [1, 2, 3, 4] + items : (Vector Any | Array Any) -> Integer -> Boolean -> Vector Any ! Illegal_Argument + items (v : Vector Any | Array Any) (count:Integer=1) (with_replacement:Boolean=False) = Random_Generator.global_random_generator.items v count with_replacement + + ## GROUP Random + Returns a random permutation of the input `Vector`. + + Arguments: + - vector: The `Vector` to permute. 
+ + > Example + Permute a vector. + + import Standard.Base.Random.Random + + permuted = Random.permute [1, 2, 3, 4] + permute : Vector Any -> Vector Any + permute (v:Vector) = Random_Generator.global_random_generator.permute v + +# PRIVATE +type Random_Generator + ## PRIVATE + Create a new rng with the given seed. + new : Integer|Nothing -> Random_Generator + new (seed:Integer|Nothing) = + seed_resolved = seed.if_nothing get_default_seed + Random_Generator.Value (RandomInstanceHolder.new seed_resolved) + + ## PRIVATE + Value (random_instance_holder:RandomInstanceHolder) + + ## PRIVATE + global_random_generator : Random_Generator + global_random_generator = RandomInstanceHolder.singleton Random_Generator.Value + + ## PRIVATE + set_seed : Integer -> Nothing + set_seed self seed = self.random_instance_holder.setSeed seed + + ## PRIVATE + java_random : Java_Random + java_random self = self.random_instance_holder.getCurrentRandom + + ## PRIVATE + integer : Integer -> Integer -> Integer ! Illegal_Argument + integer self (min:Integer) (max:Integer) = range = max - min - if range < Java_Integer.MAX_VALUE then min + (self.java_random.nextInt range) else - Error.throw (Illegal_Argument.Error "Currently only integer ranges of up to 2^31-1 are supported.") + if range >= Java_Integer.MAX_VALUE then Error.throw (Illegal_Argument.Error "Currently only integer ranges of up to 2^31-1 are supported.") else + min + (self.java_random.nextInt range) ## PRIVATE - Serialise to JS_Object - to_js_object : JS_Object - to_js_object self = - JS_Object.from_pairs [["type", "Random_Number_Generator"], ["constructor", "new"]] + float : Float -> Float -> Float + float self (min:Float=0.0) (max:Float=1.0) = + range = max - min + self.java_random.nextDouble * range + min ## PRIVATE - Convert to a display representation of this Random_Number_Generator. 
- to_display_text : Text - to_display_text self = "Random_Number_Generator" + boolean : Boolean + boolean self = + self.java_random.nextBoolean -## PRIVATE - Returns a new vector containing a random sample of the input vector, without - replacement. + ## PRIVATE + gaussian : Float -> Float -> Float + gaussian self (standard_deviation:Float=1.0) (mean:Float=0.0) = + self.java_random.nextGaussian * standard_deviation + mean - If the amount of elements to select is larger than the input vector size, it - returns a random permutation of the input vector. -sample : Vector Any -> Integer -> Random_Number_Generator -> Vector Any -sample vector k rng = - new_array = Random_Utils.sample vector k rng.java_random - Vector.from_polyglot_array new_array + ## PRIVATE + indices : Integer -> Integer -> Vector Integer + indices self (n:Integer) (count:Integer=1) = + array = Random_Utils.random_indices n count self.java_random + Vector.from_polyglot_array array -## PRIVATE - Returns `k` indices sampled from the range [0, n-1] without replacement. + ## PRIVATE + date : Date -> Date -> Boolean -> Date + date self start_date:Date end_date:Date end_exclusive=True = + date_range = start_date.up_to end_date include_end=end_exclusive.not + date_range.at (self.integer 0 date_range.length) - If `k >= n`, it will return a random permutation of the indices. -random_indices : Integer -> Integer -> Random_Number_Generator -> Vector Integer -random_indices n k rng = - array = Random_Utils.random_indices n k rng.java_random - Vector.from_polyglot_array array + ## PRIVATE + time : Time_Of_Day -> Time_Of_Day -> Time_Of_Day + time self (start_time:Time_Of_Day=(Time_Of_Day.new 0 0 0)) (end_time:Time_Of_Day=(Time_Of_Day.new 23 59 59)) = + ## The time range end is inclusive because it is impossible to specify + an end time after the last second of the day. 
+ seconds = self.integer 0 (end_time.to_seconds - start_time.to_seconds + 1) + start_time.date_add seconds Time_Period.Second + + ## PRIVATE + uuid : Text + uuid self = UUID.randomUUID.to_text + + ## PRIVATE + items : (Vector Any | Array Any) -> Integer -> Boolean -> Any ! Illegal_Argument + items self (v : Vector Any | Array Any) (count:Integer=1) (with_replacement:Boolean=False) = + case with_replacement of + True -> + len = v.length + 0.up_to count . map _-> v.at (self.integer 0 len) + False -> + if count > v.length then Error.throw (Illegal_Argument.Error "`count` cannot be greater than the size of the collection when `with_replacement` is false") else + new_array = Random_Utils.sample v count self.java_random + Vector.from_polyglot_array new_array + + ## PRIVATE + permute : Vector Any -> Vector Any + permute self (v:Vector) = self.items v v.length False ## PRIVATE - Generates a text representation of a randomly generated UUID. -random_uuid : Text -random_uuid = - UUID.randomUUID.to_text + Returns a default seed to use for random number generation. + + The returned seed will likely differ between each call to this method. 
+get_default_seed : Integer +get_default_seed = Random_Utils.getDefaultSeed diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Table_Naming_Helper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Table_Naming_Helper.enso index f9db9fde95ca..2718518984e8 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Table_Naming_Helper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Table_Naming_Helper.enso @@ -94,7 +94,7 @@ type Table_Naming_Helper if max_size.is_nothing.not && (naming_properties.encoded_size (prefix + minimum_randomness_template) > max_size) then Error.throw (Illegal_State.Error "The prefix has size "+(naming_properties.encoded_size prefix).to_text+" while max table name size is "+max_size.to_text+" - there is not enough space to safely generate random names.") else go ix = if ix > maximum_retries then Error.throw (Illegal_State.Error "Could not generate a non-assigned random table name after "+maximum_retries+". Giving up.") else - base_name = prefix + Random.random_uuid + base_name = prefix + Random.uuid name = self.truncate_if_needed base_name if self.connection.table_exists name . not then name else @Tail_Call go (ix + 1) diff --git a/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso b/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso index 5be640c8a057..b64bc6406943 100644 --- a/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso +++ b/distribution/lib/Standard/Test/0.0.0-dev/src/Extensions.enso @@ -509,6 +509,64 @@ Error.should_contain_the_same_elements_as self that frames_to_skip=0 = _ = [that] Test.fail_match_on_unexpected_error self 1+frames_to_skip +## Asserts that `self` value contains only elements in `that`. + + It checks that all elements from `self` are also present in `that`. It does + not require that all elements of `that` are contained in `self`. 
Arities of + elements are not checked, so `self` may still contain more elements than + `that` by containing duplicates. + + It will work on any collection which supports the methods + `each : (Any -> Nothing) -> Any` and `contains : Any -> Boolean`. + + Arguments: + - that: The collection to compare. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one vector should contain only elements in another. + + import Standard.Examples + from Standard.Test import Test + + example_should_equal = [1, 2] . should_only_contain_elements_in [1, 2, 3, 4] +Any.should_only_contain_elements_in : Any -> Integer -> Test_Result +Any.should_only_contain_elements_in self that frames_to_skip=0 = + loc = Meta.get_source_location 1+frames_to_skip + self.each element-> + if that.contains element . not then + msg = "The collection contained an element ("+element.to_text+") which was not expected (at " + loc + ")." + Test.fail msg + Test_Result.Success + +## Asserts that `self` value contains only elements in `that`. + + It checks that all elements from `self` are also present in `that`. It does + not require that all elements of `that` are contained in `self`. Arities of + elements are not checked, so the collections can still differ in length by + containing duplicate elements. + + It will work on any collection which supports the methods + `each : (Any -> Nothing) -> Any` and `contains : Any -> Boolean`. + + Arguments: + - that: The collection to compare. + - frames_to_skip (optional, advanced): used to alter the location which is + displayed as the source of this error. + + > Example + Assert that one vector should contain only elements in another. + + import Standard.Examples + from Standard.Test import Test + + example_should_equal = [1, 2] . 
should_only_contain_elements_in [1, 2, 3, 4] +Error.should_only_contain_elements_in : Any -> Integer -> Test_Result +Error.should_only_contain_elements_in self that frames_to_skip=0 = + _ = [that] + Test.fail_match_on_unexpected_error self 1+frames_to_skip + ## Asserts that `self` value contains an element. Arguments: diff --git a/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/instrument/AvoidIdInstrumentationTagTest.java b/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/instrument/AvoidIdInstrumentationTagTest.java index 0639a47f7a98..0b261ee8fa52 100644 --- a/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/instrument/AvoidIdInstrumentationTagTest.java +++ b/engine/runtime-with-instruments/src/test/java/org/enso/interpreter/test/instrument/AvoidIdInstrumentationTagTest.java @@ -64,12 +64,12 @@ public void disposeContext() { } @Test - public void avoidIdInstrumentationInLambdaMapFunctionWithNoise() throws Exception { + public void avoidIdInstrumentationInLambdaMapFunctionWithFloor() throws Exception { var code = """ from Standard.Base import all import Standard.Visualization - run n = 0.up_to n . map i-> 1.noise * i + run n = 0.up_to n . 
map i-> 1.floor * i """; var src = Source.newBuilder("enso", code, "TestLambda.enso").build(); var module = context.eval(src); @@ -80,7 +80,7 @@ public void avoidIdInstrumentationInLambdaMapFunctionWithNoise() throws Exceptio Predicate isLambda = (ss) -> { var sameSrc = ss.getSource().getCharacters().toString().equals(src.getCharacters().toString()); var st = ss.getCharacters().toString(); - return sameSrc && st.contains("noise") && !st.contains("map"); + return sameSrc && st.contains("floor") && !st.contains("map"); }; assertAvoidIdInstrumentationTag(isLambda); diff --git a/engine/runtime/src/test/java/org/enso/compiler/ExecCompilerTest.java b/engine/runtime/src/test/java/org/enso/compiler/ExecCompilerTest.java index d1643b14af59..27353b5519b3 100644 --- a/engine/runtime/src/test/java/org/enso/compiler/ExecCompilerTest.java +++ b/engine/runtime/src/test/java/org/enso/compiler/ExecCompilerTest.java @@ -153,7 +153,7 @@ public void testDoubledRandom() throws Exception { polyglot java import java.util.Random run seed = - operator1 = Random.new seed + operator1 = Random.new_generator seed """); var run = module.invokeMember("eval_expression", "run"); try { diff --git a/std-bits/base/src/main/java/org/enso/base/random/RandomInstanceHolder.java b/std-bits/base/src/main/java/org/enso/base/random/RandomInstanceHolder.java new file mode 100644 index 000000000000..9d0d557abeb6 --- /dev/null +++ b/std-bits/base/src/main/java/org/enso/base/random/RandomInstanceHolder.java @@ -0,0 +1,38 @@ +package org.enso.base.random; + +import java.util.Random; +import java.util.function.Function; + +/** + * Container for a `Random` object. Setting the seed requires recreating the `Random` object, so the + * `Random` should always be obtained through `getCurrentRandom`. 
+ */ +public class RandomInstanceHolder { + private RandomInstanceHolder() {} + + public RandomInstanceHolder(long seed) { + setSeed(seed); + } + + public static Object SINGLETON = null; + + private Random currentRandom = null; + + public Random getCurrentRandom() { + if (currentRandom == null) { + setSeed(Random_Utils.getDefaultSeed()); + } + return currentRandom; + } + + public void setSeed(long seed) { + currentRandom = new Random(seed); + } + + public static Object singleton(Function factory) { + if (SINGLETON == null) { + SINGLETON = factory.apply(new RandomInstanceHolder()); + } + return SINGLETON; + } +} diff --git a/std-bits/base/src/main/java/org/enso/base/Random_Utils.java b/std-bits/base/src/main/java/org/enso/base/random/Random_Utils.java similarity index 87% rename from std-bits/base/src/main/java/org/enso/base/Random_Utils.java rename to std-bits/base/src/main/java/org/enso/base/random/Random_Utils.java index 33d160ed14d2..9d11d23bddf8 100644 --- a/std-bits/base/src/main/java/org/enso/base/Random_Utils.java +++ b/std-bits/base/src/main/java/org/enso/base/random/Random_Utils.java @@ -1,9 +1,14 @@ -package org.enso.base; +package org.enso.base.random; import java.util.Arrays; import java.util.Random; public class Random_Utils { + /** Default `seed` used to initialize new instances of `Random` */ + public static long getDefaultSeed() { + return java.lang.System.nanoTime(); + } + /** Samples k random values from the input. 
*/ public static Object[] sample(Object[] array, int k, Random rng) { k = Math.min(k, array.length); diff --git a/test/Benchmarks/src/Map/Hash_Map.enso b/test/Benchmarks/src/Map/Hash_Map.enso index dfb00d327a02..cacb8bafe188 100644 --- a/test/Benchmarks/src/Map/Hash_Map.enso +++ b/test/Benchmarks/src/Map/Hash_Map.enso @@ -14,9 +14,8 @@ type Data create n = create_ints = - rng = Random.new Vector.new n _-> - rng.integer 0 (n.div 100) + Random.integer 0 (n.div 100) Data.Value create_ints type Scenario diff --git a/test/Benchmarks/src/Table/Add_Row_Number.enso b/test/Benchmarks/src/Table/Add_Row_Number.enso index a58c00095cd0..18fac2c5cc11 100644 --- a/test/Benchmarks/src/Table/Add_Row_Number.enso +++ b/test/Benchmarks/src/Table/Add_Row_Number.enso @@ -10,7 +10,7 @@ options = Bench.options . set_warmup (Bench.phase_conf 2 2) . set_measure (Bench create_table : Integer -> Table create_table num_rows = - rng = Random.new 42 + rng = Random.new_generator 42 x = Vector.new num_rows _-> rng.integer min=0 max=50 y = Vector.new num_rows _-> rng.integer min=0 max=10000 t = Table.new [["X", x], ["Y", y]] diff --git a/test/Benchmarks/src/Table/Internal/Multi_Value_Key.enso b/test/Benchmarks/src/Table/Internal/Multi_Value_Key.enso index 75b51eabe5bc..c35b23252e8b 100644 --- a/test/Benchmarks/src/Table/Internal/Multi_Value_Key.enso +++ b/test/Benchmarks/src/Table/Internal/Multi_Value_Key.enso @@ -25,12 +25,12 @@ Comparable.from (_:My_Pair) = My_Pair_Comparator create_table : Integer -> Table create_table num_rows = - rng = Random.new 42 + rng = Random.new_generator 42 x = Vector.new num_rows _-> rng.integer min=0 max=100 y = Vector.new num_rows _-> rng.integer min=0 max=20 . 
to_text z = Vector.new num_rows _-> - a = rng.integer min=0 max=100 - b = rng.integer min=0 max=100 + a = Random.integer min=0 max=100 + b = Random.integer min=0 max=100 My_Pair.Value a b t = Table.new [["X", x], ["Y", y], ["Z", z]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso index 3fbc7250422c..9c04d14eeb6d 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Take_Drop_Spec.enso @@ -158,6 +158,13 @@ spec setup = rnd.at "alpha" . to_vector . should_equal alpha_sample rnd.at "beta" . to_vector . should_equal beta_sample + Test.specify "sampling should be deterministic when a seed is supplied" <| + table.take (Sample 3 seed=4200000) . should_equal (table.take (Sample 3 seed=4200000)) + + Test.specify "sampling should be non-deterministic when a seed is not supplied" <| + 0.up_to 3 . map _-> + table.take (Sample 3) . should_not_equal (table.take (Sample 3)) + if setup.is_database.not then Test.specify "should allow selecting rows as long as they satisfy a predicate" <| t = table_builder [["a", [1, 2, 3, 4]], ["b", [5, 6, 7, 8]]] diff --git a/test/Table_Tests/src/Database/Helpers/Name_Generator.enso b/test/Table_Tests/src/Database/Helpers/Name_Generator.enso index 5d3974f40d6f..8f96fd9e3d19 100644 --- a/test/Table_Tests/src/Database/Helpers/Name_Generator.enso +++ b/test/Table_Tests/src/Database/Helpers/Name_Generator.enso @@ -1,8 +1,7 @@ from Standard.Base import all -from Standard.Base.Random import random_uuid ## Generates a random name with a given prefix, ensuring relative uniqueness by appending a random UUID. 
random_name : Text -> Text random_name prefix = - prefix + "-" + random_uuid + prefix + "-" + Random.uuid diff --git a/test/Tests/src/Data/Noise/Generator_Spec.enso b/test/Tests/src/Data/Noise/Generator_Spec.enso deleted file mode 100644 index ba2a63954796..000000000000 --- a/test/Tests/src/Data/Noise/Generator_Spec.enso +++ /dev/null @@ -1,20 +0,0 @@ -from Standard.Base import all - -import Standard.Base.Data.Noise.Deterministic_Random - -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions - -spec = - Test.group "Deterministic Random Noise Generator" <| - gen = Deterministic_Random - Test.specify "should always return the same output for the same input" <| - interval = Interval.new 0 1 Interval_Type.Inclusive - values = Vector.fill 10000 1 . map (gen.step _ interval) - values.all (== values.at 0) . should_be_true - Test.specify "should always produce values within the specified interval" <| - interval = Interval.new -100 100 Interval_Type.Inclusive - values = 1.up_to 10000 . to_vector . map (gen.step _ interval) - values.all (v -> (v >= -100) && (v <= 100)) . should_be_true - -main = Test_Suite.run_main spec diff --git a/test/Tests/src/Data/Noise_Spec.enso b/test/Tests/src/Data/Noise_Spec.enso deleted file mode 100644 index ccafacfc5709..000000000000 --- a/test/Tests/src/Data/Noise_Spec.enso +++ /dev/null @@ -1,21 +0,0 @@ -from Standard.Base import all - -from Standard.Test import Test, Test_Suite -import Standard.Test.Extensions - -type My_Generator -My_Generator.step self _ _ = 1 - -spec = Test.group "Noise" <| - Test.specify "should be able to be called on numbers" <| - result = 1.noise - result-result . should_equal 0 - Test.specify "should allow the user to specify a generator" <| - result = 1.noise (gen=My_Generator) - result-result . should_equal 0 - Test.specify "should allow the user to specify the interval" <| - interval = Interval.new -250 250 Interval_Type.Inclusive - values = 1.up_to 10001 . to_vector . 
map (_.noise interval) - values.all (v -> (v >= -250) && (v <= 250)) . should_be_true - -main = Test_Suite.run_main spec diff --git a/test/Tests/src/Data/Vector_Spec.enso b/test/Tests/src/Data/Vector_Spec.enso index cfc3240dfeaf..23cadd1e7c28 100644 --- a/test/Tests/src/Data/Vector_Spec.enso +++ b/test/Tests/src/Data/Vector_Spec.enso @@ -458,6 +458,8 @@ type_spec name alter = Test.group name <| vec.take . should_equal [1] vec.drop . should_equal [2, 3, 4, 5, 6] + IO.println 'AAA' + IO.println (Meta.get_simple_type_name vec) vec.take (2.up_to 4) . should_equal [3, 4] vec.take (0.up_to 0) . should_equal [] vec.take (100.up_to 100) . should_fail_with Index_Out_Of_Bounds @@ -559,10 +561,11 @@ type_spec name alter = Test.group name <| vec.take (Sample 0) . should_equal [] alter [] . take (Sample 0) . should_equal [] - alter [] . take (Sample 1) . should_equal [] + alter [] . take (Sample 1) . should_fail_with Illegal_Argument alter ["a"] . take (Sample 1) . should_equal ["a"] alter ["a", "a", "a"] . take (Sample 1) . should_equal ["a"] - alter ["a", "a", "a"] . take (Sample 100) . should_equal ["a", "a", "a"] + alter ["a", "a", "a"] . take (Sample 3) . should_equal ["a", "a", "a"] + alter ["a", "a", "a"] . take (Sample 100) . should_fail_with Illegal_Argument vec.drop (Sample 0) . should_equal vec alter [] . drop (Sample 0) . should_equal [] @@ -571,6 +574,16 @@ type_spec name alter = Test.group name <| alter ["a", "a", "a"] . drop (Sample 1) . should_equal ["a", "a"] alter ["a", "a", "a"] . drop (Sample 100) . should_equal [] + Test.group "take/drop Sample non-determinism" <| + v = 0.up_to 20 . to_vector + + Test.specify "sampling should be deterministic when a seed is supplied" <| + v.take (Sample 3 seed=4200000) . should_equal (v.take (Sample 3 seed=4200000)) + + Test.specify "sampling should be non-deterministic when a seed is not supplied" <| + 0.up_to 3 . map _-> + v.take (Sample 3) . 
should_not_equal (v.take (Sample 3)) + Test.specify "take/drop should gracefully handle missing constructor arguments" <| [].take "FOO" . should_fail_with Type_Error [].drop "FOO" . should_fail_with Type_Error diff --git a/test/Tests/src/Main.enso b/test/Tests/src/Main.enso index 0cab9d93fd4d..0cfca9e258aa 100644 --- a/test/Tests/src/Main.enso +++ b/test/Tests/src/Main.enso @@ -34,8 +34,6 @@ import project.Data.List_Spec import project.Data.Locale_Spec import project.Data.Map_Spec import project.Data.Maybe_Spec -import project.Data.Noise.Generator_Spec as Noise_Generator_Spec -import project.Data.Noise_Spec import project.Data.Numbers_Spec import project.Data.Ordering_Spec import project.Data.Ordering.Comparator_Spec @@ -120,8 +118,6 @@ main = Test_Suite.run_main <| Meta_Location_Spec.spec Names_Spec.spec Equals_Spec.spec - Noise_Generator_Spec.spec - Noise_Spec.spec Numbers_Spec.spec Ordering_Spec.spec Comparator_Spec.spec diff --git a/test/Tests/src/Random_Spec.enso b/test/Tests/src/Random_Spec.enso index 8146ee805498..40d4a5f31825 100644 --- a/test/Tests/src/Random_Spec.enso +++ b/test/Tests/src/Random_Spec.enso @@ -1,79 +1,161 @@ from Standard.Base import all import Standard.Base.Errors.Illegal_Argument.Illegal_Argument +import Standard.Base.Random.Random_Generator from Standard.Test import Test, Test_Suite import Standard.Test.Extensions spec = Test.group "Random" <| - ## Random number generator seeded to make the test deterministic - - Test.specify "should allow generating random booleans" <| - rng = Random.new 0 - 0.up_to 3 . map _->rng.boolean . should_equal [True, True, False] - Test.specify "should allow generating random integers" <| - rng = Random.new 12345 - rng.integer . should_equal 51 - rng.integer 0 10000 . should_equal 9080 - rng.integer 0 100000000000000 . should_fail_with Illegal_Argument + Random.set_seed 12345 + Random.integer 0 100 . should_equal 51 + Random.integer 0 10000 . should_equal 9080 - random_range = 0.up_to 1000 . 
map _->rng.integer . compute_bulk [Statistic.Minimum, Statistic.Maximum] + random_range = 0.up_to 1000 . map (_-> Random.integer 0 100) . compute_bulk [Statistic.Minimum, Statistic.Maximum] (random_range.at 0 >= 0) . should_equal True (random_range.at 1 <= 100) . should_equal True Test.specify "should allow generating random floats" <| - rng = Random.new 12345 - rng.float . should_equal 0.3618031071604718 epsilon=0.00000001 - rng.float . should_equal 0.932993485288541 epsilon=0.00000001 + Random.set_seed 12345 + Random.float . should_equal 0.3618031071604718 epsilon=0.00000001 + Random.float . should_equal 0.932993485288541 epsilon=0.00000001 - random_range = 0.up_to 1000 . map _->rng.float . compute_bulk [Statistic.Minimum, Statistic.Maximum] + random_range = 0.up_to 1000 . map _->Random.float . compute_bulk [Statistic.Minimum, Statistic.Maximum] (random_range.at 0 >= 0) . should_equal True (random_range.at 1 <= 1) . should_equal True + Test.specify "should allow generating random booleans" <| + Random.set_seed 0 + 0.up_to 3 . map _->Random.boolean . should_equal [True, True, False] + Test.specify "should allow generating random gaussian floats" <| - rng = Random.new 12345 - rng.gaussian . should_equal -0.187808989658912 epsilon=0.00000001 - rng.gaussian . should_equal 0.5884363051154796 epsilon=0.00000001 + Random.set_seed 12345 + Random.gaussian . should_equal -0.187808989658912 epsilon=0.00000001 + Random.gaussian . should_equal 0.5884363051154796 epsilon=0.00000001 + Random.gaussian standard_deviation=2 . should_equal 1.8976095608800851 + Random.gaussian mean=5 . should_equal 4.505719279373956 + Random.gaussian standard_deviation=2 mean=5 . should_equal 2.55317612563977 + + 0.up_to 100 . map (_-> Random.gaussian standard_deviation=10) . any (_ > 3.0) . should_be_true + 0.up_to 100 . map (_-> Random.gaussian mean=1000000) . any (_ > 1000000) . should_be_true + 0.up_to 100 . map (_-> Random.gaussian) . any (_ > 1000000) . 
should_be_false Test.specify "should allow to generate random indices" <| - rng = Random.new 0 + Random.set_seed 0 + two_out_of_three = 0.up_to 100 . map _-> - Random.random_indices 3 2 rng + Random.indices 3 2 + two_out_of_four = 0.up_to 10000 . map seed-> + Random.set_seed seed + Random.indices 4 2 + + two_out_of_three . should_contain_the_same_elements_as [[0, 1], [0, 2], [1, 2], [1, 0], [2, 0], [2, 1]] + two_out_of_four . should_contain_the_same_elements_as <| + [[0, 1], [0, 2], [1, 2], [1, 0], [2, 0], [2, 1], [0, 3], [1, 3], [2, 3], [3, 0], [3, 1], [3, 2]] + permutations = 0.up_to 100 . map _-> - Random.random_indices 3 3 rng + Random.indices 3 3 permutations_2 = 0.up_to 100 . map _-> - Random.random_indices 3 100 rng - two_out_of_three . should_contain_the_same_elements_as [[0, 1], [0, 2], [1, 2], [1, 0], [2, 0], [2, 1]] + Random.indices 3 100 all_permutations = [[0, 1, 2], [0, 2, 1], [1, 0, 2], [1, 2, 0], [2, 0, 1], [2, 1, 0]] permutations . should_contain_the_same_elements_as all_permutations permutations_2 . should_contain_the_same_elements_as all_permutations - Random.random_indices 0 0 rng . should_equal [] - Random.random_indices 0 100 rng . should_equal [] - Random.random_indices 1 1 rng . should_equal [0] - Random.random_indices 1 100 rng . should_equal [0] - Random.random_indices 100 0 rng . should_equal [] - - two_out_of_four = 0.up_to 10000 . map seed-> - Random.random_indices 4 2 (Random.new seed) - two_out_of_four . should_contain_the_same_elements_as <| - [[0, 1], [0, 2], [1, 2], [1, 0], [2, 0], [2, 1], [0, 3], [1, 3], [2, 3], [3, 0], [3, 1], [3, 2]] - - Test.specify "should allow to select a random sample from a vector" <| - rng = Random.new 0 + Random.indices 0 0 . should_equal [] + Random.indices 0 100 . should_equal [] + Random.indices 1 1 . should_equal [0] + Random.indices 1 . should_equal [0] + Random.indices 1 100 . should_equal [0] + Random.indices 100 0 . 
should_equal [] + + Test.specify "should allow generating random dates" <| + Random.set_seed 4000 + Random.date (Date.new 2023 03 01) (Date.new 2023 10 15) . should_equal (Date.new 2023 6 9) + Random.date (Date.new 2023 03 01) (Date.new 2023 10 15) . should_equal (Date.new 2023 7 16) + Random.date (Date.new 2023 03 01) (Date.new 2023 10 15) . should_equal (Date.new 2023 10 12) + + all_from_small_range = [Date.new 2023 03 01, Date.new 2023 03 02, Date.new 2023 03 03] + dates = 0.up_to 100 . map (_-> Random.date (Date.new 2023 03 01) (Date.new 2023 03 04)) + dates.should_contain_the_same_elements_as all_from_small_range + + Test.specify "should allow generating random dates, with end_exclusive=False" <| + all_from_small_range = [Date.new 2023 03 01, Date.new 2023 03 02, Date.new 2023 03 03, Date.new 2023 03 04] + dates = 0.up_to 100 . map (_-> Random.date (Date.new 2023 03 01) (Date.new 2023 03 04) end_exclusive=False) + dates.should_contain_the_same_elements_as all_from_small_range + + Test.specify "should allow generating random times" <| + Random.set_seed 12345 + start = Time_Of_Day.new 8 12 15 + end = Time_Of_Day.new 9 40 2 + Random.time start end . should_equal (Time_Of_Day.new 9 31 10) + Random.time start end . should_equal (Time_Of_Day.new 8 34 31) + Random.time start end . should_equal (Time_Of_Day.new 9 27 48) + Random.time . should_equal (Time_Of_Day.new 0 13 48) + Random.time . should_equal (Time_Of_Day.new 14 20 55) + Random.time . should_equal (Time_Of_Day.new 8 48 4) + + small_range_start = Time_Of_Day.new 8 12 15 + small_range_end = Time_Of_Day.new 8 12 18 + all_from_small_range = [Time_Of_Day.new 8 12 15, Time_Of_Day.new 8 12 16, Time_Of_Day.new 8 12 17, Time_Of_Day.new 8 12 18] + times = 0.up_to 100 . map (_-> Random.time small_range_start small_range_end) + times.should_contain_the_same_elements_as all_from_small_range + + Test.specify "should allow generating random UUIDs" <| + Random.uuid . 
should_only_contain_elements_in "0123456789abcdef-" + + Test.specify "should allow to select random items from a vector, without replacement" <| + Random.set_seed 0 vector = ["A", "B", "C"] shuffles = 0.up_to 100 . map _-> - Random.sample vector 2 rng + Random.items vector 2 shuffles . should_contain_the_same_elements_as [["A", "B"], ["A", "C"], ["B", "A"], ["B", "C"], ["C", "A"], ["C", "B"]] - overflow = Random.sample vector 100 rng - overflow.length . should_equal 3 - overflow.should_contain_the_same_elements_as vector + Random.items ["A", "A", "A"] 2 . should_equal ["A", "A"] + Random.items ["A", "A", "A"] 0 . should_equal [] + Random.items ["A", "A", "A"] 3 . should_equal ["A", "A", "A"] + + Test.specify "should allow to select random items from a vector, with replacement" <| + Random.set_seed 0 + vector = 0.up_to 10 . to_vector + + Random.items vector with_replacement=True . length . should_equal 1 + Random.items vector 4 with_replacement=True . length . should_equal 4 + Random.items vector 10 with_replacement=True . length . should_equal 10 + Random.items vector 11 with_replacement=True . length . should_equal 11 + Random.items vector 100 with_replacement=True . length . should_equal 100 + + 0.up_to 20 . map _-> + items = Random.items vector 5 with_replacement=True + items.should_only_contain_elements_in vector + items.length . should_equal 5 + + Test.specify "should not allow to select more items than the vector has, without replacement" <| + vector = 0.up_to 10 . to_vector + Random.items vector 11 . should_fail_with Illegal_Argument + Random.items vector 100 . should_fail_with Illegal_Argument + + Test.specify "should allow to generate random permutations" <| + Random.set_seed 0 + list = [0, 1, 2] + permutations = 0.up_to 100 . map _-> + Random.permute list + permutations_2 = 0.up_to 100 . map _-> + Random.permute list + + all_permutations = [[0, 1, 2], [0, 2, 1], [1, 0, 2], [1, 2, 0], [2, 0, 1], [2, 1, 0]] + permutations . 
should_contain_the_same_elements_as all_permutations + permutations_2 . should_contain_the_same_elements_as all_permutations + + Test.specify "should not allow using a too-large integer range" <| + high = 9223372036854775807000 + Random.integer 0 high . should_fail_with Illegal_Argument - Random.sample ["A", "A", "A"] 2 rng . should_equal ["A", "A"] - Random.sample ["A", "A", "A"] 0 rng . should_equal [] - Random.sample ["A", "A", "A"] 3 rng . should_equal ["A", "A", "A"] - Random.sample ["A", "A", "A"] 100 rng . should_equal ["A", "A", "A"] + Test.specify "Can call an instance directly" <| + Random.new_generator . should_be_a Random_Generator + Random.new_generator 12345 . should_be_a Random_Generator + Random.new_generator 12345 . integer 0 100 . should_equal 51 + Random.new_generator 12345 . float . should_equal 0.3618031071604718 epsilon=0.00000001 + Random.new_generator 12345 . gaussian . should_equal -0.187808989658912 epsilon=0.00000001 main = Test_Suite.run_main spec diff --git a/test/Tests/src/Semantic/Equals_Spec.enso b/test/Tests/src/Semantic/Equals_Spec.enso index 4ab805e897f2..c016561e3a8b 100644 --- a/test/Tests/src/Semantic/Equals_Spec.enso +++ b/test/Tests/src/Semantic/Equals_Spec.enso @@ -217,7 +217,7 @@ spec = ([Child.Value 1].to_array == [Child.Value 101].to_array).should_be_true Test.specify "should handle recursive atoms without custom `==`" <| - rnd = (Random.new seed=42).java_random + rnd = (Random.new_generator seed=42).java_random trees = (0.up_to 5).map _-> create_random_tree 5 rnd trees.each tree-> From 3fd22498641794b01e1f75922a58dc7dde42fd30 Mon Sep 17 00:00:00 2001 From: Jaroslav Tulach Date: Wed, 1 Nov 2023 12:42:34 +0100 Subject: [PATCH 05/12] Introducing engine/runtime-compiler project (#8197) --- build.sbt | 24 +++++-- .../enso/compiler/PackageRepositoryUtils.java | 0 .../compiler/context/CompilerContext.java | 0 .../org/enso/compiler/context/ExportsMap.java | 0 .../enso/compiler/context/FramePointer.java | 7 ++ 
.../enso/compiler/context/SimpleUpdate.java | 0 .../CompilationAbortedException.java | 0 .../pass/analyse/ExportSymbolAnalysis.java | 0 .../pass/analyse/PrivateModuleAnalysis.java | 0 .../scala/org/enso/compiler/Compiler.scala | 0 .../org/enso/compiler/CompilerResult.scala | 0 .../org/enso/compiler/PackageRepository.scala | 0 .../main/scala/org/enso/compiler/Passes.scala | 0 .../compiler/context/ExportsBuilder.scala | 0 .../compiler/context/FreshNameSupply.scala | 0 .../enso/compiler/context/InlineContext.scala | 2 +- .../enso/compiler/context}/LocalScope.scala | 71 ++----------------- .../enso/compiler/context/ModuleContext.scala | 0 .../compiler/context/ModuleExportsDiff.scala | 0 .../compiler/context/SuggestionBuilder.scala | 0 .../compiler/context/SuggestionDiff.scala | 0 .../org/enso/compiler/data/BindingsMap.scala | 0 .../enso/compiler/data/CompilerConfig.scala | 0 .../compiler/exception/BadPatternMatch.scala | 0 .../compiler/exception/UnhandledEntity.scala | 0 .../scala/org/enso/compiler/pass/IRPass.scala | 0 .../scala/org/enso/compiler/pass/Pass.scala | 0 .../compiler/pass/PassConfiguration.scala | 0 .../org/enso/compiler/pass/PassManager.scala | 0 .../compiler/pass/analyse/AliasAnalysis.scala | 0 .../analyse/AmbiguousImportsAnalysis.scala | 0 .../pass/analyse/AutomaticParallelism.scala | 0 .../pass/analyse/BindingAnalysis.scala | 0 .../analyse/CachePreferenceAnalysis.scala | 0 .../pass/analyse/DataflowAnalysis.scala | 0 .../pass/analyse/DemandAnalysis.scala | 0 .../pass/analyse/GatherDiagnostics.scala | 0 .../pass/analyse/ImportSymbolAnalysis.scala | 0 .../enso/compiler/pass/analyse/TailCall.scala | 0 .../compiler/pass/desugar/ComplexType.scala | 0 .../pass/desugar/FunctionBinding.scala | 0 .../pass/desugar/GenerateMethodBodies.scala | 0 .../enso/compiler/pass/desugar/Imports.scala | 0 .../desugar/LambdaShorthandToLambda.scala | 0 .../pass/desugar/NestedPatternMatch.scala | 0 .../pass/desugar/OperatorToFunction.scala | 0 
.../pass/desugar/SectionsToBinOp.scala | 0 .../pass/lint/ModuleNameConflicts.scala | 0 .../compiler/pass/lint/NoSelfInStatic.scala | 0 .../pass/lint/ShadowedPatternFields.scala | 0 .../compiler/pass/lint/UnusedBindings.scala | 0 .../pass/optimise/LambdaConsolidate.scala | 0 .../optimise/UnreachableMatchBranches.scala | 0 .../pass/resolve/DocumentationComments.scala | 0 .../pass/resolve/ExpressionAnnotations.scala | 0 .../resolve/FullyAppliedFunctionUses.scala | 0 .../pass/resolve/FullyQualifiedNames.scala | 0 .../pass/resolve/GenericAnnotations.scala | 0 .../compiler/pass/resolve/GlobalNames.scala | 0 .../pass/resolve/IgnoredBindings.scala | 0 .../compiler/pass/resolve/MethodCalls.scala | 0 .../pass/resolve/MethodDefinitions.scala | 0 .../pass/resolve/ModuleAnnotations.scala | 0 .../pass/resolve/OverloadsResolution.scala | 0 .../enso/compiler/pass/resolve/Patterns.scala | 0 .../pass/resolve/SuspendedArguments.scala | 0 .../compiler/pass/resolve/TypeFunctions.scala | 0 .../compiler/pass/resolve/TypeNames.scala | 0 .../pass/resolve/TypeSignatures.scala | 0 .../compiler/phase/BuiltinsIrBuilder.scala | 0 .../compiler/phase/ExportsResolution.scala | 0 .../enso/compiler/phase/ImportResolver.scala | 0 .../enso/compiler/refactoring/IRUtils.scala | 0 .../test/context/ChangesetBuilderTest.scala | 2 +- .../instrument/ReplDebuggerInstrument.java | 6 +- .../org/enso/interpreter/EnsoLanguage.java | 2 +- .../interpreter/node/ClosureRootNode.java | 2 +- .../enso/interpreter/node/EnsoRootNode.java | 31 +++++++- .../enso/interpreter/node/MethodRootNode.java | 2 +- .../node/callable/CaptureCallerInfoNode.java | 2 +- .../node/expression/debug/EvalNode.java | 2 +- .../node/scope/ReadLocalVariableNode.java | 22 +++--- .../org/enso/interpreter/runtime/Module.java | 2 +- .../runtime/callable/CallerInfo.java | 2 +- .../callable/atom/AtomConstructor.java | 2 +- .../runtime/scope/DebugLocalScope.java | 13 ++-- .../runtime/scope/FramePointer.java | 38 ---------- 
.../interpreter/runtime/IrToTruffle.scala | 8 +-- .../org/enso/compiler/test/CompilerTest.scala | 2 +- .../pass/analyse/DataflowAnalysisTest.scala | 2 +- .../pass/analyse/DemandAnalysisTest.scala | 2 +- .../test/pass/analyse/TailCallTest.scala | 2 +- .../test/pass/lint/UnusedBindingsTest.scala | 2 +- .../pass/optimise/LambdaConsolidateTest.scala | 2 +- .../pass/resolve/SuspendedArgumentsTest.scala | 2 +- 95 files changed, 104 insertions(+), 150 deletions(-) rename engine/{runtime => runtime-compiler}/src/main/java/org/enso/compiler/PackageRepositoryUtils.java (100%) rename engine/{runtime => runtime-compiler}/src/main/java/org/enso/compiler/context/CompilerContext.java (100%) rename engine/{runtime => runtime-compiler}/src/main/java/org/enso/compiler/context/ExportsMap.java (100%) create mode 100644 engine/runtime-compiler/src/main/java/org/enso/compiler/context/FramePointer.java rename engine/{runtime => runtime-compiler}/src/main/java/org/enso/compiler/context/SimpleUpdate.java (100%) rename engine/{runtime => runtime-compiler}/src/main/java/org/enso/compiler/exception/CompilationAbortedException.java (100%) rename engine/{runtime => runtime-compiler}/src/main/java/org/enso/compiler/pass/analyse/ExportSymbolAnalysis.java (100%) rename engine/{runtime => runtime-compiler}/src/main/java/org/enso/compiler/pass/analyse/PrivateModuleAnalysis.java (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/Compiler.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/CompilerResult.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/PackageRepository.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/Passes.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/context/ExportsBuilder.scala (100%) rename engine/{runtime => 
runtime-compiler}/src/main/scala/org/enso/compiler/context/FreshNameSupply.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/context/InlineContext.scala (98%) rename engine/{runtime/src/main/scala/org/enso/interpreter/runtime/scope => runtime-compiler/src/main/scala/org/enso/compiler/context}/LocalScope.scala (71%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/context/ModuleContext.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/context/ModuleExportsDiff.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/context/SuggestionBuilder.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/context/SuggestionDiff.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/data/BindingsMap.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/data/CompilerConfig.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/exception/BadPatternMatch.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/exception/UnhandledEntity.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/IRPass.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/Pass.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/PassConfiguration.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/PassManager.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/analyse/AliasAnalysis.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/analyse/AmbiguousImportsAnalysis.scala (100%) rename engine/{runtime => 
runtime-compiler}/src/main/scala/org/enso/compiler/pass/analyse/AutomaticParallelism.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/analyse/BindingAnalysis.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/analyse/CachePreferenceAnalysis.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/analyse/DataflowAnalysis.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/analyse/DemandAnalysis.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/analyse/GatherDiagnostics.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/analyse/ImportSymbolAnalysis.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/analyse/TailCall.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/desugar/ComplexType.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/desugar/FunctionBinding.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/desugar/GenerateMethodBodies.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/desugar/Imports.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/desugar/LambdaShorthandToLambda.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/desugar/NestedPatternMatch.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/desugar/OperatorToFunction.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/desugar/SectionsToBinOp.scala (100%) rename engine/{runtime => 
runtime-compiler}/src/main/scala/org/enso/compiler/pass/lint/ModuleNameConflicts.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/lint/NoSelfInStatic.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/lint/ShadowedPatternFields.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/lint/UnusedBindings.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/optimise/LambdaConsolidate.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/optimise/UnreachableMatchBranches.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/DocumentationComments.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/ExpressionAnnotations.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/FullyAppliedFunctionUses.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/FullyQualifiedNames.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/GenericAnnotations.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/GlobalNames.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/IgnoredBindings.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/MethodCalls.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/MethodDefinitions.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/ModuleAnnotations.scala (100%) rename engine/{runtime => 
runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/OverloadsResolution.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/Patterns.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/SuspendedArguments.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/TypeFunctions.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/TypeNames.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/pass/resolve/TypeSignatures.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/phase/BuiltinsIrBuilder.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/phase/ExportsResolution.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/phase/ImportResolver.scala (100%) rename engine/{runtime => runtime-compiler}/src/main/scala/org/enso/compiler/refactoring/IRUtils.scala (100%) delete mode 100644 engine/runtime/src/main/java/org/enso/interpreter/runtime/scope/FramePointer.java diff --git a/build.sbt b/build.sbt index fb264da255f0..255d2b8283ec 100644 --- a/build.sbt +++ b/build.sbt @@ -281,6 +281,7 @@ lazy val enso = (project in file(".")) launcher, downloader, `runtime-parser`, + `runtime-compiler`, `runtime-language-epb`, `runtime-instrument-common`, `runtime-instrument-id-execution`, @@ -1352,8 +1353,7 @@ lazy val runtime = (project in file("engine/runtime")) "org.typelevel" %% "cats-core" % catsVersion, "junit" % "junit" % junitVersion % Test, "com.github.sbt" % "junit-interface" % junitIfVersion % Test, - "org.hamcrest" % "hamcrest-all" % hamcrestVersion % Test, - "com.lihaoyi" %% "fansi" % fansiVersion + "org.hamcrest" % "hamcrest-all" % hamcrestVersion % Test ), Compile / compile / compileInputs := (Compile / compile / 
compileInputs) .dependsOn(CopyTruffleJAR.preCompileTask) @@ -1418,8 +1418,7 @@ lazy val runtime = (project in file("engine/runtime")) .dependsOn(`logging-truffle-connector`) .dependsOn(`polyglot-api`) .dependsOn(`text-buffer`) - .dependsOn(`runtime-parser`) - .dependsOn(pkg) + .dependsOn(`runtime-compiler`) .dependsOn(`connected-lock-manager`) .dependsOn(testkit % Test) .dependsOn(`logging-service-logback` % "test->test") @@ -1438,6 +1437,23 @@ lazy val `runtime-parser` = .dependsOn(syntax) .dependsOn(`syntax-rust-definition`) +lazy val `runtime-compiler` = + (project in file("engine/runtime-compiler")) + .settings( + frgaalJavaCompilerSetting, + instrumentationSettings, + libraryDependencies ++= Seq( + "junit" % "junit" % junitVersion % Test, + "com.github.sbt" % "junit-interface" % junitIfVersion % Test, + "org.scalatest" %% "scalatest" % scalatestVersion % Test, + "com.lihaoyi" %% "fansi" % fansiVersion + ) + ) + .dependsOn(`runtime-parser`) + .dependsOn(pkg) + .dependsOn(`polyglot-api`) + .dependsOn(editions) + lazy val `runtime-instrument-common` = (project in file("engine/runtime-instrument-common")) .configs(Benchmark) diff --git a/engine/runtime/src/main/java/org/enso/compiler/PackageRepositoryUtils.java b/engine/runtime-compiler/src/main/java/org/enso/compiler/PackageRepositoryUtils.java similarity index 100% rename from engine/runtime/src/main/java/org/enso/compiler/PackageRepositoryUtils.java rename to engine/runtime-compiler/src/main/java/org/enso/compiler/PackageRepositoryUtils.java diff --git a/engine/runtime/src/main/java/org/enso/compiler/context/CompilerContext.java b/engine/runtime-compiler/src/main/java/org/enso/compiler/context/CompilerContext.java similarity index 100% rename from engine/runtime/src/main/java/org/enso/compiler/context/CompilerContext.java rename to engine/runtime-compiler/src/main/java/org/enso/compiler/context/CompilerContext.java diff --git a/engine/runtime/src/main/java/org/enso/compiler/context/ExportsMap.java 
b/engine/runtime-compiler/src/main/java/org/enso/compiler/context/ExportsMap.java similarity index 100% rename from engine/runtime/src/main/java/org/enso/compiler/context/ExportsMap.java rename to engine/runtime-compiler/src/main/java/org/enso/compiler/context/ExportsMap.java diff --git a/engine/runtime-compiler/src/main/java/org/enso/compiler/context/FramePointer.java b/engine/runtime-compiler/src/main/java/org/enso/compiler/context/FramePointer.java new file mode 100644 index 000000000000..9e8671ca5609 --- /dev/null +++ b/engine/runtime-compiler/src/main/java/org/enso/compiler/context/FramePointer.java @@ -0,0 +1,7 @@ +package org.enso.compiler.context; + +/** + * A representation of a pointer into a stack frame at a given number of levels above the current. + */ +public record FramePointer(int parentLevel, int frameSlotIdx) { +} diff --git a/engine/runtime/src/main/java/org/enso/compiler/context/SimpleUpdate.java b/engine/runtime-compiler/src/main/java/org/enso/compiler/context/SimpleUpdate.java similarity index 100% rename from engine/runtime/src/main/java/org/enso/compiler/context/SimpleUpdate.java rename to engine/runtime-compiler/src/main/java/org/enso/compiler/context/SimpleUpdate.java diff --git a/engine/runtime/src/main/java/org/enso/compiler/exception/CompilationAbortedException.java b/engine/runtime-compiler/src/main/java/org/enso/compiler/exception/CompilationAbortedException.java similarity index 100% rename from engine/runtime/src/main/java/org/enso/compiler/exception/CompilationAbortedException.java rename to engine/runtime-compiler/src/main/java/org/enso/compiler/exception/CompilationAbortedException.java diff --git a/engine/runtime/src/main/java/org/enso/compiler/pass/analyse/ExportSymbolAnalysis.java b/engine/runtime-compiler/src/main/java/org/enso/compiler/pass/analyse/ExportSymbolAnalysis.java similarity index 100% rename from engine/runtime/src/main/java/org/enso/compiler/pass/analyse/ExportSymbolAnalysis.java rename to 
engine/runtime-compiler/src/main/java/org/enso/compiler/pass/analyse/ExportSymbolAnalysis.java diff --git a/engine/runtime/src/main/java/org/enso/compiler/pass/analyse/PrivateModuleAnalysis.java b/engine/runtime-compiler/src/main/java/org/enso/compiler/pass/analyse/PrivateModuleAnalysis.java similarity index 100% rename from engine/runtime/src/main/java/org/enso/compiler/pass/analyse/PrivateModuleAnalysis.java rename to engine/runtime-compiler/src/main/java/org/enso/compiler/pass/analyse/PrivateModuleAnalysis.java diff --git a/engine/runtime/src/main/scala/org/enso/compiler/Compiler.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/Compiler.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/Compiler.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/Compiler.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/CompilerResult.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/CompilerResult.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/CompilerResult.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/CompilerResult.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/PackageRepository.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/PackageRepository.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/PackageRepository.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/PackageRepository.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/Passes.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/Passes.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/Passes.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/Passes.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/context/ExportsBuilder.scala 
b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/ExportsBuilder.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/context/ExportsBuilder.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/context/ExportsBuilder.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/context/FreshNameSupply.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/FreshNameSupply.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/context/FreshNameSupply.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/context/FreshNameSupply.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/context/InlineContext.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/InlineContext.scala similarity index 98% rename from engine/runtime/src/main/scala/org/enso/compiler/context/InlineContext.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/context/InlineContext.scala index 8477a78b5472..bf15aaa806d9 100644 --- a/engine/runtime/src/main/scala/org/enso/compiler/context/InlineContext.scala +++ b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/InlineContext.scala @@ -1,9 +1,9 @@ package org.enso.compiler.context import org.enso.compiler.PackageRepository +import org.enso.compiler.context.LocalScope import org.enso.compiler.data.CompilerConfig import org.enso.compiler.pass.PassConfiguration -import org.enso.interpreter.runtime.scope.LocalScope /** A type containing the information about the execution context for an inline * expression. 
diff --git a/engine/runtime/src/main/scala/org/enso/interpreter/runtime/scope/LocalScope.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/LocalScope.scala similarity index 71% rename from engine/runtime/src/main/scala/org/enso/interpreter/runtime/scope/LocalScope.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/context/LocalScope.scala index 27942b4ddc13..099a3beb0412 100644 --- a/engine/runtime/src/main/scala/org/enso/interpreter/runtime/scope/LocalScope.scala +++ b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/LocalScope.scala @@ -1,6 +1,5 @@ -package org.enso.interpreter.runtime.scope +package org.enso.compiler.context -import com.oracle.truffle.api.frame.{FrameDescriptor, FrameSlotKind} import org.enso.compiler.pass.analyse.AliasAnalysis.Graph import org.enso.compiler.pass.analyse.AliasAnalysis.Graph.{ Id, @@ -8,11 +7,6 @@ import org.enso.compiler.pass.analyse.AliasAnalysis.Graph.{ Scope => AliasScope } import org.enso.compiler.pass.analyse.{AliasAnalysis, DataflowAnalysis} -import org.enso.interpreter.runtime.error.DataflowError -import org.enso.interpreter.runtime.scope.LocalScope.{ - internalSlots, - monadicStateSlotName -} import scala.jdk.CollectionConverters._ @@ -45,7 +39,6 @@ class LocalScope( final val flattenToParent: Boolean = false, private val parentFrameSlotIdxs: Map[Graph.Id, Int] = Map() ) { - lazy val frameDescriptor: FrameDescriptor = buildFrameDescriptor() private lazy val localFrameSlotIdxs: Map[Graph.Id, Int] = gatherLocalFrameSlotIdxs() @@ -82,20 +75,6 @@ class LocalScope( ) } - /** Returns frame slot index to a monadic state, which is considered an internal slot. 
- * @return - */ - def monadicStateSlotIdx: Int = { - internalSlots.zipWithIndex - .find { case ((_, name), _) => name == monadicStateSlotName } - .map(_._2) - .getOrElse( - throw new IllegalStateException( - s"$monadicStateSlotName slot should be present in every frame descriptor" - ) - ) - } - /** Get a frame slot index for a given identifier. * * The identifier must be present in the local scope. @@ -138,39 +117,6 @@ class LocalScope( def flattenBindings: java.util.Map[String, FramePointer] = flattenBindingsWithLevel(0).asJava - private def addInternalSlots( - descriptorBuilder: FrameDescriptor.Builder - ): Unit = { - for ((slotKind, name) <- internalSlots) { - descriptorBuilder.addSlot(slotKind, name, null) - } - } - - /** Builds a [[FrameDescriptor]] from the alias analysis scope metadata for the local scope. - * See [[AliasAnalysis.Graph.Scope.allDefinitions]]. - * - * @return [[FrameDescriptor]] built from the variable definitions in the local scope. - */ - private def buildFrameDescriptor(): FrameDescriptor = { - val descriptorBuilder = FrameDescriptor.newBuilder() - addInternalSlots(descriptorBuilder) - for (definition <- scope.allDefinitions) { - val returnedFrameIdx = - descriptorBuilder.addSlot( - FrameSlotKind.Illegal, - definition.symbol, - null - ) - assert(localFrameSlotIdxs(definition.id) == returnedFrameIdx) - } - descriptorBuilder.defaultValue(DataflowError.UNINITIALIZED) - val frameDescriptor = descriptorBuilder.build() - assert( - internalSlots.length + localFrameSlotIdxs.size == frameDescriptor.getNumberOfSlots - ) - frameDescriptor - } - /** Gather local variables from the alias scope information. * Does not include any variables from the parent scopes. 
* @return Mapping of local variable identifiers to their @@ -179,7 +125,7 @@ class LocalScope( */ private def gatherLocalFrameSlotIdxs(): Map[Id, Int] = { scope.allDefinitions.zipWithIndex.map { case (definition, i) => - definition.id -> (i + internalSlots.size) + definition.id -> (i + LocalScope.internalSlotsSize) }.toMap } @@ -207,13 +153,12 @@ class LocalScope( } override def toString: String = { - s"LocalScope(${frameDescriptor.toString})" + s"LocalScope(${allFrameSlotIdxs.keySet})" } } object LocalScope { - /** Constructs a local scope for an - * [[org.enso.interpreter.node.EnsoRootNode]]. + /** Constructs a local scope for an [[EnsoRootNode]]. * * @return a defaulted local scope */ @@ -227,13 +172,11 @@ object LocalScope { ) } - private val monadicStateSlotName = "<>" - /** Internal slots are prepended at the beginning of every [[FrameDescriptor]]. * Every tuple of the list denotes frame slot kind and its name. * Note that `info` for a frame slot is not used by Enso. */ - def internalSlots: List[(FrameSlotKind, String)] = List( - (FrameSlotKind.Object, monadicStateSlotName) - ) + def monadicStateSlotName: String = "<>" + private def internalSlotsSize: Int = 1 + } diff --git a/engine/runtime/src/main/scala/org/enso/compiler/context/ModuleContext.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/ModuleContext.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/context/ModuleContext.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/context/ModuleContext.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/context/ModuleExportsDiff.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/ModuleExportsDiff.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/context/ModuleExportsDiff.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/context/ModuleExportsDiff.scala diff --git 
a/engine/runtime/src/main/scala/org/enso/compiler/context/SuggestionBuilder.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/SuggestionBuilder.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/context/SuggestionBuilder.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/context/SuggestionBuilder.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/context/SuggestionDiff.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/context/SuggestionDiff.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/context/SuggestionDiff.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/context/SuggestionDiff.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/data/BindingsMap.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/data/BindingsMap.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/data/BindingsMap.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/data/BindingsMap.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/data/CompilerConfig.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/data/CompilerConfig.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/data/CompilerConfig.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/data/CompilerConfig.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/exception/BadPatternMatch.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/exception/BadPatternMatch.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/exception/BadPatternMatch.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/exception/BadPatternMatch.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/exception/UnhandledEntity.scala 
b/engine/runtime-compiler/src/main/scala/org/enso/compiler/exception/UnhandledEntity.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/exception/UnhandledEntity.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/exception/UnhandledEntity.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/IRPass.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/IRPass.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/IRPass.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/IRPass.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/Pass.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/Pass.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/Pass.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/Pass.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/PassConfiguration.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/PassConfiguration.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/PassConfiguration.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/PassConfiguration.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/PassManager.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/PassManager.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/PassManager.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/PassManager.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/AliasAnalysis.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/AliasAnalysis.scala similarity index 100% rename from 
engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/AliasAnalysis.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/AliasAnalysis.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/AmbiguousImportsAnalysis.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/AmbiguousImportsAnalysis.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/AmbiguousImportsAnalysis.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/AmbiguousImportsAnalysis.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/AutomaticParallelism.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/AutomaticParallelism.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/AutomaticParallelism.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/AutomaticParallelism.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/BindingAnalysis.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/BindingAnalysis.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/BindingAnalysis.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/BindingAnalysis.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/CachePreferenceAnalysis.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/CachePreferenceAnalysis.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/CachePreferenceAnalysis.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/CachePreferenceAnalysis.scala diff --git 
a/engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/DataflowAnalysis.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/DataflowAnalysis.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/DataflowAnalysis.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/DataflowAnalysis.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/DemandAnalysis.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/DemandAnalysis.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/DemandAnalysis.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/DemandAnalysis.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/GatherDiagnostics.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/GatherDiagnostics.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/GatherDiagnostics.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/GatherDiagnostics.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/ImportSymbolAnalysis.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/ImportSymbolAnalysis.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/ImportSymbolAnalysis.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/ImportSymbolAnalysis.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/TailCall.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/TailCall.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/analyse/TailCall.scala rename to 
engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/analyse/TailCall.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/ComplexType.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/ComplexType.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/ComplexType.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/ComplexType.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/FunctionBinding.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/FunctionBinding.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/FunctionBinding.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/FunctionBinding.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/GenerateMethodBodies.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/GenerateMethodBodies.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/GenerateMethodBodies.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/GenerateMethodBodies.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/Imports.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/Imports.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/Imports.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/Imports.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/LambdaShorthandToLambda.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/LambdaShorthandToLambda.scala similarity index 100% rename from 
engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/LambdaShorthandToLambda.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/LambdaShorthandToLambda.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/NestedPatternMatch.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/NestedPatternMatch.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/NestedPatternMatch.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/NestedPatternMatch.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/OperatorToFunction.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/OperatorToFunction.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/OperatorToFunction.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/OperatorToFunction.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/SectionsToBinOp.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/SectionsToBinOp.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/desugar/SectionsToBinOp.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/desugar/SectionsToBinOp.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/lint/ModuleNameConflicts.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/lint/ModuleNameConflicts.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/lint/ModuleNameConflicts.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/lint/ModuleNameConflicts.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/lint/NoSelfInStatic.scala 
b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/lint/NoSelfInStatic.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/lint/NoSelfInStatic.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/lint/NoSelfInStatic.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/lint/ShadowedPatternFields.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/lint/ShadowedPatternFields.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/lint/ShadowedPatternFields.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/lint/ShadowedPatternFields.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/lint/UnusedBindings.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/lint/UnusedBindings.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/lint/UnusedBindings.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/lint/UnusedBindings.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/optimise/LambdaConsolidate.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/optimise/LambdaConsolidate.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/optimise/LambdaConsolidate.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/optimise/LambdaConsolidate.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/optimise/UnreachableMatchBranches.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/optimise/UnreachableMatchBranches.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/optimise/UnreachableMatchBranches.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/optimise/UnreachableMatchBranches.scala diff --git 
a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/DocumentationComments.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/DocumentationComments.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/DocumentationComments.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/DocumentationComments.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/ExpressionAnnotations.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/ExpressionAnnotations.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/ExpressionAnnotations.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/ExpressionAnnotations.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/FullyAppliedFunctionUses.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/FullyAppliedFunctionUses.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/FullyAppliedFunctionUses.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/FullyAppliedFunctionUses.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/FullyQualifiedNames.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/FullyQualifiedNames.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/FullyQualifiedNames.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/FullyQualifiedNames.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/GenericAnnotations.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/GenericAnnotations.scala similarity index 100% rename from 
engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/GenericAnnotations.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/GenericAnnotations.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/GlobalNames.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/GlobalNames.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/GlobalNames.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/GlobalNames.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/IgnoredBindings.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/IgnoredBindings.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/IgnoredBindings.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/IgnoredBindings.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/MethodCalls.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/MethodCalls.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/MethodCalls.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/MethodCalls.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/MethodDefinitions.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/MethodDefinitions.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/MethodDefinitions.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/MethodDefinitions.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/ModuleAnnotations.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/ModuleAnnotations.scala 
similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/ModuleAnnotations.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/ModuleAnnotations.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/OverloadsResolution.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/OverloadsResolution.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/OverloadsResolution.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/OverloadsResolution.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/Patterns.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/Patterns.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/Patterns.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/Patterns.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/SuspendedArguments.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/SuspendedArguments.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/SuspendedArguments.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/SuspendedArguments.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeFunctions.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/TypeFunctions.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeFunctions.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/TypeFunctions.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeNames.scala 
b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/TypeNames.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeNames.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/TypeNames.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeSignatures.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/TypeSignatures.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/pass/resolve/TypeSignatures.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/pass/resolve/TypeSignatures.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/phase/BuiltinsIrBuilder.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/phase/BuiltinsIrBuilder.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/phase/BuiltinsIrBuilder.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/phase/BuiltinsIrBuilder.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/phase/ExportsResolution.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/phase/ExportsResolution.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/phase/ExportsResolution.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/phase/ExportsResolution.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/phase/ImportResolver.scala b/engine/runtime-compiler/src/main/scala/org/enso/compiler/phase/ImportResolver.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/phase/ImportResolver.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/phase/ImportResolver.scala diff --git a/engine/runtime/src/main/scala/org/enso/compiler/refactoring/IRUtils.scala 
b/engine/runtime-compiler/src/main/scala/org/enso/compiler/refactoring/IRUtils.scala similarity index 100% rename from engine/runtime/src/main/scala/org/enso/compiler/refactoring/IRUtils.scala rename to engine/runtime-compiler/src/main/scala/org/enso/compiler/refactoring/IRUtils.scala diff --git a/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala b/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala index 90d843f38833..feef93103b41 100644 --- a/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala +++ b/engine/runtime-instrument-common/src/test/scala/org/enso/compiler/test/context/ChangesetBuilderTest.scala @@ -14,7 +14,7 @@ import org.enso.compiler.core.ir.expression.errors import org.enso.compiler.core.ir.module.scope.definition import org.enso.compiler.pass.PassManager import org.enso.compiler.test.CompilerTest -import org.enso.interpreter.runtime.scope.LocalScope +import org.enso.compiler.context.LocalScope import org.enso.text.buffer.Rope import org.enso.text.editing.JavaEditorAdapter import org.enso.text.editing.model.{Position, Range, TextEdit} diff --git a/engine/runtime-instrument-repl-debugger/src/main/java/org/enso/interpreter/instrument/ReplDebuggerInstrument.java b/engine/runtime-instrument-repl-debugger/src/main/java/org/enso/interpreter/instrument/ReplDebuggerInstrument.java index 4737e04ddbab..9ec70f210129 100644 --- a/engine/runtime-instrument-repl-debugger/src/main/java/org/enso/interpreter/instrument/ReplDebuggerInstrument.java +++ b/engine/runtime-instrument-repl-debugger/src/main/java/org/enso/interpreter/instrument/ReplDebuggerInstrument.java @@ -17,6 +17,7 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; +import org.enso.compiler.context.FramePointer; import org.enso.interpreter.node.expression.builtin.debug.DebugBreakpointNode; import 
org.enso.interpreter.node.expression.builtin.text.util.ToJavaStringNode; import org.enso.interpreter.node.expression.debug.CaptureResultScopeNode; @@ -25,7 +26,6 @@ import org.enso.interpreter.runtime.callable.CallerInfo; import org.enso.interpreter.runtime.callable.function.Function; import org.enso.interpreter.runtime.data.text.Text; -import org.enso.interpreter.runtime.scope.FramePointer; import org.enso.interpreter.runtime.state.State; import org.enso.polyglot.debugger.DebugServerInfo; import org.graalvm.options.OptionDescriptor; @@ -105,12 +105,12 @@ private ReplExecutionEventNodeImpl( } private Object getValue(MaterializedFrame frame, FramePointer ptr) { - return getProperFrame(frame, ptr).getValue(ptr.getFrameSlotIdx()); + return getProperFrame(frame, ptr).getValue(ptr.frameSlotIdx()); } private MaterializedFrame getProperFrame(MaterializedFrame frame, FramePointer ptr) { MaterializedFrame currentFrame = frame; - for (int i = 0; i < ptr.getParentLevel(); i++) { + for (int i = 0; i < ptr.parentLevel(); i++) { currentFrame = Function.ArgumentsHelper.getLocalScope(currentFrame.getArguments()); } return currentFrame; diff --git a/engine/runtime/src/main/java/org/enso/interpreter/EnsoLanguage.java b/engine/runtime/src/main/java/org/enso/interpreter/EnsoLanguage.java index dc3eb6c659e2..13e9215dead4 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/EnsoLanguage.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/EnsoLanguage.java @@ -24,7 +24,7 @@ import org.enso.interpreter.node.ProgramRootNode; import org.enso.interpreter.runtime.EnsoContext; import org.enso.interpreter.runtime.IrToTruffle; -import org.enso.interpreter.runtime.scope.LocalScope; +import org.enso.compiler.context.LocalScope; import org.enso.interpreter.runtime.state.ExecutionEnvironment; import org.enso.interpreter.runtime.tag.AvoidIdInstrumentationTag; import org.enso.interpreter.runtime.tag.IdentifiedTag; diff --git 
a/engine/runtime/src/main/java/org/enso/interpreter/node/ClosureRootNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/ClosureRootNode.java index 79623899974e..f2ac6c9653fb 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/ClosureRootNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/ClosureRootNode.java @@ -5,8 +5,8 @@ import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.NodeInfo; import com.oracle.truffle.api.source.SourceSection; +import org.enso.compiler.context.LocalScope; import org.enso.interpreter.EnsoLanguage; -import org.enso.interpreter.runtime.scope.LocalScope; import org.enso.interpreter.runtime.scope.ModuleScope; /** diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/EnsoRootNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/EnsoRootNode.java index 7fb5b58e1425..2763af83190e 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/EnsoRootNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/EnsoRootNode.java @@ -4,11 +4,18 @@ import com.oracle.truffle.api.nodes.RootNode; import com.oracle.truffle.api.source.Source; import com.oracle.truffle.api.source.SourceSection; + import java.util.Objects; + import org.enso.interpreter.EnsoLanguage; import org.enso.interpreter.runtime.EnsoContext; -import org.enso.interpreter.runtime.scope.LocalScope; +import org.enso.interpreter.runtime.error.DataflowError; +import org.enso.compiler.context.LocalScope; import org.enso.interpreter.runtime.scope.ModuleScope; +import org.enso.interpreter.util.ScalaConversions; + +import com.oracle.truffle.api.frame.FrameDescriptor; +import com.oracle.truffle.api.frame.FrameSlotKind; /** A common base class for all kinds of root node in Enso. 
*/ @NodeInfo(shortName = "Root", description = "A root node for Enso computations") @@ -35,7 +42,7 @@ protected EnsoRootNode( ModuleScope moduleScope, String name, SourceSection sourceSection) { - super(language, localScope.frameDescriptor()); + super(language, buildFrameDescriptor(localScope)); Objects.requireNonNull(language); this.name = name; this.localScope = localScope; @@ -49,6 +56,26 @@ protected EnsoRootNode( this.sourceLength = sourceSection == null ? NO_SOURCE : sourceSection.getCharLength(); } + /** + * Builds a {@link FrameDescriptor} from the alias analysis scope metadata + * for the local scope. See [[AliasAnalysis.Graph.Scope.allDefinitions]]. + * + * @return {@link FrameDescriptor} built from the variable definitions in + * the local localScope. + */ + private static FrameDescriptor buildFrameDescriptor(LocalScope localScope) { + var descriptorBuilder = FrameDescriptor.newBuilder(); + descriptorBuilder.addSlot(FrameSlotKind.Object, LocalScope.monadicStateSlotName(), null); + for (var definition : ScalaConversions.asJava(localScope.scope().allDefinitions())) { + descriptorBuilder.addSlot( + FrameSlotKind.Illegal, definition.symbol(), null + ); + } + descriptorBuilder.defaultValue(DataflowError.UNINITIALIZED); + var frameDescriptor = descriptorBuilder.build(); + return frameDescriptor; + } + /** * Gets a reference to the language context associated with this program. 
* diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/MethodRootNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/MethodRootNode.java index 3c24b43a2457..34e71b2bb6bd 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/MethodRootNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/MethodRootNode.java @@ -3,12 +3,12 @@ import java.util.function.Supplier; import org.enso.compiler.core.CompilerError; +import org.enso.compiler.context.LocalScope; import org.enso.interpreter.EnsoLanguage; import org.enso.interpreter.runtime.EnsoContext; import org.enso.interpreter.runtime.data.Type; import org.enso.interpreter.runtime.data.text.Text; import org.enso.interpreter.runtime.error.PanicException; -import org.enso.interpreter.runtime.scope.LocalScope; import org.enso.interpreter.runtime.scope.ModuleScope; import com.oracle.truffle.api.CompilerDirectives; diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/CaptureCallerInfoNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/CaptureCallerInfoNode.java index 8dfd54fd7d8c..e5020fde3d32 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/callable/CaptureCallerInfoNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/callable/CaptureCallerInfoNode.java @@ -10,9 +10,9 @@ import com.oracle.truffle.api.frame.MaterializedFrame; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.nodes.NodeInfo; +import org.enso.compiler.context.LocalScope; import org.enso.interpreter.node.EnsoRootNode; import org.enso.interpreter.runtime.callable.CallerInfo; -import org.enso.interpreter.runtime.scope.LocalScope; import org.enso.interpreter.runtime.scope.ModuleScope; /** diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/debug/EvalNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/debug/EvalNode.java index 1e8221acbc2d..e0c036b62d11 
100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/expression/debug/EvalNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/expression/debug/EvalNode.java @@ -8,6 +8,7 @@ import com.oracle.truffle.api.dsl.Specialization; import com.oracle.truffle.api.nodes.NodeInfo; import org.enso.compiler.context.InlineContext; +import org.enso.compiler.context.LocalScope; import org.enso.interpreter.Constants; import org.enso.interpreter.node.BaseNode; import org.enso.interpreter.node.ClosureRootNode; @@ -18,7 +19,6 @@ import org.enso.interpreter.runtime.callable.CallerInfo; import org.enso.interpreter.runtime.callable.function.Function; import org.enso.interpreter.runtime.data.text.Text; -import org.enso.interpreter.runtime.scope.LocalScope; import org.enso.interpreter.runtime.scope.ModuleScope; import org.enso.interpreter.runtime.state.State; diff --git a/engine/runtime/src/main/java/org/enso/interpreter/node/scope/ReadLocalVariableNode.java b/engine/runtime/src/main/java/org/enso/interpreter/node/scope/ReadLocalVariableNode.java index 24e20ca24e13..63b73723e508 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/node/scope/ReadLocalVariableNode.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/node/scope/ReadLocalVariableNode.java @@ -8,9 +8,9 @@ import com.oracle.truffle.api.frame.VirtualFrame; import com.oracle.truffle.api.nodes.ExplodeLoop; import com.oracle.truffle.api.nodes.NodeInfo; +import org.enso.compiler.context.FramePointer; import org.enso.interpreter.node.ExpressionNode; import org.enso.interpreter.runtime.callable.function.Function; -import org.enso.interpreter.runtime.scope.FramePointer; /** * Reads from a local target (variable or call target). 
@@ -45,10 +45,10 @@ public static ReadLocalVariableNode build(FramePointer pointer) { */ @Specialization(rewriteOn = FrameSlotTypeException.class) protected long readLong(VirtualFrame frame) throws FrameSlotTypeException { - if (getFramePointer().getParentLevel() == 0) - return frame.getLong(getFramePointer().getFrameSlotIdx()); + if (getFramePointer().parentLevel() == 0) + return frame.getLong(getFramePointer().frameSlotIdx()); MaterializedFrame currentFrame = getProperFrame(frame); - return currentFrame.getLong(getFramePointer().getFrameSlotIdx()); + return currentFrame.getLong(getFramePointer().frameSlotIdx()); } /** @@ -61,18 +61,18 @@ protected long readLong(VirtualFrame frame) throws FrameSlotTypeException { */ @Specialization(rewriteOn = FrameSlotTypeException.class) protected Object readGeneric(VirtualFrame frame) throws FrameSlotTypeException { - if (getFramePointer().getParentLevel() == 0) - return frame.getObject(getFramePointer().getFrameSlotIdx()); + if (getFramePointer().parentLevel() == 0) + return frame.getObject(getFramePointer().frameSlotIdx()); MaterializedFrame currentFrame = getProperFrame(frame); - return currentFrame.getObject(getFramePointer().getFrameSlotIdx()); + return currentFrame.getObject(getFramePointer().frameSlotIdx()); } @Specialization protected Object readGenericValue(VirtualFrame frame) { - if (getFramePointer().getParentLevel() == 0) - return frame.getValue(getFramePointer().getFrameSlotIdx()); + if (getFramePointer().parentLevel() == 0) + return frame.getValue(getFramePointer().frameSlotIdx()); MaterializedFrame currentFrame = getProperFrame(frame); - return currentFrame.getValue(getFramePointer().getFrameSlotIdx()); + return currentFrame.getValue(getFramePointer().frameSlotIdx()); } /** @@ -97,7 +97,7 @@ public MaterializedFrame getParentFrame(Frame frame) { @ExplodeLoop public MaterializedFrame getProperFrame(Frame frame) { MaterializedFrame currentFrame = getParentFrame(frame); - for (int i = 1; i < 
getFramePointer().getParentLevel(); i++) { + for (int i = 1; i < getFramePointer().parentLevel(); i++) { currentFrame = getParentFrame(currentFrame); } return currentFrame; diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/Module.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/Module.java index 9c6d56a65e54..366b9b85db81 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/Module.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/Module.java @@ -23,6 +23,7 @@ import java.util.WeakHashMap; import java.util.logging.Level; import org.enso.compiler.context.CompilerContext; +import org.enso.compiler.context.LocalScope; import org.enso.compiler.context.SimpleUpdate; import org.enso.compiler.core.IR; import org.enso.compiler.core.ir.Expression; @@ -37,7 +38,6 @@ import org.enso.interpreter.runtime.data.Type; import org.enso.interpreter.runtime.data.text.Text; import org.enso.interpreter.runtime.data.vector.ArrayLikeHelpers; -import org.enso.interpreter.runtime.scope.LocalScope; import org.enso.interpreter.runtime.scope.ModuleScope; import org.enso.interpreter.runtime.type.Types; import org.enso.pkg.Package; diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/callable/CallerInfo.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/callable/CallerInfo.java index f6411bcdf49d..e16d50591845 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/callable/CallerInfo.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/callable/CallerInfo.java @@ -1,7 +1,7 @@ package org.enso.interpreter.runtime.callable; import com.oracle.truffle.api.frame.MaterializedFrame; -import org.enso.interpreter.runtime.scope.LocalScope; +import org.enso.compiler.context.LocalScope; import org.enso.interpreter.runtime.scope.ModuleScope; /** diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/callable/atom/AtomConstructor.java 
b/engine/runtime/src/main/java/org/enso/interpreter/runtime/callable/atom/AtomConstructor.java index 38a8a2cbc2ee..54dc3eb334d1 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/callable/atom/AtomConstructor.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/callable/atom/AtomConstructor.java @@ -13,6 +13,7 @@ import com.oracle.truffle.api.source.SourceSection; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReentrantLock; +import org.enso.compiler.context.LocalScope; import org.enso.interpreter.EnsoLanguage; import org.enso.interpreter.node.ClosureRootNode; import org.enso.interpreter.node.ExpressionNode; @@ -29,7 +30,6 @@ import org.enso.interpreter.runtime.data.EnsoObject; import org.enso.interpreter.runtime.data.Type; import org.enso.interpreter.runtime.library.dispatch.TypesLibrary; -import org.enso.interpreter.runtime.scope.LocalScope; import org.enso.interpreter.runtime.scope.ModuleScope; import org.enso.pkg.QualifiedName; diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/scope/DebugLocalScope.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/scope/DebugLocalScope.java index 570007eeb9bb..4eb5cad9cd5b 100644 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/scope/DebugLocalScope.java +++ b/engine/runtime/src/main/java/org/enso/interpreter/runtime/scope/DebugLocalScope.java @@ -16,6 +16,7 @@ import java.util.Map; import java.util.Map.Entry; import java.util.stream.Collectors; +import org.enso.compiler.context.FramePointer; import org.enso.interpreter.EnsoLanguage; import org.enso.interpreter.node.EnsoRootNode; import org.enso.interpreter.runtime.callable.function.Function; @@ -100,9 +101,9 @@ private static List> gatherBindingsByLevels(Map> bindingsByLevels = new ArrayList<>(maxParentLevel + 1); @@ -110,7 +111,7 @@ private static List> gatherBindingsByLevels(Map levelBindings = bindings.entrySet().stream() - .filter(entry -> 
entry.getValue().getParentLevel() == finalLevel) + .filter(entry -> entry.getValue().parentLevel() == finalLevel) .map(Entry::getKey) .collect(Collectors.toList()); bindingsByLevels.add(levelBindings); @@ -253,18 +254,18 @@ public String toString() { } private Object getValue(MaterializedFrame frame, FramePointer ptr) { - return ptr == null ? null : getProperFrame(frame, ptr).getValue(ptr.getFrameSlotIdx()); + return ptr == null ? null : getProperFrame(frame, ptr).getValue(ptr.frameSlotIdx()); } private void setValue(MaterializedFrame frame, FramePointer ptr, Object value) { assert ptr != null; MaterializedFrame properFrame = getProperFrame(frame, ptr); - properFrame.setObject(ptr.getFrameSlotIdx(), value); + properFrame.setObject(ptr.frameSlotIdx(), value); } private MaterializedFrame getProperFrame(MaterializedFrame frame, FramePointer ptr) { MaterializedFrame currentFrame = frame; - for (int i = 0; i < ptr.getParentLevel(); i++) { + for (int i = 0; i < ptr.parentLevel(); i++) { currentFrame = Function.ArgumentsHelper.getLocalScope(currentFrame.getArguments()); } return currentFrame; diff --git a/engine/runtime/src/main/java/org/enso/interpreter/runtime/scope/FramePointer.java b/engine/runtime/src/main/java/org/enso/interpreter/runtime/scope/FramePointer.java deleted file mode 100644 index 592133d9660c..000000000000 --- a/engine/runtime/src/main/java/org/enso/interpreter/runtime/scope/FramePointer.java +++ /dev/null @@ -1,38 +0,0 @@ -package org.enso.interpreter.runtime.scope; - -/** - * A representation of a pointer into a stack frame at a given number of levels above the current. - */ -public class FramePointer { - private final int parentLevel; - private final int frameSlotIdx; - - /** - * A representation of a frame slot at a given level above the current frame. 
- * - * @param parentLevel the number of parents to move from the current frame to get here - * @param frameSlotIdx the index of the slot in the n-th parent frame - */ - public FramePointer(int parentLevel, int frameSlotIdx) { - this.parentLevel = parentLevel; - this.frameSlotIdx = frameSlotIdx; - } - - /** - * Gets the parent level. - * - * @return the parent level represented by this {@code FramePointer} - */ - public int getParentLevel() { - return parentLevel; - } - - /** - * Gets the index of the frame slot. - * - * @return the frame slot index represented by this {@code FramePointer} - */ - public int getFrameSlotIdx() { - return frameSlotIdx; - } -} diff --git a/engine/runtime/src/main/scala/org/enso/interpreter/runtime/IrToTruffle.scala b/engine/runtime/src/main/scala/org/enso/interpreter/runtime/IrToTruffle.scala index ed27f4ca48d7..d3b9f759c5d4 100644 --- a/engine/runtime/src/main/scala/org/enso/interpreter/runtime/IrToTruffle.scala +++ b/engine/runtime/src/main/scala/org/enso/interpreter/runtime/IrToTruffle.scala @@ -3,6 +3,8 @@ package org.enso.interpreter.runtime import com.oracle.truffle.api.source.{Source, SourceSection} import com.oracle.truffle.api.interop.InteropLibrary import org.enso.compiler.context.CompilerContext +import org.enso.compiler.context.FramePointer +import org.enso.compiler.context.LocalScope import org.enso.compiler.core.CompilerError import org.enso.compiler.core.ConstantsNames import org.enso.compiler.core.Implicits.AsMetadata @@ -104,11 +106,7 @@ import org.enso.interpreter.runtime.callable.{ } import org.enso.interpreter.runtime.data.Type import org.enso.interpreter.runtime.data.text.Text -import org.enso.interpreter.runtime.scope.{ - FramePointer, - LocalScope, - ModuleScope -} +import org.enso.interpreter.runtime.scope.{ModuleScope} import org.enso.interpreter.{Constants, EnsoLanguage} import java.math.BigInteger diff --git a/engine/runtime/src/test/scala/org/enso/compiler/test/CompilerTest.scala 
b/engine/runtime/src/test/scala/org/enso/compiler/test/CompilerTest.scala index 7f4194a93b4e..505b0cded624 100644 --- a/engine/runtime/src/test/scala/org/enso/compiler/test/CompilerTest.scala +++ b/engine/runtime/src/test/scala/org/enso/compiler/test/CompilerTest.scala @@ -15,7 +15,7 @@ import org.scalatest.matchers.should.Matchers import org.scalatest.wordspec.AnyWordSpecLike import org.enso.interpreter.runtime import org.enso.interpreter.runtime.ModuleTestUtils -import org.enso.interpreter.runtime.scope.LocalScope +import org.enso.compiler.context.LocalScope import org.enso.pkg.QualifiedName import org.enso.polyglot.CompilationStage diff --git a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/DataflowAnalysisTest.scala b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/DataflowAnalysisTest.scala index 8ebc0a524000..943e2501ff48 100644 --- a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/DataflowAnalysisTest.scala +++ b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/DataflowAnalysisTest.scala @@ -26,7 +26,7 @@ import org.enso.compiler.pass.analyse.DataflowAnalysis.{ import org.enso.compiler.pass.analyse.{AliasAnalysis, DataflowAnalysis} import org.enso.compiler.pass.{PassConfiguration, PassGroup, PassManager} import org.enso.compiler.test.CompilerTest -import org.enso.interpreter.runtime.scope.LocalScope +import org.enso.compiler.context.LocalScope import org.enso.interpreter.test.Metadata import org.scalatest.Assertion diff --git a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/DemandAnalysisTest.scala b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/DemandAnalysisTest.scala index 56ccaad06f1f..6fd9e7b33464 100644 --- a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/DemandAnalysisTest.scala +++ b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/DemandAnalysisTest.scala @@ -15,7 +15,7 @@ import 
org.enso.compiler.pass.PassConfiguration._ import org.enso.compiler.pass.analyse.{AliasAnalysis, DemandAnalysis} import org.enso.compiler.pass.{PassConfiguration, PassGroup, PassManager} import org.enso.compiler.test.CompilerTest -import org.enso.interpreter.runtime.scope.LocalScope +import org.enso.compiler.context.LocalScope class DemandAnalysisTest extends CompilerTest { diff --git a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/TailCallTest.scala b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/TailCallTest.scala index 00c970cbd304..cee464602c1a 100644 --- a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/TailCallTest.scala +++ b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/analyse/TailCallTest.scala @@ -18,7 +18,7 @@ import org.enso.compiler.pass.analyse.TailCall.TailPosition import org.enso.compiler.pass.analyse.{AliasAnalysis, TailCall} import org.enso.compiler.pass.{PassConfiguration, PassGroup, PassManager} import org.enso.compiler.test.CompilerTest -import org.enso.interpreter.runtime.scope.LocalScope +import org.enso.compiler.context.LocalScope class TailCallTest extends CompilerTest { diff --git a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/lint/UnusedBindingsTest.scala b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/lint/UnusedBindingsTest.scala index f582de5d9015..b6983880f729 100644 --- a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/lint/UnusedBindingsTest.scala +++ b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/lint/UnusedBindingsTest.scala @@ -10,7 +10,7 @@ import org.enso.compiler.pass.analyse._ import org.enso.compiler.pass.lint.UnusedBindings import org.enso.compiler.pass.{PassConfiguration, PassGroup, PassManager} import org.enso.compiler.test.CompilerTest -import org.enso.interpreter.runtime.scope.LocalScope +import org.enso.compiler.context.LocalScope import org.scalatest.Inside class UnusedBindingsTest extends 
CompilerTest with Inside { diff --git a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/optimise/LambdaConsolidateTest.scala b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/optimise/LambdaConsolidateTest.scala index a3f85e0422e1..dae40ebaed62 100644 --- a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/optimise/LambdaConsolidateTest.scala +++ b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/optimise/LambdaConsolidateTest.scala @@ -17,7 +17,7 @@ import org.enso.compiler.pass.analyse.AliasAnalysis import org.enso.compiler.pass.optimise.LambdaConsolidate import org.enso.compiler.pass.{PassConfiguration, PassGroup, PassManager} import org.enso.compiler.test.CompilerTest -import org.enso.interpreter.runtime.scope.LocalScope +import org.enso.compiler.context.LocalScope class LambdaConsolidateTest extends CompilerTest { diff --git a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/resolve/SuspendedArgumentsTest.scala b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/resolve/SuspendedArgumentsTest.scala index 6a39943a6785..8e67c933be80 100644 --- a/engine/runtime/src/test/scala/org/enso/compiler/test/pass/resolve/SuspendedArgumentsTest.scala +++ b/engine/runtime/src/test/scala/org/enso/compiler/test/pass/resolve/SuspendedArgumentsTest.scala @@ -12,7 +12,7 @@ import org.enso.compiler.pass.analyse.AliasAnalysis import org.enso.compiler.pass.resolve.SuspendedArguments import org.enso.compiler.pass.{PassConfiguration, PassGroup, PassManager} import org.enso.compiler.test.CompilerTest -import org.enso.interpreter.runtime.scope.LocalScope +import org.enso.compiler.context.LocalScope import org.enso.compiler.pass.PassConfiguration._ class SuspendedArgumentsTest extends CompilerTest { From f37ec961499e1e35746a60f4f977ec6a85a2ffae Mon Sep 17 00:00:00 2001 From: Ilya Bogdanov Date: Wed, 1 Nov 2023 16:22:49 +0400 Subject: [PATCH 06/12] Avoid crash when new directory created in project root (#8199) MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit While working on #8158, I noticed a crash when the `data` directory is created at the project root. Turns out it is the issue in the `ydoc-server`, which thinks every filesystem event is about files. Unfortunately, we don‘t have the needed info available, so we need to make the `file/info` request. --- app/gui2/ydoc-server/languageServerSession.ts | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/app/gui2/ydoc-server/languageServerSession.ts b/app/gui2/ydoc-server/languageServerSession.ts index d2a6f86720a6..90e8ca745917 100644 --- a/app/gui2/ydoc-server/languageServerSession.ts +++ b/app/gui2/ydoc-server/languageServerSession.ts @@ -70,13 +70,18 @@ export class LanguageServerSession extends ObservableV2 { } }) - this.ls.on('file/event', (event) => { + this.ls.on('file/event', async (event) => { if (DEBUG_LOG_SYNC) { console.log('file/event', event) } switch (event.kind) { case 'Added': - this.getModuleModel(event.path).open() + if (isSourceFile(event.path)) { + const fileInfo = await this.ls.fileInfo(event.path) + if (fileInfo.attributes.kind.type == 'File') { + this.getModuleModel(event.path).open() + } + } break case 'Modified': this.getModuleModelIfExists(event.path)?.reload() @@ -180,6 +185,10 @@ export class LanguageServerSession extends ObservableV2 { } } +const isSourceFile = (path: Path): boolean => { + return path.segments[0] === 'src' && path.segments[path.segments.length - 1].endsWith('.enso') +} + const pathToModuleName = (path: Path): string => { if (path.segments[0] === 'src') { return path.segments.slice(1).join('/') From 8bc17bd37088651ec438b3aa68bfc537ee17589f Mon Sep 17 00:00:00 2001 From: somebody1234 Date: Wed, 1 Nov 2023 23:07:18 +1000 Subject: [PATCH 07/12] Fix flaky `partitionPoint` test (#8198) The tests for `partitionPoint` were previously failing when `NaN` or duplicates were present in the array. 
# Important Notes None --- app/gui2/src/util/__tests__/array.test.ts | 20 ++++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/app/gui2/src/util/__tests__/array.test.ts b/app/gui2/src/util/__tests__/array.test.ts index 52017cec326c..c3e5a3286333 100644 --- a/app/gui2/src/util/__tests__/array.test.ts +++ b/app/gui2/src/util/__tests__/array.test.ts @@ -14,18 +14,26 @@ fcTest.prop({ }) fcTest.prop({ - arr: fc - .array(fc.float()) - .map((a) => ({ arr: a.sort((a, b) => a - b), i: Math.floor(Math.random() * a.length) })), + arr: fc.array(fc.float({ noNaN: true })).chain((a) => { + const sorted = a.sort((a, b) => a - b) + return fc.record({ + arr: fc.constant(sorted), + i: fc.nat({ max: Math.max(sorted.length - 1, 0) }).map((i) => Math.max(0, a.indexOf(a[i]!))), + }) + }), })('partitionPoint (ascending)', ({ arr: { arr, i } }) => { const target = arr[i]! expect(partitionPoint(arr, (n) => n < target)).toEqual(i) }) fcTest.prop({ - arr: fc - .array(fc.float()) - .map((a) => ({ arr: a.sort((a, b) => b - a), i: Math.floor(Math.random() * a.length) })), + arr: fc.array(fc.float({ noNaN: true })).chain((a) => { + const sorted = a.sort((a, b) => b - a) + return fc.record({ + arr: fc.constant(sorted), + i: fc.nat({ max: Math.max(sorted.length - 1, 0) }).map((i) => Math.max(0, a.indexOf(a[i]!))), + }) + }), })('partitionPoint (descending)', ({ arr: { arr, i } }) => { const target = arr[i]! expect(partitionPoint(arr, (n) => n > target)).toEqual(i) From d467683ed1f7a834049e0b5a3c2d1e84e797084e Mon Sep 17 00:00:00 2001 From: GregoryTravis Date: Wed, 1 Nov 2023 10:41:03 -0400 Subject: [PATCH 08/12] Constant columns (in expressions and Column_Operations) should have clearer names (#8188) Previously, constant columns were given generated names with UUIDs in them, which are long and provide no information. Instead, we now use the constant value itself to form the name. 
Since these new generated names are less unique, we must explicitly make them unique, in cases where the caller did not explicilty set a name. --- .../Database/0.0.0-dev/src/Data/Table.enso | 24 ++++++++++--- .../Table/0.0.0-dev/src/Data/Table.enso | 24 ++++++++++--- .../Column_Operations_Spec.enso | 2 +- .../Derived_Columns_Spec.enso | 35 +++++++++++++++++++ 4 files changed, 76 insertions(+), 9 deletions(-) diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index ccebfecdf22e..1c59245c4942 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -835,6 +835,10 @@ type Table @new_name Widget_Helpers.make_column_name_selector set : Column | Text | Array | Vector | Range | Date_Range | Constant_Column | Column_Operation -> Text -> Set_Mode -> Problem_Behavior -> Table ! Existing_Column | Missing_Column | No_Such_Column | Expression_Error set self column new_name="" set_mode=Set_Mode.Add_Or_Update on_problems=Report_Warning = + problem_builder = Problem_Builder.new + unique = self.column_naming_helper.create_unique_name_strategy + unique.mark_used self.column_names + resolved = case column of _ : Text -> self.evaluate_expression column on_problems _ : Column -> @@ -848,19 +852,31 @@ type Table _ : Date_Range -> Error.throw (Unsupported_Database_Operation.Error "Cannot use `Date_Range` for `set` in the database.") _ -> Error.throw (Illegal_Argument.Error "Unsupported type for `Table.set`.") - renamed = if new_name == "" then resolved else resolved.rename new_name - renamed.if_not_error <| self.column_naming_helper.check_ambiguity self.column_names new_name <| + name_is_generated column = case column of + _ : Column -> False + _ -> True + + ## If `new_name` was specified, use that. Otherwise, if `column` is a + `Column`, use its name. In these two cases, do not make it unique. 
+ Otherwise, make it unique. + new_column_name = if new_name != "" then new_name else + if name_is_generated column then unique.make_unique resolved.name else resolved.name + renamed = resolved.rename new_column_name + renamed.if_not_error <| self.column_naming_helper.check_ambiguity self.column_names renamed.name <| index = self.internal_columns.index_of (c -> c.name == renamed.name) check_add = case set_mode of Set_Mode.Add_Or_Update -> True Set_Mode.Add -> if index.is_nothing then True else Error.throw (Existing_Column.Error renamed.name) Set_Mode.Update -> if index.is_nothing then Error.throw (Missing_Column.Error renamed.name) else True - check_add.if_not_error <| + new_table = check_add.if_not_error <| new_col = renamed.as_internal new_cols = if index.is_nothing then self.internal_columns + [new_col] else Vector.new self.column_count i-> if i == index then new_col else self.internal_columns.at i self.updated_columns new_cols + problem_builder.report_unique_name_strategy unique + problem_builder.attach_problems_after on_problems new_table + ## Given an expression, create a derived column where each value is the result of evaluating the expression for the row. @@ -905,7 +921,7 @@ type Table _ -> type_mapping.value_type_to_sql argument_value_type Problem_Behavior.Ignore expr = SQL_Expression.Constant value new_type_ref = SQL_Type_Reference.from_constant sql_type - Column.Value ("Constant_" + UUID.randomUUID.to_text) self.connection new_type_ref expr self.context + Column.Value value.pretty self.connection new_type_ref expr self.context ## PRIVATE Create a unique temporary column name. 
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index f2bd8c35691a..eef7e58075e0 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -1475,6 +1475,10 @@ type Table @column Column_Operation.default_widget set : Text | Column -> Text -> Set_Mode -> Problem_Behavior -> Table ! Existing_Column | Missing_Column | No_Such_Column | Expression_Error set self column:(Text | Column | Constant_Column | Column_Operation) new_name="" set_mode=Set_Mode.Add_Or_Update on_problems=Report_Warning = + problem_builder = Problem_Builder.new + unique = self.column_naming_helper.create_unique_name_strategy + unique.mark_used self.column_names + resolved = case column of _ : Text -> self.evaluate_expression column on_problems _ : Column -> column @@ -1482,8 +1486,17 @@ type Table _ : Column_Operation -> column.evaluate self (set_mode==Set_Mode.Update && new_name=="") on_problems _ -> Error.throw (Illegal_Argument.Error "Unsupported type for `Table.set`.") - renamed = if new_name == "" then resolved else resolved.rename new_name - renamed.if_not_error <| self.column_naming_helper.check_ambiguity self.column_names new_name <| + name_is_generated column = case column of + _ : Column -> False + _ -> True + + ## If `new_name` was specified, use that. Otherwise, if `column` is a + `Column`, use its name. In these two cases, do not make it unique. + Otherwise, make it unique. + new_column_name = if new_name != "" then new_name else + if name_is_generated column then unique.make_unique resolved.name else resolved.name + renamed = resolved.rename new_column_name + renamed.if_not_error <| self.column_naming_helper.check_ambiguity self.column_names renamed.name <| check_add_mode = case set_mode of Set_Mode.Add_Or_Update -> True Set_Mode.Add -> if self.java_table.getColumnByName renamed.name . 
is_nothing then True else @@ -1491,10 +1504,13 @@ type Table Set_Mode.Update -> if self.java_table.getColumnByName renamed.name . is_nothing . not then True else Error.throw (Missing_Column.Error renamed.name) - check_add_mode.if_not_error <| + new_table = check_add_mode.if_not_error <| if resolved.length != self.row_count then Error.throw (Row_Count_Mismatch.Error self.row_count resolved.length) else Table.Value (self.java_table.addOrReplaceColumn renamed.java_column) + problem_builder.report_unique_name_strategy unique + problem_builder.attach_problems_after on_problems new_table + ## Given an expression, create a derived column where each value is the result of evaluating the expression for the row. @@ -1532,7 +1548,7 @@ type Table make_constant_column : Any -> Column make_constant_column self value = if Table_Helpers.is_column value then Error.throw (Illegal_Argument.Error "A constant value may only be created from a scalar, not a Column") else - Column.from_vector_repeated ("Constant_" + UUID.randomUUID.to_text) [value] self.row_count + Column.from_vector_repeated value.pretty [value] self.row_count ## PRIVATE Create a unique temporary column name. diff --git a/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso index 6d9f20c4e2fb..6397707bdf1a 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Column_Operations_Spec.enso @@ -1237,7 +1237,7 @@ spec setup = Test.specify "Should create the correct column name" <| t = table_builder [["x", ["1", "2", "3"]]] - t.at "x" . const 12 . name . take 9 . should_equal "Constant_" + t.at "x" . const 12 . name . 
should_equal "12" Test.specify "Should not allow the creation of a constant column of columns" <| t = table_builder [["x", ["1", "2", "3"]]] diff --git a/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso index 0defadc9ee8e..bcec0ae5483e 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Derived_Columns_Spec.enso @@ -197,3 +197,38 @@ spec setup = t.set (Column_Operation.Add (Column_Ref.Name "X") (Column_Ref.Name "Z")) . should_fail_with No_Such_Column t.set (Column_Operation.Not (Column_Ref.Name "zzz")) . should_fail_with No_Such_Column t.set (Column_Operation.Not (Column_Ref.Index 42)) . should_fail_with Index_Out_Of_Bounds + + Test.group "Unique derived column names" <| + Test.specify "Should disambiguate two derived columns that would otherwise have had the same name" <| + t = table_builder [["X", [1, 2, 3]]] + column_op = Column_Operation.Power 2 (Column_Ref.Name "X") + t2 = t.set column_op . set column_op + t2.column_names . should_equal ["X", "[2] ^ [X]", "[2] ^ [X] 1"] + t2.at "X" . to_vector . should_equal [1, 2, 3] + t2.at "[2] ^ [X]" . to_vector . should_equal [2, 4, 8] + t2.at "[2] ^ [X] 1" . to_vector . should_equal [2, 4, 8] + + Test.specify "Should disambiguate two derived columns that would otherwise have had the same name, within the same expression" <| + t = table_builder [["X", [1, 2, 3]]] + expression = "2 + (2 * 2) + (2 ^ [X])" + t2 = t.set expression + t2.column_names . should_equal ["X", expression] + t2.at "X" . to_vector . should_equal [1, 2, 3] + t2.at expression . to_vector . should_equal [8, 10, 14] + + Test.specify "Should use .pretty to distinguish string constants from regular column names" <| + t = table_builder [["X", ["a", "b", "c"]]] + expression = '"foo" + [X] + "bar"' + t2 = t.set expression + t2.column_names . should_equal ["X", expression] + t2.at "X" . 
to_vector . should_equal ["a", "b", "c"] + t2.at expression . to_vector . should_equal ["fooabar", "foobbar", "foocbar"] + + Test.specify "Should disambiguate between a column reference and a literal string" <| + t = table_builder [["X", ["a", "b", "c"]]] + t2 = t.set (Column_Operation.Add "prefix" (Column_Ref.Name "X")) + t3 = t2.set (Column_Operation.Add "prefix" "X") + + t3.column_names . should_equal ["X", "['prefix'] + [X]", "['prefix'] + 'X'"] + t3.at "['prefix'] + [X]" . to_vector . should_equal ["prefixa", "prefixb", "prefixc"] + t3.at "['prefix'] + 'X'" . to_vector . should_equal ["prefixX", "prefixX", "prefixX"] From b5d6628c571cec08f00f1780278327d0c0de4ea3 Mon Sep 17 00:00:00 2001 From: Cassandra-Clark <40743109+Cassandra-Clark@users.noreply.github.com> Date: Wed, 1 Nov 2023 12:51:15 -0400 Subject: [PATCH 09/12] Change filter_blank_rows when_any parameter to have a more user-friendly type (#7935) Added Blank_Selector constructor and applied to remove_blank_columns, select_blank_columns, filter_blank_rows for #7931 . Changed when_any to when for readability. 
--- CHANGELOG.md | 3 ++ .../Database/0.0.0-dev/src/Data/Table.enso | 36 ++++++++++--------- .../0.0.0-dev/src/Data/Blank_Selector.enso | 6 ++++ .../Table/0.0.0-dev/src/Data/Table.enso | 35 +++++++++--------- .../0.0.0-dev/src/Internal/Table_Helpers.enso | 25 +++++++------ .../Standard/Table/0.0.0-dev/src/Main.enso | 2 ++ .../Missing_Values_Spec.enso | 34 +++++++++--------- .../src/Database/Codegen_Spec.enso | 6 ++-- .../Table_Tests/src/In_Memory/Table_Spec.enso | 6 ++-- 9 files changed, 87 insertions(+), 66 deletions(-) create mode 100644 distribution/lib/Standard/Table/0.0.0-dev/src/Data/Blank_Selector.enso diff --git a/CHANGELOG.md b/CHANGELOG.md index 6e912dfd9e5d..35ba065b957d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -588,6 +588,8 @@ - [Implemented `Table.expand_to_rows` for the in-memory backend.][8029] - [Added XML support for `.to Table` and `.expand_column`.][8083] - [Added `Previous_Value` option to `fill_nothing` and `fill_empty`.][8105] +- [Implemented new selector for when parameter in `filter_blank_rows`, + `select_blank_columns`, `remove_blank_columns`][7935] [debug-shortcuts]: https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug @@ -839,6 +841,7 @@ [8029]: https://github.com/enso-org/enso/pull/8029 [8083]: https://github.com/enso-org/enso/pull/8083 [8105]: https://github.com/enso-org/enso/pull/8105 +[7935]: https://github.com/enso-org/enso/pull/7935 #### Enso Compiler diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index 1c59245c4942..ba4f6b2c1c47 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -14,6 +14,7 @@ from Standard.Base.Widget_Helpers import make_delimiter_selector import Standard.Table.Data.Calculations.Column_Operation.Column_Operation import Standard.Table.Data.Column_Ref.Column_Ref import 
Standard.Table.Data.Constants.Previous_Value +import Standard.Table.Data.Blank_Selector.Blank_Selector import Standard.Table.Data.Expression.Expression import Standard.Table.Data.Expression.Expression_Error import Standard.Table.Data.Join_Condition.Join_Condition @@ -271,9 +272,9 @@ type Table rows are present, all columns are considered blank. Arguments: - - when_any: By default, only columns consisting of all blank cells are - selected. If set to `True`, columns with one or more blank values are - selected. + - when: By default, only columns consisting of all blank cells are + selected. If set to Blank_Selector.Any_Cell_Blank, columns with one or + more blank values are selected. - treat_nans_as_blank: specified whether `Number.nan` is considered as blank. By default, it is not. @@ -284,9 +285,9 @@ type Table Select completely blank columns from a table. table.select_blank_columns - select_blank_columns : Boolean -> Boolean -> Table - select_blank_columns self (when_any : Boolean = False) (treat_nans_as_blank : Boolean = False) = - new_columns = self.columns_helper.select_blank_columns_helper when_any treat_nans_as_blank + select_blank_columns : Blank_Selector -> Boolean -> Table + select_blank_columns self (when:Blank_Selector = Blank_Selector.All_Cells_Blank) treat_nans_as_blank=False = + new_columns = self.columns_helper.select_blank_columns_helper when treat_nans_as_blank if new_columns.length == 0 then Error.throw (No_Output_Columns) else self.updated_columns new_columns @@ -297,9 +298,9 @@ type Table rows are present, all columns are considered blank. Arguments: - - when_any: By default, only columns consisting of all blank cells are - selected. If set to `True`, columns with one or more blank values are - selected. + - when: By default, only columns consisting of all blank cells are + selected. If set to Blank_Selector.Any_Cell_Blank, columns with one or + more blank values are selected. 
- treat_nans_as_blank: specified whether `Number.nan` is considered as blank. By default, it is not. @@ -310,9 +311,9 @@ type Table Remove completely blank columns from a table. table.remove_blank_columns - remove_blank_columns : Boolean -> Boolean -> Table - remove_blank_columns self (when_any : Boolean = False) (treat_nans_as_blank : Boolean = False) = - new_columns = self.columns_helper.select_blank_columns_helper when_any treat_nans_as_blank invert_selection=True + remove_blank_columns : Blank_Selector -> Boolean -> Table + remove_blank_columns self (when:Blank_Selector = Blank_Selector.All_Cells_Blank) treat_nans_as_blank=False = + new_columns = self.columns_helper.select_blank_columns_helper when treat_nans_as_blank invert_selection=True if new_columns.length == 0 then Error.throw (No_Output_Columns) else self.updated_columns new_columns @@ -2122,15 +2123,16 @@ type Table Remove rows which are all blank or containing blank values. Arguments: - - when_any: If `True`, then remove any row containing any blank values. - If `False`, then only remove rows with all blank values. + - when: If Blank_Selector.Any_Cell_Blank, then remove any row containing + any blank values. + If Blank_Selector.All_Cells_Blank, then only remove rows with all blank values. - treat_nans_as_blank: If `True`, then `Number.nan` is considered as blank. ? Blank values Blank values are `Nothing`, `""` and depending on setting `Number.nan`. 
- filter_blank_rows : Boolean -> Boolean -> Table - filter_blank_rows self when_any=False treat_nans_as_blank=False = - Table_Helpers.filter_blank_rows self when_any treat_nans_as_blank + filter_blank_rows : Blank_Selector -> Boolean -> Table + filter_blank_rows self (when:Blank_Selector = Blank_Selector.All_Cells_Blank) treat_nans_as_blank=False = + Table_Helpers.filter_blank_rows self when treat_nans_as_blank ## ALIAS count GROUP Standard.Base.Metadata diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Blank_Selector.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Blank_Selector.enso new file mode 100644 index 000000000000..25db6abff43a --- /dev/null +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Blank_Selector.enso @@ -0,0 +1,6 @@ +## TODO Documents +type Blank_Selector + ## Blank_Selector is used as a constructor for other functions. + Any_Cell_Blank + + All_Cells_Blank \ No newline at end of file diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index eef7e58075e0..f02b81da5226 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -15,6 +15,7 @@ from Standard.Base.Widget_Helpers import make_delimiter_selector import project.Data.Aggregate_Column.Aggregate_Column import project.Data.Calculations.Column_Operation.Column_Operation +import project.Data.Blank_Selector.Blank_Selector import project.Data.Column as Column_Module import project.Data.Column.Column import project.Data.Column_Ref.Column_Ref @@ -52,6 +53,7 @@ import project.Internal.Split_Tokenize import project.Internal.Table_Helpers import project.Internal.Table_Helpers.Table_Column_Helper import project.Internal.Widget_Helpers + from project.Data.Column import get_item_string, normalize_string_for_display from project.Data.Type.Value_Type import Auto, Value_Type from project.Errors import all 
@@ -412,9 +414,9 @@ type Table rows are present, all columns are considered blank. Arguments: - - when_any: By default, only columns consisting of all blank cells are - selected. If set to `True`, columns with one or more blank values are - selected. + - when: By default, only columns consisting of all blank cells are + selected. If set to Blank_Selector.Any_Cell_Blank, columns with one or + more blank values are selected. - treat_nans_as_blank: specifies whether `Number.nan` is considered as blank. By default, it is not. @@ -425,9 +427,9 @@ type Table Select completely blank columns from a table. table.select_blank_columns - select_blank_columns : Boolean -> Boolean -> Table ! No_Output_Columns - select_blank_columns self (when_any : Boolean = False) (treat_nans_as_blank : Boolean = False) = - new_columns = self.columns_helper.select_blank_columns_helper when_any treat_nans_as_blank + select_blank_columns : Blank_Selector -> Boolean -> Table ! No_Output_Columns + select_blank_columns self (when:Blank_Selector = Blank_Selector.All_Cells_Blank) (treat_nans_as_blank : Boolean = False) = + new_columns = self.columns_helper.select_blank_columns_helper when treat_nans_as_blank if new_columns.length == 0 then Error.throw (No_Output_Columns) else Table.new new_columns @@ -438,8 +440,8 @@ type Table rows are present, all columns are considered blank. Arguments: - - when_any: By default, only columns consisting of all blank cells are - selected. If set to `True`, columns with one or more blank values are + - when By default, only columns consisting of all blank cells are + selected. If set to Blank_Selector.Any_Cell_Blank, columns with one or more blank values are selected. - treat_nans_as_blank: specified whether `Number.nan` is considered as blank. By default, it is not. @@ -451,9 +453,9 @@ type Table Remove completely blank columns from a table. table.remove_blank_columns - remove_blank_columns : Boolean -> Boolean -> Table ! 
No_Output_Columns - remove_blank_columns self (when_any : Boolean = False) (treat_nans_as_blank : Boolean = False) = - new_columns = self.columns_helper.select_blank_columns_helper when_any treat_nans_as_blank invert_selection=True + remove_blank_columns : Blank_Selector -> Boolean -> Table ! No_Output_Columns + remove_blank_columns self (when:Blank_Selector = Blank_Selector.All_Cells_Blank) (treat_nans_as_blank : Boolean = False) = + new_columns = self.columns_helper.select_blank_columns_helper when treat_nans_as_blank invert_selection=True if new_columns.length == 0 then Error.throw (No_Output_Columns) else Table.new new_columns @@ -2016,15 +2018,16 @@ type Table Remove rows which are all blank or containing blank values. Arguments: - - when_any: If `True`, then remove any row containing any blank values. - If `False`, then only remove rows with all blank values. + - when: If Blank_Selector.Any_Cell_Blank, then remove any row containing + any blank values. + If Blank_Selector.All_Cells_Blank, then only remove rows with all blank values. - treat_nans_as_blank: If `True`, then `Number.nan` is considered as blank. ? Blank values Blank values are `Nothing`, `""` and depending on setting `Number.nan`. 
- filter_blank_rows : Boolean -> Boolean -> Table - filter_blank_rows self when_any=False treat_nans_as_blank=False = - Table_Helpers.filter_blank_rows self when_any treat_nans_as_blank + filter_blank_rows : Blank_Selector -> Boolean -> Table + filter_blank_rows self when=Blank_Selector.Any_Cell_Blank treat_nans_as_blank=False = + Table_Helpers.filter_blank_rows self when treat_nans_as_blank ## ALIAS count GROUP Standard.Base.Metadata diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso index 10930b43d4e8..413ceb21726c 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso @@ -3,6 +3,7 @@ import Standard.Base.Errors.Illegal_Argument.Illegal_Argument import Standard.Base.Errors.Illegal_State.Illegal_State import project.Data.Aggregate_Column.Aggregate_Column +import project.Data.Blank_Selector.Blank_Selector import project.Data.Column.Column import project.Data.Position.Position import project.Data.Set_Mode.Set_Mode @@ -196,14 +197,15 @@ type Table_Column_Helper completely blank or have some blanks. Arguments: - - when_any: By default, only columns consisting of all blank cells are - selected. If set to `True`, columns with one or more blank values are + -TODO docs + - when: By default, only columns consisting of all blank cells are + selected. If set to Blank_Selector.Any_Cell_Blank, columns with one or more blank values are selected. - treat_nans_as_blank: If `True`, then `Number.nan` is considered as blank. - invert_selection: If `True`, then the selection is inverted. 
- select_blank_columns_helper : Boolean -> Boolean -> Boolean -> Vector - select_blank_columns_helper self when_any:Boolean treat_nans_as_blank:Boolean invert_selection:Boolean=False = + select_blank_columns_helper : Blank_Selector -> Boolean -> Boolean -> Vector + select_blank_columns_helper self (when:Blank_Selector = Blank_Selector.All_Cells_Blank) treat_nans_as_blank:Boolean invert_selection:Boolean=False = blanks = self.internal_columns.map_with_index ix-> internal_column-> column = self.make_column internal_column blank_indicator = column.is_blank treat_nans_as_blank @@ -222,7 +224,9 @@ type Table_Column_Helper just_indicators = table_with_blank_indicators.select_columns (blanks.map .name) on_problems=Problem_Behavior.Report_Error # Maximum is equivalent to Exists and Minimum is equivalent to Forall. - col_aggregate = if when_any then Aggregate_Column.Maximum _ else Aggregate_Column.Minimum _ + col_aggregate = case when of + Blank_Selector.Any_Cell_Blank -> Aggregate_Column.Maximum _ + Blank_Selector.All_Cells_Blank -> Aggregate_Column.Minimum _ aggregates = blanks.map blanks_col-> col_aggregate blanks_col.name aggregate_result = just_indicators.aggregate aggregates on_problems=Problem_Behavior.Report_Error @@ -440,14 +444,15 @@ resolve_order_by internal_columns column_selectors problem_builder = ## PRIVATE A helper method gathering the common logic for constructing expressions that can filter out blank rows. -filter_blank_rows : Table -> Boolean -> Boolean -> Table -filter_blank_rows table when_any treat_nans_as_blank = +filter_blank_rows : Table -> Blank_Selector -> Boolean -> Table +filter_blank_rows table when treat_nans_as_blank = cols = table.columns case cols.not_empty of True -> - merge = if when_any then (||) else (&&) - missing_mask = cols.map (_.is_blank treat_nans_as_blank) . reduce col1-> col2-> - merge col1 col2 . 
rename "blank_indicator" + merge = case when of + Blank_Selector.Any_Cell_Blank -> (||) + Blank_Selector.All_Cells_Blank -> (&&) + missing_mask = cols.map (_.is_blank treat_nans_as_blank) . reduce merge non_missing_mask = missing_mask.not table.filter non_missing_mask Filter_Condition.Is_True False -> table diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso index 16711b8c2e4a..486b2b102c79 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Main.enso @@ -2,6 +2,7 @@ from Standard.Base import all import project.Data.Aggregate_Column.Aggregate_Column import project.Data.Calculations.Column_Operation.Column_Operation +import project.Data.Blank_Selector.Blank_Selector import project.Data.Column.Column import project.Data.Column_Ref.Column_Ref import project.Data.Column_Vector_Extensions @@ -31,6 +32,7 @@ from project.Extensions.Table_Conversions import all export project.Data.Aggregate_Column.Aggregate_Column export project.Data.Calculations.Column_Operation.Column_Operation +export project.Data.Blank_Selector.Blank_Selector export project.Data.Column.Column export project.Data.Column_Ref.Column_Ref export project.Data.Column_Vector_Extensions diff --git a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso index 51bd2fd7cf9a..458874e4c860 100644 --- a/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso +++ b/test/Table_Tests/src/Common_Table_Operations/Missing_Values_Spec.enso @@ -1,6 +1,6 @@ from Standard.Base import all -from Standard.Table import Value_Type, Column_Ref, Previous_Value +from Standard.Table import Value_Type, Column_Ref, Previous_Value, Blank_Selector from Standard.Table.Data.Aggregate_Column.Aggregate_Column import Count_Distinct from Standard.Table.Errors import all @@ -29,14 +29,14 @@ spec setup = 
table_builder [a, b, c, d, e, f] Test.specify "filter_blank_rows should drop rows that contain at least one missing cell" <| - d = t0.filter_blank_rows when_any=True + d = t0.filter_blank_rows when=Blank_Selector.Any_Cell_Blank d.row_count . should_equal 1 d.at "a" . to_vector . should_equal [5] d.at "b" . to_vector . should_equal [False] d.at "c" . to_vector . should_equal [" "] Test.specify "filter_blank_rows should drop rows that are all blank" <| - d2 = t0.filter_blank_rows when_any=False + d2 = t0.filter_blank_rows when=Blank_Selector.All_Cells_Blank d2.at "a" . to_vector . should_equal [0, 1, Nothing, 42, 5] d2.at "b" . to_vector . should_equal [True, Nothing, True, False, False] d2.at "c" . to_vector . should_equal ["", "foo", "bar", Nothing, " "] @@ -49,12 +49,12 @@ spec setup = t1.row_count . should_equal 3 t1.at "X" . to_vector . should_equal [Nothing, Nothing, Nothing] - t2 = t1.filter_blank_rows when_any=True + t2 = t1.filter_blank_rows when=Blank_Selector.Any_Cell_Blank t2.row_count . should_equal 0 t2.at "X" . to_vector . should_equal [] t3 = table_builder [["X", ["", "", Nothing]]] - t4 = t3.filter_blank_rows when_any=False + t4 = t3.filter_blank_rows when=Blank_Selector.All_Cells_Blank t4.row_count . should_equal 0 t4.at "X" . to_vector . should_equal [] @@ -72,7 +72,7 @@ spec setup = r1.columns.map .name . should_equal ["f"] r1.at "f" . to_vector . should_equal [Nothing, "", Nothing, ""] - r2 = t1.select_blank_columns when_any=True + r2 = t1.select_blank_columns when=Blank_Selector.Any_Cell_Blank r2.columns.map .name . should_equal ["a", "b", "d", "e", "f"] r2.at "d" . to_vector . should_equal [Nothing, True, False, True] @@ -81,7 +81,7 @@ spec setup = r1.columns.map .name . should_equal ["a", "b", "c", "d", "e"] r1.at "a" . to_vector . should_equal [1, Nothing, 3, 4] - r2 = t1.remove_blank_columns when_any=True + r2 = t1.remove_blank_columns when=Blank_Selector.Any_Cell_Blank r2.columns.map .name . should_equal ["c"] r2.at "c" . to_vector . 
should_equal [10, 20, 30, 40] @@ -93,12 +93,12 @@ spec setup = table_builder [c, g, h] if test_selection.is_nan_and_nothing_distinct then Test.specify "should not treat NaNs as blank by default" <| - r1 = t3.filter_blank_rows when_any=True + r1 = t3.filter_blank_rows when=Blank_Selector.Any_Cell_Blank # We cannot use `Vector.==` because `NaN != NaN`. r1.at "X" . to_vector . to_text . should_equal "[1.5, NaN]" r1.at "Y" . to_vector . should_equal [2.0, 5.0] - r2 = t3.filter_blank_rows when_any=False + r2 = t3.filter_blank_rows when=Blank_Selector.All_Cells_Blank r2.at "X" . to_vector . to_text . should_equal "[2.0, 1.5, NaN, NaN]" r2.at "Y" . to_vector . should_equal [Nothing, 2.0, Nothing, 5.0] @@ -106,37 +106,37 @@ spec setup = r3.columns.map .name . should_equal ["c", "g", "h"] r3.at "g" . to_vector . to_text . should_equal "[NaN, 1.0, 2.0, 3.4]" - r4 = t4.remove_blank_columns when_any=True + r4 = t4.remove_blank_columns when=Blank_Selector.Any_Cell_Blank r4.columns.map .name . should_equal ["c", "g"] r4.at "g" . to_vector . to_text . should_equal "[NaN, 1.0, 2.0, 3.4]" - r5 = t4.select_blank_columns when_any=True + r5 = t4.select_blank_columns when=Blank_Selector.Any_Cell_Blank r5.columns.map .name . should_equal ["h"] r5.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]" Test.specify "should allow to treat NaNs as blank if asked" <| - r1 = t3.filter_blank_rows when_any=True treat_nans_as_blank=True + r1 = t3.filter_blank_rows when=Blank_Selector.Any_Cell_Blank treat_nans_as_blank=True # We cannot use `Vector.==` because `NaN != NaN`. r1.at "X" . to_vector . should_equal [1.5] r1.at "Y" . to_vector . should_equal [2.0] - r2 = t3.filter_blank_rows when_any=False treat_nans_as_blank=True + r2 = t3.filter_blank_rows when=Blank_Selector.All_Cells_Blank treat_nans_as_blank=True r2.at "X" . to_vector . to_text . should_equal "[2.0, 1.5, NaN]" r2.at "Y" . to_vector . 
should_equal [Nothing, 2.0, 5.0] - r3 = t4.remove_blank_columns when_any=False treat_nans_as_blank=True + r3 = t4.remove_blank_columns when=Blank_Selector.All_Cells_Blank treat_nans_as_blank=True r3.columns.map .name . should_equal ["c", "g"] r3.at "g" . to_vector . to_text . should_equal "[NaN, 1.0, 2.0, 3.4]" - r4 = t4.select_blank_columns when_any=False treat_nans_as_blank=True + r4 = t4.select_blank_columns when=Blank_Selector.All_Cells_Blank treat_nans_as_blank=True r4.columns.map .name . should_equal ["h"] r4.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]" - r5 = t4.remove_blank_columns when_any=True treat_nans_as_blank=True + r5 = t4.remove_blank_columns when=Blank_Selector.Any_Cell_Blank treat_nans_as_blank=True r5.columns.map .name . should_equal ["c"] r5.at "c" . to_vector . should_equal [10, 20, 40, 30] - r6 = t4.select_blank_columns when_any=True treat_nans_as_blank=True + r6 = t4.select_blank_columns when=Blank_Selector.Any_Cell_Blank treat_nans_as_blank=True r6.columns.map .name . should_equal ["g", "h"] r6.at "h" . to_vector . to_text . should_equal "[NaN, Nothing, NaN, Nothing]" diff --git a/test/Table_Tests/src/Database/Codegen_Spec.enso b/test/Table_Tests/src/Database/Codegen_Spec.enso index ab271472dcf9..e3357bf6fa6d 100644 --- a/test/Table_Tests/src/Database/Codegen_Spec.enso +++ b/test/Table_Tests/src/Database/Codegen_Spec.enso @@ -1,7 +1,7 @@ from Standard.Base import all import Standard.Base.Errors.Illegal_State.Illegal_State -from Standard.Table import Sort_Column, Value_Type +from Standard.Table import Sort_Column, Value_Type, Blank_Selector from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all hiding First, Last from Standard.Table.Errors import No_Input_Columns_Selected, Missing_Input_Columns, No_Such_Column @@ -81,10 +81,10 @@ spec = c.to_sql.prepare . should_equal ['SELECT CAST(COALESCE("T1"."B", ?) 
AS TEXT) AS "B" FROM "T1" AS "T1"', ["not-applicable"]] Test.specify "filter_blank_rows should drop rows that contain at least one missing column in a Table" <| - t2 = t1.filter_blank_rows when_any=True + t2 = t1.filter_blank_rows when=Blank_Selector.Any_Cell_Blank t2.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE (NOT ((("T1"."A" IS NULL) OR (("T1"."B" IS NULL) OR ("T1"."B" = \'\'))) OR ("T1"."C" IS NULL)))', []] - t3 = t1.filter_blank_rows when_any=False + t3 = t1.filter_blank_rows when=Blank_Selector.All_Cells_Blank t3.to_sql.prepare . should_equal ['SELECT "T1"."A" AS "A", "T1"."B" AS "B", "T1"."C" AS "C" FROM "T1" AS "T1" WHERE (NOT ((("T1"."A" IS NULL) AND (("T1"."B" IS NULL) OR ("T1"."B" = \'\'))) AND ("T1"."C" IS NULL)))', []] Test.group "[Codegen] Sorting" <| diff --git a/test/Table_Tests/src/In_Memory/Table_Spec.enso b/test/Table_Tests/src/In_Memory/Table_Spec.enso index 5f166d0932ca..698873f13648 100644 --- a/test/Table_Tests/src/In_Memory/Table_Spec.enso +++ b/test/Table_Tests/src/In_Memory/Table_Spec.enso @@ -4,7 +4,7 @@ import Standard.Base.Errors.Common.Index_Out_Of_Bounds import Standard.Base.Errors.Common.Type_Error import Standard.Base.Errors.Illegal_Argument.Illegal_Argument -from Standard.Table import Table, Column, Sort_Column, Aggregate_Column +from Standard.Table import Table, Column, Sort_Column, Aggregate_Column, Blank_Selector from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all hiding First, Last import Standard.Table.Data.Type.Value_Type.Value_Type from Standard.Table.Errors import Invalid_Column_Names, Duplicate_Output_Column_Names, No_Input_Columns_Selected, Missing_Input_Columns, No_Such_Column, Floating_Point_Equality, Invalid_Value_Type, Row_Count_Mismatch @@ -305,7 +305,7 @@ spec = Test.group "Dropping Missing Values" <| Test.specify "should correctly handle NaNs with mixed type columns" <| t = Table.new [["X", [1, 2, 3, 4, 5]], ["Y", ["A", "", 
Nothing, Number.nan, 0]]] - t1 = t.filter_blank_rows when_any=True treat_nans_as_blank=False + t1 = t.filter_blank_rows when=Blank_Selector.Any_Cell_Blank treat_nans_as_blank=False t1.at "X" . to_vector . should_equal [1, 4, 5] # Comparing text value because `Number.nan != Number.nan`. t1.at "Y" . to_vector . to_text . should_equal "[A, NaN, 0]" @@ -314,7 +314,7 @@ spec = c.to_vector . should_equal [False, True, True, True, False] c.value_type . should_equal Value_Type.Boolean - t2 = t.filter_blank_rows when_any=True treat_nans_as_blank=True + t2 = t.filter_blank_rows when=Blank_Selector.Any_Cell_Blank treat_nans_as_blank=True t2.at "X" . to_vector . should_equal [1, 5] t2.at "Y" . to_vector . should_equal ['A', 0] From a9118ee0c355912c299b9fdc9b45934c2e6a96f0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Micha=C5=82=20Wawrzyniec=20Urba=C5=84czyk?= Date: Wed, 1 Nov 2023 20:58:28 +0100 Subject: [PATCH 10/12] Use the new notarization tool from Apple (#8192) --- .github/workflows/gui.yml | 2 + .github/workflows/release.yml | 14 +++-- .../lib/client/electron-builder-config.ts | 15 ++--- app/ide-desktop/lib/client/package.json | 2 +- app/ide-desktop/lib/types/globals.d.ts | 1 + build/build/src/ci_gen.rs | 1 + build/build/src/ci_gen/job.rs | 4 ++ build/build/src/ide/web.rs | 3 + package-lock.json | 56 ++++++++++++++----- 9 files changed, 68 insertions(+), 30 deletions(-) diff --git a/.github/workflows/gui.yml b/.github/workflows/gui.yml index 4a6f1a2c7cce..163ee5d1b0c9 100644 --- a/.github/workflows/gui.yml +++ b/.github/workflows/gui.yml @@ -821,6 +821,7 @@ jobs: env: APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} APPLEIDPASS: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} + APPLETEAMID: ${{ secrets.APPLE_NOTARIZATION_TEAM_ID }} CSC_IDENTITY_AUTO_DISCOVERY: "true" CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODE_SIGNING_CERT_PASSWORD }} CSC_LINK: ${{ secrets.APPLE_CODE_SIGNING_CERT }} @@ -1008,6 +1009,7 @@ jobs: env: APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} APPLEIDPASS: 
${{ secrets.APPLE_NOTARIZATION_PASSWORD }} + APPLETEAMID: ${{ secrets.APPLE_NOTARIZATION_TEAM_ID }} CSC_IDENTITY_AUTO_DISCOVERY: "true" CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODE_SIGNING_CERT_PASSWORD }} CSC_LINK: ${{ secrets.APPLE_CODE_SIGNING_CERT }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ce6827785dcd..5dd9d52929b8 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -628,6 +628,7 @@ jobs: env: APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} APPLEIDPASS: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} + APPLETEAMID: ${{ secrets.APPLE_NOTARIZATION_TEAM_ID }} CSC_IDENTITY_AUTO_DISCOVERY: "true" CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODE_SIGNING_CERT_PASSWORD }} CSC_LINK: ${{ secrets.APPLE_CODE_SIGNING_CERT }} @@ -725,7 +726,7 @@ jobs: steps: - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') name: Setup conda (GH runners only) - uses: s-weigand/setup-conda@v1.0.6 + uses: s-weigand/setup-conda@v1.2.1 with: update-conda: false conda-channels: anaconda, conda-forge @@ -747,7 +748,7 @@ jobs: run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" shell: bash - name: Checking out the repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: clean: false submodules: recursive @@ -787,7 +788,7 @@ jobs: steps: - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') name: Setup conda (GH runners only) - uses: s-weigand/setup-conda@v1.0.6 + uses: s-weigand/setup-conda@v1.2.1 with: update-conda: false conda-channels: anaconda, conda-forge @@ -809,7 +810,7 @@ jobs: run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" shell: bash - name: Checking out the repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: clean: false submodules: recursive @@ -826,6 +827,7 
@@ jobs: env: APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} APPLEIDPASS: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }} + APPLETEAMID: ${{ secrets.APPLE_NOTARIZATION_TEAM_ID }} CSC_IDENTITY_AUTO_DISCOVERY: "true" CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODE_SIGNING_CERT_PASSWORD }} CSC_LINK: ${{ secrets.APPLE_CODE_SIGNING_CERT }} @@ -856,7 +858,7 @@ jobs: steps: - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') name: Setup conda (GH runners only) - uses: s-weigand/setup-conda@v1.0.6 + uses: s-weigand/setup-conda@v1.2.1 with: update-conda: false conda-channels: anaconda, conda-forge @@ -878,7 +880,7 @@ jobs: run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" shell: bash - name: Checking out the repository - uses: actions/checkout@v2 + uses: actions/checkout@v4 with: clean: false submodules: recursive diff --git a/app/ide-desktop/lib/client/electron-builder-config.ts b/app/ide-desktop/lib/client/electron-builder-config.ts index 91ba3dca2730..4cfcf778800a 100644 --- a/app/ide-desktop/lib/client/electron-builder-config.ts +++ b/app/ide-desktop/lib/client/electron-builder-config.ts @@ -10,7 +10,7 @@ import * as childProcess from 'node:child_process' import * as fs from 'node:fs/promises' import * as electronBuilder from 'electron-builder' -import * as electronNotarize from 'electron-notarize' +import * as electronNotarize from '@electron/notarize' import type * as macOptions from 'app-builder-lib/out/options/macOptions' import yargs from 'yargs' @@ -230,8 +230,6 @@ export function createElectronBuilderConfig(passedArgs: Arguments): electronBuil ) { const { packager: { - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment - platformSpecificBuildOptions: buildOptions, appInfo: { productFilename: appName }, config: { mac: macConfig }, }, @@ -250,20 +248,17 @@ export function createElectronBuilderConfig(passedArgs: Arguments): electronBuil }) 
console.log(' • Notarizing.') - // The type-cast is safe because this is only executes - // when `platform === electronBuilder.Platform.MAC`. - // eslint-disable-next-line no-restricted-syntax - const macBuildOptions = buildOptions as macOptions.MacConfiguration + await electronNotarize.notarize({ - // This will always be defined since we set it at the top of this object. - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - appBundleId: macBuildOptions.appId!, + tool: 'notarytool', appPath: `${appOutDir}/${appName}.app`, // It is a mistake for either of these to be undefined. // eslint-disable-next-line @typescript-eslint/no-non-null-assertion appleId: process.env.APPLEID!, // eslint-disable-next-line @typescript-eslint/no-non-null-assertion appleIdPassword: process.env.APPLEIDPASS!, + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + teamId: process.env.APPLETEAMID!, }) } }, diff --git a/app/ide-desktop/lib/client/package.json b/app/ide-desktop/lib/client/package.json index 07c1c1b4ff5d..e24426d794eb 100644 --- a/app/ide-desktop/lib/client/package.json +++ b/app/ide-desktop/lib/client/package.json @@ -36,7 +36,7 @@ "crypto-js": "4.1.1", "electron": "25.7.0", "electron-builder": "^22.14.13", - "electron-notarize": "1.2.2", + "@electron/notarize": "2.1.0", "enso-common": "^1.0.0", "esbuild": "^0.19.3", "fast-glob": "^3.2.12", diff --git a/app/ide-desktop/lib/types/globals.d.ts b/app/ide-desktop/lib/types/globals.d.ts index 0bfbe4a0e6a1..b31a64b004b1 100644 --- a/app/ide-desktop/lib/types/globals.d.ts +++ b/app/ide-desktop/lib/types/globals.d.ts @@ -81,6 +81,7 @@ declare global { /* eslint-disable @typescript-eslint/naming-convention */ APPLEID?: string APPLEIDPASS?: string + APPLETEAMID?: string /* eslint-enable @typescript-eslint/naming-convention */ } } diff --git a/build/build/src/ci_gen.rs b/build/build/src/ci_gen.rs index e6e2daf51c1a..77c600a1942d 100644 --- a/build/build/src/ci_gen.rs +++ b/build/build/src/ci_gen.rs @@ 
-94,6 +94,7 @@ pub mod secret { pub const APPLE_CODE_SIGNING_CERT_PASSWORD: &str = "APPLE_CODE_SIGNING_CERT_PASSWORD"; pub const APPLE_NOTARIZATION_USERNAME: &str = "APPLE_NOTARIZATION_USERNAME"; pub const APPLE_NOTARIZATION_PASSWORD: &str = "APPLE_NOTARIZATION_PASSWORD"; + pub const APPLE_NOTARIZATION_TEAM_ID: &str = "APPLE_NOTARIZATION_TEAM_ID"; // === Windows Code Signing === /// Name of the GitHub Actions secret that stores path to the Windows code signing certificate diff --git a/build/build/src/ci_gen/job.rs b/build/build/src/ci_gen/job.rs index cf60b4f5eb35..6e45eae2eba1 100644 --- a/build/build/src/ci_gen/job.rs +++ b/build/build/src/ci_gen/job.rs @@ -232,6 +232,10 @@ pub fn expose_os_specific_signing_secret(os: OS, step: Step) -> Step { secret::APPLE_NOTARIZATION_PASSWORD, &crate::ide::web::env::APPLEIDPASS, ) + .with_secret_exposed_as( + secret::APPLE_NOTARIZATION_TEAM_ID, + &crate::ide::web::env::APPLETEAMID, + ) .with_env(&crate::ide::web::env::CSC_IDENTITY_AUTO_DISCOVERY, "true"), _ => step, } diff --git a/build/build/src/ide/web.rs b/build/build/src/ide/web.rs index 1c0bed76588a..915a503677d9 100644 --- a/build/build/src/ide/web.rs +++ b/build/build/src/ide/web.rs @@ -85,6 +85,9 @@ pub mod env { /// https://support.apple.com/HT204397 APPLEIDPASS, String; + /// Apple Team ID. + APPLETEAMID, String; + /// `true` or `false`. Defaults to `true` — on a macOS development machine valid and /// appropriate identity from your keychain will be automatically used. 
CSC_IDENTITY_AUTO_DISCOVERY, bool; diff --git a/package-lock.json b/package-lock.json index 09a578070d4d..3976cb67d15f 100644 --- a/package-lock.json +++ b/package-lock.json @@ -146,10 +146,10 @@ "yargs": "17.6.2" }, "devDependencies": { + "@electron/notarize": "2.1.0", "crypto-js": "4.1.1", "electron": "25.7.0", "electron-builder": "^22.14.13", - "electron-notarize": "1.2.2", "enso-common": "^1.0.0", "esbuild": "^0.19.3", "fast-glob": "^3.2.12", @@ -1753,6 +1753,20 @@ "node": ">= 4.0.0" } }, + "node_modules/@electron/notarize": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/@electron/notarize/-/notarize-2.1.0.tgz", + "integrity": "sha512-Q02xem1D0sg4v437xHgmBLxI2iz/fc0D4K7fiVWHa/AnW8o7D751xyKNXgziA6HrTOme9ul1JfWN5ark8WH1xA==", + "dev": true, + "dependencies": { + "debug": "^4.1.1", + "fs-extra": "^9.0.1", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": ">= 10.0.0" + } + }, "node_modules/@electron/universal": { "version": "1.0.5", "dev": true, @@ -7543,18 +7557,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/electron-notarize": { - "version": "1.2.2", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.1.1", - "fs-extra": "^9.0.1" - }, - "engines": { - "node": ">= 10.0.0" - } - }, "node_modules/electron-osx-sign": { "version": "0.5.0", "dev": true, @@ -7803,6 +7805,12 @@ "node": ">=6" } }, + "node_modules/err-code": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/err-code/-/err-code-2.0.3.tgz", + "integrity": "sha512-2bmlRpNKBxT/CRmPOlyISQpNj+qSeYvcym/uT0Jx2bMOlKLtSy1ZmLuVxSEKKyor/N5yhvp/ZiG1oE3DEYMSFA==", + "dev": true + }, "node_modules/errno": { "version": "0.1.8", "license": "MIT", @@ -13126,6 +13134,19 @@ "node": ">=0.4.0" } }, + "node_modules/promise-retry": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/promise-retry/-/promise-retry-2.0.1.tgz", + "integrity": 
"sha512-y+WKFlBR8BGXnsNlIHFGPZmyDf3DFMoLhaflAnyZgV6rG6xu+JwesTo2Q9R6XwYmtmwAFCkAk3e35jEdoeh/3g==", + "dev": true, + "dependencies": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/prop-types": { "version": "15.8.1", "license": "MIT", @@ -13682,6 +13703,15 @@ "node": ">=0.12" } }, + "node_modules/retry": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/retry/-/retry-0.12.0.tgz", + "integrity": "sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, "node_modules/reusify": { "version": "1.0.4", "license": "MIT", From 2db4f4c5d936f34c3fb225f152673cf45a0dc606 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Thu, 2 Nov 2023 12:24:26 +0100 Subject: [PATCH 11/12] Upgrade directory-watcher library (#8201) The change upgrades `directory-watcher` library, hoping that it will fix the problem reported in #7695 (there has been a number of bug fixes in MacOS listener since then). Once upgraded, tests in `WatcherAdapterSpec` because the logic that attempted to ensure the proper initialization order in the test using semaphore was wrong. Now starting the watcher using `watchAsync` which only returns the future when the watcher successfully registers for paths. Ideally authors of the library would make the registration bit public (https://github.com/gmethvin/directory-watcher/blob/3218d68a845ebd803ebd98af3be4692d1b63e12c/core/src/main/java/io/methvin/watcher/DirectoryWatcher.java#L229C7-L229C20) but it is the best we can do so far. Had to adapt to the new API in PathWatcher as well, ensuring the right order of initialization. Should fix #7695. 
--- build.sbt | 2 +- distribution/engine/THIRD-PARTY/NOTICE | 6 +-- .../NOTICE.txt | 5 ++ .../NOTICES | 4 ++ .../NOTICES | 0 .../net.java.dev.jna.jna-5.12.1/NOTICES | 25 +++++++++ .../net.java.dev.jna.jna-5.5.0/NOTICES | 5 -- .../NOTICES | 34 ++++++++++-- .../org/enso/languageserver/effect/Exec.scala | 53 +++++++++++-------- .../filemanager/PathWatcher.scala | 38 ++++++++----- .../org/enso/filewatcher/NoopWatcher.scala | 4 +- .../scala/org/enso/filewatcher/Watcher.scala | 8 ++- .../org/enso/filewatcher/WatcherAdapter.scala | 11 ++-- .../enso/filewatcher/WatcherAdapterSpec.scala | 13 ++--- .../copyright-ignore | 2 + .../copyright-keep | 2 + .../files-ignore | 1 + .../files-keep | 1 + .../copyright-keep | 0 .../copyright-ignore | 1 + .../copyright-keep} | 6 ++- .../net.java.dev.jna.jna-5.5.0/copyright-keep | 3 -- .../copyright-keep | 18 +++++-- tools/legal-review/engine/report-state | 4 +- 24 files changed, 172 insertions(+), 74 deletions(-) create mode 100644 distribution/engine/THIRD-PARTY/commons-codec.commons-codec-1.16.0/NOTICE.txt rename distribution/engine/THIRD-PARTY/{io.methvin.directory-watcher-0.9.10 => io.methvin.directory-watcher-0.18.0}/NOTICES (100%) create mode 100644 distribution/engine/THIRD-PARTY/net.java.dev.jna.jna-5.12.1/NOTICES delete mode 100644 distribution/engine/THIRD-PARTY/net.java.dev.jna.jna-5.5.0/NOTICES create mode 100644 tools/legal-review/engine/commons-codec.commons-codec-1.16.0/copyright-ignore create mode 100644 tools/legal-review/engine/commons-codec.commons-codec-1.16.0/copyright-keep create mode 100644 tools/legal-review/engine/commons-codec.commons-codec-1.16.0/files-ignore create mode 100644 tools/legal-review/engine/commons-codec.commons-codec-1.16.0/files-keep rename tools/legal-review/engine/{io.methvin.directory-watcher-0.9.10 => io.methvin.directory-watcher-0.18.0}/copyright-keep (100%) create mode 100644 tools/legal-review/engine/net.java.dev.jna.jna-5.12.1/copyright-ignore rename 
tools/legal-review/engine/{net.java.dev.jna.jna-5.5.0/copyright-ignore => net.java.dev.jna.jna-5.12.1/copyright-keep} (75%) delete mode 100644 tools/legal-review/engine/net.java.dev.jna.jna-5.5.0/copyright-keep diff --git a/build.sbt b/build.sbt index 255d2b8283ec..8834ed5e3a1e 100644 --- a/build.sbt +++ b/build.sbt @@ -460,7 +460,7 @@ val zio = Seq( val bcpkixJdk15Version = "1.70" val bumpVersion = "0.1.3" val declineVersion = "2.4.1" -val directoryWatcherVersion = "0.9.10" +val directoryWatcherVersion = "0.18.0" val flatbuffersVersion = "1.12.0" val guavaVersion = "32.0.0-jre" val jlineVersion = "3.23.0" diff --git a/distribution/engine/THIRD-PARTY/NOTICE b/distribution/engine/THIRD-PARTY/NOTICE index 3de39d393e24..0452cfd9ee8f 100644 --- a/distribution/engine/THIRD-PARTY/NOTICE +++ b/distribution/engine/THIRD-PARTY/NOTICE @@ -253,7 +253,7 @@ Copyright notices related to this dependency can be found in the directory `io.c 'directory-watcher', licensed under the Apache-2.0, is distributed with the engine. The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `io.methvin.directory-watcher-0.9.10`. +Copyright notices related to this dependency can be found in the directory `io.methvin.directory-watcher-0.18.0`. 'spray-json_2.13', licensed under the Apache 2, is distributed with the engine. @@ -261,9 +261,9 @@ The license file can be found at `licenses/APACHE2.0`. Copyright notices related to this dependency can be found in the directory `io.spray.spray-json_2.13-1.3.6`. -'jna', licensed under the Apache License v2.0, is distributed with the engine. +'jna', licensed under the Apache-2.0, is distributed with the engine. The license file can be found at `licenses/APACHE2.0`. -Copyright notices related to this dependency can be found in the directory `net.java.dev.jna.jna-5.5.0`. +Copyright notices related to this dependency can be found in the directory `net.java.dev.jna.jna-5.12.1`. 
'bump_2.13', licensed under the MIT, is distributed with the engine. diff --git a/distribution/engine/THIRD-PARTY/commons-codec.commons-codec-1.16.0/NOTICE.txt b/distribution/engine/THIRD-PARTY/commons-codec.commons-codec-1.16.0/NOTICE.txt new file mode 100644 index 000000000000..639cd1fb37a1 --- /dev/null +++ b/distribution/engine/THIRD-PARTY/commons-codec.commons-codec-1.16.0/NOTICE.txt @@ -0,0 +1,5 @@ +Apache Commons Codec +Copyright 2002-2023 The Apache Software Foundation + +This product includes software developed at +The Apache Software Foundation (https://www.apache.org/). diff --git a/distribution/engine/THIRD-PARTY/commons-codec.commons-codec-1.16.0/NOTICES b/distribution/engine/THIRD-PARTY/commons-codec.commons-codec-1.16.0/NOTICES index 8b137891791f..c64219251bb6 100644 --- a/distribution/engine/THIRD-PARTY/commons-codec.commons-codec-1.16.0/NOTICES +++ b/distribution/engine/THIRD-PARTY/commons-codec.commons-codec-1.16.0/NOTICES @@ -1 +1,5 @@ + +Some portions of this file Copyright (c) 2004-2006 Intel Corporation + +this work for additional information regarding copyright ownership. 
diff --git a/distribution/engine/THIRD-PARTY/io.methvin.directory-watcher-0.9.10/NOTICES b/distribution/engine/THIRD-PARTY/io.methvin.directory-watcher-0.18.0/NOTICES similarity index 100% rename from distribution/engine/THIRD-PARTY/io.methvin.directory-watcher-0.9.10/NOTICES rename to distribution/engine/THIRD-PARTY/io.methvin.directory-watcher-0.18.0/NOTICES diff --git a/distribution/engine/THIRD-PARTY/net.java.dev.jna.jna-5.12.1/NOTICES b/distribution/engine/THIRD-PARTY/net.java.dev.jna.jna-5.12.1/NOTICES new file mode 100644 index 000000000000..649803da66e1 --- /dev/null +++ b/distribution/engine/THIRD-PARTY/net.java.dev.jna.jna-5.12.1/NOTICES @@ -0,0 +1,25 @@ +Copyright (c) 2007 Timothy Wall + +Copyright (c) 2007 Timothy Wall, All Rights Reserved + +Copyright (c) 2007 Wayne Meissner, All Rights Reserved + +Copyright (c) 2007-2008 Timothy Wall, All Rights Reserved + +Copyright (c) 2007-2012 Timothy Wall, All Rights Reserved + +Copyright (c) 2007-2013 Timothy Wall, All Rights Reserved + +Copyright (c) 2007-2015 Timothy Wall, All Rights Reserved + +Copyright (c) 2009 Timothy Wall, All Rights Reserved + +Copyright (c) 2011 Timothy Wall, All Rights Reserved + +Copyright (c) 2012 Timothy Wall, All Rights Reserved + +Copyright (c) 2017 Matthias Bläsing, All Rights Reserved + +Copyright (c) 2018 Matthias Bläsing + +Copyright (c) 2019 Matthias Bläsing, All Rights Reserved diff --git a/distribution/engine/THIRD-PARTY/net.java.dev.jna.jna-5.5.0/NOTICES b/distribution/engine/THIRD-PARTY/net.java.dev.jna.jna-5.5.0/NOTICES deleted file mode 100644 index c89a59e31480..000000000000 --- a/distribution/engine/THIRD-PARTY/net.java.dev.jna.jna-5.5.0/NOTICES +++ /dev/null @@ -1,5 +0,0 @@ -Copyright (c) 2007 Timothy Wall, All Rights Reserved - -Copyright (c) 2007 Wayne Meissner, All Rights Reserved - -Copyright (c) 2017 Matthias Bläsing, All Rights Reserved diff --git a/distribution/engine/THIRD-PARTY/org.eclipse.jgit.org.eclipse.jgit-6.7.0.202309050840-r/NOTICES 
b/distribution/engine/THIRD-PARTY/org.eclipse.jgit.org.eclipse.jgit-6.7.0.202309050840-r/NOTICES index c794f1cad0a7..97b68c0c6f42 100644 --- a/distribution/engine/THIRD-PARTY/org.eclipse.jgit.org.eclipse.jgit-6.7.0.202309050840-r/NOTICES +++ b/distribution/engine/THIRD-PARTY/org.eclipse.jgit.org.eclipse.jgit-6.7.0.202309050840-r/NOTICES @@ -88,6 +88,8 @@ Copyright (C) 2008, 2022 Marek Zawirski and others Copyright (C) 2008, 2022 Shawn O. Pearce and others +Copyright (C) 2008, 2023 Google Inc. and others + Copyright (C) 2008, Charles O'Farrell Copyright (C) 2008, Florian Köberle @@ -338,8 +340,6 @@ Copyright (C) 2011, 2019 GitHub Inc. and others Copyright (C) 2011, 2019 Google Inc. and others -Copyright (C) 2011, 2020 Matthias Sohn and others - Copyright (C) 2011, 2020, Matthias Sohn and others Copyright (C) 2011, 2021 IBM Corporation and others @@ -350,6 +350,8 @@ Copyright (C) 2011, 2022 Christoph Brill and others Copyright (C) 2011, 2022 Google Inc. and others +Copyright (C) 2011, 2023 Matthias Sohn and others + Copyright (C) 2011, Chris Aniszczyk Copyright (C) 2011, Chris Aniszczyk and others @@ -398,10 +400,10 @@ Copyright (C) 2012 Christian Halstrick and others Copyright (C) 2012 Google Inc. and others -Copyright (C) 2012, 2021 GitHub Inc. and others - Copyright (C) 2012, 2022, Robin Rosenberg and others +Copyright (C) 2012, 2023 GitHub Inc. and others + Copyright (C) 2012, Christian Halstrick and others Copyright (C) 2012, Daniel Megert @@ -638,6 +640,10 @@ Copyright (C) 2021, Google Inc. and others Copyright (C) 2021, Google LLC. and others +Copyright (C) 2021, Matthias Sohn and others + +Copyright (C) 2021, Tencent. + Copyright (C) 2021, Thomas Wolf and others Copyright (C) 2022 Thomas Wolf and others @@ -646,7 +652,9 @@ Copyright (C) 2022, Matthias Sohn and others Copyright (C) 2022, Fabio Ponciroli and others -Copyright (C) 2022, Google Inc. 
and others +Copyright (C) 2022, Google LLC and others + +Copyright (C) 2022, Matthias Sohn and others Copyright (C) 2022, Simeon Andreev and others @@ -654,6 +662,20 @@ Copyright (C) 2022, Tencent. Copyright (C) 2022, Workday Inc. +Copyright (C) 2023, Google Inc. and others + +Copyright (C) 2023, Google LLC + +Copyright (C) 2023, Google LLC and others + +Copyright (C) 2023, Google LLC. + +Copyright (C) 2023, SAP SE and others + +Copyright (C) 2023, Tencent. + +Copyright (C) 2023, Thomas Wolf and others + Copyright (c) 2014 Konrad Kügler and others Copyright (c) 2019 Matthias Sohn @@ -672,6 +694,8 @@ Copyright (c) 2020, Google LLC and others Copyright (c) 2021 Qualcomm Innovation Center, Inc. +Copyright (c) 2023 Qualcomm Innovation Center, Inc. + Copyright 2017 Marc Stevens , Dan Shumow IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/effect/Exec.scala b/engine/language-server/src/main/scala/org/enso/languageserver/effect/Exec.scala index 8b6c18f642d5..7e35a7f5b6bd 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/effect/Exec.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/effect/Exec.scala @@ -14,7 +14,8 @@ trait Exec[-F[_, _]] { /** Execute Zio effect. * - * @param op effect to execute + * @param op effect to execute + * @param trace object prevents the library from messing up the user's execution trace * @return a future containing either a failure or a result */ def exec[E, A](op: F[E, A])(implicit trace: Trace): Future[Either[E, A]] @@ -22,8 +23,10 @@ trait Exec[-F[_, _]] { /** Execute Zio effect with timeout. * * @param timeout execution timeout - * @param op effect to execute - * @return a future + * @param op effect to execute + * @param trace object prevents the library from messing up the user's execution trace + * @return a future. On timeout future is failed with `TimeoutException`. 
+ * Otherwise future contains either a failure or a result. */ def execTimed[E, A]( timeout: FiniteDuration, @@ -32,9 +35,19 @@ trait Exec[-F[_, _]] { /** Execute long running task in background. * - * @param op effect to execute + * @param op effect to execute + * @param trace object prevents the library from messing up the user's execution trace */ def exec_[E <: Throwable, A](op: F[E, A])(implicit trace: Trace): Unit + + /** Execute long running task in background. + * + * @param op effect to execute with the explicit executor + * @param trace object prevents the library from messing up the user's execution trace + */ + def exec__[E <: Throwable, A](op: Executor => F[E, A])(implicit + trace: Trace + ): Unit } /** Executor of Zio effects. @@ -43,12 +56,7 @@ trait Exec[-F[_, _]] { */ case class ZioExec(runtime: effect.Runtime) extends Exec[ZioExec.IO] { - /** Execute Zio effect. - * - * @param op effect to execute - * @param trace object prevents the library from messing up the user's execution trace - * @return a future containing either a failure or a result - */ + /** @inheritdoc */ override def exec[E, A]( op: ZIO[ZAny, E, A] )(implicit trace: Trace): Future[Either[E, A]] = @@ -63,14 +71,7 @@ case class ZioExec(runtime: effect.Runtime) extends Exec[ZioExec.IO] { promise.future } - /** Execute Zio effect with timeout. - * - * @param timeout execution timeout - * @param op effect to execute - * @param trace object prevents the library from messing up the user's execution trace - * @return a future. On timeout future is failed with `TimeoutException`. - * Otherwise future contains either a failure or a result. - */ + /** @inheritdoc */ override def execTimed[E, A]( timeout: FiniteDuration, op: ZIO[ZAny, E, A] @@ -90,17 +91,23 @@ case class ZioExec(runtime: effect.Runtime) extends Exec[ZioExec.IO] { promise.future } - /** Execute long running task in background. 
- * - * @param op effect to execute - * @param trace object prevents the library from messing up the user's execution trace - */ + /** @inheritdoc */ override def exec_[E <: Throwable, A]( op: ZIO[ZAny, E, A] )(implicit trace: Trace): Unit = zio.Unsafe.unsafe { implicit unsafe => runtime.instance.unsafe.fork(ZIO.blocking(op)) } + + /** @inheritdoc */ + override def exec__[E <: Throwable, A]( + op: Executor => ZioExec.IO[E, A] + )(implicit trace: Trace): Unit = + zio.Unsafe.unsafe { implicit unsafe => + runtime.instance.unsafe.fork( + ZIO.blockingExecutor.flatMap(ec => op(ec).onExecutor(ec)) + ) + } } object ZioExec { diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/filemanager/PathWatcher.scala b/engine/language-server/src/main/scala/org/enso/languageserver/filemanager/PathWatcher.scala index 0cc2201d76a4..0e9928aac765 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/filemanager/PathWatcher.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/filemanager/PathWatcher.scala @@ -23,11 +23,11 @@ import org.enso.languageserver.filemanager.PathWatcher.{ ForwardResponse } import org.enso.languageserver.util.UnhandledLogging -import zio._ +import zio.ZIO import java.io.File import scala.concurrent.Await -import scala.util.{Failure, Success} +import scala.util.{Failure, Success, Try} /** Starts [[Watcher]], handles errors, converts and sends * events to the client. @@ -81,7 +81,7 @@ final class PathWatcher( _ <- ZIO.fromEither(startWatcher(watcher)) } yield () - exec + val executedResult = exec .exec(result) .map { case Right(()) => CapabilityAcquired @@ -89,15 +89,29 @@ final class PathWatcher( } .pipeTo(sender()) - pathToWatchResult.onComplete { - case Success(Right(root)) => - logger.info("Initialized [{}] for [{}].", watcherFactory.getClass, path) - context.become(initializedStage(root, path, clients)) - case Success(Left(err)) => - logger.error("Failed to resolve the path [{}]. 
{}", path, err) - context.stop(self) + Try(Await.ready(executedResult, config.timeout)) match { + case Success(_) => + pathToWatchResult.onComplete { + case Success(Right(root)) => + logger.info( + "Initialized [{}] for [{}].", + watcherFactory.getClass, + path + ) + context.become(initializedStage(root, path, clients)) + case Success(Left(err)) => + logger.error("Failed to resolve the path [{}]. {}", path, err) + context.stop(self) + case Failure(err) => + logger.error("Failed to resolve the path [{}]", path, err) + context.stop(self) + } case Failure(err) => - logger.error("Failed to resolve the path [{}]", path, err) + logger.error( + "Failed to initialize path watcher for path [{}]. {}", + path, + err + ) context.stop(self) } } @@ -182,7 +196,7 @@ final class PathWatcher( Either .catchNonFatal { fileWatcher = Some(watcher) - exec.exec_(ZIO.attempt(watcher.start())) + exec.exec__(ec => ZIO.attempt(watcher.start(ec.asJava))) } .leftMap(errorHandler) diff --git a/engine/language-server/src/test/scala/org/enso/filewatcher/NoopWatcher.scala b/engine/language-server/src/test/scala/org/enso/filewatcher/NoopWatcher.scala index 4668cd29e867..87484682cae1 100644 --- a/engine/language-server/src/test/scala/org/enso/filewatcher/NoopWatcher.scala +++ b/engine/language-server/src/test/scala/org/enso/filewatcher/NoopWatcher.scala @@ -1,10 +1,12 @@ package org.enso.filewatcher +import java.util.concurrent.Executor + /** A file watcher that does nothing. 
*/ class NoopWatcher extends Watcher { /** @inheritdoc */ - override def start(): Unit = () + override def start(ec: Executor): Unit = () /** @inheritdoc */ override def stop(): Unit = () diff --git a/lib/scala/filewatcher/src/main/scala/org/enso/filewatcher/Watcher.scala b/lib/scala/filewatcher/src/main/scala/org/enso/filewatcher/Watcher.scala index 6d23ffda4f96..a0b7d892a16a 100644 --- a/lib/scala/filewatcher/src/main/scala/org/enso/filewatcher/Watcher.scala +++ b/lib/scala/filewatcher/src/main/scala/org/enso/filewatcher/Watcher.scala @@ -1,11 +1,15 @@ package org.enso.filewatcher import java.nio.file.Path +import java.util.concurrent.Executor trait Watcher { - /** Start the file watcher. */ - def start(): Unit + /** Start the file watcher. + * + * @param executor executor under which this watcher should be run in the background + */ + def start(executor: Executor): Unit /** Stop the file watcher. */ def stop(): Unit diff --git a/lib/scala/filewatcher/src/main/scala/org/enso/filewatcher/WatcherAdapter.scala b/lib/scala/filewatcher/src/main/scala/org/enso/filewatcher/WatcherAdapter.scala index b5a77f7fbae2..6063ae5e762e 100644 --- a/lib/scala/filewatcher/src/main/scala/org/enso/filewatcher/WatcherAdapter.scala +++ b/lib/scala/filewatcher/src/main/scala/org/enso/filewatcher/WatcherAdapter.scala @@ -1,7 +1,12 @@ package org.enso.filewatcher -import io.methvin.watcher._ +import io.methvin.watcher.{ + DirectoryChangeEvent, + DirectoryChangeListener, + DirectoryWatcher +} +import java.util.concurrent.Executor import java.nio.file.Path /** Watches the root with subdirectories and executes callback on file event. 
@@ -24,8 +29,8 @@ final class WatcherAdapter( .build() /** @inheritdoc */ - override def start(): Unit = { - watcher.watch() + override def start(executor: Executor): Unit = { + watcher.watchAsync(executor) } /** @inheritdoc */ diff --git a/lib/scala/filewatcher/src/test/scala/org/enso/filewatcher/WatcherAdapterSpec.scala b/lib/scala/filewatcher/src/test/scala/org/enso/filewatcher/WatcherAdapterSpec.scala index 54e670a60712..2c597e301272 100644 --- a/lib/scala/filewatcher/src/test/scala/org/enso/filewatcher/WatcherAdapterSpec.scala +++ b/lib/scala/filewatcher/src/test/scala/org/enso/filewatcher/WatcherAdapterSpec.scala @@ -1,9 +1,9 @@ package org.enso.filewatcher -import java.nio.file.{Files, Path, Paths} -import java.util.concurrent.{Executors, LinkedBlockingQueue, Semaphore} - import org.apache.commons.io.FileUtils + +import java.nio.file.{Files, Path, Paths} +import java.util.concurrent.{Executors, LinkedBlockingQueue} import org.enso.testkit.RetrySpec import org.scalatest.flatspec.AnyFlatSpec import org.scalatest.matchers.should.Matchers @@ -66,19 +66,14 @@ class WatcherAdapterSpec extends AnyFlatSpec with Matchers with RetrySpec { def withWatcher( test: (Path, LinkedBlockingQueue[Watcher.WatcherEvent]) => Any ): Any = { - val lock = new Semaphore(0) val executor = Executors.newSingleThreadExecutor() val tmp = Files.createTempDirectory(null).toRealPath() val queue = new LinkedBlockingQueue[Watcher.WatcherEvent]() val watcher = new WatcherAdapterFactory().build(tmp, queue.put, println(_)) - executor.submit[Any] { () => - lock.release() - watcher.start() - } + watcher.start(executor) try { - lock.tryAcquire(Timeout.length, Timeout.unit) test(tmp, queue) } finally { watcher.stop() diff --git a/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/copyright-ignore b/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/copyright-ignore new file mode 100644 index 000000000000..8ddf57cc354d --- /dev/null +++ 
b/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/copyright-ignore @@ -0,0 +1,2 @@ +

This is public domain code with no copyrights. From home page of +This is public domain code with no copyrights. From home page of diff --git a/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/copyright-keep b/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/copyright-keep new file mode 100644 index 000000000000..8a5e85404897 --- /dev/null +++ b/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/copyright-keep @@ -0,0 +1,2 @@ +Some portions of this file Copyright (c) 2004-2006 Intel Corporation +this work for additional information regarding copyright ownership. diff --git a/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/files-ignore b/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/files-ignore new file mode 100644 index 000000000000..0256724c8d06 --- /dev/null +++ b/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/files-ignore @@ -0,0 +1 @@ +META-INF/LICENSE.txt diff --git a/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/files-keep b/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/files-keep new file mode 100644 index 000000000000..f9a3ec844f02 --- /dev/null +++ b/tools/legal-review/engine/commons-codec.commons-codec-1.16.0/files-keep @@ -0,0 +1 @@ +META-INF/NOTICE.txt diff --git a/tools/legal-review/engine/io.methvin.directory-watcher-0.9.10/copyright-keep b/tools/legal-review/engine/io.methvin.directory-watcher-0.18.0/copyright-keep similarity index 100% rename from tools/legal-review/engine/io.methvin.directory-watcher-0.9.10/copyright-keep rename to tools/legal-review/engine/io.methvin.directory-watcher-0.18.0/copyright-keep diff --git a/tools/legal-review/engine/net.java.dev.jna.jna-5.12.1/copyright-ignore b/tools/legal-review/engine/net.java.dev.jna.jna-5.12.1/copyright-ignore new file mode 100644 index 000000000000..60ceb70d65a8 --- /dev/null +++ b/tools/legal-review/engine/net.java.dev.jna.jna-5.12.1/copyright-ignore @@ -0,0 +1 @@ +Copyright 2007 Timothy 
Wall diff --git a/tools/legal-review/engine/net.java.dev.jna.jna-5.5.0/copyright-ignore b/tools/legal-review/engine/net.java.dev.jna.jna-5.12.1/copyright-keep similarity index 75% rename from tools/legal-review/engine/net.java.dev.jna.jna-5.5.0/copyright-ignore rename to tools/legal-review/engine/net.java.dev.jna.jna-5.12.1/copyright-keep index cfc27f100295..8cda094bc74e 100644 --- a/tools/legal-review/engine/net.java.dev.jna.jna-5.5.0/copyright-ignore +++ b/tools/legal-review/engine/net.java.dev.jna.jna-5.12.1/copyright-keep @@ -1,11 +1,13 @@ Copyright (c) 2007 Timothy Wall +Copyright (c) 2007 Timothy Wall, All Rights Reserved +Copyright (c) 2007 Wayne Meissner, All Rights Reserved Copyright (c) 2007-2008 Timothy Wall, All Rights Reserved -Copyright (c) 2007-2012 Timothy Wall, All Rights Reserved Copyright (c) 2007-2013 Timothy Wall, All Rights Reserved Copyright (c) 2007-2015 Timothy Wall, All Rights Reserved Copyright (c) 2009 Timothy Wall, All Rights Reserved Copyright (c) 2011 Timothy Wall, All Rights Reserved Copyright (c) 2012 Timothy Wall, All Rights Reserved +Copyright (c) 2017 Matthias Bläsing, All Rights Reserved Copyright (c) 2018 Matthias Bläsing Copyright (c) 2019 Matthias Bläsing, All Rights Reserved -Copyright 2007 Timothy Wall +Copyright (c) 2007-2012 Timothy Wall, All Rights Reserved diff --git a/tools/legal-review/engine/net.java.dev.jna.jna-5.5.0/copyright-keep b/tools/legal-review/engine/net.java.dev.jna.jna-5.5.0/copyright-keep deleted file mode 100644 index ab004d769c85..000000000000 --- a/tools/legal-review/engine/net.java.dev.jna.jna-5.5.0/copyright-keep +++ /dev/null @@ -1,3 +0,0 @@ -Copyright (c) 2007 Timothy Wall, All Rights Reserved -Copyright (c) 2007 Wayne Meissner, All Rights Reserved -Copyright (c) 2017 Matthias Bläsing, All Rights Reserved diff --git a/tools/legal-review/engine/org.eclipse.jgit.org.eclipse.jgit-6.7.0.202309050840-r/copyright-keep 
b/tools/legal-review/engine/org.eclipse.jgit.org.eclipse.jgit-6.7.0.202309050840-r/copyright-keep index 467486811b76..1da289ef1ce4 100644 --- a/tools/legal-review/engine/org.eclipse.jgit.org.eclipse.jgit-6.7.0.202309050840-r/copyright-keep +++ b/tools/legal-review/engine/org.eclipse.jgit.org.eclipse.jgit-6.7.0.202309050840-r/copyright-keep @@ -70,7 +70,6 @@ Copyright (C) 2008, Imran M Yousuf Copyright (C) 2021, Google Inc. and others Copyright (C) 2022 Thomas Wolf and others Copyright (C) 2022, Simeon Andreev and others -Copyright (C) 2022, Google Inc. and others Copyright (C) 2022, Fabio Ponciroli and others Copyright (C) 2022, Matthias Sohn and others Copyright (C) 2021, Thomas Wolf and others @@ -202,7 +201,6 @@ Copyright (C) 2011, 2013 Google Inc., and others. and others Copyright (C) 2011, 2013 Robin Rosenberg and others Copyright (C) 2011, 2019 GitHub Inc. and others Copyright (C) 2011, 2019 Google Inc. and others -Copyright (C) 2011, 2020 Matthias Sohn and others Copyright (C) 2011, 2020, Matthias Sohn and others Copyright (C) 2011, 2021 IBM Corporation and others Copyright (C) 2011, 2022 Chris Aniszczyk and others @@ -232,7 +230,6 @@ Copyright (C) 2011-2013, Chris Aniszczyk and others Copyright (C) 2011-2013, Robin Rosenberg and others Copyright (C) 2012 Christian Halstrick and others Copyright (C) 2012 Google Inc. and others -Copyright (C) 2012, 2021 GitHub Inc. and others Copyright (C) 2012, 2022, Robin Rosenberg and others Copyright (C) 2012, Christian Halstrick and others Copyright (C) 2012, Daniel Megert @@ -339,3 +336,18 @@ other copyright owners as documented in the project's IP log. copyright notice, this list of conditions and the following Copyright (C) 2010, Mathias Kinzler and Copyright (C) 2010, Christian Halstrick and others +Copyright (C) 2021, Tencent. +Copyright (C) 2021, Matthias Sohn and others +Copyright (C) 2022, Matthias Sohn and others +Copyright (C) 2023, Google Inc. 
and others +Copyright (C) 2023, Google LLC +Copyright (C) 2023, Google LLC and others +Copyright (C) 2023, Google LLC. +Copyright (C) 2023, SAP SE and others +Copyright (C) 2023, Tencent. +Copyright (C) 2023, Thomas Wolf and others +Copyright (c) 2023 Qualcomm Innovation Center, Inc. +Copyright (C) 2022, Google LLC and others +Copyright (C) 2008, 2023 Google Inc. and others +Copyright (C) 2012, 2023 GitHub Inc. and others +Copyright (C) 2011, 2023 Matthias Sohn and others diff --git a/tools/legal-review/engine/report-state b/tools/legal-review/engine/report-state index 3394cce92080..6d014f0d145d 100644 --- a/tools/legal-review/engine/report-state +++ b/tools/legal-review/engine/report-state @@ -1,3 +1,3 @@ -BBA040F93183A17D3A861F0013540424DEBD31136C001A4653F7C6CEFC212A78 -3933B106917A6381759200AEE4B93A368AE355CB8DFF87C1819A346C54D91E0E +111746D8663350778901C6EE9D428BCA659E04160F16055FF33CDD29294D03DB +D6B194A8D9FE9212D9077E0A83DF32895A4840E704817E4479D98B4329497B49 0 From c6cae8cd11f68ff156e53ca2b822cf6d3254e58e Mon Sep 17 00:00:00 2001 From: Dmitry Bushev Date: Thu, 2 Nov 2023 11:51:37 +0000 Subject: [PATCH 12/12] Unlock resources initialization asynchronously (#8206) Debugging the issue reported by @PabloBuchu when the language server initialization hangs in the cloud. I'm still not sure what is happening in the cloud because I was not able to reproduce it when trying to connect two clients simultaneously. Another potential source of the issue may be the Scala Future -> Java CompletableFuture conversion, but I didn't find anything suspicious there. 
--- .../boot/resource/BlockingInitialization.java | 7 +++++-- .../languageserver/boot/resource/RepoInitialization.java | 7 ++++--- .../enso/languageserver/boot/ResourcesInitialization.scala | 3 ++- 3 files changed, 11 insertions(+), 6 deletions(-) diff --git a/engine/language-server/src/main/java/org/enso/languageserver/boot/resource/BlockingInitialization.java b/engine/language-server/src/main/java/org/enso/languageserver/boot/resource/BlockingInitialization.java index c56692857d33..640a3057a89b 100644 --- a/engine/language-server/src/main/java/org/enso/languageserver/boot/resource/BlockingInitialization.java +++ b/engine/language-server/src/main/java/org/enso/languageserver/boot/resource/BlockingInitialization.java @@ -1,12 +1,14 @@ package org.enso.languageserver.boot.resource; import java.util.concurrent.CompletableFuture; +import java.util.concurrent.Executor; import java.util.concurrent.Semaphore; /** Initialization component ensuring that only one initialization sequence is running at a time. 
*/ public final class BlockingInitialization implements InitializationComponent { private final InitializationComponent component; + private final Executor executor; private final Semaphore lock = new Semaphore(1); /** @@ -14,8 +16,9 @@ public final class BlockingInitialization implements InitializationComponent { * * @param component the underlying initialization component to run */ - public BlockingInitialization(InitializationComponent component) { + public BlockingInitialization(InitializationComponent component, Executor executor) { this.component = component; + this.executor = executor; } @Override @@ -30,6 +33,6 @@ public CompletableFuture init() { } catch (InterruptedException e) { return CompletableFuture.failedFuture(e); } - return component.init().whenComplete((res, err) -> lock.release()); + return component.init().whenCompleteAsync((res, err) -> lock.release(), executor); } } diff --git a/engine/language-server/src/main/java/org/enso/languageserver/boot/resource/RepoInitialization.java b/engine/language-server/src/main/java/org/enso/languageserver/boot/resource/RepoInitialization.java index b17fa85b29eb..14bb6825ed78 100644 --- a/engine/language-server/src/main/java/org/enso/languageserver/boot/resource/RepoInitialization.java +++ b/engine/language-server/src/main/java/org/enso/languageserver/boot/resource/RepoInitialization.java @@ -67,7 +67,7 @@ public boolean isInitialized() { public CompletableFuture init() { return initSqlDatabase() .thenComposeAsync(v -> initSuggestionsRepo(), executor) - .thenRun(() -> isInitialized = true); + .thenRunAsync(() -> isInitialized = true, executor); } private CompletableFuture initSqlDatabase() { @@ -92,7 +92,8 @@ private CompletableFuture initSuggestionsRepo() { () -> logger.info("Initializing suggestions repo [{}]...", sqlDatabase), executor) .thenComposeAsync( v -> - doInitSuggestionsRepo().exceptionallyComposeAsync(this::recoverInitializationError), + doInitSuggestionsRepo() + 
.exceptionallyComposeAsync(this::recoverInitializationError, executor), executor) .thenRunAsync( () -> logger.info("Initialized Suggestions repo [{}].", sqlDatabase), executor) @@ -171,6 +172,6 @@ private CompletableFuture recoverClearDatabaseFile(Throwable error, int re } private CompletionStage doInitSuggestionsRepo() { - return FutureConverters.asJava(sqlSuggestionsRepo.init()).thenAccept(res -> {}); + return FutureConverters.asJava(sqlSuggestionsRepo.init()).thenAcceptAsync(res -> {}, executor); } } diff --git a/engine/language-server/src/main/scala/org/enso/languageserver/boot/ResourcesInitialization.scala b/engine/language-server/src/main/scala/org/enso/languageserver/boot/ResourcesInitialization.scala index 36a93359ad69..bb583d78fa0a 100644 --- a/engine/language-server/src/main/scala/org/enso/languageserver/boot/ResourcesInitialization.scala +++ b/engine/language-server/src/main/scala/org/enso/languageserver/boot/ResourcesInitialization.scala @@ -61,7 +61,8 @@ object ResourcesInitialization { ), new TruffleContextInitialization(ec, truffleContext, eventStream) ) - ) + ), + ec ) } }