From 45276b243d655477f24096e3ec3f2239f3b4aa56 Mon Sep 17 00:00:00 2001
From: James Dunkerley
Date: Tue, 8 Nov 2022 15:57:59 +0000
Subject: [PATCH 1/4] Expanding Derived Columns and Expression Syntax (#3782)

- Added expression ANTLR4 grammar and sbt-based build.
- Added expression support to `set` and `filter` on the Database and InMemory `Table`.
- Added expression support to `aggregate` on the Database and InMemory `Table`.
- Removed old aggregate functions (`sum`, `max`, `min` and `mean`) from `Column` types.
- Adjusted database `Column` `+` operator to do concatenation (`||`) when applied to text types.
- Added power operator `^` to both `Column` types.
- Adjusted `iif` to allow columns to be passed as the `when_true` and `when_false` parameters.
- Added `is_present` to database `Column` type.
- Added `coalesce`, `min` and `max` functions to both `Column` types, performing row-based operations.
- Added support for `Date`, `Time_Of_Day` and `Date_Time` constants in the database backend.
- Added `read` method to InMemory `Column` returning `self` (or a slice).

# Important Notes
- Moved approximate type computation to `SQL_Type`.
- Fixed issue in `LongNumericOp` where it was always casting to a double.
- Removed `head` from InMemory Table (still has `first` method).
---
 CHANGELOG.md | 3 +
 build.sbt | 11 +-
 .../0.0.0-dev/src/Connection/Connection.enso | 4 +-
 .../Database/0.0.0-dev/src/Data/Column.enso | 230 ++++++-----
 .../0.0.0-dev/src/Data/SQL_Statement.enso | 4 +
 .../Database/0.0.0-dev/src/Data/SQL_Type.enso | 38 +-
 .../Database/0.0.0-dev/src/Data/Table.enso | 68 ++--
 .../src/Internal/Aggregate_Helper.enso | 54 +--
 .../src/Internal/Base_Generator.enso | 15 +-
 .../0.0.0-dev/src/Internal/IR/Context.enso | 10 +-
 .../0.0.0-dev/src/Internal/IR/From_Spec.enso | 6 +-
 .../src/Internal/IR/Internal_Column.enso | 4 +-
 .../src/Internal/IR/Order_Descriptor.enso | 4 +-
 .../0.0.0-dev/src/Internal/IR/Query.enso | 4 +-
 .../{Expression.enso => SQL_Expression.enso} | 4 +-
 .../src/Internal/JDBC_Connection.enso | 3 +
 .../Internal/Postgres/Postgres_Dialect.enso | 6 +-
 .../Table/0.0.0-dev/THIRD-PARTY/NOTICE | 5 +
 .../org.antlr.antlr4-runtime-4.10.1/NOTICES | 1 +
 .../Table/0.0.0-dev/src/Data/Column.enso | 268 +++++++------
 .../Table/0.0.0-dev/src/Data/Expression.enso | 49 +++
 .../Table/0.0.0-dev/src/Data/Table.enso | 36 +-
 .../0.0.0-dev/src/Internal/Table_Helpers.enso | 9 +-
 .../Standard/Test/0.0.0-dev/src/Bench.enso | 2 +-
 project/plugins.sbt | 1 +
 std-bits/table/src/main/antlr4/Expression.g4 | 126 ++++++
 .../operation/aggregate/Aggregator.java | 28 --
 .../operation/aggregate/CountAggregator.java | 33 --
 .../aggregate/FunctionAggregator.java | 59 ---
 .../numeric/LongToLongAggregator.java | 59 ---
 .../aggregate/numeric/NumericAggregator.java | 78 ----
 .../operation/map/numeric/LongNumericOp.java | 17 +-
 .../data/column/storage/BoolStorage.java | 18 +-
 .../data/column/storage/DoubleStorage.java | 10 +-
 .../data/column/storage/LongStorage.java | 78 +---
 .../data/column/storage/NumericStorage.java | 45 ---
 .../table/data/column/storage/Storage.java | 42 +-
 .../org/enso/table/data/table/Column.java | 26 +-
 .../expressions/ExpressionVisitorImpl.java | 373 ++++++++++++++++++
 test/Table_Tests/src/Column_Spec.enso | 31 +-
 .../src/Database/Codegen_Spec.enso | 2 +-
 .../Table_Tests/src/Database/Common_Spec.enso | 12 -
 test/Table_Tests/src/Expression_Spec.enso | 290 ++++++++++++++
 test/Table_Tests/src/Main.enso | 4 +-
 test/Table_Tests/src/Table_Spec.enso | 8 -
 .../copyright-ignore | 1 +
 .../copyright-keep | 1 +
 tools/legal-review/Table/report-state
| 4 +- 48 files changed, 1391 insertions(+), 793 deletions(-) rename distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/{Expression.enso => SQL_Expression.enso} (94%) create mode 100644 distribution/lib/Standard/Table/0.0.0-dev/THIRD-PARTY/org.antlr.antlr4-runtime-4.10.1/NOTICES create mode 100644 distribution/lib/Standard/Table/0.0.0-dev/src/Data/Expression.enso create mode 100644 std-bits/table/src/main/antlr4/Expression.g4 delete mode 100644 std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/Aggregator.java delete mode 100644 std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/CountAggregator.java delete mode 100644 std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/FunctionAggregator.java delete mode 100644 std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/numeric/LongToLongAggregator.java delete mode 100644 std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/numeric/NumericAggregator.java create mode 100644 std-bits/table/src/main/java/org/enso/table/expressions/ExpressionVisitorImpl.java create mode 100644 test/Table_Tests/src/Expression_Spec.enso create mode 100644 tools/legal-review/Table/org.antlr.antlr4-runtime-4.10.1/copyright-ignore create mode 100644 tools/legal-review/Table/org.antlr.antlr4-runtime-4.10.1/copyright-keep diff --git a/CHANGELOG.md b/CHANGELOG.md index f7a1b9e1fc9b..342ff3ec046c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -230,6 +230,8 @@ - [Implemented `Table.rows` giving access to a vector of rows.][3827] - [Define Enso epoch start as 15th October 1582][3804] - [Implemented `Period` type][3818] +- [Implemented new functions on Column and added expression syntax support to + create derived Columns.][3782] [debug-shortcuts]: https://github.com/enso-org/enso/blob/develop/app/gui/docs/product/shortcuts.md#debug @@ -368,6 +370,7 @@ [3818]: https://github.com/enso-org/enso/pull/3818 [3776]: https://github.com/enso-org/enso/pull/3776 [3836]: https://github.com/enso-org/enso/pull/3836 +[3782]: https://github.com/enso-org/enso/pull/3782 #### Enso Compiler diff --git a/build.sbt b/build.sbt index 5e01534dba73..30225f2a4ca8 100644 --- a/build.sbt +++ b/build.sbt @@ -1872,17 +1872,26 @@ lazy val `std-base` = project lazy val `std-table` = project .in(file("std-bits") / "table") + .enablePlugins(Antlr4Plugin) .settings( frgaalJavaCompilerSetting, autoScalaLibrary := false, Compile / packageBin / artifactPath := `table-polyglot-root` / "std-table.jar", + Antlr4 / antlr4PackageName := Some("org.enso.table.expressions"), + Antlr4 / antlr4Version := "4.10.1", + Antlr4 / antlr4GenVisitor := true, + Antlr4 / antlr4TreatWarningsAsErrors := true, + Compile / managedSourceDirectories += { + (Antlr4 / sourceManaged).value / "main" / "antlr4" + }, libraryDependencies ++= Seq( "org.graalvm.truffle" % "truffle-api" % graalVersion % "provided", "org.netbeans.api" % "org-openide-util-lookup" % netbeansApiVersion % "provided", "com.univocity" % "univocity-parsers" % "2.9.1", "org.apache.poi" % "poi-ooxml" % "5.2.2", - "org.apache.xmlbeans" % "xmlbeans" % "5.1.0" + "org.apache.xmlbeans" % "xmlbeans" % "5.1.0", + "org.antlr" % "antlr4-runtime" % "4.10.1" ), Compile / packageBin := Def.task { val result = (Compile / packageBin).value diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso index 48c1935f5012..c7381378e620 100644 --- 
a/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Connection/Connection.enso
@@ -8,7 +8,7 @@ import project.Data.SQL_Statement.SQL_Statement
 import project.Data.SQL_Type.SQL_Type
 import project.Data.Table as Database_Table
 import project.Internal.IR.Context.Context
-import project.Internal.IR.Expression.Expression
+import project.Internal.IR.SQL_Expression.SQL_Expression
 import project.Internal.IR.Query.Query
 from project.Internal.Result_Set import read_column, result_set_to_table
@@ -182,7 +182,7 @@ type Connection
         db_table = if create_table.is_error then create_table else self.query (SQL_Query.Table_Name name)
         if db_table.is_error.not then
-            pairs = db_table.internal_columns.map col->[col.name, Expression.Constant col.sql_type Nothing]
+            pairs = db_table.internal_columns.map col->[col.name, SQL_Expression.Constant col.sql_type Nothing]
             insert_query = self.dialect.generate_sql <| Query.Insert name pairs
             insert_template = insert_query.prepare.first
             self.jdbc_connection.load_table insert_template db_table table batch_size
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso
index a5013664aed7..f30a28c08374 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Column.enso
@@ -9,7 +9,7 @@ import project.Data.SQL_Type.SQL_Type
 import project.Data.Table.Integrity_Error
 import project.Internal.Helpers
 import project.Internal.IR.Context.Context
-import project.Internal.IR.Expression.Expression
+import project.Internal.IR.SQL_Expression.SQL_Expression
 import project.Internal.IR.Internal_Column.Internal_Column
 import project.Internal.IR.Query.Query
@@ -36,7 +36,7 @@ type Column
       which they come. Combined expressions must come from the same context -
       they must both have the same filtering, grouping, etc. rules applied to
       be combined.
-    Value name:Text connection:Connection sql_type:SQL_Type expression:Expression context:Context
+    Value name:Text connection:Connection sql_type:SQL_Type expression:SQL_Expression context:Context

     ## UNSTABLE
@@ -75,7 +75,7 @@ type Column
     ## UNSTABLE

-       Returns a materialized dataframe containing rows of this table.
+       Returns a materialized column containing the rows of this column.

        Arguments:
        - max_rows: specifies a maximum number of rows to fetch; if not set, all
@@ -135,12 +135,13 @@ type Column
                     Error.throw <| Unsupported_Database_Operation_Error_Data "Cannot use columns coming from different contexts in one expression without a join."
                 constant ->
                     actual_operand_type = operand_type.if_nothing self.sql_type
-                    Expression.Constant actual_operand_type constant
+                    SQL_Expression.Constant actual_operand_type constant
+
        actual_operand_types = operand_types.if_nothing (Vector.fill operands.length Nothing)
        expressions = operands.zip actual_operand_types prepare_operand
        actual_new_type = new_type.if_nothing self.sql_type
-        new_expr = Expression.Operation op_kind ([self.expression] + expressions)
+        new_expr = SQL_Expression.Operation op_kind ([self.expression] + expressions)
         Column.Value self.name self.connection actual_new_type new_expr self.context

     ## PRIVATE
@@ -195,41 +196,6 @@ type Column
     join self other on=Nothing drop_unmatched=False left_suffix='_left' right_suffix='_right' =
         self.to_table.join other on drop_unmatched left_suffix right_suffix

-    ## UNSTABLE
-
-       Sums the values in this column.
-    sum : Any
-    sum self = self.compute_aggregate "SUM"
-
-    ## UNSTABLE
-
-       Computes the maximum element of this column.
-    max : Any
-    max self = self.compute_aggregate "MAX"
-
-    ## UNSTABLE
-
-       Computes the minimum element of this column.
-    min : Any
-    min self = self.compute_aggregate "MIN"
-
-    ## UNSTABLE
-
-       Computes the mean of non-missing elements of this column.
-    mean : Any
-    mean self = self.compute_aggregate "AVG"
-
-    ## PRIVATE
-
-       Computes an aggregate operator.
-
-       Arguments:
-       - op_name: The name of the operator to compute.
-    compute_aggregate : Text
-    compute_aggregate self op_name =
-        agg = make_aggregate self op_name
-        agg.to_vector . at 0
-
     ## UNSTABLE

        Returns the length of this column.
@@ -355,7 +321,12 @@ type Column
          of `self`. If `other` is a column, the operation is performed pairwise
          between corresponding elements of `self` and `other`.
     + : Column | Any -> Column
-    + self other = self.make_binary_op "+" other
+    + self other =
+        ## TODO: Revisit this as part of the column value type work.
+        op = case other of
+            _ : Column -> if self.sql_type.is_definitely_numeric || other.sql_type.is_definitely_numeric then 'ADD_NUMBER' else 'ADD_TEXT'
+            _ -> if self.sql_type.is_definitely_numeric then 'ADD_NUMBER' else 'ADD_TEXT'
+        self.make_binary_op op other

     ## UNSTABLE
@@ -422,6 +393,34 @@ type Column
     % : Column | Any -> Column
     % self other = self.make_binary_op "%" other

+    ## ALIAS Power
+
+       Element-wise exponentiation.
+
+       Arguments:
+       - other: The exponent to raise `self` by. If `other` is a column, the
+         power operation is performed pairwise between corresponding elements
+         of `self` and `other`.
+
+       Returns a column containing the result of raising each element of `self`
+       to the power of `other`.
+
+       > Example
+         Squares the elements of one column.
+
+             import Standard.Examples
+
+             example_pow = Examples.decimal_column ^ 2
+
+       > Example
+         Raises each value in a column to the power of the value in another column.
+
+             import Standard.Examples
+
+             example_pow = Examples.decimal_column ^ Examples.integer_column
+    ^ : Column | Any -> Column
+    ^ self other = self.make_binary_op '^' other
+
     ## UNSTABLE

        Element-wise boolean conjunction.
@@ -456,12 +455,14 @@ type Column
     not : Column
     not self = self.make_unary_op "NOT"

-    ## UNSTABLE
+    ## ALIAS IF
+
        Replaces `True` values with `when_true` and `False` with `when_false`.
        Only meant for use with boolean columns.

-       TODO: Currently `when_true` and `when_false` need to be a single value.
-       In the future the API will also support row-based IIF if they are columns.
+       Arguments:
+       - when_true: value or column to use when `self` is `True`.
+       - when_false: value or column to use when `self` is `False`.
     iif : Any -> Any -> Column
     iif self when_true when_false =
         ## TODO we should adjust new_type based on types when_true and
@@ -473,17 +474,68 @@ type Column
            when_false being either columns or regular values and rely on a
            mapping of Enso base types to SQL types, and a rule for extracting a
            common type.
-        approximate_type x = case x of
-            _ : Integer -> SQL_Type.integer
-            _ : Decimal -> SQL_Type.real
-            _ : Text -> SQL_Type.text
-            _ : Boolean -> SQL_Type.boolean
-            _ -> Error.throw (Illegal_Argument_Error_Data "Unsupported type.")
-        left_type = approximate_type when_true
-        right_type = approximate_type when_false
+        left_type = get_approximate_type when_true self.sql_type
+        right_type = get_approximate_type when_false self.sql_type
         if left_type != right_type then Error.throw (Illegal_Argument_Error_Data "when_true and when_false types do not match") else
             self.make_op "IIF" [when_true, when_false] new_type=left_type

+    ## Returns a column of the first non-`Nothing` value on each row of `self`
+       and the `values` list.
+
+       Arguments:
+       - values: list of columns or values to coalesce with `self`.
+
+       > Example
+         Get the first non-`Nothing` value in two columns.
+
+             import Standard.Examples
+
+             example_coalesce = Examples.decimal_column.coalesce Examples.integer_column
+    coalesce : (Any | Vector Any) -> Column
+    coalesce self values = case values of
+        _ : Vector.Vector ->
+            if values.any (v->(self.sql_type != get_approximate_type v self.sql_type)) then Error.throw (Illegal_Argument_Error_Data "self and values types do not all match") else
+                self.make_op "COALESCE" values new_type=self.sql_type
+        _ : Array -> self.coalesce (Vector.from_polyglot_array values)
+        _ -> self.coalesce [values]
+
+    ## Returns a column of the row-wise minimum of `self` and the `values` list.
+
+       Arguments:
+       - values: list of columns or values to compute the minimum with `self`.
+
+       > Example
+         Get the minimum value in two columns.
+
+             import Standard.Examples
+
+             example_min = Examples.decimal_column.min Examples.integer_column
+    min : (Any | Vector Any) -> Column
+    min self values = case values of
+        _ : Vector.Vector ->
+            if values.any (v->(self.sql_type != get_approximate_type v self.sql_type)) then Error.throw (Illegal_Argument_Error_Data "self and values types do not all match") else
+                self.make_op "ROW_MIN" values new_type=self.sql_type
+        _ : Array -> self.min (Vector.from_polyglot_array values)
+        _ -> self.min [values]
+
+    ## Returns a column of the row-wise maximum of `self` and the `values` list.
+
+       Arguments:
+       - values: list of columns or values to compute the maximum with `self`.
+
+       > Example
+         Get the maximum value in two columns.
+
+             import Standard.Examples
+
+             example_max = Examples.decimal_column.max Examples.integer_column
+    max : (Any | Vector Any) -> Column
+    max self values = case values of
+        _ : Vector.Vector ->
+            if values.any (v->(self.sql_type != get_approximate_type v self.sql_type)) then Error.throw (Illegal_Argument_Error_Data "self and values types do not all match") else
+                self.make_op "ROW_MAX" values new_type=self.sql_type
+        _ : Array -> self.max (Vector.from_polyglot_array values)
+        _ -> self.max [values]

     ## UNSTABLE
@@ -504,6 +556,18 @@ type Column
     is_empty : Column
     is_empty self = self.make_unary_op "IS_EMPTY" new_type=SQL_Type.boolean

+    ## Returns a column of booleans, with `True` items at the positions where
+       this column does not contain a `Nothing`.
+
+       > Example
+         Check a column for present values.
+
+             import Standard.Examples
+
+             example_is_present = Examples.decimal_column.is_present
+    is_present : Column
+    is_present self = self.is_missing.not
+
     ## PRIVATE
        Returns a column of booleans with `True` at the positions where this
        column contains a blank value.
@@ -667,6 +731,7 @@ type Column
         example_contains = Examples.text_column_1.is_in [1, 2, 5]
     is_in : Column | Vector -> Column
     is_in self vector = case vector of
+        _ : Array -> self.is_in (Vector.from_polyglot_array vector)
         _ : Vector.Vector ->
             ## This is slightly hacky - we don't provide operand types as we want
                to allow any type to get through and currently we do not have a mapping
@@ -695,18 +760,17 @@ type Column
         column : Column ->
             if Helpers.check_connection self column . not then (Error.throw (Integrity_Error.Error "Column "+column.name)) else
                 ## We slightly abuse the expression syntax putting a Query as one
                    of the sub-expressions. Once type-checking is added, we may need to
-                   amend the signature of `Expression.Operation` to account for
+                   amend the signature of `SQL_Expression.Operation` to account for
                    this. Also, unfortunately as `NULL IN (...)` is `NULL` in SQL, we
                    need to do separate handling of nulls - we check if the target
                    column has any nulls and if so, we will do `IS NULL` checks for
                    our columns too. That is because we want the containment check
                    for `NULL` to work the same way as for any other value.
                 in_subquery = Query.Select [Pair_Data column.name column.expression] column.context
-                has_nulls_expression = Expression.Operation "BOOL_OR" [column.is_missing.expression]
+                has_nulls_expression = SQL_Expression.Operation "BOOL_OR" [column.is_missing.expression]
                 has_nulls_subquery = Query.Select [Pair_Data "has_nulls" has_nulls_expression] column.context
-                new_type = SQL_Type.boolean
-                new_expr = Expression.Operation "IS_IN_COLUMN" [self.expression, in_subquery, has_nulls_subquery]
-                Column.Value self.name self.connection new_type new_expr self.context
+                new_expr = SQL_Expression.Operation "IS_IN_COLUMN" [self.expression, in_subquery, has_nulls_subquery]
+                Column.Value self.name self.connection SQL_Type.boolean new_expr self.context

     ## PRIVATE
     as_internal : Internal_Column
@@ -716,48 +780,14 @@ type Column
     to_text : Text
     to_text self = "(Database Column "+self.name.to_text+")"

-## PRIVATE
-
-   A helper method for creating an aggregated column by applying some
-   operation.
-
-   Arguments:
-   - column: The column to aggregate.
-   - operation: The name of the aggregation operation.
-   - name_suffix: The suffix to apply to the name of the aggregate column.
-   - new_type: The SQL type of the result column.
-make_aggregate : Column -> Text -> Text -> SQL_Type -> Column
-make_aggregate column operation name_suffix="_agg" new_type=Nothing =
-    actual_new_type = new_type.if_nothing column.sql_type
-    expr = Expression.Operation operation [column.expression]
-    case Helpers.ensure_name_is_sane name_suffix of
-        True ->
-            new_name = column.name + name_suffix
-            lift_aggregate new_name column.connection actual_new_type expr column.context
+    ## PRIVATE
+       Helper for the expression to tell it which functions need a Vector.
+    var_args_functions : Vector
+    var_args_functions = ['is_in', 'coalesce', 'min', 'max']

 ## PRIVATE
-
-   A helper function that lifts an aggregate query into a subquery to ensure
-   correctness of further processing.
-
-   Argument:
-   - new_name: The new name for the aggregate column.
-   - connection: The connection with which the aggregate is associated.
-   - expected_type: The expected SQL type of the column.
-   - expr: The expression for the query.
-   - context: The context in which the query exists.
-lift_aggregate : Text -> Connection -> SQL_Type -> Expression -> Context -> Column
-lift_aggregate new_name connection expected_type expr context =
-    # TODO [RW] This is a simple workaround for #1643 - we always wrap the
-    # aggregate into a subquery, thus making it safe to use it everywhere. A
-    # more complex solution may be adopted at some point.
-    ixes = freshen_columns [new_name] context.meta_index
-    col = Internal_Column.Value new_name expected_type expr
-    setup = context.as_subquery new_name+"_sub" [[col], ixes]
-    subquery = setup.first
-    cols = setup.second
-    new_col = cols.first.first
-    new_ixes = cols.second
-    new_ctx = Context.for_subquery subquery . set_index new_ixes
-    Column.Value new_name connection new_col.sql_type new_col.expression new_ctx
-
+   TODO: Revisit this as part of the column value type work.
+get_approximate_type value default = case value of
+    _ : Column -> value.sql_type
+    Nothing -> default
+    _ -> SQL_Type.approximate_type value
\ No newline at end of file
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Statement.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Statement.enso
index ad3e6a67bfb1..eee8e7bf875d 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Statement.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Statement.enso
@@ -43,8 +43,12 @@ type SQL_Statement
         strings = self.internal_fragments . map <| case _ of
             SQL_Fragment.Code_Part code -> code
             # TODO at some point we may try more sophisticated serialization based on data type
+            # TODO #183734954: date and time formatting is limited and will lose sub-second precision and timezone offset.
             SQL_Fragment.Interpolation _ obj -> case obj of
                 Number -> obj.to_text
+                Date_Time.Date_Time -> "'" + (obj.format "yyyy-MM-dd HH:mm:ss") + "'"
+                Date.Date -> "'" + (obj.format "yyyy-MM-dd") + "'"
+                Time_Of_Day.Time_Of_Day -> "'" + (obj.format "HH:mm:ss") + "'"
                 _ -> "'" + obj.to_text.replace "'" "''" + "'"
         strings.join ""
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Type.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Type.enso
index 55b97825496b..c7a27bfc8aae 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Type.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/SQL_Type.enso
@@ -49,12 +49,12 @@ type SQL_Type
     numeric : SQL_Type
     numeric = SQL_Type.Value Types.NUMERIC "NUMERIC"

-    ## The SQL type representing one of the suppported textual types.
+    ## The SQL type representing one of the supported textual types.
     varchar : SQL_Type
     varchar = SQL_Type.Value Types.VARCHAR "VARCHAR"

     ## UNSTABLE
-       The SQL type representing one of the suppported textual types.
+       The SQL type representing one of the supported textual types.
        It seems that JDBC treats the `TEXT` and `VARCHAR` types as
        interchangeable.
     text : SQL_Type
@@ -64,6 +64,40 @@ type SQL_Type
     blob : SQL_Type
     blob = SQL_Type.Value Types.BLOB "BLOB"

+    ## The SQL type representing a date type.
+    date : SQL_Type
+    date = SQL_Type.Value Types.DATE "DATE"
+
+    ## The SQL type representing a time-of-day type.
+    time_of_day : SQL_Type
+    time_of_day = SQL_Type.Value Types.TIME "TIME"
+
+    ## The SQL type representing a date-time type.
+    date_time : SQL_Type
+    date_time = SQL_Type.Value Types.TIMESTAMP_WITH_TIMEZONE "TIMESTAMP"
+
+    ## ADVANCED
+       Given an Enso value, gets the approximate SQL type.
+       approximate_type : Any -> SQL_Type ! 
Illegal_Argument_Error_Data + approximate_type value = case value of + _ : Boolean -> SQL_Type.boolean + _ : Integer -> SQL_Type.integer + _ : Decimal -> SQL_Type.double + _ : Text -> SQL_Type.varchar + _ : Date.Date -> SQL_Type.date + _ : Time_Of_Day.Time_Of_Day -> SQL_Type.time_of_day + _ : Date_Time.Date_Time -> SQL_Type.date_time + _ -> Error.throw (Illegal_Argument_Error_Data "Unsupported type.") + + ## PRIVATE + + Returns True if this type represents an integer or a double. + + It only handles the standard types so it may return false negatives for + non-standard ones. + is_definitely_numeric : Boolean + is_definitely_numeric self = self.is_definitely_double || self.is_definitely_integer + ## PRIVATE Returns True if this type represents an integer. diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso index ec9ba718788f..c53fac4886ff 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Data/Table.enso @@ -2,6 +2,8 @@ from Standard.Base import all from Standard.Base.Error.Problem_Behavior import Report_Warning from Standard.Table import Auto_Detect, Aggregate_Column, Data_Formatter, Column_Name_Mapping, Column_Selector, Sort_Column_Selector, Sort_Column, Match_Columns, Position +import Standard.Table.Data.Expression.Expression +import Standard.Table.Data.Expression.Expression_Error import Standard.Table.Data.Table.Table as Materialized_Table import Standard.Table.Internal.Java_Exports import Standard.Table.Internal.Table_Helpers @@ -18,7 +20,7 @@ import project.Data.SQL_Type.SQL_Type import project.Internal.Helpers import project.Internal.Aggregate_Helper import project.Internal.IR.Context.Context -import project.Internal.IR.Expression.Expression +import project.Internal.IR.SQL_Expression.SQL_Expression import project.Internal.IR.From_Spec.From_Spec import project.Internal.IR.Internal_Column.Internal_Column import project.Internal.IR.Join_Kind.Join_Kind @@ -27,6 +29,7 @@ import project.Internal.IR.Query.Query from Standard.Database.Errors import Unsupported_Database_Operation_Error_Data polyglot java import java.sql.JDBCType +polyglot java import java.util.UUID ## Represents a column-oriented table data structure backed by a database. 
type Table @@ -396,13 +399,20 @@ type Table on_problems.handle_errors fallback=self.with_no_rows <| mask (make_filter_column column filter) _ : Function -> Error.throw (Unsupported_Database_Operation_Error_Data "Filtering with a custom predicate is not supported in the database.") + _ : Text -> + table_at = self.at column + if table_at.is_error.not then self.filter table_at filter on_problems else + expression = self.evaluate column + if expression.is_error.not then self.filter expression filter on_problems else + pick_error = expression.catch Expression_Error.Syntax_Error (_->table_at) + on_problems.handle_errors pick_error fallback=self _ -> case on_problems.handle_errors (self.at column) fallback=Nothing of Nothing -> self resolved_column -> self.filter resolved_column filter on_problems ## PRIVATE with_no_rows self = - false_expression = Expression.Operation "=" [Expression.Constant SQL_Type.integer 1, Expression.Constant SQL_Type.integer 2] + false_expression = SQL_Expression.Operation "=" [SQL_Expression.Constant SQL_Type.integer 1, SQL_Expression.Constant SQL_Type.integer 2] new_filters = self.context.where_filters + [false_expression] new_ctx = self.context.set_where_filters new_filters self.updated_context new_ctx @@ -477,21 +487,35 @@ type Table If a column with the given name already exists, it will be replaced. Otherwise a new column is added. - set : Text -> Column -> Table - set self name column = case Helpers.ensure_name_is_sane name of - True -> - is_used_in_index = self.context.meta_index.exists i-> i.name == name - case is_used_in_index of - True -> Error.throw <| Illegal_State_Error_Data "Cannot override column "+name+", because it is used as an index. Remove the index or use a different name." - False -> - new_col = Internal_Column.Value name column.sql_type column.expression - replace = self.internal_columns.exists (c -> c.name == name) - case replace of - True -> - new_cols = self.internal_columns.map (c -> if c.name == name then new_col else c) - self.updated_columns new_cols - False -> - self.updated_columns (self.internal_columns + [new_col]) + set : Text -> Column | Text -> Problem_Behavior -> Table + set self name column on_problems=Report_Warning = on_problems.handle_errors fallback=self <| + case Helpers.ensure_name_is_sane name of + True -> + is_used_in_index = self.context.meta_index.exists i-> i.name == name + case is_used_in_index of + True -> Error.throw <| Illegal_State_Error_Data "Cannot override column "+name+", because it is used as an index. Remove the index or use a different name." 
+ False -> + resolved = case column of + _ : Text -> self.evaluate column + _ -> column + new_col = Internal_Column.Value name resolved.sql_type resolved.expression + replace = self.internal_columns.exists (c -> c.name == name) + case replace of + True -> + new_cols = self.internal_columns.map (c -> if c.name == name then new_col else c) + self.updated_columns new_cols + False -> + self.updated_columns (self.internal_columns + [new_col]) + + ## PRIVATE + evaluate : Text -> Column + evaluate self expression = + get_column name = self.at name + make_constant value = + new_type = SQL_Type.approximate_type value + other = SQL_Expression.Constant new_type value + Column.Value ("Constant_" + UUID.randomUUID.to_text) self.connection new_type other self.context + Expression.evaluate expression get_column make_constant "Standard.Database.Data.Column" "Column" Column.var_args_functions ## UNSTABLE @@ -732,7 +756,7 @@ type Table new_columns = left_renamed_columns + right_renamed_columns on_exprs = left_new_join_index.zip right_new_join_index l-> r-> - Expression.Operation "=" [l.expression, r.expression] + SQL_Expression.Operation "=" [l.expression, r.expression] new_from = From_Spec.Join kind left_subquery right_subquery on_exprs new_limit = Nothing new_ctx = Context.Value new_from [] [] [] new_index new_limit @@ -814,7 +838,7 @@ type Table ## Returns the amount of rows in this table. row_count : Integer row_count self = if self.internal_columns.is_empty then 0 else - expr = Expression.Operation "COUNT_ROWS" [] + expr = SQL_Expression.Operation "COUNT_ROWS" [] column_name = "row_count" ## We need to keep some column in the subquery which will determine if the query is performing regular selection or aggregation. To avoid @@ -889,7 +913,7 @@ type Table setup = self.context.as_subquery self.name [self.internal_columns] new_ctx = Context.for_subquery setup.first new_columns = setup.second.first.map column-> - [column.name, Expression.Operation "COUNT" [column.expression]] + [column.name, SQL_Expression.Operation "COUNT" [column.expression]] query = Query.Select new_columns new_ctx self.connection.dialect.generate_sql query count_table = self.connection.read_statement count_query @@ -971,7 +995,7 @@ type Table _ -> Error.throw <| Illegal_State_Error_Data "Inserting can only be performed on tables as returned by `query`, any further processing is not allowed." 
# TODO [RW] before removing the PRIVATE tag, add a check that no bad stuff was done to the table as described above pairs = self.internal_columns.zip values col-> value-> - [col.name, Expression.Constant col.sql_type value] + [col.name, SQL_Expression.Constant col.sql_type value] query = self.connection.dialect.generate_sql <| Query.Insert table_name pairs affected_rows = self.connection.execute_update query case affected_rows == 1 of @@ -1071,7 +1095,7 @@ type Integrity_Error # make_table : Connection -> Text -> Vector [Text, SQL_Type] -> Context -> Table make_table : Connection -> Text -> Vector -> Context -> Table make_table connection table_name columns ctx = - cols = columns.map (p -> Internal_Column.Value p.first p.second (Expression.Column table_name p.first)) + cols = columns.map (p -> Internal_Column.Value p.first p.second (SQL_Expression.Column table_name p.first)) Table.Value table_name connection cols ctx ## PRIVATE diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso index 0adfa75645d6..f0d82f24de51 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Aggregate_Helper.enso @@ -3,7 +3,7 @@ from Standard.Base import all hiding First, Last from Standard.Table.Data.Aggregate_Column.Aggregate_Column import all import project.Data.SQL_Type.SQL_Type -import project.Internal.IR.Expression.Expression +import project.Internal.IR.SQL_Expression.SQL_Expression import project.Internal.IR.Internal_Column.Internal_Column from project.Errors import Unsupported_Database_Operation_Error_Data @@ -23,48 +23,48 @@ make_aggregate_column table aggregate new_name = ## PRIVATE Creates an Internal Representation of the expression that computes a requested statistic. 
-make_expression : Aggregate_Column -> Dialect -> Expression +make_expression : Aggregate_Column -> Dialect -> SQL_Expression make_expression aggregate dialect = is_non_empty_selector v = if v.is_nothing then False else v.columns.not_empty case aggregate of Group_By c _ -> c.expression - Count _ -> Expression.Operation "COUNT_ROWS" [] + Count _ -> SQL_Expression.Operation "COUNT_ROWS" [] Count_Distinct columns _ ignore_nothing -> if columns.is_empty then Error.throw (Illegal_Argument_Error_Data "Count_Distinct must have at least one column.") else case ignore_nothing of - True -> Expression.Operation "COUNT_DISTINCT" (columns.map .expression) - False -> Expression.Operation "COUNT_DISTINCT_INCLUDE_NULL" (columns.map .expression) - Count_Not_Nothing c _ -> Expression.Operation "COUNT" [c.expression] - Count_Nothing c _ -> Expression.Operation "COUNT_IS_NULL" [c.expression] - Count_Not_Empty c _ -> Expression.Operation "COUNT_NOT_EMPTY" [c.expression] - Count_Empty c _ -> Expression.Operation "COUNT_EMPTY" [c.expression] - Percentile p c _ -> Expression.Operation "PERCENTILE" [Expression.Constant SQL_Type.double p, c.expression] - Mode c _ -> Expression.Operation "MODE" [c.expression] + True -> SQL_Expression.Operation "COUNT_DISTINCT" (columns.map .expression) + False -> SQL_Expression.Operation "COUNT_DISTINCT_INCLUDE_NULL" (columns.map .expression) + Count_Not_Nothing c _ -> SQL_Expression.Operation "COUNT" [c.expression] + Count_Nothing c _ -> SQL_Expression.Operation "COUNT_IS_NULL" [c.expression] + Count_Not_Empty c _ -> SQL_Expression.Operation "COUNT_NOT_EMPTY" [c.expression] + Count_Empty c _ -> SQL_Expression.Operation "COUNT_EMPTY" [c.expression] + Percentile p c _ -> SQL_Expression.Operation "PERCENTILE" [SQL_Expression.Constant SQL_Type.double p, c.expression] + Mode c _ -> SQL_Expression.Operation "MODE" [c.expression] First c _ ignore_nothing order_by -> case is_non_empty_selector order_by of False -> Error.throw (Unsupported_Database_Operation_Error_Data "`First` aggregation requires at least one `order_by` column.") True -> order_bys = order_by.columns.map c-> dialect.prepare_order_descriptor c.column.as_internal c.direction Text_Ordering.Default case ignore_nothing of - False -> Expression.Operation "FIRST" [c.expression]+order_bys - True -> Expression.Operation "FIRST_NOT_NULL" [c.expression]+order_bys + False -> SQL_Expression.Operation "FIRST" [c.expression]+order_bys + True -> SQL_Expression.Operation "FIRST_NOT_NULL" [c.expression]+order_bys Last c _ ignore_nothing order_by -> case is_non_empty_selector order_by of False -> Error.throw (Unsupported_Database_Operation_Error_Data "`Last` aggregation requires at least one `order_by` column.") True -> order_bys = order_by.columns.map c-> dialect.prepare_order_descriptor c.column.as_internal c.direction Text_Ordering.Default case ignore_nothing of - False -> Expression.Operation "LAST" [c.expression]+order_bys - True -> Expression.Operation "LAST_NOT_NULL" [c.expression]+order_bys - Maximum c _ -> Expression.Operation "MAX" [c.expression] - Minimum c _ -> Expression.Operation "MIN" [c.expression] - Shortest c _ -> Expression.Operation "SHORTEST" [c.expression] - Longest c _ -> Expression.Operation "LONGEST" [c.expression] + False -> SQL_Expression.Operation "LAST" [c.expression]+order_bys + True -> SQL_Expression.Operation "LAST_NOT_NULL" [c.expression]+order_bys + Maximum c _ -> SQL_Expression.Operation "MAX" [c.expression] + Minimum c _ -> SQL_Expression.Operation "MIN" [c.expression] + Shortest c _ -> 
SQL_Expression.Operation "SHORTEST" [c.expression] + Longest c _ -> SQL_Expression.Operation "LONGEST" [c.expression] Standard_Deviation c _ population -> case population of - True -> Expression.Operation "STDDEV_POP" [c.expression] - False -> Expression.Operation "STDDEV_SAMP" [c.expression] + True -> SQL_Expression.Operation "STDDEV_POP" [c.expression] + False -> SQL_Expression.Operation "STDDEV_SAMP" [c.expression] Concatenate c _ separator prefix suffix quote_char -> - base_args = [c.expression, Expression.Constant SQL_Type.text separator, Expression.Constant SQL_Type.text prefix, Expression.Constant SQL_Type.text suffix] + base_args = [c.expression, SQL_Expression.Constant SQL_Type.text separator, SQL_Expression.Constant SQL_Type.text prefix, SQL_Expression.Constant SQL_Type.text suffix] case quote_char.is_empty of - True -> Expression.Operation "CONCAT" base_args - False -> Expression.Operation "CONCAT_QUOTE_IF_NEEDED" base_args+[Expression.Constant SQL_Type.text quote_char] - Sum c _ -> Expression.Operation "SUM" [c.expression] - Average c _ -> Expression.Operation "AVG" [c.expression] - Median c _ -> Expression.Operation "MEDIAN" [c.expression] + True -> SQL_Expression.Operation "CONCAT" base_args + False -> SQL_Expression.Operation "CONCAT_QUOTE_IF_NEEDED" base_args+[SQL_Expression.Constant SQL_Type.text quote_char] + Sum c _ -> SQL_Expression.Operation "SUM" [c.expression] + Average c _ -> SQL_Expression.Operation "AVG" [c.expression] + Median c _ -> SQL_Expression.Operation "MEDIAN" [c.expression] diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso index 864cf2bea820..486b200e2f01 100644 --- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso +++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Base_Generator.enso @@ -3,7 +3,7 @@ from Standard.Base import all import project.Data.SQL import project.Data.SQL.Builder import project.Internal.IR.Context.Context -import project.Internal.IR.Expression.Expression +import project.Internal.IR.SQL_Expression.SQL_Expression import project.Internal.IR.From_Spec.From_Spec import project.Internal.IR.Join_Kind.Join_Kind import project.Internal.IR.Order_Descriptor.Order_Descriptor @@ -169,15 +169,16 @@ base_dialect = unary = name -> [name, make_unary_op name] fun = name -> [name, make_function name] - arith = [bin "+", bin "-", bin "*", bin "/", bin "%"] + arith = [["ADD_NUMBER", make_binary_op "+"], ["ADD_TEXT", make_binary_op "||"], bin "-", bin "*", bin "/", bin "%", ["^", make_function "POWER"]] logic = [bin "AND", bin "OR", unary "NOT", ["IIF", make_iif]] compare = [bin "=", bin "!=", bin "<", bin ">", bin "<=", bin ">=", ["BETWEEN", make_between]] + functions = [["COALESCE", make_function "COALESCE"], ["ROW_MIN", make_function "MIN"], ["ROW_MAX", make_function "MAX"]] agg = [fun "MAX", fun "MIN", fun "AVG", fun "SUM"] counts = [fun "COUNT", ["COUNT_ROWS", make_constant "COUNT(*)"]] text = [is_empty, bin "LIKE"] nulls = [["IS_NULL", make_right_unary_op "IS NULL"], ["FILL_NULL", make_function "COALESCE"]] contains = [["IS_IN", make_is_in], ["IS_IN_COLUMN", make_is_in_column]] - base_map = Map.from_vector (arith + logic + compare + agg + counts + text + nulls + contains) + base_map = Map.from_vector (arith + logic + compare + functions + agg + counts + text + nulls + contains) Internal_Dialect.Value base_map wrap_in_quotes ## PRIVATE @@ -242,12 +243,12 @@ make_is_in_column arguments = 
case arguments.length of
   Arguments:
   - dialect: The SQL dialect in which the expression is being generated.
   - expr: The expression to generate SQL code for.
-generate_expression : Internal_Dialect -> Expression | Order_Descriptor | Query -> Builder
+generate_expression : Internal_Dialect -> SQL_Expression | Order_Descriptor | Query -> Builder
 generate_expression dialect expr = case expr of
-    Expression.Column origin name ->
+    SQL_Expression.Column origin name ->
         dialect.wrap_identifier origin ++ '.' ++ dialect.wrap_identifier name
-    Expression.Constant sql_type value -> SQL.interpolation sql_type value
-    Expression.Operation kind arguments ->
+    SQL_Expression.Constant sql_type value -> SQL.interpolation sql_type value
+    SQL_Expression.Operation kind arguments ->
         op = dialect.operation_map.get_or_else kind (Error.throw <| Unsupported_Database_Operation_Error_Data kind)
         parsed_args = arguments.map (generate_expression dialect)
         op parsed_args
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Context.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Context.enso
index a4006d48a589..f3b002986c03 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Context.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Context.enso
@@ -1,6 +1,6 @@ from Standard.Base import all

-import project.Internal.IR.Expression.Expression
+import project.Internal.IR.SQL_Expression.SQL_Expression
 import project.Internal.IR.From_Spec.From_Spec
 import project.Internal.IR.Internal_Column.Internal_Column
 import project.Internal.IR.Order_Descriptor.Order_Descriptor
@@ -31,7 +31,7 @@ type Context
     - meta_index: a list of internal columns to use for joining or grouping.
     - limit: an optional maximum number of elements that the query should
       return.
-    Value (from_spec : From_Spec) (where_filters : Vector Expression) (orders : Vector Order_Descriptor) (groups : Vector Expression) (meta_index : Vector Internal_Column) (limit : Nothing | Integer)
+    Value (from_spec : From_Spec) (where_filters : Vector SQL_Expression) (orders : Vector Order_Descriptor) (groups : Vector SQL_Expression) (meta_index : Vector Internal_Column) (limit : Nothing | Integer)

     ## PRIVATE
@@ -82,7 +82,7 @@ type Context
        Arguments:
        - new_filters: The new filters to set in the query.
-    set_where_filters : Vector Expression -> Context
+    set_where_filters : Vector SQL_Expression -> Context
     set_where_filters self new_filters =
         Context.Value self.from_spec new_filters self.orders self.groups self.meta_index self.limit
@@ -119,7 +119,7 @@ type Context
        Arguments:
       - new_groups: The new grouping clauses to set in the query.
-    set_groups : Vector Expression -> Context
+    set_groups : Vector SQL_Expression -> Context
     set_groups self new_groups =
         Context.Value self.from_spec self.where_filters self.orders new_groups self.meta_index self.limit
@@ -152,7 +152,7 @@ type Context
     as_subquery self alias column_lists =
         rewrite_internal_column : Internal_Column -> Internal_Column
         rewrite_internal_column column =
-            Internal_Column.Value column.name column.sql_type (Expression.Column alias column.name)
+            Internal_Column.Value column.name column.sql_type (SQL_Expression.Column alias column.name)

         new_columns = column_lists.map columns->
             columns.map rewrite_internal_column
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/From_Spec.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/From_Spec.enso
index 40ba84fb8f1b..0825ec425428 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/From_Spec.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/From_Spec.enso
@@ -1,7 +1,7 @@ from Standard.Base import all

 import project.Internal.IR.Context.Context
-import project.Internal.IR.Expression.Expression
+import project.Internal.IR.SQL_Expression.SQL_Expression
 import project.Internal.IR.Join_Kind.Join_Kind

 ## PRIVATE
@@ -44,7 +44,7 @@ type From_Spec
      - on: a list of expressions that will be used as join conditions; these
        will usually be equalities between expressions from the left and right
        sources.
-    Join (kind : Join_Kind) (left_spec : From_Spec) (right_spec : From_Spec) (on : Vector Expression)
+    Join (kind : Join_Kind) (left_spec : From_Spec) (right_spec : From_Spec) (on : Vector SQL_Expression)

     ## PRIVATE
@@ -57,4 +57,4 @@ type From_Spec
      - context: the context for the sub-query.
      - alias: the name upon which the results of this sub-query can be referred
        to in other parts of the query.
-    Sub_Query (columns : Vector (Pair Text Expression)) (context : Context) (alias : Text)
+    Sub_Query (columns : Vector (Pair Text SQL_Expression)) (context : Context) (alias : Text)
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Internal_Column.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Internal_Column.enso
index db734304d811..85253612a271 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Internal_Column.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Internal_Column.enso
@@ -1,7 +1,7 @@ from Standard.Base import all

 import project.Data.SQL_Type.SQL_Type
-import project.Internal.IR.Expression.Expression
+import project.Internal.IR.SQL_Expression.SQL_Expression

 type Internal_Column
     ## PRIVATE
@@ -12,7 +12,7 @@ type Internal_Column
       - name: The column name.
       - sql_type: The SQL type of the column.
      - expression: An expression to apply to the column.
-    Value name:Text sql_type:SQL_Type expression:Expression
+    Value name:Text sql_type:SQL_Type expression:SQL_Expression

     ## PRIVATE
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Order_Descriptor.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Order_Descriptor.enso
index e899c311cddc..270acaca89e2 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Order_Descriptor.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Order_Descriptor.enso
@@ -1,8 +1,8 @@ from Standard.Base import all

-import project.Internal.IR.Expression.Expression
+import project.Internal.IR.SQL_Expression.SQL_Expression
 import project.Internal.IR.Nulls_Order.Nulls_Order

 ## PRIVATE
 type Order_Descriptor
-    Value (expression : Expression) (direction : Sort_Direction) (nulls_order : Nothing | Nulls_Order = Nothing) (collation : Nothing | Text = Nothing)
+    Value (expression : SQL_Expression) (direction : Sort_Direction) (nulls_order : Nothing | Nulls_Order = Nothing) (collation : Nothing | Text = Nothing)
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Query.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Query.enso
index 543bc257310b..df03c13936ca 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Query.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Query.enso
@@ -1,7 +1,7 @@ from Standard.Base import all

 import project.Internal.IR.Context.Context
-import project.Internal.IR.Expression.Expression
+import project.Internal.IR.SQL_Expression.SQL_Expression

 ## PRIVATE
@@ -17,7 +17,7 @@ type Query
       is a pair whose first element is the name of the materialized column
       and the second element is the expression to compute.
     - context: The query context, see `Context` for more detail.
-    Select (expressions : Vector (Pair Text Expression)) (context : Context)
+    Select (expressions : Vector (Pair Text SQL_Expression)) (context : Context)

     ## PRIVATE
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Expression.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/SQL_Expression.enso
similarity index 94%
rename from distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Expression.enso
rename to distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/SQL_Expression.enso
index 4ebf104aa3e6..5b328a8e5ef8 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/Expression.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/IR/SQL_Expression.enso
@@ -7,7 +7,7 @@ import project.Data.SQL_Type.SQL_Type
    The internal representation of an SQL expression which can be a column
    reference, an interpolated constant or an operation that combines other
    expressions.
-type Expression
+type SQL_Expression
     ## PRIVATE

        The internal representation of an SQL expression that gets a value from a
@@ -43,4 +43,4 @@ type Expression
         dialect.
       - expressions: a list of expressions which are arguments to the operation;
         different operations support different numbers of arguments.
-    Operation (kind : Text) (expressions : Vector Expression)
+    Operation (kind : Text) (expressions : Vector SQL_Expression)
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso
index edfe446abc33..8b907b29e75d 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/JDBC_Connection.enso
@@ -198,6 +198,9 @@ default_storage_type storage_type = case storage_type of
     Storage.Integer -> SQL_Type.integer
     Storage.Decimal -> SQL_Type.double
     Storage.Boolean -> SQL_Type.boolean
+    Storage.Date -> SQL_Type.date
+    Storage.Time_Of_Day -> SQL_Type.time_of_day
+    Storage.Date_Time -> SQL_Type.date_time
     ## Support for mixed type columns in Table upload is currently very limited,
        falling back to treating everything as text.
     Storage.Any -> SQL_Type.text
diff --git a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso
index 4ad0cb16c015..3a0259ef13b7 100644
--- a/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso
+++ b/distribution/lib/Standard/Database/0.0.0-dev/src/Internal/Postgres/Postgres_Dialect.enso
@@ -7,7 +7,7 @@ import project.Data.SQL
 import project.Data.SQL_Statement.SQL_Statement
 import project.Data.SQL_Type.SQL_Type
 import project.Internal.Base_Generator
-import project.Internal.IR.Expression.Expression
+import project.Internal.IR.SQL_Expression.SQL_Expression
 import project.Internal.IR.Internal_Column.Internal_Column
 import project.Internal.IR.Order_Descriptor.Order_Descriptor
 import project.Internal.IR.Nulls_Order.Nulls_Order
@@ -244,8 +244,8 @@ make_order_descriptor internal_column sort_direction text_ordering =
                 False -> Error.throw (Unsupported_Database_Operation_Error_Data "Case insensitive ordering with custom locale is currently not supported. You may need to materialize the Table to perform this operation.")
                 True ->
-                    upper = Expression.Operation "UPPER" [internal_column.expression]
-                    folded_expression = Expression.Operation "LOWER" [upper]
+                    upper = SQL_Expression.Operation "UPPER" [internal_column.expression]
+                    folded_expression = SQL_Expression.Operation "LOWER" [upper]
                     Order_Descriptor.Value folded_expression sort_direction nulls_order=nulls collation=Nothing
         False ->
             Order_Descriptor.Value internal_column.expression sort_direction nulls_order=nulls collation=Nothing
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/THIRD-PARTY/NOTICE b/distribution/lib/Standard/Table/0.0.0-dev/THIRD-PARTY/NOTICE
index 7d3f410db397..e14dd46a6bda 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/THIRD-PARTY/NOTICE
+++ b/distribution/lib/Standard/Table/0.0.0-dev/THIRD-PARTY/NOTICE
@@ -26,6 +26,11 @@ The license file can be found at `licenses/APACHE2.0`.
 Copyright notices related to this dependency can be found in the directory `commons-io.commons-io-2.11.0`.

+'antlr4-runtime', licensed under The BSD License, is distributed with the Table.
+The license file can be found at `licenses/BSD-3-Clause`.
+Copyright notices related to this dependency can be found in the directory `org.antlr.antlr4-runtime-4.10.1`.
+
+
 'commons-collections4', licensed under the Apache License, Version 2.0, is distributed with the Table.
 The license information can be found along with the copyright notices.
Copyright notices related to this dependency can be found in the directory `org.apache.commons.commons-collections4-4.4`.
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/THIRD-PARTY/org.antlr.antlr4-runtime-4.10.1/NOTICES b/distribution/lib/Standard/Table/0.0.0-dev/THIRD-PARTY/org.antlr.antlr4-runtime-4.10.1/NOTICES
new file mode 100644
index 000000000000..960b2a8926c1
--- /dev/null
+++ b/distribution/lib/Standard/Table/0.0.0-dev/THIRD-PARTY/org.antlr.antlr4-runtime-4.10.1/NOTICES
@@ -0,0 +1 @@
+Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso
index 3ded48eb913c..7a6bc96b36d1 100644
--- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso
+++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Column.enso
@@ -400,6 +400,34 @@ type Column
     % : Column | Any -> Column
     % self other = run_vectorized_binary_op self '%' (%) other

+    ## ALIAS Power Columns
+
+       Element-wise exponentiation.
+
+       Arguments:
+       - other: The exponent to raise `self` by. If `other` is a column, the
+         power operation is performed pairwise between corresponding elements
+         of `self` and `other`.
+
+       Returns a column containing the result of raising each element of `self`
+       to the power of `other`.
+
+       > Example
+         Squares the elements of one column.
+
+             import Standard.Examples
+
+             example_pow = Examples.decimal_column ^ 2
+
+       > Example
+         Raises each value in a column to the power of the value in another column.
+
+             import Standard.Examples
+
+             example_pow = Examples.decimal_column ^ Examples.integer_column
+    ^ : Column | Any -> Column
+    ^ self other = run_vectorized_binary_op self '^' (^) other
+
     ## ALIAS AND Columns

        Element-wise boolean conjunction.
@@ -460,7 +488,7 @@ type Column
     || self other =
         run_vectorized_binary_op self "||" (||) other

-    ## ALIAS NOT Columns
+    ## ALIAS NOT

        Boolean negation of each element in self column.
@@ -473,21 +501,91 @@ type Column
     not : Column
     not self = run_vectorized_unary_op self "not" .not

-    ## UNSTABLE
+    ## ALIAS IF
+
        Replaces `True` values with `when_true` and `False` with `when_false`.
        Only meant for use with boolean columns.

-       TODO: Currently `when_true` and `when_false` need to be a single value.
-       In the future the API will also support row-based IIF if they are columns.
+       Arguments:
+       - when_true: value or column to use when `self` is `True`.
+       - when_false: value or column to use when `self` is `False`.
+
+       > Example
+         If the value in a column is `True`, replace it with `1`, otherwise `0`.
+
+             import Standard.Examples
+
+             example_if = Examples.bool_column_1.iif 1 0
     iif : Any -> Any -> Column
     iif self when_true when_false = case self.storage_type of
         Storage.Boolean ->
             s = self.java_column.getStorage
             ix = self.java_column.getIndex
-            rs = s.iif when_true when_false
+
+            true_val = case when_true of
+                _ : Column -> when_true.java_column.getStorage
+                _ -> when_true
+
+            false_val = case when_false of
+                _ : Column -> when_false.java_column.getStorage
+                _ -> when_false
+
+            rs = s.iif true_val false_val
             Column.Column_Data (Java_Column.new "Result" ix rs)
         _ -> Error.throw (Illegal_Argument_Error "`iif` can only be used with boolean columns.")

+    ## Returns a column of the first non-`Nothing` value on each row of `self`
+       and the `values` list.
+
+       Arguments:
+       - values: list of columns or values to coalesce with `self`.
+
+       > Example
+         Get the first non-`Nothing` value in two columns.
+
+             import Standard.Examples
+
+             example_coalesce = Examples.decimal_column.coalesce Examples.integer_column
+    coalesce : (Any | Vector Any) -> Column
+    coalesce self values =
+        fallback a b = a.if_nothing b
+        run_vectorized_many_op self "coalesce" fallback values
+
+    ## Returns a column of the row-wise minimum of `self` and the `values` list.
+
+       Arguments:
+       - values: list of columns or values to compute the minimum with `self`.
+
+       > Example
+         Get the minimum value in two columns.
+
+             import Standard.Examples
+
+             example_min = Examples.decimal_column.min Examples.integer_column
+    min : (Any | Vector Any) -> Column
+    min self values =
+        fallback a b = if a.is_nothing then b else
+            if b.is_nothing then a else
+                if b < a then b else a
+        run_vectorized_many_op self "minimum" fallback values
+
+    ## Returns a column of the row-wise maximum of `self` and the `values` list.
+
+       Arguments:
+       - values: list of columns or values to compute the maximum with `self`.
+
+       > Example
+         Get the maximum value in two columns.
+
+             import Standard.Examples
+
+             example_max = Examples.decimal_column.max Examples.integer_column
+    max : (Any | Vector Any) -> Column
+    max self values =
+        fallback a b = if a.is_nothing then b else
+            if b.is_nothing then a else
+                if b > a then b else a
+        run_vectorized_many_op self "maximum" fallback values

     ## Returns a column of booleans, with `True` items at the positions where
        this column contains a `Nothing`.
@@ -696,7 +794,11 @@ type Column
             True ->
                 fallback_fn _ _ =
                     Panic.throw (Illegal_State_Error_Data "Impossible: This is a bug in the Standard.Table library.")
-                run_vectorized_binary_op self op_name fallback_fn vector skip_nulls=False new_name=result_name
+                true_vector = case vector of
+                    _ : Array -> Vector.from_polyglot_array vector
+                    _ : Vector.Vector -> vector
+                    column : Column -> column.to_vector
+                run_vectorized_binary_op self op_name fallback_fn true_vector skip_nulls=False new_name=result_name
             False ->
                 ## We have custom code for the non-vectorized case, because
                    usually a vectorized binary op will apply the fallback
@@ -705,6 +807,7 @@ type Column
                    against the whole other column, instead of just the
                    corresponding row - so we need to go around a bit.
                 true_vector = case vector of
+                    _ : Array -> Vector.from_polyglot_array vector
                     _ : Vector.Vector -> vector
                     ## This does no copying, as `Column.to_vector` just returns
                        a view of the storage.
@@ -881,6 +984,17 @@ type Column
         if storage.isNa index then Nothing else
             storage.getItem index

+    ## UNSTABLE
+
+       Returns a column containing the rows of this column.
+
+       Arguments:
+       - max_rows: specifies a maximum number of rows to fetch; if not set, all
+         available rows are fetched.
+    read : (Nothing | Integer) -> Column
+    read self max_rows=Nothing =
+        if max_rows.is_nothing then self else self.slice 0 max_rows
+
     ## Returns a vector containing all the elements in this column.

        > Example
@@ -996,98 +1110,6 @@ type Column
         data = ['data', self.to_vector.take (First max_data)]
         Json.from_pairs [size, name, data] . to_text

-    ## ALIAS Sum Columns
-
-       Sums the values in this column.
-
-       > Example
-         Sum the values in a column.
-
-             import Standard.Examples
-
-             example_sum = Examples.integer_column.sum
-    sum : Any
-    sum self = self.java_column.aggregate 'sum' (x-> Vector.from_polyglot_array x . reduce (+)) True
-
-    ## ALIAS Max Columns
-
-       Computes the maximum element of this column.
-
-       > Example
-         Compute the maximum value of a column.
- - import Standard.Examples - - example_max = Examples.integer_column.max - max : Any - max self = - self.java_column.aggregate 'max' (x-> Vector.from_polyglot_array x . reduce Math.max) True - - ## ALIAS Min Columns - - Computes the minimum element of this column. - - > Example - Compute the minimum value of a column. - - import Standard.Examples - - example_min = Examples.integer_column.min - min : Any - min self = - self.java_column.aggregate 'min' (x-> Vector.from_polyglot_array x . reduce Math.min) True - - ## ALIAS Mean Columns - - Computes the mean of non-missing elements of this column. - - > Example - Compute the mean value of a column. - - import Standard.Examples - - example_mean = Examples.integer_column.mean - mean : Any - mean self = - vec_mean v = if v.length == 0 then Nothing else - (Vector.from_polyglot_array v).reduce (+) / v.length - self.java_column.aggregate 'mean' vec_mean True - - ## Computes the variance of the sample represented by this column. - - Arguments: - - degrees_of_freedom_correction: a correction to account for the - missing degrees of freedom in the sample. The default value of `1` - computes a sample variance. Setting it to `0` will compute population - variance instead. - variance self degrees_of_freedom_correction=1 = - mean = self.mean - shifted = self - mean - sq = shifted * shifted - sq.sum / (self.length - degrees_of_freedom_correction) - - ## Computes the standard deviation of the sample represented by this column. - - Arguments: - - degrees_of_freedom_correction: a correction to account for the - missing degrees of freedom in the sample. The default value of `1` - computes a sample standard deviation. Setting it to `0` will compute - population standard deviation instead. - standard_deviation self degrees_of_freedom_correction=1 = - self.variance degrees_of_freedom_correction . sqrt - - ## Computes the coefficient of determination of a given prediction column. - - Arguments: - - predictions: the column predicting the values of this column. - r_squared self predictions = - prediction_diff = self - predictions - ss_res = prediction_diff*prediction_diff . sum - ss_tot_lin = self - self.mean - ss_tot = ss_tot_lin*ss_tot_lin . sum - 1 - ss_res / ss_tot - - ## UNSTABLE Sorts the column according to the specified rules. @@ -1181,22 +1203,6 @@ type Column first : Any ! Empty_Error first self = self.at 0 . catch Index_Out_Of_Bounds_Error_Data (_ -> Error.throw Empty_Error) - ## UNSTABLE - - Returns the first element in the column, if it exists. - - If the column is empty, this method will return a dataflow error - containing an `Empty_Error`. - - > Example - Get the first element of a column. - - import Standard.Examples - - example_head = Examples.integer_column.head - head : Any ! Empty_Error - head self = self.first - ## UNSTABLE Returns the last element in the column, if it exists. @@ -1243,6 +1249,11 @@ type Column duplicate_count : Column duplicate_count self = Column_Data self.java_column.duplicateCount + ## PRIVATE + Helper for the expression to tell it which functions needs a Vector. + var_args_functions : Vector + var_args_functions = ['is_in', 'coalesce', 'min', 'max'] + ## UNSTABLE An error for when the column contains no elements. @@ -1253,6 +1264,33 @@ type Empty_Error to_display_text : Text to_display_text self = "The column is empty." +## PRIVATE + + Folds the vectorized operation over the provided column and values. When more + than one value to is provided, the result is folded with subsequent values. 
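+
+   For example, for columns `a` and `b`, folding `min` over `a` with
+   operands `[b, 2]` takes the pairwise `min` of `a` and `b` first, and
+   then the `min` of that intermediate result and the constant `2`.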
+ + Arguments: + - column: The column to execute the operation over. + - name: The name of the vectorized operation. + - fallback_fn: A function used if the vectorized operation isn't available. + - operands: The vector of operands to apply to the function after `column`. + - skip_nulls: Specifies if nulls should be skipped. If set to `True`, a null + value results in null without passing it to the function. If set to + `False`, the null values are passed as any other value and can have custom + handling logic. + - new_name: The name of the column created as the result of this operation. +run_vectorized_many_op : Column -> Text -> (Any -> Any -> Any) -> Vector -> Boolean -> Text -> Column +run_vectorized_many_op column name fallback_fn operands skip_nulls=False new_name=(name + "_" + column.name) = + case operands of + _ : Vector.Vector -> + folded = operands.fold column.java_column.getStorage current-> operand-> + case operand of + _ : Column -> current.zip name fallback_fn operand.java_column.getStorage skip_nulls + _ -> current.bimap name fallback_fn operand skip_nulls + Column.Column_Data (Java_Column.new new_name column.java_column.getIndex folded) + _ : Array -> run_vectorized_many_op column name fallback_fn (Vector.from_polyglot_array operands) skip_nulls new_name + _ -> run_vectorized_many_op column name fallback_fn [operands] skip_nulls new_name + ## PRIVATE Executes a vectorized binary operation over the provided column. @@ -1267,7 +1305,7 @@ type Empty_Error `False`, the null values are passed as any other value and can have custom handling logic. - new_name: The name of the column created as the result of this operation. -run_vectorized_binary_op : Column -> Text -> (Any -> Any) -> Any -> Boolean -> Text -> Column +run_vectorized_binary_op : Column -> Text -> (Any -> Any -> Any) -> Any -> Boolean -> Text -> Column run_vectorized_binary_op column name fallback_fn operand skip_nulls=True new_name="Result" = case operand of Column.Column_Data col2 -> s1 = column.java_column.getStorage diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Expression.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Expression.enso new file mode 100644 index 000000000000..d42d7c7ee893 --- /dev/null +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Expression.enso @@ -0,0 +1,49 @@ +from Standard.Base import all + +polyglot java import org.enso.table.expressions.ExpressionVisitorImpl +polyglot java import java.lang.IllegalArgumentException +polyglot java import java.lang.UnsupportedOperationException + +type Expression + ## Evaluates an expression and returns the result + + Arguments: + - expression: the expression to evaluate + - get_column: a function that takes a column name and returns the + associated Column object. + - make_constant`: a function that takes an object and returns a + constant Column object. + - module_name: the name of the Column module that the expression is + being evaluated against. + - type_name: the name of the Column type that the expression is being + evaluated against. + - var_args_functions: a Vector of function names which take a single + Vector argument but which should be exposed with variable parameters. 
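+
+      > Example
+        A sketch of the wiring performed by `Table.evaluate` in this change;
+        the `make_constant` shown here is simplified to build a single-row
+        constant column.
+
+            import Standard.Examples
+
+            table = Examples.inventory_table
+            get_column name = table.at name
+            make_constant value = Column.from_vector "constant" [value]
+            example_evaluate = Expression.evaluate "2 * [total_stock]" get_column make_constant "Standard.Table.Data.Column" "Column" Column.var_args_functions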
+ evaluate : Text -> (Text -> Any) -> (Any -> Any) -> Text -> Text -> Vector Text -> Any + evaluate expression get_column make_constant module_name type_name var_args_functions = + handle_parse_error = Panic.catch_java ExpressionVisitorImpl.SyntaxErrorException handler=(cause-> Error.throw (Expression_Error.Syntax_Error cause.getMessage cause.getLine cause.getColumn)) + handle_unsupported = handle_java_error UnsupportedOperationException Expression_Error.Unsupported_Operation + handle_arguments = handle_java_error IllegalArgumentException Expression_Error.Argument_Mismatch + + handle_parse_error <| handle_unsupported <| handle_arguments <| + ExpressionVisitorImpl.evaluate expression get_column make_constant module_name type_name var_args_functions.to_array + +type Expression_Error + ## The expression supplied could not be parsed due to a syntax error. + Syntax_Error message:Text line:Integer column:Integer + + ## Expression error when a function could not be found on the target type. + Unsupported_Operation name:Text + + ## Expression error when the number of arguments for a function is incorrect. + Argument_Mismatch message:Text + + to_display_text : Text + to_display_text self = case self of + Expression_Error.Syntax_Error _ _ _ -> "Expression.Syntax_Error: " + self.message + " (line " + self.line.to_text + ", column " + self.column.to_text + ")." + Expression_Error.Unsupported_Operation _ -> "Expression.Unsupported: " + self.name + " is not a supported method." + Expression_Error.Argument_Mismatch _ -> "Expression.Argument_Mismatch: " + self.message + +## PRIVATE +handle_java_error java_type enso_constructor = + Panic.catch_java java_type handler=(cause-> Error.throw (enso_constructor cause.getMessage)) diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso index 1cd06f462459..e30813807ebf 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Data/Table.enso @@ -24,6 +24,8 @@ import project.Internal.Table_Helpers import project.Internal.Aggregate_Column_Helper import project.Internal.Parse_Values_Helper import project.Internal.Problem_Builder.Problem_Builder +import project.Data.Expression.Expression +import project.Data.Expression.Expression_Error from project.Data.Column import get_item_string from project.Data.Column_Type_Selection import Column_Type_Selection, Auto @@ -38,6 +40,7 @@ polyglot java import org.enso.table.data.table.Table as Java_Table polyglot java import org.enso.table.data.table.Column as Java_Column polyglot java import org.enso.table.operations.OrderBuilder polyglot java import org.enso.table.data.mask.OrderMask +polyglot java import java.util.UUID ## Represents a column-oriented table data structure. 
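
   > Example
     Create a table from pairs of column names and value vectors.

         example_new = Table.new [["A", [1, 2, 3]], ["B", [4, 5, 6]]]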
type Table @@ -865,6 +868,13 @@ type Table on_problems.handle_errors fallback=self.with_no_rows <| mask (make_filter_column column filter) _ : Function -> mask (column.map filter) + _ : Text -> + table_at = self.at column + if table_at.is_error.not then self.filter table_at filter on_problems else + expression = self.evaluate column + if expression.is_error.not then self.filter expression filter on_problems else + pick_error = expression.catch Expression_Error.Syntax_Error (_->table_at) + on_problems.handle_errors pick_error fallback=self _ -> case on_problems.handle_errors (self.at column) fallback=Nothing of Nothing -> self resolved_column -> self.filter resolved_column filter on_problems @@ -922,7 +932,10 @@ type Table Arguments: - name: The name of the column to set the value of. - - column: The new value for the column called `name`. + - column: The new value for the column either a `Column` or `Text` of an + expression. + - on_problems: Specifies how to handle if a problem occurs, raising as a + warning by default. If a column with the given name already exists, it will be replaced. Otherwise a new column is added. @@ -937,13 +950,24 @@ type Table table = Examples.inventory_table double_inventory = table.at "total_stock" * 2 table.set "total_stock" double_inventory - set : Text -> Column | Vector.Vector -> Table - set self name column = case column of - _ : Vector.Vector -> - self.set name (Column.from_vector name column) - Column.Column_Data _ -> + table.set "total_stock_expr" "2 * [total_stock]" + set : Text -> Column | Vector.Vector | Text -> Problem_Behavior -> Table + set self name column on_problems=Report_Warning = case column of + _ : Text -> + expression = self.evaluate column + if expression.is_error.not then self.set name expression on_problems else + on_problems.handle_errors expression fallback=self + _ : Vector.Vector -> self.set name (Column.from_vector name column) + _ : Column -> Table.Table_Data (self.java_table.addOrReplaceColumn (column.rename name . java_column)) + ## PRIVATE + evaluate : Text -> Column + evaluate self expression = + get_column name = self.at name + make_constant value = Column.from_vector (UUID.randomUUID.to_text) (Vector.new self.row_count _->value) + Expression.evaluate expression get_column make_constant "Standard.Table.Data.Column" "Column" Column.var_args_functions + ## Returns the vector of columns contained in this table. > Examples diff --git a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso index 60433768b31a..fb05a306fe47 100644 --- a/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso +++ b/distribution/lib/Standard/Table/0.0.0-dev/src/Internal/Table_Helpers.enso @@ -192,10 +192,13 @@ type Table_Column_Helper resolve_column_helper : (Integer | Text | Column) -> Problem_Builder -> a | Nothing resolve_column_helper self selector problem_builder = case selector of _ : Text -> - matched_columns = Matching.match_criteria_callback Text_Matcher.Case_Sensitive self.internal_columns [selector] reorder=True name_mapper=(_.name) problem_callback=problem_builder.report_missing_input_columns + matched_columns = self.internal_columns.filter column->(column.name==selector) if matched_columns.length == 1 then matched_columns.first else - if matched_columns.length == 0 then Nothing else - Panic.throw (Illegal_State_Error_Data "A single exact match should never match more than one column. 
Perhaps the table breaks the invariant of unique column names?") + if matched_columns.length != 0 then Panic.throw (Illegal_State_Error_Data "A single exact match should never match more than one column. Perhaps the table breaks the invariant of unique column names?") else + expression = (self.table.evaluate selector).catch Any _->Nothing + if expression != Nothing then expression else + problem_builder.report_missing_input_columns [selector] + Nothing _ : Integer -> case is_index_valid self.internal_columns.length selector of True -> self.internal_columns.at selector False -> diff --git a/distribution/lib/Standard/Test/0.0.0-dev/src/Bench.enso b/distribution/lib/Standard/Test/0.0.0-dev/src/Bench.enso index 6ba3d1980108..79312f8db844 100644 --- a/distribution/lib/Standard/Test/0.0.0-dev/src/Bench.enso +++ b/distribution/lib/Standard/Test/0.0.0-dev/src/Bench.enso @@ -97,7 +97,7 @@ len_list list = Arguments: - act: The action to perform `count` number of times. -times : Integer-> List Any +times : Integer -> (Integer -> Any) -> List Any times count act = go = results -> number -> if number == 0 then results else @Tail_Call go (Cons (act number) results) number-1 diff --git a/project/plugins.sbt b/project/plugins.sbt index 68f47f9a8508..6270b42c4fcd 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -5,6 +5,7 @@ addSbtPlugin("org.scala-js" % "sbt-scalajs" % "1.10.1") addSbtPlugin("com.typesafe.sbt" % "sbt-license-report" % "1.2.0") addSbtPlugin("com.lightbend.sbt" % "sbt-java-formatter" % "0.7.0") addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.6") +addSbtPlugin("com.simplytyped" % "sbt-antlr4" % "0.8.3") libraryDependencies += "io.circe" %% "circe-yaml" % "0.14.1" libraryDependencies += "commons-io" % "commons-io" % "2.11.0" diff --git a/std-bits/table/src/main/antlr4/Expression.g4 b/std-bits/table/src/main/antlr4/Expression.g4 new file mode 100644 index 000000000000..825996d8845a --- /dev/null +++ b/std-bits/table/src/main/antlr4/Expression.g4 @@ -0,0 +1,126 @@ +grammar Expression; +prog: expr EOF ; + +expr: expr op=POWER expr # Power + | expr op=(MULTIPLY|DIVIDE|MODULO) expr # MultDivMod + | expr op=(ADD|MINUS) expr # AddSub + | expr op=(EQUALS|NOT_EQUALS|LESS_THAN_OR_EQUAL|GREATER_THAN_OR_EQUAL|LESS_THAN|GREATER_THAN) expr # Compare + | expr (IS_NULL|IS_EMPTY|IS_NOT_EMPTY|IS_NOT_NULL) # IsNull + | expr (LIKE|NOT_LIKE) expr # Like + | expr (IN|NOT_IN) '(' expr (',' expr)* ')' # In + | expr (NOT_BETWEEN | BETWEEN) expr AND expr # Between + | UNARY_NOT expr # UnaryNot + | expr op=(AND | '&&') expr # And + | expr op=(OR | '||') expr # Or + | IF expr THEN expr ELSE expr END? # If + | IDENTIFIER '(' (expr (',' expr)*)? ')' # Function // This allows for functions of 0 or more arguments within brackets (e.g. PI(), SIN(1), MOD(3,4) etc.) 
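+    // Arity is not enforced by the grammar; ExpressionVisitorImpl validates argument counts when the method is invoked.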
+ | '(' expr ')' # Paren + | COLUMN_NAME # Column + | MINUS expr # UnaryMinus + | value # Literal + ; + +POWER : '^'; +MULTIPLY : '*'; +DIVIDE : '/'; +MODULO : '%'; +ADD : '+'; +MINUS : '-'; +EQUALS : '==' | '='; +NOT_EQUALS : '!=' | '<>'; +LESS_THAN_OR_EQUAL : '<='; +GREATER_THAN_OR_EQUAL : '>='; +LESS_THAN : '<'; +GREATER_THAN : '>'; + +WHITESPACE : [ \t\r\n]+ -> skip; + +fragment A:[aA]; +fragment B:[bB]; +fragment C:[cC]; +fragment D:[dD]; +fragment E:[eE]; +fragment F:[fF]; +fragment G:[gG]; +fragment H:[hH]; +fragment I:[iI]; +fragment J:[jJ]; +fragment K:[kK]; +fragment L:[lL]; +fragment M:[mM]; +fragment N:[nN]; +fragment O:[oO]; +fragment P:[pP]; +fragment Q:[qQ]; +fragment R:[rR]; +fragment S:[sS]; +fragment T:[tT]; +fragment U:[uU]; +fragment V:[vV]; +fragment W:[wW]; +fragment X:[xX]; +fragment Y:[yY]; +fragment Z:[zZ]; +fragment LETTER : [A-Za-z]; +fragment DIGIT : [0-9]; +fragment HEX : [0-9a-fA-F]; +fragment IS : I S; +fragment EMPTY : E M P T Y; + +AND : A N D ; +OR : O R ; +NULL : N U L L; +NOTHING : N O T H I N G; +IS_NULL: IS ' ' (NOTHING | NULL); +IS_NOT_NULL : IS ' ' N O T ' ' (NOTHING | NULL); +IS_EMPTY: IS ' ' EMPTY; +IS_NOT_EMPTY : IS ' ' N O T ' ' EMPTY; +LIKE : L I K E; +NOT_LIKE : N O T ' ' LIKE; +IN : I N; +NOT_IN : N O T ' ' IN; +BETWEEN : B E T W E E N; +NOT_BETWEEN : N O T ' ' BETWEEN; +TRUE : T R U E; +FALSE : F A L S E; +IF : I F; +THEN : T H E N; +ELSE : E L S E; +UNARY_NOT : (N O T) | '!'; +END : E N D IF?; + +IDENTIFIER : LETTER (LETTER|DIGIT|'_')*; + +EXCEL_STRING : '"' ('""'|~'"')* '"'; + +PYTHON_STRING : '\'' (ESC|~['])* '\''; +fragment ESC : '\\' [abtnfrv"'\\] | '\\u' HEX HEX HEX HEX | '\\U' HEX HEX HEX HEX HEX HEX HEX HEX | '\\x' HEX HEX; + +fragment YEAR : DIGIT DIGIT DIGIT DIGIT; +fragment DATE_PART : '-' DIGIT DIGIT; +fragment HOUR : DIGIT DIGIT; +fragment TIME_PART : ':' DIGIT DIGIT; +fragment NANO_PART : '.' DIGIT DIGIT? DIGIT? DIGIT? DIGIT? DIGIT? DIGIT? DIGIT? DIGIT?; +fragment UTCOFFSET : ('Z' | ('+'|'-') HOUR TIME_PART?); +fragment TIMEZONE : '[' (~']')+ ']'; +fragment INTEGER : '0' | [1-9] (DIGIT | '_')* ; +fragment DECIMAL : '.' (DIGIT | '_')+; + +DATE : YEAR DATE_PART DATE_PART ; +TIME : HOUR TIME_PART (TIME_PART NANO_PART?)? ; +DATE_TIME : YEAR DATE_PART DATE_PART ('T' | ' ') HOUR TIME_PART (TIME_PART NANO_PART?)? UTCOFFSET? TIMEZONE? ; + +NUMBER : INTEGER DECIMAL? ; + +value + : (NULL | NOTHING) # nullOrNothing + | (TRUE | FALSE) # boolean + | '#' text=DATE '#' # date + | '#' text=TIME '#' # time + | '#' text=DATE_TIME '#' # datetime + | NUMBER # number + | EXCEL_STRING # excelString + | PYTHON_STRING # pythonString + ; + +COLUMN_NAME : '[' (']]'|~']')* ']'; diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/Aggregator.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/Aggregator.java deleted file mode 100644 index 86d817f27c29..000000000000 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/Aggregator.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.enso.table.data.column.operation.aggregate; - -import org.enso.table.data.column.storage.Storage; - -import java.util.stream.IntStream; - -/** - * Represents a fold-like operation on a storage. An aggregator is usually created for a given - * storage, then {@link #nextGroup(IntStream)} is repeatedly called and the aggregator is - * responsible for collecting the results of such calls. After that, {@link #seal()} is called to - * obtain a storage containing all the results. 
- */ -public abstract class Aggregator { - /** - * Requests the aggregator to append the result of aggregating the values at the specified - * positions. - * - * @param positions the positions to aggregate in this round. - */ - public abstract void nextGroup(IntStream positions); - - /** - * Returns the results of all previous {@link #nextGroup(IntStream)} calls. - * - * @return the storage containing all aggregation results. - */ - public abstract Storage seal(); -} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/CountAggregator.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/CountAggregator.java deleted file mode 100644 index 391dbac5a23f..000000000000 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/CountAggregator.java +++ /dev/null @@ -1,33 +0,0 @@ -package org.enso.table.data.column.operation.aggregate; - -import org.enso.table.data.column.storage.LongStorage; -import org.enso.table.data.column.storage.Storage; - -import java.util.stream.IntStream; - -/** Aggregates a storage by counting the non-missing values in each group. */ -public class CountAggregator extends Aggregator { - private final Storage storage; - private final long[] counts; - private int position = 0; - - /** - * @param storage the storage used as data source - * @param resultSize the exact number of times {@link Aggregator#nextGroup(IntStream)} will be - * called. - */ - public CountAggregator(Storage storage, int resultSize) { - this.storage = storage; - this.counts = new long[resultSize]; - } - - @Override - public void nextGroup(IntStream positions) { - counts[position++] = positions.filter(i -> !storage.isNa(i)).count(); - } - - @Override - public Storage seal() { - return new LongStorage(counts); - } -} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/FunctionAggregator.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/FunctionAggregator.java deleted file mode 100644 index 411f0edaa664..000000000000 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/FunctionAggregator.java +++ /dev/null @@ -1,59 +0,0 @@ -package org.enso.table.data.column.operation.aggregate; - -import org.enso.base.polyglot.Polyglot_Utils; -import org.enso.table.data.column.builder.object.InferredBuilder; -import org.enso.table.data.column.storage.Storage; -import org.graalvm.polyglot.Value; - -import java.util.List; -import java.util.Objects; -import java.util.function.Function; -import java.util.stream.Collectors; -import java.util.stream.IntStream; -import java.util.stream.Stream; - -/** Aggregates the storage using a provided {@link Function}. 
*/ -public class FunctionAggregator extends Aggregator { - private final Function, Value> aggregateFunction; - private final boolean skipNa; - private final Storage storage; - private final InferredBuilder builder; - - /** - * @param aggregateFunction the function used to obtain aggregation of a group - * @param storage the storage serving as data source - * @param skipNa whether missing values should be passed to the function - * @param resultSize the number of times {@link Aggregator#nextGroup(IntStream)} will be called - */ - public FunctionAggregator( - Function, Value> aggregateFunction, - Storage storage, - boolean skipNa, - int resultSize) { - this.aggregateFunction = aggregateFunction; - this.storage = storage; - this.skipNa = skipNa; - this.builder = new InferredBuilder(resultSize); - } - - @Override - public void nextGroup(IntStream positions) { - List items = getItems(positions); - Value result = aggregateFunction.apply(items); - Object converted = Polyglot_Utils.convertPolyglotValue(result); - builder.appendNoGrow(converted); - } - - private List getItems(IntStream positions) { - Stream items = positions.mapToObj(storage::getItemBoxed); - if (skipNa) { - items = items.filter(Objects::nonNull); - } - return items.collect(Collectors.toList()); - } - - @Override - public Storage seal() { - return builder.seal(); - } -} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/numeric/LongToLongAggregator.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/numeric/LongToLongAggregator.java deleted file mode 100644 index ea8919174748..000000000000 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/numeric/LongToLongAggregator.java +++ /dev/null @@ -1,59 +0,0 @@ -package org.enso.table.data.column.operation.aggregate.numeric; - -import org.enso.table.data.column.operation.aggregate.Aggregator; -import org.enso.table.data.column.storage.LongStorage; -import org.enso.table.data.column.storage.Storage; - -import java.util.BitSet; -import java.util.stream.IntStream; -import java.util.stream.LongStream; - -/** An aggregator consuming a {@link LongStorage} and returning a {@link LongStorage} */ -public abstract class LongToLongAggregator extends Aggregator { - private final LongStorage storage; - private final long[] items; - private final BitSet missing; - private int position = 0; - - /** - * @param storage the data source - * @param resultSize the number of times {@link Aggregator#nextGroup(IntStream)} will be called - */ - public LongToLongAggregator(LongStorage storage, int resultSize) { - this.storage = storage; - this.items = new long[resultSize]; - this.missing = new BitSet(); - } - - /** Used by subclasses to return a missing value from a given group. */ - protected void submitMissing() { - missing.set(position++); - } - - /** - * Used by subclasses to return a value from a given group. - * - * @param value the return value of a group - */ - protected void submit(long value) { - items[position++] = value; - } - - /** - * Runs the aggregation on a particular set of values. 
- * - * @param items the values contained in the current group - */ - protected abstract void runGroup(LongStream items); - - @Override - public void nextGroup(IntStream positions) { - LongStream items = positions.filter(x -> !storage.isNa(x)).mapToLong(storage::getItem); - runGroup(items); - } - - @Override - public Storage seal() { - return new LongStorage(items, items.length, missing); - } -} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/numeric/NumericAggregator.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/numeric/NumericAggregator.java deleted file mode 100644 index ed8bec7c0178..000000000000 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/aggregate/numeric/NumericAggregator.java +++ /dev/null @@ -1,78 +0,0 @@ -package org.enso.table.data.column.operation.aggregate.numeric; - -import org.enso.table.data.column.operation.aggregate.Aggregator; -import org.enso.table.data.column.storage.DoubleStorage; -import org.enso.table.data.column.storage.NumericStorage; -import org.enso.table.data.column.storage.Storage; - -import java.util.BitSet; -import java.util.OptionalDouble; -import java.util.stream.DoubleStream; -import java.util.stream.IntStream; - -/** - * An aggregator sourcing data from any {@link NumericStorage} and returning a {@link - * DoubleStorage}. - */ -public abstract class NumericAggregator extends Aggregator { - private final NumericStorage storage; - private final long[] data; - private final BitSet missing; - private int position = 0; - - /** - * @param storage the data source - * @param resultSize the number of times {@link Aggregator#nextGroup(IntStream)} will be called - */ - public NumericAggregator(NumericStorage storage, int resultSize) { - this.storage = storage; - this.data = new long[resultSize]; - this.missing = new BitSet(); - } - - /** - * Runs the aggregation on a particular set of values. - * - * @param elements the values contained in the current group - */ - protected abstract void runGroup(DoubleStream elements); - - /** - * Used by subclasses to return a value from a given group. - * - * @param value the return value of a group - */ - protected void submit(double value) { - data[position++] = Double.doubleToRawLongBits(value); - } - - /** - * Used by subclasses to return a value from a given group. - * - * @param value the return value of a group - */ - protected void submit(OptionalDouble value) { - if (value.isPresent()) { - submit(value.getAsDouble()); - } else { - submitMissing(); - } - } - - /** Used by subclasses to return a missing value from a given group. 
*/ - protected void submitMissing() { - missing.set(position++); - } - - @Override - public void nextGroup(IntStream positions) { - DoubleStream elements = - positions.filter(i -> !storage.isNa(i)).mapToDouble(storage::getItemDouble); - runGroup(elements); - } - - @Override - public Storage seal() { - return new DoubleStorage(data, data.length, missing); - } -} diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongNumericOp.java b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongNumericOp.java index b0657f4d0361..a72225584138 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongNumericOp.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/operation/map/numeric/LongNumericOp.java @@ -11,11 +11,11 @@ /** An operation expecting a numeric argument and returning a boolean. */ public abstract class LongNumericOp extends MapOperation { - private final boolean alwaysCast; + private final boolean alwaysCastToDouble; - public LongNumericOp(String name, boolean alwaysCast) { + public LongNumericOp(String name, boolean alwaysCastToDouble) { super(name); - this.alwaysCast = true; + this.alwaysCastToDouble = alwaysCastToDouble; } public LongNumericOp(String name) { @@ -28,8 +28,7 @@ public LongNumericOp(String name) { @Override public NumericStorage runMap(LongStorage storage, Object arg) { - if (arg instanceof Long && !alwaysCast) { - long x = (Long) arg; + if (!alwaysCastToDouble && arg instanceof Long x) { long[] newVals = new long[storage.size()]; for (int i = 0; i < storage.size(); i++) { if (!storage.isNa(i)) { @@ -57,12 +56,16 @@ public NumericStorage runZip(LongStorage storage, Storage arg) { BitSet newMissing = new BitSet(); for (int i = 0; i < storage.size(); i++) { if (!storage.isNa(i) && i < v.size() && !v.isNa(i)) { - out[i] = doLong(storage.getItem(i), v.getItem(i)); + out[i] = alwaysCastToDouble + ? Double.doubleToRawLongBits(doDouble(storage.getItem(i), v.getItem(i))) + : doLong(storage.getItem(i), v.getItem(i)); } else { newMissing.set(i); } } - return new LongStorage(out, storage.size(), newMissing); + return alwaysCastToDouble + ? 
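+        // `out` was filled with Double.doubleToRawLongBits values above, so it carries double data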
new DoubleStorage(out, storage.size(), newMissing) + : new LongStorage(out, storage.size(), newMissing); } else if (arg instanceof DoubleStorage v) { long[] out = new long[storage.size()]; BitSet newMissing = new BitSet(); diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/BoolStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/BoolStorage.java index e5b7891d053c..82b9e11517f1 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/BoolStorage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/BoolStorage.java @@ -2,6 +2,8 @@ import java.util.BitSet; import java.util.List; +import java.util.function.IntFunction; + import org.enso.base.polyglot.Polyglot_Utils; import org.enso.table.data.column.builder.object.InferredBuilder; import org.enso.table.data.column.operation.map.MapOpStorage; @@ -168,21 +170,29 @@ public boolean isNegated() { } public Storage iif(Value when_true, Value when_false) { - Object on_true = Polyglot_Utils.convertPolyglotValue(when_true); - Object on_false = Polyglot_Utils.convertPolyglotValue(when_false); + var on_true = makeRowProvider(when_true); + var on_false = makeRowProvider(when_false); InferredBuilder builder = new InferredBuilder(size); for (int i = 0; i < size; i++) { if (isMissing.get(i)) { builder.append(null); } else if (getItem(i)) { - builder.append(on_true); + builder.append(on_true.apply(i)); } else { - builder.append(on_false); + builder.append(on_false.apply(i)); } } return builder.seal(); } + private static IntFunction makeRowProvider(Value value) { + if (value.isHostObject() && value.asHostObject() instanceof Storage s) { + return i->(Object)s.getItemBoxed(i); + } + var converted = Polyglot_Utils.convertPolyglotValue(value); + return i->converted; + } + private static MapOpStorage buildOps() { MapOpStorage ops = new MapOpStorage<>(); ops.add( diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/DoubleStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/DoubleStorage.java index 4553371e7e34..56f61b963094 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/DoubleStorage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/DoubleStorage.java @@ -1,12 +1,9 @@ package org.enso.table.data.column.storage; import java.util.BitSet; -import java.util.HashSet; import java.util.List; -import org.enso.base.polyglot.NumericConverter; import org.enso.table.data.column.builder.object.NumericBuilder; import org.enso.table.data.column.operation.map.MapOpStorage; -import org.enso.table.data.column.operation.map.SpecializedIsInOp; import org.enso.table.data.column.operation.map.UnaryMapOperation; import org.enso.table.data.column.operation.map.numeric.DoubleBooleanOp; import org.enso.table.data.column.operation.map.numeric.DoubleIsInOp; @@ -209,6 +206,13 @@ protected double doDouble(double a, double b) { return a % b; } }) + .add( + new DoubleNumericOp(Maps.POWER) { + @Override + protected double doDouble(double a, double b) { + return Math.pow(a, b); + } + }) .add( new DoubleBooleanOp(Maps.LT) { @Override diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/LongStorage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/LongStorage.java index 2215061ef5ca..3d056d9f9d67 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/LongStorage.java +++ 
b/std-bits/table/src/main/java/org/enso/table/data/column/storage/LongStorage.java @@ -1,16 +1,9 @@ package org.enso.table.data.column.storage; import java.util.BitSet; -import java.util.HashSet; import java.util.List; -import java.util.OptionalLong; -import java.util.stream.LongStream; -import org.enso.base.polyglot.NumericConverter; import org.enso.table.data.column.builder.object.NumericBuilder; -import org.enso.table.data.column.operation.aggregate.Aggregator; -import org.enso.table.data.column.operation.aggregate.numeric.LongToLongAggregator; import org.enso.table.data.column.operation.map.MapOpStorage; -import org.enso.table.data.column.operation.map.SpecializedIsInOp; import org.enso.table.data.column.operation.map.UnaryMapOperation; import org.enso.table.data.column.operation.map.numeric.LongBooleanOp; import org.enso.table.data.column.operation.map.numeric.LongIsInOp; @@ -43,17 +36,13 @@ public LongStorage(long[] data) { this(data, data.length, new BitSet()); } - /** - * @inheritDoc - */ + /** @inheritDoc */ @Override public int size() { return size; } - /** - * @inheritDoc - */ + /** @inheritDoc */ @Override public int countMissing() { return isMissing.cardinality(); @@ -77,17 +66,13 @@ public Long getItemBoxed(int idx) { return isMissing.get(idx) ? null : data[idx]; } - /** - * @inheritDoc - */ + /** @inheritDoc */ @Override public int getType() { return Type.LONG; } - /** - * @inheritDoc - */ + /** @inheritDoc */ @Override public boolean isNa(long idx) { return isMissing.get((int) idx); @@ -108,46 +93,6 @@ protected Storage runVectorizedZip(String name, Storage argument) { return ops.runZip(name, this, argument); } - @Override - protected Aggregator getVectorizedAggregator(String name, int resultSize) { - return switch (name) { - case Aggregators.SUM -> new LongToLongAggregator(this, resultSize) { - @Override - protected void runGroup(LongStream items) { - long[] elements = items.toArray(); - if (elements.length == 0) { - submitMissing(); - } else { - submit(LongStream.of(elements).sum()); - } - } - }; - case Aggregators.MAX -> new LongToLongAggregator(this, resultSize) { - @Override - protected void runGroup(LongStream items) { - OptionalLong r = items.max(); - if (r.isPresent()) { - submit(r.getAsLong()); - } else { - submitMissing(); - } - } - }; - case Aggregators.MIN -> new LongToLongAggregator(this, resultSize) { - @Override - protected void runGroup(LongStream items) { - OptionalLong r = items.min(); - if (r.isPresent()) { - submit(r.getAsLong()); - } else { - submitMissing(); - } - } - }; - default -> super.getVectorizedAggregator(name, resultSize); - }; - } - private Storage fillMissingDouble(double arg) { final var builder = NumericBuilder.createDoubleBuilder(size()); long rawArg = Double.doubleToRawLongBits(arg); @@ -291,6 +236,19 @@ public long doLong(long in, long arg) { return in % arg; } }) + .add( + new LongNumericOp(Maps.POWER, true) { + @Override + public double doDouble(long in, double arg) { + return Math.pow(in, arg); + } + + @Override + public long doLong(long in, long arg) { + throw new IllegalStateException( + "Internal error: Power operation should cast to double."); + } + }) .add( new LongNumericOp(Maps.DIV, true) { @Override @@ -300,7 +258,7 @@ public double doDouble(long in, double arg) { @Override public long doLong(long in, long arg) { - return in / arg; + throw new UnsupportedOperationException("Divide operation should cast to double."); } }) .add( diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/NumericStorage.java 
b/std-bits/table/src/main/java/org/enso/table/data/column/storage/NumericStorage.java index d577fc41f084..50ebd4bc3e28 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/NumericStorage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/NumericStorage.java @@ -1,9 +1,5 @@ package org.enso.table.data.column.storage; -import java.util.stream.DoubleStream; -import org.enso.table.data.column.operation.aggregate.Aggregator; -import org.enso.table.data.column.operation.aggregate.numeric.NumericAggregator; - /** A storage containing items representable as a {@code double}. */ public abstract class NumericStorage extends Storage { /** @@ -14,45 +10,4 @@ public abstract class NumericStorage extends Storage { * @return the value associated with {@code idx} */ public abstract double getItemDouble(int idx); - - @Override - protected Aggregator getVectorizedAggregator(String name, int resultSize) { - switch (name) { - case Aggregators.MAX: - return new NumericAggregator(this, resultSize) { - @Override - protected void runGroup(DoubleStream elements) { - submit(elements.max()); - } - }; - case Aggregators.MIN: - return new NumericAggregator(this, resultSize) { - @Override - protected void runGroup(DoubleStream elements) { - submit(elements.min()); - } - }; - case Aggregators.SUM: - return new NumericAggregator(this, resultSize) { - @Override - protected void runGroup(DoubleStream elements) { - double[] its = elements.toArray(); - if (its.length == 0) { - submitMissing(); - } else { - submit(DoubleStream.of(its).sum()); - } - } - }; - case Aggregators.MEAN: - return new NumericAggregator(this, resultSize) { - @Override - protected void runGroup(DoubleStream elements) { - submit(elements.average()); - } - }; - default: - return super.getVectorizedAggregator(name, resultSize); - } - } } diff --git a/std-bits/table/src/main/java/org/enso/table/data/column/storage/Storage.java b/std-bits/table/src/main/java/org/enso/table/data/column/storage/Storage.java index 545b1e3a302e..3c5433397b77 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/column/storage/Storage.java +++ b/std-bits/table/src/main/java/org/enso/table/data/column/storage/Storage.java @@ -9,9 +9,6 @@ import org.enso.table.data.column.builder.object.Builder; import org.enso.table.data.column.builder.object.InferredBuilder; import org.enso.table.data.column.builder.object.ObjectBuilder; -import org.enso.table.data.column.operation.aggregate.Aggregator; -import org.enso.table.data.column.operation.aggregate.CountAggregator; -import org.enso.table.data.column.operation.aggregate.FunctionAggregator; import org.enso.table.data.mask.OrderMask; import org.enso.table.data.mask.SliceRange; import org.graalvm.polyglot.Value; @@ -73,6 +70,7 @@ public static final class Maps { public static final String SUB = "-"; public static final String DIV = "/"; public static final String MOD = "%"; + public static final String POWER = "^"; public static final String NOT = "not"; public static final String AND = "&&"; public static final String OR = "||"; @@ -86,14 +84,6 @@ public static final class Maps { public static final String IS_IN = "is_in"; } - public static final class Aggregators { - public static final String SUM = "sum"; - public static final String MEAN = "mean"; - public static final String MAX = "max"; - public static final String MIN = "min"; - public static final String COUNT = "count"; - } - /** * Specifies if the given operation has a vectorized implementation available for this storage. 
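   * <p>Operation names are the {@code Maps} constants, for example the {@code Maps.POWER}
   * ({@code "^"}) entry added in this change.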
*/ @@ -137,36 +127,6 @@ public final Storage bimap( return builder.seal(); } - protected Aggregator getVectorizedAggregator(String name, int resultSize) { - if (name.equals(Aggregators.COUNT)) { - return new CountAggregator(this, resultSize); - } - return null; - } - - /** - * Returns an aggregator created based on the provided parameters. - * - * @param name name of a vectorized operation that can be used if possible. If null is passed, - * this parameter is unused. - * @param fallback the function to use if a vectorized operation is not available. - * @param skipNa whether missing values should be passed to the {@code fallback} function. - * @param resultSize the number of times the {@link - * Aggregator#nextGroup(java.util.stream.IntStream)} method will be called. - * @return an aggregator satisfying the above properties. - */ - public final Aggregator getAggregator( - String name, Function, Value> fallback, boolean skipNa, int resultSize) { - Aggregator result = null; - if (name != null) { - result = getVectorizedAggregator(name, resultSize); - } - if (result == null) { - result = new FunctionAggregator(fallback, this, skipNa, resultSize); - } - return result; - } - /** * Runs a function on each non-missing element in this storage and gathers the results. * diff --git a/std-bits/table/src/main/java/org/enso/table/data/table/Column.java b/std-bits/table/src/main/java/org/enso/table/data/table/Column.java index b3254ae03033..1f6967bee90b 100644 --- a/std-bits/table/src/main/java/org/enso/table/data/table/Column.java +++ b/std-bits/table/src/main/java/org/enso/table/data/table/Column.java @@ -2,7 +2,6 @@ import org.enso.base.polyglot.Polyglot_Utils; import org.enso.table.data.column.builder.object.InferredBuilder; -import org.enso.table.data.column.operation.aggregate.Aggregator; import org.enso.table.data.column.storage.BoolStorage; import org.enso.table.data.column.storage.Storage; import org.enso.table.data.index.DefaultIndex; @@ -15,8 +14,6 @@ import java.util.BitSet; import java.util.List; -import java.util.function.Function; -import java.util.stream.IntStream; /** A representation of a column. Consists of a column name and the underlying storage. */ public class Column { @@ -89,11 +86,11 @@ public Column mask(Index maskedIndex, BitSet mask, int cardinality) { * @return the result of masking this column with the provided column */ public Column mask(Column maskCol) { - if (!(maskCol.getStorage() instanceof BoolStorage storage)) { + if (!(maskCol.getStorage() instanceof BoolStorage boolStorage)) { throw new UnexpectedColumnTypeException("Boolean"); } - var mask = BoolStorage.toMask(storage); + var mask = BoolStorage.toMask(boolStorage); var localStorageMask = new BitSet(); localStorageMask.set(0, getStorage().size()); mask.and(localStorageMask); @@ -156,25 +153,6 @@ public Index getIndex() { return index; } - /** - * Aggregates the values in this column, using a given aggregation operation. - * - * @param aggName name of a vectorized operation that can be used if possible. If null is passed, - * this parameter is unused. - * @param aggregatorFunction the function to use if a vectorized operation is not available. - * @param skipNa whether missing values should be passed to the {@code fallback} function. - * @return a column indexed by the unique index of this aggregate, storing results of applying the - * specified operation. 
- */ - public Object aggregate( - String aggName, Function, Value> aggregatorFunction, boolean skipNa) { - Aggregator aggregator = storage.getAggregator(aggName, aggregatorFunction, skipNa, 1); - - IntStream ixes = IntStream.range(0, storage.size()); - aggregator.nextGroup(ixes); - return aggregator.seal().getItemBoxed(0); - } - /** * @param mask the reordering to apply * @return a new column, resulting from reordering this column according to {@code mask}. diff --git a/std-bits/table/src/main/java/org/enso/table/expressions/ExpressionVisitorImpl.java b/std-bits/table/src/main/java/org/enso/table/expressions/ExpressionVisitorImpl.java new file mode 100644 index 000000000000..331360c5a9d2 --- /dev/null +++ b/std-bits/table/src/main/java/org/enso/table/expressions/ExpressionVisitorImpl.java @@ -0,0 +1,373 @@ +package org.enso.table.expressions; + +import org.antlr.v4.runtime.BaseErrorListener; +import org.antlr.v4.runtime.CharStreams; +import org.antlr.v4.runtime.CommonTokenStream; +import org.antlr.v4.runtime.RecognitionException; +import org.antlr.v4.runtime.Recognizer; + +import org.graalvm.polyglot.Context; +import org.graalvm.polyglot.PolyglotException; +import org.graalvm.polyglot.Value; + +import java.time.LocalDate; +import java.time.LocalTime; +import java.time.LocalDateTime; +import java.time.ZonedDateTime; +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; +import java.util.Arrays; +import java.util.HashSet; +import java.util.Set; +import java.util.function.Function; +import java.util.regex.Pattern; + +public class ExpressionVisitorImpl extends ExpressionBaseVisitor { + private static class ThrowOnErrorListener extends BaseErrorListener { + public static final ThrowOnErrorListener INSTANCE = new ThrowOnErrorListener(); + + @Override + public void syntaxError( + Recognizer recognizer, + Object offendingSymbol, + int line, + int charPositionInLine, + String msg, + RecognitionException e) + throws SyntaxErrorException { + throw new SyntaxErrorException(msg, line, charPositionInLine); + } + } + + public static class SyntaxErrorException extends RuntimeException { + private final int line; + private final int column; + + public SyntaxErrorException(String message, int line, int column) { + super(message); + this.line = line; + this.column = column; + } + + public int getLine() { + return line; + } + + public int getColumn() { + return column; + } + } + + public static Value evaluate( + String expression, + Function getColumn, + Function makeConstantColumn, + String moduleName, + String typeName, + String[] variableArgumentFunctions) + throws UnsupportedOperationException, IllegalArgumentException { + var lexer = new ExpressionLexer(CharStreams.fromString(expression)); + lexer.removeErrorListeners(); + lexer.addErrorListener(ThrowOnErrorListener.INSTANCE); + + var tokens = new CommonTokenStream(lexer); + var parser = new ExpressionParser(tokens); + parser.removeErrorListeners(); + parser.addErrorListener(ThrowOnErrorListener.INSTANCE); + + var visitor = + new ExpressionVisitorImpl( + getColumn, makeConstantColumn, moduleName, typeName, variableArgumentFunctions); + + var expr = parser.prog(); + return visitor.visit(expr); + } + + private final Function getColumn; + private final Function makeConstantColumn; + private final Function getMethod; + private final Set variableArgumentFunctions; + + private ExpressionVisitorImpl( + Function getColumn, + Function makeConstantColumn, + String moduleName, + String typeName, + 
String[] variableArgumentFunctions) { + this.getColumn = getColumn; + this.makeConstantColumn = makeConstantColumn; + + final Value module = + Context.getCurrent().getBindings("enso").invokeMember("get_module", moduleName); + final Value type = module.invokeMember("get_type", typeName); + this.getMethod = name -> module.invokeMember("get_method", type, name); + + this.variableArgumentFunctions = new HashSet<>(Arrays.asList(variableArgumentFunctions)); + } + + private Value wrapAsColumn(Value value) { + if (value.isNull()) { + return makeConstantColumn.apply(value); + } + + var metaObject = value.getMetaObject(); + return metaObject != null && metaObject.asHostObject() instanceof Class + ? makeConstantColumn.apply(value) + : value; + } + + private Value executeMethod(String name, Value... args) { + Value method = getMethod.apply(name); + if (!method.canExecute()) { + throw new UnsupportedOperationException(name); + } + + Object[] objects; + if (this.variableArgumentFunctions.contains(name)) { + objects = new Object[2]; + objects[0] = args[0]; + objects[1] = Arrays.copyOfRange(args, 1, args.length, Object[].class); + } else { + objects = Arrays.copyOf(args, args.length, Object[].class); + } + objects[0] = wrapAsColumn(args[0]); + + try { + var result = method.execute(objects); + if (result.canExecute()) { + throw new IllegalArgumentException("Insufficient arguments for method " + name + "."); + } + return result; + } catch (PolyglotException e) { + if (e.getMessage().startsWith("Type error: expected a function")) { + throw new IllegalArgumentException("Too many arguments for method " + name + "."); + } + throw e; + } + } + + @Override + public Value visitProg(ExpressionParser.ProgContext ctx) { + Value base = visit(ctx.expr()); + return wrapAsColumn(base); + } + + @Override + public Value visitColumn(ExpressionParser.ColumnContext ctx) { + var text = ctx.getText(); + return getColumn.apply(text.substring(1, text.length() - 1).replace("]]", "]")); + } + + @Override + public Value visitPower(ExpressionParser.PowerContext ctx) { + return executeMethod("^", visit(ctx.expr(0)), visit(ctx.expr(1))); + } + + @Override + public Value visitMultDivMod(ExpressionParser.MultDivModContext ctx) { + return executeMethod(ctx.op.getText(), visit(ctx.expr(0)), visit(ctx.expr(1))); + } + + @Override + public Value visitCompare(ExpressionParser.CompareContext ctx) { + var op = ctx.op.getText(); + if (op.equals("=")) { + op = "=="; + } + if (op.equals("<>")) { + op = "!="; + } + + return executeMethod(op, visit(ctx.expr(0)), visit(ctx.expr(1))); + } + + @Override + public Value visitLike(ExpressionParser.LikeContext ctx) { + var condition = executeMethod("like", visit(ctx.expr(0)), visit(ctx.expr(1))); + return ctx.NOT_LIKE() != null ? executeMethod("not", condition) : condition; + } + + @Override + public Value visitIsNull(ExpressionParser.IsNullContext ctx) { + var op = ctx.IS_NULL() != null || ctx.IS_NOT_NULL() != null ? "is_missing" : "is_empty"; + var condition = executeMethod(op, visit(ctx.expr())); + return ctx.IS_NOT_NULL() != null || ctx.IS_NOT_EMPTY() != null + ? 
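+        // negated forms (IS NOT NULL / IS NOT EMPTY) reuse the positive check wrapped in `not`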
executeMethod("not", condition) + : condition; + } + + @Override + public Value visitIf(ExpressionParser.IfContext ctx) { + return executeMethod("iif", visit(ctx.expr(0)), visit(ctx.expr(1)), visit(ctx.expr(2))); + } + + @Override + public Value visitAddSub(ExpressionParser.AddSubContext ctx) { + return executeMethod(ctx.op.getText(), visit(ctx.expr(0)), visit(ctx.expr(1))); + } + + @Override + public Value visitAnd(ExpressionParser.AndContext ctx) { + return executeMethod("&&", visit(ctx.expr(0)), visit(ctx.expr(1))); + } + + @Override + public Value visitOr(ExpressionParser.OrContext ctx) { + return executeMethod("||", visit(ctx.expr(0)), visit(ctx.expr(1))); + } + + @Override + public Value visitUnaryNot(ExpressionParser.UnaryNotContext ctx) { + return executeMethod("not", visit(ctx.expr())); + } + + @Override + public Value visitUnaryMinus(ExpressionParser.UnaryMinusContext ctx) { + return executeMethod("*", visit(ctx.expr()), Value.asValue(-1)); + } + + @Override + public Value visitNullOrNothing(ExpressionParser.NullOrNothingContext ctx) { + return Value.asValue(null); + } + + @Override + public Value visitBoolean(ExpressionParser.BooleanContext ctx) { + return Value.asValue(ctx.TRUE() != null); + } + + @Override + public Value visitNumber(ExpressionParser.NumberContext ctx) { + var text = ctx.getText().replace("_", ""); + if (text.contains(".")) { + return Value.asValue(Double.parseDouble(text)); + } else { + return Value.asValue(Long.parseLong(text)); + } + } + + @Override + public Value visitExcelString(ExpressionParser.ExcelStringContext ctx) { + var text = ctx.getText(); + return Value.asValue(text.substring(1, text.length() - 1).replace("\"\"", "\"")); + } + + private static final Pattern pythonRegex = Pattern.compile("(\\\\[abtnfrv\"'\\\\])|(\\\\(x[0-9a-fA-F]{2}|u[0-9a-fA-F]{4}|U[0-9a-fA-F]{4}))|\\\\|([^\\\\]+)"); + + private static String unescapePython(String text) { + var matcher = pythonRegex.matcher(text); + var builder = new StringBuilder(text.length()); + while (matcher.find()) { + if (matcher.group(1) != null) { + builder.append(switch (matcher.group(1).charAt(1)) { + case 'a' -> (char) 0x07; + case 'f' -> (char) 0x0c; + case 'b' -> '\b'; + case 't' -> '\t'; + case 'r' -> '\r'; + case 'n' -> '\n'; + case 'v' -> (char) 0x0b; + case '\\' -> '\\'; + case '\'' -> '\''; + case '"' -> '"'; + default -> throw new IllegalArgumentException("Unknown Python escape sequence."); + }); + } else if (matcher.group(2) != null) { + builder.append((char) Integer.parseInt(matcher.group(2).substring(2), 16)); + } else { + builder.append(matcher.group(0)); + } + } + return builder.toString(); + } + + @Override + public Value visitPythonString(ExpressionParser.PythonStringContext ctx) { + var text = ctx.getText(); + return Value.asValue(unescapePython(text.substring(1, text.length() - 1))); + } + + @Override + public Value visitDate(ExpressionParser.DateContext ctx) { + var text = ctx.text.getText(); + try { + return Value.asValue(LocalDate.parse(ctx.text.getText())); + } catch (DateTimeParseException e) { + throw new SyntaxErrorException( + "Invalid Date format: " + text, + ctx.getStart().getLine(), + ctx.getStart().getCharPositionInLine()); + } + } + + @Override + public Value visitTime(ExpressionParser.TimeContext ctx) { + var text = ctx.text.getText(); + try { + return Value.asValue(LocalTime.parse(ctx.text.getText())); + } catch (DateTimeParseException e) { + throw new SyntaxErrorException( + "Invalid Time format: " + text, + ctx.getStart().getLine(), + 
ctx.getStart().getCharPositionInLine()); + } + } + + @Override + public Value visitDatetime(ExpressionParser.DatetimeContext ctx) { + var text = ctx.text.getText().replace(' ', 'T'); + var timezone = text.contains("[") ? text.substring(text.indexOf('[')) : ""; + text = text.substring(0, text.length() - timezone.length()); + + var zoneId = + timezone.equals("") + ? ZoneId.systemDefault() + : ZoneId.of(timezone.substring(1, timezone.length() - 1)); + + try { + var zonedDateTime = + ZonedDateTime.parse(text, DateTimeFormatter.ISO_OFFSET_DATE_TIME.withZone(zoneId)); + return Value.asValue(zonedDateTime); + } catch (DateTimeParseException ignored) { + } + + try { + var localDateTime = LocalDateTime.parse(text); + return Value.asValue(localDateTime.atZone(zoneId)); + } catch (DateTimeParseException e) { + throw new SyntaxErrorException( + "Invalid Date_Time format: " + text, + ctx.getStart().getLine(), + ctx.getStart().getCharPositionInLine()); + } + } + + @Override + public Value visitParen(ExpressionParser.ParenContext ctx) { + return visit(ctx.expr()); + } + + @Override + public Value visitBetween(ExpressionParser.BetweenContext ctx) { + var self = visit(ctx.expr(0)); + var lower = visit(ctx.expr(1)); + var upper = visit(ctx.expr(2)); + var condition = executeMethod("between", self, lower, upper); + return ctx.NOT_BETWEEN() != null ? executeMethod("not", condition) : condition; + } + + @Override + public Value visitIn(ExpressionParser.InContext ctx) { + var args = ctx.expr().stream().map(this::visit).toArray(Value[]::new); + var condition = executeMethod("is_in", args); + return ctx.NOT_IN() != null ? executeMethod("not", condition) : condition; + } + + @Override + public Value visitFunction(ExpressionParser.FunctionContext ctx) { + var name = ctx.IDENTIFIER().getText().toLowerCase(); + var args = ctx.expr().stream().map(this::visit).toArray(Value[]::new); + return executeMethod(name, args); + } +} diff --git a/test/Table_Tests/src/Column_Spec.enso b/test/Table_Tests/src/Column_Spec.enso index 16cb2dab8ae6..ce27c36c3337 100644 --- a/test/Table_Tests/src/Column_Spec.enso +++ b/test/Table_Tests/src/Column_Spec.enso @@ -53,11 +53,9 @@ spec = Test.group "Columns" <| test_column.take (Last 7) . to_vector . should_equal expected_2.to_vector test_column.take (Last 0) . to_vector . should_equal expected_3.to_vector - Test.specify "should be able to get the first / head element" <| + Test.specify "should be able to get the first element" <| test_column.first . should_equal 1 - test_column.head . should_equal 1 empty_column.first.should_fail_with Empty_Error - empty_column.head.should_fail_with Empty_Error Test.specify "should be able to get the last element" <| test_column.last . should_equal 6 @@ -85,25 +83,12 @@ spec = Test.group "Columns" <| col = Examples.decimal_column.set_index Examples.integer_column col.index.to_vector . should_equal Examples.integer_column.to_vector - Test.specify "should allow computing variance and standard deviation" <| - const = Column.from_vector 'const' [1, 1, 1, 1, 1] - const.variance . should_equal 0 - const.standard_deviation.should_equal 0 - - rand = Column.from_vector 'random' [10.0, 4.2, 6.8, 6.2, 7.2] - rand.variance . should_equal 4.372 - rand.variance degrees_of_freedom_correction=0 . should_equal 3.4976 - rand.standard_deviation . should_equal 2.090932806 epsilon=(10 ^ -6) - rand.standard_deviation degrees_of_freedom_correction=0 . 
should_equal 1.870187156 epsilon=(10 ^ -6) - - Test.specify "should allow computing the R² score of a prediction" <| - sample = Column.from_vector 'sample' [1,2,3,4,5] - mean_pred = Column.from_vector 'mean' [3,3,3,3,3] - perfect_pred = Column.from_vector 'perfect' [1,2,3,4,5] - bad_pred = Column.from_vector 'bad' [5,4,3,2,1] - - sample.r_squared mean_pred . should_equal 0 - sample.r_squared perfect_pred . should_equal 1 - sample.r_squared bad_pred . should_equal -3 + Test.specify "should result in correct Storage if operation allows it" <| + another = Column.from_vector "Test" [10, 20, 30, 40, 50, 60] + (test_column + 1).storage_type . should_equal Storage.Integer + (test_column - 1).storage_type . should_equal Storage.Integer + (test_column * 2).storage_type . should_equal Storage.Integer + (test_column * 1.5).storage_type . should_equal Storage.Decimal + (test_column + another).storage_type . should_equal Storage.Integer main = Test_Suite.run_main spec diff --git a/test/Table_Tests/src/Database/Codegen_Spec.enso b/test/Table_Tests/src/Database/Codegen_Spec.enso index 8bf160138972..2b8f242e47b8 100644 --- a/test/Table_Tests/src/Database/Codegen_Spec.enso +++ b/test/Table_Tests/src/Database/Codegen_Spec.enso @@ -79,7 +79,7 @@ spec = Test.specify "should support simple text operations" <| b = t1.at "B" add = b + "SUFFIX" - add.to_sql.prepare . should_equal ['SELECT ("T1"."B" + ?) AS "B" FROM "T1" AS "T1"', [["SUFFIX", str]]] + add.to_sql.prepare . should_equal ['SELECT ("T1"."B" || ?) AS "B" FROM "T1" AS "T1"', [["SUFFIX", str]]] ends = b.ends_with "suf" starts = b.starts_with "pref" diff --git a/test/Table_Tests/src/Database/Common_Spec.enso b/test/Table_Tests/src/Database/Common_Spec.enso index 7a4465b10715..bf4013ddcb26 100644 --- a/test/Table_Tests/src/Database/Common_Spec.enso +++ b/test/Table_Tests/src/Database/Common_Spec.enso @@ -12,8 +12,6 @@ from Standard.Test import Test, Problems import project.Database.Helpers.Name_Generator spec prefix connection pending=Nothing = - eps=0.000001 - tables_to_clean = Vector.new_builder upload prefix data = name = Name_Generator.random_name prefix @@ -185,16 +183,6 @@ spec prefix connection pending=Nothing = col.count . should_equal 3 col.count_missing . 
should_equal 2 - Test.group prefix+"Column-wide statistics" pending=pending <| - Test.specify 'should allow computing basic column-wide stats' <| - t7 = upload "T7" <| - Table.new [['price', [0.4, 3.5, Nothing, 6.7, Nothing, 97, Nothing]]] - price = t7.at 'price' - price.sum.should_equal 107.6 epsilon=eps - price.min.should_equal 0.4 epsilon=eps - price.max.should_equal 97 epsilon=eps - price.mean.should_equal 26.9 epsilon=eps - Test.group prefix+"Sorting" pending=pending <| df = upload "clothes" <| Table.new [["id", [1,2,3,4,5,6]], ["name", ["shoes","trousers","dress","skirt","blouse","t-shirt"]], ["quantity", [20,10,20,10,30,30]], ["rating", [3.0,Nothing,7.3,3.0,2.2,Nothing]], ["price", [37.2,42.1,64.1,87.4,13.5,64.2]]] diff --git a/test/Table_Tests/src/Expression_Spec.enso b/test/Table_Tests/src/Expression_Spec.enso new file mode 100644 index 000000000000..156096963ed8 --- /dev/null +++ b/test/Table_Tests/src/Expression_Spec.enso @@ -0,0 +1,290 @@ +from Standard.Base import all +from Standard.Base.Error.Problem_Behavior import Report_Error + +from Standard.Table import Table, Column, Sort_Column, Column_Selector, Sort_Column_Selector, Aggregate_Column +import Standard.Table.Data.Expression.Expression_Error + +import Standard.Visualization + +from Standard.Test import Test, Test_Suite, Problems + +import project.Common_Table_Spec +from project.Util import all + +spec detailed=False = + # Create Test Table + column_a = ["A", [1, 2, 3, 4, 5]] + column_b = ["B", [1.0, 1.5, 2.5, 4, 6]] + column_c = ["C", ["Hello", "World", "Hello World!", "", Nothing]] + column_odd = ["Bad] Name", [True, False, True, False, True]] + test_table = Table.new [column_a, column_b, column_c, column_odd] + + epsilon=0.0000000001 + + tester expression value = + new_table = test_table.set "NEW_COL" expression + new_table.column_count . should_equal (test_table.column_count + 1) + new_column = new_table.at -1 + new_column.name . should_equal "NEW_COL" + expected = case value of + _ : Vector.Vector -> value + _ -> Vector.new test_table.row_count _->value + + values = new_column.to_vector + values . 
each_with_index i->v->
+        e = expected.at i
+        match = case e of
+            _ : Number -> e.equals v epsilon
+            _ -> e == v
+        if match.not then values.should_equal expected
+
+    specify_test label action expression_test=tester =
+        case detailed of
+            True ->
+                specify_tester expression value =
+                    Test.specify (label + ": " + expression) <|
+                        expression_test expression value
+                action specify_tester
+            False ->
+                Test.specify label (action expression_test)
+
+    Test.group "Expression Integer literals" <|
+        specify_test "should be able to add an integer column" expression_test->
+            expression_test "1" 1
+            expression_test "-3" -3
+            expression_test "1_000" 1000
+
+    Test.group "Expression Decimal literals" <|
+        specify_test "should be able to add a decimal column" expression_test->
+            expression_test "1.23" 1.23
+            expression_test "-3.1415" -3.1415
+            expression_test "1_000.456" 1000.456
+
+    Test.group "Expression Boolean literals" <|
+        specify_test "should be able to add a boolean column" expression_test->
+            expression_test "True" True
+            expression_test "true" True
+            expression_test "TRUE" True
+            expression_test "tRuE" True
+            expression_test "False" False
+            expression_test "false" False
+            expression_test "FALSE" False
+            expression_test "FaLsE" False
+
+    Test.group "Expression Text literals" <|
+        specify_test "should be able to add a text column" expression_test->
+            expression_test "'Hello World'" 'Hello World'
+            expression_test "'Hello \'World\''" "Hello 'World'"
+            expression_test '"Hello World"' 'Hello World'
+            expression_test '"Hello ""World"""' 'Hello "World"'
+            expression_test '"Hello \\""World"""' 'Hello \\"World"'
+            expression_test "'Alpha\r\n\gBeta'" 'Alpha\r\n\\gBeta'
+
+    Test.group "Expression Column access" <|
+        specify_test "should be able to get a Column" expression_test->
+            expression_test "[A]" (column_a.at 1)
+            expression_test "[Bad]] Name]" (column_odd.at 1)
+
+    Test.group "Expression Nothing literals" <|
+        specify_test "should be able to add a Nothing column" expression_test->
+            expression_test "null" Nothing
+            expression_test "nUlL" Nothing
+            expression_test "Nothing" Nothing
+            expression_test "NOTHING" Nothing
+
+    Test.group "Expression Date and Time literals" <|
+        specify_test "should be able to add a date or time column" expression_test->
+            expression_test "#2020-12-23#" (Date.new 2020 12 23)
+            expression_test "#12:34#" (Time_Of_Day.new 12 34)
+            expression_test "#12:34:56#" (Time_Of_Day.new 12 34 56)
+            expression_test "#12:34:56.789#" (Time_Of_Day.new 12 34 56 789000000)
+            expression_test "#12:34:56.789000123#" (Time_Of_Day.new 12 34 56 789000123)
+            expression_test "#2020-12-23 12:34#" (Date_Time.new 2020 12 23 12 34)
+            expression_test "#2020-12-23 12:34:56#" (Date_Time.new 2020 12 23 12 34 56)
+            expression_test "#2020-12-23 12:34:56Z[UTC]#" (Date_Time.new 2020 12 23 12 34 56 zone=Time_Zone.utc)
+            expression_test "#2020-12-23 12:34:56+02:30[UTC]#" (Date_Time.new 2020 12 23 10 04 56 zone=Time_Zone.utc)
+            expression_test "#2020-12-23 12:34:56.157+01[UTC]#" (Date_Time.new 2020 12 23 11 34 56 157000000 zone=Time_Zone.utc)
+            expression_test "#2020-12-23T12:34[Europe/Warsaw]#" (Date_Time.new 2020 12 23 12 34 zone=Time_Zone.parse("Europe/Warsaw"))
+
+    Test.group "Expression Arithmetic" <|
+        specify_test "should be able to do basic arithmetic" expression_test->
+            expression_test "1+1" 2
+            expression_test "23-15" 8
+            expression_test "2.5*4.2" 10.5
+            expression_test "1_000.456/2" 500.228
+            expression_test "2^4" 16
+            expression_test "11%3" 2
+
+        specify_test "should be able to do basic arithmetic with 
order" expression_test-> + expression_test "1+1*2+2" 5 + expression_test "23-15/3+6" 24 + expression_test "52.92/4.2^2" 3 + expression_test "(1+1)*2+2" 6 + + specify_test "should be able to do basic arithmetic with whitespace" expression_test-> + expression_test "1 + 1" 2 + expression_test " 23 -15 " 8 + expression_test "2.5* 4.2" 10.5 + expression_test "1_000.456/ 2" 500.228 + expression_test " 2 ^ 4 " 16 + expression_test " 11 % 3 " 2 + expression_test "1+1 * 2" 3 + expression_test "1 + 1*2" 3 + + Test.group "Column Arithmetic" <| + specify_test "should be able to perform arithmetic on columns" expression_test-> + expression_test "[A] + 2" [3, 4, 5, 6, 7] + expression_test "[B] - 2" [-1, -0.5, 0.5, 2, 4] + expression_test "[A] * 4" [4, 8, 12, 16, 20] + expression_test "[B] / 2" [0.5, 0.75, 1.25, 2, 3] + expression_test "[A] + [B]" [2, 3.5, 5.5, 8, 11] + expression_test "[A] - [B]" [0, 0.5, 0.5, 0, -1] + expression_test "[A] * [B]" [1, 3, 7.5, 16, 30] + expression_test "[B] / [A]" [1, 0.75, 0.8333333333333334, 1, 1.2] + expression_test "[A] ^ [B]" [1, 2.8284271247461903, 15.588457268119896, 256, 15625] + expression_test "[A] % [B]" [0, 0.5, 0.5, 0, 5] + expression_test "[A] + [B] + [A]" [3, 5.5, 8.5, 12, 16] + expression_test "[A] - [B] - [A]" [-1, -1.5, -2.5, -4, -6] + expression_test "[A] * [B] * [A]" [1, 6, 22.5, 64, 150] + expression_test "[A] / [B] / [A]" [1.0, 0.6666666666666667, 0.39999999999999999, 0.25, 0.16666666666666667] + expression_test "[A] ^ [B] * [A]" [1, 5.65685424949238, 46.7653718043597, 1024, 78125] + expression_test "[A] % [B] % [A]" [0, 0.5, 0.5, 0, 0] + + specify_test "should be able to perform arithmetic on columns with order" expression_test-> + expression_test "([A] + [B]) * 3" [6, 10.5, 16.5, 24, 33] + expression_test "[A] * (4 + [B])" [5, 11, 19.5, 32, 50] + expression_test "[A] * [B] + [A]" [2, 5, 10.5, 20, 35] + expression_test "[A] + [B] * [B]" [2, 4.25, 9.25, 20, 41] + expression_test "([A] + [B]) / [A]" [2, 1.75, 1.83333333333333, 2, 2.2] + expression_test "[A] / [B] + 2" [3, 3.33333333333333, 3.2, 3, 2.83333333333333] + expression_test "([A] + [B]) % 4" [2, 3.5, 1.5, 0, 3] + expression_test "[A] % [B] + 2" [2, 2.5, 2.5, 2, 7] + expression_test "([A] - [B]) ^ [A]" [0, 0.25, 0.125, 0, -1] + expression_test "[A] ^ ([B] - [A])" [1, 0.707106781186547, 0.577350269189626, 1, 5] + + Test.group "Comparison Operators" <| + specify_test "should be able to compare equality" expression_test-> + expression_test "2 = 1 + 1" True + expression_test "2 == 1 + 1" True + expression_test "[A] = 2" [False, True, False, False, False] + expression_test "[A] == 2" [False, True, False, False, False] + expression_test "3 != 1 + 1" True + expression_test "3 <> 1 + 1" True + expression_test "[A] != 2" [True, False, True, True, True] + expression_test "[A] <> 2" [True, False, True, True, True] + + specify_test "should be able to compare ordering" expression_test-> + expression_test "1 > 2" False + expression_test "1 < 2" True + expression_test "[A] > 2" [False, False, True, True, True] + expression_test "[A] >= 2" [False, True, True, True, True] + expression_test "[A] < 2" [True, False, False, False, False] + expression_test "[A] <= 2" [True, True, False, False, False] + + specify_test "should be able to use between" expression_test-> + expression_test "1 + 1 BETWEEN 1 AND 3" True + expression_test "1 + 1 between 2 AND 3" True + expression_test "1 + 1 bETWEEN 1 AND 2" True + expression_test "[A] between 2 AND 3" [False, True, True, False, False] + expression_test "1 + 1 NOT BETWEEN 1 
AND 3" False + expression_test "[A] not between 2 AND 3" [True, False, False, True, True] + + specify_test "should be able to use in" expression_test-> + expression_test "1 + 1 IN (2, 4, 6)" True + expression_test "[A] IN (2, 4, 6)" [False, True, False, True, False] + expression_test "1 + 1 NOT IN (2, 4, 6)" False + expression_test "[A] NOT IN (2, 4, 6)" [True, False, True, False, True] + expression_test "[A] IN (3)" [False, False, True, False, False] + expression_test "[A] NOT IN (3)" [True, True, False, True, True] + + specify_test "should be able to check null" expression_test-> + expression_test "1 IS NULL" False + expression_test "1 IS NoTHing" False + expression_test "Nothing IS NULL" True + expression_test "1 IS NOT NULL" True + expression_test "Nothing IS NOT NULL" False + expression_test "[A] IS NULL" [False, False, False, False, False] + expression_test "[C] IS NULL" [False, False, False, False, True] + expression_test "[A] IS NOT NULL" [True, True, True, True, True] + expression_test "[C] IS NOT NULL" [True, True, True, True, False] + + specify_test "should be able to check empty" expression_test-> + expression_test "'Hello World' IS EMPTY" False + expression_test "'' IS EMPTY" True + expression_test "Nothing IS EMPTY" True + expression_test "'Hello World' IS NOT EMPTY" True + expression_test "'' IS NOT EMPTY" False + expression_test "Nothing IS NOT EMPTY" False + + Test.group "Text Operators" <| + specify_test "should be able to concatenate text" expression_test-> + expression_test "'Hello ' + 'World'" "Hello World" + expression_test "[C] + ' World'" ["Hello World", "World World", "Hello World! World", " World", Nothing] + expression_test "'Hello ' + [C]" ["Hello Hello", "Hello World", "Hello Hello World!", "Hello ", Nothing] + expression_test "[C] + [C]" ["HelloHello", "WorldWorld", "Hello World!Hello World!", "", Nothing] + + specify_test "should be able to use like" expression_test-> + expression_test "'Hello World' LIKE 'Hello%'" True + expression_test "'Hello' LIKE 'H_llo'" True + expression_test "'Hello' LIKE 'H_l%'" True + expression_test "'Hello' LIKE 'H___o'" True + expression_test "'World' LIKE 'H___o'" False + expression_test "'Hello World' NOT LIKE 'Hello%'" False + expression_test "[C] LIKE 'Hello%'" [True, False, True, False, Nothing] + expression_test "[C] NOT LIKE 'Hello%'" [False, True, False, True, Nothing] + + Test.group "Boolean Operators" <| + specify_test "should be able to AND booleans" expression_test-> + expression_test "True && TRUE" True + expression_test "True AND False" False + expression_test "True && [Bad]] Name]" [True, False, True, False, True] + expression_test "False AND [Bad]] Name]" False + + specify_test "should be able to OR booleans" expression_test-> + expression_test "True || TRUE" True + expression_test "True OR False" True + expression_test "False OR False" False + expression_test "True OR [Bad]] Name]" True + expression_test "False || [Bad]] Name]" [True, False, True, False, True] + + specify_test "should be able to NOT booleans" expression_test-> + expression_test "!TRUE" False + expression_test "Not False" True + expression_test "NOT [Bad]] Name]" [False, True, False, True, False] + + specify_test "should be able to use IF" expression_test-> + expression_test "IF True THEN 1 ELSE 0" 1 + expression_test "IF False THEN 'A' ELSE 'B' END" 'B' + expression_test "IF [Bad]] Name] THEN [A] ELSE [B] ENDIF" [1, 1.5, 3, 4, 5] + + Test.group "Function invocation" <| + specify_test "should be able to call a function with arguments" 
expression_test->
+            expression_test "Not(True)" False
+            expression_test "not(False)" True
+            expression_test "iif(True, 1, 3)" 1
+            expression_test "iif([Bad]] Name], 2, 3)" [2, 3, 2, 3, 2]
+
+        specify_test "should be able to call a variable args function" expression_test->
+            expression_test "min(10, 3, 8)" 3
+            expression_test "max([A], [B], 3)" [3, 3, 3, 4, 6]
+
+    Test.group "Errors should be handled" <|
+        error_tester expression fail_type =
+            test_table.set "NEW_COL" expression on_problems=Problem_Behavior.Report_Error . should_fail_with fail_type
+            test_table.set "NEW_COL" expression . column_count . should_equal test_table.column_count
+
+        specify_test "should fail with Syntax_Error if badly formed" expression_test=error_tester expression_test->
+            expression_test "IIF [A] THEN 1 ELSE 2" Expression_Error.Syntax_Error
+            expression_test "A + B" Expression_Error.Syntax_Error
+            expression_test "#2022-31-21#" Expression_Error.Syntax_Error
+
+        specify_test "should fail with Unsupported_Operation if the function is unknown" expression_test=error_tester expression_test->
+            expression_test "unknown([C])" Expression_Error.Unsupported_Operation
+
+        specify_test "should fail with Argument_Mismatch if given too few arguments" expression_test=error_tester expression_test->
+            expression_test "starts_with([C])" Expression_Error.Argument_Mismatch
+
+        specify_test "should fail with Argument_Mismatch if given too many arguments" expression_test=error_tester expression_test->
+            expression_test "starts_with([C], 'Hello', 'World')" Expression_Error.Argument_Mismatch
+
+main = Test_Suite.run_main (spec True)
diff --git a/test/Table_Tests/src/Main.enso b/test/Table_Tests/src/Main.enso
index d22c805425a8..a8e0449740e8 100644
--- a/test/Table_Tests/src/Main.enso
+++ b/test/Table_Tests/src/Main.enso
@@ -5,8 +5,10 @@ from Standard.Test import Test_Suite
 import project.In_Memory_Tests
 import project.Database.Main as Database_Tests
 import project.Data_Formatter_Spec
+import project.Expression_Spec
 
 main = Test_Suite.run_main <|
     In_Memory_Tests.in_memory_spec
-    Database_Tests.databases_spec
     Data_Formatter_Spec.spec
+    Expression_Spec.spec
+    Database_Tests.databases_spec
diff --git a/test/Table_Tests/src/Table_Spec.enso b/test/Table_Tests/src/Table_Spec.enso
index c569005b512c..9422181059fd 100644
--- a/test/Table_Tests/src/Table_Spec.enso
+++ b/test/Table_Tests/src/Table_Spec.enso
@@ -425,14 +425,6 @@ spec =
         i.at "Items Count" . to_vector . should_equal [3, 2, 4]
         i.at "Storage Type" . to_vector . should_equal [Storage.Text, Storage.Integer, Storage.Any]
 
-    Test.group "Column-wide statistics" <|
-        Test.specify 'should allow computing basic column-wide stats' <|
-            price = Column.from_vector 'price' [0.4, 3.5, Nothing, 6.7, Nothing, 97, Nothing]
-            price.sum.should_equal 107.6
-            price.min.should_equal 0.4
-            price.max.should_equal 97
-            price.mean.should_equal 26.9
-
     Test.group "Sorting Tables" <|
         df = (enso_project.data / "clothes.csv").read
 
diff --git a/tools/legal-review/Table/org.antlr.antlr4-runtime-4.10.1/copyright-ignore b/tools/legal-review/Table/org.antlr.antlr4-runtime-4.10.1/copyright-ignore
new file mode 100644
index 000000000000..36df61a14780
--- /dev/null
+++ b/tools/legal-review/Table/org.antlr.antlr4-runtime-4.10.1/copyright-ignore
@@ -0,0 +1 @@
+~ Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
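The Expression_Spec suite above doubles as a reference for the new expression syntax. A minimal usage sketch in Enso (the table contents and the exact shape of the `filter` call are illustrative assumptions based on these tests and the patch summary, not code from this patch):

    from Standard.Table import Table

    example_expressions =
        table = Table.new [["A", [1, 2, 3]], ["B", [10, 20, 30]]]
        # Derive a new column from an expression string, mirroring the
        # `set "NEW_COL" expression` form exercised by the tests above.
        with_total = table.set "Total" "[A] * [B] + 1"
        # Keep only the rows for which the expression evaluates to True.
        with_total.filter "[Total] > 20"
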
diff --git a/tools/legal-review/Table/org.antlr.antlr4-runtime-4.10.1/copyright-keep b/tools/legal-review/Table/org.antlr.antlr4-runtime-4.10.1/copyright-keep new file mode 100644 index 000000000000..960b2a8926c1 --- /dev/null +++ b/tools/legal-review/Table/org.antlr.antlr4-runtime-4.10.1/copyright-keep @@ -0,0 +1 @@ +Copyright (c) 2012-2017 The ANTLR Project. All rights reserved. diff --git a/tools/legal-review/Table/report-state b/tools/legal-review/Table/report-state index f7615b777780..565a0f0f3b28 100644 --- a/tools/legal-review/Table/report-state +++ b/tools/legal-review/Table/report-state @@ -1,3 +1,3 @@ -3D20F317407799FC2002CA1A005A2F5CDBFE3A082AD7BA59D08F04270EF9B88C -0DF140BB506529B02B8A79B1E32040D7B4515E690EB2C8F32B7F74DD0E821719 +840031EDBA6D7166EE1BABF8D1AB65F7219F5258683A2D487D12D3D4B8387BD7 +4BC5787A7330388C3B8BF8C5955FEFB57E57CB47DFAA243180AF0DA066E3D0D6 0 From cee7f27dc1a5b372b5763488b555e67243a34aa0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Wojciech=20Dani=C5=82o?= Date: Tue, 8 Nov 2022 19:15:05 +0100 Subject: [PATCH 2/4] Text rendering quality improvements. (#3855) --- CHANGELOG.md | 4 + Cargo.lock | 7 + .../component/text/src/component/text.rs | 8 +- .../component/text/src/font/family/src/lib.rs | 2 +- .../component/text/src/font/glsl/glyph.glsl | 52 ++++---- .../text/src/font/glsl/glyph_mac.glsl | 59 --------- .../ensogl/component/text/src/font/glyph.rs | 122 ++++++++++++++++-- .../component/text/src/font/msdf/build.rs | 2 +- lib/rust/ensogl/component/text/src/lib.rs | 1 + .../core/src/animation/frp/animation.rs | 1 + lib/rust/ensogl/example/text-area/src/lib.rs | 67 +++++++++- lib/rust/frp/Cargo.toml | 1 + lib/rust/frp/src/io/keyboard.rs | 3 +- lib/rust/web/Cargo.toml | 2 + lib/rust/web/src/binding/mock.rs | 17 +++ lib/rust/web/src/platform.rs | 2 +- 16 files changed, 242 insertions(+), 108 deletions(-) delete mode 100644 lib/rust/ensogl/component/text/src/font/glsl/glyph_mac.glsl diff --git a/CHANGELOG.md b/CHANGELOG.md index 342ff3ec046c..a6c32edba3ef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -89,6 +89,9 @@ instances are now reusing the shape shaders and the same sprite system under the hood. This drastically reduces the amount of required draw calls for scenes with a lot of text. +- [Text rendering quality improvements][3855]. Glyphs are now hinted in a better + way. Also, additional fine-tuning is performed per font and per host operating + system. 
#### Enso Standard Library @@ -369,6 +372,7 @@ [3804]: https://github.com/enso-org/enso/pull/3804 [3818]: https://github.com/enso-org/enso/pull/3818 [3776]: https://github.com/enso-org/enso/pull/3776 +[3855]: https://github.com/enso-org/enso/pull/3855 [3836]: https://github.com/enso-org/enso/pull/3836 [3782]: https://github.com/enso-org/enso/pull/3782 diff --git a/Cargo.lock b/Cargo.lock index 80b17a0c2d3b..59dc250e2441 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2018,6 +2018,7 @@ dependencies = [ "nalgebra 0.26.2", "percent-encoding 2.1.0", "unicode-segmentation", + "unidecode", "wasm-bindgen", "web-sys", ] @@ -7128,6 +7129,12 @@ version = "0.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" +[[package]] +name = "unidecode" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "402bb19d8e03f1d1a7450e2bd613980869438e0666331be3e073089124aa1adc" + [[package]] name = "unreachable" version = "1.0.0" diff --git a/lib/rust/ensogl/component/text/src/component/text.rs b/lib/rust/ensogl/component/text/src/component/text.rs index b25967c622f3..910650818482 100644 --- a/lib/rust/ensogl/component/text/src/component/text.rs +++ b/lib/rust/ensogl/component/text/src/component/text.rs @@ -1321,6 +1321,10 @@ impl TextModel { if truncated { break; } + // FIXME[WD]: This is a workaround for a bug in the MSDFgen binding. It + // should be fixed after updating the MSDFgen library. + // See: https://www.pivotaltracker.com/n/projects/2539304/stories/183747513 + let magic_scale = 2048.0 / shaped_glyph_set.units_per_em as f32; for shaped_glyph in &shaped_glyph_set.glyphs { let glyph_byte_start = shaped_glyph.start_byte(); // Drop styles assigned to skipped bytes. 
One byte will be skipped @@ -1361,11 +1365,11 @@ impl TextModel { glyph.set_color(style.color); glyph.skip_color_animation(); glyph.set_sdf_weight(style.sdf_weight.value); - glyph.set_size(style.size); + glyph.set_size(formatting::Size(style.size.value * magic_scale)); glyph.set_properties(shaped_glyph_set.non_variable_variations); glyph.set_glyph_id(shaped_glyph.id()); glyph.x_advance.set(x_advance); - glyph.view.set_position_xy(glyph_render_offset); + glyph.view.set_position_xy(glyph_render_offset * magic_scale); glyph.set_position_xy(Vector2(glyph_offset_x, 0.0)); glyph_offset_x += x_advance; diff --git a/lib/rust/ensogl/component/text/src/font/family/src/lib.rs b/lib/rust/ensogl/component/text/src/font/family/src/lib.rs index 749ec466b570..f2eea3115dd2 100644 --- a/lib/rust/ensogl/component/text/src/font/family/src/lib.rs +++ b/lib/rust/ensogl/component/text/src/font/family/src/lib.rs @@ -67,7 +67,7 @@ pub use owned_ttf_parser::Width; /// eliminate accidental mistakes, the same way as it's done in CSS: /// https://stackoverflow.com/questions/17967371/are-property-values-in-css-case-sensitive #[allow(missing_docs)] -#[derive(Clone, Debug, Display, Hash, PartialEq, Eq)] +#[derive(Clone, Debug, Deref, Display, Hash, PartialEq, Eq)] pub struct Name { pub normalized: String, } diff --git a/lib/rust/ensogl/component/text/src/font/glsl/glyph.glsl b/lib/rust/ensogl/component/text/src/font/glsl/glyph.glsl index aa5d33fb2fd9..248c965d1aa8 100644 --- a/lib/rust/ensogl/component/text/src/font/glsl/glyph.glsl +++ b/lib/rust/ensogl/component/text/src/font/glsl/glyph.glsl @@ -1,51 +1,47 @@ +const bool DEBUG = false; + highp float median(highp vec3 v) { return max(min(v.x, v.y), min(max(v.x, v.y), v.z)); } -/// Compute the uv coordinates of the MSDF texture fragment where it should be sampled. -/// -/// Essentially, it's an input_uv which is a bit transformed to "cut off" the half of the MSDF cell from each side. This -/// way we have better pixel alignment on low resolutions. 
-highp vec2 msdf_fragment_uv() { - highp vec2 msdf_cell_size = 1.0/input_msdf_size; - highp vec2 offset = msdf_cell_size/2.0; - highp vec2 scale = 1.0 - msdf_cell_size; - return offset + input_uv * scale; -} - -highp vec2 get_texture_coord() { - highp vec2 msdf_fragment_size = input_msdf_size / vec2(textureSize(input_atlas, 0)); - highp vec2 offset = vec2(0.0, input_atlas_index) * msdf_fragment_size; - return offset + msdf_fragment_uv() * msdf_fragment_size; +highp vec2 uv_to_texture_coord(vec2 uv) { + highp vec2 texture_glyph_offset = input_msdf_size / vec2(textureSize(input_atlas, 0)); + highp vec2 offset = vec2(0.0, input_atlas_index) * texture_glyph_offset; + return offset + uv * texture_glyph_offset; } highp float get_fatting() { - highp vec2 local_to_px_ratio = 1.0 / fwidth(input_local.xy); - highp float font_size_px = input_font_size * (local_to_px_ratio.x + local_to_px_ratio.y) / 2.0; - highp float fatting = input_sdf_weight; + highp vec2 local_to_px_ratio = 1.0 / fwidth(input_local.xy); + highp float font_size_px = input_font_size * (local_to_px_ratio.x + local_to_px_ratio.y) / 2.0; + highp float fatting = input_sdf_weight; return font_size_px * fatting; } -highp float msdf_alpha() { - highp vec2 tex_coord = get_texture_coord(); - highp vec2 msdf_unit_tex = input_msdf_range / vec2(textureSize(input_atlas,0)); - highp vec2 msdf_unit_px = msdf_unit_tex / fwidth(tex_coord); +highp float get_alpha(vec2 uv) { + highp vec2 tex_coord = uv_to_texture_coord(uv); + highp vec2 msdf_unit_tex = input_msdf_range / vec2(textureSize(input_atlas, 0)); + highp vec2 msdf_unit_px = msdf_unit_tex / fwidth(tex_coord); highp float avg_msdf_unit_px = (msdf_unit_px.x + msdf_unit_px.y) / 2.0; - // We use this parameter to fatten somewhat font on low resolutions. The thershold and exact - // value of this fattening was picked by trial an error, searching for best rendering effect. - highp float dpi_dilate = avg_msdf_unit_px < input_msdf_range*0.49 ? 1.0 : 0.0; highp vec3 msdf_sample = texture(input_atlas,tex_coord).rgb; - highp float sig_dist = median(msdf_sample) - 0.5; + highp float sig_dist = median(msdf_sample) - 0.5; highp float sig_dist_px = sig_dist * avg_msdf_unit_px + get_fatting(); - highp float opacity = 0.5 + sig_dist_px + dpi_dilate * 0.08; + highp float opacity = 0.5 + sig_dist_px; + opacity += input_opacity_increase; opacity = clamp(opacity, 0.0, 1.0); + opacity = pow(opacity, input_opacity_exponent); return opacity; } highp vec4 color_from_msdf() { highp vec4 color = input_color; - color.a *= msdf_alpha(); + color.a *= get_alpha(input_uv); color.rgb *= color.a; // premultiply + + if(DEBUG) { + vec4 bg_box = vec4(input_uv * input_size / 10.0, 0.0, 1.0); + color = (color * 0.7 + bg_box * 0.3); + } return color; + } diff --git a/lib/rust/ensogl/component/text/src/font/glsl/glyph_mac.glsl b/lib/rust/ensogl/component/text/src/font/glsl/glyph_mac.glsl deleted file mode 100644 index fc848fb7cb8b..000000000000 --- a/lib/rust/ensogl/component/text/src/font/glsl/glyph_mac.glsl +++ /dev/null @@ -1,59 +0,0 @@ -highp float median(highp vec3 v) { - return max(min(v.x, v.y), min(max(v.x, v.y), v.z)); -} - -/// Compute the uv coordinates of the MSDF texture fragment where it should be sampled. -/// -/// Essentially, it's an input_uv which is a bit transformed to "cut off" the half of the MSDF cell from each side. This -/// way we have better pixel alignment on low resolutions. 
-highp vec2 msdf_fragment_uv() { - highp vec2 msdf_cell_size = 1.0/input_msdf_size; - highp vec2 offset = msdf_cell_size/2.0; - highp vec2 scale = 1.0 - msdf_cell_size; - return offset + input_uv * scale; -} - - -highp vec2 get_texture_coord() { - highp vec2 msdf_fragment_size = input_msdf_size / vec2(textureSize(input_atlas,0)); - highp vec2 offset = vec2(0.0, input_atlas_index) * msdf_fragment_size; - return offset + get_scaled_uv() * msdf_fragment_size; -} - -highp float get_fatting() { - highp vec2 local_to_px_ratio = 1.0 / fwidth(input_local.xy); - highp float font_size_px = input_font_size * (local_to_px_ratio.x + local_to_px_ratio.y) / 2.0; - highp float fatting = input_sdf_weight; - return font_size_px * fatting; -} - -// FIXME -// The following function uses non-standard font adjustiments (lines marked with FIXME). They make -// the font bolder and more crisp. It was designed to look nice on nodes in the GUI but leaves the -// fonts with a non-standard look (not the one defined by the font author). This should be -// revisited, generalized, and refactored out in the future. -highp float msdf_alpha() { - highp vec2 tex_coord = get_texture_coord(); - highp vec2 msdf_unit_tex = input_msdf_range / vec2(textureSize(input_atlas,0)); - highp vec2 msdf_unit_px = msdf_unit_tex / fwidth(tex_coord); - highp float avg_msdf_unit_px = (msdf_unit_px.x + msdf_unit_px.y) / 2.0; - - // We use this parameter to fatten somewhat font on low resolutions. The thershold and exact - // value of this fattening was picked by trial an error, searching for best rendering effect. - highp float dpi_dilate = avg_msdf_unit_px < input_msdf_range*0.49 ? 1.0 : 0.0; - highp vec3 msdf_sample = texture(input_atlas,tex_coord).rgb; - highp float sig_dist = median(msdf_sample) - 0.5; - highp float sig_dist_px = sig_dist * avg_msdf_unit_px + get_fatting(); - highp float opacity = 0.5 + sig_dist_px + dpi_dilate * 0.08; - opacity += 0.6; // FIXME: Widen + sharpen - opacity = clamp(opacity, 0.0, 1.0); - opacity = pow(opacity,3.0); // FIXME: sharpen - return opacity; -} - -highp vec4 color_from_msdf() { - highp vec4 color = input_color; - color.a *= msdf_alpha(); - color.rgb *= color.a; // premultiply - return color; -} diff --git a/lib/rust/ensogl/component/text/src/font/glyph.rs b/lib/rust/ensogl/component/text/src/font/glyph.rs index 741af6984002..0de4a5a18650 100644 --- a/lib/rust/ensogl/component/text/src/font/glyph.rs +++ b/lib/rust/ensogl/component/text/src/font/glyph.rs @@ -21,15 +21,61 @@ use ensogl_core::display::scene::Scene; use ensogl_core::display::symbol::material::Material; use ensogl_core::display::symbol::shader::builder::CodeTemplate; use ensogl_core::frp; +use ensogl_core::frp::io::keyboard::Key; use ensogl_core::system::gpu::texture; #[cfg(target_arch = "wasm32")] use ensogl_core::system::gpu::Texture; +use ensogl_core::system::web::platform; use font::FontWithAtlas; use font::GlyphRenderInfo; use font::Style; use font::Weight; use font::Width; use owned_ttf_parser::GlyphId; +use std::sync::LazyLock; + + + +// =============== +// === Hinting === +// =============== + +/// System- and font-specific hinting properties. They affect the way the font is rasterized. In +/// order to understand how these variables affect the font rendering, see the GLSL file (the +/// [`FUNCTIONS`] variable). +/// +/// Also, you can interactively change the values by holding `ctrl + alt + o` or `ctrl + alt + e` +/// keys and using the `+` and `-` key to increment or decrement the value. 
+#[allow(missing_docs)]
+#[derive(Clone, Copy, Debug)]
+pub struct Hinting {
+    pub opacity_increase: f32,
+    pub opacity_exponent: f32,
+}
+
+impl Default for Hinting {
+    fn default() -> Self {
+        Self { opacity_increase: 0.0, opacity_exponent: 1.0 }
+    }
+}
+
+static HINTING_MAP: LazyLock<HashMap<(Option<platform::Platform>, &'static str), Hinting>> =
+    LazyLock::new(|| {
+        HashMap::from([
+            ((Some(platform::Platform::MacOS), "mplus1p"), Hinting {
+                opacity_increase: 0.4,
+                opacity_exponent: 4.0,
+            }),
+            ((Some(platform::Platform::Windows), "mplus1p"), Hinting {
+                opacity_increase: 0.3,
+                opacity_exponent: 3.0,
+            }),
+            ((Some(platform::Platform::Linux), "mplus1p"), Hinting {
+                opacity_increase: 0.3,
+                opacity_exponent: 3.0,
+            }),
+        ])
+    });
@@ -59,12 +105,7 @@ ensogl_core::define_endpoints_2! {
 #[allow(missing_docs)]
 pub struct SystemData {}
 
-#[cfg(target_os = "macos")]
-const FUNCTIONS: &str = include_str!("glsl/glyph_mac.glsl");
-
-#[cfg(not(target_os = "macos"))]
 const FUNCTIONS: &str = include_str!("glsl/glyph.glsl");
-
 const MAIN: &str = "output_color = color_from_msdf(); output_id=vec4(0.0,0.0,0.0,0.0);";
 
 impl SystemData {
@@ -80,6 +121,9 @@ impl SystemData {
         material.add_input("font_size", 10.0);
         material.add_input("color", Vector4::new(0.0, 0.0, 0.0, 1.0));
         material.add_input("sdf_weight", 0.0);
+        // === Adjusting look and feel of different fonts on different operating systems ===
+        material.add_input("opacity_increase", 0.0);
+        material.add_input("opacity_exponent", 1.0);
         // TODO[WD]: We need to use this output, as we need to declare the same amount of shader
         //     outputs as the number of attachments to framebuffer. We should manage this more
         //     intelligent. For example, we could allow defining output shader fragments,
@@ -118,7 +162,15 @@ mod glyph_shape {
     ensogl_core::shape! {
         type SystemData = SystemData;
         type ShapeData = ShapeData;
-        (style: Style, font_size: f32, color: Vector4, sdf_weight: f32, atlas_index: f32) {
+        (
+            style: Style,
+            font_size: f32,
+            color: Vector4,
+            sdf_weight: f32,
+            atlas_index: f32,
+            opacity_increase: f32,
+            opacity_exponent: f32
+        ) {
             // The shape does not matter. The [`SystemData`] defines custom GLSL code.
             Plane().into()
         }
@@ -132,7 +184,6 @@ impl ensogl_core::display::shape::CustomSystemData<glyph_shape::Shape> for Syste
         shape_data: &ShapeData,
     ) -> Self {
         let font = &shape_data.font;
-        let size = font::msdf::Texture::size();
         let sprite_system = &data.model.sprite_system;
         let symbol = sprite_system.symbol();
@@ -522,17 +573,72 @@ impl System {
         let color_animation = color::Animation::new(frp.network());
         let x_advance = default();
         let attached_to_cursor = default();
+        let platform = platform::current();
+        let hinting = HINTING_MAP.get(&(platform, font.name())).copied().unwrap_or_default();
         let view = glyph_shape::View::new_with_data(ShapeData { font });
         view.color.set(Vector4::new(0.0, 0.0, 0.0, 0.0));
         view.atlas_index.set(0.0);
+        view.opacity_increase.set(hinting.opacity_increase);
+        view.opacity_exponent.set(hinting.opacity_exponent);
         display_object.add_child(&view);
         let network = frp.network();
-        frp::extend! {network
+        let scene = scene();
+        let keyboard = &scene.keyboard.frp;
+        frp::extend! { network
             frp.private.output.target_color <+ frp.set_color;
             color_animation.target <+ frp.set_color;
             color_animation.skip <+ frp.skip_color_animation;
             eval color_animation.value ((c) view.color.set(Rgba::from(c).into()));
+
+
+            // === Debug mode ===
+            // Allows changing hinting parameters on the fly. See [`Hinting`] to learn more.
+ + debug_mode <- all_with(&keyboard.is_control_down, &keyboard.is_alt_down, |a, b| *a && *b); + plus <- keyboard.down.map(|t| t == &Key::Character("=".into())); + minus <- keyboard.down.map(|t| t == &Key::Character("-".into())); + + key_e_down <- keyboard.down.map(|t| t == &Key::Character("e".into())).on_true(); + key_e_up <- keyboard.up.map(|t| t == &Key::Character("e".into())).on_true(); + key_e <- bool(&key_e_up, &key_e_down); + + key_o_down <- keyboard.down.map(|t| t == &Key::Character("o".into())).on_true(); + key_o_up <- keyboard.up.map(|t| t == &Key::Character("o".into())).on_true(); + key_o <- bool(&key_o_up, &key_o_down); + + plus2 <- all_with(&plus, &debug_mode, |a, b| *a && *b); + minus2 <- all_with(&minus, &debug_mode, |a, b| *a && *b); + + plus_e <- keyboard.down.gate(&plus2).gate(&key_e); + minus_e <- keyboard.down.gate(&minus2).gate(&key_e); + + plus_o <- keyboard.down.gate(&plus2).gate(&key_o); + minus_o <- keyboard.down.gate(&minus2).gate(&key_o); + + eval_ plus_o (view.opacity_increase.modify(|t| { + let opacity_increase = t + 0.01; + warn!("opacity_increase: {opacity_increase}"); + opacity_increase + })); + + eval_ minus_o (view.opacity_increase.modify(|t| { + let opacity_increase = t - 0.01; + warn!("opacity_increase: {opacity_increase}"); + opacity_increase + })); + + eval_ plus_e (view.opacity_exponent.modify(|t| { + let opacity_exponent = t + 0.1; + warn!("opacity_exponent: {opacity_exponent}"); + opacity_exponent + })); + + eval_ minus_e (view.opacity_exponent.modify(|t| { + let opacity_exponent = t - 0.1; + warn!("opacity_exponent: {opacity_exponent}"); + opacity_exponent + })); } Glyph { diff --git a/lib/rust/ensogl/component/text/src/font/msdf/build.rs b/lib/rust/ensogl/component/text/src/font/msdf/build.rs index 19ad0db11670..06b3f4f6e160 100644 --- a/lib/rust/ensogl/component/text/src/font/msdf/build.rs +++ b/lib/rust/ensogl/component/text/src/font/msdf/build.rs @@ -9,7 +9,7 @@ use ide_ci::log::setup_logging; pub const PACKAGE: GithubRelease<&str> = GithubRelease { project_url: "https://github.com/enso-org/msdfgen-wasm", - version: "v1.4", + version: "v1.4.1", filename: "msdfgen_wasm.js", }; diff --git a/lib/rust/ensogl/component/text/src/lib.rs b/lib/rust/ensogl/component/text/src/lib.rs index 8c6937545519..828d1a05fd36 100644 --- a/lib/rust/ensogl/component/text/src/lib.rs +++ b/lib/rust/ensogl/component/text/src/lib.rs @@ -16,6 +16,7 @@ #![feature(let_chains)] #![feature(step_trait)] #![feature(specialization)] +#![feature(once_cell)] // === Standard Linter Configuration === #![deny(non_ascii_idents)] #![warn(unsafe_code)] diff --git a/lib/rust/ensogl/core/src/animation/frp/animation.rs b/lib/rust/ensogl/core/src/animation/frp/animation.rs index 798d7df599cc..fa3891a87bdc 100644 --- a/lib/rust/ensogl/core/src/animation/frp/animation.rs +++ b/lib/rust/ensogl/core/src/animation/frp/animation.rs @@ -17,6 +17,7 @@ pub mod hysteretic; pub mod overshoot; + // ================= // === Animation === // ================= diff --git a/lib/rust/ensogl/example/text-area/src/lib.rs b/lib/rust/ensogl/example/text-area/src/lib.rs index 624a8f39637d..bd8417c86ada 100644 --- a/lib/rust/ensogl/example/text-area/src/lib.rs +++ b/lib/rust/ensogl/example/text-area/src/lib.rs @@ -29,6 +29,9 @@ use ensogl_core::application::command::FrpNetworkProvider; use ensogl_core::application::Application; use ensogl_core::data::color; use ensogl_core::display::navigation::navigator::Navigator; +use ensogl_core::display::Scene; +use ensogl_core::frp::io::timer::DocumentOps; +use 
ensogl_core::frp::io::timer::HtmlElementOps;
 use ensogl_core::system::web;
 use ensogl_core::system::web::Closure;
 use ensogl_core::system::web::JsCast;
@@ -169,8 +172,12 @@ fn init(app: Application) {
     let zalgo = "Z̮̞̠͙͔ͅḀ̗̞͈̻̗Ḷ͙͎̯̹̞͓G̻O̭̗̮";
     let _text = quote.to_string() + snowman + zalgo;
     let _text = "test".to_string();
-    area.set_content("aஓbc🧑🏾de\nfghij\nklmno\npqrst\n01234\n56789");
-    area.set_property_default(color::Rgba::red());
+    let content = "abcdefghijk";
+    // This is a testing string left here for convenience.
+    // area.set_content("aஓbc🧑🏾de\nfghij\nklmno\npqrst\n01234\n56789");
+    area.set_content(content);
+    area.set_font("mplus1p");
+    area.set_property_default(color::Rgba::black());
     area.focus();
     area.hover();
@@ -182,15 +189,45 @@ fn init(app: Application) {
     app.display.default_scene.add_child(&area);
     let area = Rc::new(RefCell::new(Some(area)));
-    init_debug_hotkeys(&area);
+    // Initialization of HTML div displaying the same text. It allows for switching between
+    // WebGL and HTML versions to compare them.
+    let style = web::document.create_element_or_panic("style");
+    let css = web::document.create_text_node("@import url('https://fonts.googleapis.com/css2?family=M+PLUS+1p:wght@400;700&display=swap');");
+    style.append_child(&css).unwrap();
+    web::document.head().unwrap().append_child(&style).unwrap();
+    let div = web::document.create_div_or_panic();
+    div.set_style_or_warn("width", "100px");
+    div.set_style_or_warn("height", "100px");
+    div.set_style_or_warn("position", "absolute");
+    div.set_style_or_warn("z-index", "100");
+    div.set_style_or_warn("font-family", "'M PLUS 1p'");
+    div.set_style_or_warn("font-size", "12px");
+    div.set_style_or_warn("display", "none");
+    div.set_inner_text(content);
+    web::document.body().unwrap().append_child(&div).unwrap();
+
+    init_debug_hotkeys(&app.display.default_scene, &area, &div);
+
+    let scene = scene.clone_ref();
+    let handler = app.display.on.before_frame.add(move |_time| {
+        let shape = scene.dom.shape();
+        div.set_style_or_warn("left", &format!("{}px", shape.width / 2.0));
+        div.set_style_or_warn("top", &format!("{}px", shape.height / 2.0 - 0.5));
+    });
+
+    mem::forget(handler);
     mem::forget(navigator);
     mem::forget(app);
 }
 
-fn init_debug_hotkeys(area: &Rc<RefCell<Option<Text>>>) {
+fn init_debug_hotkeys(scene: &Scene, area: &Rc<RefCell<Option<Text>>>, div: &web::HtmlDivElement) {
+    let html_version = Rc::new(Cell::new(false));
+    let scene = scene.clone_ref();
     let area = area.clone_ref();
-    let closure: Closure<dyn Fn(JsValue)> = Closure::new(move |val: JsValue| {
+    let div = div.clone();
+    let mut fonts_cycle = ["dejavusans", "dejavusansmono", "mplus1p"].iter().cycle();
+    let closure: Closure<dyn Fn(JsValue)> = Closure::new(move |val: JsValue| {
         let event = val.unchecked_into::<web::KeyboardEvent>();
         if event.ctrl_key() {
             let key = event.code();
@@ -200,10 +237,22 @@ fn init_debug_hotkeys(area: &Rc<RefCell<Option<Text>>>) {
             }
         }
         if let Some(area) = &*area.borrow() {
+            div.set_inner_text(&area.content.value().to_string());
             if event.ctrl_key() {
                 let key = event.code();
                 warn!("{:?}", key);
-                if key == "Digit1" {
+                if key == "KeyH" {
+                    html_version.set(!html_version.get());
+                    if html_version.get() {
+                        warn!("Showing the HTML version.");
+                        area.unset_parent();
+                        div.set_style_or_warn("display", "block");
+                    } else {
+                        warn!("Showing the WebGL version.");
+                        scene.add_child(&area);
+                        div.set_style_or_warn("display", "none");
+                    }
+                } else if key == "Digit1" {
                     if event.shift_key() {
                         area.set_property_default(color::Rgba::black());
                     } else {
@@ -243,7 +292,7 @@ fn init_debug_hotkeys(area: &Rc<RefCell<Option<Text>>>) {
                 } else {
                     area.set_property(buffer::RangeLike::Selections, formatting::Weight::Bold);
                 }
-            } else if key == "KeyH" {
+            } else if key == "KeyN" {
                 if event.shift_key() {
                     area.set_property_default(formatting::SdfWeight(0.02));
                 } else {
@@ -258,6 +307,10 @@ fn init_debug_hotkeys(area: &Rc<RefCell<Option<Text>>>) {
                 } else {
                     area.set_property(buffer::RangeLike::Selections, formatting::Style::Italic);
                 }
+            } else if key == "KeyF" {
+                let font = fonts_cycle.next().unwrap();
+                warn!("Switching to font '{}'.", font);
+                area.set_font(font);
             } else if key == "Equal" {
                 if event.shift_key() {
                     area.set_property_default(formatting::Size(16.0));
diff --git a/lib/rust/frp/Cargo.toml b/lib/rust/frp/Cargo.toml
index b4a08011939d..e26924864382 100644
--- a/lib/rust/frp/Cargo.toml
+++ b/lib/rust/frp/Cargo.toml
@@ -18,6 +18,7 @@ keyboard-types = { version = "0.5.0" }
 nalgebra = { version = "0.26.1" }
 percent-encoding = { version = "2.1.0" }
 unicode-segmentation = { version = "1.6.0" }
+unidecode = { version = "0.3.0" }
 # We require exact version of wasm-bindgen because we do patching final js in our build process,
 # and this is vulnerable to any wasm-bindgen version change.
 wasm-bindgen = { version = "0.2.78", features = ["nightly"] }
diff --git a/lib/rust/frp/src/io/keyboard.rs b/lib/rust/frp/src/io/keyboard.rs
index 5c543d5af1ce..a5ac491beb9f 100644
--- a/lib/rust/frp/src/io/keyboard.rs
+++ b/lib/rust/frp/src/io/keyboard.rs
@@ -9,6 +9,7 @@ use crate::io::js::Listener;
 use enso_web::KeyboardEvent;
 use inflector::Inflector;
 use unicode_segmentation::UnicodeSegmentation;
+use unidecode::unidecode;
 
 
 
@@ -122,7 +123,7 @@ impl Key {
         if key == " " {
             Self::Space
         } else if key.graphemes(true).count() == 1 {
-            Self::Character(key)
+            Self::Character(unidecode(&key).to_lowercase())
         } else {
             let key = KEY_NAME_MAP.get(key_ref).cloned().unwrap_or(Self::Other(key));
             match (key, code) {
diff --git a/lib/rust/web/Cargo.toml b/lib/rust/web/Cargo.toml
index be70f513e3f1..7991f418eb91 100644
--- a/lib/rust/web/Cargo.toml
+++ b/lib/rust/web/Cargo.toml
@@ -33,6 +33,7 @@ features = [
   'Element',
   'HtmlElement',
   'HtmlDivElement',
+  'HtmlHeadElement',
   'HtmlCollection',
   'CssStyleDeclaration',
   'HtmlCanvasElement',
@@ -49,6 +50,7 @@ features = [
   'Event',
   'MouseEvent',
   'EventTarget',
+  'Text',
   'DomRect',
   'DomRectReadOnly',
   'Location',
diff --git a/lib/rust/web/src/binding/mock.rs b/lib/rust/web/src/binding/mock.rs
index 72ce57d64233..8d37f409414e 100644
--- a/lib/rust/web/src/binding/mock.rs
+++ b/lib/rust/web/src/binding/mock.rs
@@ -464,6 +464,8 @@ mock_data! { Document => EventTarget
     fn body(&self) -> Option<HtmlElement>;
     fn create_element(&self, local_name: &str) -> Result<Element, JsValue>;
     fn get_element_by_id(&self, element_id: &str) -> Option<Element>;
+    fn create_text_node(&self, data: &str) -> Text;
+    fn head(&self) -> Option<HtmlHeadElement>;
 }
 
 
@@ -484,6 +486,11 @@ mock_data! { Window => EventTarget
 }
 
 
+// === HtmlHeadElement ===
+mock_data! { HtmlHeadElement => HtmlElement
+}
+
+
 // === Function ===
 mock_data! { Function
     fn call1(&self, context: &JsValue, arg1: &JsValue) -> Result<JsValue, JsValue>;
@@ -599,6 +606,16 @@ impl From for EventTarget {
 }
 
 
+// === Text ===
+mock_data! { Text => CharacterData }
+
+
+
+// === CharacterData ===
+mock_data! { CharacterData => Node }
+
+
+
 // === HtmlCanvasElement ===
 mock_data! { HtmlCanvasElement => HtmlElement
     fn width(&self) -> u32;
diff --git a/lib/rust/web/src/platform.rs b/lib/rust/web/src/platform.rs
index f8a551564226..c6391004b248 100644
--- a/lib/rust/web/src/platform.rs
+++ b/lib/rust/web/src/platform.rs
@@ -9,7 +9,7 @@ use std::convert::TryFrom;
 // ================
 
 /// This enumeration lists all the supported platforms.
-#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
 #[allow(missing_docs)]
 pub enum Platform {
     Android,

From 483028dbb0393e28eb1ab027d688223f835e729d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Micha=C5=82=20Wawrzyniec=20Urba=C5=84czyk?=
Date: Wed, 9 Nov 2022 00:15:26 +0100
Subject: [PATCH 3/4] Cloud dispatch & fixes (#3843)

This PR updates the build script:
* fixed an issue where the program version check was not properly triggering;
* improved the `git-clean` command to correctly clear Scala artifacts;
* added a `run.ps1` wrapper to the build script that works better with PowerShell than `run.cmd`;
* increased timeouts to work around failures on macOS nightly builds;
* replaced deprecated GitHub Actions APIs (set-output) with their new equivalents;
* worked around an issue with electron builder (python2 lookup) on newer macOS runner images;
* GUI and backend dispatches to the cloud were completed;
* release workflow allows creating RC releases.
---
 .github/workflows/benchmark.yml | 36 +-
 .github/workflows/changelog.yml | 36 +-
 .github/workflows/gui.yml | 432 +++---
 .github/workflows/nightly.yml | 453 +++---
 .github/workflows/release.yml | 1249 +++++++++--------
 .github/workflows/scala-new.yml | 132 +-
 .gitignore | 9 +-
 Cargo.lock | 981 ++++++------
 Cargo.toml | 1 +
 build/base/Cargo.toml | 15 +
 build/base/src/extensions.rs | 16 +
 .../src/extensions/from_string.rs | 5 +
 .../src/extensions/future.rs | 37 +-
 .../src/extensions/iterator.rs | 12 +
 .../{ci_utils => base}/src/extensions/maps.rs | 5 +-
 build/base/src/extensions/option.rs | 10 +
 build/base/src/extensions/path.rs | 184 +++
 build/base/src/extensions/pathbuf.rs | 22 +
 .../src/extensions/result.rs | 40 +
 build/base/src/extensions/str.rs | 34 +
 build/base/src/fs.rs | 221 +++
 build/base/src/fs/wrappers.rs | 87 ++
 build/base/src/lib.rs | 112 ++
 build/build/Cargo.toml | 4 +-
 build/build/examples/experiments.rs | 9 +-
 build/build/examples/s3.rs | 4 +-
 build/build/paths.yaml | 24 +-
 build/build/src/aws.rs | 58 +-
 build/build/src/aws/ecr.rs | 15 +-
 build/build/src/aws/ecr/runtime.rs | 6 +-
 build/build/src/aws/s3.rs | 198 +++
 build/build/src/aws/s3/gui.rs | 36 +
 build/build/src/changelog/check.rs | 4 +-
 build/build/src/config.rs | 78 +-
 build/build/src/context.rs | 24 +-
 build/build/src/engine.rs | 4 +-
 build/build/src/engine/context.rs | 59 +-
 build/build/src/engine/env.rs | 18 +-
 build/build/src/enso.rs | 12 +-
 build/build/src/env.rs | 24 +-
 build/build/src/httpbin.rs | 16 +-
 build/build/src/ide/web.rs | 34 +-
 build/build/src/lib.rs | 4 +
 build/build/src/paths.rs | 33 +-
 build/build/src/postgres.rs | 6 +-
 build/build/src/project.rs | 43 +-
 build/build/src/project/backend.rs | 18 +-
 build/build/src/project/gui.rs | 24 +-
 build/build/src/project/wasm.rs | 3 +-
 build/build/src/release.rs | 226 ++-
 build/build/src/repo.rs | 7 +
 build/build/src/repo/cloud.rs | 60 +
 build/build/src/source.rs | 12 +-
 build/build/src/version.rs | 189 ++-
 build/ci_utils/Cargo.toml | 3 +-
 build/ci_utils/src/actions.rs | 4 +
 build/ci_utils/src/actions/artifacts.rs | 2 +-
 build/ci_utils/src/actions/context.rs | 3 +-
 build/ci_utils/src/actions/env.rs | 20 +-
 build/ci_utils/src/actions/env_file.rs | 83 ++
 build/ci_utils/src/actions/workflow.rs | 32 +-
 .../src/actions/workflow/definition.rs | 69 +-
 build/ci_utils/src/anyhow.rs | 46 -
 build/ci_utils/src/archive.rs | 2 +-
 build/ci_utils/src/cache/artifact.rs | 7 +-
 build/ci_utils/src/cache/download.rs | 8 +-
 build/ci_utils/src/cache/goodie/graalvm.rs | 9 +-
 build/ci_utils/src/env.rs | 185 +--
build/ci_utils/src/extensions.rs | 7 - build/ci_utils/src/extensions/path.rs | 100 -- build/ci_utils/src/extensions/str.rs | 28 - build/ci_utils/src/fs.rs | 233 +-- build/ci_utils/src/fs/tokio.rs | 30 + build/ci_utils/src/fs/wrappers.rs | 77 - build/ci_utils/src/fs/wrappers/tokio.rs | 18 +- build/ci_utils/src/future.rs | 7 + build/ci_utils/src/github.rs | 188 +-- build/ci_utils/src/github/release.rs | 158 ++- build/ci_utils/src/github/repo.rs | 305 ++++ build/ci_utils/src/github/workflow.rs | 38 + build/ci_utils/src/lib.rs | 84 +- build/ci_utils/src/models/config.rs | 53 +- build/ci_utils/src/path/trie.rs | 6 + build/ci_utils/src/program/command.rs | 15 +- build/ci_utils/src/programs/git.rs | 89 +- build/ci_utils/src/programs/git/clean.rs | 72 +- build/ci_utils/src/programs/rustup.rs | 7 +- build/ci_utils/src/programs/seven_zip.rs | 51 +- build/ci_utils/src/programs/tar.rs | 4 +- build/ci_utils/src/programs/vswhere.rs | 10 +- build/cli/Cargo.toml | 1 + build/cli/src/arg.rs | 19 +- build/cli/src/arg/git_clean.rs | 3 + build/cli/src/arg/ide.rs | 2 +- build/cli/src/arg/release.rs | 9 +- .../cli/src/bin/enso-remove-draft-releases.rs | 6 +- build/cli/src/ci_gen.rs | 62 +- build/cli/src/ci_gen/job.rs | 46 +- build/cli/src/lib.rs | 122 +- build/cli/src/main.rs | 10 +- build/macros/Cargo.toml | 19 + build/macros/src/lib.rs | 41 + build/macros/src/program_args.rs | 178 +++ build/macros/tests/plain.rs | 38 + build/macros/tests/with_arg.rs | 32 + run.cmd | 6 +- run.ps1 | 20 + 107 files changed, 5019 insertions(+), 3029 deletions(-) create mode 100644 build/base/Cargo.toml create mode 100644 build/base/src/extensions.rs rename build/{ci_utils => base}/src/extensions/from_string.rs (71%) rename build/{ci_utils => base}/src/extensions/future.rs (62%) rename build/{ci_utils => base}/src/extensions/iterator.rs (70%) rename build/{ci_utils => base}/src/extensions/maps.rs (80%) create mode 100644 build/base/src/extensions/option.rs create mode 100644 build/base/src/extensions/path.rs create mode 100644 build/base/src/extensions/pathbuf.rs rename build/{ci_utils => base}/src/extensions/result.rs (58%) create mode 100644 build/base/src/extensions/str.rs create mode 100644 build/base/src/fs.rs create mode 100644 build/base/src/fs/wrappers.rs create mode 100644 build/base/src/lib.rs create mode 100644 build/build/src/aws/s3.rs create mode 100644 build/build/src/aws/s3/gui.rs create mode 100644 build/build/src/repo/cloud.rs create mode 100644 build/ci_utils/src/actions/env_file.rs delete mode 100644 build/ci_utils/src/anyhow.rs delete mode 100644 build/ci_utils/src/extensions/path.rs delete mode 100644 build/ci_utils/src/extensions/str.rs create mode 100644 build/ci_utils/src/github/repo.rs create mode 100644 build/ci_utils/src/github/workflow.rs create mode 100644 build/macros/Cargo.toml create mode 100644 build/macros/src/lib.rs create mode 100644 build/macros/src/program_args.rs create mode 100644 build/macros/tests/plain.rs create mode 100644 build/macros/tests/with_arg.rs create mode 100644 run.ps1 diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index 3e22e2274eea..418ff7b3360d 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -18,28 +18,28 @@ jobs: runs-on: - benchmark steps: - - name: Setup conda (GH runners only) + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: startsWith(runner.name, 'GitHub Actions') || 
startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - - name: Installing wasm-pack + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - name: Expose Artifact API and context information. uses: actions/github-script@v6 with: script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + - if: runner.os == 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' - if: runner.os == 'Windows' shell: cmd - - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + - if: runner.os != 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" - if: runner.os != 'Windows' shell: bash - name: Checking out the repository uses: actions/checkout@v2 @@ -50,25 +50,29 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Clean before + timeout-minutes: 360 + - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean before run: ./run git-clean - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 - run: ./run backend benchmark runtime env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: List files if failed (Windows) + timeout-minutes: 360 + - if: failure() && runner.os == 'Windows' + name: List files if failed (Windows) run: Get-ChildItem -Force -Recurse - if: failure() && runner.os == 'Windows' - - name: List files if failed (non-Windows) + - if: failure() && runner.os != 'Windows' + name: List files if failed (non-Windows) run: ls -lAR - if: failure() && runner.os != 'Windows' - - name: Clean after + - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean after run: ./run git-clean - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 env: ENSO_BUILD_MINIMAL_RUN: ${{ true == inputs.just-check }} ENSO_BUILD_SKIP_VERSION_CHECK: "true" diff --git a/.github/workflows/changelog.yml b/.github/workflows/changelog.yml index 80e30c3568a3..fd26299b1f1b 100644 --- a/.github/workflows/changelog.yml +++ b/.github/workflows/changelog.yml @@ -13,28 +13,28 @@ jobs: runs-on: - X64 steps: - - name: Setup conda (GH runners only) + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: 
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -45,24 +45,28 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run changelog-check
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
 env:
   ENSO_BUILD_SKIP_VERSION_CHECK: "true"
diff --git a/.github/workflows/gui.yml b/.github/workflows/gui.yml
index 37931de10844..132a8ef8d057 100644
--- a/.github/workflows/gui.yml
+++ b/.github/workflows/gui.yml
@@ -13,28 +13,28 @@ jobs:
       - Linux
       - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -45,52 +45,56 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run backend get
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-build-backend-macos:
     name: Build Backend (macos)
     runs-on:
       - macos-latest
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -101,25 +105,29 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run backend get
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-build-backend-windows:
     name: Build Backend (windows)
     runs-on:
@@ -127,28 +135,28 @@ jobs:
       - Windows
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -159,25 +167,29 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run backend get
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-build-wasm-linux:
     name: Build GUI (WASM) (linux)
     runs-on:
@@ -185,28 +197,28 @@ jobs:
       - Linux
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -217,52 +229,56 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-build-wasm-macos:
     name: Build GUI (WASM) (macos)
     runs-on:
       - macos-latest
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -273,25 +289,29 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-build-wasm-windows:
     name: Build GUI (WASM) (windows)
     runs-on:
@@ -299,28 +319,28 @@ jobs:
       - Windows
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -331,25 +351,29 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-cancel-workflow-linux:
     name: Cancel Previous Runs
     runs-on:
@@ -366,28 +390,28 @@ jobs:
       - Linux
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -398,25 +422,29 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run lint
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-native-test-linux:
     name: Native GUI tests (linux)
     runs-on:
@@ -424,28 +452,28 @@ jobs:
       - Linux
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -456,25 +484,29 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run wasm test --no-wasm
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-package-ide-linux:
     name: Package IDE (linux)
     needs:
@@ -485,28 +517,28 @@ jobs:
       - Linux
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -517,25 +549,29 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run ide build --wasm-source current-ci-run --backend-source current-ci-run
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-package-ide-macos:
     name: Package IDE (macos)
     needs:
@@ -544,28 +580,28 @@ jobs:
     runs-on:
       - macos-latest
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -576,11 +612,13 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run ide build --wasm-source current-ci-run --backend-source current-ci-run
         env:
           APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
@@ -589,17 +627,19 @@ jobs:
           CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODE_SIGNING_CERT_PASSWORD }}
           CSC_LINK: ${{ secrets.APPLE_CODE_SIGNING_CERT }}
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-package-ide-windows:
     name: Package IDE (windows)
     needs:
@@ -610,28 +650,28 @@ jobs:
       - Windows
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -642,27 +682,31 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run ide build --wasm-source current-ci-run --backend-source current-ci-run
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
           WIN_CSC_KEY_PASSWORD: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT_PASSWORD }}
           WIN_CSC_LINK: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-wasm-test-linux:
     name: WASM GUI tests (linux)
     runs-on:
@@ -670,28 +714,28 @@ jobs:
       - Linux
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -702,24 +746,28 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run wasm test --no-native
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
 env:
   ENSO_BUILD_SKIP_VERSION_CHECK: "true"
diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml
index 9a4b05f4fd96..c315affb36df 100644
--- a/.github/workflows/nightly.yml
+++ b/.github/workflows/nightly.yml
@@ -11,28 +11,28 @@ jobs:
       - Linux
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -43,10 +43,12 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - id: prepare
         run: ./run release create-draft
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
     outputs:
       ENSO_RELEASE_ID: ${{ steps.prepare.outputs.ENSO_RELEASE_ID }}
       ENSO_VERSION: ${{ steps.prepare.outputs.ENSO_VERSION }}
@@ -57,28 +59,28 @@ jobs:
       - Linux
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -89,56 +91,60 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-  enso-build-cli-ci-gen-job-upload-backend-linux:
-    name: Upload Backend (linux)
+        timeout-minutes: 360
+  enso-build-cli-ci-gen-job-deploy-gui-linux:
+    name: Upload GUI to S3 (linux)
     needs:
-      - enso-build-cli-ci-gen-draft-release-linux
+      - enso-build-cli-ci-gen-upload-ide-linux
     runs-on:
       - self-hosted
       - Linux
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -149,57 +155,134 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - run: ./run backend upload
+        timeout-minutes: 360
+      - run: ./run release deploy-gui
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.ARTEFACT_S3_ACCESS_KEY_ID }}
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.ARTEFACT_S3_SECRET_ACCESS_KEY }}
+          GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }}
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
+        run: Get-ChildItem -Force -Recurse
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
+        run: ls -lAR
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
+        run: ./run git-clean
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+  enso-build-cli-ci-gen-job-deploy-runtime-linux:
+    name: Upload Runtime to ECR (linux)
+    needs:
+      - enso-build-cli-ci-gen-draft-release-linux
+      - enso-build-cli-ci-gen-job-upload-backend-linux
+    runs-on:
+      - self-hosted
+      - Linux
+      - engine
+    steps:
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
+        uses: s-weigand/setup-conda@v1.0.5
+        with:
+          update-conda: false
+          conda-channels: anaconda, conda-forge
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
+        uses: jetli/wasm-pack-action@v0.3.0
+        with:
+          version: v0.10.2
+      - name: Expose Artifact API and context information.
+        uses: actions/github-script@v6
+        with:
+          script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+        run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
+        shell: cmd
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+        run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
+        shell: bash
+      - name: Checking out the repository
+        uses: actions/checkout@v2
+        with:
+          clean: false
+          submodules: recursive
+      - name: Build Script Setup
+        run: ./run --help
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - run: ./run release deploy-runtime
+        env:
+          AWS_ACCESS_KEY_ID: ${{ secrets.ECR_PUSH_RUNTIME_ACCESS_KEY_ID }}
+          AWS_DEFAULT_REGION: eu-west-1
+          AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_PUSH_RUNTIME_SECRET_ACCESS_KEY }}
+          ENSO_BUILD_ECR_REPOSITORY: runtime
+          GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }}
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
     env:
       ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
       ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
-  enso-build-cli-ci-gen-job-upload-backend-macos:
-    name: Upload Backend (macos)
+  enso-build-cli-ci-gen-job-upload-backend-linux:
+    name: Upload Backend (linux)
     needs:
       - enso-build-cli-ci-gen-draft-release-linux
     runs-on:
-      - macos-latest
+      - self-hosted
+      - Linux
+      - engine
    steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -210,59 +293,61 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run backend upload
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
     env:
       ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
       ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
-  enso-build-cli-ci-gen-job-upload-backend-windows:
-    name: Upload Backend (windows)
+  enso-build-cli-ci-gen-job-upload-backend-macos:
+    name: Upload Backend (macos)
     needs:
       - enso-build-cli-ci-gen-draft-release-linux
     runs-on:
-      - self-hosted
-      - Windows
-      - engine
+      - macos-latest
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
        run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -273,60 +358,63 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run backend upload
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
     env:
       ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
      ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
-  enso-build-cli-ci-gen-job-upload-runtime-to-ecr-linux:
-    name: Upload Runtime to ECR (linux)
+  enso-build-cli-ci-gen-job-upload-backend-windows:
+    name: Upload Backend (windows)
     needs:
       - enso-build-cli-ci-gen-draft-release-linux
-      - enso-build-cli-ci-gen-job-upload-backend-linux
     runs-on:
       - self-hosted
-      - Linux
+      - Windows
       - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -337,29 +425,29 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - run: ./run release deploy-to-ecr
+        timeout-minutes: 360
+      - run: ./run backend upload
         env:
-          AWS_ACCESS_KEY_ID: ${{ secrets.ECR_PUSH_RUNTIME_ACCESS_KEY_ID }}
-          AWS_DEFAULT_REGION: eu-west-1
-          AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_PUSH_RUNTIME_SECRET_ACCESS_KEY }}
-          ENSO_BUILD_ECR_REPOSITORY: runtime
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
     env:
       ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
       ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
@@ -367,7 +455,8 @@
     name: Publish release (linux)
     needs:
       - enso-build-cli-ci-gen-draft-release-linux
-      - enso-build-cli-ci-gen-job-upload-runtime-to-ecr-linux
+      - enso-build-cli-ci-gen-job-deploy-gui-linux
+      - enso-build-cli-ci-gen-job-deploy-runtime-linux
       - enso-build-cli-ci-gen-upload-ide-linux
       - enso-build-cli-ci-gen-upload-ide-macos
       - enso-build-cli-ci-gen-upload-ide-windows
@@ -376,28 +465,28 @@ jobs:
       - Linux
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -408,25 +497,29 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run release publish
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: List files if failed (Windows)
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
     env:
       AWS_ACCESS_KEY_ID: ${{ secrets.ARTEFACT_S3_ACCESS_KEY_ID }}
       AWS_REGION: us-west-1
@@ -444,28 +537,28 @@ jobs:
       - Linux
      - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') ||
startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - - name: Installing wasm-pack + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - name: Expose Artifact API and context information. uses: actions/github-script@v6 with: script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + - if: runner.os == 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' - if: runner.os == 'Windows' shell: cmd - - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + - if: runner.os != 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" - if: runner.os != 'Windows' shell: bash - name: Checking out the repository uses: actions/checkout@v2 @@ -476,25 +569,29 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Clean before + timeout-minutes: 360 + - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean before run: ./run git-clean - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 - run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: List files if failed (Windows) + timeout-minutes: 360 + - if: failure() && runner.os == 'Windows' + name: List files if failed (Windows) run: Get-ChildItem -Force -Recurse - if: failure() && runner.os == 'Windows' - - name: List files if failed (non-Windows) + - if: failure() && runner.os != 'Windows' + name: List files if failed (non-Windows) run: ls -lAR - if: failure() && runner.os != 'Windows' - - name: Clean after + - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean after run: ./run git-clean - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 env: ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} @@ -507,28 +604,28 @@ jobs: runs-on: - macos-latest steps: - - name: Setup conda (GH runners only) + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: 
false conda-channels: anaconda, conda-forge - - name: Installing wasm-pack + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - name: Expose Artifact API and context information. uses: actions/github-script@v6 with: script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + - if: runner.os == 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' - if: runner.os == 'Windows' shell: cmd - - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + - if: runner.os != 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" - if: runner.os != 'Windows' shell: bash - name: Checking out the repository uses: actions/checkout@v2 @@ -539,11 +636,13 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Clean before + timeout-minutes: 360 + - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean before run: ./run git-clean - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 - run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}} env: APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }} @@ -552,17 +651,19 @@ jobs: CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODE_SIGNING_CERT_PASSWORD }} CSC_LINK: ${{ secrets.APPLE_CODE_SIGNING_CERT }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: List files if failed (Windows) + timeout-minutes: 360 + - if: failure() && runner.os == 'Windows' + name: List files if failed (Windows) run: Get-ChildItem -Force -Recurse - if: failure() && runner.os == 'Windows' - - name: List files if failed (non-Windows) + - if: failure() && runner.os != 'Windows' + name: List files if failed (non-Windows) run: ls -lAR - if: failure() && runner.os != 'Windows' - - name: Clean after + - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean after run: ./run git-clean - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 env: ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} @@ -577,28 +678,28 @@ jobs: - Windows - engine steps: - - name: Setup conda (GH runners only) + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Setup conda (GH runners only) uses: 
s-weigand/setup-conda@v1.0.5 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: update-conda: false conda-channels: anaconda, conda-forge - - name: Installing wasm-pack + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Installing wasm-pack uses: jetli/wasm-pack-action@v0.3.0 - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') with: version: v0.10.2 - name: Expose Artifact API and context information. uses: actions/github-script@v6 with: script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " - - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + - if: runner.os == 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' - if: runner.os == 'Windows' shell: cmd - - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + - if: runner.os != 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" - if: runner.os != 'Windows' shell: bash - name: Checking out the repository uses: actions/checkout@v2 @@ -609,27 +710,31 @@ jobs: run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - name: Clean before + timeout-minutes: 360 + - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean before run: ./run git-clean - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 - run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} WIN_CSC_KEY_PASSWORD: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT_PASSWORD }} WIN_CSC_LINK: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT }} - - name: List files if failed (Windows) + timeout-minutes: 360 + - if: failure() && runner.os == 'Windows' + name: List files if failed (Windows) run: Get-ChildItem -Force -Recurse - if: failure() && runner.os == 'Windows' - - name: List files if failed (non-Windows) + - if: failure() && runner.os != 'Windows' + name: List files if failed (non-Windows) run: ls -lAR - if: failure() && runner.os != 'Windows' - - name: Clean after + - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean after run: ./run git-clean - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 env: ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3a550f920e2d..06ff63f9e649 100644 --- a/.github/workflows/release.yml +++ 
b/.github/workflows/release.yml @@ -1,600 +1,743 @@ -name: Release CI - +name: Release Candidate on: - push: - tags: - - "enso-*.*.*" - -env: - # Please ensure that this is in sync with graalVersion in build.sbt - graalVersion: 21.3.0 - # Please ensure that this is in sync with javaVersion in build.sbt - javaVersion: 11 - # Please ensure that this is in sync with project/build.properties - sbtVersion: 1.5.2 - # Please ensure that this is in sync with rustVersion in build.sbt - rustToolchain: nightly-2021-11-29 - # Please ensure that this is in sync with nodeVersion in scala.yml - nodeVersion: 14.17.2 - -concurrency: "releases" - + workflow_dispatch: {} jobs: - # This job should be kept up-to-date with scala.yml#build (but keep the added version check) - build: - name: Build - runs-on: ${{ matrix.os }} - timeout-minutes: 90 - strategy: - matrix: - os: [macOS-latest, ubuntu-18.04, windows-latest] - fail-fast: true + enso-build-cli-ci-gen-draft-release-linux: + name: Create release draft + runs-on: + - self-hosted + - Linux + - engine steps: - - name: Checkout + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Setup conda (GH runners only) + uses: s-weigand/setup-conda@v1.0.5 + with: + update-conda: false + conda-channels: anaconda, conda-forge + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Installing wasm-pack + uses: jetli/wasm-pack-action@v0.3.0 + with: + version: v0.10.2 + - name: Expose Artifact API and context information. + uses: actions/github-script@v6 + with: + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " + - if: runner.os == 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' + shell: cmd + - if: runner.os != 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" + shell: bash + - name: Checking out the repository uses: actions/checkout@v2 with: - path: repo - - name: Configure Pagefile (Windows) - if: runner.os == 'Windows' - uses: al-cheb/configure-pagefile-action@v1.2 - with: - minimum-size: 16GB - maximum-size: 16GB - disk-root: "C:" - - name: Enable Developer Command Prompt (Windows) - uses: ilammy/msvc-dev-cmd@v1.9.0 - - name: Disable TCP/UDP Offloading (macOS) - if: runner.os == 'macOS' - shell: bash - run: | - sudo sysctl -w net.link.generic.system.hwcksum_tx=0 - sudo sysctl -w net.link.generic.system.hwcksum_rx=0 - - name: Disable TCP/UDP Offloading (Linux) - if: runner.os == 'Linux' - shell: bash - run: sudo ethtool -K eth0 tx off rx off - - name: Disable TCP/UDP Offloading (Windows) - if: runner.os == 'Windows' - shell: powershell - run: > - Disable-NetAdapterChecksumOffload -Name * -TcpIPv4 -UdpIPv4 -TcpIPv6 - -UdpIPv6 - - name: Install Rust - uses: actions-rs/toolchain@v1.0.6 - with: - toolchain: ${{ env.rustToolchain }} - override: true - - name: Setup conda + clean: false + submodules: recursive + - name: Build Script Setup + run: ./run 
--help + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + - id: prepare + run: ./run release create-draft + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + outputs: + ENSO_RELEASE_ID: ${{ steps.prepare.outputs.ENSO_RELEASE_ID }} + ENSO_VERSION: ${{ steps.prepare.outputs.ENSO_VERSION }} + enso-build-cli-ci-gen-job-build-wasm-linux: + name: Build GUI (WASM) (linux) + runs-on: + - self-hosted + - Linux + - engine + steps: + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Setup conda (GH runners only) uses: s-weigand/setup-conda@v1.0.5 with: - update-conda: true + update-conda: false conda-channels: anaconda, conda-forge - - name: Setup Conda Environment on Windows - if: runner.os == 'Windows' - run: | - conda create --name enso - conda init powershell - - name: Activate Conda Environment on Windows - if: runner.os == 'Windows' - run: conda activate enso - - name: Install FlatBuffers Compiler - run: conda install flatbuffers=1.12.0 - - name: Setup GraalVM Environment - uses: ayltai/setup-graalvm@v1 - with: - graalvm-version: ${{ env.graalVersion }} - java-version: ${{ env.javaVersion }} - native-image: true - - name: Download Project Template Files - working-directory: repo - shell: bash - run: | - curl --retry 4 --retry-connrefused -fsSL -o lib/scala/pkg/src/main/resources/orders/data/store_data.xlsx https://github.com/enso-org/project-templates/raw/main/Orders/data/store_data.xlsx - curl --retry 4 --retry-connrefused -fsSL -o lib/scala/pkg/src/main/resources/orders/src/Main.enso https://github.com/enso-org/project-templates/raw/main/Orders/src/Main.enso - curl --retry 4 --retry-connrefused -fsSL -o lib/scala/pkg/src/main/resources/restaurants/data/la_districts.csv https://github.com/enso-org/project-templates/raw/main/Restaurants/data/la_districts.csv - curl --retry 4 --retry-connrefused -fsSL -o lib/scala/pkg/src/main/resources/restaurants/data/restaurants.csv https://github.com/enso-org/project-templates/raw/main/Restaurants/data/restaurants.csv - curl --retry 4 --retry-connrefused -fsSL -o lib/scala/pkg/src/main/resources/restaurants/src/Main.enso https://github.com/enso-org/project-templates/raw/main/Restaurants/src/Main.enso - curl --retry 4 --retry-connrefused -fsSL -o lib/scala/pkg/src/main/resources/stargazers/src/Main.enso https://github.com/enso-org/project-templates/raw/main/Stargazers/src/Main.enso - - name: Set Up SBT - shell: bash - run: | - curl -fsSL -o sbt.tgz https://github.com/sbt/sbt/releases/download/v${{env.sbtVersion}}/sbt-${{env.sbtVersion}}.tgz - tar -xzf sbt.tgz - echo $GITHUB_WORKSPACE/sbt/bin/ >> $GITHUB_PATH - - # Caches - - name: Cache SBT - uses: actions/cache@v2 - with: - path: | - ~/.sbt - ~/.ivy2/cache - ~/.cache - key: ${{ runner.os }}-sbt-${{ hashFiles('**build.sbt') }} - restore-keys: ${{ runner.os }}-sbt- - - # Bootstrap - - name: Enable Release Mode - shell: bash - run: echo "ENSO_RELEASE_MODE=true" >> $GITHUB_ENV - - name: Bootstrap the Project - working-directory: repo - shell: bash - run: | - sleep 1 - sbt --no-colors bootstrap - - # Verify Legal Review - - name: Verify Packages - if: runner.os != 'Windows' # TODO [RW] CRLF handling in licenses task - working-directory: repo - shell: bash - run: | - sleep 1 - sbt --no-colors verifyLicensePackages - - # Prepare distributions - - name: Build the Launcher Native Image - working-directory: repo - shell: bash - run: | - sleep 1 - sbt --no-colors "launcher/assembly" - sbt --no-colors --mem 1536 
"launcher/buildNativeImage" - - - name: Build the PM Native Image - working-directory: repo - shell: bash - run: | - sleep 1 - sbt --no-colors "project-manager/assembly" - sbt --no-colors --mem 1536 "project-manager/buildNativeImage" - - - name: Prepare Distribution Version (Unix) - working-directory: repo - if: runner.os != 'Windows' - shell: bash - run: | - chmod +x enso - DIST_VERSION=$(./enso version --json --only-launcher | jq -r '.version') - echo "DIST_VERSION=$DIST_VERSION" >> $GITHUB_ENV - - - name: Prepare Distribution Version (Windows) - working-directory: repo - if: runner.os == 'Windows' - shell: bash - run: | - DIST_VERSION=$(./enso.exe version --json --only-launcher | jq -r '.version') - echo "DIST_VERSION=$DIST_VERSION" >> $GITHUB_ENV - - # Currently the only architecture supported by Github runners is amd64 - - name: Prepare Distribution Environment - working-directory: repo - shell: bash - run: > - GRAAL_VERSION=$(echo ${{ env.graalVersion }}) DIST_OS=$(echo - ${{runner.os }} | awk '{print tolower($0)}') bash - tools/ci/prepare-distribution-env.sh - - - name: Prepare Launcher Distribution - working-directory: repo - shell: bash - run: | - sleep 1 - sbt buildLauncherDistribution - - name: Prepare Engine Distribution - working-directory: repo - shell: bash - run: | - sleep 1 - sbt buildEngineDistribution - - - name: Compile the Standard Libraries (Unix) - working-directory: repo - shell: bash - if: runner.os != 'Windows' - run: | - $ENGINE_DIST_DIR/bin/enso --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Base/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Test/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Visualization/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Searcher/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Table/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Database/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Geo/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Google_Api/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Image/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Examples/$DIST_VERSION - - - name: Compile the Standard Libraries (Windows) - working-directory: repo - shell: bash - if: runner.os == 'Windows' - run: | - $ENGINE_DIST_DIR/bin/enso.bat --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Base/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso.bat --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Database/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso.bat --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Examples/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso.bat --ir-caches 
--no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Geo/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso.bat --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Google_Api/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso.bat --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Image/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso.bat --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Searcher/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso.bat --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Table/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso.bat --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Test/$DIST_VERSION - $ENGINE_DIST_DIR/bin/enso.bat --ir-caches --no-compile-dependencies --no-global-cache --compile $ENGINE_DIST_DIR/lib/Standard/Visualization/$DIST_VERSION - - - name: Prepare Project Manager Distribution - working-directory: repo - shell: bash - run: | - sleep 1 - sbt buildProjectManagerDistribution - - name: Prepare GraalVM Distribution - working-directory: repo - shell: bash - run: | - sleep 1 - sbt buildGraalDistribution - - # Ensure that the versions encoded in the binary and in the release match - - name: Check Versions (Unix) - working-directory: repo - if: runner.os != 'Windows' - shell: bash - run: | - ref=${{ github.ref }} - refversion=${ref#"refs/tags/enso-"} - binversion=${{ env.DIST_VERSION }} - engineversion=$(${{ env.ENGINE_DIST_DIR }}/bin/enso --version --json | jq -r '.version') - test $binversion = $refversion || (echo "Tag version $refversion and the launcher version $binversion do not match" && false) - test $engineversion = $refversion || (echo "Tag version $refversion and the engine version $engineversion do not match" && false) - - name: Check Versions (Windows) - working-directory: repo - if: runner.os == 'Windows' - shell: bash - run: | - ref=${{ github.ref }} - refversion=${ref#"refs/tags/enso-"} - binversion=${{ env.DIST_VERSION }} - engineversion=$(${{ env.ENGINE_DIST_DIR }}/bin/enso.bat --version --json | jq -r '.version') - test $binversion = $refversion || (echo "Tag version $refversion and the launcher version $binversion do not match" && false) - test $engineversion = $refversion || (echo "Tag version $refversion and the engine version $engineversion do not match" && false) - - # Verify License Packages in Distributions - - name: Verify Distributed Licenses Package - working-directory: repo - if: runner.os != 'Windows' # TODO [RW] CRLF handling in licenses task - shell: bash - # We assume that standard library version is the same as engine version. 
- run: | - sleep 1 - sbt "enso/verifyGeneratedPackage engine ${{ env.ENGINE_DIST_DIR }}/THIRD-PARTY" - sbt "enso/verifyGeneratedPackage launcher ${{ env.LAUNCHER_DIST_DIR }}/THIRD-PARTY" - sbt "enso/verifyGeneratedPackage project-manager ${{ env.PROJECTMANAGER_DIST_DIR }}/THIRD-PARTY" - sbt "enso/verifyGeneratedPackage Base ${{ env.ENGINE_DIST_DIR }}/lib/Standard/Base/${{ env.DIST_VERSION }}/THIRD-PARTY" - sbt "enso/verifyGeneratedPackage Table ${{ env.ENGINE_DIST_DIR }}/lib/Standard/Table/${{ env.DIST_VERSION }}/THIRD-PARTY" - sbt "enso/verifyGeneratedPackage Image ${{ env.ENGINE_DIST_DIR }}/lib/Standard/Image/${{ env.DIST_VERSION }}/THIRD-PARTY" - sbt "enso/verifyGeneratedPackage Database ${{ env.ENGINE_DIST_DIR }}/lib/Standard/Database/${{ env.DIST_VERSION }}/THIRD-PARTY" - - # Publish - - name: Compress the built artifacts for upload - # The artifacts are compressed before upload to work around an error with long path handling in the upload-artifact action on Windows. - shell: bash - working-directory: repo/built-distribution - run: - 7z a -r built-distribution-${{ env.DIST_OS }}-${{ env.DIST_ARCH }}.zip - * - - name: Upload the Built Artifacts - uses: actions/upload-artifact@v2 - with: - name: built-distribution-${{ env.DIST_OS }}-${{ env.DIST_ARCH }} - path: - repo/built-distribution/built-distribution-${{ env.DIST_OS }}-${{ - env.DIST_ARCH }}.zip - - name: Upload the Manifest Artifact - uses: actions/upload-artifact@v2 - with: - name: manifest - path: repo/${{ env.ENGINE_DIST_DIR }}/manifest.yaml - - name: Upload the Launcher Manifest Artifact - uses: actions/upload-artifact@v2 - with: - name: launcher-manifest - path: repo/distribution/launcher-manifest.yaml - - create-release: - name: Prepare Release - runs-on: ubuntu-18.04 - needs: build - steps: - - name: Checkout code - uses: actions/checkout@v2 + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Installing wasm-pack + uses: jetli/wasm-pack-action@v0.3.0 with: - path: repo - # Without specifying options, it downloads all artifacts - - uses: actions/download-artifact@v2 + version: v0.10.2 + - name: Expose Artifact API and context information. 
+ uses: actions/github-script@v6 with: - path: repo/built-distribution - - name: Unpack Artifacts - shell: bash - working-directory: repo/built-distribution - run: for f in built-distribution-*; do unzip -n "$f/$f.zip"; done - - - name: Setup GraalVM Environment - uses: ayltai/setup-graalvm@v1 - with: - graalvm-version: ${{ env.graalVersion }} - java-version: ${{ env.javaVersion }} - native-image: true - - name: Set Up SBT - shell: bash - run: | - curl -fsSL -o sbt.tgz https://github.com/sbt/sbt/releases/download/v${{env.sbtVersion}}/sbt-${{env.sbtVersion}}.tgz - tar -xzf sbt.tgz - echo $GITHUB_WORKSPACE/sbt/bin/ >> $GITHUB_PATH - - # Caches - - name: Cache SBT - uses: actions/cache@v2 - with: - path: | - ~/.sbt - ~/.ivy2/cache - ~/.cache - key: ${{ runner.os }}-sbt-${{ hashFiles('**build.sbt') }} - restore-keys: ${{ runner.os }}-sbt- - - - name: Save Version to Environment - shell: bash - run: | - ref=${{ github.ref }} - DIST_VERSION=${ref#"refs/tags/enso-"} - echo "Preparing release for $DIST_VERSION" - echo "DIST_VERSION=$DIST_VERSION" >> $GITHUB_ENV - - - name: Prepare Packages - shell: bash - working-directory: repo - run: | - sleep 1 - sbt makePackages - - - name: Prepare Bundles - shell: bash - working-directory: repo - run: | - sleep 1 - sbt makeBundles - - - name: Create Release - id: create_release - uses: actions/create-release@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - tag_name: enso-${{ env.DIST_VERSION }} - release_name: Enso ${{ env.DIST_VERSION }} - body: "Please fill-in the release description" - draft: true - prerelease: true - - - name: Login to ACR - uses: docker/login-action@v1 - with: - registry: ensosharedwus2acr.azurecr.io - username: ${{ secrets.ACR_PUSH_CLIENT_ID }} - password: ${{ secrets.ACR_PUSH_CLIENT_SECRET }} - - - name: Copy the docker entrypoint script - shell: bash - run: | - sleep 1 - cp ./repo/tools/ci/docker/docker-entrypoint.sh ./repo/built-distribution/enso-engine-${{ env.DIST_VERSION }}-linux-amd64/enso-${{ env.DIST_VERSION }}/bin - - - name: Publish Runtime Docker Image - shell: bash - run: | - sleep 1 - docker build -t ensosharedwus2acr.azurecr.io/runtime:${{ env.DIST_VERSION }} -f ./repo/tools/ci/docker/Dockerfile ./repo/built-distribution/enso-engine-${{ env.DIST_VERSION }}-linux-amd64/enso-${{ env.DIST_VERSION }} - docker push ensosharedwus2acr.azurecr.io/runtime:${{ env.DIST_VERSION }} - - # Publish the launcher packages to the backup/fallback S3 bucket - - name: Prepare AWS Session - shell: bash - run: | - aws configure --profile s3-upload <<-EOF > /dev/null 2>&1 - ${{ secrets.LAUNCHER_DEPLOY_ACCESS_KEY_ID }} - ${{ secrets.LAUNCHER_DEPLOY_SECRET_ACCESS_KEY }} - eu-central-1 - text - EOF - - name: Upload the Linux Launcher Package to S3 - shell: bash - run: > - aws s3 cp repo/built-distribution/enso-launcher-${{ env.DIST_VERSION - }}-linux-amd64.tar.gz s3://launcherfallback/launcher/enso-${{ - env.DIST_VERSION }}/ --profile s3-upload --acl public-read - - name: Upload the macOS Launcher Package to S3 - shell: bash - run: > - aws s3 cp repo/built-distribution/enso-launcher-${{ env.DIST_VERSION - }}-macos-amd64.tar.gz s3://launcherfallback/launcher/enso-${{ - env.DIST_VERSION }}/ --profile s3-upload --acl public-read - - name: Upload the Windows Launcher Package to S3 - shell: bash - run: > - aws s3 cp repo/built-distribution/enso-launcher-${{ env.DIST_VERSION - }}-windows-amd64.zip s3://launcherfallback/launcher/enso-${{ - env.DIST_VERSION }}/ --profile s3-upload --acl public-read - - name: Upload the Launcher Manifest to 
S3 - shell: bash - run: > - aws s3 cp - repo/built-distribution/launcher-manifest/launcher-manifest.yaml - s3://launcherfallback/launcher/enso-${{ env.DIST_VERSION - }}/launcher-manifest.yaml --profile s3-upload --acl public-read - - name: Update the Release List in S3 - shell: bash - run: | - aws s3 cp s3://launcherfallback/release-list.json release-list.json --profile s3-upload - TAG="enso-${{ env.DIST_VERSION }}" - ./repo/tools/ci/releases/add-release.js release-list.json "$TAG" \ - launcher-manifest.yaml \ - "enso-launcher-${{ env.DIST_VERSION }}-linux-amd64.tar.gz" \ - "enso-launcher-${{ env.DIST_VERSION }}-macos-amd64.tar.gz" \ - "enso-launcher-${{ env.DIST_VERSION }}-windows-amd64.zip" - aws s3 cp release-list.json s3://launcherfallback/release-list.json --profile s3-upload --acl public-read - - name: Teardown AWS Session - shell: bash - run: | - aws configure --profile s3-upload <<-EOF > /dev/null 2>&1 - null - null - null - text - EOF - - # Upload the assets to the created release - - name: Publish the Engine (Linux) - uses: actions/upload-release-asset@v1 + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " + - if: runner.os == 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' + shell: cmd + - if: runner.os != 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" + shell: bash + - name: Checking out the repository + uses: actions/checkout@v2 + with: + clean: false + submodules: recursive + - name: Build Script Setup + run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: repo/built-distribution/enso-engine-${{ env.DIST_VERSION - }}-linux-amd64.tar.gz - asset_name: enso-engine-${{ env.DIST_VERSION }}-linux-amd64.tar.gz - asset_content_type: application/x-tar - - name: Publish the Engine (MacOS) - uses: actions/upload-release-asset@v1 + timeout-minutes: 360 + - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean before + run: ./run git-clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: repo/built-distribution/enso-engine-${{ env.DIST_VERSION - }}-macos-amd64.tar.gz - asset_name: enso-engine-${{ env.DIST_VERSION }}-macos-amd64.tar.gz - asset_content_type: application/x-tar - - name: Publish the Engine (Windows) - uses: actions/upload-release-asset@v1 + timeout-minutes: 360 + - run: ./run --upload-artifacts ${{ runner.os == 'Linux' }} wasm build env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: repo/built-distribution/enso-engine-${{ env.DIST_VERSION - }}-windows-amd64.zip - asset_name: enso-engine-${{ env.DIST_VERSION }}-windows-amd64.zip - asset_content_type: application/zip - - - name: Publish the Launcher (Linux) - uses: actions/upload-release-asset@v1 + 
timeout-minutes: 360 + - if: failure() && runner.os == 'Windows' + name: List files if failed (Windows) + run: Get-ChildItem -Force -Recurse + - if: failure() && runner.os != 'Windows' + name: List files if failed (non-Windows) + run: ls -lAR + - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean after + run: ./run git-clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + enso-build-cli-ci-gen-job-deploy-gui-linux: + name: Upload GUI to S3 (linux) + needs: + - enso-build-cli-ci-gen-upload-ide-linux + runs-on: + - self-hosted + - Linux + - engine + steps: + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Setup conda (GH runners only) + uses: s-weigand/setup-conda@v1.0.5 + with: + update-conda: false + conda-channels: anaconda, conda-forge + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Installing wasm-pack + uses: jetli/wasm-pack-action@v0.3.0 + with: + version: v0.10.2 + - name: Expose Artifact API and context information. + uses: actions/github-script@v6 with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: repo/built-distribution/enso-launcher-${{ env.DIST_VERSION - }}-linux-amd64.tar.gz - asset_name: enso-launcher-${{ env.DIST_VERSION }}-linux-amd64.tar.gz - asset_content_type: application/x-tar - - name: Publish the Launcher (MacOS) - uses: actions/upload-release-asset@v1 + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " + - if: runner.os == 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' + shell: cmd + - if: runner.os != 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" + shell: bash + - name: Checking out the repository + uses: actions/checkout@v2 + with: + clean: false + submodules: recursive + - name: Build Script Setup + run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: repo/built-distribution/enso-launcher-${{ env.DIST_VERSION - }}-macos-amd64.tar.gz - asset_name: enso-launcher-${{ env.DIST_VERSION }}-macos-amd64.tar.gz - asset_content_type: application/x-tar - - name: Publish the Launcher (Windows) - uses: actions/upload-release-asset@v1 + timeout-minutes: 360 + - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean before + run: ./run git-clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: repo/built-distribution/enso-launcher-${{ env.DIST_VERSION - }}-windows-amd64.zip - asset_name: enso-launcher-${{ env.DIST_VERSION }}-windows-amd64.zip - asset_content_type: application/zip - - - name: Publish the Project Manager (Linux) - uses: actions/upload-release-asset@v1 + timeout-minutes: 360 + - run: ./run 
release deploy-gui + env: + AWS_ACCESS_KEY_ID: ${{ secrets.ARTEFACT_S3_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.ARTEFACT_S3_SECRET_ACCESS_KEY }} + GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }} + timeout-minutes: 360 + - if: failure() && runner.os == 'Windows' + name: List files if failed (Windows) + run: Get-ChildItem -Force -Recurse + - if: failure() && runner.os != 'Windows' + name: List files if failed (non-Windows) + run: ls -lAR + - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean after + run: ./run git-clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + enso-build-cli-ci-gen-job-deploy-runtime-linux: + name: Upload Runtime to ECR (linux) + needs: + - enso-build-cli-ci-gen-draft-release-linux + - enso-build-cli-ci-gen-job-upload-backend-linux + runs-on: + - self-hosted + - Linux + - engine + steps: + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Setup conda (GH runners only) + uses: s-weigand/setup-conda@v1.0.5 + with: + update-conda: false + conda-channels: anaconda, conda-forge + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Installing wasm-pack + uses: jetli/wasm-pack-action@v0.3.0 + with: + version: v0.10.2 + - name: Expose Artifact API and context information. + uses: actions/github-script@v6 with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: - repo/built-distribution/enso-project-manager-${{ env.DIST_VERSION - }}-linux-amd64.tar.gz - asset_name: enso-project-manager-${{ env.DIST_VERSION }}-linux-amd64.tar.gz - asset_content_type: application/x-tar - - name: Publish the Project Manager (MacOS) - uses: actions/upload-release-asset@v1 + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " + - if: runner.os == 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' + shell: cmd + - if: runner.os != 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" + shell: bash + - name: Checking out the repository + uses: actions/checkout@v2 + with: + clean: false + submodules: recursive + - name: Build Script Setup + run: ./run --help + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean before + run: ./run git-clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + - run: ./run release deploy-runtime + env: + AWS_ACCESS_KEY_ID: ${{ secrets.ECR_PUSH_RUNTIME_ACCESS_KEY_ID }} + AWS_DEFAULT_REGION: eu-west-1 + AWS_SECRET_ACCESS_KEY: ${{ secrets.ECR_PUSH_RUNTIME_SECRET_ACCESS_KEY }} + ENSO_BUILD_ECR_REPOSITORY: runtime + GITHUB_TOKEN: ${{ secrets.CI_PRIVATE_TOKEN }} + timeout-minutes: 360 + - if: failure() && runner.os == 'Windows' + name: List files if failed (Windows) + run: Get-ChildItem -Force 
-Recurse + - if: failure() && runner.os != 'Windows' + name: List files if failed (non-Windows) + run: ls -lAR + - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean after + run: ./run git-clean + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + env: + ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} + ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} + enso-build-cli-ci-gen-job-upload-backend-linux: + name: Upload Backend (linux) + needs: + - enso-build-cli-ci-gen-draft-release-linux + runs-on: + - self-hosted + - Linux + - engine + steps: + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Setup conda (GH runners only) + uses: s-weigand/setup-conda@v1.0.5 + with: + update-conda: false + conda-channels: anaconda, conda-forge + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Installing wasm-pack + uses: jetli/wasm-pack-action@v0.3.0 with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: - repo/built-distribution/enso-project-manager-${{ env.DIST_VERSION - }}-macos-amd64.tar.gz - asset_name: enso-project-manager-${{ env.DIST_VERSION }}-macos-amd64.tar.gz - asset_content_type: application/x-tar - - name: Publish the Project Manager (Windows) - uses: actions/upload-release-asset@v1 + version: v0.10.2 + - name: Expose Artifact API and context information. + uses: actions/github-script@v6 + with: + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " + - if: runner.os == 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' + shell: cmd + - if: runner.os != 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" + shell: bash + - name: Checking out the repository + uses: actions/checkout@v2 + with: + clean: false + submodules: recursive + - name: Build Script Setup + run: ./run --help + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean before + run: ./run git-clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + - run: ./run backend upload + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + - if: failure() && runner.os == 'Windows' + name: List files if failed (Windows) + run: Get-ChildItem -Force -Recurse + - if: failure() && runner.os != 'Windows' + name: List files if failed (non-Windows) + run: ls -lAR + - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean after + run: ./run git-clean + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + env: + ENSO_RELEASE_ID: 
${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} + ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} + enso-build-cli-ci-gen-job-upload-backend-macos: + name: Upload Backend (macos) + needs: + - enso-build-cli-ci-gen-draft-release-linux + runs-on: + - macos-latest + steps: + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Setup conda (GH runners only) + uses: s-weigand/setup-conda@v1.0.5 + with: + update-conda: false + conda-channels: anaconda, conda-forge + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') + name: Installing wasm-pack + uses: jetli/wasm-pack-action@v0.3.0 + with: + version: v0.10.2 + - name: Expose Artifact API and context information. + uses: actions/github-script@v6 + with: + script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n " + - if: runner.os == 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (Windows) + run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"' + shell: cmd + - if: runner.os != 'Windows' + name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows) + run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :" + shell: bash + - name: Checking out the repository + uses: actions/checkout@v2 with: - upload_url: ${{ steps.create_release.outputs.upload_url }} - asset_path: - repo/built-distribution/enso-project-manager-${{ env.DIST_VERSION - }}-windows-amd64.zip - asset_name: enso-project-manager-${{ env.DIST_VERSION }}-windows-amd64.zip - asset_content_type: application/zip - - - name: Publish the Bundle (Linux) - uses: actions/upload-release-asset@v1 + clean: false + submodules: recursive + - name: Build Script Setup + run: ./run --help env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean before + run: ./run git-clean + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + - run: ./run backend upload + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + - if: failure() && runner.os == 'Windows' + name: List files if failed (Windows) + run: Get-ChildItem -Force -Recurse + - if: failure() && runner.os != 'Windows' + name: List files if failed (non-Windows) + run: ls -lAR + - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')" + name: Clean after + run: ./run git-clean + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + timeout-minutes: 360 + env: + ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}} + ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}} + enso-build-cli-ci-gen-job-upload-backend-windows: + name: Upload Backend (windows) + needs: + - enso-build-cli-ci-gen-draft-release-linux + runs-on: + - self-hosted + - Windows + - engine + steps: + - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent') 
+        name: Setup conda (GH runners only)
+        uses: s-weigand/setup-conda@v1.0.5
+        with:
+          update-conda: false
+          conda-channels: anaconda, conda-forge
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
+        uses: jetli/wasm-pack-action@v0.3.0
+        with:
+          version: v0.10.2
+      - name: Expose Artifact API and context information.
+        uses: actions/github-script@v6
         with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
-            }}-linux-amd64.tar.gz
-          asset_name: enso-bundle-${{ env.DIST_VERSION }}-linux-amd64.tar.gz
-          asset_content_type: application/x-tar
-      - name: Publish the Bundle (MacOS)
-        uses: actions/upload-release-asset@v1
+          script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+        run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
+        shell: cmd
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+        run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
+        shell: bash
+      - name: Checking out the repository
+        uses: actions/checkout@v2
+        with:
+          clean: false
+          submodules: recursive
+      - name: Build Script Setup
+        run: ./run --help
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - run: ./run backend upload
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
+        run: Get-ChildItem -Force -Recurse
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
+        run: ls -lAR
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+    env:
+      ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
+      ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
+  enso-build-cli-ci-gen-publish-release-linux:
+    name: Publish release (linux)
+    needs:
+      - enso-build-cli-ci-gen-draft-release-linux
+      - enso-build-cli-ci-gen-job-deploy-gui-linux
+      - enso-build-cli-ci-gen-job-deploy-runtime-linux
+      - enso-build-cli-ci-gen-upload-ide-linux
+      - enso-build-cli-ci-gen-upload-ide-macos
+      - enso-build-cli-ci-gen-upload-ide-windows
+    runs-on:
+      - self-hosted
+      - Linux
+      - engine
+    steps:
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
+        uses: s-weigand/setup-conda@v1.0.5
         with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
-            }}-macos-amd64.tar.gz
-          asset_name: enso-bundle-${{ env.DIST_VERSION }}-macos-amd64.tar.gz
-          asset_content_type: application/x-tar
-      - name: Publish the Bundle (Windows)
-        uses: actions/upload-release-asset@v1
+          update-conda: false
+          conda-channels: anaconda, conda-forge
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
+        uses: jetli/wasm-pack-action@v0.3.0
+        with:
+          version: v0.10.2
+      - name: Expose Artifact API and context information.
+        uses: actions/github-script@v6
+        with:
+          script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+        run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
+        shell: cmd
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+        run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
+        shell: bash
+      - name: Checking out the repository
+        uses: actions/checkout@v2
+        with:
+          clean: false
+          submodules: recursive
+      - name: Build Script Setup
+        run: ./run --help
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - run: ./run release publish
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
+        run: Get-ChildItem -Force -Recurse
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
+        run: ls -lAR
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
+        run: ./run git-clean
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+    env:
+      AWS_ACCESS_KEY_ID: ${{ secrets.ARTEFACT_S3_ACCESS_KEY_ID }}
+      AWS_REGION: us-west-1
+      AWS_SECRET_ACCESS_KEY: ${{ secrets.ARTEFACT_S3_SECRET_ACCESS_KEY }}
+      ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
+      ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
+  enso-build-cli-ci-gen-upload-ide-linux:
+    name: Build IDE (linux)
+    needs:
+      - enso-build-cli-ci-gen-draft-release-linux
+      - enso-build-cli-ci-gen-job-build-wasm-linux
+      - enso-build-cli-ci-gen-job-upload-backend-linux
+    runs-on:
+      - self-hosted
+      - Linux
+      - engine
+    steps:
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
+        uses: s-weigand/setup-conda@v1.0.5
+        with:
+          update-conda: false
+          conda-channels: anaconda, conda-forge
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
+        uses: jetli/wasm-pack-action@v0.3.0
+        with:
+          version: v0.10.2
+      - name: Expose Artifact API and context information.
+        uses: actions/github-script@v6
         with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: repo/built-distribution/enso-bundle-${{ env.DIST_VERSION
-            }}-windows-amd64.zip
-          asset_name: enso-bundle-${{ env.DIST_VERSION }}-windows-amd64.zip
-          asset_content_type: application/zip
-
-      - name: Publish the Manifest
-        uses: actions/upload-release-asset@v1
+          script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+        run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
+        shell: cmd
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+        run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
+        shell: bash
+      - name: Checking out the repository
+        uses: actions/checkout@v2
+        with:
+          clean: false
+          submodules: recursive
+      - name: Build Script Setup
+        run: ./run --help
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
+        run: Get-ChildItem -Force -Recurse
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
+        run: ls -lAR
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+    env:
+      ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
+      ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
+  enso-build-cli-ci-gen-upload-ide-macos:
+    name: Build IDE (macos)
+    needs:
+      - enso-build-cli-ci-gen-draft-release-linux
+      - enso-build-cli-ci-gen-job-build-wasm-linux
+      - enso-build-cli-ci-gen-job-upload-backend-macos
+    runs-on:
+      - macos-latest
+    steps:
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
+        uses: s-weigand/setup-conda@v1.0.5
+        with:
+          update-conda: false
+          conda-channels: anaconda, conda-forge
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
+        uses: jetli/wasm-pack-action@v0.3.0
+        with:
+          version: v0.10.2
+      - name: Expose Artifact API and context information.
+        uses: actions/github-script@v6
         with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: repo/built-distribution/manifest/manifest.yaml
-          asset_name: manifest.yaml
-          asset_content_type: application/yaml
-      - name: Publish the Launcher Manifest
-        uses: actions/upload-release-asset@v1
+          script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+        run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
+        shell: cmd
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+        run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
+        shell: bash
+      - name: Checking out the repository
+        uses: actions/checkout@v2
+        with:
+          clean: false
+          submodules: recursive
+      - name: Build Script Setup
+        run: ./run --help
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}
         env:
+          APPLEID: ${{ secrets.APPLE_NOTARIZATION_USERNAME }}
+          APPLEIDPASS: ${{ secrets.APPLE_NOTARIZATION_PASSWORD }}
+          CSC_IDENTITY_AUTO_DISCOVERY: "true"
+          CSC_KEY_PASSWORD: ${{ secrets.APPLE_CODE_SIGNING_CERT_PASSWORD }}
+          CSC_LINK: ${{ secrets.APPLE_CODE_SIGNING_CERT }}
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
+        run: Get-ChildItem -Force -Recurse
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
+        run: ls -lAR
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+    env:
+      ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
+      ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
+  enso-build-cli-ci-gen-upload-ide-windows:
+    name: Build IDE (windows)
+    needs:
+      - enso-build-cli-ci-gen-draft-release-linux
+      - enso-build-cli-ci-gen-job-build-wasm-linux
+      - enso-build-cli-ci-gen-job-upload-backend-windows
+    runs-on:
+      - self-hosted
+      - Windows
+      - engine
+    steps:
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
+        uses: s-weigand/setup-conda@v1.0.5
+        with:
+          update-conda: false
+          conda-channels: anaconda, conda-forge
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
+        uses: jetli/wasm-pack-action@v0.3.0
         with:
-          upload_url: ${{ steps.create_release.outputs.upload_url }}
-          asset_path: repo/built-distribution/launcher-manifest/launcher-manifest.yaml
-          asset_name: launcher-manifest.yaml
-          asset_content_type: application/yaml
+          version: v0.10.2
+      - name: Expose Artifact API and context information.
+        uses: actions/github-script@v6
+        with:
+          script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+        run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
+        shell: cmd
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+        run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
+        shell: bash
+      - name: Checking out the repository
+        uses: actions/checkout@v2
+        with:
+          clean: false
+          submodules: recursive
+      - name: Build Script Setup
+        run: ./run --help
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+      - run: ./run ide upload --wasm-source current-ci-run --backend-source release --backend-release ${{env.ENSO_RELEASE_ID}}
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+          WIN_CSC_KEY_PASSWORD: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT_PASSWORD }}
+          WIN_CSC_LINK: ${{ secrets.MICROSOFT_CODE_SIGNING_CERT }}
+        timeout-minutes: 360
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
+        run: Get-ChildItem -Force -Recurse
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
+        run: ls -lAR
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
+        run: ./run git-clean
+        env:
+          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
+    env:
+      ENSO_RELEASE_ID: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_RELEASE_ID}}
+      ENSO_VERSION: ${{needs.enso-build-cli-ci-gen-draft-release-linux.outputs.ENSO_VERSION}}
+env:
+  ENSO_BUILD_KIND: rc
+  ENSO_BUILD_SKIP_VERSION_CHECK: "true"
+  RUST_BACKTRACE: full
+concurrency: release
diff --git a/.github/workflows/scala-new.yml b/.github/workflows/scala-new.yml
index b6934c46519d..d3aab579dc68 100644
--- a/.github/workflows/scala-new.yml
+++ b/.github/workflows/scala-new.yml
@@ -22,28 +22,28 @@ jobs:
       - Linux
       - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -54,70 +54,74 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run backend ci-check
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Engine Test Reporter
+        timeout-minutes: 360
+      - if: success() || failure()
+        name: Engine Test Reporter
         uses: dorny/test-reporter@v1
-        if: success() || failure()
         with:
           max-annotations: 50
           name: Engine Tests (linux)
           path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml
           path-replace-backslashes: true
           reporter: java-junit
-      - name: Standard Library Test Reporter
+      - if: success() || failure()
+        name: Standard Library Test Reporter
         uses: dorny/test-reporter@v1
-        if: success() || failure()
         with:
           max-annotations: 50
           name: Standard Library Tests (linux)
           path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml
           path-replace-backslashes: true
           reporter: java-junit
-      - name: List files if failed (Windows)
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-ci-check-backend-macos:
     name: Engine (macos)
     runs-on:
       - macos-latest
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -128,43 +132,47 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
         run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run backend ci-check
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Engine Test Reporter
+        timeout-minutes: 360
+      - if: success() || failure()
+        name: Engine Test Reporter
         uses: dorny/test-reporter@v1
-        if: success() || failure()
         with:
           max-annotations: 50
           name: Engine Tests (macos)
           path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml
           path-replace-backslashes: true
           reporter: java-junit
-      - name: Standard Library Test Reporter
+      - if: success() || failure()
+        name: Standard Library Test Reporter
         uses: dorny/test-reporter@v1
-        if: success() || failure()
         with:
           max-annotations: 50
           name: Standard Library Tests (macos)
           path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml
           path-replace-backslashes: true
           reporter: java-junit
-      - name: List files if failed (Windows)
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
   enso-build-cli-ci-gen-job-ci-check-backend-windows:
     name: Engine (windows)
     runs-on:
@@ -172,28 +180,28 @@ jobs:
       - Windows
       - engine
     steps:
-      - name: Setup conda (GH runners only)
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Setup conda (GH runners only)
         uses: s-weigand/setup-conda@v1.0.5
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           update-conda: false
           conda-channels: anaconda, conda-forge
-      - name: Installing wasm-pack
+      - if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
+        name: Installing wasm-pack
         uses: jetli/wasm-pack-action@v0.3.0
-        if: startsWith(runner.name, 'GitHub Actions') || startsWith(runner.name, 'Hosted Agent')
         with:
           version: v0.10.2
       - name: Expose Artifact API and context information.
         uses: actions/github-script@v6
         with:
           script: "\n core.exportVariable(\"ACTIONS_RUNTIME_TOKEN\", process.env[\"ACTIONS_RUNTIME_TOKEN\"])\n core.exportVariable(\"ACTIONS_RUNTIME_URL\", process.env[\"ACTIONS_RUNTIME_URL\"])\n core.exportVariable(\"GITHUB_RETENTION_DAYS\", process.env[\"GITHUB_RETENTION_DAYS\"])\n console.log(context)\n "
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
+      - if: runner.os == 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (Windows)
         run: '"c:\Program Files\Git\bin\bash.exe" -c "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"'
-        if: runner.os == 'Windows'
         shell: cmd
-      - name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
+      - if: runner.os != 'Windows'
+        name: Workaround for https://github.com/actions/checkout/issues/590 (non-Windows)
         run: "git checkout -f $(git -c user.name=x -c user.email=x@x commit-tree $(git hash-object -t tree /dev/null) < /dev/null) || :"
-        if: runner.os != 'Windows'
         shell: bash
       - name: Checking out the repository
         uses: actions/checkout@v2
@@ -204,42 +212,46 @@ jobs:
         run: ./run --help
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Clean before
+        timeout-minutes: 360
+      - if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean before
        run: ./run git-clean
-        if: "contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
       - run: ./run backend ci-check
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-      - name: Engine Test Reporter
+        timeout-minutes: 360
+      - if: success() || failure()
+        name: Engine Test Reporter
         uses: dorny/test-reporter@v1
-        if: success() || failure()
         with:
           max-annotations: 50
           name: Engine Tests (windows)
           path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*.xml
           path-replace-backslashes: true
           reporter: java-junit
-      - name: Standard Library Test Reporter
+      - if: success() || failure()
+        name: Standard Library Test Reporter
         uses: dorny/test-reporter@v1
-        if: success() || failure()
         with:
           max-annotations: 50
           name: Standard Library Tests (windows)
           path: ${{ env.ENSO_TEST_JUNIT_DIR }}/*/*.xml
           path-replace-backslashes: true
           reporter: java-junit
-      - name: List files if failed (Windows)
+      - if: failure() && runner.os == 'Windows'
+        name: List files if failed (Windows)
         run: Get-ChildItem -Force -Recurse
-        if: failure() && runner.os == 'Windows'
-      - name: List files if failed (non-Windows)
+      - if: failure() && runner.os != 'Windows'
+        name: List files if failed (non-Windows)
         run: ls -lAR
-        if: failure() && runner.os != 'Windows'
-      - name: Clean after
+      - if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
+        name: Clean after
         run: ./run git-clean
-        if: "always() && always() && contains(github.event.pull_request.labels.*.name, 'CI: Clean build required')"
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        timeout-minutes: 360
 env:
   ENSO_BUILD_SKIP_VERSION_CHECK: "true"
diff --git a/.gitignore b/.gitignore
index 7bf38a7012a8..3e4ab5a00ea5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -24,7 +24,12 @@ wasm-pack.log
 generated/
 /target
 /build/rust/target/
-dist
+
+###########
+## Scala ##
+###########
+
+.metals/
 
 ############
 ## NodeJS ##
@@ -119,7 +124,7 @@ build-cache/
 ######################
 ## Enso-Development ##
 ######################
-
+/dist
 distribution/lib/Standard/Examples/*/data/scratch_file
 distribution/lib/Standard/Examples/*/data/image.png
 distribution/editions
diff --git a/Cargo.lock b/Cargo.lock
index 59dc250e2441..548c7a8c30bb 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -39,11 +39,22 @@ dependencies = [
  "opaque-debug 0.3.0",
 ]
 
+[[package]]
+name = "ahash"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fcb51a0695d8f838b1ee009b3fbf66bda078cd64590202a864a8f3e8c4315c47"
+dependencies = [
+ "getrandom 0.2.7",
+ "once_cell",
+ "version_check 0.9.4",
+]
+
 [[package]]
 name = "aho-corasick"
-version = "0.7.18"
+version = "0.7.19"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e37cfd5e7657ada45f742d6e99ca5788580b5c529dc78faf11ece6dc702656f"
+checksum = "b4f55bd91a0978cbfd91c457a164bab8b4001c833b7f323132c0a4e1922dd44e"
 dependencies = [
  "memchr",
 ]
@@ -62,6 +73,15 @@ dependencies = [
  "wasm-bindgen",
 ]
 
+[[package]]
+name = "android_system_properties"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "819e7219dbd41043ac279b19830f2efc897156490d7fd6ea916720117ee66311"
+dependencies = [
+ "libc",
+]
+
 [[package]]
 name = "ansi_term"
 version = "0.12.1"
@@ -106,9 +126,9 @@ dependencies = [
 
 [[package]]
 name = "arc-swap"
-version = "1.5.0"
+version = "1.5.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c5d78ce20460b82d3fa150275ed9d55e21064fc7951177baacf86a145c4a4b1f"
+checksum = "983cd8b9d4b02a6dc6ffa557262eb5858a27a0038ffffe21a0f133eaa819a164"
 
 [[package]]
 name = "ascii"
@@ -166,9 +186,9 @@ dependencies = [
 
 [[package]]
 name = "async-channel"
-version = "1.6.1"
+version = "1.7.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2114d64672151c0c5eaa5e131ec84a74f06e1e559830dabba01ca30605d66319"
+checksum = "e14485364214912d3b19cc3435dde4df66065127f05fa0d75c712f36f12c2f28"
 dependencies = [
  "concurrent-queue",
  "event-listener",
@@ -204,26 +224,26 @@ dependencies = [
 
 [[package]]
 name = "async-global-executor"
-version = "2.0.4"
+version = "2.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c290043c9a95b05d45e952fb6383c67bcb61471f60cfa21e890dba6654234f43"
+checksum = "0da5b41ee986eed3f524c380e6d64965aea573882a8907682ad100f7859305ca"
 dependencies = [
  "async-channel",
  "async-executor",
  "async-io",
- "async-mutex",
+ "async-lock",
  "blocking",
  "futures-lite",
- "num_cpus",
 "once_cell",
 ]
 
 [[package]]
 name = "async-io"
-version = "1.7.0"
+version = "1.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e5e18f61464ae81cde0a23e713ae8fd299580c54d697a35820cfd0625b8b0e07"
+checksum = "83e21f3a490c72b3b0cf44962180e60045de2925d8dff97918f7ee43c8f637c7"
 dependencies = [
+ "autocfg 1.1.0",
  "concurrent-queue",
  "futures-lite",
  "libc",
@@ -246,26 +266,17 @@ dependencies = [
  "event-listener",
 ]
 
-[[package]]
-name = "async-mutex"
-version = "1.4.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "479db852db25d9dbf6204e6cb6253698f175c15726470f78af0d918e99d6156e"
-dependencies = [
- "event-listener",
-]
-
 [[package]]
 name = "async-std"
-version = "1.11.0"
+version = "1.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "52580991739c5cdb36cde8b2a516371c0a3b70dda36d916cc08b82372916808c"
+checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d"
 dependencies = [
  "async-channel",
  "async-global-executor",
  "async-io",
  "async-lock",
- "crossbeam-utils 0.8.8",
+ "crossbeam-utils 0.8.11",
  "futures-channel",
  "futures-core",
  "futures-io",
@@ -274,7 +285,6 @@ dependencies = [
  "kv-log-macro",
  "log 0.4.17",
  "memchr",
- "num_cpus",
 "once_cell",
  "pin-project-lite",
  "pin-utils",
@@ -305,15 +315,15 @@ dependencies = [
 
 [[package]]
 name = "async-task"
-version = "4.2.0"
+version = "4.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30696a84d817107fc028e049980e09d5e140e8da8f1caeb17e8e950658a3cea9"
+checksum = "7a40729d2133846d9ed0ea60a8b9541bccddab49cd30f0715a1da672fe9a2524"
 
 [[package]]
 name = "async-trait"
-version = "0.1.53"
+version = "0.1.57"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ed6aa3524a2dfcf9fe180c51eae2b58738348d819517ceadf95789c51fff7600"
+checksum = "76464446b8bc32758d7e88ee1a804d9914cd9b1cb264c029899680b0be29826f"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -377,9 +387,9 @@ dependencies = [
  "bytes 1.1.0",
  "hex",
  "http",
- "hyper 0.14.18",
+ "hyper 0.14.20",
  "ring",
- "time 0.3.9",
+ "time 0.3.14",
  "tokio",
  "tower",
  "tracing",
@@ -542,7 +552,7 @@ dependencies = [
  "percent-encoding 2.1.0",
  "regex",
  "ring",
- "time 0.3.9",
+ "time 0.3.14",
  "tracing",
 ]
 
@@ -574,7 +584,7 @@ dependencies = [
  "http-body",
  "md-5",
  "pin-project-lite",
- "sha1 0.10.1",
+ "sha1 0.10.5",
  "sha2",
  "tracing",
 ]
@@ -593,7 +603,7 @@ dependencies = [
  "fastrand",
  "http",
  "http-body",
- "hyper 0.14.18",
+ "hyper 0.14.20",
  "hyper-rustls 0.22.1",
  "lazy_static",
  "pin-project-lite",
@@ -626,7 +636,7 @@ dependencies = [
  "futures-core",
  "http",
  "http-body",
- "hyper 0.14.18",
+ "hyper 0.14.20",
  "once_cell",
  "percent-encoding 2.1.0",
  "pin-project-lite",
@@ -675,10 +685,10 @@ version = "0.49.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e93b0c93a3b963da946a0b8ef3853a7252298eb75cdbfb21dad60f5ed0ded861"
 dependencies = [
- "itoa 1.0.2",
+ "itoa 1.0.3",
  "num-integer",
  "ryu",
- "time 0.3.9",
+ "time 0.3.14",
 ]
 
 [[package]]
@@ -708,9 +718,9 @@ dependencies = [
 
 [[package]]
 name = "axum"
-version = "0.5.17"
+version = "0.5.16"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "acee9fd5073ab6b045a275b3e709c163dd36c90685219cb21804a147b58dba43"
+checksum = "c9e3356844c4d6a6d6467b8da2cffb4a2820be256f50a3a386c9d152bab31043"
 dependencies = [
  "async-trait",
  "axum-core",
@@ -719,8 +729,8 @@ dependencies = [
  "futures-util",
  "http",
  "http-body",
- "hyper 0.14.18",
- "itoa 1.0.2",
+ "hyper 0.14.20",
+ "itoa 1.0.3",
  "matchit",
  "memchr",
  "mime 0.3.16",
@@ -737,9 +747,9 @@ dependencies = [
 
 [[package]]
 name = "axum-core"
-version = "0.2.9"
+version = "0.2.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "37e5939e02c56fecd5c017c37df4238c0a839fa76b7f97acdd7efb804fd181cc"
+checksum = "d9f0c0a60006f2a293d82d571f635042a72edf927539b7685bd62d361963839b"
 dependencies = [
  "async-trait",
  "bytes 1.1.0",
@@ -753,16 +763,16 @@ dependencies = [
 
 [[package]]
 name = "backtrace"
-version = "0.3.65"
+version = "0.3.66"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "11a17d453482a265fd5f8479f2a3f405566e6ca627837aaddb85af8b1ab8ef61"
+checksum = "cab84319d616cfb654d03394f38ab7e6f0919e181b1b57e1fd15e7fb4077d9a7"
 dependencies = [
  "addr2line",
  "cc",
  "cfg-if 1.0.0",
  "libc",
  "miniz_oxide",
- "object 0.28.4",
+ "object 0.29.0",
  "rustc-demangle",
 ]
 
@@ -785,12 +795,6 @@ dependencies = [
  "byteorder",
 ]
 
-[[package]]
-name = "base64"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b41b7ea54a0c9d92199de89e20e58d49f02f8e699814ef3fdf266f6f748d15c7"
-
 [[package]]
 name = "base64"
 version = "0.13.0"
@@ -865,11 +869,11 @@ dependencies = [
 
 [[package]]
 name = "block-buffer"
-version = "0.10.2"
+version = "0.10.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0bf7fe51849ea569fd452f37822f606a5cabb684dc918707a0193fd4664ff324"
+checksum = "69cce20737498f97b993470a6e536b8523f0af7892a4f928cceb1ac5e52ebe7e"
 dependencies = [
- "generic-array 0.14.5",
+ "generic-array 0.14.6",
 ]
 
@@ -933,9 +937,9 @@ dependencies = [
 
 [[package]]
 name = "bumpalo"
-version = "3.9.1"
+version = "3.11.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4a45a46ab1f2412e53d3a0ade76ffad2025804294569aae387231a0cd6e0899"
+checksum = "c1ad822118d20d2c234f427000d5acc36eabe1e29a348c89b63dd60b13f28e5d"
 
 [[package]]
 name = "byte-tools"
@@ -989,9 +993,9 @@ checksum = "c4872d67bab6358e59559027aa3b9157c53d9358c51423c17554809a8858e0f8"
 
 [[package]]
 name = "bytes-utils"
-version = "0.1.2"
+version = "0.1.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1934a3ef9cac8efde4966a92781e77713e1ba329f1d42e446c7d7eba340d8ef1"
+checksum = "e47d3a8076e283f3acd27400535992edb3ba4b5bb72f8891ad8fbe7932a7d4b9"
 dependencies = [
  "bytes 1.1.0",
  "either",
@@ -1034,7 +1038,7 @@ dependencies = [
  "async_once",
  "cached_proc_macro",
  "cached_proc_macro_types",
- "futures 0.3.21",
+ "futures 0.3.24",
  "hashbrown",
  "instant",
  "lazy_static",
@@ -1063,12 +1067,9 @@ checksum = "3a4f925191b4367301851c6d99b09890311d74b0d43f274c0b34c86d308a3663"
 
 [[package]]
 name = "cast"
-version = "0.2.7"
+version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4c24dab4283a142afa2fdca129b80ad2c6284e073930f964c3a1293c225ee39a"
-dependencies = [
- "rustc_version 0.4.0",
-]
+checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
 
 [[package]]
 name = "cc"
@@ -1099,15 +1100,17 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "chrono"
-version = "0.4.19"
+version = "0.4.22"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "670ad68c9088c2a963aaa298cb369688cf3f9465ce5e2d4ca10e6e0098a1ce73"
+checksum = "bfd4d1b31faaa3a89d7934dbded3111da0d2ef28e3ebccdb4f0179f5929d1ef1"
 dependencies = [
- "libc",
+ "iana-time-zone",
+ "js-sys",
  "num-integer",
  "num-traits",
  "serde",
  "time 0.1.44",
+ "wasm-bindgen",
  "winapi 0.3.9",
 ]
 
@@ -1117,7 +1120,7 @@ version = "0.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7ee52072ec15386f770805afd189a01c8841be8696bed250fa2f13c4c0d6dfb7"
 dependencies = [
- "generic-array 0.14.5",
+ "generic-array 0.14.6",
 ]
 
@@ -1133,9 +1136,9 @@ dependencies = [
 
 [[package]]
 name = "clap"
-version = "3.1.18"
+version = "3.1.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d2dbdf4bdacb33466e854ce889eee8dfd5729abf7ccd7664d0a2d60cd384440b"
+checksum = "85a35a599b11c089a7f49105658d089b8f2cf0882993c17daf6de15285c2c35d"
 dependencies = [
  "atty",
  "bitflags",
@@ -1151,9 +1154,9 @@ dependencies = [
 
 [[package]]
 name = "clap_derive"
-version = "3.1.18"
+version = "3.1.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "25320346e922cffe59c0bbc5410c8d8784509efb321488971081313cb1e1a33c"
+checksum = "a3aab4734e083b809aaf5794e14e756d1c798d2c69c7f7de7a09a2f5214993c1"
 dependencies = [
  "heck",
  "proc-macro-error",
@@ -1164,9 +1167,9 @@ dependencies = [
 
 [[package]]
 name = "clap_lex"
-version = "0.2.0"
+version = "0.2.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a37c35f1112dad5e6e0b1adaff798507497a18fceeb30cceb3bae7d1427b9213"
+checksum = "2850f2f5a82cbf437dd5af4d49848fbdfc27c157c3d010345776f952765261c5"
 dependencies = [
  "os_str_bytes",
 ]
 
@@ -1213,9 +1216,9 @@ dependencies = [
 
 [[package]]
 name = "combine"
-version = "4.6.4"
+version = "4.6.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2a604e93b79d1808327a6fca85a6f2d69de66461e7620f5a4cbf5fb4d1d7c948"
+checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4"
 dependencies = [
  "bytes 1.1.0",
  "memchr",
@@ -1223,9 +1226,9 @@ dependencies = [
 
 [[package]]
 name = "concurrent-queue"
-version = "1.2.2"
+version = "1.2.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "30ed07550be01594c6026cff2a1d7fe9c8f683caa798e12b68694ac9e88286a3"
+checksum = "af4780a44ab5696ea9e28294517f1fffb421a83a25af521333c838635509db9c"
 dependencies = [
  "cache-padded",
 ]
 
@@ -1236,19 +1239,18 @@ version = "0.1.0"
 dependencies = [
  "Inflector",
  "serde",
- "serde_yaml 0.8.24",
+ "serde_yaml 0.8.26",
 ]
 
 [[package]]
 name = "console"
-version = "0.15.0"
+version = "0.15.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a28b32d32ca44b70c3e4acd7db1babf555fa026e385fb95f18028f88848b3c31"
+checksum = "89eab4d20ce20cea182308bca13088fecea9c05f6776cf287205d41a0ed3c847"
 dependencies = [
  "encode_unicode",
  "libc",
  "once_cell",
- "regex",
 "terminal_size",
  "unicode-width",
  "winapi 0.3.9",
 ]
 
@@ -1274,8 +1276,8 @@ checksum = "e933c43a5db3779b3600cdab18856af2411ca2237e33ba8ab476d5d5b1a6c1e7"
 dependencies = [
  "console-api",
  "crossbeam-channel",
- "crossbeam-utils 0.8.8",
- "futures 0.3.21",
+ "crossbeam-utils 0.8.11",
+ "futures 0.3.24",
  "hdrhistogram",
  "humantime 2.1.0",
  "prost-types",
@@ -1301,9 +1303,9 @@ dependencies = [
 
 [[package]]
 name = "const_format"
-version = "0.2.23"
+version = "0.2.26"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0936ffe6d0c8d6a51b3b0a73b2acbe925d786f346cf45bfddc8341d79fb7dc8a"
+checksum = "939dc9e2eb9077e0679d2ce32de1ded8531779360b003b4a972a7a39ec263495"
 dependencies = [
  "const_format_proc_macros",
 ]
 
@@ -1364,9 +1366,9 @@ checksum = "5827cebf4670468b8772dd191856768aedcb1b0278a04f989f7766351917b9dc"
 
 [[package]]
 name = "cpufeatures"
-version = "0.2.2"
+version = "0.2.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "59a6001667ab124aebae2a495118e11d30984c3a653e99d86d58971708cf5e4b"
+checksum = "28d997bd5e24a5928dd43e46dc529867e207907fe0b239c3477d924f7f2ca320"
 dependencies = [
  "libc",
 ]
 
@@ -1391,16 +1393,16 @@ dependencies = [
 
 [[package]]
 name = "criterion"
-version = "0.3.5"
+version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1604dafd25fba2fe2d5895a9da139f8dc9b319a5fe5354ca137cbbce4e178d10" +checksum = "b01d6de93b2b6c65e17c634a26653a29d107b3c98c607c765bf38d041531cd8f" dependencies = [ "atty", "cast", "clap 2.34.0", "criterion-plot", "csv", - "itertools 0.10.3", + "itertools 0.10.5", "lazy_static", "num-traits", "oorandom", @@ -1417,12 +1419,12 @@ dependencies = [ [[package]] name = "criterion-plot" -version = "0.4.4" +version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d00996de9f2f7559f7f4dc286073197f83e92256a59ed395f9aac01fe717da57" +checksum = "2673cc8207403546f45f5fd319a974b1e6983ad1a3ee7e6041650013be041876" dependencies = [ "cast", - "itertools 0.10.3", + "itertools 0.10.5", ] [[package]] @@ -1438,36 +1440,36 @@ dependencies = [ [[package]] name = "crossbeam-channel" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5aaa7bd5fb665c6864b5f963dd9097905c54125909c7aa94c9e18507cdbe6c53" +checksum = "c2dd04ddaf88237dc3b8d8f9a3c1004b506b54b3313403944054d23c0870c521" dependencies = [ "cfg-if 1.0.0", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.11", ] [[package]] name = "crossbeam-deque" -version = "0.8.1" +version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6455c0ca19f0d2fbf751b908d5c55c1f5cbc65e03c4225427254b46890bdde1e" +checksum = "715e8152b692bba2d374b53d4875445368fdf21a94751410af607a5ac677d1fc" dependencies = [ "cfg-if 1.0.0", "crossbeam-epoch", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.11", ] [[package]] name = "crossbeam-epoch" -version = "0.9.8" +version = "0.9.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1145cf131a2c6ba0615079ab6a638f7e1973ac9c2634fcbeaaad6114246efe8c" +checksum = "045ebe27666471bb549370b4b0b3e51b07f56325befa4284db65fc89c02511b1" dependencies = [ "autocfg 1.1.0", "cfg-if 1.0.0", - "crossbeam-utils 0.8.8", - "lazy_static", + "crossbeam-utils 0.8.11", "memoffset", + "once_cell", "scopeguard", ] @@ -1484,21 +1486,21 @@ dependencies = [ [[package]] name = "crossbeam-utils" -version = "0.8.8" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0bf124c720b7686e3c2663cf54062ab0f68a88af2fb6a030e87e30bf721fcb38" +checksum = "51887d4adc7b564537b15adcfb307936f8075dfcd5f00dde9a9f1d29383682bc" dependencies = [ "cfg-if 1.0.0", - "lazy_static", + "once_cell", ] [[package]] name = "crypto-common" -version = "0.1.3" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57952ca27b5e3606ff4dd79b0020231aaf9d6aa76dc05fd30137538c50bd3ce8" +checksum = "1bfb12502f3fc46cca1bb51ac28df9d618d813cdc3d2f25b9fe775a34af26bb3" dependencies = [ - "generic-array 0.14.5", + "generic-array 0.14.6", "typenum", ] @@ -1535,9 +1537,9 @@ dependencies = [ [[package]] name = "ctor" -version = "0.1.22" +version = "0.1.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f877be4f7c9f246b183111634f75baa039715e3f46ce860677d3b19a69fb229c" +checksum = "cdffe87e1d521a10f9696f833fe502293ea446d7f256c06128293a4119bdf4cb" dependencies = [ "quote", "syn", @@ -1702,11 +1704,11 @@ dependencies = [ [[package]] name = "digest" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2fb860ca6fafa5552fb6d0e816a69c8e49f0908bf524e30a90d97c85892d506" +checksum = "adfbc57365a37acbd2ebf2b64d7e69bb766e2fea813521ed536f5d0520dcf86c" dependencies = [ - "block-buffer 
0.10.2", + "block-buffer 0.10.3", "crypto-common", "subtle", ] @@ -1750,7 +1752,7 @@ dependencies = [ "enso-profiler", "enso-text", "failure", - "itertools 0.10.3", + "itertools 0.10.5", "parser-scala", "regex", "serde", @@ -1766,9 +1768,9 @@ checksum = "4bb454f0228b18c7f4c3b0ebbee346ed9c52e7443b0999cd543ff3571205701d" [[package]] name = "either" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f107b87b6afc2a64fd13cac55fe06d6c8859f12d4b14cbcdd2c67d0976781be" +checksum = "90e5c1c8368803113bf0c9584fc495a58b86dc8a29edbf8fe877d21d9507e797" [[package]] name = "enclose" @@ -1807,7 +1809,7 @@ dependencies = [ "failure", "flatbuffers", "flatc-rust", - "futures 0.3.21", + "futures 0.3.24", "hex", "json-rpc", "mockall", @@ -1847,16 +1849,17 @@ dependencies = [ "cached", "cfg-if 1.0.0", "chrono", - "clap 3.1.18", + "clap 3.1.15", "console-subscriber", "derivative", "derive_more", "dirs", + "enso-build-base", "filetime", "flate2", "flume", "fs_extra", - "futures 0.3.21", + "futures 0.3.24", "futures-util", "glob", "heck", @@ -1864,15 +1867,16 @@ dependencies = [ "ide-ci", "indexmap", "indicatif", - "itertools 0.10.3", + "itertools 0.10.5", "lazy_static", - "log 0.4.17", "mime 0.3.16", + "new_mime_guess", "nix", "octocrab", "ouroboros", - "paste 1.0.7", + "paste 1.0.9", "path-absolutize", + "path-slash", "pin-project", "platforms", "port_check", @@ -1882,10 +1886,10 @@ dependencies = [ "regex", "reqwest", "scopeguard", - "semver 1.0.9", + "semver 1.0.14", "serde", "serde_json", - "serde_yaml 0.9.10", + "serde_yaml 0.9.14", "shrinkwraprs 0.3.0", "strum", "sysinfo", @@ -1896,7 +1900,7 @@ dependencies = [ "tracing", "tracing-subscriber", "unicase 2.6.0", - "url 2.2.2", + "url 2.3.0", "uuid 1.1.2", "walkdir", "which", @@ -1904,6 +1908,20 @@ dependencies = [ "zip 0.6.2", ] +[[package]] +name = "enso-build-base" +version = "0.1.0" +dependencies = [ + "anyhow", + "fn-error-context", + "futures 0.3.24", + "futures-util", + "serde", + "serde_json", + "serde_yaml 0.9.14", + "tracing", +] + [[package]] name = "enso-build-cli" version = "0.1.0" @@ -1911,11 +1929,12 @@ dependencies = [ "anyhow", "byte-unit", "chrono", - "clap 3.1.18", + "clap 3.1.15", "derivative", "enso-build", + "enso-build-base", "enso-formatter", - "futures 0.3.21", + "futures 0.3.24", "futures-util", "glob", "humantime 2.1.0", @@ -1923,7 +1942,7 @@ dependencies = [ "octocrab", "serde", "serde_json", - "serde_yaml 0.9.10", + "serde_yaml 0.9.14", "strum", "tempfile", "tokio", @@ -1932,6 +1951,21 @@ dependencies = [ "tracing-subscriber", ] +[[package]] +name = "enso-build-macros" +version = "0.1.0" +dependencies = [ + "convert_case 0.6.0", + "enso-build-base", + "itertools 0.10.5", + "proc-macro2", + "quote", + "regex", + "serde_yaml 0.9.14", + "shrinkwraprs 0.3.0", + "syn", +] + [[package]] name = "enso-build-utilities" version = "0.1.0" @@ -1957,7 +1991,7 @@ dependencies = [ "enso-logger", "enso-prelude", "ensogl", - "semver 1.0.9", + "semver 1.0.14", ] [[package]] @@ -1978,7 +2012,7 @@ name = "enso-debug-api" version = "0.1.0" dependencies = [ "derivative", - "futures 0.3.21", + "futures 0.3.24", "js-sys", "wasm-bindgen", "web-sys", @@ -2063,17 +2097,17 @@ dependencies = [ "ensogl-text-msdf", "failure", "flo_stream", - "futures 0.3.21", + "futures 0.3.24", "fuzzly", "ide-view", - "itertools 0.10.3", + "itertools 0.10.5", "js-sys", "json-rpc", "mockall", "nalgebra 0.26.2", "parser-scala", "regex", - "semver 1.0.9", + "semver 1.0.14", "serde", "serde_json", "sha3", @@ -2227,19 
+2261,19 @@ dependencies = [ "enso-shapely", "enso-web", "failure", - "futures 0.3.21", + "futures 0.3.24", "gen-iter", "ifmt", - "itertools 0.10.3", + "itertools 0.10.5", "lazy_static", "nalgebra 0.26.2", "num", "object 0.24.0", - "paste 1.0.7", + "paste 1.0.9", "serde", "serde_json", "shrinkwraprs 0.3.0", - "smallvec 1.8.0", + "smallvec 1.9.0", "tracing", "tracing-subscriber", "tracing-wasm", @@ -2255,7 +2289,7 @@ version = "0.1.0" dependencies = [ "enso-profiler-macros", "enso-web", - "futures 0.3.21", + "futures 0.3.24", "serde", "serde_json", "wasm-bindgen", @@ -2268,7 +2302,7 @@ dependencies = [ "derivative", "enso-prelude", "enso-profiler", - "futures 0.3.21", + "futures 0.3.24", "serde", "serde_json", ] @@ -2278,7 +2312,7 @@ name = "enso-profiler-demo-data" version = "0.1.0" dependencies = [ "enso-profiler", - "futures 0.3.21", + "futures 0.3.24", ] [[package]] @@ -2300,7 +2334,7 @@ version = "0.1.0" dependencies = [ "enso-profiler", "enso-profiler-data", - "futures 0.3.21", + "futures 0.3.24", ] [[package]] @@ -2394,7 +2428,7 @@ dependencies = [ "enso-reflect", "nalgebra 0.26.2", "num-traits", - "paste 1.0.7", + "paste 1.0.9", "serde", ] @@ -2477,16 +2511,16 @@ dependencies = [ "ensogl-text-embedded-fonts", "enum_dispatch", "failure", - "itertools 0.10.3", + "itertools 0.10.5", "js-sys", "nalgebra 0.26.2", "num-traits", "num_enum", "rustc-hash", - "semver 1.0.9", + "semver 1.0.14", "serde", "shrinkwraprs 0.3.0", - "smallvec 1.8.0", + "smallvec 1.9.0", "typenum", "wasm-bindgen", "wasm-bindgen-test", @@ -2599,7 +2633,7 @@ dependencies = [ "ensogl-grid-view", "ensogl-hardcoded-theme", "ensogl-text-msdf", - "itertools 0.10.3", + "itertools 0.10.5", "wasm-bindgen", ] @@ -2644,10 +2678,10 @@ dependencies = [ "ensogl-text", "ensogl-text-msdf", "ensogl-tooltip", - "futures 0.3.21", + "futures 0.3.24", "qstring", "serde", - "url 2.2.2", + "url 2.3.0", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -2668,7 +2702,7 @@ dependencies = [ "ensogl-text", "ensogl-text-msdf", "ensogl-tooltip", - "futures 0.3.21", + "futures 0.3.24", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -2781,7 +2815,7 @@ dependencies = [ "ensogl-scroll-area", "ensogl-shadow", "ensogl-text", - "itertools 0.10.3", + "itertools 0.10.5", "segment-tree", ] @@ -2940,7 +2974,7 @@ dependencies = [ "ensogl-text-embedded-fonts", "ensogl-text-font-family", "failure", - "futures 0.3.21", + "futures 0.3.24", "ide-ci", "js-sys", "nalgebra 0.26.2", @@ -2996,9 +3030,9 @@ dependencies = [ [[package]] name = "event-listener" -version = "2.5.2" +version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77f3309417938f28bf8228fcff79a4a37103981e3e186d2ccd19c74b38f4eb71" +checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" [[package]] name = "failure" @@ -3024,23 +3058,23 @@ dependencies = [ [[package]] name = "fastrand" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3fcf0cee53519c866c09b5de1f6c56ff9d647101f81c1964fa632e148896cdf" +checksum = "a7a407cfaa3385c4ae6b23e84623d48c2798d06e3e6a1878f7f59f17b3f86499" dependencies = [ "instant", ] [[package]] name = "filetime" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0408e2626025178a6a7f7ffc05a25bc47103229f19c113755de7bf63816290c" +checksum = "e94a7bbaa59354bc20dd75b67f23e2797b4490e9d6928203fb105c79e448c86c" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.2.13", - 
"winapi 0.3.9", + "redox_syscall 0.2.16", + "windows-sys", ] [[package]] @@ -3063,13 +3097,11 @@ dependencies = [ [[package]] name = "flate2" -version = "1.0.23" +version = "1.0.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b39522e96686d38f4bc984b9198e3a0613264abaebaff2c5c918bfa6b6da09af" +checksum = "f82b0f4c27ad9f8bfd1f3208d882da2b09c301bc1c828fd3a00d0216d2fbbff6" dependencies = [ - "cfg-if 1.0.0", "crc32fast", - "libc", "miniz_oxide", ] @@ -3079,8 +3111,8 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b02e0d3667b27514149c1ac9b372d700f3e6df4bbaf6b7c5df12915de2996049" dependencies = [ - "futures 0.3.21", - "smallvec 1.8.0", + "futures 0.3.24", + "smallvec 1.9.0", ] [[package]] @@ -3100,15 +3132,15 @@ checksum = "fb23b6902f3cdc0544f9916b4c092f46f4ff984e219d5a0c538b6b3539885af3" [[package]] name = "flume" -version = "0.10.12" +version = "0.10.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "843c03199d0c0ca54bc1ea90ac0d507274c28abcc4f691ae8b4eaa375087c76a" +checksum = "1657b4441c3403d9f7b3409e47575237dac27b1b5726df654a6ecbf92f0f7577" dependencies = [ "futures-core", "futures-sink", "nanorand", "pin-project", - "spin 0.9.3", + "spin 0.9.4", ] [[package]] @@ -3155,9 +3187,9 @@ dependencies = [ [[package]] name = "fragile" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9d758e60b45e8d749c89c1b389ad8aee550f86aa12e2b9298b546dda7a82ab1" +checksum = "85dcb89d2b10c5f6133de2efd8c11959ce9dbb46a2f7a4cab208c4eeda6ce1ab" [[package]] name = "fs_extra" @@ -3195,9 +3227,9 @@ checksum = "3a471a38ef8ed83cd6e40aa59c1ffe17db6855c18e3604d9c4ed8c08ebc28678" [[package]] name = "futures" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f73fe65f54d1e12b726f517d3e2135ca3125a437b6d998caf1962961f7172d9e" +checksum = "7f21eda599937fba36daeb58a22e8f5cee2d14c4a17b5b7739c7c8e5e3b8230c" dependencies = [ "futures-channel", "futures-core", @@ -3210,9 +3242,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3083ce4b914124575708913bca19bfe887522d6e2e6d0952943f5eac4a74010" +checksum = "30bdd20c28fadd505d0fd6712cdfcb0d4b5648baf45faef7f852afb2399bb050" dependencies = [ "futures-core", "futures-sink", @@ -3220,15 +3252,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c09fd04b7e4073ac7156a9539b57a484a8ea920f79c7c675d05d289ab6110d3" +checksum = "4e5aa3de05362c3fb88de6531e6296e85cde7739cccad4b9dfeeb7f6ebce56bf" [[package]] name = "futures-executor" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9420b90cfa29e327d0429f19be13e7ddb68fa1cccb09d65e5706b8c7a749b8a6" +checksum = "9ff63c23854bee61b6e9cd331d523909f238fc7636290b96826e9cfa5faa00ab" dependencies = [ "futures-core", "futures-task", @@ -3237,9 +3269,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc4045962a5a5e935ee2fdedaa4e08284547402885ab326734432bed5d12966b" +checksum = "bbf4d2a7a308fd4578637c0b17c7e1c7ba127b8f6ba00b29f717e9655d85eb68" [[package]] name = "futures-lite" @@ -3258,9 
+3290,9 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c1e13800337f4d4d7a316bf45a567dbcb6ffe087f16424852d97e97a91f512" +checksum = "42cd15d1c7456c04dbdf7e88bcd69760d74f3a798d6444e16974b505b0e62f17" dependencies = [ "proc-macro2", "quote", @@ -3269,15 +3301,15 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21163e139fa306126e6eedaf49ecdb4588f939600f0b1e770f4205ee4b7fa868" +checksum = "21b20ba5a92e727ba30e72834706623d94ac93a725410b6a6b6fbc1b07f7ba56" [[package]] name = "futures-task" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57c66a976bf5909d801bbef33416c41372779507e7a6b3a5e25e4749c58f776a" +checksum = "a6508c467c73851293f390476d4491cf4d227dbabcd4170f3bb6044959b294f1" [[package]] name = "futures-timer" @@ -3287,9 +3319,9 @@ checksum = "e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.21" +version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8b7abd5d659d9b90c8cba917f6ec750a74e2dc23902ef9cd4cc8c8b22e6036a" +checksum = "44fb6cb1be61cc1d2e43b262516aafcf63b241cffdb1d3fa115f91d9c7b09c90" dependencies = [ "futures-channel", "futures-core", @@ -3336,9 +3368,9 @@ dependencies = [ [[package]] name = "generic-array" -version = "0.14.5" +version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fd48d33ec7f05fbfa152300fdad764757cbded343c1aa1cff2fbaf4134851803" +checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9" dependencies = [ "typenum", "version_check 0.9.4", @@ -3379,9 +3411,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.26.1" +version = "0.26.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "78cc372d058dcf6d5ecd98510e7fbc9e5aec4d21de70f65fea8fecebcd881bd4" +checksum = "22030e2c5a68ec659fde1e949a745124b48e6fa8b045b7ed5bd1fe4ccc5c4e5d" [[package]] name = "glob" @@ -3461,9 +3493,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.3.13" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37a82c6d637fc9515a4694bbf1cb2457b79d81ce52b3108bdeea58b07dd34a57" +checksum = "5ca32592cf21ac7ccab1825cd87f6c9b3d9022c44d086172ed0966bec8af30be" dependencies = [ "bytes 1.1.0", "fnv", @@ -3486,15 +3518,18 @@ checksum = "eabb4a44450da02c90444cf74558da904edde8fb4e9035a9a6a4e15445af0bd7" [[package]] name = "hashbrown" -version = "0.12.1" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db0d4cf898abf0081f964436dc980e96670a0f36863e4b83aaacdb65c9d7ccc3" +checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" +dependencies = [ + "ahash", +] [[package]] name = "hdrhistogram" -version = "7.5.0" +version = "7.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31672b7011be2c4f7456c4ddbcb40e7e9a4a9fad8efe49a6ebaf5f307d0109c0" +checksum = "7f19b9f54f7c7f55e31401bb647626ce0cf0f67b0004982ce815b3ee72a02aa8" dependencies = [ "base64 0.13.0", "byteorder", @@ -3505,9 +3540,9 @@ dependencies = [ [[package]] name = "headers" -version = "0.3.7" +version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4cff78e5788be1e0ab65b04d306b2ed5092c815ec97ec70f4ebd5aee158aa55d" +checksum = "f3e372db8e5c0d213e0cd0b9be18be2aca3d44cf2fe30a9d46a65581cd454584" dependencies = [ "base64 0.13.0", "bitflags", @@ -3516,7 +3551,7 @@ dependencies = [ "http", "httpdate", "mime 0.3.16", - "sha-1", + "sha1 0.10.5", ] [[package]] @@ -3555,7 +3590,7 @@ version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c49c37c09c17a53d937dfbb742eb3a961d65a994e6bcdcf37e7399d0cc8ab5e" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -3566,7 +3601,7 @@ checksum = "75f43d41e26995c17e71ee126451dd3941010b0514a81a9d11f3b341debc2399" dependencies = [ "bytes 1.1.0", "fnv", - "itoa 1.0.2", + "itoa 1.0.3", ] [[package]] @@ -3588,9 +3623,9 @@ checksum = "0bfe8eed0a9285ef776bb792479ea3834e8b94e13d615c2f66d03dd50a435a29" [[package]] name = "http-serde" -version = "1.1.0" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6d98b3d9662de70952b14c4840ee0f37e23973542a363e2275f4b9d024ff6cca" +checksum = "0e272971f774ba29341db2f686255ff8a979365a26fb9e4277f6b6d9ec0cdd5e" dependencies = [ "http", "serde", @@ -3614,14 +3649,14 @@ dependencies = [ "serde_json", "serde_qs", "serde_urlencoded", - "url 2.2.2", + "url 2.3.0", ] [[package]] name = "httparse" -version = "1.7.1" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "496ce29bb5a52785b44e0f7ca2847ae0bb839c9bd28f69acac9b99d461c0c04c" +checksum = "d897f394bad6a705d5f4104762e116a75639e470d80901eed05a860a95cb1904" [[package]] name = "httpdate" @@ -3665,9 +3700,9 @@ dependencies = [ [[package]] name = "hyper" -version = "0.14.18" +version = "0.14.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b26ae0a80afebe130861d90abf98e3814a4f28a4c6ffeb5ab8ebb2be311e0ef2" +checksum = "02c929dc5c39e335a03c405292728118860721b10190d98c2a0f0efd5baafbac" dependencies = [ "bytes 1.1.0", "futures-channel", @@ -3678,7 +3713,7 @@ dependencies = [ "http-body", "httparse", "httpdate", - "itoa 1.0.2", + "itoa 1.0.3", "pin-project-lite", "socket2", "tokio", @@ -3695,7 +3730,7 @@ checksum = "5f9f7a97316d44c0af9b0301e65010573a853a9fc97046d7331d7f6bc0fd5a64" dependencies = [ "ct-logs", "futures-util", - "hyper 0.14.18", + "hyper 0.14.20", "log 0.4.17", "rustls 0.19.1", "rustls-native-certs", @@ -3711,7 +3746,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d87c48c02e0dc5e3b849a2041db3029fd066650f8f717c07bf8ed78ccb895cac" dependencies = [ "http", - "hyper 0.14.18", + "hyper 0.14.20", "rustls 0.20.6", "tokio", "tokio-rustls 0.23.4", @@ -3723,7 +3758,7 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper 0.14.18", + "hyper 0.14.20", "pin-project-lite", "tokio", "tokio-io-timeout", @@ -3736,7 +3771,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes 1.1.0", - "hyper 0.14.18", + "hyper 0.14.20", "native-tls", "tokio", "tokio-native-tls", @@ -3758,6 +3793,19 @@ dependencies = [ "unicase 2.6.0", ] +[[package]] +name = "iana-time-zone" +version = "0.1.50" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd911b35d940d2bd0bea0f9100068e5b97b51a1cbe13d13382f132e0365257a0" +dependencies = [ + "android_system_properties", + 
"core-foundation-sys", + "js-sys", + "wasm-bindgen", + "winapi 0.3.9", +] + [[package]] name = "ide-ci" version = "0.1.0" @@ -3771,19 +3819,19 @@ dependencies = [ "cached", "cfg-if 1.0.0", "chrono", - "clap 3.1.18", + "clap 3.1.15", "convert_case 0.6.0", "cron", "data-encoding", "derivative", "derive_more", "dirs", + "enso-build-base", "filetime", "flate2", "flume", - "fn-error-context", "fs_extra", - "futures 0.3.21", + "futures 0.3.24", "futures-util", "glob", "graphql_client", @@ -3793,14 +3841,14 @@ dependencies = [ "ifmt", "indexmap", "indicatif", - "itertools 0.10.3", + "itertools 0.10.5", "lazy_static", "log 0.4.17", "mime 0.3.16", "new_mime_guess", "nix", "octocrab", - "paste 1.0.7", + "paste 1.0.9", "path-absolutize", "path-slash", "pathdiff", @@ -3814,13 +3862,12 @@ dependencies = [ "regex", "reqwest", "scopeguard", - "semver 1.0.9", + "semver 1.0.14", "serde", "serde_json", - "serde_yaml 0.9.10", + "serde_yaml 0.9.14", "sha2", "shrinkwraprs 0.3.0", - "snafu", "strum", "symlink", "syn", @@ -3832,7 +3879,7 @@ dependencies = [ "tracing", "tracing-subscriber", "unicase 2.6.0", - "url 2.2.2", + "url 2.3.0", "uuid 1.1.2", "walkdir", "warp", @@ -4104,9 +4151,9 @@ dependencies = [ [[package]] name = "itertools" -version = "0.10.3" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a9a9d19fa1e79b6215ff29b9d6880b706147f16e9b1dbb1e4e5947b5b02bc5e3" +checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] @@ -4119,9 +4166,9 @@ checksum = "b71991ff56294aa922b450139ee08b3bfc70982c6b2c7562771375cf73542dd4" [[package]] name = "itoa" -version = "1.0.2" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "112c678d4050afce233f4f2852bb2eb519230b3cf12f33585275537d7e41578d" +checksum = "6c8af84674fe1f223a982c933a0ee1086ac4d4052aa0fb8060c12c6ad838e754" [[package]] name = "jni" @@ -4130,7 +4177,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c6df18c2e3db7e453d3c6ac5b3e9d5182664d28788126d39b91f2d1e22b017ec" dependencies = [ "cesu8", - "combine 4.6.4", + "combine 4.6.6", "jni-sys", "log 0.4.17", "thiserror", @@ -4145,9 +4192,9 @@ checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130" [[package]] name = "jobserver" -version = "0.1.24" +version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af25a77299a7f711a01975c35a6a424eb6862092cc2d6c72c4ed6cbc56dfc1fa" +checksum = "068b1ee6743e4d11fb9c6a1e6064b3693a1b600e7f5f5988047d98b3dc9fb90b" dependencies = [ "libc", ] @@ -4171,7 +4218,7 @@ dependencies = [ "enso-shapely", "enso-web", "failure", - "futures 0.3.21", + "futures 0.3.24", "serde", "serde_json", "shrinkwraprs 0.3.0", @@ -4179,9 +4226,9 @@ dependencies = [ [[package]] name = "jsonwebtoken" -version = "8.1.0" +version = "8.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc9051c17f81bae79440afa041b3a278e1de71bfb96d32454b477fd4703ccb6f" +checksum = "1aa4b4af834c6cfd35d8763d359661b90f2e45d8f750a0849156c7f4671af09c" dependencies = [ "base64 0.13.0", "pem", @@ -4280,15 +4327,15 @@ checksum = "349d5a591cd28b49e1d1037471617a32ddcda5731b99419008085f72d5a53836" [[package]] name = "libm" -version = "0.2.2" +version = "0.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33a33a362ce288760ec6a508b94caaec573ae7d3bbbd91b87aa0bad4456839db" +checksum = "292a948cd991e376cf75541fe5b97a1081d713c618b4f1b9500f8844e49eb565" 
[[package]] name = "linked-hash-map" -version = "0.5.4" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fb9b38af92608140b86b693604b9ffcc5824240a484d1ecd4795bacb2fe88f3" +checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "lock_api" @@ -4301,9 +4348,9 @@ dependencies = [ [[package]] name = "lock_api" -version = "0.4.7" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "327fa5b6a6940e4699ec49a9beae1ea4845c6bab9314e4f84ac68742139d8c53" +checksum = "435011366fe56583b16cf956f9df0095b405b82d76425bc8981c0e22e60ec4df" dependencies = [ "autocfg 1.1.0", "scopeguard", @@ -4332,11 +4379,11 @@ dependencies = [ name = "logstat" version = "0.1.0" dependencies = [ - "clap 3.1.18", + "clap 3.1.15", "enso-prelude", "lazy_static", "regex", - "time 0.3.9", + "time 0.3.14", "tokio", "tokio-stream", ] @@ -4388,11 +4435,11 @@ checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" [[package]] name = "md-5" -version = "0.10.1" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658646b21e0b72f7866c7038ab086d3d5e1cd6271f060fd37defb241949d0582" +checksum = "6365506850d44bff6e2fbcb5176cf63650e48bd45ef2fe2665ae1570e0f4b9ca" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -4443,9 +4490,9 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.5.1" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b29bd4bc3f33391105ebee3589c19197c4271e3e5a9ec9bfe8127eeff8f082" +checksum = "96590ba8f175222643a85693f33d26e9c8a015f599c216509b1a6894af675d34" dependencies = [ "adler", ] @@ -4471,9 +4518,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.3" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "713d550d9b44d89174e066b7a6217ae06234c10cb47819a88290d2b353c31799" +checksum = "57ee1c23c7c63b0c9250c339ffdc69255f110b298b901b9f6c82547b7b87caaf" dependencies = [ "libc", "log 0.4.17", @@ -4672,10 +4719,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "43db66d1170d347f9a065114077f7dccb00c1b9478c89384490a3425279a4606" dependencies = [ "num-bigint", - "num-complex 0.4.1", + "num-complex 0.4.2", "num-integer", "num-iter", - "num-rational 0.4.0", + "num-rational 0.4.1", "num-traits", ] @@ -4713,9 +4760,9 @@ dependencies = [ [[package]] name = "num-complex" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "97fbc387afefefd5e9e39493299f3069e14a140dd34dc19b4c1c1a8fddb6a790" +checksum = "7ae39348c8bc5fbd7f40c727a9925f03517afd2ab27d46702108b6a7e5414c19" dependencies = [ "num-traits", ] @@ -4765,9 +4812,9 @@ dependencies = [ [[package]] name = "num-rational" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d41702bd167c2df5520b384281bc111a4b5efcf7fbc4c9c222c815b07e0a6a6a" +checksum = "0638a1c9d0a3c0914158145bc76cff373a75a627e6ecbfb71cbe6f453a5a19b0" dependencies = [ "autocfg 1.1.0", "num-bigint", @@ -4842,22 +4889,23 @@ dependencies = [ [[package]] name = "object" -version = "0.28.4" +version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e42c982f2d955fac81dd7e1d0e1426a7d702acd9c98d19ab01083a6a0328c424" +checksum = 
"21158b2c33aa6d4561f1c0a6ea283ca92bc54802a93b263e910746d679a7eb53" dependencies = [ "memchr", ] [[package]] name = "octocrab" -version = "0.16.0" -source = "git+https://github.com/enso-org/octocrab#2a104c9673f45d48ae0438d0b05bcd905eb1334b" +version = "0.17.0" +source = "git+https://github.com/enso-org/octocrab#88f81c840085ce0aa591f9f53d6c62309cac8260" dependencies = [ "arc-swap", "async-trait", "base64 0.13.0", "bytes 1.1.0", + "cfg-if 1.0.0", "chrono", "hyperx", "jsonwebtoken", @@ -4868,14 +4916,14 @@ dependencies = [ "serde_json", "serde_path_to_error", "snafu", - "url 2.2.2", + "url 2.3.0", ] [[package]] name = "once_cell" -version = "1.16.0" +version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86f0b0d4bf799edbc74508c1e8bf170ff5f41238e5f8225603ca7caaae2b7860" +checksum = "e82dad04139b71a90c080c8463fe0dc7902db5192d939bd0950f074d014339e1" [[package]] name = "oorandom" @@ -4897,9 +4945,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "openssl" -version = "0.10.40" +version = "0.10.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb81a6430ac911acb25fe5ac8f1d2af1b4ea8a4fdfda0f1ee4292af2e2d8eb0e" +checksum = "12fc0523e3bd51a692c8850d075d74dc062ccf251c0110668cbd921917118a13" dependencies = [ "bitflags", "cfg-if 1.0.0", @@ -4929,9 +4977,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] name = "openssl-sys" -version = "0.9.73" +version = "0.9.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d5fd19fb3e0a8191c1e34935718976a3e70c112ab9a24af6d7cadccd9d90bc0" +checksum = "5230151e44c0f05157effb743e8d517472843121cf9243e8b81393edb5acd9ce" dependencies = [ "autocfg 1.1.0", "cc", @@ -4942,35 +4990,34 @@ dependencies = [ [[package]] name = "ordered-float" -version = "3.0.0" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "96bcbab4bfea7a59c2c0fe47211a1ac4e3e96bea6eb446d704f310bc5c732ae2" +checksum = "98ffdb14730ed2ef599c65810c15b000896e21e8776b512de0db0c3d7335cc2a" dependencies = [ "num-traits", ] [[package]] name = "os_str_bytes" -version = "6.1.0" +version = "6.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21326818e99cfe6ce1e524c2a805c189a99b5ae555a35d19f9a284b427d86afa" +checksum = "9ff7415e9ae3fff1225851df9e0d9e4e5479f947619774677a63572e55e80eff" [[package]] name = "ouroboros" -version = "0.15.0" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f31a3b678685b150cba82b702dcdc5e155893f63610cf388d30cd988d4ca2bf" +checksum = "dfbb50b356159620db6ac971c6d5c9ab788c9cc38a6f49619fca2a27acb062ca" dependencies = [ "aliasable", "ouroboros_macro", - "stable_deref_trait", ] [[package]] name = "ouroboros_macro" -version = "0.15.0" +version = "0.15.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "084fd65d5dd8b3772edccb5ffd1e4b7eba43897ecd0f9401e330e8c542959408" +checksum = "4a0d9d1a6191c4f391f87219d1ea42b23f09ee84d64763cd05ee6ea88d9f384d" dependencies = [ "Inflector", "proc-macro-error", @@ -4981,9 +5028,9 @@ dependencies = [ [[package]] name = "owned_ttf_parser" -version = "0.15.1" +version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07ef1a404ae479dd6906f4fa2c88b3c94028f1284beb42a47c183a7c27ee9a3e" +checksum = "05e6affeb1632d6ff6a23d2cd40ffed138e82f1532571a26f527c8a284bb2fbb" dependencies = [ 
"ttf-parser", ] @@ -5007,11 +5054,11 @@ dependencies = [ [[package]] name = "parking_lot" -version = "0.12.0" +version = "0.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87f5ec2493a61ac0506c0f4199f99070cbe83857b0337006a30f3e6719b8ef58" +checksum = "3742b2c103b9f06bc9fff0a37ff4912935851bee6d36f3c02bcc755bcfec228f" dependencies = [ - "lock_api 0.4.7", + "lock_api 0.4.9", "parking_lot_core 0.9.3", ] @@ -5038,8 +5085,8 @@ checksum = "09a279cbf25cb0757810394fbc1e359949b59e348145c643a939a525692e6929" dependencies = [ "cfg-if 1.0.0", "libc", - "redox_syscall 0.2.13", - "smallvec 1.8.0", + "redox_syscall 0.2.16", + "smallvec 1.9.0", "windows-sys", ] @@ -5055,7 +5102,7 @@ dependencies = [ "enso-profiler", "enso-text", "failure", - "futures 0.3.21", + "futures 0.3.24", "ide-ci", "js-sys", "matches", @@ -5077,7 +5124,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d791538a6dcc1e7cb7fe6f6b58aca40e7f79403c45b2bc274008b5e647af1d8" dependencies = [ "base64ct", - "rand_core 0.6.3", + "rand_core 0.6.4", "subtle", ] @@ -5093,9 +5140,9 @@ dependencies = [ [[package]] name = "paste" -version = "1.0.7" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c520e05135d6e763148b6426a837e239041653ba7becd2e538c076c738025fc" +checksum = "b1de2e551fb905ac83f73f7aedf2f0cb4a0da7e35efa24a202a936269f1f18e1" [[package]] name = "paste-impl" @@ -5148,7 +5195,7 @@ version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "271779f35b581956db91a3e55737327a03aa051e90b1c47aeb189508533adfd7" dependencies = [ - "digest 0.10.3", + "digest 0.10.5", "hmac", "password-hash", "sha2", @@ -5156,9 +5203,9 @@ dependencies = [ [[package]] name = "pem" -version = "1.0.2" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e9a3b09a20e374558580a4914d3b7d89bd61b954a5a5e1dcbea98753addb1947" +checksum = "03c64931a1a212348ec4f3b4362585eca7159d0d09cbdf4a7f74f02173596fd4" dependencies = [ "base64 0.13.0", ] @@ -5177,18 +5224,18 @@ checksum = "d4fd5641d01c8f18a23da7b6fe29298ff4b55afcccdf78973b24cf3175fee32e" [[package]] name = "pin-project" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58ad3879ad3baf4e44784bc6a718a8698867bb991f8ce24d1bcbe2cfb4c3a75e" +checksum = "ad29a609b6bcd67fee905812e544992d216af9d755757c05ed2d0e15a74c6ecc" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.0.10" +version = "1.0.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "744b6f092ba29c3650faf274db506afd39944f48420f6c86b17cfe0ee1cb36bb" +checksum = "069bdb1e05adc7a8990dce9cc75370895fbe4e3d58b9b73bf1aee56359344a55" dependencies = [ "proc-macro2", "quote", @@ -5215,18 +5262,18 @@ checksum = "1df8c4ec4b0627e53bdf214615ad287367e482558cf84b109250b37464dc03ae" [[package]] name = "platforms" -version = "3.0.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86d1db500905601725f5c3629a5815a2ce7611fe063de279964b451f3edb3532" +checksum = "d8ec293fd25f7fcfeb7c70129241419a62c6200a26a725f680aff07c91d0ed05" dependencies = [ "serde", ] [[package]] name = "plotters" -version = "0.3.1" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32a3fd9ec30b9749ce28cd91f255d569591cdf937fe280c312143e3c4bad6f2a" +checksum = 
"2538b639e642295546c50fcd545198c9d64ee2a38620a628724a3b266d5fbf97" dependencies = [ "num-traits", "plotters-backend", @@ -5237,25 +5284,26 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d88417318da0eaf0fdcdb51a0ee6c3bed624333bff8f946733049380be67ac1c" +checksum = "193228616381fecdc1224c62e96946dfbc73ff4384fba576e052ff8c1bea8142" [[package]] name = "plotters-svg" -version = "0.3.1" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "521fa9638fa597e1dc53e9412a4f9cefb01187ee1f7413076f9e6749e2885ba9" +checksum = "f9a81d2759aae1dae668f783c308bc5c8ebd191ff4184aaa1b37f65a6ae5a56f" dependencies = [ "plotters-backend", ] [[package]] name = "polling" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "685404d509889fade3e86fe3a5803bca2ec09b0c0778d5ada6ec8bf7a8de5259" +checksum = "899b00b9c8ab553c743b3e11e87c5c7d423b2a2de229ba95b24a756344748011" dependencies = [ + "autocfg 1.1.0", "cfg-if 1.0.0", "libc", "log 0.4.17", @@ -5316,10 +5364,11 @@ dependencies = [ [[package]] name = "proc-macro-crate" -version = "1.1.3" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e17d47ce914bf4de440332250b0edd23ce48c005f59fab39d3335866b114f11a" +checksum = "eda0fc3b0fb7c975631757e14d9049da17374063edb6ebbcbc54d880d4fe94e9" dependencies = [ + "once_cell", "thiserror", "toml", ] @@ -5356,9 +5405,9 @@ checksum = "dbf0c48bc1d91375ae5c3cd81e3722dff1abcf81a30960240640d223f59fe0e5" [[package]] name = "proc-macro2" -version = "1.0.40" +version = "1.0.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd96a1e8ed2596c337f8eae5f24924ec83f5ad5ab21ea8e455d3566c69fbcaf7" +checksum = "7bd7356a8122b6c4a24a82b278680c73357984ca2fc79a0f9fa6dea7dced7c58" dependencies = [ "unicode-ident", ] @@ -5380,7 +5429,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7345d5f0e08c0536d7ac7229952590239e77abf0a0100a1b1d890add6ea96364" dependencies = [ "anyhow", - "itertools 0.10.3", + "itertools 0.10.5", "proc-macro2", "quote", "syn", @@ -5398,9 +5447,9 @@ dependencies = [ [[package]] name = "pulldown-cmark" -version = "0.9.1" +version = "0.9.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34f197a544b0c9ab3ae46c359a7ec9cbbb5c7bf97054266fecb7ead794a181d6" +checksum = "2d9cc634bc78768157b5cbfe988ffcd1dcba95cd2b2f03a88316c08c6d00ed63" dependencies = [ "bitflags", "getopts", @@ -5423,20 +5472,11 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" -[[package]] -name = "quickcheck" -version = "1.0.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "588f6378e4dd99458b60ec275b4477add41ce4fa9f64dcba6f15adccb19b50d6" -dependencies = [ - "rand 0.8.5", -] - [[package]] name = "quote" -version = "1.0.20" +version = "1.0.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3bcdf212e9776fbcb2d23ab029360416bb1706b1aea2d1a5ba002727cbcab804" +checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" dependencies = [ "proc-macro2", ] @@ -5481,7 +5521,7 @@ checksum = "34af8d1a0e25924bc5b7c43c079c942339d8f0a8b57c39049bef581b46327404" dependencies = [ "libc", "rand_chacha 0.3.1", - "rand_core 0.6.3", + "rand_core 
0.6.4", ] [[package]] @@ -5511,7 +5551,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6c10a63a0fa32252be49d21e7709d4d4baf8d231c2dbce1eaa8141b9b127d88" dependencies = [ "ppv-lite86", - "rand_core 0.6.3", + "rand_core 0.6.4", ] [[package]] @@ -5540,9 +5580,9 @@ dependencies = [ [[package]] name = "rand_core" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d34f1408f55294453790c48b2f1ebbb1c5b4b7563eb1f418bcfcfdbb06ebb4e7" +checksum = "ec0be4795e2f6a28069bec0b5ff3e2ac9bafc99e6a9a7dc3547996c5c816922c" dependencies = [ "getrandom 0.2.7", ] @@ -5663,7 +5703,7 @@ checksum = "258bcdb5ac6dad48491bb2992db6b7cf74878b0384908af124823d118c99683f" dependencies = [ "crossbeam-channel", "crossbeam-deque", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.11", "num_cpus", ] @@ -5684,9 +5724,9 @@ checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce" [[package]] name = "redox_syscall" -version = "0.2.13" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62f25bc4c7e55e0b0b7a1d43fb893f4fa1361d0abe38b9ce4f323c2adfe6ef42" +checksum = "fb5a58c1855b4b6819d59012155603f0b22ad30cad752600aadfcb695265519a" dependencies = [ "bitflags", ] @@ -5698,7 +5738,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b033d837a7cf162d7993aded9304e30a83213c648b6e389db233191f891e5c2b" dependencies = [ "getrandom 0.2.7", - "redox_syscall 0.2.13", + "redox_syscall 0.2.16", "thiserror", ] @@ -5751,7 +5791,7 @@ dependencies = [ "h2", "http", "http-body", - "hyper 0.14.18", + "hyper 0.14.20", "hyper-rustls 0.23.0", "hyper-tls", "ipnet", @@ -5772,7 +5812,7 @@ dependencies = [ "tokio-rustls 0.23.4", "tokio-util", "tower-service", - "url 2.2.2", + "url 2.3.0", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", @@ -5828,7 +5868,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.9", + "semver 1.0.14", ] [[package]] @@ -5888,9 +5928,9 @@ dependencies = [ [[package]] name = "rustversion" -version = "1.0.6" +version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2cc38e8fa666e2de3c4aba7edeb5ffc5246c1c2ed0e3d17e560aeeba736b23f" +checksum = "97477e48b4cf8603ad5f7aaf897467cf42ab4218a38ef76fb14c2d6773a6d6a8" [[package]] name = "rustybuzz" @@ -5900,7 +5940,7 @@ checksum = "a617c811f5c9a7060fe511d35d13bf5b9f0463ce36d63ce666d05779df2b4eba" dependencies = [ "bitflags", "bytemuck", - "smallvec 1.8.0", + "smallvec 1.9.0", "ttf-parser", "unicode-bidi-mirroring", "unicode-ccc", @@ -5910,9 +5950,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.10" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3f6f92acf49d1b98f7a81226834412ada05458b7364277387724a237f062695" +checksum = "4501abdff3ae82a1c1b477a17252eb69cee9e66eb915c1abaa4f44d873df9f09" [[package]] name = "safemem" @@ -5982,9 +6022,9 @@ dependencies = [ [[package]] name = "security-framework" -version = "2.6.1" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dc14f172faf8a0194a3aded622712b0de276821addc574fa54fc0a1167e10dc" +checksum = "2bc1bb97804af6631813c55739f771071e0f2ed33ee20b68c86ec505d906356c" dependencies = [ "bitflags", "core-foundation", @@ -6020,9 +6060,9 @@ dependencies = [ [[package]] name = "semver" 
-version = "1.0.9" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8cb243bdfdb5936c8dc3c45762a19d12ab4550cdc753bc247637d4ec35a040fd" +checksum = "e25dfac463d778e353db5be2449d1cce89bd6fd23c9f1ea21310ce6e5a1b29c4" dependencies = [ "serde", ] @@ -6035,9 +6075,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.144" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f747710de3dcd43b88c9168773254e809d8ddbdf9653b84e2554ab219f17860" +checksum = "728eb6351430bccb993660dfffc5a72f91ccc1295abaa8ce19b27ebe4f75568b" dependencies = [ "serde_derive", ] @@ -6054,9 +6094,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.144" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94ed3a816fb1d101812f83e789f888322c34e291f894f19590dc310963e87a00" +checksum = "81fa1584d3d1bcacd84c277a0dfe21f5b0f6accf4a23d04d4c6d61f1af522b4c" dependencies = [ "proc-macro2", "quote", @@ -6065,20 +6105,20 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.81" +version = "1.0.86" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b7ce2b32a1aed03c558dc61a5cd328f15aff2dbc17daad8fb8af04d2100e15c" +checksum = "41feea4228a6f1cd09ec7a3593a682276702cd67b5273544757dae23c096f074" dependencies = [ - "itoa 1.0.2", + "itoa 1.0.3", "ryu", "serde", ] [[package]] name = "serde_path_to_error" -version = "0.1.7" +version = "0.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7868ad3b8196a8a0aea99a8220b124278ee5320a55e4fde97794b6f85b1a377" +checksum = "184c643044780f7ceb59104cef98a5a6f12cb2288a7bc701ab93a362b49fd47d" dependencies = [ "serde", ] @@ -6101,16 +6141,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3491c14715ca2294c4d6a88f15e84739788c1d030eed8c110436aafdaa2f3fd" dependencies = [ "form_urlencoded", - "itoa 1.0.2", + "itoa 1.0.3", "ryu", "serde", ] [[package]] name = "serde_yaml" -version = "0.8.24" +version = "0.8.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "707d15895415db6628332b737c838b88c598522e4dc70647e59b72312924aebc" +checksum = "578a7433b776b56a35785ed5ce9a7e777ac0598aac5a6dd1b4b18a307c7fc71b" dependencies = [ "indexmap", "ryu", @@ -6120,12 +6160,12 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.10" +version = "0.9.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a09f551ccc8210268ef848f0bab37b306e87b85b2e017b899e7fb815f5aed62" +checksum = "6d232d893b10de3eb7258ff01974d6ee20663d8e833263c99409d4b13a0209da" dependencies = [ "indexmap", - "itoa 1.0.2", + "itoa 1.0.3", "ryu", "serde", "unsafe-libyaml", @@ -6139,7 +6179,7 @@ checksum = "028f48d513f9678cda28f6e4064755b3fbb2af6acd672f2c209b62323f7aea0f" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -6153,13 +6193,13 @@ dependencies = [ [[package]] name = "sha1" -version = "0.10.1" +version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c77f4e7f65455545c2153c1253d25056825e77ee2533f0e41deb65a93a34852f" +checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -6170,13 +6210,13 @@ checksum = 
"ae1a47186c03a32177042e55dbc5fd5aee900b8e0069a8d70fba96a9375cd012" [[package]] name = "sha2" -version = "0.10.2" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55deaec60f81eefe3cce0dc50bda92d6d8e88f2a27df7c5033b42afeb1ed2676" +checksum = "82e6b795fe2e3b1e845bafcb27aa35405c4d47cdfc92af5fc8d3002f76cebdc0" dependencies = [ "cfg-if 1.0.0", "cpufeatures", - "digest 0.10.3", + "digest 0.10.5", ] [[package]] @@ -6257,26 +6297,29 @@ dependencies = [ "approx 0.4.0", "num-complex 0.3.1", "num-traits", - "paste 1.0.7", + "paste 1.0.9", ] [[package]] name = "simple_asn1" -version = "0.6.1" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4a762b1c38b9b990c694b9c2f8abe3372ce6a9ceaae6bca39cfc46e054f45745" +checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" dependencies = [ "num-bigint", "num-traits", "thiserror", - "time 0.3.9", + "time 0.3.14", ] [[package]] name = "slab" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb703cfe953bccee95685111adeedb76fabe4e97549a58d16f03ea7b9367bb32" +checksum = "4614a76b2a8be0058caa9dbbaf66d988527d86d003c11a94fbd335d7661edcef" +dependencies = [ + "autocfg 1.1.0", +] [[package]] name = "smallvec" @@ -6289,9 +6332,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2dd574626839106c320a323308629dcb1acfc96e32a8cba364ddc61ac23ee83" +checksum = "2fd0db749597d91ff862fd1d55ea87f7855a744a8425a64695b6fca237d1dad1" [[package]] name = "snafu" @@ -6318,9 +6361,9 @@ dependencies = [ [[package]] name = "socket2" -version = "0.4.4" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "66d72b759436ae32898a2af0a14218dbf55efde3feeb170eb623637db85ee1e0" +checksum = "02e2d2db9033d13a1567121ddd7a095ee144db4e1ca1b1bda3419bc0da294ebd" dependencies = [ "libc", "winapi 0.3.9", @@ -6328,18 +6371,18 @@ dependencies = [ [[package]] name = "sourcemap" -version = "6.0.2" +version = "6.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2ca89636b276071e7276488131f531dbf43ad1c19bc4bd5a04f6a0ce1ddc138" +checksum = "58ad6f449ac2dc2eaa01e766408b76b55fc0a20c842b63aa11a8448caa72f50b" dependencies = [ - "base64 0.11.0", + "base64 0.13.0", "if_chain", "lazy_static", "regex", "rustc_version 0.2.3", "serde", "serde_json", - "url 2.2.2", + "url 2.3.0", ] [[package]] @@ -6364,19 +6407,13 @@ checksum = "6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" [[package]] name = "spin" -version = "0.9.3" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c530c2b0d0bf8b69304b39fe2001993e267461948b890cd037d8ad4293fa1a0d" +checksum = "7f6002a767bff9e83f8eeecf883ecb8011875a21ae8da43bffb817a57e78cc09" dependencies = [ - "lock_api 0.4.7", + "lock_api 0.4.9", ] -[[package]] -name = "stable_deref_trait" -version = "1.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" - [[package]] name = "strsim" version = "0.10.0" @@ -6385,18 +6422,18 @@ checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" [[package]] name = "strum" -version = "0.24.0" +version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e96acfc1b70604b8b2f1ffa4c57e59176c7dbb05d556c71ecd2f5498a1dee7f8" +checksum = "063e6045c0e62079840579a7e47a355ae92f60eb74daaf156fb1e84ba164e63f" dependencies = [ "strum_macros", ] [[package]] name = "strum_macros" -version = "0.24.0" +version = "0.24.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6878079b17446e4d3eba6192bb0a2950d5b14f0ed8424b852310e5a94345d0ef" +checksum = "1e385be0d24f186b4ce2f9982191e7101bb737312ad61c1f2f984f34bcf85d59" dependencies = [ "heck", "proc-macro2", @@ -6419,9 +6456,9 @@ checksum = "a7973cce6668464ea31f176d85b13c7ab3bba2cb3b77a2ed26abd7801688010a" [[package]] name = "syn" -version = "1.0.98" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c50aef8a904de4c23c788f104b7dddc7d6f79c647c7c8ce4cc8f73eb0ca773dd" +checksum = "e90cde112c4b9690b8cbe810cba9ddd8bc1d7472e2cae317b69e9438c1cba7d2" dependencies = [ "proc-macro2", "quote", @@ -6448,9 +6485,9 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.26.7" +version = "0.26.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c375d5fd899e32847b8566e10598d6e9f1d9b55ec6de3cdf9e7da4bdc51371bc" +checksum = "7890fff842b8db56f2033ebee8f6efe1921475c3830c115995552914fb967580" dependencies = [ "cfg-if 1.0.0", "core-foundation-sys", @@ -6481,7 +6518,7 @@ dependencies = [ "cfg-if 1.0.0", "fastrand", "libc", - "redox_syscall 0.2.13", + "redox_syscall 0.2.16", "remove_dir_all", "winapi 0.3.9", ] @@ -6531,18 +6568,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.31" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd829fe32373d27f76265620b5309d0340cb8550f523c1dda251d6298069069a" +checksum = "10deb33631e3c9018b9baf9dcbbc4f737320d2b576bac10f6aefa048fa407e3e" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.31" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0396bc89e626244658bef819e22d0cc459e795a5ebe878e6ec336d1674a8d79a" +checksum = "982d17546b47146b28f7c22e3d08465f6b8903d0ea13c1660d9d84a6e7adcdbb" dependencies = [ "proc-macro2", "quote", @@ -6571,14 +6608,13 @@ dependencies = [ [[package]] name = "time" -version = "0.3.9" +version = "0.3.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2702e08a7a860f005826c6815dcac101b19b5eb330c27fe4a5928fec1d20ddd" +checksum = "3c3f9a28b618c3a6b9251b6908e9c99e04b9e5c02e6581ccbb67d59c34ef7f9b" dependencies = [ - "itoa 1.0.2", + "itoa 1.0.3", "libc", "num_threads", - "quickcheck", "time-macros", ] @@ -6623,10 +6659,10 @@ dependencies = [ "bytes 1.1.0", "libc", "memchr", - "mio 0.8.3", + "mio 0.8.4", "num_cpus", "once_cell", - "parking_lot 0.12.0", + "parking_lot 0.12.1", "pin-project-lite", "signal-hook-registry", "socket2", @@ -6679,9 +6715,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b557f72f448c511a979e2564e55d74e6c4432fc96ff4f6241bc6bded342643b7" +checksum = "9724f9a975fb987ef7a3cd9be0350edcbe130698af5b8f7a631e23d42d052484" dependencies = [ "proc-macro2", "quote", @@ -6741,9 +6777,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df54d54117d6fdc4e4fea40fe1e4e566b3505700e148a6827e59b34b0d2600d9" +checksum = 
"f6edf2d6bc038a43d31353570e27270603f4648d18f5ed10c0e179abe43255af" dependencies = [ "futures-core", "pin-project-lite", @@ -6799,15 +6835,16 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f988a1a1adc2fb21f9c12aa96441da33a1728193ae0b95d2be22dbd17fcb4e5c" +checksum = "cc463cd8deddc3770d20f9852143d50bf6094e640b485cb2e189a2099085ff45" dependencies = [ "bytes 1.1.0", "futures-core", "futures-io", "futures-sink", "futures-util", + "hashbrown", "pin-project-lite", "slab", "tokio", @@ -6825,9 +6862,9 @@ dependencies = [ [[package]] name = "tonic" -version = "0.8.2" +version = "0.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "55b9af819e54b8f33d453655bef9b9acc171568fb49523078d0cc4e7484200ec" +checksum = "11cd56bdb54ef93935a6a79dbd1d91f1ebd4c64150fd61654031fd6b8b775c91" dependencies = [ "async-stream", "async-trait", @@ -6839,7 +6876,7 @@ dependencies = [ "h2", "http", "http-body", - "hyper 0.14.18", + "hyper 0.14.20", "hyper-timeout", "percent-encoding 2.1.0", "pin-project", @@ -6877,9 +6914,9 @@ dependencies = [ [[package]] name = "tower-http" -version = "0.3.3" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d342c6d58709c0a6d48d48dabbb62d4ef955cf5f0f3bbfd845838e7ae88dbae" +checksum = "3c530c8675c1dbf98facee631536fa116b5fb6382d7dd6dc1b118d970eafe3ba" dependencies = [ "bitflags", "bytes 1.1.0", @@ -6902,9 +6939,9 @@ checksum = "343bc9466d3fe6b0f960ef45960509f84480bf4fd96f92901afe7ff3df9d3a62" [[package]] name = "tower-service" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6" +checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" [[package]] name = "tracing" @@ -6963,16 +7000,16 @@ dependencies = [ [[package]] name = "tracing-subscriber" -version = "0.3.11" +version = "0.3.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4bc28f93baff38037f64e6f43d34cfa1605f27a49c34e8a04c5e78b0babf2596" +checksum = "60db860322da191b40952ad9affe65ea23e7dd6a5c442c2c42865810c6ab8e6b" dependencies = [ "ansi_term", - "lazy_static", "matchers", + "once_cell", "regex", "sharded-slab", - "smallvec 1.8.0", + "smallvec 1.9.0", "thread_local", "tracing", "tracing-core", @@ -7023,7 +7060,7 @@ dependencies = [ "rand 0.8.5", "sha-1", "thiserror", - "url 2.2.2", + "url 2.3.0", "utf-8", ] @@ -7092,42 +7129,42 @@ checksum = "07547e3ee45e28326cc23faac56d44f58f16ab23e413db526debce3b0bfd2742" [[package]] name = "unicode-ident" -version = "1.0.0" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d22af068fba1eb5edcb4aea19d382b2a3deb4c8f9d475c589b6ada9e0fd493ee" +checksum = "dcc811dc4066ac62f84f11307873c4850cb653bfa9b1719cee2bd2204a4bc5dd" [[package]] name = "unicode-normalization" -version = "0.1.19" +version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d54590932941a9e9266f0832deed84ebe1bf2e4c9e4a3554d393d18f5e854bf9" +checksum = "5c5713f0fc4b5db668a2ac63cdb7bb4469d8c9fed047b1d0292cc7b0ce2ba921" dependencies = [ "tinyvec", ] [[package]] name = "unicode-script" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58dd944fd05f2f0b5c674917aea8a4df6af84f2d8de3fe8d988b95d28fb8fb09" +checksum = 
"7d817255e1bed6dfd4ca47258685d14d2bdcfbc64fdc9e3819bd5848057b8ecc" [[package]] name = "unicode-segmentation" -version = "1.9.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7e8820f5d777f6224dc4be3632222971ac30164d4a258d595640799554ebfd99" +checksum = "0fdbf052a0783de01e944a6ce7a8cb939e295b1e7be835a1112c3b9a7f047a5a" [[package]] name = "unicode-width" -version = "0.1.9" +version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3ed742d4ea2bd1176e236172c8429aaf54486e7ac098db29ffe6529e0ce50973" +checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" [[package]] name = "unicode-xid" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "957e51f3646910546462e67d5f7599b9e4fb8acdd304b087a6494730f9eebf04" +checksum = "f962df74c8c05a667b5ee8bcf162993134c104e96440b663c8daa176dc772d8c" [[package]] name = "unidecode" @@ -7146,9 +7183,9 @@ dependencies = [ [[package]] name = "unsafe-libyaml" -version = "0.2.2" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "931179334a56395bcf64ba5e0ff56781381c1a5832178280c7d7f91d1679aeb0" +checksum = "c1e5fa573d8ac5f1a856f8d7be41d390ee973daf97c806b2c1a465e4e1406e68" [[package]] name = "untrusted" @@ -7169,22 +7206,21 @@ dependencies = [ [[package]] name = "url" -version = "2.2.2" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a507c383b2d33b5fc35d1861e77e6b383d158b2da5e14fe51b83dfedf6fd578c" +checksum = "22fe195a4f217c25b25cb5058ced57059824a678474874038dc88d211bf508d3" dependencies = [ "form_urlencoded", "idna 0.2.3", - "matches", "percent-encoding 2.1.0", "serde", ] [[package]] name = "urlencoding" -version = "2.1.0" +version = "2.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b90931029ab9b034b300b797048cf23723400aa757e8a2bfb9d748102f9821" +checksum = "e8db7427f936968176eaa7cdf81b7f98b980b18495ec28f1b5791ac3bfe3eea9" [[package]] name = "utf-8" @@ -7303,7 +7339,7 @@ dependencies = [ "futures-util", "headers", "http", - "hyper 0.14.18", + "hyper 0.14.20", "log 0.4.17", "mime 0.3.16", "mime_guess", @@ -7471,9 +7507,9 @@ dependencies = [ [[package]] name = "webpki-roots" -version = "0.22.3" +version = "0.22.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44d8de8415c823c8abd270ad483c6feeac771fad964890779f9a8cb24fbbc1bf" +checksum = "368bfe657969fb01238bb756d351dcade285e0f6fcbd36dcb23359a5169975be" dependencies = [ "webpki 0.22.0", ] @@ -7504,9 +7540,9 @@ dependencies = [ [[package]] name = "websocket-codec" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72154d7f42457a99b2832ff093a22a6b303d88c6fe87ca975515cc6c7bc8d21d" +checksum = "2108c9c18a6e746addc085c18cedb66b672e8ffea6a993712decc295b0d8ae55" dependencies = [ "base64 0.13.0", "byteorder", @@ -7519,19 +7555,19 @@ dependencies = [ [[package]] name = "websocket-lite" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "44a2fea74fd5c7e2720dfd619bf029b46acef012cc619793d6d76d29c0ba8c14" +checksum = "1d6cae39139c6e837afebd915935e7adc8af5c28425935de606d0e8c9d3268f6" dependencies = [ "base64 0.13.0", "bytes 1.1.0", - "futures 0.3.21", + "futures 0.3.24", "native-tls", "rand 0.8.5", "tokio", "tokio-native-tls", "tokio-util", - "url 2.2.2", + "url 2.3.0", 
"websocket-codec", ] @@ -7556,21 +7592,22 @@ dependencies = [ [[package]] name = "which" -version = "4.2.5" +version = "4.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c4fb54e6113b6a8772ee41c3404fb0301ac79604489467e0a9ce1f3e97c24ae" +checksum = "1c831fbbee9e129a8cf93e7747a82da9d95ba8e16621cae60ec2cdc849bacb7b" dependencies = [ "either", - "lazy_static", "libc", + "once_cell", ] [[package]] name = "whoami" -version = "1.2.1" +version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "524b58fa5a20a2fb3014dd6358b70e6579692a56ef6fce928834e488f42f65e8" +checksum = "d6631b6a2fd59b1841b622e8f1a7ad241ef0a46f2d580464ce8140ac94cbd571" dependencies = [ + "bumpalo", "wasm-bindgen", "web-sys", ] @@ -7672,18 +7709,18 @@ dependencies = [ [[package]] name = "wiremock" -version = "0.5.15" +version = "0.5.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "249dc68542861d17eae4b4e5e8fb381c2f9e8f255a84f6771d5fdf8b6c03ce3c" +checksum = "cc3c7b7557dbfdad6431b5a51196c9110cef9d83f6a9b26699f35cdc0ae113ec" dependencies = [ "assert-json-diff", "async-trait", "base64 0.13.0", "deadpool", - "futures 0.3.21", + "futures 0.3.24", "futures-timer", "http-types", - "hyper 0.14.18", + "hyper 0.14.20", "log 0.4.17", "once_cell", "regex", @@ -7707,15 +7744,15 @@ name = "wstest" version = "0.1.0" dependencies = [ "base64 0.13.0", - "clap 3.1.18", + "clap 3.1.15", "either", "enso-prelude", - "futures 0.3.21", + "futures 0.3.24", "regex", - "time 0.3.9", + "time 0.3.14", "tokio", "tokio-stream", - "url 2.2.2", + "url 2.3.0", "websocket-lite", ] @@ -7757,9 +7794,9 @@ dependencies = [ [[package]] name = "zeroize" -version = "1.5.5" +version = "1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "94693807d016b2f2d2e14420eb3bfcca689311ff775dcf113d74ea624b7cdf07" +checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f" [[package]] name = "zip" @@ -7786,12 +7823,12 @@ dependencies = [ "bzip2", "constant_time_eq", "crc32fast", - "crossbeam-utils 0.8.8", + "crossbeam-utils 0.8.11", "flate2", "hmac", "pbkdf2", - "sha1 0.10.1", - "time 0.3.9", + "sha1 0.10.5", + "time 0.3.14", "zstd", ] diff --git a/Cargo.toml b/Cargo.toml index ef78a4055ba8..a7e1c1ef9409 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ members = [ "app/gui", "app/gui/enso-profiler-enso-data", "build/cli", + "build/macros", "build/enso-formatter", "build/intellij-run-config-gen", "build/deprecated/rust-scripts", diff --git a/build/base/Cargo.toml b/build/base/Cargo.toml new file mode 100644 index 000000000000..e74d0c59b2a1 --- /dev/null +++ b/build/base/Cargo.toml @@ -0,0 +1,15 @@ +[package] +name = "enso-build-base" +version = "0.1.0" +edition = "2021" + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +anyhow = "1.0.65" +fn-error-context = "0.2.0" +futures-util = "0.3.24" +futures = "0.3.24" +serde = "1.0.145" +serde_json = "1.0.85" +serde_yaml = "0.9.13" +tracing = "0.1.36" diff --git a/build/base/src/extensions.rs b/build/base/src/extensions.rs new file mode 100644 index 000000000000..f60984692133 --- /dev/null +++ b/build/base/src/extensions.rs @@ -0,0 +1,16 @@ +//! Additional convenience methods for various common types. 
+
+
+// ==============
+// === Export ===
+// ==============
+
+pub mod from_string;
+pub mod future;
+pub mod iterator;
+pub mod maps;
+pub mod option;
+pub mod path;
+pub mod pathbuf;
+pub mod result;
+pub mod str;
diff --git a/build/ci_utils/src/extensions/from_string.rs b/build/base/src/extensions/from_string.rs
similarity index 71%
rename from build/ci_utils/src/extensions/from_string.rs
rename to build/base/src/extensions/from_string.rs
index cf2b814a1fca..63b8ece71d52 100644
--- a/build/ci_utils/src/extensions/from_string.rs
+++ b/build/base/src/extensions/from_string.rs
@@ -1,3 +1,5 @@
+//! Module with utilities for converting string-like values into other types.
+
 use crate::prelude::*;
 
 use anyhow::Context;
@@ -5,9 +7,12 @@
 use std::any::type_name;
 
 
+/// An equivalent of the standard library's `std::str::FromStr` trait, but with nice error messages.
 pub trait FromString: Sized {
+    /// Parse a string into a value of this type. See: `std::str::FromStr::from_str`.
     fn from_str(s: &str) -> Result<Self>;
 
+    /// Parse a string into a value of this type and then convert it to `R`.
     fn parse_into<R>(text: impl AsRef<str>) -> Result<R>
     where
         Self: TryInto<R>,
diff --git a/build/ci_utils/src/extensions/future.rs b/build/base/src/extensions/future.rs
similarity index 62%
rename from build/ci_utils/src/extensions/future.rs
rename to build/base/src/extensions/future.rs
index 9cee51f554cd..d5a7091a3b0d 100644
--- a/build/ci_utils/src/extensions/future.rs
+++ b/build/base/src/extensions/future.rs
@@ -1,3 +1,5 @@
+//! Extensions to [`Future`]-related types.
+
 use crate::prelude::*;
 
 use futures_util::future::ErrInto;
@@ -10,12 +12,12 @@
 use futures_util::TryFutureExt as _;
 
 
-fn void<T>(_t: T) {}
-
+/// Extension methods for [`Future`].
 pub trait FutureExt: Future {
+    /// Discard the result of this future.
     fn void(self) -> Map<Self, fn(Self::Output) -> ()>
     where Self: Sized {
-        self.map(void)
+        self.map(drop)
     }
 }
 
@@ -24,12 +26,27 @@ impl<T> FutureExt for T where T: Future {}
 
 type FlattenResultFn<T, E> = fn(std::result::Result<std::result::Result<T, E>, E>) -> std::result::Result<T, E>;
 
+/// Extension methods for [`TryFuture`], i.e. the `Result`-yielding [`Future`].
 pub trait TryFutureExt: TryFuture {
+    /// Discard the result of a successful future.
     fn void_ok(self) -> MapOk<Self, fn(Self::Ok) -> ()>
     where Self: Sized {
-        self.map_ok(void)
+        self.map_ok(drop)
+    }
+
+    /// Convert the error type of this future to [`anyhow::Error`] and add the given context.
+    fn context(
+        self,
+        context: impl Display + Send + Sync + 'static,
+    ) -> BoxFuture<'static, Result<Self::Ok>>
+    where
+        Self: Sized + Send + 'static,
+        Self::Error: Into<anyhow::Error> + Send + Sync + 'static,
+    {
+        self.map_err(|err| err.into().context(context)).boxed()
     }
 
+    /// Convert the error type of this future to [`anyhow::Error`].
     fn anyhow_err(self) -> MapErr<Self, fn(Self::Error) -> anyhow::Error>
     where
         Self: Sized,
@@ -38,6 +55,7 @@
         self.map_err(anyhow::Error::from)
     }
 
+    /// If the future is successful, apply the function to the result and return the new future.
     fn and_then_sync(
         self,
         f: F,
@@ -53,16 +71,9 @@ pub trait TryFutureExt: TryFuture {
 
 impl<T> TryFutureExt for T where T: TryFuture {}
 
-
-pub fn receiver_to_stream<T>(
-    mut receiver: tokio::sync::mpsc::Receiver<T>,
-) -> impl Stream<Item = T> {
-    futures::stream::poll_fn(move |ctx| receiver.poll_recv(ctx))
-}
-
-
-
+/// Extension methods for [`TryStream`], i.e. a [`Stream`] that produces [`Result`]s.
 pub trait TryStreamExt: TryStream {
+    /// Wrap all the errors into [`anyhow::Error`].
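+    ///
+    /// A usage sketch, not part of the original change (it assumes this crate is named
+    /// `enso-build-base` and that the `futures` crate from its dependency list is available):
+    /// ```
+    /// use enso_build_base::extensions::future::TryStreamExt as _;
+    /// use futures::TryStreamExt as _;
+    ///
+    /// let items = futures::stream::iter([Ok::<_, std::io::Error>(1), Ok(2)]);
+    /// // `anyhow_err` converts the error type, so the stream composes with other
+    /// // `anyhow`-based combinators.
+    /// let collected: anyhow::Result<Vec<i32>> =
+    ///     futures::executor::block_on(items.anyhow_err().try_collect());
+    /// assert_eq!(collected.unwrap(), vec![1, 2]);
+    /// ```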
     fn anyhow_err(self) -> stream::MapErr<Self, fn(Self::Error) -> anyhow::Error>
     where
         Self: Sized,
diff --git a/build/ci_utils/src/extensions/iterator.rs b/build/base/src/extensions/iterator.rs
similarity index 70%
rename from build/ci_utils/src/extensions/iterator.rs
rename to build/base/src/extensions/iterator.rs
index 2d9bacbf0673..38cbc1987695 100644
--- a/build/ci_utils/src/extensions/iterator.rs
+++ b/build/base/src/extensions/iterator.rs
@@ -1,3 +1,5 @@
+//! Extension methods for `Iterator` and `Iterator`-like types.
+
 use crate::prelude.*;
 
 use std::iter::Rev;
@@ -5,7 +7,10 @@
 use std::iter::Take;
 
 
+/// Extension methods for `Iterator` and `Iterator`-like types.
 pub trait IteratorExt: Iterator {
+    /// Transforms an [Iterator]'s items into `Result`s, and filters out the `Err` variants.
     fn try_filter(mut self, mut f: impl FnMut(&Self::Item) -> Result) -> Result
     where
         Self: Sized,
     {
         })
     }
 
+    /// Transforms the [Iterator]'s items with a fallible function, failing on the first error.
     fn try_map(mut self, mut f: impl FnMut(Self::Item) -> Result) -> Result
     where
         Self: Sized,
@@ -29,8 +35,12 @@
 
 impl<I: Iterator> IteratorExt for I {}
 
+/// Extension methods for `Iterator` and `Iterator`-like types.
 pub trait TryIteratorExt: Iterator {
+    /// The result of successful iteration.
     type Ok;
+
+    /// Collects the results of the iterator into a `Result<Vec<_>>`.
     fn try_collect_vec(self) -> Result<Vec<Self::Ok>>;
 }
 
@@ -45,7 +55,9 @@ where
     }
 }
 
+#[allow(missing_docs)]
 pub trait ExactDoubleEndedIteratorExt: ExactSizeIterator + DoubleEndedIterator + Sized {
+    /// Take the last `n` elements of the iterator.
     fn take_last_n(self, n: usize) -> Rev<Take<Rev<Self>>> {
         self.rev().take(n).rev()
     }
diff --git a/build/ci_utils/src/extensions/maps.rs b/build/base/src/extensions/maps.rs
similarity index 80%
rename from build/ci_utils/src/extensions/maps.rs
rename to build/base/src/extensions/maps.rs
index 0894c868af3c..e26431a09953 100644
--- a/build/ci_utils/src/extensions/maps.rs
+++ b/build/base/src/extensions/maps.rs
@@ -1,11 +1,12 @@
+//! Extension methods for `HashMap` and `HashMap`-like types.
+
 use crate::prelude::*;
 
 use std::collections::HashMap;
 
 
 
-// trait Foo<'a, K, V> = FnOnce(&'a K) -> Future>;
-
+/// Get the value for the given key, or insert the value generated by the given `f` function.
 pub async fn get_or_insert<K, V, F, Fut>(map: &mut HashMap<K, V>, key: K, f: F) -> Result<&V>
 where
     K: Eq + Hash,
diff --git a/build/base/src/extensions/option.rs b/build/base/src/extensions/option.rs
new file mode 100644
index 000000000000..7dcea43661fc
--- /dev/null
+++ b/build/base/src/extensions/option.rs
@@ -0,0 +1,10 @@
+//! Extension methods for `Option`.
+
+
+
+// use crate::prelude::*;
+
+/// Extension methods for `Option`.
+pub trait OptionExt<T> {}
+
+impl<T> OptionExt<T> for Option<T> {}
diff --git a/build/base/src/extensions/path.rs b/build/base/src/extensions/path.rs
new file mode 100644
index 000000000000..3494d7630aa9
--- /dev/null
+++ b/build/base/src/extensions/path.rs
@@ -0,0 +1,184 @@
+//! Extension methods for `Path` and `Path`-like types.
+
+use crate::prelude::*;
+
+use serde::de::DeserializeOwned;
+
+
+
+/// A number of extensions for `Path`-like types.
+pub trait PathExt: AsRef<Path> {
+    /// Append multiple segments to this path.
+    fn join_iter<P: AsRef<Path>>(&self, segments: impl IntoIterator<Item = P>) -> PathBuf {
+        let mut ret = self.as_ref().to_path_buf();
+        ret.extend(segments);
+        ret
+    }
+
+    /// Strips the leading `\\?\` prefix from Windows paths if present.
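+    ///
+    /// A small sketch of the intended behaviour (it mirrors the `stripping_unc_prefix` unit test
+    /// below and uses the same import path the `with_appended_extension` doc-test assumes):
+    /// ```
+    /// use enso_build_base::extensions::path::PathExt;
+    /// use std::path::Path;
+    ///
+    /// let verbatim = Path::new(r"\\?\C:\dir\file.txt");
+    /// assert_eq!(verbatim.without_verbatim_prefix(), Path::new(r"C:\dir\file.txt"));
+    /// // Paths without the prefix are returned unchanged.
+    /// assert_eq!(Path::new("plain").without_verbatim_prefix(), Path::new("plain"));
+    /// ```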
+    fn without_verbatim_prefix(&self) -> &Path {
+        self.as_str().strip_prefix(r"\\?\").map_or(self.as_ref(), Path::new)
+    }
+
+    /// Appends a new extension to the file.
+    ///
+    /// Unlike `set_extension`, it does not try to replace the previous extension.
+    /// Does nothing when the given extension string is empty.
+    ///
+    /// ```
+    /// use enso_build_base::extensions::path::PathExt;
+    /// use std::path::PathBuf;
+    ///
+    /// let path = PathBuf::from("foo.tar").with_appended_extension("gz");
+    /// assert_eq!(path, PathBuf::from("foo.tar.gz"));
+    ///
+    /// let path = PathBuf::from("foo").with_appended_extension("zip");
+    /// assert_eq!(path, PathBuf::from("foo.zip"));
+    /// ```
+    fn with_appended_extension(&self, extension: impl AsRef<str>) -> PathBuf {
+        if extension.as_ref().is_empty() {
+            self.as_ref().into()
+        } else {
+            let mut ret = self.as_ref().to_path_buf().into_os_string();
+            ret.push(".");
+            ret.push(extension.as_ref());
+            ret.into()
+        }
+    }
+
+    /// Parse this file's contents as a JSON-serialized value.
+    #[context("Failed to deserialize file `{}` as type `{}`.", self.as_ref().display(), std::any::type_name::<T>())]
+    fn read_to_json<T: DeserializeOwned>(&self) -> Result<T> {
+        let content = crate::fs::read_to_string(self)?;
+        serde_json::from_str(&content).with_context(|| format!("File content was: {}", content))
+    }
+
+    /// Write this file with a JSON-serialized value.
+    fn write_as_json<T: Serialize>(&self, value: &T) -> Result {
+        trace!("Writing JSON to {}.", self.as_ref().display());
+        let file = crate::fs::create(self)?;
+        serde_json::to_writer(file, value).anyhow_err()
+    }
+
+    /// Parse this file's contents as a YAML-serialized value.
+    fn read_to_yaml<T: DeserializeOwned>(&self) -> Result<T> {
+        let content = crate::fs::read_to_string(self)?;
+        serde_yaml::from_str(&content).anyhow_err()
+    }
+
+    /// Write this file with a YAML-serialized value.
+    fn write_as_yaml<T: Serialize>(&self, value: &T) -> Result {
+        trace!("Writing YAML to {}.", self.as_ref().display());
+        let file = crate::fs::create(self)?;
+        serde_yaml::to_writer(file, value).anyhow_err()
+    }
+
+    /// Get the path as `str`.
+    ///
+    /// # Panics
+    /// This will panic if the path contains invalid UTF-8 characters. Non-UTF-8 paths are not
+    /// something that we want to spend time on supporting right now.
+    fn as_str(&self) -> &str {
+        self.as_ref()
+            .to_str()
+            .unwrap_or_else(|| panic!("Path is not valid UTF-8: {:?}", self.as_ref()))
+    }
+
+    /// Split the path into components and collect them into a new `PathBuf`.
+    ///
+    /// This is useful for `/` -> native separator conversion.
+    fn normalize(&self) -> PathBuf {
+        self.as_ref().components().collect()
+    }
+
+    /// Like `parent` but provides a sensible error message if the path has no parent.
+    fn try_parent(&self) -> Result<&Path> {
+        self.as_ref()
+            .parent()
+            .with_context(|| format!("Failed to get parent of path `{}`.", self.as_ref().display()))
+    }
+
+    /// Like `file_name` but provides a sensible error message if the path has no file name.
+    fn try_file_name(&self) -> Result<&OsStr> {
+        self.as_ref().file_name().with_context(|| {
+            format!("Failed to get file name of path `{}`.", self.as_ref().display())
+        })
+    }
+
+    /// Like `file_stem` but provides a sensible error message if the path has no file stem.
+    fn try_file_stem(&self) -> Result<&OsStr> {
+        self.as_ref().file_stem().with_context(|| {
+            format!("Failed to get file stem of path `{}`.", self.as_ref().display())
+        })
+    }
+
+    /// Like `extension` but provides a sensible error message if the path has no extension.
+    /// Note that this method fails for paths like `foo.`.
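+    ///
+    /// A quick sanity-check sketch (same assumed import path as the other examples in this
+    /// file):
+    /// ```
+    /// use enso_build_base::extensions::path::PathExt;
+    /// use std::path::Path;
+    ///
+    /// assert!(Path::new("archive.tar").try_extension().is_ok());
+    /// // Unlike `Path::extension`, the failure case carries an error message.
+    /// assert!(Path::new("no_extension").try_extension().is_err());
+    /// ```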
+    fn try_extension(&self) -> Result<&OsStr> {
+        self.as_ref().extension().with_context(|| {
+            format!("Failed to get extension of path `{}`.", self.as_ref().display())
+        })
+    }
+
+    /// Takes the filename and splits it into the file stem and extension.
+    ///
+    /// Fails if the path's filename has no extension.
+    fn split_filename(&self) -> Result<SplitFilename> {
+        let stem = self.try_file_stem()?;
+        let extension = self.try_extension()?;
+        Ok(SplitFilename { stem, extension })
+    }
+
+    /// Returns the path with a replaced parent. The filename is kept intact.
+    ///
+    /// If there is no filename in the path, it is fully replaced.
+    fn with_parent(&self, parent: impl AsRef<Path>) -> PathBuf {
+        let mut ret = parent.as_ref().to_path_buf();
+        ret.extend(self.as_ref().file_name());
+        ret
+    }
+}
+
+impl<T: AsRef<Path>> PathExt for T {}
+
+/// A method that displays a value using the `Display` trait.
+pub fn display_fmt(path: &Path, f: &mut Formatter) -> std::fmt::Result {
+    std::fmt::Display::fmt(&path.display(), f)
+}
+
+
+/// A result of splitting a path into its filename components.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub struct SplitFilename<'a> {
+    /// The file stem.
+    pub stem:      &'a OsStr,
+    /// The file extension.
+    pub extension: &'a OsStr,
+}
+
+#[cfg(test)]
+mod tests {
+    #[allow(unused_imports)]
+    use super::*;
+
+    #[test]
+    fn stripping_unc_prefix() {
+        let path_with_unc = Path::new(r"\\?\H:\NBO\ci-build\target\debug\enso-build2.exe");
+        let path_without_unc = Path::new(r"H:\NBO\ci-build\target\debug\enso-build2.exe");
+        assert_eq!(path_with_unc.without_verbatim_prefix(), path_without_unc);
+        assert_eq!(path_without_unc.without_verbatim_prefix(), path_without_unc);
+    }
+
+    /// This test just makes sure that usage of `as_str` compiles without lifetime issues
+    /// (there were such issues before).
+    #[test]
+    fn foo() {
+        fn bar(path: impl AsRef<Path>) {
+            path.as_str();
+            path.as_ref().as_str();
+        }
+
+        bar("");
+        bar(String::from(""));
+    }
+}
diff --git a/build/base/src/extensions/pathbuf.rs b/build/base/src/extensions/pathbuf.rs
new file mode 100644
index 000000000000..543fc7016d99
--- /dev/null
+++ b/build/base/src/extensions/pathbuf.rs
@@ -0,0 +1,22 @@
+//! Extensions to the [`PathBuf`] type.
+
+use crate::prelude::*;
+
+
+
+/// Extension methods for [`PathBuf`].
+pub trait PathBufExt {
+    /// Replace the [parent][std::path::Path::parent] directory of the path, maintaining the
+    /// [filename][std::path::Path::file_name].
+    fn set_parent(&mut self, parent: impl AsRef<Path>);
+}
+
+impl PathBufExt for PathBuf {
+    fn set_parent(&mut self, parent: impl AsRef<Path>) {
+        let parent = parent.as_ref();
+        let filename = self.file_name().map(ToOwned::to_owned);
+        self.clear();
+        self.push(parent);
+        self.extend(filename);
+    }
+}
diff --git a/build/ci_utils/src/extensions/result.rs b/build/base/src/extensions/result.rs
similarity index 58%
rename from build/ci_utils/src/extensions/result.rs
rename to build/base/src/extensions/result.rs
index ef8566dd2fac..6bf6dc16f7b8 100644
--- a/build/ci_utils/src/extensions/result.rs
+++ b/build/base/src/extensions/result.rs
@@ -1,8 +1,12 @@
+//! Extension methods for [`Result`].
+
 use crate::prelude::*;
 
 
 
+/// Extension methods for [`Result`].
 pub trait ResultExt<T, E>: Sized {
+    /// Maps the value and wraps it as a [`Future`].
     #[allow(clippy::type_complexity)]
     fn map_async<'a, T2, F, Fut>(
         self,
         f: F,
     ) ->
     where
         T: 'a,
         F: FnOnce(T) -> Fut,
         Fut: Future<Output = T2> + 'a;
 
+    /// Maps the `Ok` value to a [`Future`] value. If the result is `Err`, the error is returned
+    /// as a [`std::future::Ready`] future.
     fn and_then_async<'a, T2, E2, F, Fut>(
         self,
         f: F,
     ) -> BoxFuture<'a, std::result::Result<T2, E2>>
     where
         F: FnOnce(T) -> Fut,
         Fut: Future<Output = std::result::Result<T2, E2>> + Send + 'a,
         E: Into<E2>,
         T2: Send + 'a,
         E2: Send + 'a;
+
+
+    /// Convert the error type to [`anyhow::Error`].
+    ///
+    /// If there is additional context-specific information, use [`context`] instead.
+    fn anyhow_err(self) -> Result<T>
+    where E: Into<anyhow::Error>;
+
+    /// Convert a [`Result`] of a [`Future`] into a `Future`.
+    fn flatten_fut(
+        self,
+    ) -> futures::future::Either<
+        std::future::Ready<std::result::Result<T::Ok, T::Error>>,
+        futures::future::IntoFuture<T>,
+    >
+    where T: TryFuture<Error: From<E>>;
 }
 
 impl<T, E> ResultExt<T, E> for std::result::Result<T, E> {
@@ -62,4 +84,22 @@ impl ResultExt for std::result::Result {
             Err(e) => ready(Err(e.into())).right_future(),
         }
     }
+
+    fn anyhow_err(self) -> Result<T>
+    where E: Into<anyhow::Error> {
+        self.map_err(E::into)
+    }
+
+    fn flatten_fut(
+        self,
+    ) -> futures::future::Either<
+        std::future::Ready<std::result::Result<T::Ok, T::Error>>,
+        futures::future::IntoFuture<T>,
+    >
+    where T: TryFuture<Error: From<E>> {
+        match self {
+            Ok(fut) => fut.into_future().right_future(),
+            Err(e) => ready(Err(T::Error::from(e))).left_future(),
+        }
+    }
 }
diff --git a/build/base/src/extensions/str.rs b/build/base/src/extensions/str.rs
new file mode 100644
index 000000000000..8c600649e51c
--- /dev/null
+++ b/build/base/src/extensions/str.rs
@@ -0,0 +1,34 @@
+//! Extensions for string-like types.
+
+use crate::prelude::*;
+
+
+
+/// Extension methods for strings and similar types.
+pub trait StrLikeExt {
+    /// Convenience variant of `FromString::from_str`.
+    ///
+    /// Should be preferred over [`str::parse`] due to better error messages.
+    // FIXME: This needs a better name! However, we cannot use `parse` as it conflicts with
+    //        `str::parse`. As a method on `str`, it would take priority over an extension trait.
+    fn parse2<U: FromString>(&self) -> Result<U>;
+
+    /// Convenience variant of `FromString::parse_into`.
+    fn parse_through<T, R>(&self) -> Result<R>
+    where
+        T: FromString + TryInto<R>,
+        <T as TryInto<R>>::Error: Into<anyhow::Error>;
+}
+
+impl<S: AsRef<str>> StrLikeExt for S {
+    fn parse2<U: FromString>(&self) -> Result<U> {
+        U::from_str(self.as_ref())
+    }
+
+    fn parse_through<T, R>(&self) -> Result<R>
+    where
+        T: FromString + TryInto<R>,
+        <T as TryInto<R>>::Error: Into<anyhow::Error>, {
+        T::parse_into(self.as_ref())
+    }
+}
diff --git a/build/base/src/fs.rs b/build/base/src/fs.rs
new file mode 100644
index 000000000000..bc6910ec5a51
--- /dev/null
+++ b/build/base/src/fs.rs
@@ -0,0 +1,221 @@
+//! Module meant as a replacement for the [std::fs] module.
+//!
+//! Functionality and APIs are basically the same, but the functions are enriched with logging
+//! and improved diagnostics. Most importantly, file operation failures will display the
+//! relevant path.
+
+use crate::prelude::*;
+
+use std::io::Write;
+
+
+// ==============
+// === Export ===
+// ==============
+
+pub mod wrappers;
+
+pub use wrappers::*;
+
+
+
+/// Like the standard version but will create any missing parent directories from the path.
+#[context("Failed to write path: {}", path.as_ref().display())]
+pub fn write(path: impl AsRef<Path>, contents: impl AsRef<[u8]>) -> Result {
+    create_parent_dir_if_missing(&path)?;
+    wrappers::write(&path, &contents)
+}
+
+/// Serialize the data to JSON text and write it to the file.
+///
+/// See [`write()`].
+#[context("Failed to write path: {}", path.as_ref().display())]
+pub fn write_json(path: impl AsRef<Path>, contents: &impl Serialize) -> Result {
+    let contents = serde_json::to_string(contents)?;
+    write(&path, &contents)
+}
+
+/// Like the standard version but will create any missing parent directories from the path.
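+///
+/// A hypothetical usage sketch (marked `no_run` because it touches the filesystem; the
+/// `deeply/nested` path is made up for illustration, and the module is assumed to be exported
+/// at the crate root):
+/// ```no_run
+/// # fn main() -> anyhow::Result<()> {
+/// let file = enso_build_base::fs::create("deeply/nested/dir/output.txt")?;
+/// // `deeply/nested/dir` was created on demand; `file` is now open for writing.
+/// # Ok(())
+/// # }
+/// ```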
+#[context("Failed to open path for writing: {}", path.as_ref().display())] +pub fn create(path: impl AsRef) -> Result { + create_parent_dir_if_missing(&path)?; + wrappers::create(&path) +} + +/// Read the file content and parse it using [`FromString`]. +#[context("Failed to read the file: {}", path.as_ref().display())] +pub fn read_string_into(path: impl AsRef) -> Result { + read_to_string(&path)?.parse2() +} + +/// Create a directory (and all missing parent directories), +/// +/// Does not fail when a directory already exists. +#[context("Failed to create directory {}", path.as_ref().display())] +pub fn create_dir_if_missing(path: impl AsRef) -> Result { + let result = std::fs::create_dir_all(&path); + match result { + Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => Ok(()), + result => result.anyhow_err(), + } +} + +/// Create a parent directory for path (and all missing parent directories), +/// +/// Does not fail when a directory already exists. +#[context("Failed to create parent directory for {}", path.as_ref().display())] +pub fn create_parent_dir_if_missing(path: impl AsRef) -> Result { + if let Some(parent) = path.as_ref().parent() { + create_dir_if_missing(parent)?; + Ok(parent.into()) + } else { + bail!("No parent directory for path {}.", path.as_ref().display()) + } +} + +/// Remove a directory with all its subtree. +/// +/// Does not fail if the directory is not found. +#[tracing::instrument(fields(path = %path.as_ref().display()))] +#[context("Failed to remove directory {}", path.as_ref().display())] +pub fn remove_dir_if_exists(path: impl AsRef) -> Result { + let result = std::fs::remove_dir_all(&path); + match result { + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()), + result => result.anyhow_err(), + } +} + +/// Remove a regular file. +/// +/// Does not fail if the file is not found. +#[tracing::instrument(fields(path = %path.as_ref().display()))] +#[context("Failed to remove file {}", path.as_ref().display())] +pub fn remove_file_if_exists(path: impl AsRef) -> Result<()> { + let result = std::fs::remove_file(&path); + match result { + Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()), + result => result.anyhow_err(), + } +} + +/// Remove a file being either directory or regular file.. +/// +/// Does not fail if the file is not found. +#[context("Failed to remove entry {} (if exists)", path.as_ref().display())] +pub fn remove_if_exists(path: impl AsRef) -> Result { + let path = path.as_ref(); + if path.is_dir() { + remove_dir_if_exists(path) + } else { + remove_file_if_exists(path) + } +} + +/// Recreate directory, so it exists and is empty. +pub fn reset_dir(path: impl AsRef) -> Result { + let path = path.as_ref(); + debug!("Will reset directory {}", path.display()); + remove_dir_if_exists(path)?; + create_dir_if_missing(path)?; + Ok(()) +} + +/// Fail if the given path does not exist. +pub fn require_exist(path: impl AsRef) -> Result { + if path.as_ref().exists() { + trace!("{} does exist.", path.as_ref().display()); + Ok(()) + } else { + bail!("{} does not exist.", path.as_ref().display()) + } +} + +/// Check if the both path are equal. +/// +/// This performs canonicalization of the paths before comparing them. As such, it requires that the +/// both paths exist. +pub fn same_existing_path(source: impl AsRef, destination: impl AsRef) -> Result { + Ok(canonicalize(source)? == canonicalize(destination)?) +} + +/// Fail if the given path is not an existing directory. 
+#[context("Failed because the path does not point to a directory: {}", path.as_ref().display())] +pub fn expect_dir(path: impl AsRef) -> Result { + let filetype = metadata(&path)?.file_type(); + if filetype.is_dir() { + Ok(()) + } else { + bail!("File is not directory, its type is: {filetype:?}") + } +} + + +/// Fail if the given path is not an existing file. +#[context("Failed because the path does not point to a regular file: {}", path.as_ref().display())] +pub fn expect_file(path: impl AsRef) -> Result { + let filetype = metadata(&path)?.file_type(); + if filetype.is_file() { + Ok(()) + } else { + bail!("File is not a regular file, its type is: {filetype:?}") + } +} + +/// Change the file permissions, so the owner can execute it. +#[context("Failed to update permissions on `{}`", path.as_ref().display())] +pub fn allow_owner_execute(path: impl AsRef) -> Result { + #[cfg(not(target_os = "windows"))] + { + use std::os::unix::prelude::*; + debug!("Setting executable permission on {}", path.as_ref().display()); + let metadata = path.as_ref().metadata()?; + let mut permissions = metadata.permissions(); + let mode = permissions.mode(); + let owner_can_execute = 0o0100; + permissions.set_mode(mode | owner_can_execute); + set_permissions(path.as_ref(), permissions)?; + } + Ok(()) +} + +/// Check if the files are of identical content. +pub fn check_if_identical(source: impl AsRef, target: impl AsRef) -> Result { + // Different length means different content, no need to read. + if metadata(&source)?.len() != metadata(&target)?.len() { + return Ok(false); + } + + // TODO: Not good for large files, should process them chunk by chunk. + Ok(read(&source)? == read(&target)?) +} + +/// Copy a file from source to target, unless they are already identical. +/// +/// This is meant not really as an optimization for copying, but rather as a way to avoid +/// unnecessary file changes, which would trigger unnecessary rebuilds. +pub fn copy_file_if_different(source: impl AsRef, target: impl AsRef) -> Result { + if check_if_identical(&source, &target).contains(&true) { + trace!("Files are identical, not copying from {}.", source.as_ref().display()); + } else { + trace!( + "Modified, will copy {} to {}.", + source.as_ref().display(), + target.as_ref().display() + ); + copy(&source, &target)?; + } + Ok(()) +} + +/// Append contents to the file. +/// +/// If the file does not exist, it will be created. +pub fn append(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result { + std::fs::OpenOptions::new() + .append(true) + .create(true) + .open(&path) + .context(format!("Failed to open {} for writing.", path.as_ref().display()))? + .write_all(contents.as_ref()) + .context(format!("Failed to write to {}.", path.as_ref().display())) +} diff --git a/build/base/src/fs/wrappers.rs b/build/base/src/fs/wrappers.rs new file mode 100644 index 000000000000..71ed4647b4a0 --- /dev/null +++ b/build/base/src/fs/wrappers.rs @@ -0,0 +1,87 @@ +//! Wrappers over [`std::fs`] functions that provide sensible error messages, i.e. explaining what +//! operation was attempted and what was the relevant path. +//! +//! Unless there is a specific reason to use the standard library functions, you should use these. + +use crate::prelude::*; + +use std::fs::File; +use std::fs::Metadata; + + + +// ============== +// === Export === +// ============== + +/// See [std::fs::metadata]. 
+#[context("Failed to obtain metadata for file: {}", path.as_ref().display())] +pub fn metadata>(path: P) -> Result { + std::fs::metadata(&path).anyhow_err() +} + +/// See [std::fs::copy]. +#[context("Failed to copy file from {} to {}", from.as_ref().display(), to.as_ref().display())] +pub fn copy(from: impl AsRef, to: impl AsRef) -> Result { + std::fs::copy(&from, &to).anyhow_err() +} + +/// See [std::fs::rename]. +#[context("Failed to rename file from {} to {}", from.as_ref().display(), to.as_ref().display())] +pub fn rename(from: impl AsRef, to: impl AsRef) -> Result { + std::fs::rename(&from, &to).anyhow_err() +} + +/// See [std::fs::read]. +#[context("Failed to read the file: {}", path.as_ref().display())] +pub fn read(path: impl AsRef) -> Result> { + std::fs::read(&path).anyhow_err() +} + +/// See [std::fs::read_dir]. +#[context("Failed to read the directory: {}", path.as_ref().display())] +pub fn read_dir(path: impl AsRef) -> Result { + std::fs::read_dir(&path).anyhow_err() +} + +/// See [std::fs::read_to_string]. +#[context("Failed to read the file: {}", path.as_ref().display())] +pub fn read_to_string(path: impl AsRef) -> Result { + std::fs::read_to_string(&path).anyhow_err() +} + +/// See [std::fs::write]. +#[context("Failed to write path: {}", path.as_ref().display())] +pub fn write(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result { + std::fs::write(&path, contents).anyhow_err() +} + +/// See [std::fs::File::open]. +#[context("Failed to open path for reading: {}", path.as_ref().display())] +pub fn open(path: impl AsRef) -> Result { + File::open(&path).anyhow_err() +} + +/// See [std::fs::File::create]. +#[context("Failed to open path for writing: {}", path.as_ref().display())] +pub fn create(path: impl AsRef) -> Result { + File::create(&path).anyhow_err() +} + +/// See [std::fs::canonicalize]. +#[context("Failed to canonicalize path: {}", path.as_ref().display())] +pub fn canonicalize(path: impl AsRef) -> Result { + std::fs::canonicalize(&path).anyhow_err() +} + +/// See [std::fs::create_dir_all]. +#[context("Failed to create missing directories no path: {}", path.as_ref().display())] +pub fn create_dir_all(path: impl AsRef) -> Result { + std::fs::create_dir_all(&path).anyhow_err() +} + +/// See [std::fs::set_permissions]. +#[context("Failed to permissions on file: {}", path.as_ref().display())] +pub fn set_permissions(path: impl AsRef, perm: std::fs::Permissions) -> Result { + std::fs::set_permissions(&path, perm).anyhow_err() +} diff --git a/build/base/src/lib.rs b/build/base/src/lib.rs new file mode 100644 index 000000000000..651529983438 --- /dev/null +++ b/build/base/src/lib.rs @@ -0,0 +1,112 @@ +//! This crate is meant to provide a foundational set of utilities and reexports, that should be +//! common for the whole Enso codebase. Eventually both WASM and native code should use this crate. +//! +//! Currently it is employed by the native build scripts code. + +// === Features === +#![feature(pin_macro)] +#![feature(default_free_fn)] +#![feature(result_flattening)] +#![feature(associated_type_bounds)] +#![feature(extend_one)] +#![feature(option_result_contains)] +// === Standard Linter Configuration === +#![deny(non_ascii_idents)] +#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] +#![allow(clippy::let_and_return)] +// === Non-Standard Linter Configuration === +#![warn(missing_docs)] + + +// ============== +// === Export === +// ============== + +pub mod extensions; +pub mod fs; + + + +pub mod prelude { + //! 
+    //! Enso codebase.
+
+    /// anyhow-based result type.
+    pub type Result<T = ()> = anyhow::Result<T>;
+
+    pub use std::borrow::Borrow;
+    pub use std::borrow::BorrowMut;
+    pub use std::borrow::Cow;
+    pub use std::collections::BTreeMap;
+    pub use std::collections::BTreeSet;
+    pub use std::collections::HashMap;
+    pub use std::collections::HashSet;
+    pub use std::default::default;
+    pub use std::ffi::OsStr;
+    pub use std::ffi::OsString;
+    pub use std::fmt::Debug;
+    pub use std::fmt::Display;
+    pub use std::fmt::Formatter;
+    pub use std::future::ready;
+    pub use std::future::Future;
+    pub use std::hash::Hash;
+    pub use std::io::Read;
+    pub use std::io::Seek;
+    pub use std::iter::once;
+    pub use std::iter::FromIterator;
+    pub use std::marker::PhantomData;
+    pub use std::ops::Deref;
+    pub use std::ops::DerefMut;
+    pub use std::ops::Range;
+    pub use std::path::Path;
+    pub use std::path::PathBuf;
+    pub use std::pin::pin;
+    pub use std::pin::Pin;
+    pub use std::sync::Arc;
+
+    pub use crate::extensions::from_string::FromString;
+    pub use crate::extensions::future::FutureExt as _;
+    pub use crate::extensions::future::TryFutureExt as _;
+    pub use crate::extensions::iterator::IteratorExt as _;
+    pub use crate::extensions::iterator::TryIteratorExt as _;
+    pub use crate::extensions::option::OptionExt as _;
+    pub use crate::extensions::path::PathExt as _;
+    pub use crate::extensions::pathbuf::PathBufExt as _;
+    pub use crate::extensions::result::ResultExt as _;
+    pub use crate::extensions::str::StrLikeExt as _;
+
+    pub use anyhow::anyhow;
+    pub use anyhow::bail;
+    pub use anyhow::ensure;
+    pub use anyhow::Context as _;
+    pub use fn_error_context::context;
+    pub use futures_util::future::BoxFuture;
+    pub use futures_util::select;
+    pub use futures_util::stream::BoxStream;
+    pub use futures_util::try_join;
+    pub use futures_util::AsyncWrite;
+    pub use futures_util::FutureExt as _;
+    pub use futures_util::Stream;
+    pub use futures_util::StreamExt as _;
+    pub use futures_util::TryFuture;
+    pub use futures_util::TryFutureExt as _;
+    pub use futures_util::TryStream;
+    pub use futures_util::TryStreamExt as _;
+    pub use serde::de::DeserializeOwned;
+    pub use serde::Deserialize;
+    pub use serde::Serialize;
+    pub use tracing::debug;
+    pub use tracing::debug_span;
+    pub use tracing::error;
+    pub use tracing::error_span;
+    pub use tracing::info;
+    pub use tracing::info_span;
+    pub use tracing::instrument;
+    pub use tracing::span;
+    pub use tracing::trace;
+    pub use tracing::trace_span;
+    pub use tracing::warn;
+    pub use tracing::warn_span;
+    pub use tracing::Instrument;
+}
diff --git a/build/build/Cargo.toml b/build/build/Cargo.toml
index 808f02f7b211..c6d7731eadbb 100644
--- a/build/build/Cargo.toml
+++ b/build/build/Cargo.toml
@@ -32,14 +32,15 @@ glob = "0.3.0"
 #handlebars = "4.2.1"
 heck = "0.4.0"
 humantime = "2.1.0"
+enso-build-base = { path = "../base" }
 ide-ci = { path = "../ci_utils" }
 indexmap = "1.7.0"
 indicatif = "0.17.1"
 itertools = "0.10.1"
 lazy_static = "1.4.0"
 #git2 = "0.13.25"
-log = "0.4.14"
 mime = "0.3.16"
+new_mime_guess = "4.0.1"
 nix = { workspace = true }
 octocrab = { git = "https://github.com/enso-org/octocrab", default-features = false, features = [
   "rustls"
@@ -47,6 +48,7 @@ octocrab = { git = "https://github.com/enso-org/octocrab", default-features = fa
 ouroboros = "0.15.0"
 paste = "1.0.7"
 path-absolutize = "3.0.11"
+path-slash = "0.2.1"
 platforms = { version = "3.0.0", features = ["serde"] }
 pin-project = "1.0.8"
port_check = "0.1.5" diff --git a/build/build/examples/experiments.rs b/build/build/examples/experiments.rs index 413063bd7c77..98d11afbf31e 100644 --- a/build/build/examples/experiments.rs +++ b/build/build/examples/experiments.rs @@ -1,7 +1,7 @@ use enso_build::prelude::*; use enso_build::setup_octocrab; -use ide_ci::models::config::RepoContext; +use ide_ci::github::Repo; use octocrab::models::ReleaseId; @@ -9,9 +9,10 @@ use octocrab::models::ReleaseId; #[tokio::main] async fn main() -> Result { let octo = setup_octocrab().await?; - let repo = RepoContext::from_str("enso-org/enso-staging")?; - let handler = repo.repos(&octo); - let releases = handler.releases(); + let repo = Repo::from_str("enso-org/enso-staging")?; + let handler = repo.handle(&octo); + let repos = handler.repos(); + let releases = repos.releases(); let release = releases.get_by_id(ReleaseId(59585385)).await?; dbg!(&release); diff --git a/build/build/examples/s3.rs b/build/build/examples/s3.rs index 4aa2824ffd24..d4cd7e847e15 100644 --- a/build/build/examples/s3.rs +++ b/build/build/examples/s3.rs @@ -3,7 +3,7 @@ use enso_build::prelude::*; use aws_sdk_s3::model::ObjectCannedAcl; use aws_sdk_s3::types::ByteStream; use aws_sdk_s3::Client; -use enso_build::aws::BucketContext; +use enso_build::aws::s3::BucketContext; use enso_build::aws::EDITIONS_BUCKET_NAME; @@ -15,7 +15,7 @@ async fn main() -> Result { client: Client::new(&config), bucket: EDITIONS_BUCKET_NAME.to_string(), upload_acl: ObjectCannedAcl::PublicRead, - key_prefix: "enso".into(), + key_prefix: Some("enso".into()), }; // std::env::set_var("AWS_SECRET_ACCESS_KEY", std::env::var("AWS_SECRET_ACCESS_KEY")?.trim()); diff --git a/build/build/paths.yaml b/build/build/paths.yaml index 15f7eea41ebb..597e567bf85e 100644 --- a/build/build/paths.yaml +++ b/build/build/paths.yaml @@ -8,6 +8,7 @@ changelog.yml: gui.yml: nightly.yml: + release.yml: scala-new.yml: app/: gui/: @@ -26,15 +27,13 @@ "project-manager-bundle-": enso: dist/: - bin/: - client/: - content/: + gui/: assets/: - package.json: - preload.js: - icons/: - project-manager/: - tmp/: + ide.wasm: + index.js: + style.css: + wasm_imports.js: + # Final WASM artifacts in `dist` directory. wasm/: ? path: ide.wasm @@ -43,13 +42,15 @@ var: wasm_main_raw ? path: ide.js var: wasm_glue - init: - build-init: - build.json: distribution/: editions/: .yaml: engine/: + runner-native/: + src/: + test/: + resources/: + Factorial.enso: runtime/: target/: bench-report.xml: @@ -68,6 +69,7 @@ simple-library-server/: build.sbt: run: + runner: # The runner native image (Linux only). 
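+  # Note: the `? path: ... / var: ...` mapping entries above appear to bind custom variable
+  # names (e.g. `wasm_main_raw`) to generated path types, while plain entries mirror the
+  # repository layout.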
CHANGELOG.md: project-manager/: diff --git a/build/build/src/aws.rs b/build/build/src/aws.rs index 143c0f64c725..928fb0b6276e 100644 --- a/build/build/src/aws.rs +++ b/build/build/src/aws.rs @@ -1,14 +1,11 @@ use crate::prelude::*; -use crate::version::BuildKind; +use crate::version::Kind; use anyhow::Context; use aws_sdk_s3::model::ObjectCannedAcl; -use aws_sdk_s3::output::PutObjectOutput; use aws_sdk_s3::types::ByteStream; -use bytes::Buf; -use ide_ci::models::config::RepoContext; -use serde::de::DeserializeOwned; +use s3::BucketContext; // ============== @@ -16,6 +13,7 @@ use serde::de::DeserializeOwned; // ============== pub mod ecr; +pub mod s3; @@ -49,7 +47,7 @@ impl Edition { self.0.contains("nightly") || Version::find_in_text(self) .as_ref() - .map_or(false, |version| BuildKind::Nightly.matches(version)) + .map_or(false, |version| Kind::Nightly.matches(version)) } } @@ -87,56 +85,12 @@ impl Manifest { } } -#[derive(Clone, Debug)] -pub struct BucketContext { - pub client: aws_sdk_s3::Client, - pub bucket: String, - pub upload_acl: ObjectCannedAcl, - pub key_prefix: String, -} - -impl BucketContext { - pub async fn get(&self, path: &str) -> Result { - Ok(self - .client - .get_object() - .bucket(&self.bucket) - .key(format!("{}/{}", self.key_prefix, path)) - .send() - .await? - .body) - } - - pub async fn put(&self, path: &str, data: ByteStream) -> Result { - dbg!(self - .client - .put_object() - .bucket(&self.bucket) - .acl(self.upload_acl.clone()) - .key(format!("{}/{}", self.key_prefix, path)) - .body(data)) - .send() - .await - .anyhow_err() - } - - pub async fn get_yaml(&self, path: &str) -> Result { - let text = self.get(path).await?.collect().await?; - serde_yaml::from_reader(text.reader()).anyhow_err() - } - - pub async fn put_yaml(&self, path: &str, data: &impl Serialize) -> Result { - let buf = serde_yaml::to_string(data)?; - self.put(path, ByteStream::from(buf.into_bytes())).await - } -} - -pub async fn update_manifest(repo_context: &RepoContext, edition_file: &Path) -> Result { +pub async fn update_manifest(repo_context: &impl IsRepo, edition_file: &Path) -> Result { let bucket_context = BucketContext { client: aws_sdk_s3::Client::new(&aws_config::load_from_env().await), bucket: EDITIONS_BUCKET_NAME.to_string(), upload_acl: ObjectCannedAcl::PublicRead, - key_prefix: repo_context.name.clone(), + key_prefix: Some(repo_context.name().to_string()), }; let new_edition_name = Edition( diff --git a/build/build/src/aws/ecr.rs b/build/build/src/aws/ecr.rs index 41cce0a8c2b6..b24a65edd400 100644 --- a/build/build/src/aws/ecr.rs +++ b/build/build/src/aws/ecr.rs @@ -12,7 +12,8 @@ pub mod runtime; -#[instrument(skip(client))] +/// Lookup the repository by name. +#[instrument(skip(client), err)] pub async fn resolve_repository( client: &aws_sdk_ecr::Client, repository_name: &str, @@ -23,10 +24,11 @@ pub async fn resolve_repository( .repositories .context("Missing repositories information.")? .pop() - .context(format!("Cannot find repository {repository_name} in the registry.")) + .with_context(|| format!("Cannot find repository {repository_name} in the registry.")) } -#[instrument(skip(client))] +/// Generate an authentication token for the repository. 
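+///
+/// Per the AWS ECR API, the returned authorization token encodes `user:password` in base64;
+/// the body below derives Docker-usable credentials from it.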
+#[instrument(skip(client), err)] pub async fn get_credentials(client: &aws_sdk_ecr::Client) -> Result { let token = client.get_authorization_token().send().await?; let auth_data = token @@ -47,6 +49,7 @@ pub async fn get_credentials(client: &aws_sdk_ecr::Client) -> Result aws_sdk_ecr::Client { + let config = aws_config::load_from_env().await; + aws_sdk_ecr::Client::new(&config) +} diff --git a/build/build/src/aws/ecr/runtime.rs b/build/build/src/aws/ecr/runtime.rs index 2c03cbcba246..72a121b9cdee 100644 --- a/build/build/src/aws/ecr/runtime.rs +++ b/build/build/src/aws/ecr/runtime.rs @@ -1,3 +1,5 @@ +//! This module contains data necessary to deploy Enso Runtime to the cloud. + use crate::prelude::*; use crate::paths::generated; @@ -8,11 +10,13 @@ use ide_ci::programs::Docker; -/// Name of the repository. +/// Name of the ECR repository with Runtime images. pub const NAME: &str = "runtime"; +/// Region where the ECR repository with Runtime images is located. pub const REGION: &str = "eu-west-1"; +/// Build the Runtime Docker image from the Engine package. #[instrument(fields(%dockerfile, %engine_package_root))] pub async fn build_runtime_image( dockerfile: generated::RepoRootToolsCiDocker, diff --git a/build/build/src/aws/s3.rs b/build/build/src/aws/s3.rs new file mode 100644 index 000000000000..db720704bc30 --- /dev/null +++ b/build/build/src/aws/s3.rs @@ -0,0 +1,198 @@ +//! Code supporting our S3 operations. + +use crate::prelude::*; + +use aws_sdk_s3::model::ObjectCannedAcl; +use aws_sdk_s3::output::PutObjectOutput; +use aws_sdk_s3::types::ByteStream; +use bytes::Buf; +use enso_build_base::extensions::path::SplitFilename; +use mime::Mime; + + +// ============== +// === Export === +// ============== + +pub mod gui; + + + +/// Construct client from the environment. +pub async fn client_from_env() -> aws_sdk_s3::Client { + aws_sdk_s3::Client::new(&aws_config::load_from_env().await) +} + +/// Everything we need to get/put files to S3. +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct BucketContext { + #[derivative(Debug = "ignore")] + pub client: aws_sdk_s3::Client, + pub bucket: String, + pub upload_acl: ObjectCannedAcl, + /// Prefix that will be prepended to the object key. + pub key_prefix: Option, +} + +impl BucketContext { + pub fn key(&self, path: impl AsRef) -> String { + let path = path.as_ref(); + let normalized = path_slash::PathExt::to_slash_lossy(path); + if let Some(prefix) = &self.key_prefix { + format!("{}/{}", prefix, normalized) + } else { + normalized.into() + } + } + + pub async fn get(&self, path: &str) -> Result { + trace!("Downloading {path} at {self:?}."); + Ok(self + .client + .get_object() + .bucket(&self.bucket) + .key(self.key(path)) + .send() + .await + .with_context(|| { + format!("Failed to download {} from S3 bucket {}.", self.key(path), self.bucket) + })? + .body) + } + + pub async fn put(&self, path: &str, data: ByteStream) -> Result { + trace!("Uploading {path} at {self:?}."); + let mut request = self + .client + .put_object() + .bucket(&self.bucket) + .acl(self.upload_acl.clone()) + .key(self.key(path)) + .body(data); + + // Cloud requested us to set content encoding and type. 
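+        // E.g. for "ide.wasm.gz" this sets `Content-Type: application/wasm` and
+        // `Content-Encoding: gzip` (cf. the `deduce_content_headers` test below).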
+ let content_headers = ContentHeaders::from_path(path); + request = content_headers.apply(request); + + request.send().await.with_context(|| { + format!("Failed to upload {} to S3 bucket {}.", self.key(path), self.bucket) + }) + } + + #[instrument(fields(path = %path.as_ref().display()))] + pub async fn put_file(&self, path: impl AsRef) -> Result { + let path = path.as_ref(); + let stream = ByteStream::from_path(path).await?; + let path = path.file_name().with_context(|| format!("Path {:?} has no file name", path))?; + self.put(path.as_str(), stream).await + } + + pub async fn get_yaml(&self, path: &str) -> Result { + let text = self.get(path).await?.collect().await?; + serde_yaml::from_reader(text.reader()).anyhow_err() + } + + pub async fn put_yaml(&self, path: &str, data: &impl Serialize) -> Result { + let buf = serde_yaml::to_string(data)?; + self.put(path, ByteStream::from(buf.into_bytes())).await + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq)] +pub enum ContentEncoding { + Gzip, +} + +impl ContentEncoding { + pub fn from_ext(ext: &str) -> Result { + match ext { + "gz" => Ok(Self::Gzip), + _ => bail!("Cannot recognize content encoding from extension: {}", ext), + } + } +} + +/// Describe additional content-related headers that we might want to set. +#[derive(Clone, Debug)] +pub struct ContentHeaders { + /// Encoding of the content. Typically compression, if any. + pub content_encoding: Option, + /// MIME type of the content. + pub content_type: Mime, +} + +impl Default for ContentHeaders { + fn default() -> Self { + Self { content_encoding: None, content_type: mime::APPLICATION_OCTET_STREAM } + } +} + +impl ContentHeaders { + pub fn new(content_type: Mime) -> Self { + Self { content_type, ..default() } + } + + pub fn content_encoding(&self) -> Option<&'static str> { + self.content_encoding.as_ref().map(|enc| match enc { + ContentEncoding::Gzip => "gzip", + }) + } + + pub fn from_path(path: impl AsRef) -> Self { + let Ok(SplitFilename{ extension: outermost_extension, stem}) = path.split_filename() else { + // No extension, use defaults. + return default() + }; + + let Ok(next_extension) = stem.try_extension() else { + // Only one extension, use primary MIME. + let content_type = new_mime_guess::from_ext(outermost_extension.as_str()).first_or_octet_stream(); + return Self::new(content_type) + }; + + if let Ok(content_encoding) = ContentEncoding::from_ext(outermost_extension.as_str()) { + // Two extensions, use primary MIME and encoding. + let content_type = + new_mime_guess::from_ext(next_extension.as_str()).first_or_octet_stream(); + Self { content_encoding: Some(content_encoding), content_type } + } else { + // No encoding, use primary MIME. 
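+            // E.g. "index.js" yields `application/javascript` with no encoding header.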
+ let content_type = + new_mime_guess::from_ext(outermost_extension.as_str()).first_or_octet_stream(); + Self::new(content_type) + } + } + + pub fn apply( + &self, + mut request: aws_sdk_s3::client::fluent_builders::PutObject, + ) -> aws_sdk_s3::client::fluent_builders::PutObject { + if let Some(content_encoding) = self.content_encoding() { + request = request.content_encoding(content_encoding); + } + request.content_type(&self.content_type.to_string()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn deduce_content_headers() -> Result { + fn case(path: &str, expected_encoding: Option<&str>, expected_type: &str) { + let headers = ContentHeaders::from_path(path); + assert_eq!(headers.content_encoding(), expected_encoding); + assert_eq!(headers.content_type.to_string().as_str(), expected_type); + } + + case("wasm_imports.js.gz", Some("gzip"), "application/javascript"); + case("index.js", None, "application/javascript"); + case("style.css", None, "text/css"); + case("ide.wasm", None, "application/wasm"); + case("ide.wasm.gz", Some("gzip"), "application/wasm"); + + Ok(()) + } +} diff --git a/build/build/src/aws/s3/gui.rs b/build/build/src/aws/s3/gui.rs new file mode 100644 index 000000000000..7462e260ce12 --- /dev/null +++ b/build/build/src/aws/s3/gui.rs @@ -0,0 +1,36 @@ +use crate::prelude::*; + +use crate::aws::s3::BucketContext; + +use aws_config::meta::region::RegionProviderChain; + + + +/// AWS Region of the `ensocdn` bucket. +pub const BUCKET_REGION: &str = "us-west-1"; + +/// The bucket where the GUI releases are stored. +pub const BUCKET: &str = "ensocdn"; + +/// As default but with the region resolution fallback. +/// +/// We do know the region, so we should not require it. Still, it is allowed to overwrite it through +/// the environment. +pub async fn client_from_env() -> Result { + let region = RegionProviderChain::default_provider().or_else(BUCKET_REGION); + let config = aws_config::from_env().region(region).load().await; + let client = aws_sdk_s3::Client::new(&config); + Ok(client) +} + +/// Construct a context for handling a given GUI version release. +/// +/// Requires AWS credentials in the environment. 
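+///
+/// E.g. for version `2022.1.1-nightly.2022-11-08` (illustrative), uploaded objects land under
+/// the `ide/2022.1.1-nightly.2022-11-08/` key prefix in the `ensocdn` bucket.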
+pub async fn context(version: &Version) -> Result { + Ok(BucketContext { + client: client_from_env().await?, + bucket: BUCKET.to_string(), + upload_acl: aws_sdk_s3::model::ObjectCannedAcl::PublicRead, + key_prefix: Some(format!("ide/{version}")), + }) +} diff --git a/build/build/src/changelog/check.rs b/build/build/src/changelog/check.rs index 3158c6e8bb32..d5349f35c404 100644 --- a/build/build/src/changelog/check.rs +++ b/build/build/src/changelog/check.rs @@ -4,7 +4,7 @@ use crate::ci::labels::NO_CHANGELOG_CHECK; use crate::paths::generated::RepoRoot; use ide_ci::actions::workflow::MessageLevel; -use ide_ci::programs::Git; +use ide_ci::programs::git; @@ -49,7 +49,7 @@ pub async fn check(repo_path: RepoRoot, context: ide_ci::actions::Context) -> Re let repository = context.payload.repository.context("Missing repository information.")?; let default_branch = repository.default_branch.context("Missing default branch information.")?; - let git = Git::new(&repo_path).await?; + let git = git::Context::new(&repo_path).await?; git.fetch_branch(REMOTE_NAME, &default_branch).await?; let remote_base = format!("{REMOTE_NAME}/{default_branch}"); let files_changed = git.diff_against(remote_base).await?; diff --git a/build/build/src/config.rs b/build/build/src/config.rs index 61226ef43197..40bfebaeb4cd 100644 --- a/build/build/src/config.rs +++ b/build/build/src/config.rs @@ -51,29 +51,23 @@ pub struct ConfigRaw { /// The configuration of the script that is being provided by the external environment. /// /// In our case, it is usually a configuration file in the main repository. -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug, Default, Serialize, Deserialize)] pub struct Config { pub wasm_size_limit: Option, pub required_versions: HashMap, } impl Config { + /// Check whether all the required programs are available and have the required versions. pub async fn check_programs(&self) -> Result { - for (program, version_req) in &self.required_versions { - let found = program.version().await?; - if !version_req.matches(&found) { - bail!( - "Found program {} in version {} that does not fulfill requirement {}.", - program, - found, - version_req - ); - } else { - info!( - "Found program {} in supported version {} (required {}).", - program, found, version_req - ); - } + let check_tasks = self + .required_versions + .iter() + .map(|(program, version_req)| check_program(program, version_req)); + let results = futures::future::join_all(check_tasks).await; + let errors = results.into_iter().filter_map(Result::err).collect_vec(); + if !(errors.is_empty()) { + bail!("Some required programs are not available or have wrong versions: {errors:?}") } Ok(()) } @@ -101,10 +95,40 @@ impl TryFrom for Config { } } +/// Check if the given program is installed in the system and has the required version. 
+pub async fn check_program(program: &RecognizedProgram, version_req: &VersionReq) -> Result { + let found = program.version().await?; + if !version_req.matches(&found) { + bail!( + "Found program `{}` in version `{}` that does not fulfill requirement `{}`.", + program, + found, + version_req + ); + } else { + info!( + "Found program `{}` in supported version `{}` (required `{}`).", + program, found, version_req + ); + } + Ok(()) +} + #[cfg(test)] mod tests { use super::*; use ide_ci::log::setup_logging; + use ide_ci::programs::Node; + + #[tokio::test] + async fn check_node_version() -> Result { + setup_logging()?; + + let version = Node.parse_version("v16.13.2")?; + let requirement = VersionReq::parse("=16.15.0")?; + assert!(!requirement.matches(&version)); + Ok(()) + } #[tokio::test] #[ignore] @@ -125,4 +149,26 @@ required-versions: Ok(()) } + + #[tokio::test] + async fn deserialize_config_in_repo() -> Result { + setup_logging()?; + // let config = include_str!("../../../build-config.yaml"); + let config = r#"# Options intended to be common for all developers. + +wasm-size-limit: 15.25 MiB + +required-versions: + cargo-watch: ^8.1.1 + node: =16.15.0 + wasm-pack: ^0.10.2 +# TODO [mwu]: Script can install `flatc` later on (if `conda` is present), so this is not required. However it should +# be required, if `conda` is missing. +# flatc: =1.12.0 +"#; + let config = serde_yaml::from_str::(config)?; + dbg!(&config); + dbg!(Config::try_from(config))?; + Ok(()) + } } diff --git a/build/build/src/context.rs b/build/build/src/context.rs index 2cf55e8ad111..7107bf347657 100644 --- a/build/build/src/context.rs +++ b/build/build/src/context.rs @@ -3,8 +3,8 @@ use crate::prelude::*; use crate::paths::TargetTriple; use derivative::Derivative; -use ide_ci::models::config::RepoContext; -use ide_ci::programs::Git; +use ide_ci::github; +use ide_ci::programs::git; use octocrab::models::repos::Release; use octocrab::models::ReleaseId; @@ -25,7 +25,7 @@ pub struct BuildContext { /// Remote repository is used for release-related operations. This also includes deducing a new /// version number. - pub remote_repo: RepoContext, + pub remote_repo: ide_ci::github::Repo, } impl BuildContext { @@ -34,7 +34,7 @@ impl BuildContext { async move { match ide_ci::actions::env::GITHUB_SHA.get() { Ok(commit) => Ok(commit), - Err(_e) => Git::new(root).await?.head_hash().await, + Err(_e) => git::Context::new(root).await?.head_hash().await, } } .boxed() @@ -45,18 +45,16 @@ impl BuildContext { &self, designator: String, ) -> BoxFuture<'static, Result> { - let repository = self.remote_repo.clone(); - let octocrab = self.octocrab.clone(); + let repository = self.remote_repo_handle(); let designator_cp = designator.clone(); async move { let release = if let Ok(id) = designator.parse2::() { - repository.find_release_by_id(&octocrab, id).await? + repository.find_release_by_id(id).await? 
} else { match designator.as_str() { - "latest" => repository.latest_release(&octocrab).await?, - "nightly" => - crate::version::latest_nightly_release(&octocrab, &repository).await?, - tag => repository.find_release_by_text(&octocrab, tag).await?, + "latest" => repository.latest_release().await?, + "nightly" => crate::version::latest_nightly_release(&repository).await?, + tag => repository.find_release_by_text(tag).await?, } }; Ok(release) @@ -66,4 +64,8 @@ impl BuildContext { }) .boxed() } + + pub fn remote_repo_handle(&self) -> github::repo::Handle { + github::repo::Handle::new(&self.octocrab, self.remote_repo.clone()) + } } diff --git a/build/build/src/engine.rs b/build/build/src/engine.rs index 691041f7101a..5e7d4f381440 100644 --- a/build/build/src/engine.rs +++ b/build/build/src/engine.rs @@ -7,7 +7,7 @@ use crate::paths::ComponentPaths; use crate::paths::Paths; use ide_ci::future::AsyncPolicy; -use ide_ci::models::config::RepoContext; +use ide_ci::github::Repo; use std::collections::BTreeSet; @@ -191,7 +191,7 @@ pub enum ReleaseCommand { #[derive(Clone, PartialEq, Eq, Debug)] pub struct ReleaseOperation { pub command: ReleaseCommand, - pub repo: RepoContext, + pub repo: Repo, } #[derive(Clone, PartialEq, Eq, Debug)] diff --git a/build/build/src/engine/context.rs b/build/build/src/engine/context.rs index 041988afcfbc..373b6ec052b4 100644 --- a/build/build/src/engine/context.rs +++ b/build/build/src/engine/context.rs @@ -19,11 +19,10 @@ use crate::paths::cache_directory; use crate::paths::Paths; use crate::paths::TargetTriple; use crate::project::ProcessWrapper; -use crate::retrieve_github_access_token; use ide_ci::actions::workflow::is_in_env; use ide_ci::cache; -use ide_ci::env::Variable; +use ide_ci::github::release::IsReleaseExt; use ide_ci::platform::DEFAULT_SHELL; use ide_ci::programs::graal; use ide_ci::programs::sbt; @@ -88,7 +87,7 @@ impl RunContext { Sbt.require_present().await?; // Other programs. - ide_ci::programs::Git::new_current().await?.require_present().await?; + ide_ci::programs::Git.require_present().await?; ide_ci::programs::Go.require_present().await?; ide_ci::programs::Cargo.require_present().await?; ide_ci::programs::Node.require_present().await?; @@ -97,7 +96,7 @@ impl RunContext { let prepare_simple_library_server = { if self.config.test_scala { let simple_server_path = &self.paths.repo_root.tools.simple_library_server; - ide_ci::programs::Git::new(simple_server_path) + ide_ci::programs::git::Context::new(simple_server_path) .await? .cmd()? .clean() @@ -122,7 +121,8 @@ impl RunContext { // TODO: After flatc version is bumped, it should be possible to get it without `conda`. // See: https://www.pivotaltracker.com/story/show/180303547 if let Err(e) = Flatc.require_present_at(&FLATC_VERSION).await { - debug!("Cannot find expected flatc: {}", e); + warn!("Cannot find expected flatc: {}", e); + warn!("Will try to install it using conda. In case of issues, please install flatc manually, to avoid dependency on conda."); // GitHub-hosted runner has `conda` on PATH but not things installed by it. // It provides `CONDA` variable pointing to the relevant location. if let Some(conda_path) = std::env::var_os("CONDA").map(PathBuf::from) { @@ -142,13 +142,14 @@ impl RunContext { Flatc.lookup()?; } - let _ = self.paths.emit_env_to_actions(); // Ignore error: we might not be run on CI. + self.paths.emit_env_to_actions().await?; // Ignore error: we might not be run on CI. 
debug!("Build configuration: {:#?}", self.config); // Setup Tests on Windows if TARGET_OS == OS::Windows { - env::CiTestTimeFactor.set(&2); - env::CiFlakyTestEnable.set(&true); + let default_time_factor: usize = 2; + env::CI_TEST_TIMEFACTOR.set(&default_time_factor)?; + env::CI_TEST_FLAKY_ENABLE.set(&true)?; } // TODO [mwu] @@ -409,11 +410,22 @@ impl RunContext { } // if build_native_runner { - // Command::new("./runner") - // .current_dir(&self.repo_root) - // .args(["--run", "./engine/runner-native/src/test/resources/Factorial.enso"]) - // .run_ok() + // let factorial_input = "6"; + // let factorial_expected_output = "720"; + // let output = Command::new(&self.repo_root.runner) + // .args([ + // "--run", + // + // self.repo_root.engine.runner_native.src.test.resources.factorial_enso.as_str(), + // factorial_input, + // ]) + // .env(ENSO_DATA_DIRECTORY.name(), &self.paths.engine.dir) + // .run_stdout() // .await?; + // ensure!( + // output.contains(factorial_expected_output), + // "Native runner output does not contain expected result." + // ); // } // Verify License Packages in Distributions @@ -481,28 +493,27 @@ impl RunContext { } pub async fn execute(&self, operation: Operation) -> Result { - match &operation { + match operation { Operation::Release(ReleaseOperation { command, repo }) => match command { ReleaseCommand::Upload => { let artifacts = self.build().await?; - - // Make packages. - let release_id = crate::env::ReleaseId.fetch()?; - let client = ide_ci::github::create_client(retrieve_github_access_token()?)?; - let upload_asset = |asset: PathBuf| { - ide_ci::github::release::upload_asset(repo, &client, release_id, asset) - }; + let release_id = crate::env::ENSO_RELEASE_ID.get()?; + let release = ide_ci::github::release::ReleaseHandle::new( + &self.inner.octocrab, + repo, + release_id, + ); for package in artifacts.packages.into_iter() { package.pack().await?; - upload_asset(package.artifact_archive).await?; + release.upload_asset_file(package.artifact_archive).await?; } for bundle in artifacts.bundles.into_iter() { bundle.pack().await?; - upload_asset(bundle.artifact_archive).await?; + release.upload_asset_file(bundle.artifact_archive).await?; } if TARGET_OS == OS::Linux { - upload_asset(self.paths.manifest_file()).await?; - upload_asset(self.paths.launcher_manifest_file()).await?; + release.upload_asset_file(self.paths.manifest_file()).await?; + release.upload_asset_file(self.paths.launcher_manifest_file()).await?; } } }, diff --git a/build/build/src/engine/env.rs b/build/build/src/engine/env.rs index c39091e64a69..0163a884682c 100644 --- a/build/build/src/engine/env.rs +++ b/build/build/src/engine/env.rs @@ -2,20 +2,14 @@ //use crate::prelude::*; -use ide_ci::env::Variable; +use ide_ci::define_env_var; -#[derive(Clone, Copy, Debug)] -pub struct CiTestTimeFactor; -impl Variable for CiTestTimeFactor { - const NAME: &'static str = "CI_TEST_TIMEFACTOR"; - type Value = usize; -} +define_env_var! { + /// Factor applied to timeouts in tests. 1.0 means no change, 2.0 means double the timeout. + CI_TEST_TIMEFACTOR, usize; -#[derive(Clone, Copy, Debug)] -pub struct CiFlakyTestEnable; -impl Variable for CiFlakyTestEnable { - const NAME: &'static str = "CI_TEST_FLAKY_ENABLE"; - type Value = bool; + /// Whether flaku tests should be run. 
+ CI_TEST_FLAKY_ENABLE, bool; } diff --git a/build/build/src/enso.rs b/build/build/src/enso.rs index 8f7dee9b133f..1f6af8471367 100644 --- a/build/build/src/enso.rs +++ b/build/build/src/enso.rs @@ -5,7 +5,6 @@ use crate::postgres; use crate::postgres::EndpointConfiguration; use crate::postgres::Postgresql; -use ide_ci::env::Variable; use ide_ci::future::AsyncPolicy; use ide_ci::programs::docker::ContainerId; @@ -59,6 +58,12 @@ impl BuiltEnso { Ok(command) } + pub fn repl(&self) -> Result { + let mut command = self.cmd()?; + command.arg("--repl"); + Ok(command) + } + pub fn compile_lib(&self, target: impl AsRef) -> Result { ide_ci::fs::require_exist(&target)?; let mut command = self.cmd()?; @@ -82,9 +87,8 @@ impl BuiltEnso { let _httpbin = crate::httpbin::get_and_spawn_httpbin_on_free_port().await?; let _postgres = match TARGET_OS { OS::Linux => { - let runner_context_string = crate::env::RunnerContainerName - .fetch() - .map(|name| name.0) + let runner_context_string = crate::env::ENSO_RUNNER_CONTAINER_NAME + .get_raw() .or_else(|_| ide_ci::actions::env::RUNNER_NAME.get()) .unwrap_or_else(|_| Uuid::new_v4().to_string()); // GH-hosted runners are named like "GitHub Actions 10". Spaces are not allowed in diff --git a/build/build/src/env.rs b/build/build/src/env.rs index a10d9154988a..5fd5621f537f 100644 --- a/build/build/src/env.rs +++ b/build/build/src/env.rs @@ -1,28 +1,16 @@ #[allow(unused_imports)] use crate::prelude::*; -use ide_ci::env::Variable; +use ide_ci::define_env_var; use ide_ci::programs::docker::ContainerId; -#[derive(Clone, Copy, Debug)] -pub struct ReleaseId; -impl Variable for ReleaseId { - const NAME: &'static str = "ENSO_RELEASE_ID"; - type Value = octocrab::models::ReleaseId; -} +define_env_var! { + ENSO_RELEASE_ID, octocrab::models::ReleaseId; -#[derive(Clone, Copy, Debug)] -pub struct RunnerContainerName; -impl Variable for RunnerContainerName { - const NAME: &'static str = "ENSO_RUNNER_CONTAINER_NAME"; - type Value = ContainerId; -} + /// Name of the container that is running the current build. + ENSO_RUNNER_CONTAINER_NAME, ContainerId; -#[derive(Clone, Copy, Debug)] -pub struct NightlyEditionsLimit; -impl Variable for NightlyEditionsLimit { - const NAME: &'static str = "ENSO_NIGHTLY_EDITIONS_LIMIT"; - type Value = usize; + ENSO_NIGHTLY_EDITIONS_LIMIT, usize; } diff --git a/build/build/src/httpbin.rs b/build/build/src/httpbin.rs index 94d4c30a1ee9..2cd632e44f9f 100644 --- a/build/build/src/httpbin.rs +++ b/build/build/src/httpbin.rs @@ -1,18 +1,16 @@ use crate::prelude::*; -use ide_ci::env::Variable; use ide_ci::programs::Go; use tokio::process::Child; pub mod env { - /// Environment variable that stores URL under which spawned httpbin server is available. - #[derive(Clone, Copy, Debug)] - pub struct Url; - impl ide_ci::env::Variable for Url { - const NAME: &'static str = "ENSO_HTTP_TEST_HTTPBIN_URL"; - type Value = url::Url; + use super::*; + + ide_ci::define_env_var! { + /// Environment variable that stores URL under which spawned httpbin server is available. 
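+        ///
+        /// E.g. `http://localhost:8080` (the port is whatever free port the spawner picked).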
+ ENSO_HTTP_TEST_HTTPBIN_URL, Url; } } @@ -40,14 +38,14 @@ pub async fn get_and_spawn_httpbin(port: u16) -> Result { let url_string = format!("http://localhost:{port}"); let url = Url::parse(&url_string)?; - env::Url.set(&url); + env::ENSO_HTTP_TEST_HTTPBIN_URL.set(&url)?; Ok(Spawned { url, process }) } impl Drop for Spawned { fn drop(&mut self) { debug!("Dropping the httpbin wrapper."); - env::Url.remove(); + env::ENSO_HTTP_TEST_HTTPBIN_URL.remove(); } } diff --git a/build/build/src/ide/web.rs b/build/build/src/ide/web.rs index 157a3e824176..56762b9eed4b 100644 --- a/build/build/src/ide/web.rs +++ b/build/build/src/ide/web.rs @@ -9,8 +9,8 @@ use crate::project::ProcessWrapper; use anyhow::Context; use futures_util::future::try_join; use futures_util::future::try_join4; +use ide_ci::github::RepoRef; use ide_ci::io::download_all; -use ide_ci::models::config::RepoContext; use ide_ci::program::command; use ide_ci::program::EMPTY_ARGS; use ide_ci::programs::node::NpmCommand; @@ -35,7 +35,7 @@ pub const IDE_ASSETS_URL: &str = pub const ARCHIVED_ASSET_FILE: &str = "ide-assets-main/content/assets/"; -pub const GOOGLE_FONTS_REPOSITORY: &str = "google/fonts"; +pub const GOOGLE_FONTS_REPOSITORY: RepoRef = RepoRef { owner: "google", name: "fonts" }; pub const GOOGLE_FONT_DIRECTORY: &str = "ofl"; @@ -81,6 +81,13 @@ pub mod env { /// The app-specific password (not Apple ID password). See: /// https://support.apple.com/HT204397 APPLEIDPASS, String; + + /// `true` or `false`. Defaults to `true` — on a macOS development machine valid and + /// appropriate identity from your keychain will be automatically used. + CSC_IDENTITY_AUTO_DISCOVERY, bool; + + /// Path to the python2 executable, used by electron-builder on macOS to package DMG. + PYTHON_PATH, PathBuf; } } @@ -102,9 +109,9 @@ pub async fn download_google_font( output_path: impl AsRef, ) -> Result> { let destination_dir = output_path.as_ref(); - let repo = RepoContext::from_str(GOOGLE_FONTS_REPOSITORY)?; + let repo = GOOGLE_FONTS_REPOSITORY.handle(octocrab); let path = format!("{GOOGLE_FONT_DIRECTORY}/{family}"); - let files = repo.repos(octocrab).get_content().path(path).send().await?; + let files = repo.repos().get_content().path(path).send().await?; let ttf_files = files.items.into_iter().filter(|file| file.name.ends_with(".ttf")).collect_vec(); for file in &ttf_files { @@ -293,8 +300,6 @@ impl IdeDesktop { // When watching we expect our artifacts to be served through server, not appear in any // specific location on the disk. let output_path = TempDir::new()?; - // let span = tracing:: - // let wasm = wasm.inspect() let watch_environment = ContentEnvironment::new(self, wasm, build_info, output_path).await?; Span::current().record("wasm", watch_environment.wasm.as_str()); @@ -352,7 +357,7 @@ impl IdeDesktop { let content_build = self .npm()? - .set_env(env::ENSO_BUILD_GUI, gui.as_ref())? + .set_env(env::ENSO_BUILD_GUI, gui.as_path())? .set_env(env::ENSO_BUILD_PROJECT_MANAGER, project_manager.as_ref())? .set_env(env::ENSO_BUILD_IDE, output_path.as_ref())? .set_env_opt(env::ENSO_BUILD_IDE_BUNDLED_ENGINE_VERSION, engine_version_to_use)? @@ -367,12 +372,25 @@ impl IdeDesktop { let (icons, _content) = try_join(icons_build, content_build).await?; + let python_path = if TARGET_OS == OS::MacOS { + // On macOS electron-builder will fail during DMG creation if there is no python2 + // installed. It is looked for in `/usr/bin/python` which is not valid place on newer + // MacOS versions. + // We can work around this by setting the `PYTHON_PATH` env variable. 
We attempt to + // locate `python2` in PATH which is enough to work on GitHub-hosted macOS + // runners. + Some(ide_ci::program::lookup("python2")?) + } else { + None + }; + self.npm()? .try_applying(&icons)? // .env("DEBUG", "electron-builder") - .set_env(env::ENSO_BUILD_GUI, gui.as_ref())? + .set_env(env::ENSO_BUILD_GUI, gui.as_path())? .set_env(env::ENSO_BUILD_IDE, output_path.as_ref())? .set_env(env::ENSO_BUILD_PROJECT_MANAGER, project_manager.as_ref())? + .set_env_opt(env::PYTHON_PATH, python_path.as_ref())? .workspace(Workspaces::Enso) // .args(["--loglevel", "verbose"]) .run("dist", EMPTY_ARGS) diff --git a/build/build/src/lib.rs b/build/build/src/lib.rs index 40fa72902377..f5a4b75facb5 100644 --- a/build/build/src/lib.rs +++ b/build/build/src/lib.rs @@ -28,6 +28,10 @@ #![warn(unused_import_braces)] #![warn(unused_qualifications)] + + +extern crate core; + use crate::prelude::*; use anyhow::Context; diff --git a/build/build/src/paths.rs b/build/build/src/paths.rs index eab1e79e4d87..f580128a9e53 100644 --- a/build/build/src/paths.rs +++ b/build/build/src/paths.rs @@ -16,6 +16,10 @@ ide_ci::define_env_var! { /// Directory where JUnit-format test run results are stored. /// These are generated as part of the standard library test suite run. ENSO_TEST_JUNIT_DIR, PathBuf; + + /// Used to overwrite the default location of data directory. See: + /// . + ENSO_DATA_DIRECTORY, PathBuf; } pub const EDITION_FILE_ARTIFACT_NAME: &str = "Edition File"; @@ -59,7 +63,7 @@ impl ComponentPaths { Self { name, root, dir, artifact_archive } } - pub fn emit_to_actions(&self, prefix: &str) -> Result { + pub async fn emit_to_actions(&self, prefix: &str) -> Result { let paths = [ ("NAME", &self.name), ("ROOT", &self.root), @@ -70,7 +74,8 @@ impl ComponentPaths { ide_ci::actions::workflow::set_env( &iformat!("{prefix}_DIST_{what}"), &path.to_string_lossy(), - )?; + ) + .await?; } Ok(()) } @@ -182,7 +187,7 @@ impl Paths { /// Sets the environment variables in the current process and in GitHub Actions Runner (if being /// run in its environment), so future steps of the job also have access to them. - pub fn emit_env_to_actions(&self) -> Result { + pub async fn emit_env_to_actions(&self) -> Result { let components = [ ("ENGINE", &self.engine), ("LAUNCHER", &self.launcher), @@ -190,11 +195,11 @@ impl Paths { ]; for (prefix, paths) in components { - paths.emit_to_actions(prefix)?; + paths.emit_to_actions(prefix).await?; } - ide_ci::actions::workflow::set_env("TARGET_DIR", &self.target.to_string_lossy())?; - ENSO_TEST_JUNIT_DIR.set_workflow_env(self.test_results.as_path())?; + ide_ci::actions::workflow::set_env("TARGET_DIR", &self.target.to_string_lossy()).await?; + ENSO_TEST_JUNIT_DIR.set_workflow_env(self.test_results.as_path()).await?; Ok(()) } @@ -264,18 +269,21 @@ pub fn root_to_changelog(root: impl AsRef) -> PathBuf { /// The default value of `ENSO_DATA_DIRECTORY`. /// See: +/// +/// We use it as a fallback when the environment variable is not set. pub fn default_data_directory() -> PathBuf { let project_path = match TARGET_OS { OS::MacOS => "org.enso", _ => "enso", }; // We can unwrap, because all systems we target define data local directory. + // This is enforced by the unit test below. dirs::data_local_dir().unwrap().join(project_path) } /// Get the `ENSO_DATA_DIRECTORY` path. 
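+///
+/// Resolution order: the `ENSO_DATA_DIRECTORY` override if set, otherwise the platform
+/// default, e.g. `~/.local/share/enso` on Linux (illustrative; see `default_data_directory`).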
pub fn data_directory() -> PathBuf { - std::env::var_os("ENSO_DATA_DIRECTORY").map_or_else(default_data_directory, PathBuf::from) + ENSO_DATA_DIRECTORY.get().unwrap_or_else(|_| default_data_directory()) } /// Get the place where global IR caches are stored. @@ -302,3 +310,14 @@ pub fn parent_cargo_toml(initial_path: impl AsRef) -> Result { ensure!(path.pop(), "No Cargo.toml found for {}", initial_path.as_ref().display()); } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn default_data_directory_is_present() { + // We just check that the function does not panic, as it has unwrap. + default_data_directory(); + } +} diff --git a/build/build/src/postgres.rs b/build/build/src/postgres.rs index 343fa863e4fb..2b4c07a7e4d1 100644 --- a/build/build/src/postgres.rs +++ b/build/build/src/postgres.rs @@ -52,9 +52,9 @@ pub enum EndpointConfiguration { impl EndpointConfiguration { /// Tries to deduce what endpoint should be used for a spawned Postgres service. pub fn deduce() -> Result { - if let Ok(container_name) = std::env::var("ENSO_RUNNER_CONTAINER_NAME") { + if let Ok(container_name) = crate::env::ENSO_RUNNER_CONTAINER_NAME.get() { debug!("Assuming that I am in the Docker container named {container_name}."); - Ok(Self::Container { owner: ContainerId(container_name) }) + Ok(Self::Container { owner: container_name }) } else { // If we are running on the bare machine (i.e. not in container), we spawn postgres // and expose it on a free host port. Then we can directly consume. @@ -86,7 +86,7 @@ impl Configuration { pub fn set_enso_test_env(&self) -> Result { env::tests::ENSO_DATABASE_TEST_DB_NAME.set(&self.database_name)?; - env::tests::ENSO_DATABASE_TEST_HOST.set(match &self.endpoint { + env::tests::ENSO_DATABASE_TEST_HOST.set(&match &self.endpoint { EndpointConfiguration::Host { port } => format!("localhost:{port}"), EndpointConfiguration::Container { .. } => format!("localhost:{POSTGRES_CONTAINER_DEFAULT_PORT}"), diff --git a/build/build/src/project.rs b/build/build/src/project.rs index 84519cfdeb01..b6f1b649be88 100644 --- a/build/build/src/project.rs +++ b/build/build/src/project.rs @@ -72,11 +72,6 @@ impl PlainArtifact { pub fn new(path: impl Into) -> Self { Self { path: path.into(), phantom: default() } } - - fn from_existing(path: impl AsRef) -> BoxFuture<'static, Result> - where T: Send + Sync + 'static { - ready(Ok(Self::new(path.as_ref()))).boxed() - } } /// State available to all project-related operations. 
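(For illustration, a usage sketch of the `parent_cargo_toml` helper added in `paths.rs` above; the layout is hypothetical and the helper is assumed to return the manifest path.)

```rust
// Walks up from the given file until a directory containing `Cargo.toml` is found.
let manifest = parent_cargo_toml("build/base/src/extensions/path.rs")?;
assert!(manifest.ends_with("Cargo.toml"));
```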
@@ -224,15 +219,18 @@ pub trait IsTarget: Clone + Debug + Sized + Send + Sync + 'static { ) -> BoxFuture<'static, Result> { let Context { octocrab, cache, upload_artifacts: _, repo_root: _ } = context; let CiRunSource { run_id, artifact_name, repository } = ci_run; + let repository = repository.handle(&octocrab); let span = info_span!("Downloading CI Artifact.", %artifact_name, %repository, target = output_path.as_str()); let this = self.clone(); async move { - let artifact = - repository.find_artifact_by_name(&octocrab, run_id, &artifact_name).await?; + let artifact = repository.find_artifact_by_name(run_id, &artifact_name).await?; info!("Will download artifact: {:#?}", artifact); let artifact_to_get = cache::artifact::ExtractedArtifact { client: octocrab.clone(), - key: cache::artifact::Key { artifact_id: artifact.id, repository }, + key: cache::artifact::Key { + artifact_id: artifact.id, + repository: repository.repo, + }, }; let artifact = cache.get(artifact_to_get).await?; let inner_archive_path = @@ -244,10 +242,34 @@ pub trait IsTarget: Clone + Debug + Sized + Send + Sync + 'static { .boxed() } - fn find_asset(&self, _assets: Vec) -> Result { + fn find_asset<'a>(&self, release: &'a octocrab::models::repos::Release) -> Result<&'a Asset> { + release.assets.iter().find(|asset| self.matches_asset(asset)).with_context(|| { + let asset_names = release.assets.iter().map(|asset| &asset.name).join(", "); + format!( + "No matching asset for target {:?} in release {:?}. Available assets: {}", + self, release, asset_names + ) + }) + } + + fn matches_asset(&self, _asset: &Asset) -> bool { todo!("Not implemented for target {self:?}!") } + // /// Upload the artifact as an asset to the GitHub release. + // fn upload_asset( + // &self, + // release_handle: ReleaseHandle, + // output: impl Future> + Send + 'static, + // ) -> BoxFuture<'static, Result> { + // async move { + // let artifact = output.await?; + // release_handle.upload_compressed_dir(&artifact).await?; + // Ok(()) + // } + // .boxed() + // } + fn download_asset( &self, context: Context, @@ -261,7 +283,8 @@ pub trait IsTarget: Clone + Debug + Sized + Send + Sync + 'static { let this = self.clone(); async move { let ReleaseSource { asset_id, repository } = &source; - let archive_source = repository.download_asset_job(&octocrab, *asset_id); + let repository = repository.handle(&octocrab); + let archive_source = repository.download_asset_job(*asset_id); let extract_job = cache::archive::ExtractedArchive { archive_source, path_to_extract: path_to_extract(), diff --git a/build/build/src/project/backend.rs b/build/build/src/project/backend.rs index 000bb87b60b1..8da5b538a0de 100644 --- a/build/build/src/project/backend.rs +++ b/build/build/src/project/backend.rs @@ -150,16 +150,12 @@ impl IsTarget for Backend { .boxed() } - fn find_asset(&self, assets: Vec) -> Result { - assets - .into_iter() - .find(|asset| { - let name = &asset.name; - self.matches_platform(name) - && is_archive_name(name) - && name.contains("project-manager") - && (name.contains("bundle") || asset.size > 200_000_000) - }) - .context("Failed to find release asset with Enso Project Manager bundle.") + fn matches_asset(&self, asset: &Asset) -> bool { + // The size condition is used to discern actual artifact from its checksum. 
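+        // E.g. "project-manager-bundle-2022.1.1-linux-amd64.tar.gz" matches, while its small
+        // checksum companion asset does not (names illustrative).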
+ let name = &asset.name; + self.matches_platform(name) + && is_archive_name(name) + && name.contains("project-manager") + && (name.contains("bundle") || asset.size > 200_000_000) } } diff --git a/build/build/src/project/gui.rs b/build/build/src/project/gui.rs index fc82e70a6e79..dc839f2b01d8 100644 --- a/build/build/src/project/gui.rs +++ b/build/build/src/project/gui.rs @@ -2,11 +2,11 @@ use crate::prelude::*; use crate::ide::web::IdeDesktop; use crate::project::Context; +use crate::project::IsArtifact; use crate::project::IsTarget; use crate::project::IsWatchable; use crate::project::IsWatcher; use crate::project::PerhapsWatched; -use crate::project::PlainArtifact; use crate::project::Wasm; use crate::source::BuildTargetJob; use crate::source::GetTargetJob; @@ -21,7 +21,23 @@ use ide_ci::ok_ready_boxed; -pub type Artifact = PlainArtifact; +#[derive(Clone, Debug, PartialEq, Eq, Hash, Shrinkwrap)] +pub struct Artifact(crate::paths::generated::RepoRootDistGui); + +impl AsRef for Artifact { + fn as_ref(&self) -> &Path { + self.0.as_path() + } +} + +impl IsArtifact for Artifact {} + +impl Artifact { + pub fn new(gui_path: impl AsRef) -> Self { + // TODO: sanity check + Self(crate::paths::generated::RepoRootDistGui::new_root(gui_path.as_ref())) + } +} #[derive(Clone, Derivative, derive_more::Deref)] #[derivative(Debug)] @@ -55,7 +71,7 @@ impl IsTarget for Gui { } fn adapt_artifact(self, path: impl AsRef) -> BoxFuture<'static, Result> { - Artifact::from_existing(path) + ok_ready_boxed(Artifact::new(path)) } fn build_internal( @@ -127,7 +143,7 @@ impl IsWatchable for Gui { let wasm_artifacts = ok_ready_boxed(perhaps_watched_wasm.as_ref().clone()); let watch_process = ide.watch_content(wasm_artifacts, &build_info.await?, watch_input.shell).await?; - let artifact = Self::Artifact::from_existing(destination).await?; + let artifact = Artifact::new(&destination); let web_watcher = crate::project::Watcher { watch_process, artifact }; Ok(Self::Watcher { wasm: perhaps_watched_wasm, web: web_watcher }) } diff --git a/build/build/src/project/wasm.rs b/build/build/src/project/wasm.rs index cc6a63b786dd..85d79af4a145 100644 --- a/build/build/src/project/wasm.rs +++ b/build/build/src/project/wasm.rs @@ -14,7 +14,6 @@ use crate::source::WithDestination; use derivative::Derivative; use ide_ci::cache; -use ide_ci::env::Variable; use ide_ci::fs::compressed_size; use ide_ci::fs::copy_file_if_different; use ide_ci::programs::cargo; @@ -208,7 +207,7 @@ impl IsTarget for Wasm { command .current_dir(&repo_root) .kill_on_drop(true) - .env_remove(ide_ci::programs::rustup::env::Toolchain::NAME) + .env_remove(ide_ci::programs::rustup::env::RUSTUP_TOOLCHAIN.name()) .set_env(env::ENSO_ENABLE_PROC_MACRO_SPAN, &true)? 
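+            // The calls below assemble the invocation for the chosen profile (presumably the
+            // `wasm-pack build` subcommand).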
             .build()
             .arg(wasm_pack::Profile::from(*profile))
diff --git a/build/build/src/release.rs b/build/build/src/release.rs
index 5eccffa697a6..8cf0727ff1f3 100644
--- a/build/build/src/release.rs
+++ b/build/build/src/release.rs
@@ -5,15 +5,30 @@ use crate::paths::generated;
 use crate::paths::TargetTriple;
 use crate::paths::EDITION_FILE_ARTIFACT_NAME;
 use crate::project;
+use crate::project::gui;
+use crate::project::Gui;
+use crate::project::IsTarget;
+use crate::source::ExternalSource;
+use crate::source::FetchTargetJob;
 
 use ide_ci::github;
+use ide_ci::github::release::ReleaseHandle;
+use ide_ci::io::web::handle_error_response;
 use ide_ci::programs::Docker;
+use ide_ci::programs::SevenZip;
 use octocrab::models::repos::Release;
+use reqwest::Response;
+use serde_json::json;
 use tempfile::tempdir;
 
 
-pub async fn create_release(context: &BuildContext) -> Result<Release> {
+pub fn release_from_env(context: &BuildContext) -> Result<ReleaseHandle> {
+    let release_id = crate::env::ENSO_RELEASE_ID.get()?;
+    Ok(ReleaseHandle::new(&context.octocrab, context.remote_repo.clone(), release_id))
+}
+
+pub async fn draft_a_new_release(context: &BuildContext) -> Result<Release> {
     let versions = &context.triple.versions;
     let commit = ide_ci::actions::env::GITHUB_SHA.get()?;
 
@@ -22,9 +37,10 @@ pub async fn create_release(context: &BuildContext) -> Result<Release> {
         crate::changelog::Changelog(&changelog_contents).top_release_notes()?;
 
     debug!("Preparing release {} for commit {}", versions.version, commit);
+
     let release = context
-        .remote_repo
-        .repos(&context.octocrab)
+        .remote_repo_handle()
+        .repos()
         .releases()
         .create(&versions.tag())
         .target_commitish(&commit)
@@ -35,22 +51,22 @@ pub async fn create_release(context: &BuildContext) -> Result<Release> {
         .send()
         .await?;
 
-    crate::env::ReleaseId.emit(&release.id)?;
+    ide_ci::actions::workflow::set_output(&crate::env::ENSO_RELEASE_ID, &release.id).await?;
     Ok(release)
 }
 
 pub async fn publish_release(context: &BuildContext) -> Result {
-    let BuildContext { inner: project::Context { octocrab, .. }, remote_repo, triple, .. } =
-        context;
+    let remote_repo = context.remote_repo_handle();
+    let BuildContext { inner: project::Context { .. }, triple, .. } = context;
 
-    let release_id = crate::env::ReleaseId.fetch()?;
+    let release_id = crate::env::ENSO_RELEASE_ID.get()?;
     debug!("Looking for release with id {release_id} on github.");
-    let release = remote_repo.repos(octocrab).releases().get_by_id(release_id).await?;
+    let release = remote_repo.repos().releases().get_by_id(release_id).await?;
     ensure!(release.draft, "Release has been already published!");
 
     debug!("Found the target release, will publish it.");
-    remote_repo.repos(octocrab).releases().update(release.id.0).draft(false).send().await?;
+    remote_repo.repos().releases().update(release.id.0).draft(false).send().await?;
     debug!("Done. Release URL: {}", release.url);
 
     let temp = tempdir()?;
@@ -68,68 +84,172 @@ pub async fn publish_release(context: &BuildContext) -> Result {
         .await?;
 
     debug!("Updating edition in the AWS S3.");
-    crate::aws::update_manifest(remote_repo, &edition_file_path).await?;
+    crate::aws::update_manifest(&remote_repo, &edition_file_path).await?;
 
     Ok(())
 }
 
-pub async fn deploy_to_ecr(context: &BuildContext, repository: String) -> Result {
-    let octocrab = &context.octocrab;
-    let release_id = crate::env::ReleaseId.fetch()?;
-
-    let linux_triple = TargetTriple { os: OS::Linux, ..context.triple.clone() };
-    let package_name =
-        generated::RepoRootBuiltDistribution::new_root(".", linux_triple.to_string())
-            .enso_engine_triple
-            .file_name()
-            .context("Failed to get Engine Package name.")?
-            .as_str()
-            .to_string();
-
-    let release = context.remote_repo.find_release_by_id(octocrab, release_id).await?;
+/// Download the Enso Engine distribution from the GitHub release.
+pub async fn get_engine_package(
+    repo: &github::repo::Handle<impl IsRepo>,
+    output: impl AsRef<Path>,
+    triple: &TargetTriple,
+) -> Result<generated::EnginePackage> {
+    let release_id = crate::env::ENSO_RELEASE_ID.get()?;
+    let package_name = generated::RepoRootBuiltDistribution::new_root(".", triple.to_string())
+        .enso_engine_triple
+        .file_name()
+        .context("Failed to get Engine Package name.")?
+        .as_str()
+        .to_string();
+
+    let release = repo.find_release_by_id(release_id).await?;
     let asset = github::find_asset_by_text(&release, &package_name)?;
-
-    let temp_for_archive = tempdir()?;
-    let downloaded_asset = context
-        .remote_repo
-        .download_asset_to(octocrab, asset, temp_for_archive.path().to_owned())
-        .await?;
-
-    let temp_for_extraction = tempdir()?;
-    ide_ci::archive::extract_to(&downloaded_asset, &temp_for_extraction).await?;
+    let temp_for_archive = tempdir()?;
+    let downloaded_asset =
+        repo.download_asset_to(asset, temp_for_archive.path().to_owned()).await?;
 
-    let engine_package = generated::EnginePackage::new_under(
-        &temp_for_extraction,
-        context.triple.versions.version.to_string(),
-    );
+    ide_ci::archive::extract_to(&downloaded_asset, output.as_ref()).await?;
+    let engine_package =
+        generated::EnginePackage::new_under(output.as_ref(), triple.versions.version.to_string());
+    Ok(engine_package)
+}
 
-    let config = &aws_config::load_from_env().await;
-    let client = aws_sdk_ecr::Client::new(config);
-    let repository_uri = crate::aws::ecr::get_repository_uri(&client, &repository).await?;
-    let tag = format!("{}:{}", repository_uri, context.triple.versions.version);
-    let _image = crate::aws::ecr::runtime::build_runtime_image(
+/// Download the Enso Engine distribution from the GitHub release and build a Runtime Docker image
+/// from it.
+pub async fn generate_runtime_image(
+    context: &BuildContext,
+    tag: impl Into<String>,
+) -> Result {
+    // Our runtime images always target Linux.
+    let linux_triple = TargetTriple { os: OS::Linux, ..context.triple.clone() };
+    let temp_for_extraction = tempdir()?;
+    let engine_package = get_engine_package(
+        &context.remote_repo_handle(),
+        temp_for_extraction.path(),
+        &linux_triple,
+    )
+    .await?;
+    crate::aws::ecr::runtime::build_runtime_image(
         context.repo_root.tools.ci.docker.clone(),
         engine_package,
-        tag.clone(),
+        tag.into(),
     )
-    .await?;
+    .await
+}
 
+/// Deploy the backend to ECR.
+///
+/// Downloads the Engine package from the release, builds the runtime image from it and pushes it
+/// to our ECR.
+pub async fn deploy_to_ecr(context: &BuildContext, repository: String) -> Result {
+    let client = crate::aws::ecr::client_from_env().await;
+    let repository_uri = crate::aws::ecr::get_repository_uri(&client, &repository).await?;
+    let tag = format!("{}:{}", repository_uri, context.triple.versions.version);
+    // We don't care about the image ID, we will refer to it by the tag.
+    let _image_id = generate_runtime_image(context, &tag).await?;
     let credentials = crate::aws::ecr::get_credentials(&client).await?;
     Docker.while_logged_in(credentials, || async move { Docker.push(&tag).await }).await?;
     Ok(())
 }
 
-pub async fn dispatch_cloud_image_build_action(octocrab: &Octocrab, version: &Version) -> Result {
-    let input = serde_json::json!({
-        "version": version.to_string(),
+/// Download the GUI artifacts from the current CI run artifacts.
+pub async fn get_gui_from_current_ci_run(
+    context: &BuildContext,
+    out_dir: impl Into<PathBuf>,
+) -> Result<gui::Artifact> {
+    let target = Gui;
+    let source = ExternalSource::new_ongoing_ci_run(target.artifact_name());
+    let fetch_job = FetchTargetJob { destination: out_dir.into(), inner: source };
+    target.get_external(context.inner.clone(), fetch_job).await
+}
+
+/// Upload the GUI to the cloud (AWS S3).
+pub async fn upload_gui_to_cloud_good(context: &BuildContext) -> Result {
+    let temp = tempdir()?;
+    let gui = get_gui_from_current_ci_run(context, temp.path()).await?;
+    upload_gui_to_cloud(&gui.assets, &context.triple.versions.version).await?;
+    notify_cloud_about_gui(&context.triple.versions.version).await?;
+    Ok(())
+}
+
+/// Upload the GUI to the cloud (AWS S3).
+pub async fn upload_gui_to_cloud(
+    assets: &crate::paths::generated::RepoRootDistGuiAssets,
+    version: &Version,
+) -> Result {
+    let bucket = crate::aws::s3::gui::context(version).await?;
+
+    // Some files we upload as-is, some gzipped. This seems somewhat arbitrary now.
+    let files_to_upload = [assets.ide_wasm.as_path(), assets.style_css.as_path()];
+    let files_to_upload_gzipped = [assets.index_js.as_path(), assets.wasm_imports_js.as_path()];
+
+
+    for file in files_to_upload.iter() {
+        bucket.put_file(file).await?;
+    }
+
+    put_files_gzipping(&bucket, &files_to_upload_gzipped).await?;
+
+    Ok(())
+}
+
+/// Packs the given files with `gzip` and uploads them to the S3 bucket.
+///
+/// The files are uploaded under the same name, but with a `.gz` extension appended.
+pub async fn put_files_gzipping(
+    bucket: &crate::aws::s3::BucketContext,
+    files: impl IntoIterator<Item = impl AsRef<Path>>,
+) -> Result {
+    let temp_for_gzipping = tempdir()?;
+    for file in files {
+        let gzipped_file = file.with_parent(&temp_for_gzipping).with_appended_extension("gz");
+        SevenZip.pack(&gzipped_file, [file]).await?;
+        bucket.put_file(&gzipped_file).await?;
+    }
+    Ok(())
+}
+
+#[context("Failed to notify the cloud about GUI upload in version {}.", version)]
+pub async fn notify_cloud_about_gui(version: &Version) -> Result<Response> {
+    let body = json!({
+        "versionNumber": version.to_string(),
+        "versionType": "Ide"
     });
-    octocrab
-        .actions()
-        .create_workflow_dispatch("enso-org", "cloud-v2", "build-image.yaml", "main")
-        .inputs(input)
+    let response = reqwest::Client::new()
+        .post("https://nngmxi3zr4.execute-api.eu-west-1.amazonaws.com/versions")
+        .header("x-enso-organization-id", "org-2BqGX0q2yCdONdmx3Om1MVZzmv3")
+        .header("Content-Type", "application/json")
+        .json(&body)
         .send()
-        .await
-        .context("Failed to dispatch the cloud image build action.")
+        .await?;
+    debug!("Response code from the cloud: {}.", response.status());
+    handle_error_response(response).await
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[tokio::test]
+    #[ignore]
+    async fn upload_gui() -> Result {
+        setup_logging()?;
+        let assets = crate::paths::generated::RepoRootDistGuiAssets::new_root(
+            r"H:\NBO\enso4\dist\gui\assets",
+        );
+        let version = "2022.1.1-dev.provisional.test.2".parse2()?;
+        upload_gui_to_cloud(&assets, &version).await?;
+        notify_cloud_about_gui(&version).await?;
+        Ok(())
+    }
+
+    #[tokio::test]
+    #[ignore]
+    async fn notify_cloud() -> Result {
+        setup_logging()?;
+        let version = Version::from_str("2022.1.1-rc.2")?;
+        notify_cloud_about_gui(&version).await?;
+        Ok(())
+    }
+}
diff --git a/build/build/src/repo.rs b/build/build/src/repo.rs
index 1db230ee4a65..3e69450792cd 100644
--- a/build/build/src/repo.rs
+++ b/build/build/src/repo.rs
@@ -1,6 +1,13 @@
 use crate::prelude::*;
 
+
+// ==============
+// === Export ===
+// ==============
+
+pub mod cloud;
+
+
 
 /// Heuristic that checks if given path can be plausibly considered to be the root of the Enso
 /// repository.
diff --git a/build/build/src/repo/cloud.rs b/build/build/src/repo/cloud.rs
new file mode 100644
index 000000000000..3a66ca38dc15
--- /dev/null
+++ b/build/build/src/repo/cloud.rs
@@ -0,0 +1,60 @@
+use crate::prelude::*;
+
+use ide_ci::github;
+use ide_ci::github::RepoRef;
+
+
+
+pub const CLOUD_REPO: RepoRef = RepoRef { owner: "enso-org", name: "cloud-v2" };
+
+pub const BUILD_IMAGE_WORKFLOW: &str = "build-image.yaml";
+
+/// Build Image workflow input. Follows the schema defined by
+/// https://github.com/enso-org/cloud-v2/blob/main/.github/workflows/build-image.yaml#L4
+#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
+pub struct BuildImageInput<T> {
+    runtime_version: T,
+}
+
+impl<T> BuildImageInput<T> {
+    pub fn new(runtime_version: T) -> Self {
+        Self { runtime_version }
+    }
+}
+
+/// This function tells the cloud to build the Enso Backend image, based on the image that is in
+/// ECR.
+///
+/// In general, we want this function to be invoked after each ECR push.
+#[instrument(fields(%version), skip(octocrab))]
+pub async fn build_image_workflow_dispatch_input(octocrab: &Octocrab, version: &Version) -> Result {
+    let repo = CLOUD_REPO.handle(octocrab);
+
+    // We want to call our workflow on the default branch.
+ let default_branch = repo.get().await?.default_branch.with_context(|| { + format!( + "Failed to get the default branch of the {} repository. Missing field: `default_branch`.", + CLOUD_REPO + ) + })?; + + debug!("Will invoke on ref: '{}'", default_branch); + let input = BuildImageInput::new(version); + info!("Dispatching the cloud workflow to build the image."); + github::workflow::dispatch(&repo, BUILD_IMAGE_WORKFLOW, default_branch, &input).await +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::setup_octocrab; + + #[tokio::test] + #[ignore] + async fn manual_call() -> Result { + setup_logging()?; + let octo = setup_octocrab().await?; + build_image_workflow_dispatch_input(&octo, &Version::parse("2022.1.1-nightly.2022-10-18")?) + .await?; + Ok(()) + } +} diff --git a/build/build/src/source.rs b/build/build/src/source.rs index 47ffd06ca4f9..6c2779b68d1d 100644 --- a/build/build/src/source.rs +++ b/build/build/src/source.rs @@ -4,7 +4,7 @@ use crate::project::IsTarget; use crate::project::IsWatchable; use derivative::Derivative; -use ide_ci::models::config::RepoContext; +use ide_ci::github::Repo; use octocrab::models::AssetId; use octocrab::models::RunId; @@ -23,6 +23,12 @@ pub enum ExternalSource { Release(ReleaseSource), } +impl ExternalSource { + pub fn new_ongoing_ci_run(artifact_name: impl Into) -> Self { + Self::OngoingCiRun(OngoingCiRunSource { artifact_name: artifact_name.into() }) + } +} + #[derive(Derivative)] #[derivative(Debug)] pub enum Source { @@ -41,7 +47,7 @@ pub struct OngoingCiRunSource { #[derivative(Debug)] pub struct CiRunSource { #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] - pub repository: RepoContext, + pub repository: Repo, #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] pub run_id: RunId, pub artifact_name: String, @@ -51,7 +57,7 @@ pub struct CiRunSource { #[derivative(Debug)] pub struct ReleaseSource { #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] - pub repository: RepoContext, + pub repository: Repo, #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] pub asset_id: AssetId, } diff --git a/build/build/src/version.rs b/build/build/src/version.rs index 9b861ed6bb38..06d7e4b8b219 100644 --- a/build/build/src/version.rs +++ b/build/build/src/version.rs @@ -1,3 +1,5 @@ +//! Code that deals with the version of Enso. + use crate::prelude::*; use anyhow::Context; @@ -5,7 +7,7 @@ use chrono::Datelike; use derivative::Derivative; use ide_ci::define_env_var; use ide_ci::env::new::TypedVariable; -use ide_ci::models::config::RepoContext; +use ide_ci::github; use octocrab::models::repos::Release; use semver::Prerelease; use std::collections::BTreeSet; @@ -18,64 +20,88 @@ use tracing::instrument; // Variable that stores Enso Engine version. define_env_var! { + /// The version of Enso (shared by GUI and Engine). ENSO_VERSION, Version; + /// Edition name for the build. + /// + /// By convention, this is the same as the version. ENSO_EDITION, String; + + /// Whether the development-specific Engine features should be disabled. 
     ENSO_RELEASE_MODE, bool;
 }
 
 pub const LOCAL_BUILD_PREFIX: &str = "dev";
 pub const NIGHTLY_BUILD_PREFIX: &str = "nightly";
-
-pub fn default_dev_version() -> Version {
-    let mut ret = Version::new(0, 0, 0);
-    ret.pre = Prerelease::new(LOCAL_BUILD_PREFIX).unwrap();
-    ret
-}
-
-pub fn is_nightly_release(release: &Release) -> bool {
-    !release.draft && release.tag_name.contains(NIGHTLY_BUILD_PREFIX)
+pub const RC_BUILD_PREFIX: &str = "rc";
+
+/// Check if the given GitHub release matches the provided kind.
+pub fn is_release_of_kind(release: &Release, kind: Kind) -> bool {
+    match kind {
+        Kind::Dev => release.tag_name.contains(LOCAL_BUILD_PREFIX),
+        Kind::Nightly => release.tag_name.contains(NIGHTLY_BUILD_PREFIX),
+        Kind::Rc => release.tag_name.contains(RC_BUILD_PREFIX),
+        Kind::Stable => !release.prerelease,
+    }
 }
 
-pub async fn nightly_releases(
-    octocrab: &Octocrab,
-    repo: &RepoContext,
+/// List all releases in the GitHub repository that are of a given kind.
+pub async fn releases_of_kind(
+    repo: &github::repo::Handle<impl IsRepo>,
+    kind: Kind,
 ) -> Result<impl Iterator<Item = Release>> {
-    Ok(repo.all_releases(octocrab).await?.into_iter().filter(is_nightly_release))
+    Ok(repo.all_releases().await?.into_iter().filter(move |r| is_release_of_kind(r, kind)))
 }
 
-pub async fn latest_nightly_release(octocrab: &Octocrab, repo: &RepoContext) -> Result<Release> {
+/// Get the latest nightly release in the GitHub repository.
+pub async fn latest_nightly_release(repo: &github::repo::Handle<impl IsRepo>) -> Result<Release> {
     // TODO: this assumes that releases are returned in date order, to be confirmed
     // (but having to download all the pages to see which is latest wouldn't be nice)
-    nightly_releases(octocrab, repo).await?.next().context("Failed to find any nightly releases.")
+    releases_of_kind(repo, Kind::Nightly)
+        .await?
+        .next()
+        .context("Failed to find any nightly releases.")
 }
 
-
+/// Keeps the version of Enso, the edition name, and whether this version should be treated as a
+/// release.
+///
+/// Basically this is everything that is needed to define the version of the build.
 #[derive(Clone, Derivative, Serialize, Deserialize, Shrinkwrap, PartialEq, Eq)]
 #[derivative(Debug)]
 pub struct Versions {
+    /// The version of Enso.
+    ///
+    /// Currently it also doubles as the edition name. In the future we might want to separate
+    /// them.
     #[shrinkwrap(main_field)]
     #[derivative(Debug(format_with = "std::fmt::Display::fmt"))]
-    pub version: Version,
-    pub release_mode: bool,
-}
+    pub version: Version,
 
-impl Default for Versions {
-    fn default() -> Self {
-        Versions { version: default_dev_version(), release_mode: false }
-    }
+    /// Whether this version should be treated as a release.
+    ///
+    /// This is later propagated to the [`ENSO_RELEASE_MODE`] environment variable.
+    pub release_mode: bool,
 }
 
 impl Versions {
+    /// Create a new version from a single SemVer [`Version`] value.
+    ///
+    /// The edition name will be deduced to be the same as the version.
+    /// Whether this version should be treated as a release is deduced from the version's
+    /// [pre-release](https://semver.org/#spec-item-9) part.
     pub fn new(version: Version) -> Self {
         let release_mode = !version.pre.as_str().contains(LOCAL_BUILD_PREFIX)
             && !version.pre.as_str().contains("SNAPSHOT");
         Versions { version, release_mode }
     }
 
+    /// Get the edition name.
+    ///
+    /// By convention, this is the same as the version.
     pub fn edition_name(&self) -> String {
         self.version.to_string()
     }
 
+    /// Pretty print the product name and version, e.g. "Enso 2022.1.0".
    pub fn pretty_name(&self) -> String {
         format!("Enso {}", self.version)
     }
 
@@ -84,7 +110,9 @@ impl Versions {
         Prerelease::new(LOCAL_BUILD_PREFIX).anyhow_err()
     }
 
-    pub async fn nightly_prerelease(octocrab: &Octocrab, repo: &RepoContext) -> Result<Prerelease> {
+    pub async fn nightly_prerelease(
+        repo: &github::repo::Handle<impl IsRepo>,
+    ) -> Result<Prerelease> {
         let date = chrono::Utc::now();
         let date = date.format("%F").to_string();
 
@@ -98,7 +126,7 @@ impl Versions {
             Ok(pre)
         };
 
-        let relevant_nightly_versions = nightly_releases(octocrab, repo)
+        let relevant_nightly_versions = releases_of_kind(repo, Kind::Nightly)
             .await?
             .filter_map(|release| {
                 if release.tag_name.contains(&todays_pre_text) {
@@ -121,15 +149,61 @@ impl Versions {
         unreachable!("After infinite loop.")
     }
 
+    /// Generate the prerelease string for a "release candidate" release.
+    ///
+    /// We list all the RC releases in the repository and pick the first free `rc.N` index.
+    pub async fn rc_prerelease(
+        version: &Version,
+        repo: &github::repo::Handle<impl IsRepo>,
+    ) -> Result<Prerelease> {
+        let relevant_rc_versions = releases_of_kind(repo, Kind::Rc)
+            .await?
+            .filter_map(|release| {
+                let release_version = Version::parse(&release.tag_name).ok()?;
+                let version_matches = release_version.major == version.major
+                    && release_version.minor == version.minor
+                    && release_version.patch == version.patch;
+                version_matches.then_some(release_version.pre)
+            })
+            .collect::<BTreeSet<_>>();
+
+        // Generate subsequent RC sub-releases, until a free one is found.
+        // Should happen rarely.
+        for index in 0.. {
+            let pre = Prerelease::from_str(&format!("{}.{}", RC_BUILD_PREFIX, index))?;
+            if !relevant_rc_versions.contains(&pre) {
+                return Ok(pre);
+            }
+        }
+        unreachable!("After infinite loop.")
+    }
+
+    /// Get a git tag that should be applied to a commit released as this version.
     pub fn tag(&self) -> String {
         self.version.to_string()
     }
 
-    pub fn publish(&self) -> Result {
+    pub async fn publish(&self) -> Result {
         let edition = self.edition_name();
-        ENSO_VERSION.emit_to_workflow(&self.version)?;
-        ENSO_EDITION.emit_to_workflow(edition.as_str())?;
-        ENSO_RELEASE_MODE.emit_to_workflow(&self.release_mode)?;
+        // Some components (like SBT) consume version information through these environment
+        // variables.
+        ENSO_VERSION.set(&self.version)?;
+        ENSO_EDITION.set(&edition)?;
+        ENSO_RELEASE_MODE.set(&self.release_mode)?;
+
+        // This is actually used only in some workflows (primarily the release one, where release
+        // creation and the asset compilation happen in separate jobs). Still, no harm in doing this
+        // always.
+        //
+        // Note that our output names are the same as the environment variable names.
+        ide_ci::actions::workflow::set_output(ENSO_VERSION.name, &ENSO_VERSION.get_raw()?).await?;
+        ide_ci::actions::workflow::set_output(ENSO_EDITION.name, &ENSO_EDITION.get_raw()?).await?;
+        ide_ci::actions::workflow::set_output(
+            ENSO_RELEASE_MODE.name,
+            &ENSO_RELEASE_MODE.get_raw()?,
+        )
+        .await?;
+
         Ok(())
     }
 }
@@ -175,7 +249,7 @@ pub fn suggest_next_version(previous: &Version) -> Version {
 }
 
 #[instrument(ret)]
-pub fn versions_from_env(expected_build_kind: Option<BuildKind>) -> Result<Option<Versions>> {
+pub fn versions_from_env(expected_build_kind: Option<Kind>) -> Result<Option<Versions>> {
     if let Ok(version) = ENSO_VERSION.get() {
         // The currently adopted version scheme uses same string for version and edition name,
         // so we enforce it here. There are no fundamental reasons for this requirement.
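The `rc_prerelease` helper above encodes a small but easy-to-miss scheme: it collects the pre-release parts of all existing `rc` releases that share the same core version, then takes the first `rc.N` suffix that is not yet taken. A minimal sketch of that scheme, assuming only the `semver` crate and plain version strings in place of releases fetched from GitHub (`next_rc` is an illustrative helper, not part of this patch):

```rust
use std::collections::BTreeSet;

use semver::{Prerelease, Version};

/// Sketch of the `rc.N` numbering: gather the pre-release parts of existing
/// RC versions with the same major.minor.patch, then pick the first free index.
fn next_rc(existing_tags: &[&str], version: &Version) -> Prerelease {
    let taken: BTreeSet<Prerelease> = existing_tags
        .iter()
        .filter_map(|tag| Version::parse(tag).ok())
        .filter(|v| (v.major, v.minor, v.patch) == (version.major, version.minor, version.patch))
        .map(|v| v.pre)
        .collect();
    (0..)
        .map(|index| Prerelease::new(&format!("rc.{index}")).expect("valid pre-release"))
        .find(|pre| !taken.contains(pre))
        .expect("some index is always free")
}

fn main() {
    let version = Version::new(2022, 1, 1);
    let tags = ["2022.1.1-rc.0", "2022.1.1-rc.1", "2022.1.2-rc.0"];
    // `rc.0` and `rc.1` are taken for 2022.1.1; the `rc.0` of 2022.1.2 is ignored.
    assert_eq!(next_rc(&tags, &version).as_str(), "rc.2");
}
```

Scanning for the first free index rather than incrementing the highest one also tolerates gaps left by deleted releases.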
@@ -188,7 +262,7 @@ pub fn versions_from_env(expected_build_kind: Option) -> Result) -> Result, +pub async fn deduce_or_generate( + repo: Result<&github::repo::Handle>, + kind: Kind, root_path: impl AsRef, ) -> Result { debug!("Deciding on version to target."); - if let Some(versions) = versions_from_env(Some(build_kind))? { + if let Some(versions) = versions_from_env(Some(kind))? { Ok(versions) } else { let changelog_path = crate::paths::root_to_changelog(&root_path); + let base_version = base_version(&changelog_path)?; let version = Version { - pre: match build_kind { - BuildKind::Dev => Versions::local_prerelease()?, - BuildKind::Nightly => Versions::nightly_prerelease(octocrab, target_repo?).await?, + pre: match kind { + Kind::Dev => Versions::local_prerelease()?, + Kind::Nightly => Versions::nightly_prerelease(repo?).await?, + Kind::Rc => Versions::rc_prerelease(&base_version, repo?).await?, + Kind::Stable => todo!(), //Versions::stable(repo?).await?, }, - ..base_version(&changelog_path)? + ..base_version }; Ok(Versions::new(version)) } @@ -232,14 +308,14 @@ mod tests { #[test] fn is_nightly_test() { - let is_nightly = |text: &str| BuildKind::Nightly.matches(&Version::parse(text).unwrap()); + let is_nightly = |text: &str| Kind::Nightly.matches(&Version::parse(text).unwrap()); assert!(is_nightly("2022.1.1-nightly.2022.1.1")); assert!(is_nightly("2022.1.1-nightly")); assert!(is_nightly("2022.1.1-nightly.2022.1.1")); assert!(is_nightly("2022.1.1-nightly.2022.1.1")); let version = Version::parse("2022.1.1-nightly.2022-06-06.3").unwrap(); - assert!(BuildKind::deduce(&version).contains(&BuildKind::Nightly)); + assert!(Kind::deduce(&version).contains(&Kind::Nightly)); } #[test] @@ -250,18 +326,33 @@ mod tests { } } -#[derive(clap::ArgEnum, Clone, Copy, PartialEq, Eq, Debug, EnumString, EnumIter, strum::Display)] +#[derive( + clap::ArgEnum, + Clone, + Copy, + PartialEq, + Eq, + Debug, + EnumString, + EnumIter, + strum::Display, + strum::AsRefStr +)] #[strum(serialize_all = "kebab-case")] -pub enum BuildKind { +pub enum Kind { Dev, Nightly, + Rc, + Stable, } -impl BuildKind { +impl Kind { pub fn prerelease_prefix(self) -> &'static str { match self { - BuildKind::Dev => LOCAL_BUILD_PREFIX, - BuildKind::Nightly => NIGHTLY_BUILD_PREFIX, + Kind::Dev => LOCAL_BUILD_PREFIX, + Kind::Nightly => NIGHTLY_BUILD_PREFIX, + Kind::Rc => RC_BUILD_PREFIX, + Kind::Stable => "", } } @@ -270,7 +361,7 @@ impl BuildKind { } pub fn deduce(version: &Version) -> Result { - BuildKind::iter() + Kind::iter() .find(|kind| kind.matches(version)) .context(format!("Failed to deduce build kind for version {version}")) } diff --git a/build/ci_utils/Cargo.toml b/build/ci_utils/Cargo.toml index 9c5588c3a884..e914f023f394 100644 --- a/build/ci_utils/Cargo.toml +++ b/build/ci_utils/Cargo.toml @@ -21,10 +21,10 @@ data-encoding = "2.3.2" derivative = "2.2.0" derive_more = "0.99.17" dirs = "4.0.0" +enso-build-base = { path = "../base" } filetime = "0.2.15" flate2 = "1.0.22" flume = "0.10.10" -fn-error-context = "0.2.0" fs_extra = "1.2.0" futures = "0.3.17" futures-util = "0.3.17" @@ -60,7 +60,6 @@ regex = "1.5.4" reqwest = { version = "0.11.5", default-features = false, features = [ "stream" ] } -snafu = "0.7.0" semver = { version = "1.0.4", features = ["serde"] } serde = { version = "1.0.130", features = ["derive"] } serde_json = "1.0.68" diff --git a/build/ci_utils/src/actions.rs b/build/ci_utils/src/actions.rs index c136c484a10f..7a094d374a69 100644 --- a/build/ci_utils/src/actions.rs +++ b/build/ci_utils/src/actions.rs @@ -1,3 
+1,6 @@ +//! General utilities for working within the GitHub Actions environment. + + // ============== // === Export === // ============== @@ -5,6 +8,7 @@ pub mod artifacts; pub mod context; pub mod env; +pub mod env_file; pub mod workflow; pub use context::Context; diff --git a/build/ci_utils/src/actions/artifacts.rs b/build/ci_utils/src/actions/artifacts.rs index ef464f6aea52..1908199da89c 100644 --- a/build/ci_utils/src/actions/artifacts.rs +++ b/build/ci_utils/src/actions/artifacts.rs @@ -151,7 +151,7 @@ pub async fn upload_compressed_directory( let archive_path = tempdir.path().join(format!("{artifact_name}.tar.gz")); info!("Packing {} to {}", path_to_upload.as_ref().display(), archive_path.display()); - crate::archive::pack_directory_contents(&archive_path, path_to_upload).await?; + crate::archive::compress_directory(&archive_path, path_to_upload).await?; info!("Starting upload of {artifact_name}."); upload_single_file(&archive_path, artifact_name).await?; diff --git a/build/ci_utils/src/actions/context.rs b/build/ci_utils/src/actions/context.rs index 764f9f8beb43..52f68c491ee8 100644 --- a/build/ci_utils/src/actions/context.rs +++ b/build/ci_utils/src/actions/context.rs @@ -1,4 +1,3 @@ -#[allow(unused_imports)] use crate::prelude::*; use octocrab::models; @@ -20,6 +19,7 @@ pub struct WebhookPayload { /// Corresponds to https://github.com/actions/toolkit/blob/main/packages/github/src/context.ts #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] pub struct Context { + /// Webhook payload object that triggered the workflow. pub payload: WebhookPayload, pub event_name: String, pub sha: String, @@ -37,6 +37,7 @@ pub struct Context { impl Context { /// Creates a new context from the environment. + #[context("Failed to create a new GitHub context from the environment.")] pub fn from_env() -> Result { let payload: WebhookPayload = if let Ok(event_path) = crate::actions::env::GITHUB_EVENT_PATH.get() { diff --git a/build/ci_utils/src/actions/env.rs b/build/ci_utils/src/actions/env.rs index 3e4a9a302466..f338fc504e50 100644 --- a/build/ci_utils/src/actions/env.rs +++ b/build/ci_utils/src/actions/env.rs @@ -3,7 +3,9 @@ use crate::prelude::*; use crate::define_env_var; -use crate::models::config::RepoContext; +use crate::env::new::RawVariable; +use crate::env::new::TypedVariable; +use crate::github::Repo; @@ -23,7 +25,7 @@ define_env_var! { /// For a step executing an action, this is the owner and repository name of the action. /// For example, `actions/checkout`. - GITHUB_ACTION_REPOSITORY, RepoContext; + GITHUB_ACTION_REPOSITORY, Repo; /// Always set to true when GitHub Actions is running the workflow. You can use this variable /// to differentiate when tests are being run locally or by GitHub Actions. @@ -45,6 +47,10 @@ define_env_var! { /// `/home/runner/work/_temp/_runner_file_commands/set_env_87406d6e-4979-4d42-98e1-3dab1f48b13a`. GITHUB_ENV, PathBuf; + /// Path to the environment file with step's output parameters. This file is unique to the + /// current step and changes for each step in a job. + GITHUB_OUTPUT, PathBuf; + /// The name of the event that triggered the workflow. For example, `workflow_dispatch`. GITHUB_EVENT_NAME, String; @@ -89,7 +95,7 @@ define_env_var! { GITHUB_REF_TYPE, String; /// The owner and repository name. For example, octocat/Hello-World. - GITHUB_REPOSITORY, RepoContext; + GITHUB_REPOSITORY, Repo; /// The repository owner's name. For example, octocat. GITHUB_REPOSITORY_OWNER, String; @@ -123,7 +129,7 @@ define_env_var! 
{
     /// This file is unique to the current step and changes for each step in a job. For example,
     /// `/home/rob/runner/_layout/_work/_temp/_runner_file_commands/step_summary_1cb22d7f-5663-41a8-9ffc-13472605c76c`.
     /// For more information, see "Workflow commands for GitHub Actions."
-    GITHUB_STEP_SUMMARY, String;
+    GITHUB_STEP_SUMMARY, PathBuf;
 
     /// The name of the workflow. For example, `My test workflow`. If the workflow file doesn't
     /// specify a name, the value of this variable is the full path of the workflow file in the
@@ -166,3 +172,9 @@ pub fn is_self_hosted() -> Result<bool> {
     let name = RUNNER_NAME.get_raw()?;
     Ok(!name.starts_with("GitHub Actions"))
 }
+
+pub async fn set_and_emit<V>(var: &V, value: &V::Borrowed) -> Result
+where V: TypedVariable {
+    let value_raw = var.generate(value)?;
+    crate::actions::workflow::set_env(var.name(), &value_raw).await
+}
diff --git a/build/ci_utils/src/actions/env_file.rs b/build/ci_utils/src/actions/env_file.rs
new file mode 100644
index 000000000000..f6ff8b625211
--- /dev/null
+++ b/build/ci_utils/src/actions/env_file.rs
@@ -0,0 +1,83 @@
+//! During the execution of a workflow, the runner generates temporary files that can be used to
+//! perform certain actions. The paths to these files are exposed via environment variables.
+//!
+//! See for more information.
+
+use crate::prelude::*;
+
+use crate::actions::env;
+use crate::env::new::PathBufVariable;
+
+
+
+// ============================
+// === GitHub-defined files ===
+// ============================
+
+/// Environment file that can be used to set environment variables for the subsequent steps of the
+/// current job. See:
+pub static GITHUB_ENV: EnvironmentFile = EnvironmentFile::new(env::GITHUB_ENV);
+
+/// Environment file used to set the current step's output parameters. See:
+pub static GITHUB_OUTPUT: EnvironmentFile = EnvironmentFile::new(env::GITHUB_OUTPUT);
+
+/// Environment file used to store the job's summary. See:
+pub static GITHUB_STEP_SUMMARY: EnvironmentFile = EnvironmentFile::new(env::GITHUB_STEP_SUMMARY);
+
+/// File with paths that will be prepended to the `PATH` of the subsequent steps of the current
+/// job.
+pub static GITHUB_PATH: EnvironmentFile = EnvironmentFile::new(env::GITHUB_PATH);
+
+
+
+// =======================
+// === EnvironmentFile ===
+// =======================
+
+/// Structure that handles access to the environment file.
+///
+/// Contains a mutex for synchronization, so that different threads can access the file safely.
+#[derive(Debug)]
+pub struct EnvironmentFile {
+    /// Environment variable that contains the path to the file.
+    pub env_var: PathBufVariable,
+    /// Mutex for synchronization.
+    mutex: tokio::sync::Mutex<()>,
+}
+
+impl EnvironmentFile {
+    /// Create a new environment file accessor.
+    pub const fn new(env_var: PathBufVariable) -> Self {
+        Self { env_var, mutex: tokio::sync::Mutex::const_new(()) }
+    }
+
+    /// Read the file contents.
+    pub async fn read(&self) -> Result<String> {
+        let _guard = self.mutex.lock().await;
+        let path = self.env_var.get()?;
+        crate::fs::tokio::read_to_string(path).await
+    }
+
+    /// Appends a line to the file.
+    pub async fn append_line(&self, line: impl AsRef<str>) -> Result {
+        let _guard = self.mutex.lock().await;
+        let path = self.env_var.get()?;
+        let mut line = line.as_ref().to_string();
+        if !line.ends_with('\n') {
+            line.push('\n');
+        };
+        crate::fs::tokio::append(path, line).await
+    }
+
+    /// Append a key-value pair to the file.
+    ///
+    /// Automatically generates a unique delimiter, so the value is allowed to contain `=` or
+    /// newline characters.
+ pub async fn append_key_value(&self, key: impl AsRef, value: impl AsRef) -> Result { + let key = key.as_ref(); + let value = value.as_ref(); + let delimiter = format!("ghadelimiter_{}", Uuid::new_v4()); + ensure!(!key.contains(&delimiter), "Key cannot contain delimiter {}.", delimiter); + ensure!(!value.contains(&delimiter), "Value cannot contain delimiter {}.", delimiter); + let line = format!("{key}<<{delimiter}\n{value}\n{delimiter}"); + self.append_line(line).await + } +} diff --git a/build/ci_utils/src/actions/workflow.rs b/build/ci_utils/src/actions/workflow.rs index c007ec7ca199..5fd1fd8de710 100644 --- a/build/ci_utils/src/actions/workflow.rs +++ b/build/ci_utils/src/actions/workflow.rs @@ -1,8 +1,7 @@ use crate::prelude::*; use crate::actions::env; - -use std::io::Write; +use crate::actions::env_file; // ============== @@ -22,10 +21,13 @@ pub fn is_in_env() -> bool { /// Sets an action's output parameter. /// /// See: -pub fn set_output(name: &str, value: &impl ToString) { - let value = value.to_string(); - debug!("Setting GitHub Actions step output {name} to {value}"); - println!("::set-output name={name}::{value}"); +pub async fn set_output(name: &str, value: &(impl ToString + ?Sized)) -> Result { + if is_in_env() { + let value = value.to_string(); + debug!("Setting GitHub Actions step output {name} to {value}."); + env_file::GITHUB_OUTPUT.append_key_value(name, &value).await?; + } + Ok(()) } /// Prints a debug message to the log. @@ -44,16 +46,18 @@ pub fn debug(message: &str) { /// variables are case-sensitive and you can include punctuation. /// /// Just logs and sets variable locally if used under non-GH CI. -pub fn set_env(name: &str, value: &impl ToString) -> Result { +pub fn set_env(name: impl AsRef, value: &impl ToString) -> BoxFuture<'static, Result> { + let name = name.as_ref().to_string(); let value_string = value.to_string(); - debug!("Will try writing Github Actions environment variable: {name}={value_string}"); - std::env::set_var(name, value.to_string()); - if is_in_env() { - let env_file = env::GITHUB_ENV.get()?; - let mut file = std::fs::OpenOptions::new().create_new(false).append(true).open(env_file)?; - writeln!(file, "{name}={value_string}")?; + async move { + std::env::set_var(&name, &value_string); + if is_in_env() { + debug!("Setting GitHub Actions environment variable {name} to {value_string}"); + env_file::GITHUB_ENV.append_key_value(name, value_string).await?; + } + Ok(()) } - Ok(()) + .boxed() } pub fn mask_text(text: impl AsRef) { diff --git a/build/ci_utils/src/actions/workflow/definition.rs b/build/ci_utils/src/actions/workflow/definition.rs index 117c10c52a59..d0fdd7abc051 100644 --- a/build/ci_utils/src/actions/workflow/definition.rs +++ b/build/ci_utils/src/actions/workflow/definition.rs @@ -3,11 +3,14 @@ use crate::prelude::*; use crate::env::new::RawVariable; use heck::ToKebabCase; +use std::collections::btree_map::Entry; use std::collections::BTreeMap; use std::collections::BTreeSet; +pub const DEFAULT_TIMEOUT_IN_MINUTES: u32 = 360; + pub fn wrap_expression(expression: impl AsRef) -> String { format!("${{{{ {} }}}}", expression.as_ref()) } @@ -86,7 +89,12 @@ pub fn shell_os(os: OS, command_line: impl Into) -> Step { } pub fn shell(command_line: impl Into) -> Step { - Step { run: Some(command_line.into()), env: once(github_token_env()).collect(), ..default() } + Step { + run: Some(command_line.into()), + env: once(github_token_env()).collect(), + timeout_minutes: Some(DEFAULT_TIMEOUT_IN_MINUTES), + ..default() + } } /// Invoke our entry point 
to the build scripts, i.e. the `./run` script. @@ -428,22 +436,28 @@ impl Event { #[derive(Clone, Debug, Default, Serialize, Deserialize)] #[serde(rename_all = "kebab-case")] pub struct Job { - pub name: String, + pub name: String, #[serde(skip_serializing_if = "BTreeSet::is_empty")] - pub needs: BTreeSet, - pub runs_on: Vec, - pub steps: Vec, - #[serde(skip_serializing_if = "BTreeMap::is_empty")] - pub outputs: BTreeMap, + pub needs: BTreeSet, + #[serde(skip_serializing_if = "Option::is_none")] + pub r#if: Option, + pub runs_on: Vec, + pub steps: Vec, #[serde(skip_serializing_if = "Option::is_none")] - pub strategy: Option, + pub concurrency: Option, #[serde(skip_serializing_if = "BTreeMap::is_empty")] - pub env: BTreeMap, + pub outputs: BTreeMap, + #[serde(skip_serializing_if = "BTreeMap::is_empty")] + pub env: BTreeMap, + #[serde(skip_serializing_if = "Option::is_none")] + pub strategy: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub timeout_minutes: Option, } impl Job { pub fn new(name: impl Into) -> Self { - Self { name: name.into(), ..default() } + Self { name: name.into(), timeout_minutes: Some(DEFAULT_TIMEOUT_IN_MINUTES), ..default() } } pub fn expose_output(&mut self, step_id: impl AsRef, output_name: impl Into) { @@ -525,21 +539,25 @@ impl Strategy { #[serde(rename_all = "kebab-case")] pub struct Step { #[serde(skip_serializing_if = "Option::is_none")] - pub id: Option, + pub id: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub r#if: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub name: Option, + pub name: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub uses: Option, + pub uses: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub run: Option, + pub run: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub r#if: Option, + pub shell: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub with: Option, + pub with: Option, #[serde(skip_serializing_if = "BTreeMap::is_empty")] - pub env: BTreeMap, + pub env: BTreeMap, #[serde(skip_serializing_if = "Option::is_none")] - pub shell: Option, + pub continue_on_error: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub timeout_minutes: Option, } impl Step { @@ -553,7 +571,20 @@ impl Step { } pub fn with_env(mut self, name: impl Into, value: impl Into) -> Self { - self.env.insert(name.into(), value.into()); + let name = name.into(); + let value = value.into(); + let entry = self.env.entry(name); + if let Entry::Occupied(mut entry) = entry { + warn!( + "Overriding environment variable `{}` with value `{}` (old value was `{}`)", + entry.key(), + value, + entry.get(), + ); + *entry.get_mut() = value; + } else { + entry.or_insert(value); + } self } diff --git a/build/ci_utils/src/anyhow.rs b/build/ci_utils/src/anyhow.rs deleted file mode 100644 index ff5d665a05c7..000000000000 --- a/build/ci_utils/src/anyhow.rs +++ /dev/null @@ -1,46 +0,0 @@ -use crate::prelude::*; - -use anyhow::Error; - - - -pub trait ResultExt { - fn anyhow_err(self) -> Result; - - #[allow(clippy::type_complexity)] - fn flatten_fut( - self, - ) -> futures::future::Either< - std::future::Ready>, - futures::future::IntoFuture, - > - where T: TryFuture>; - - // fn flatten_fut(self) -> impl Future> - // where T: TryFuture> { - // async move { fut?.into_future().await } - // } - // fn flatten_fut(self) - // where T: TryFuture; -} - -impl ResultExt for std::result::Result -where E: Into -{ - fn anyhow_err(self) -> Result { - self.map_err(E::into) - } - - fn flatten_fut( - 
self, - ) -> futures::future::Either< - std::future::Ready>, - futures::future::IntoFuture, - > - where T: TryFuture> { - match self { - Ok(fut) => fut.into_future().right_future(), - Err(e) => ready(Err(T::Error::from(e))).left_future(), - } - } -} diff --git a/build/ci_utils/src/archive.rs b/build/ci_utils/src/archive.rs index fea76b073158..f50567a7dbff 100644 --- a/build/ci_utils/src/archive.rs +++ b/build/ci_utils/src/archive.rs @@ -115,7 +115,7 @@ pub fn is_archive_name(path: impl AsRef) -> bool { skip_all, fields(src=%root_directory.as_ref().display(), dest=%output_archive.as_ref().display()), err)] -pub async fn pack_directory_contents( +pub async fn compress_directory( output_archive: impl AsRef, root_directory: impl AsRef, ) -> Result { diff --git a/build/ci_utils/src/cache/artifact.rs b/build/ci_utils/src/cache/artifact.rs index 4f0b4c5123cd..130639f7f745 100644 --- a/build/ci_utils/src/cache/artifact.rs +++ b/build/ci_utils/src/cache/artifact.rs @@ -2,7 +2,7 @@ use crate::prelude::*; use crate::cache::Cache; use crate::cache::Storable; -use crate::models::config::RepoContext; +use crate::github::Repo; use octocrab::models::ArtifactId; @@ -10,7 +10,7 @@ use octocrab::models::ArtifactId; #[derive(Clone, Debug, Serialize, Deserialize)] pub struct Key { - pub repository: RepoContext, + pub repository: Repo, pub artifact_id: ArtifactId, } @@ -34,7 +34,8 @@ impl Storable for ExtractedArtifact { async move { let ExtractedArtifact { client, key } = this; let Key { artifact_id, repository } = key; - repository.download_and_unpack_artifact(&client, artifact_id, &store).await?; + let repository = repository.handle(&client); + repository.download_and_unpack_artifact(artifact_id, &store).await?; Ok(()) } .boxed() diff --git a/build/ci_utils/src/cache/download.rs b/build/ci_utils/src/cache/download.rs index 52c014a49fe1..ed1a8211a271 100644 --- a/build/ci_utils/src/cache/download.rs +++ b/build/ci_utils/src/cache/download.rs @@ -10,6 +10,7 @@ use crate::io::web::stream_response_to_file; use derivative::Derivative; use headers::HeaderMap; use reqwest::Client; +use reqwest::ClientBuilder; use reqwest::IntoUrl; use reqwest::Response; @@ -37,7 +38,7 @@ impl DownloadFile { pub fn new(url: impl IntoUrl) -> Result { Ok(Self { key: Key { url: url.into_url()?, additional_headers: default() }, - client: default(), + client: ClientBuilder::new().user_agent("enso-build").build()?, }) } @@ -93,3 +94,8 @@ impl Storable for DownloadFile { self.key.clone() } } + +pub async fn download(cache: Cache, url: impl IntoUrl) -> Result { + let download = DownloadFile::new(url)?; + cache.get(download).await +} diff --git a/build/ci_utils/src/cache/goodie/graalvm.rs b/build/ci_utils/src/cache/goodie/graalvm.rs index d1d8e22bf567..ea01336e610d 100644 --- a/build/ci_utils/src/cache/goodie/graalvm.rs +++ b/build/ci_utils/src/cache/goodie/graalvm.rs @@ -1,7 +1,7 @@ use crate::prelude::*; use crate::cache::goodie::Goodie; -use crate::models::config::RepoContext; +use crate::github::Repo; use crate::programs::java; use crate::programs::java::JAVA_HOME; use crate::programs::Java; @@ -36,8 +36,8 @@ pub async fn find_graal_version() -> Result { } /// The repository that contains the GraalVM CE releases for download. -pub fn ce_build_repository() -> RepoContext { - RepoContext { owner: GITHUB_ORGANIZATION.into(), name: CE_BUILDS_REPOSITORY.into() } +pub fn ce_build_repository() -> Repo { + Repo { owner: GITHUB_ORGANIZATION.into(), name: CE_BUILDS_REPOSITORY.into() } } /// Description necessary to download and install GraalVM. 
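The hunks above and below repeat the central refactor of this patch: a plain `Repo` value now carries only the repository coordinates, and `handle(&octocrab)` binds it to an API client once, so the client no longer has to be threaded through every call (`find_artifact_by_name`, `download_asset_job`, `find_release_by_text`, and so on). A minimal sketch of the pattern under assumed names; `Repo`, `Handle`, `Client`, and `find_release_by_text` here are illustrative stand-ins, not the actual `ide_ci` API:

```rust
/// Plain repository coordinates, cheap to clone and serialize.
#[derive(Clone, Debug)]
struct Repo {
    owner: String,
    name: String,
}

/// Stand-in for the octocrab client.
struct Client;

/// A repository bound to a client.
struct Handle<'a> {
    client: &'a Client,
    repo: Repo,
}

impl Repo {
    /// Bind the coordinates to a client once, instead of passing the client to every call.
    fn handle<'a>(&self, client: &'a Client) -> Handle<'a> {
        Handle { client, repo: self.clone() }
    }
}

impl Handle<'_> {
    fn find_release_by_text(&self, text: &str) -> Option<String> {
        // A real implementation would query the API through `self.client`.
        let _ = self.client;
        Some(format!("{}/{}: release matching `{text}`", self.repo.owner, self.repo.name))
    }
}

fn main() {
    let repo = Repo { owner: "enso-org".into(), name: "enso".into() };
    let client = Client;
    // Before: every call took the client, e.g. `repo.find_release_by_text(&client, "nightly")`.
    // After: bind once, then call without repeating it.
    let handle = repo.handle(&client);
    println!("{:?}", handle.find_release_by_text("nightly"));
}
```

Binding the client once gives the handle a single place to implement repository operations, which is why the patch can drop the `&octocrab` parameter from so many signatures.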
@@ -58,7 +58,8 @@ impl Goodie for GraalVM {
         let client = self.client.clone();
         let repo = ce_build_repository();
         async move {
-            let release = repo.find_release_by_text(&client, &graal_version.to_string()).await?;
+            let repo = repo.handle(&client);
+            let release = repo.find_release_by_text(&graal_version.to_string()).await?;
             crate::github::find_asset_url_by_text(&release, &platform_string).cloned()
         }
         .boxed()
diff --git a/build/ci_utils/src/env.rs b/build/ci_utils/src/env.rs
index a7622b41431d..4146cbbba176 100644
--- a/build/ci_utils/src/env.rs
+++ b/build/ci_utils/src/env.rs
@@ -5,6 +5,13 @@ use std::collections::BTreeSet;
 use unicase::UniCase;
 
 
+// ==============
+// === Export ===
+// ==============
+
+pub mod known;
+
+
 
 pub fn current_dir() -> Result<PathBuf> {
     std::env::current_dir().context("Failed to get current directory.")
@@ -16,6 +23,20 @@ pub fn set_current_dir(path: impl AsRef<Path>) -> Result {
     std::env::set_current_dir(&path).anyhow_err()
 }
 
+/// Define typed accessors for environment variables. Supported types include `String`, `PathBuf`,
+/// and other types that implement `FromStr`.
+///
+/// Example:
+/// ```
+/// # use std::path::PathBuf;
+/// # use ide_ci::define_env_var;
+/// # use ide_ci::env::new::TypedVariable;
+/// define_env_var! {
+///     /// Documentation.
+///     ENV_VAR_NAME, PathBuf;
+/// };
+/// let path = ENV_VAR_NAME.get().unwrap_or_else(|_error| PathBuf::from("default"));
+/// ```
 #[macro_export]
 macro_rules! define_env_var {
     () => {};
@@ -42,10 +63,6 @@ macro_rules! define_env_var {
     };
 }
 
-
-
-pub mod known;
-
 pub mod new {
     use super::*;
     use crate::program::command::FallibleManipulator;
@@ -95,22 +112,21 @@ pub mod new {
             self.parse(self.get_raw()?.as_str())
         }
 
-        fn set(&self, value: impl AsRef<Self::Borrowed>) -> Result {
-            let value = self.generate(value.as_ref())?;
+        fn set(&self, value: &Self::Borrowed) -> Result {
+            let value = self.generate(value)?;
             self.set_raw(value);
             Ok(())
         }
 
-        fn set_workflow_output(&self, value: impl Borrow<Self::Value>) -> Result {
-            crate::actions::workflow::set_output(self.name(), &self.generate(value.borrow())?);
-            Ok(())
-        }
-
-        fn set_workflow_env(&self, value: impl Borrow<Self::Value>) -> Result {
-            crate::actions::workflow::set_env(self.name(), &self.generate(value.borrow())?)
- } - fn emit_to_workflow(&self, value: impl Borrow) -> Result { - self.set_workflow_output(value.borrow())?; - self.set_workflow_env(value.borrow()) + fn set_workflow_env( + &self, + value: impl Borrow, + ) -> BoxFuture<'static, Result> { + let name = self.name().to_string(); + let value = self.generate(value.borrow()); + value + .and_then_async(move |value| crate::actions::workflow::set_env(name, &value)) + .boxed() } } @@ -150,9 +166,10 @@ pub mod new { } } - #[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] + #[derive(Clone, Debug, Ord, PartialOrd, Eq, PartialEq, derive_more::Deref)] pub struct SimpleVariable { - pub name: Cow<'static, str>, + #[deref] + pub name: &'static str, pub phantom_data: PhantomData, pub phantom_data2: PhantomData, } @@ -163,9 +180,9 @@ pub mod new { } } - impl AsRef for SimpleVariable { + impl const AsRef for SimpleVariable { fn as_ref(&self) -> &str { - &self.name + self.name } } @@ -183,17 +200,13 @@ pub mod new { impl SimpleVariable { pub const fn new(name: &'static str) -> Self { - Self { - name: Cow::Borrowed(name), - phantom_data: PhantomData, - phantom_data2: PhantomData, - } + Self { name, phantom_data: PhantomData, phantom_data2: PhantomData } } } - impl RawVariable for SimpleVariable { + impl const RawVariable for SimpleVariable { fn name(&self) -> &str { - &self.name + self.name } } @@ -266,110 +279,18 @@ pub mod new { } } -// -// -// impl<'a, T> SpecFromIter for std::slice::Iter<'a, T> { -// fn f(&self) {} -// } - -#[derive(Clone, Copy, Debug, Display, Ord, PartialOrd, Eq, PartialEq)] -pub struct StrLikeVariable { - pub name: &'static str, -} - -impl StrLikeVariable { - pub const fn new(name: &'static str) -> Self { - Self { name } - } -} - -impl Variable for StrLikeVariable { - const NAME: &'static str = ""; - fn name(&self) -> &str { - self.name - } -} - -pub trait Variable { - const NAME: &'static str; - type Value: FromString = String; - - fn format(&self, value: &Self::Value) -> String - where Self::Value: ToString { - value.to_string() - } - - fn name(&self) -> &str { - Self::NAME - } - - fn fetch(&self) -> Result { - self.fetch_as() - } - - fn fetch_as(&self) -> Result { - self.fetch_string()?.parse2() - } - - fn fetch_string(&self) -> Result { - expect_var(self.name()) - } - - fn fetch_os_string(&self) -> Result { - expect_var_os(self.name()) - } - - fn set(&self, value: &Self::Value) - where Self::Value: ToString { - debug!("Setting env {}={}", self.name(), self.format(value)); - std::env::set_var(self.name(), self.format(value)) - } - - fn set_os(&self, value: &Self::Value) - where Self::Value: AsRef { - std::env::set_var(self.name(), value) - } - - fn set_path

(&self, value: &P) - where - Self::Value: AsRef, - P: AsRef, { - std::env::set_var(self.name(), value.as_ref()) - } - - fn emit_env(&self, value: &Self::Value) -> Result - where Self::Value: ToString { - crate::actions::workflow::set_env(self.name(), value) - } - - fn emit(&self, value: &Self::Value) -> Result - where Self::Value: ToString { - self.emit_env(value)?; - crate::actions::workflow::set_output(self.name(), value); - Ok(()) - } - - fn is_set(&self) -> bool { - self.fetch_os_string().is_ok() - } - - fn remove(&self) { - std::env::remove_var(self.name()) - } -} - const PATH_ENVIRONMENT_NAME: &str = "PATH"; pub fn expect_var(name: impl AsRef) -> Result { let name = name.as_ref(); - std::env::var(name).context(anyhow!("Missing environment variable {}.", name)) + std::env::var(name).with_context(|| anyhow!("Missing environment variable {name}.")) } pub fn expect_var_os(name: impl AsRef) -> Result { let name = name.as_ref(); std::env::var_os(name) - .ok_or_else(|| anyhow!("Missing environment variable {}.", name.to_string_lossy())) + .with_context(|| anyhow!("Missing environment variable {}.", name.to_string_lossy())) } pub fn prepend_to_path(path: impl Into) -> Result { @@ -383,17 +304,17 @@ pub fn prepend_to_path(path: impl Into) -> Result { Ok(()) } -pub async fn fix_duplicated_env_var(var_name: impl AsRef) -> Result { - let var_name = var_name.as_ref(); - - let mut paths = indexmap::IndexSet::new(); - while let Ok(path) = std::env::var(var_name) { - paths.extend(std::env::split_paths(&path)); - std::env::remove_var(var_name); - } - std::env::set_var(var_name, std::env::join_paths(paths)?); - Ok(()) -} +// pub async fn fix_duplicated_env_var(var_name: impl AsRef) -> Result { +// let var_name = var_name.as_ref(); +// +// let mut paths = indexmap::IndexSet::new(); +// while let Ok(path) = std::env::var(var_name) { +// paths.extend(std::env::split_paths(&path)); +// std::env::remove_var(var_name); +// } +// std::env::set_var(var_name, std::env::join_paths(paths)?); +// Ok(()) +// } #[derive(Clone, Debug)] pub enum Action { diff --git a/build/ci_utils/src/extensions.rs b/build/ci_utils/src/extensions.rs index 6360b7313cc2..60f627d24cd5 100644 --- a/build/ci_utils/src/extensions.rs +++ b/build/ci_utils/src/extensions.rs @@ -5,15 +5,8 @@ pub mod child; pub mod clap; pub mod command; -pub mod from_string; -pub mod future; -pub mod iterator; -pub mod maps; pub mod octocrab; pub mod os; pub mod output; -pub mod path; pub mod reqwest; -pub mod result; -pub mod str; pub mod version; diff --git a/build/ci_utils/src/extensions/path.rs b/build/ci_utils/src/extensions/path.rs deleted file mode 100644 index e1adc358c917..000000000000 --- a/build/ci_utils/src/extensions/path.rs +++ /dev/null @@ -1,100 +0,0 @@ -use crate::prelude::*; - -use serde::de::DeserializeOwned; - - - -pub trait PathExt: AsRef { - fn join_iter>(&self, segments: impl IntoIterator) -> PathBuf { - let mut ret = self.as_ref().to_path_buf(); - ret.extend(segments); - ret - } - - /// Strips the leading `\\?\` prefix from Windows paths if present. - fn without_verbatim_prefix(&self) -> &Path { - self.as_str().strip_prefix(r"\\?\").map_or(self.as_ref(), Path::new) - } - - /// Appends a new extension to the file. - /// - /// Does not try to replace previous extension, unlike `set_extension`. - /// Does nothing when given extension string is empty. 
- /// - /// ``` - /// use ide_ci::extensions::path::PathExt; - /// use std::path::PathBuf; - /// - /// let path = PathBuf::from("foo.tar").with_appended_extension("gz"); - /// assert_eq!(path, PathBuf::from("foo.tar.gz")); - /// - /// let path = PathBuf::from("foo").with_appended_extension("zip"); - /// assert_eq!(path, PathBuf::from("foo.zip")); - /// ``` - fn with_appended_extension(&self, extension: impl AsRef) -> PathBuf { - if extension.as_ref().is_empty() { - return self.as_ref().into(); - } else { - let mut ret = self.as_ref().to_path_buf().into_os_string(); - ret.push("."); - ret.push(extension.as_ref()); - ret.into() - } - } - - #[context("Failed to deserialize file `{}` as type `{}`.", self.as_ref().display(), std::any::type_name::())] - fn read_to_json(&self) -> Result { - let content = crate::fs::read_to_string(self)?; - serde_json::from_str(&content).anyhow_err() - } - - fn write_as_json(&self, value: &T) -> Result { - trace!("Writing JSON to {}.", self.as_ref().display()); - let file = crate::fs::create(self)?; - serde_json::to_writer(file, value).anyhow_err() - } - - fn write_as_yaml(&self, value: &T) -> Result { - trace!("Writing YAML to {}.", self.as_ref().display()); - let file = crate::fs::create(self)?; - serde_yaml::to_writer(file, value).anyhow_err() - } - - fn as_str(&self) -> &str { - self.as_ref().to_str().unwrap() - } - - /// Split path to components and collect them into a new PathBuf. - /// - /// This is useful for `/` -> native separator conversion. - fn normalize(&self) -> PathBuf { - self.as_ref().components().collect() - } - - /// Like `parent` but provides a sensible error message if the path has no parent. - fn try_parent(&self) -> Result<&Path> { - self.as_ref() - .parent() - .with_context(|| format!("Failed to get parent of path `{}`.", self.as_ref().display())) - } -} - -impl> PathExt for T {} - -pub fn display_fmt(path: &Path, f: &mut Formatter) -> std::fmt::Result { - std::fmt::Display::fmt(&path.display(), f) -} - -#[cfg(test)] -mod tests { - #[allow(unused_imports)] - use super::*; - - #[test] - fn stripping_unc_prefix() { - let path_with_unc = Path::new(r"\\?\H:\NBO\ci-build\target\debug\enso-build2.exe"); - let path_without_unc = Path::new(r"H:\NBO\ci-build\target\debug\enso-build2.exe"); - assert_eq!(path_with_unc.without_verbatim_prefix(), path_without_unc); - assert_eq!(path_without_unc.without_verbatim_prefix(), path_without_unc); - } -} diff --git a/build/ci_utils/src/extensions/str.rs b/build/ci_utils/src/extensions/str.rs deleted file mode 100644 index 22827cf0d6b0..000000000000 --- a/build/ci_utils/src/extensions/str.rs +++ /dev/null @@ -1,28 +0,0 @@ -use crate::prelude::*; - -use anyhow::Context; -use std::any::type_name; - - - -pub trait StrLikeExt { - // FIXME: this needs better name! 
- fn parse2(&self) -> Result; - - fn parse_through(&self) -> Result - where - T: FromString + TryInto, - >::Error: Into, { - self.parse2::()?.try_into().anyhow_err().context(format!( - "Failed to convert {} => {}.", - type_name::(), - type_name::(), - )) - } -} - -impl> StrLikeExt for T { - fn parse2(&self) -> Result { - U::from_str(self.as_ref()) - } -} diff --git a/build/ci_utils/src/fs.rs b/build/ci_utils/src/fs.rs index fd5828248b29..76959ed7cab2 100644 --- a/build/ci_utils/src/fs.rs +++ b/build/ci_utils/src/fs.rs @@ -3,7 +3,6 @@ use crate::prelude::*; use async_compression::tokio::bufread::GzipEncoder; use async_compression::Level; use fs_extra::dir::CopyOptions; -use std::fs::File; // ============== @@ -13,134 +12,14 @@ use std::fs::File; pub mod tokio; pub mod wrappers; -pub use wrappers::*; +pub use enso_build_base::fs::*; -///////////////////////////// - -/// Like the standard version but will create any missing parent directories from the path. -#[context("Failed to write path: {}", path.as_ref().display())] -pub fn write(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result { - create_parent_dir_if_missing(&path)?; - wrappers::write(&path, &contents) -} - -/// Serialize the data to JSON text and write it to the file. -/// -/// See [`write`]. -#[context("Failed to write path: {}", path.as_ref().display())] -pub fn write_json(path: impl AsRef, contents: &impl Serialize) -> Result { - let contents = serde_json::to_string(contents)?; - write(&path, &contents) -} - -/// Like the standard version but will create any missing parent directories from the path. -#[context("Failed to open path for writing: {}", path.as_ref().display())] -pub fn create(path: impl AsRef) -> Result { - create_parent_dir_if_missing(&path)?; - wrappers::create(&path) -} - -/////////////////////////// - -#[context("Failed to read the file: {}", path.as_ref().display())] -pub fn read_string_into(path: impl AsRef) -> Result { - read_to_string(&path)?.parse2() -} - -/// Create a directory (and all missing parent directories), -/// -/// Does not fail when a directory already exists. -#[context("Failed to create directory {}", path.as_ref().display())] -pub fn create_dir_if_missing(path: impl AsRef) -> Result { - let result = std::fs::create_dir_all(&path); - match result { - Err(e) if e.kind() == std::io::ErrorKind::AlreadyExists => Ok(()), - result => result.anyhow_err(), - } -} - -/// Create a parent directory for path (and all missing parent directories), -/// -/// Does not fail when a directory already exists. -#[context("Failed to create parent directory for {}", path.as_ref().display())] -pub fn create_parent_dir_if_missing(path: impl AsRef) -> Result { - if let Some(parent) = path.as_ref().parent() { - create_dir_if_missing(parent)?; - Ok(parent.into()) - } else { - bail!("No parent directory for path {}.", path.as_ref().display()) - } -} - -/// Remove a directory with all its subtree. -/// -/// Does not fail if the directory is not found. -#[tracing::instrument(fields(path = %path.as_ref().display()))] -#[context("Failed to remove directory {}", path.as_ref().display())] -pub fn remove_dir_if_exists(path: impl AsRef) -> Result { - let result = std::fs::remove_dir_all(&path); - match result { - Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()), - result => result.anyhow_err(), - } -} - -/// Remove a regular file. -/// -/// Does not fail if the file is not found. 
-#[tracing::instrument(fields(path = %path.as_ref().display()))]
-#[context("Failed to remove file {}", path.as_ref().display())]
-pub fn remove_file_if_exists(path: impl AsRef<Path>) -> Result<()> {
-    let result = std::fs::remove_file(&path);
-    match result {
-        Err(e) if e.kind() == std::io::ErrorKind::NotFound => Ok(()),
-        result => result.anyhow_err(),
-    }
-}
-
-/// Remove an entry that may be either a directory or a regular file.
-///
-/// Does not fail if the file is not found.
-#[context("Failed to remove entry {} (if exists)", path.as_ref().display())]
-pub fn remove_if_exists(path: impl AsRef<Path>) -> Result {
-    let path = path.as_ref();
-    if path.is_dir() {
-        remove_dir_if_exists(path)
-    } else {
-        remove_file_if_exists(path)
-    }
-}
-
-#[context("Failed to create symlink {} => {}", src.as_ref().display(), dst.as_ref().display())]
-pub fn symlink_auto(src: impl AsRef<Path>, dst: impl AsRef<Path>) -> Result {
-    create_parent_dir_if_missing(&dst)?;
-    symlink::symlink_auto(&src, &dst).anyhow_err()
-}
-
-/// Recreate the directory, so it exists and is empty.
-pub fn reset_dir(path: impl AsRef<Path>) -> Result {
-    let path = path.as_ref();
-    debug!("Will reset directory {}", path.display());
-    remove_dir_if_exists(path)?;
-    create_dir_if_missing(path)?;
-    Ok(())
-}
-
-pub fn require_exist(path: impl AsRef<Path>) -> Result {
-    if path.as_ref().exists() {
-        trace!("{} does exist.", path.as_ref().display());
-        Ok(())
-    } else {
-        bail!("{} does not exist.", path.as_ref().display())
-    }
-}
-
 #[tracing::instrument(skip_all, fields(
-    src = %source_file.as_ref().display(),
-    dest = %dest_dir.as_ref().display()),
-    err)]
+src = %source_file.as_ref().display(),
+dest = %dest_dir.as_ref().display()),
+err)]
 pub fn copy_to(source_file: impl AsRef<Path>, dest_dir: impl AsRef<Path>) -> Result<PathBuf> {
     require_exist(&source_file)?;
     create_dir_if_missing(dest_dir.as_ref())?;
@@ -153,9 +32,9 @@ pub fn copy_to(source_file: impl AsRef<Path>, dest_dir: impl AsRef<Path>) -> Result<PathBuf> {
 
 #[tracing::instrument(skip_all, fields(
-    src = %source_file.as_ref().display(),
-    dest = %destination_file.as_ref().display()),
-    err)]
+src = %source_file.as_ref().display(),
+dest = %destination_file.as_ref().display()),
+err)]
 pub fn copy(source_file: impl AsRef<Path>, destination_file: impl AsRef<Path>) -> Result {
     let source_file = source_file.as_ref();
     let destination_file = destination_file.as_ref();
@@ -168,7 +47,7 @@ pub fn copy(source_file: impl AsRef<Path>, destination_file: impl AsRef<Path>) -> Result {
             options.content_only = true;
             fs_extra::dir::copy(source_file, destination_file, &options)?;
         } else {
-            wrappers::copy(source_file, destination_file)?;
+            enso_build_base::fs::wrappers::copy(source_file, destination_file)?;
         }
     } else {
         bail!("Cannot copy to the root path: {}", destination_file.display());
@@ -176,9 +55,6 @@ pub fn copy(source_file: impl AsRef<Path>, destination_file: impl AsRef<Path>) -> Result {
     Ok(())
 }
 
-pub fn same_existing_path(source: impl AsRef<Path>, destination: impl AsRef<Path>) -> Result<bool> {
-    Ok(canonicalize(source)? == canonicalize(destination)?)
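The removal helpers above all share one idiom: treat `ErrorKind::NotFound` as success, so the operations are idempotent and safe to repeat. A minimal sketch of that pattern on its own:

```rust
use std::io;
use std::path::Path;

/// Remove a file; a file that is already gone counts as success.
fn remove_file_if_exists(path: impl AsRef<Path>) -> io::Result<()> {
    match std::fs::remove_file(&path) {
        Err(e) if e.kind() == io::ErrorKind::NotFound => Ok(()),
        result => result,
    }
}

fn main() -> io::Result<()> {
    std::fs::write("scratch.txt", "data")?;
    // Safe to call twice: the second call hits `NotFound` and still returns Ok.
    remove_file_if_exists("scratch.txt")?;
    remove_file_if_exists("scratch.txt")?;
    Ok(())
}
```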
-} pub async fn mirror_directory(source: impl AsRef, destination: impl AsRef) -> Result { create_dir_if_missing(destination.as_ref())?; @@ -195,47 +71,6 @@ pub async fn mirror_directory(source: impl AsRef, destination: impl AsRef< } } -#[context("Failed because the path does not point to a directory: {}", path.as_ref().display())] -pub fn expect_dir(path: impl AsRef) -> Result { - let filetype = metadata(&path)?.file_type(); - if filetype.is_dir() { - Ok(()) - } else { - bail!("File is not directory, its type is: {filetype:?}") - } -} - - -#[context("Failed because the path does not point to a regular file: {}", path.as_ref().display())] -pub fn expect_file(path: impl AsRef) -> Result { - let filetype = metadata(&path)?.file_type(); - if filetype.is_file() { - Ok(()) - } else { - bail!("File is not a regular file, its type is: {filetype:?}") - } -} - -#[cfg(not(target_os = "windows"))] -#[context("Failed to update permissions on `{}`", path.as_ref().display())] -pub fn allow_owner_execute(path: impl AsRef) -> Result { - use crate::anyhow::ResultExt; - use std::os::unix::prelude::*; - debug!("Setting executable permission on {}", path.as_ref().display()); - let metadata = path.as_ref().metadata()?; - let mut permissions = metadata.permissions(); - let mode = permissions.mode(); - let owner_can_execute = 0o0100; - permissions.set_mode(mode | owner_can_execute); - std::fs::set_permissions(path.as_ref(), permissions).anyhow_err() -} - -#[cfg(target_os = "windows")] -#[context("Failed to update permissions on `{}`", path.as_ref().display())] -pub fn allow_owner_execute(path: impl AsRef) -> Result { - // No-op on Windows. - Ok(()) -} /// Get the size of a file after gzip compression. pub async fn compressed_size(path: impl AsRef) -> Result { @@ -244,39 +79,10 @@ pub async fn compressed_size(path: impl AsRef) -> Result crate::io::read_length(encoded_stream).await.map(into) } -pub fn check_if_identical(source: impl AsRef, target: impl AsRef) -> bool { - (|| -> Result { - #[allow(clippy::if_same_then_else)] // should be different after TODO - if metadata(&source)?.len() == metadata(&target)?.len() { - Ok(true) - } else if read(&source)? == read(&target)? { - // TODO: Not good for large files, should process them chunk by chunk. 
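`allow_owner_execute` above is a thin wrapper over the Unix mode bits: read the current permissions, OR in the owner-execute bit (`0o100`), and write them back. A Unix-only sketch without the crate's error wrappers:

```rust
// A minimal sketch; uses only std, and is a no-op target on non-Unix systems.
#[cfg(unix)]
fn allow_owner_execute(path: impl AsRef<std::path::Path>) -> std::io::Result<()> {
    use std::os::unix::fs::PermissionsExt;
    let metadata = std::fs::metadata(&path)?;
    let mut permissions = metadata.permissions();
    // 0o100 is the owner-execute bit in the traditional rwxrwxrwx mode layout.
    permissions.set_mode(permissions.mode() | 0o100);
    std::fs::set_permissions(path, permissions)
}

#[cfg(unix)]
fn main() -> std::io::Result<()> {
    std::fs::write("script.sh", "#!/bin/sh\necho hello\n")?;
    allow_owner_execute("script.sh")
}

#[cfg(not(unix))]
fn main() {}
```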
- Ok(true) - } else { - Ok(false) - } - })() - .unwrap_or(false) -} - -pub fn copy_file_if_different(source: impl AsRef, target: impl AsRef) -> Result { - if !check_if_identical(&source, &target) { - trace!( - "Modified, will copy {} to {}.", - source.as_ref().display(), - target.as_ref().display() - ); - copy(&source, &target)?; - } else { - trace!("No changes, skipping {}.", source.as_ref().display()) - } - Ok(()) -} - #[tracing::instrument(skip_all, fields( - src = %source.as_ref().display(), - dest = %target.as_ref().display()), - err)] +src = %source.as_ref().display(), +dest = %target.as_ref().display()), +err)] pub async fn copy_if_different(source: impl AsRef, target: impl AsRef) -> Result { if tokio::metadata(&source).await?.is_file() { return copy_file_if_different(source, target); @@ -293,17 +99,8 @@ pub async fn copy_if_different(source: impl AsRef, target: impl AsRef Result { - setup_logging()?; - copy_if_different("../../..", r"C:\temp\out").await?; - Ok(()) - } +#[context("Failed to create symlink {} => {}", src.as_ref().display(), dst.as_ref().display())] +pub fn symlink_auto(src: impl AsRef, dst: impl AsRef) -> Result { + create_parent_dir_if_missing(&dst)?; + symlink::symlink_auto(&src, &dst).anyhow_err() } diff --git a/build/ci_utils/src/fs/tokio.rs b/build/ci_utils/src/fs/tokio.rs index 544889a58776..95c19723255c 100644 --- a/build/ci_utils/src/fs/tokio.rs +++ b/build/ci_utils/src/fs/tokio.rs @@ -50,6 +50,12 @@ pub async fn create_parent_dir_if_missing(path: impl AsRef) -> Result, contents: impl AsRef<[u8]>) -> Result { + create_parent_dir_if_missing(&path).await?; + crate::fs::wrappers::tokio::write(&path, &contents).await.anyhow_err() +} + pub async fn copy_to_file( mut content: impl AsyncRead + Unpin, output_path: impl AsRef, @@ -71,6 +77,15 @@ pub async fn remove_dir_if_exists(path: impl AsRef) -> Result { } } +pub async fn perhaps_remove_dir_if_exists(dry_run: bool, path: impl AsRef) -> Result { + if dry_run { + info!("Would remove directory {}.", path.as_ref().display()); + Ok(()) + } else { + remove_dir_if_exists(path).await + } +} + /// Recreate directory, so it exists and is empty. pub async fn reset_dir(path: impl AsRef) -> Result { let path = path.as_ref(); @@ -94,3 +109,18 @@ pub async fn write_iter( })?; Ok(()) } + +/// Append contents to the file. +/// +/// If the file does not exist, it will be created. +pub async fn append(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result { + tokio::fs::OpenOptions::new() + .append(true) + .create(true) + .open(&path) + .await + .with_context(|| format!("Failed to open file {} for appending.", path.as_ref().display()))? + .write_all(contents.as_ref()) + .await + .with_context(|| format!("Failed to write to file {}.", path.as_ref().display())) +} diff --git a/build/ci_utils/src/fs/wrappers.rs b/build/ci_utils/src/fs/wrappers.rs index 691cf41c2cd3..4512340b73bc 100644 --- a/build/ci_utils/src/fs/wrappers.rs +++ b/build/ci_utils/src/fs/wrappers.rs @@ -1,82 +1,5 @@ -//! Wrappers over [`std::fs`] functions that provide sensible error messages, i.e. explaining what -//! operation was attempted and what was the relevant path. 
- -use crate::prelude::*; - -use std::fs::File; -use std::fs::Metadata; -use std::io::Write; - - // ============== // === Export === // ============== pub mod tokio; - - - -#[context("Failed to obtain metadata for file: {}", path.as_ref().display())] -pub fn metadata>(path: P) -> Result { - std::fs::metadata(&path).anyhow_err() -} - -#[context("Failed to copy file from {} to {}", from.as_ref().display(), to.as_ref().display())] -pub fn copy(from: impl AsRef, to: impl AsRef) -> Result { - std::fs::copy(&from, &to).anyhow_err() -} - -#[context("Failed to rename file from {} to {}", from.as_ref().display(), to.as_ref().display())] -pub fn rename(from: impl AsRef, to: impl AsRef) -> Result { - std::fs::rename(&from, &to).anyhow_err() -} - -#[context("Failed to read the file: {}", path.as_ref().display())] -pub fn read(path: impl AsRef) -> Result> { - std::fs::read(&path).anyhow_err() -} - -#[context("Failed to read the directory: {}", path.as_ref().display())] -pub fn read_dir(path: impl AsRef) -> Result { - std::fs::read_dir(&path).anyhow_err() -} - -#[context("Failed to read the file: {}", path.as_ref().display())] -pub fn read_to_string(path: impl AsRef) -> Result { - std::fs::read_to_string(&path).anyhow_err() -} - -#[context("Failed to write path: {}", path.as_ref().display())] -pub fn write(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result { - std::fs::write(&path, contents).anyhow_err() -} - -pub fn append(path: impl AsRef, contents: impl AsRef<[u8]>) -> Result { - std::fs::OpenOptions::new() - .append(true) - .create(true) - .open(&path) - .context(format!("Failed to open {} for writing.", path.as_ref().display()))? - .write_all(contents.as_ref()) - .context(format!("Failed to write to {}.", path.as_ref().display())) -} - -#[context("Failed to open path for reading: {}", path.as_ref().display())] -pub fn open(path: impl AsRef) -> Result { - File::open(&path).anyhow_err() -} - -#[context("Failed to open path for writing: {}", path.as_ref().display())] -pub fn create(path: impl AsRef) -> Result { - File::create(&path).anyhow_err() -} - -#[context("Failed to canonicalize path: {}", path.as_ref().display())] -pub fn canonicalize(path: impl AsRef) -> Result { - std::fs::canonicalize(&path).anyhow_err() -} - -#[context("Failed to create missing directories no path: {}", path.as_ref().display())] -pub fn create_dir_all(path: impl AsRef) -> Result { - std::fs::create_dir_all(&path).anyhow_err() -} diff --git a/build/ci_utils/src/fs/wrappers/tokio.rs b/build/ci_utils/src/fs/wrappers/tokio.rs index 2c85d5bf4250..0f7fb00374d1 100644 --- a/build/ci_utils/src/fs/wrappers/tokio.rs +++ b/build/ci_utils/src/fs/wrappers/tokio.rs @@ -2,12 +2,13 @@ use crate::prelude::*; use tokio::fs::File; use tokio::io::AsyncReadExt; +use tokio_util::io::ReaderStream; -#[context("Failed to obtain metadata for file: {}", path.as_ref().display())] -pub async fn metadata>(path: P) -> Result { - tokio::fs::metadata(&path).await.anyhow_err() +pub fn metadata>(path: P) -> BoxFuture<'static, Result> { + let path = path.as_ref().to_owned(); + tokio::fs::metadata(path).anyhow_err().boxed() } #[context("Failed to open path for reading: {}", path.as_ref().display())] @@ -15,6 +16,17 @@ pub async fn open(path: impl AsRef) -> Result { File::open(&path).await.anyhow_err() } +// #[context("Failed to open path for reading: {}", path.as_ref().display())] +pub fn open_stream(path: impl AsRef) -> BoxFuture<'static, Result>> { + let path = path.as_ref().to_owned(); + let file = open(path); + async move { + let file = file.await?; + 
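Both the deleted synchronous `append` above and its new tokio counterpart rely on the same `OpenOptions` combination: `append(true)` plus `create(true)`, so the first write creates the file and later writes extend it. A std-only sketch of the idiom:

```rust
use std::io::Write;

/// Append bytes to a file, creating the file first if it does not exist.
fn append(path: &str, contents: &[u8]) -> std::io::Result<()> {
    std::fs::OpenOptions::new()
        .append(true)
        .create(true) // create rather than fail when the file is missing
        .open(path)?
        .write_all(contents)
}

fn main() -> std::io::Result<()> {
    append("log.txt", b"first line\n")?;
    append("log.txt", b"second line\n")
}
```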
Ok(ReaderStream::new(file)) + } + .boxed() +} + #[context("Failed to open path for writing: {}", path.as_ref().display())] pub async fn create(path: impl AsRef) -> Result { File::create(&path).await.anyhow_err() diff --git a/build/ci_utils/src/future.rs b/build/ci_utils/src/future.rs index cf5bc7d4f720..2bb6ae99c6e7 100644 --- a/build/ci_utils/src/future.rs +++ b/build/ci_utils/src/future.rs @@ -4,6 +4,13 @@ use futures_util::future::OptionFuture; +pub fn receiver_to_stream( + mut receiver: tokio::sync::mpsc::Receiver, +) -> impl Stream { + futures::stream::poll_fn(move |ctx| receiver.poll_recv(ctx)) +} + + #[derive(Copy, Clone, Debug)] pub enum AsyncPolicy { Sequential, diff --git a/build/ci_utils/src/github.rs b/build/ci_utils/src/github.rs index 78e5eed1b1c6..ad64a6e593a6 100644 --- a/build/ci_utils/src/github.rs +++ b/build/ci_utils/src/github.rs @@ -1,19 +1,7 @@ use crate::prelude::*; -use crate::cache::download::DownloadFile; - -use anyhow::Context; -use headers::HeaderMap; -use headers::HeaderValue; use octocrab::models::repos::Asset; use octocrab::models::repos::Release; -use octocrab::models::workflows::WorkflowListArtifact; -use octocrab::models::ArtifactId; -use octocrab::models::AssetId; -use octocrab::models::ReleaseId; -use octocrab::models::RunId; -use octocrab::params::actions::ArchiveFormat; -use reqwest::Response; @@ -21,6 +9,12 @@ const MAX_PER_PAGE: u8 = 100; pub mod model; pub mod release; +pub mod repo; +pub mod workflow; + +pub use repo::Repo; +pub use repo::RepoRef; + /// Goes over all the pages and returns result. /// @@ -35,170 +29,8 @@ pub async fn get_all( client.all_pages(first_page).await } -/// Entity that uniquely identifies a GitHub-hosted repository. -#[async_trait] -pub trait RepoPointer: Display { - fn owner(&self) -> &str; - fn name(&self) -> &str; - - /// Generate a token that can be used to register a new runner for this repository. - async fn generate_runner_registration_token( - &self, - octocrab: &Octocrab, - ) -> Result { - let path = - iformat!("/repos/{self.owner()}/{self.name()}/actions/runners/registration-token"); - let url = octocrab.absolute_url(path)?; - octocrab.post(url, EMPTY_REQUEST_BODY).await.context(format!( - "Failed to generate a runner registration token for the {self} repository." - )) - } - - /// The repository's URL. 
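`receiver_to_stream` earlier in this hunk adapts a tokio channel into a `Stream` by delegating each `poll_next` to `Receiver::poll_recv`. A runnable sketch of the same adapter (crate versions assumed: `tokio` 1.x with the `macros`, `rt`, and `sync` features, plus `futures-util` 0.3):

```rust
use futures_util::StreamExt;

fn receiver_to_stream<T>(
    mut receiver: tokio::sync::mpsc::Receiver<T>,
) -> impl futures_util::Stream<Item = T> {
    // `poll_recv` yields `Poll<Option<T>>`, exactly the shape `poll_fn` needs.
    futures_util::stream::poll_fn(move |ctx| receiver.poll_recv(ctx))
}

#[tokio::main]
async fn main() {
    let (tx, rx) = tokio::sync::mpsc::channel(4);
    tokio::spawn(async move {
        for i in 0..3 {
            tx.send(i).await.expect("receiver alive");
        }
        // Dropping `tx` closes the channel, which ends the stream.
    });
    let collected: Vec<i32> = receiver_to_stream(rx).collect().await;
    assert_eq!(collected, vec![0, 1, 2]);
}
```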
- fn url(&self) -> Result { - let url_text = iformat!("https://github.com/{self.owner()}/{self.name()}"); - Url::parse(&url_text) - .context(format!("Failed to generate an URL for the {self} repository.")) - } - - fn repos<'a>(&'a self, client: &'a Octocrab) -> octocrab::repos::RepoHandler<'a> { - client.repos(self.owner(), self.name()) - } - - async fn all_releases(&self, client: &Octocrab) -> Result> { - get_all(client, self.repos(client).releases().list().per_page(MAX_PER_PAGE).send()) - .await - .context(format!("Failed to list all releases in the {self} repository.")) - } - - async fn latest_release(&self, client: &Octocrab) -> Result { - self.repos(client) - .releases() - .get_latest() - .await - .context(format!("Failed to get the latest release in the {self} repository.")) - } - - async fn find_release_by_id( - &self, - client: &Octocrab, - release_id: ReleaseId, - ) -> Result { - let repo_handler = self.repos(client); - let releases_handler = repo_handler.releases(); - releases_handler - .get_by_id(release_id) - .await - .context(format!("Failed to find release by id `{release_id}` in `{self}`.")) - } - - #[tracing::instrument(skip(client), fields(%self, %text), err)] - async fn find_release_by_text(&self, client: &Octocrab, text: &str) -> anyhow::Result { - self.all_releases(client) - .await? - .into_iter() - .find(|release| release.tag_name.contains(text)) - .inspect(|release| info!("Found release at: {} (id={}).", release.html_url, release.id)) - .context(format!("No release with tag matching `{text}` in {self}.")) - } - - #[tracing::instrument(skip(client), fields(%self, %run_id, %name), err, ret)] - async fn find_artifact_by_name( - &self, - client: &Octocrab, - run_id: RunId, - name: &str, - ) -> Result { - let artifacts = client - .actions() - .list_workflow_run_artifacts(self.owner(), self.name(), run_id) - .per_page(100) - .send() - .await - .context(format!("Failed to list artifacts of run {run_id} in {self}."))? - .value - .context("Failed to find any artifacts.")?; - - artifacts - .into_iter() - .find(|artifact| artifact.name == name) - .context(format!("Failed to find artifact by name '{name}'.")) - } - - async fn download_artifact(&self, client: &Octocrab, artifact_id: ArtifactId) -> Result { - client - .actions() - .download_artifact(self.owner(), self.name(), artifact_id, ArchiveFormat::Zip) - .await - .context(format!("Failed to download artifact with ID={artifact_id}.")) - } - - async fn download_and_unpack_artifact( - &self, - client: &Octocrab, - artifact_id: ArtifactId, - output_dir: &Path, - ) -> Result { - let bytes = self.download_artifact(client, artifact_id).await?; - crate::archive::zip::extract_bytes(bytes, output_dir)?; - Ok(()) - } - - #[tracing::instrument(name="Get the asset information.", skip(client), fields(self=%self), err)] - async fn asset(&self, client: &Octocrab, asset_id: AssetId) -> Result { - self.repos(client).releases().get_asset(asset_id).await.anyhow_err() - } - - fn download_asset_job(&self, octocrab: &Octocrab, asset_id: AssetId) -> DownloadFile { - let path = iformat!("/repos/{self.owner()}/{self.name()}/releases/assets/{asset_id}"); - // Unwrap will work, because we are appending relative URL constant. 
- let url = octocrab.absolute_url(path).unwrap(); - DownloadFile { - client: octocrab.client.clone(), - key: crate::cache::download::Key { - url, - additional_headers: HeaderMap::from_iter([( - reqwest::header::ACCEPT, - HeaderValue::from_static(mime::APPLICATION_OCTET_STREAM.as_ref()), - )]), - }, - } - } - - #[tracing::instrument(name="Download the asset.", skip(client), fields(self=%self), err)] - async fn download_asset(&self, client: &Octocrab, asset_id: AssetId) -> Result { - self.download_asset_job(client, asset_id).send_request().await - } - - #[tracing::instrument(name="Download the asset to a file.", skip(client, output_path), fields(self=%self, dest=%output_path.as_ref().display()), err)] - async fn download_asset_as( - &self, - client: &Octocrab, - asset_id: AssetId, - output_path: impl AsRef + Send + Sync + 'static, - ) -> Result { - let response = self.download_asset(client, asset_id).await?; - crate::io::web::stream_response_to_file(response, &output_path).await - } - - #[tracing::instrument(name="Download the asset to a directory.", - skip(client, output_dir, asset), - fields(self=%self, dest=%output_dir.as_ref().display(), id = %asset.id), - err)] - async fn download_asset_to( - &self, - client: &Octocrab, - asset: &Asset, - output_dir: impl AsRef + Send + Sync + 'static, - ) -> Result { - let output_path = output_dir.as_ref().join(&asset.name); - self.download_asset_as(client, asset.id, output_path.clone()).await?; - Ok(output_path) - } -} - #[async_trait] -pub trait OrganizationPointer { +pub trait IsOrganization { /// Organization name. fn name(&self) -> &str; @@ -221,7 +53,7 @@ pub trait OrganizationPointer { /// Get the biggest asset containing given text. #[instrument(skip(release), fields(id = %release.id, url = %release.url), err)] -pub fn find_asset_by_text<'a>(release: &'a Release, text: &str) -> anyhow::Result<&'a Asset> { +pub fn find_asset_by_text<'a>(release: &'a Release, text: &str) -> Result<&'a Asset> { release .assets .iter() @@ -235,7 +67,7 @@ pub fn find_asset_by_text<'a>(release: &'a Release, text: &str) -> anyhow::Resul /// Get the biggest asset containing given text. #[instrument(skip(release), fields(id = %release.id, url = %release.url), ret(Display), err)] -pub fn find_asset_url_by_text<'a>(release: &'a Release, text: &str) -> anyhow::Result<&'a Url> { +pub fn find_asset_url_by_text<'a>(release: &'a Release, text: &str) -> Result<&'a Url> { let matching_asset = find_asset_by_text(release, text)?; Ok(&matching_asset.browser_download_url) } @@ -244,7 +76,7 @@ pub fn find_asset_url_by_text<'a>(release: &'a Release, text: &str) -> anyhow::R /// /// Octocrab client does not need to bo authorized with a PAT for this. However, being authorized /// will help with GitHub API query rate limits. 
-pub async fn latest_runner_url(octocrab: &Octocrab, os: OS) -> anyhow::Result { +pub async fn latest_runner_url(octocrab: &Octocrab, os: OS) -> Result { let latest_release = octocrab.repos("actions", "runner").releases().get_latest().await?; let os_name = match os { diff --git a/build/ci_utils/src/github/release.rs b/build/ci_utils/src/github/release.rs index 18fe4cce639c..ebaac94c54b9 100644 --- a/build/ci_utils/src/github/release.rs +++ b/build/ci_utils/src/github/release.rs @@ -1,46 +1,134 @@ use crate::prelude::*; +use crate::github::Repo; + +use mime::Mime; +use octocrab::models::repos::Asset; +use octocrab::models::repos::Release; use octocrab::models::ReleaseId; use reqwest::Body; use tracing::instrument; -#[context("Failed to upload the asset {}", asset.as_ref().display())] -#[instrument(skip_all, fields(source = %asset.as_ref().display(), %repo, %release))] -pub async fn upload_asset( - repo: &(impl RepoPointer + Send + Sync + 'static), - client: &reqwest::Client, - release: ReleaseId, - asset: impl AsRef + Send + Sync, -) -> Result { - let upload_url = format!( - "https://uploads.github.com/repos/{}/{}/releases/{}/assets", - repo.owner(), - repo.name(), - release - ); - let asset_path = asset.as_ref(); - let mime = new_mime_guess::from_path(asset_path).first_or_octet_stream(); - let file = tokio::fs::File::open(asset_path).await?; - let file_size = file.metadata().await?.len(); - let file_contents_stream = tokio_util::io::ReaderStream::new(file); - let body = Body::wrap_stream(file_contents_stream); - let asset_name = asset_path.file_name().unwrap().to_string_lossy(); - let request = client - .post(upload_url) - .query(&[("name", asset_name.as_ref())]) - .header(reqwest::header::ACCEPT, "application/vnd.github.v3+json") - .header(reqwest::header::CONTENT_TYPE, mime.to_string()) - .header(reqwest::header::CONTENT_LENGTH, file_size) - .body(body) - .build()?; - - dbg!(&request); - let response = client.execute(request).await?; - dbg!(&response); - response.error_for_status()?; - Ok(()) +/// Types that uniquely identify a release and can be used to fetch it from GitHub. +pub trait IsRelease: Debug { + /// The release ID. + fn id(&self) -> ReleaseId; + + /// The repository where the release is located. + fn repo(&self) -> Repo; + + /// Client used to perform GitHub API operations. + fn octocrab(&self) -> &Octocrab; +} + +#[async_trait] +pub trait IsReleaseExt: IsRelease + Sync { + /// Upload a new asset to the release. + fn upload_asset( + &self, + asset_name: impl AsRef, + content_type: Mime, + content_length: u64, + body: impl Into, + ) -> BoxFuture<'static, Result> { + let upload_url = format!( + "https://uploads.github.com/repos/{repo}/releases/{release_id}/assets", + repo = self.repo(), + release_id = self.id(), + ); + let body = body.into(); + let request = self + .octocrab() + .client + .post(upload_url) + .query(&[("name", asset_name.as_ref())]) + .header(reqwest::header::ACCEPT, "application/vnd.github.v3+json") + .header(reqwest::header::CONTENT_TYPE, content_type.to_string()) + .header(reqwest::header::CONTENT_LENGTH, content_length) + .body(body); + + async move { + crate::io::web::execute(request).await?.json().await.context("Failed to upload asset.") + } + .boxed() + } + + /// Upload a new asset to the release from a given file. + /// + /// The filename will be used to name the asset and deduce MIME content type. 
+ // #[context("Failed to upload an asset from the file under {}", path.as_ref().display())] + #[instrument(skip_all, fields(source = %path.as_ref().display()))] + async fn upload_asset_file(&self, path: impl AsRef + Send) -> Result { + let error_msg = + format!("Failed to upload an asset from the file under {}", path.as_ref().display()); + async move { + let path = path.as_ref().to_path_buf(); + let asset_name = path.file_name().with_context(|| { + format!("The given path {} does not contain a filename.", path.display()) + })?; + let content_type = new_mime_guess::from_path(&path).first_or_octet_stream(); + let file_size = crate::fs::tokio::metadata(&path).await?.len(); + let file = crate::fs::tokio::open_stream(&path).await?; + let body = Body::wrap_stream(file); + self.upload_asset(asset_name.as_str(), content_type, file_size, body).await + } + .await + .context(error_msg) + } + + async fn upload_compressed_dir(&self, path: impl AsRef + Send) -> Result { + let dir_to_upload = path.as_ref(); + let temp_dir = tempfile::tempdir()?; + let archive_path = + dir_to_upload.with_parent(temp_dir.path()).with_appended_extension("tar.gz"); + crate::archive::compress_directory(&archive_path, &dir_to_upload).await?; + self.upload_asset_file(archive_path).await + } + + async fn get(&self) -> Result { + self.octocrab() + .repos(self.repo().owner(), self.repo().name()) + .releases() + .get_by_id(self.id()) + .await + .anyhow_err() + } +} + +impl IsReleaseExt for T where T: IsRelease + Sync {} + +/// A release on GitHub. +#[derive(Clone, Derivative)] +#[derivative(Debug)] +pub struct ReleaseHandle { + #[derivative(Debug(format_with = "std::fmt::Display::fmt"))] + pub repo: Repo, + pub id: ReleaseId, + #[derivative(Debug = "ignore")] + pub octocrab: Octocrab, +} + +impl IsRelease for ReleaseHandle { + fn id(&self) -> ReleaseId { + self.id + } + + fn repo(&self) -> Repo { + self.repo.clone() + } + + fn octocrab(&self) -> &Octocrab { + &self.octocrab + } +} + +impl ReleaseHandle { + pub fn new(octocrab: &Octocrab, repo: impl Into, id: ReleaseId) -> Self { + let repo = repo.into(); + Self { repo, id, octocrab: octocrab.clone() } + } } #[cfg(test)] diff --git a/build/ci_utils/src/github/repo.rs b/build/ci_utils/src/github/repo.rs new file mode 100644 index 000000000000..bfd3a03e06bd --- /dev/null +++ b/build/ci_utils/src/github/repo.rs @@ -0,0 +1,305 @@ +use crate::prelude::*; + +use crate::cache::download::DownloadFile; +use crate::github; +use crate::github::model; +use crate::github::MAX_PER_PAGE; + +use headers::HeaderMap; +use headers::HeaderValue; +use octocrab::models::repos::Asset; +use octocrab::models::repos::Release; +use octocrab::models::workflows::WorkflowListArtifact; +use octocrab::models::ArtifactId; +use octocrab::models::AssetId; +use octocrab::models::ReleaseId; +use octocrab::models::RunId; +use octocrab::params::actions::ArchiveFormat; +use reqwest::Response; + + + +/// Owned data denoting a specific GitHub repository. +#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize, derive_more::Display)] +#[display(fmt = "{}/{}", owner, name)] +pub struct Repo { + /// Owner - an organization's or user's name. + pub owner: String, + pub name: String, +} + +impl IsRepo for Repo { + fn owner(&self) -> &str { + &self.owner + } + + fn name(&self) -> &str { + &self.name + } +} + +/// Parse from strings in format "owner/name". Opposite of `Display`. 
+impl std::str::FromStr for Repo {
+    type Err = anyhow::Error;
+
+    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
+        RepoRef::try_from(s).map(Into::into)
+    }
+}
+
+impl<'a> From<RepoRef<'a>> for Repo {
+    fn from(repo: RepoRef<'a>) -> Self {
+        Repo { owner: repo.owner.to_owned(), name: repo.name.to_owned() }
+    }
+}
+
+impl Repo {
+    pub fn new(owner: impl Into<String>, name: impl Into<String>) -> Self {
+        Self { owner: owner.into(), name: name.into() }
+    }
+}
+
+
+/// Non-owning equivalent of `Repo`.
+///
+/// Particularly useful for defining `const` repositories.
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Deserialize, Serialize, derive_more::Display)]
+#[display(fmt = "{}/{}", owner, name)]
+pub struct RepoRef<'a> {
+    /// Owner - an organization's or user's name.
+    pub owner: &'a str,
+    pub name: &'a str,
+}
+
+impl<'a> IsRepo for RepoRef<'a> {
+    fn owner(&self) -> &str {
+        self.owner
+    }
+
+    fn name(&self) -> &str {
+        self.name
+    }
+}
+
+impl<'a> RepoRef<'a> {
+    pub const fn new<T1, T2>(owner: &'a T1, name: &'a T2) -> Self
+    where
+        T1: ~const AsRef<str> + ?Sized,
+        T2: ~const AsRef<str> + ?Sized, {
+        Self { owner: owner.as_ref(), name: name.as_ref() }
+    }
+}
+
+/// Note that we chose to implement `TryFrom` rather than `FromStr` for `RepoRef` because
+/// `FromStr` requires the parsed value to be owned (or at least lifetime-independent from the
+/// input), which is not the case for `RepoRef`.
+impl<'a> TryFrom<&'a str> for RepoRef<'a> {
+    type Error = anyhow::Error;
+
+    fn try_from(value: &'a str) -> std::result::Result<Self, Self::Error> {
+        match value.split('/').collect_vec().as_slice() {
+            [owner, name] => Ok(Self { owner, name }),
+            slice => bail!("Failed to parse string '{}': Splitting by '/' should yield exactly 2 pieces, found: {}", value, slice.len()),
+        }
+    }
+}
+
+/// Any entity that uniquely identifies a GitHub-hosted repository.
+#[async_trait]
+pub trait IsRepo: Display {
+    fn owner(&self) -> &str;
+    fn name(&self) -> &str;
+
+    /// The repository's URL.
+    fn url(&self) -> Result<Url> {
+        let url_text = iformat!("https://github.com/{self.owner()}/{self.name()}");
+        Url::parse(&url_text)
+            .context(format!("Failed to generate a URL for the {self} repository."))
+    }
+
+    fn handle(&self, octocrab: &Octocrab) -> Handle<Self>
+    where Self: Clone + Sized {
+        Handle { repo: self.clone(), octocrab: octocrab.clone() }
+    }
+}
+
+/// A handle to a specific GitHub repository.
+///
+/// It includes a client (so also an authentication token) and a repository.
+#[derive(Debug, Clone)]
+pub struct Handle<Repo> {
+    pub octocrab: Octocrab,
+    pub repo: Repo,
+}
+
+impl<R: Display> Display for Handle<R> {
+    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.repo)
+    }
+}
+
+impl<R: IsRepo> IsRepo for Handle<R> {
+    fn owner(&self) -> &str {
+        self.repo.owner()
+    }
+
+    fn name(&self) -> &str {
+        self.repo.name()
+    }
+}
+
+impl<R: IsRepo> Handle<R> {
+    /// Create a new handle.
+    pub fn new(octocrab: &Octocrab, repo: R) -> Self {
+        Self { octocrab: octocrab.clone(), repo }
+    }
+
+    /// Generate a token that can be used to register a new runner for this repository.
+    pub async fn generate_runner_registration_token(&self) -> Result<model::RegistrationToken> {
+        let path =
+            iformat!("/repos/{self.owner()}/{self.name()}/actions/runners/registration-token");
+        let url = self.octocrab.absolute_url(path)?;
+        self.octocrab.post(url, EMPTY_REQUEST_BODY).await.context(format!(
+            "Failed to generate a runner registration token for the {self} repository."
+        ))
+    }
+
+    pub fn repos(&self) -> octocrab::repos::RepoHandler {
+        self.octocrab.repos(self.owner(), self.name())
+    }
+
+    pub async fn all_releases(&self) -> Result<Vec<Release>> {
+        github::get_all(
+            &self.octocrab,
+            self.repos().releases().list().per_page(MAX_PER_PAGE).send(),
+        )
+        .await
+        .context(format!("Failed to list all releases in the {self} repository."))
+    }
+
+    pub async fn latest_release(&self) -> Result<Release> {
+        self.repos()
+            .releases()
+            .get_latest()
+            .await
+            .context(format!("Failed to get the latest release in the {self} repository."))
+    }
+
+    pub async fn find_release_by_id(&self, release_id: ReleaseId) -> Result<Release> {
+        let repo_handler = self.repos();
+        let releases_handler = repo_handler.releases();
+        releases_handler
+            .get_by_id(release_id)
+            .await
+            .context(format!("Failed to find release by id `{release_id}` in `{self}`."))
+    }
+
+    #[tracing::instrument(fields(%self, %text), err)]
+    pub async fn find_release_by_text(&self, text: &str) -> anyhow::Result<Release> {
+        self.all_releases()
+            .await?
+            .into_iter()
+            .find(|release| release.tag_name.contains(text))
+            .inspect(|release| info!("Found release at: {} (id={}).", release.html_url, release.id))
+            .context(format!("No release with tag matching `{text}` in {self}."))
+    }
+
+    #[tracing::instrument(fields(%self, %run_id, %name), err, ret)]
+    pub async fn find_artifact_by_name(
+        &self,
+        run_id: RunId,
+        name: &str,
+    ) -> Result<WorkflowListArtifact> {
+        let artifacts = self
+            .octocrab
+            .actions()
+            .list_workflow_run_artifacts(self.owner(), self.name(), run_id)
+            .per_page(100)
+            .send()
+            .await
+            .context(format!("Failed to list artifacts of run {run_id} in {self}."))?
+            .value
+            .context("Failed to find any artifacts.")?;
+
+        artifacts
+            .into_iter()
+            .find(|artifact| artifact.name == name)
+            .context(format!("Failed to find artifact by name '{name}'."))
+    }
+
+    pub async fn download_artifact(&self, artifact_id: ArtifactId) -> Result<Bytes> {
+        self.octocrab
+            .actions()
+            .download_artifact(self.owner(), self.name(), artifact_id, ArchiveFormat::Zip)
+            .await
+            .context(format!("Failed to download artifact with ID={artifact_id}."))
+    }
+
+    pub async fn download_and_unpack_artifact(
+        &self,
+        artifact_id: ArtifactId,
+        output_dir: &Path,
+    ) -> Result {
+        let bytes = self.download_artifact(artifact_id).await?;
+        crate::archive::zip::extract_bytes(bytes, output_dir)?;
+        Ok(())
+    }
+
+    #[tracing::instrument(name="Get the asset information.", fields(self=%self), err)]
+    pub async fn asset(&self, asset_id: AssetId) -> Result<Asset> {
+        self.repos().releases().get_asset(asset_id).await.anyhow_err()
+    }
+
+    pub fn download_asset_job(&self, asset_id: AssetId) -> DownloadFile {
+        let path = iformat!("/repos/{self.owner()}/{self.name()}/releases/assets/{asset_id}");
+        // Unwrap will work, because we are appending a relative URL constant.
+        let url = self.octocrab.absolute_url(path).unwrap();
+        DownloadFile {
+            client: self.octocrab.client.clone(),
+            key: crate::cache::download::Key {
+                url,
+                additional_headers: HeaderMap::from_iter([(
+                    reqwest::header::ACCEPT,
+                    HeaderValue::from_static(mime::APPLICATION_OCTET_STREAM.as_ref()),
+                )]),
+            },
+        }
+    }
+
+    #[tracing::instrument(name="Download the asset.", fields(self=%self), err)]
+    pub async fn download_asset(&self, asset_id: AssetId) -> Result<Response> {
+        self.download_asset_job(asset_id).send_request().await
+    }
+
+    #[tracing::instrument(name="Download the asset to a file.", skip(output_path), fields(self=%self, dest=%output_path.as_ref().display()), err)]
+    pub async fn download_asset_as(
+        &self,
+        asset_id: AssetId,
+        output_path: impl AsRef<Path> + Send + Sync + 'static,
+    ) -> Result {
+        let response = self.download_asset(asset_id).await?;
+        crate::io::web::stream_response_to_file(response, &output_path).await
+    }
+
+    #[tracing::instrument(name="Download the asset to a directory.",
+        skip(output_dir, asset),
+        fields(self=%self, dest=%output_dir.as_ref().display(), id = %asset.id),
+        err)]
+    pub async fn download_asset_to(
+        &self,
+        asset: &Asset,
+        output_dir: impl AsRef<Path> + Send + Sync + 'static,
+    ) -> Result<PathBuf> {
+        let output_path = output_dir.as_ref().join(&asset.name);
+        self.download_asset_as(asset.id, output_path.clone()).await?;
+        Ok(output_path)
+    }
+
+    /// Get the repository information.
+    pub async fn get(&self) -> Result<octocrab::models::Repository> {
+        self.repos()
+            .get()
+            .await
+            .with_context(|| format!("Failed to get the information for the {self} repository."))
+    }
+}
diff --git a/build/ci_utils/src/github/workflow.rs b/build/ci_utils/src/github/workflow.rs
new file mode 100644
index 000000000000..2e635173c640
--- /dev/null
+++ b/build/ci_utils/src/github/workflow.rs
@@ -0,0 +1,38 @@
+use crate::prelude::*;
+
+use crate::github;
+
+
+
+/// HTTP body payload for the workflow dispatch.
+#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)]
+pub struct RequestBody<S, T> {
+    /// Reference to the commit or branch to build. Should be string-like.
+    pub r#ref: S,
+
+    /// Inputs to the workflow.
+    pub inputs: T,
+}
+
+/// Invoke a workflow dispatch through the GitHub REST API.
+pub async fn dispatch(
+    repo: &github::repo::Handle<impl IsRepo>,
+    workflow_id: impl AsRef<str> + Send + Sync + 'static,
+    r#ref: impl AsRef<str> + Send + Sync + 'static,
+    inputs: &impl Serialize,
+) -> Result {
+    // Don't use octocrab for this: its error handling is broken (it treats a 404 error as Ok).
+    let workflow_id = workflow_id.as_ref();
+    let name = repo.name();
+    let owner = repo.owner();
+    let url = repo.octocrab.absolute_url(format!(
+        "/repos/{owner}/{name}/actions/workflows/{workflow_id}/dispatches"
+    ))?;
+    let r#ref = r#ref.as_ref();
+    let body = RequestBody { r#ref, inputs };
+    let response = repo.octocrab._post(url, Some(&body)).await?;
+    let _response = crate::io::web::handle_error_response(response).await?;
+    // Nothing interesting in the OK response, so we just return `()`.
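One detail worth noting in `workflow.rs` above: serde serializes the raw identifier `r#ref` as plain `ref`, which is the field name the workflow-dispatch REST endpoint expects. A sketch demonstrating the payload shape (the ref and version strings are arbitrary example values; `serde` with the `derive` feature and `serde_json` assumed):

```rust
use serde::Serialize;

#[derive(Serialize)]
struct RequestBody<S, T> {
    r#ref: S, // serialized as "ref": the r# prefix is dropped
    inputs: T,
}

fn main() -> serde_json::Result<()> {
    let body = RequestBody {
        r#ref: "develop",
        inputs: serde_json::json!({ "version": "2022.1.1-nightly" }),
    };
    let json = serde_json::to_string(&body)?;
    assert_eq!(json, r#"{"ref":"develop","inputs":{"version":"2022.1.1-nightly"}}"#);
    Ok(())
}
```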
+ Ok(()) +} diff --git a/build/ci_utils/src/lib.rs b/build/ci_utils/src/lib.rs index 5ccf43d4711c..b5873639137e 100644 --- a/build/ci_utils/src/lib.rs +++ b/build/ci_utils/src/lib.rs @@ -16,6 +16,7 @@ #![feature(io_error_other)] #![feature(string_remove_matches)] #![feature(once_cell)] +#![feature(const_deref)] #![feature(duration_constants)] #![feature(const_trait_impl)] #![feature(is_some_with)] @@ -40,7 +41,6 @@ // ============== pub mod actions; -pub mod anyhow; pub mod archive; pub mod buffer; pub mod cache; @@ -70,29 +70,12 @@ pub mod serde; pub mod prelude { + pub use enso_build_base::prelude::*; - pub type Result = anyhow::Result; - pub use anyhow::anyhow; - pub use anyhow::bail; - pub use anyhow::ensure; - pub use anyhow::Context as _; pub use async_trait::async_trait; pub use bytes::Bytes; pub use derivative::Derivative; pub use derive_more::Display; - pub use fn_error_context::context; - pub use futures_util::future::BoxFuture; - pub use futures_util::select; - pub use futures_util::stream::BoxStream; - pub use futures_util::try_join; - pub use futures_util::AsyncWrite; - pub use futures_util::FutureExt as _; - pub use futures_util::Stream; - pub use futures_util::StreamExt as _; - pub use futures_util::TryFuture; - pub use futures_util::TryFutureExt as _; - pub use futures_util::TryStream; - pub use futures_util::TryStreamExt as _; pub use ifmt::iformat; pub use itertools::Itertools; pub use lazy_static::lazy_static; @@ -101,63 +84,16 @@ pub mod prelude { pub use platforms::target::Arch; pub use platforms::target::OS; pub use semver::Version; - pub use serde::de::DeserializeOwned; - pub use serde::Deserialize; - pub use serde::Serialize; pub use shrinkwraprs::Shrinkwrap; - pub use snafu::Snafu; - pub use std::borrow::Borrow; - pub use std::borrow::BorrowMut; - pub use std::borrow::Cow; - pub use std::collections::BTreeMap; - pub use std::collections::BTreeSet; - pub use std::collections::HashMap; - pub use std::collections::HashSet; - pub use std::default::default; - pub use std::ffi::OsStr; - pub use std::ffi::OsString; - pub use std::fmt::Debug; - pub use std::fmt::Display; - pub use std::fmt::Formatter; - pub use std::future::ready; - pub use std::future::Future; - pub use std::hash::Hash; - pub use std::io::Read; - pub use std::io::Seek; - pub use std::iter::once; - pub use std::iter::FromIterator; - pub use std::marker::PhantomData; - pub use std::ops::Deref; - pub use std::ops::DerefMut; - pub use std::ops::Range; - pub use std::path::Path; - pub use std::path::PathBuf; - pub use std::pin::pin; - pub use std::pin::Pin; - pub use std::sync::Arc; pub use tokio::io::AsyncWriteExt as _; - pub use tracing::debug; - pub use tracing::debug_span; - pub use tracing::error; - pub use tracing::error_span; - pub use tracing::info; - pub use tracing::info_span; - pub use tracing::instrument; - pub use tracing::span; - pub use tracing::trace; - pub use tracing::trace_span; - pub use tracing::warn; - pub use tracing::warn_span; - pub use tracing::Instrument; pub use url::Url; pub use uuid::Uuid; pub use crate::EMPTY_REQUEST_BODY; - pub use crate::anyhow::ResultExt; - pub use crate::env::Variable as EnvironmentVariable; - pub use crate::extensions::str::StrLikeExt; - pub use crate::github::RepoPointer; + pub use crate::extensions::output::OutputExt as _; + pub use crate::github::release::IsRelease; + pub use crate::github::repo::IsRepo; pub use crate::goodie::Goodie; pub use crate::log::setup_logging; pub use crate::os::target::TARGET_ARCH; @@ -169,19 +105,13 @@ pub mod prelude { pub use 
crate::program::Program; pub use crate::program::Shell; + pub use crate::cache::goodie::GoodieExt as _; pub use crate::env::new::RawVariable as _; pub use crate::env::new::TypedVariable as _; pub use crate::extensions::clap::ArgExt as _; pub use crate::extensions::command::CommandExt as _; - pub use crate::extensions::from_string::FromString; - pub use crate::extensions::future::FutureExt as _; - pub use crate::extensions::future::TryFutureExt as _; - pub use crate::extensions::iterator::IteratorExt; - pub use crate::extensions::iterator::TryIteratorExt; - pub use crate::extensions::output::OutputExt as _; - pub use crate::extensions::path::PathExt as _; - pub use crate::extensions::result::ResultExt as _; + pub use crate::github::release::IsReleaseExt as _; pub use crate::program::command::provider::CommandProviderExt as _; pub use crate::program::version::IsVersion as _; pub use crate::program::ProgramExt as _; diff --git a/build/ci_utils/src/models/config.rs b/build/ci_utils/src/models/config.rs index d3442f4fcf9d..d891fc7031e8 100644 --- a/build/ci_utils/src/models/config.rs +++ b/build/ci_utils/src/models/config.rs @@ -2,8 +2,9 @@ use crate::prelude::*; -use crate::github::OrganizationPointer; -use crate::github::RepoPointer; +use crate::github::repo::IsRepo; +use crate::github::IsOrganization; +use crate::github::Repo; use crate::serde::regex_vec; use crate::serde::single_or_sequence; @@ -42,7 +43,7 @@ impl RepoConfig { #[serde(rename_all = "snake_case")] pub enum RunnerLocation { Organization(OrganizationContext), - Repository(RepoContext), + Repository(Repo), } impl RunnerLocation { @@ -55,7 +56,7 @@ impl RunnerLocation { RunnerLocation::Organization(org) => org.generate_runner_registration_token(octocrab).await, RunnerLocation::Repository(repo) => - repo.generate_runner_registration_token(octocrab).await, + repo.handle(octocrab).generate_runner_registration_token().await, } } @@ -74,54 +75,12 @@ pub struct OrganizationContext { pub name: String, } -impl OrganizationPointer for OrganizationContext { +impl IsOrganization for OrganizationContext { fn name(&self) -> &str { &self.name } } -/// Data denoting a specific GitHub repository. -#[derive(Clone, Debug, PartialEq, Eq, Deserialize, Serialize)] -pub struct RepoContext { - /// Owner - an organization's or user's name. - pub owner: String, - pub name: String, -} - -impl RepoPointer for RepoContext { - fn owner(&self) -> &str { - &self.owner - } - - fn name(&self) -> &str { - &self.name - } -} - -impl Display for RepoContext { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - write!(f, "{}/{}", self.owner, self.name) - } -} - -/// Parse from strings in format "owner/name". Opposite of `Display`. -impl std::str::FromStr for RepoContext { - type Err = anyhow::Error; - - fn from_str(s: &str) -> std::result::Result { - match s.split('/').collect_vec().as_slice() { - [owner, name] => Ok(Self { owner: owner.to_string(), name: name.to_string() }), - slice => bail!("Failed to parse string '{}': Splitting by '/' should yield exactly 2 pieces, found: {}", s, slice.len()), - } - } -} - -impl RepoContext { - pub fn new(owner: impl Into, name: impl Into) -> Self { - Self { owner: owner.into(), name: name.into() } - } -} - /// Description of the runners deployment for a specific GitHub repository. #[allow(clippy::large_enum_variant)] // We don't mind. 
#[derive(Clone, Debug, Deserialize, Serialize)] diff --git a/build/ci_utils/src/path/trie.rs b/build/ci_utils/src/path/trie.rs index 91097136d8c4..54fcfd1fb9b5 100644 --- a/build/ci_utils/src/path/trie.rs +++ b/build/ci_utils/src/path/trie.rs @@ -2,13 +2,18 @@ use crate::prelude::*; +/// A trie data structure, where each node represents a single fs path component. +/// +/// As such, a trie defines a set of fs paths (each being defined by a path within the trie). #[derive(Clone, Debug, Default, PartialEq, Eq)] pub struct Trie<'a> { pub children: HashMap, Trie<'a>>, + /// Number of paths that end in this node. pub count: usize, } impl<'a> Trie<'a> { + /// Insert a path into the trie. pub fn insert(&mut self, path: &'a Path) { let mut current = self; for component in path.components() { @@ -17,6 +22,7 @@ impl<'a> Trie<'a> { current.count += 1; } + /// Is this node a leaf? pub fn is_leaf(&self) -> bool { self.children.is_empty() } diff --git a/build/ci_utils/src/program/command.rs b/build/ci_utils/src/program/command.rs index 59c52b84bfea..011531b4d869 100644 --- a/build/ci_utils/src/program/command.rs +++ b/build/ci_utils/src/program/command.rs @@ -232,6 +232,15 @@ pub trait IsCommandWrapper { // let fut = self.borrow_mut_command().output(); // async move { fut.await.anyhow_err() }.boxed() // } + + + + fn with_current_dir(self, dir: impl AsRef) -> Self + where Self: Sized { + let mut this = self; + this.current_dir(dir); + this + } } impl> IsCommandWrapper for T { @@ -434,12 +443,6 @@ impl Command { this.stderr(stderr); this } - - pub fn with_current_dir(self, dir: impl AsRef) -> Self { - let mut this = self; - this.current_dir(dir); - this - } } pub fn spawn_log_processor( diff --git a/build/ci_utils/src/programs/git.rs b/build/ci_utils/src/programs/git.rs index ecbf1a10d2b5..db487ddaffb7 100644 --- a/build/ci_utils/src/programs/git.rs +++ b/build/ci_utils/src/programs/git.rs @@ -13,37 +13,79 @@ pub use clean::Clean; +#[derive(Clone, Copy, Debug)] +pub struct Git; + +impl Program for Git { + type Command = GitCommand; + fn executable_name(&self) -> &'static str { + "git" + } +} + +impl Git { + /// Create a new, empty git repository in the given directory. + pub fn init(&self, path: impl AsRef) -> Result { + let mut cmd = self.cmd()?; + cmd.arg(Command::Init); + cmd.current_dir(path); + Ok(cmd) + } +} + +/// The wrapper over `Git` program invocation context. +/// +/// It is stateful (knowing both repository root and current directory locations), as they both are +/// needed to properly handle relative paths. #[derive(Clone, Debug, PartialEq, Eq, Hash)] -pub struct Git { +pub struct Context { /// The path to the repository root above the `working_dir`. /// /// Many paths that git returns are relative to the repository root. - repo_path: PathBuf, + repository_root: PathBuf, /// Directory in which commands will be invoked. /// It might not be the repository root and it makes difference for many commands. - working_dir: PathBuf, + working_dir: PathBuf, } -impl Program for Git { - type Command = GitCommand; - fn executable_name(&self) -> &'static str { - "git" +impl Context { + /// Initialize a new command invoking git. + pub fn cmd(&self) -> Result { + Ok(Git.cmd()?.with_current_dir(&self.working_dir)) } - fn current_directory(&self) -> Option { - Some(self.working_dir.clone()) + + /// Create a wrapper with explicitly set repository root and working directory. + /// + /// The caller is responsible for ensuring that the `working_dir` is a subdirectory of the + /// `repository_root`. 
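The `path/trie.rs` additions above document a trie keyed by `std::path::Component`, with `count` recording how many inserted paths terminate at a node. A self-contained sketch of that structure and its two documented operations:

```rust
use std::collections::HashMap;
use std::path::{Component, Path};

#[derive(Default)]
struct Trie<'a> {
    /// Each child maps one path component to the subtree below it.
    children: HashMap<Component<'a>, Trie<'a>>,
    /// Number of inserted paths that end at this node.
    count: usize,
}

impl<'a> Trie<'a> {
    fn insert(&mut self, path: &'a Path) {
        let mut current = self;
        for component in path.components() {
            current = current.children.entry(component).or_default();
        }
        current.count += 1;
    }

    fn is_leaf(&self) -> bool {
        self.children.is_empty()
    }
}

fn main() {
    let mut trie = Trie::default();
    trie.insert(Path::new("target/debug"));
    trie.insert(Path::new("target/release"));
    // "target" is shared: one node with two subtrees under it.
    let target = &trie.children[&Component::Normal("target".as_ref())];
    assert_eq!(target.children.len(), 2);
    assert!(!target.is_leaf());
}
```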
+ pub async fn new_unchecked( + repository_root: impl AsRef, + working_dir: impl AsRef, + ) -> Self { + Self { + repository_root: repository_root.as_ref().to_path_buf(), + working_dir: working_dir.as_ref().to_path_buf(), + } } -} -impl Git { - pub async fn new(repo_path: impl Into) -> Result { - let repo_path = repo_path.into(); - let temp_git = Git { working_dir: repo_path.clone(), repo_path }; - let repo_path = temp_git.repository_root().await?; - Ok(Git { repo_path, working_dir: temp_git.working_dir }) + /// Create a `Git` invocation context within a given directory. + /// + /// The `working_dir` is the directory in which git commands will be invoked. It is expected to + /// be a part of some git repository. + pub async fn new(working_directory: impl Into) -> Result { + let working_directory = working_directory.into(); + // Faux `Git` instance to get the repository root. + // TODO: should be nicer, likely instance should be separate from program. + let temp_git = Context { + working_dir: working_directory.clone(), + repository_root: working_directory, + }; + let repo_root = temp_git.repository_root().await?; + Ok(Context { repository_root: repo_root, working_dir: temp_git.working_dir }) } pub async fn new_current() -> Result { - Git::new(crate::env::current_dir()?).await + Context::new(crate::env::current_dir()?).await } pub async fn head_hash(&self) -> Result { @@ -59,7 +101,7 @@ impl Git { /// List of files that are different than the compared commit. #[context("Failed to list files that are different than {}.", compare_against.as_ref())] pub async fn diff_against(&self, compare_against: impl AsRef) -> Result> { - let root = self.repo_path.as_path(); + let root = self.repository_root.as_path(); Ok(self .cmd()? .args(["diff", "--name-only", compare_against.as_ref()]) @@ -100,13 +142,20 @@ impl GitCommand { #[derive(Clone, Copy, Debug)] pub enum Command { + /// Remove untracked files from the working tree. Clean, + /// Show changes between commits, commit and working tree, etc. + Diff, + /// Create an empty Git repository or reinitialize an existing one. + Init, } impl AsRef for Command { fn as_ref(&self) -> &OsStr { match self { Command::Clean => OsStr::new("clean"), + Command::Diff => OsStr::new("diff"), + Command::Init => OsStr::new("init"), } } } @@ -118,7 +167,7 @@ mod tests { #[tokio::test] #[ignore] async fn repo_root() -> Result { - let git = Git::new(".").await?; + let git = Context::new(".").await?; let diff = git.repository_root().await?; println!("{:?}", diff); Ok(()) @@ -127,7 +176,7 @@ mod tests { #[tokio::test] #[ignore] async fn call_diff() -> Result { - let git = Git::new(".").await?; + let git = Context::new(".").await?; let diff = git.diff_against("origin/develop").await?; println!("{:?}", diff); Ok(()) diff --git a/build/ci_utils/src/programs/git/clean.rs b/build/ci_utils/src/programs/git/clean.rs index ed71856599f4..d3fc4db1640d 100644 --- a/build/ci_utils/src/programs/git/clean.rs +++ b/build/ci_utils/src/programs/git/clean.rs @@ -2,7 +2,7 @@ use crate::prelude::*; use crate::path::trie::Trie; use crate::program::command::Manipulator; -use crate::programs::Git; +use crate::programs::git; use std::path::Component; @@ -14,7 +14,7 @@ pub struct DirectoryToClear<'a> { pub trie: &'a Trie<'a>, } -/// Run ``git clean -xfd`` but preserve the given paths. +/// Run `git clean -xfd` but preserve the given paths. /// /// This may involve multiple git clean calls on different subtrees. /// Given paths can be either absolute or relative. 
If relative, they are relative to the @@ -22,6 +22,7 @@ pub struct DirectoryToClear<'a> { pub async fn clean_except_for( repo_root: impl AsRef, paths: impl IntoIterator>, + dry_run: bool, ) -> Result { let root = repo_root.as_ref().canonicalize()?; @@ -40,26 +41,16 @@ pub async fn clean_except_for( }) .collect_vec(); - let trie = Trie::from_iter(relative_exclusions.iter()); - - let mut directories_to_clear = vec![DirectoryToClear { prefix: vec![], trie: &trie }]; - while let Some(DirectoryToClear { prefix, trie }) = directories_to_clear.pop() { - let current_dir = root.join_iter(&prefix); - let exclusions_in_current_dir = - trie.children.keys().map(|c| Clean::Exclude(c.as_os_str().to_string_lossy().into())); - let git = Git::new(¤t_dir).await?; - git.cmd()?.clean().apply_iter(exclusions_in_current_dir).run_ok().await?; - - for (child_name, child_trie) in trie.children.iter() { - if !child_trie.is_leaf() { - let mut prefix = prefix.clone(); - prefix.push(*child_name); - directories_to_clear.push(DirectoryToClear { prefix, trie: child_trie }); - } - } - } + let exclusions = relative_exclusions.into_iter().map(Clean::exclude).collect_vec(); - Ok(()) + git::Context::new(root) + .await? + .cmd()? + .nice_clean() + .apply_iter(exclusions) + .apply_opt(dry_run.then_some(&Clean::DryRun)) + .run_ok() + .await } #[derive(Clone, Debug)] @@ -97,6 +88,17 @@ pub enum Clean { OnlyIgnored, } +impl Clean { + pub fn exclude(path: impl AsRef) -> Self { + let mut ret = String::new(); + for component in path.as_ref().components() { + ret.push('/'); + ret.push_str(&component.as_os_str().to_string_lossy()); + } + Clean::Exclude(ret) + } +} + impl Manipulator for Clean { fn apply(&self, command: &mut C) { // fn apply<'a, C: IsCommandWrapper + ?Sized>(&self, c: &'a mut C) -> &'a mut C { @@ -112,3 +114,31 @@ impl Manipulator for Clean { command.args(args); } } + +#[cfg(test)] +mod tests { + use super::*; + use crate::programs::Git; + + #[tokio::test] + async fn test_cleaning() -> Result { + setup_logging()?; + let dir = PathBuf::from(r"C:\temp\test_cleaning"); + crate::fs::tokio::reset_dir(&dir).await?; + Git.init(&dir)?.run_ok().await?; + + let foo = dir.join("foo"); + let foo_target = foo.join("target"); + crate::fs::tokio::write(&foo_target, "target in foo").await?; + + let target = dir.join("target"); + let target_foo = target.join("foo"); + crate::fs::tokio::write(&target_foo, "foo in target").await?; + + clean_except_for(&dir, vec!["target/foo"], false).await?; + + + + Ok(()) + } +} diff --git a/build/ci_utils/src/programs/rustup.rs b/build/ci_utils/src/programs/rustup.rs index 713a0a7f11d0..00d8833abf77 100644 --- a/build/ci_utils/src/programs/rustup.rs +++ b/build/ci_utils/src/programs/rustup.rs @@ -3,17 +3,14 @@ use crate::prelude::*; pub mod env { + crate::define_env_var! { /// The Rust toolchain version which was selected by Rustup. /// /// If set, any cargo invocation will follow this version. Otherwise, Rustup will deduce /// toolchain to be used and set up this variable for the spawned process. /// /// Example value: `"nightly-2022-01-20-x86_64-pc-windows-msvc"`. 
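`Clean::exclude` in the `git/clean.rs` diff above converts a relative path into an exclusion pattern by re-joining its components with `/` and prefixing a leading `/`; in gitignore-style pattern syntax, that leading slash anchors the pattern to the directory `git clean` runs in, so `target/foo` only matches at the root. A sketch of just that conversion:

```rust
use std::path::Path;

/// Build a root-anchored exclusion pattern from a relative path.
fn exclude_pattern(path: impl AsRef<Path>) -> String {
    let mut ret = String::new();
    for component in path.as_ref().components() {
        ret.push('/');
        ret.push_str(&component.as_os_str().to_string_lossy());
    }
    ret
}

fn main() {
    assert_eq!(exclude_pattern("target/foo"), "/target/foo");
    // The result would then be passed along as `git clean -xfd -e /target/foo`.
}
```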
- #[derive(Clone, Copy, Debug)] - pub struct Toolchain; - - impl crate::env::Variable for Toolchain { - const NAME: &'static str = "RUSTUP_TOOLCHAIN"; + RUSTUP_TOOLCHAIN, String; } } diff --git a/build/ci_utils/src/programs/seven_zip.rs b/build/ci_utils/src/programs/seven_zip.rs index f8bc4cf3dd44..5d481afd906c 100644 --- a/build/ci_utils/src/programs/seven_zip.rs +++ b/build/ci_utils/src/programs/seven_zip.rs @@ -1,7 +1,5 @@ use crate::prelude::*; -use snafu::Snafu; - #[derive(Clone, Copy, Debug)] @@ -26,48 +24,29 @@ impl Program for SevenZip { vec![] } - fn handle_exit_status(status: std::process::ExitStatus) -> anyhow::Result<()> { + fn handle_exit_status(status: std::process::ExitStatus) -> Result { if status.success() { Ok(()) } else if let Some(code) = status.code() { - Err(ExecutionError::from_exit_code(code).into()) + error_from_exit_code(code) } else { - Err(ExecutionError::Unknown.into()) + bail!("Unknown execution error.") } } } -// Cf https://7zip.bugaco.com/7zip/MANUAL/cmdline/exit_codes.htm -#[derive(Snafu, Copy, Clone, Debug, Ord, PartialOrd, Eq, PartialEq)] -pub enum ExecutionError { - #[snafu(display( - "Warning (Non fatal error(s)). For example, one or more files were locked by some \ - other application, so they were not compressed." - ))] - Warning, - #[snafu(display("Fatal error"))] - Fatal, - #[snafu(display("Command line error"))] - CommandLine, - #[snafu(display("Not enough memory for operation"))] - NotEnoughMemory, - #[snafu(display("User stopped the process"))] - UserStopped, - #[snafu(display("Unrecognized error code"))] - Unknown, -} - -impl ExecutionError { - fn from_exit_code(code: i32) -> Self { - match code { - 1 => Self::Warning, - 2 => Self::Fatal, - 7 => Self::CommandLine, - 8 => Self::NotEnoughMemory, - 255 => Self::UserStopped, - _ => Self::Unknown, - } - } +pub fn error_from_exit_code(code: i32) -> anyhow::Result<()> { + let message = match code { + 1 => + "Warning (Non fatal error(s)). For example, one or more files were locked by some \ + other application, so they were not compressed.", + 2 => "Fatal error.", + 7 => "Command line error.", + 8 => "Not enough memory for operation.", + 255 => "User stopped the process.", + _ => "Unrecognized error code.", + }; + bail!(message); } impl SevenZip { diff --git a/build/ci_utils/src/programs/tar.rs b/build/ci_utils/src/programs/tar.rs index 598fd6927a53..f57d65c79f73 100644 --- a/build/ci_utils/src/programs/tar.rs +++ b/build/ci_utils/src/programs/tar.rs @@ -266,8 +266,8 @@ impl Tar { #[cfg(test)] pub mod tests { use super::*; + use crate::archive::compress_directory; use crate::archive::extract_to; - use crate::archive::pack_directory_contents; use crate::log::setup_logging; #[test] @@ -296,7 +296,7 @@ pub mod tests { let linked_temp = archive_temp.path().join("linked"); symlink::symlink_dir(temp.path(), &linked_temp)?; - pack_directory_contents(&archive_path, &linked_temp).await?; + compress_directory(&archive_path, &linked_temp).await?; assert!(archive_path.exists()); assert!(archive_path.metadata()?.len() > 0); diff --git a/build/ci_utils/src/programs/vswhere.rs b/build/ci_utils/src/programs/vswhere.rs index 865b1e527dc3..26a41cbde5eb 100644 --- a/build/ci_utils/src/programs/vswhere.rs +++ b/build/ci_utils/src/programs/vswhere.rs @@ -39,7 +39,9 @@ impl VsWhere { let stdout = command.run_stdout().await?; let instances = serde_json::from_str::>(&stdout)?; - Ok(instances.into_iter().next().ok_or(NoMsvcInstallation)?) 
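The `seven_zip.rs` change above replaces a `Snafu` error enum with a flat exit-code-to-message mapping (codes per the 7-Zip manual linked in the removed comment). A standalone sketch of the same shape:

```rust
use anyhow::bail;

/// Map a 7-Zip exit code to a descriptive error.
fn error_from_exit_code(code: i32) -> anyhow::Result<()> {
    let message = match code {
        1 => "Warning (non-fatal error(s)).",
        2 => "Fatal error.",
        7 => "Command line error.",
        8 => "Not enough memory for operation.",
        255 => "User stopped the process.",
        _ => "Unrecognized error code.",
    };
    bail!(message);
}

fn main() {
    let err = error_from_exit_code(255).unwrap_err();
    assert_eq!(err.to_string(), "User stopped the process.");
}
```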
+ instances.into_iter().next().with_context(|| { + format!("No Visual Studio installation found with component {}.", component) + }) } /// Looks up installation of Visual Studio that has installed @@ -54,10 +56,6 @@ impl VsWhere { } } -#[derive(Clone, Copy, Debug, Snafu)] -#[snafu(display("failed to find a MSVC installation"))] -pub struct NoMsvcInstallation; - #[derive(Serialize, Deserialize, Debug)] #[serde(rename_all = "camelCase")] pub struct InstanceInfo { @@ -139,7 +137,7 @@ impl From<&Format> for OsString { } // cf. https://docs.microsoft.com/en-us/visualstudio/install/workload-component-id-vs-community?view=vs-2019&preserve-view=true -#[derive(Clone, Copy, Debug)] +#[derive(Clone, Copy, Debug, Display)] pub enum Component { /// MSVC v142 - VS 2019 C++ x64/x86 build tools CppBuildTools, diff --git a/build/cli/Cargo.toml b/build/cli/Cargo.toml index 2f910f65beb5..1ac588850c13 100644 --- a/build/cli/Cargo.toml +++ b/build/cli/Cargo.toml @@ -10,6 +10,7 @@ byte-unit = { version = "4.0.14", features = ["serde"] } clap = { version = "3.1.5", features = ["derive", "env", "wrap_help"] } chrono = "0.4.19" derivative = "2.2.0" +enso-build-base = { path = "../base" } enso-build = { path = "../build" } enso-formatter = { path = "../enso-formatter" } futures = "0.3.17" diff --git a/build/cli/src/arg.rs b/build/cli/src/arg.rs index 8a5f3cc1ddca..eb89d7f015ab 100644 --- a/build/cli/src/arg.rs +++ b/build/cli/src/arg.rs @@ -6,9 +6,9 @@ use clap::Args; use clap::Parser; use clap::Subcommand; use derivative::Derivative; +use enso_build_base::extensions::path::display_fmt; use ide_ci::cache; -use ide_ci::extensions::path::display_fmt; -use ide_ci::models::config::RepoContext; +use ide_ci::github::Repo; use octocrab::models::RunId; @@ -38,10 +38,10 @@ pub fn default_repo_path() -> Option { enso_build::repo::deduce_repository_path().ok() } -pub fn default_repo_remote() -> RepoContext { +pub fn default_repo_remote() -> Repo { ide_ci::actions::env::GITHUB_REPOSITORY .get() - .unwrap_or_else(|_| RepoContext::from_str(DEFAULT_REMOTE_REPOSITORY_FALLBACK).unwrap()) + .unwrap_or_else(|_| Repo::from_str(DEFAULT_REMOTE_REPOSITORY_FALLBACK).unwrap()) } pub fn default_cache_path() -> Option { @@ -75,6 +75,7 @@ pub trait IsTargetSource { const SOURCE_NAME: &'static str; const PATH_NAME: &'static str; const OUTPUT_PATH_NAME: &'static str; + // const UPLOAD_ASSET_NAME: &'static str; const RUN_ID_NAME: &'static str; const RELEASE_DESIGNATOR_NAME: &'static str; const ARTIFACT_NAME_NAME: &'static str; @@ -94,6 +95,7 @@ macro_rules! source_args_hlp { const SOURCE_NAME: &'static str = concat!($prefix, "-", "source"); const PATH_NAME: &'static str = concat!($prefix, "-", "path"); const OUTPUT_PATH_NAME: &'static str = concat!($prefix, "-", "output-path"); + // const UPLOAD_ASSET_NAME: &'static str = concat!($prefix, "-", "upload-asset"); const RUN_ID_NAME: &'static str = concat!($prefix, "-", "run-id"); const RELEASE_DESIGNATOR_NAME: &'static str = concat!($prefix, "-", "release"); const ARTIFACT_NAME_NAME: &'static str = concat!($prefix, "-", "artifact-name"); @@ -156,11 +158,11 @@ pub struct Cli { /// released versions to generate a new one, or uploading release assets). /// The argument should follow the format `owner/repo_name`. #[clap(long, global = true, default_value_t = default_repo_remote(), enso_env())] - pub repo_remote: RepoContext, + pub repo_remote: Repo, /// The build kind. Affects the default version generation. 
- #[clap(long, global = true, arg_enum, default_value_t = enso_build::version::BuildKind::Dev, env = crate::BuildKind::NAME)] - pub build_kind: enso_build::version::BuildKind, + #[clap(long, global = true, arg_enum, default_value_t = enso_build::version::Kind::Dev, env = *crate::ENSO_BUILD_KIND)] + pub build_kind: enso_build::version::Kind, /// Platform to target. Currently cross-compilation is enabled only for GUI/IDE (without /// Project Manager) on platforms where Electron Builder supports this. @@ -222,6 +224,9 @@ pub struct Source { #[clap(flatten)] pub output_path: OutputPath, + // + // #[clap(name = Target::UPLOAD_ASSET_NAME, long)] + // pub upload_asset: bool, } /// Discriminator denoting how some target artifact should be obtained. diff --git a/build/cli/src/arg/git_clean.rs b/build/cli/src/arg/git_clean.rs index 5f3536a146c5..55188078015c 100644 --- a/build/cli/src/arg/git_clean.rs +++ b/build/cli/src/arg/git_clean.rs @@ -4,6 +4,9 @@ use crate::prelude::*; #[derive(Clone, Copy, Debug, Default, clap::Args)] pub struct Options { + /// Do not perform the action, just print what would be deleted. + #[clap(long)] + pub dry_run: bool, /// Clean also the build script's cache (located in the user's local application data subtree). #[clap(long)] pub cache: bool, diff --git a/build/cli/src/arg/ide.rs b/build/cli/src/arg/ide.rs index 390e508003d0..7ca6325c5bdd 100644 --- a/build/cli/src/arg/ide.rs +++ b/build/cli/src/arg/ide.rs @@ -37,7 +37,7 @@ pub enum Command { Upload { #[clap(flatten)] params: BuildInput, - #[clap(long, env = enso_build::env::ReleaseId::NAME)] + #[clap(long, env = *enso_build::env::ENSO_RELEASE_ID)] release_id: ReleaseId, }, /// Like `Build` but automatically starts the IDE. diff --git a/build/cli/src/arg/release.rs b/build/cli/src/arg/release.rs index 61ff3539bcb7..038d08e7d840 100644 --- a/build/cli/src/arg/release.rs +++ b/build/cli/src/arg/release.rs @@ -6,16 +6,21 @@ use clap::Subcommand; #[derive(Args, Clone, Debug)] -pub struct DeployToEcr { +pub struct DeployRuntime { #[clap(long, default_value = enso_build::aws::ecr::runtime::NAME, enso_env())] pub ecr_repository: String, } +#[derive(Args, Clone, Copy, Debug)] +pub struct DeployGui {} + #[derive(Subcommand, Clone, Debug)] pub enum Action { CreateDraft, /// Build the runtime image and push it to ECR. - DeployToEcr(DeployToEcr), + DeployRuntime(DeployRuntime), + /// Upload the GUI to the S3 Bucket and notify. 
+ DeployGui(DeployGui), Publish, } diff --git a/build/cli/src/bin/enso-remove-draft-releases.rs b/build/cli/src/bin/enso-remove-draft-releases.rs index 5a9b9d564db5..a24d57f85c3d 100644 --- a/build/cli/src/bin/enso-remove-draft-releases.rs +++ b/build/cli/src/bin/enso-remove-draft-releases.rs @@ -7,19 +7,19 @@ use enso_build_cli::prelude::*; use enso_build::setup_octocrab; +use ide_ci::github::Repo; use ide_ci::io::web::handle_error_response; use ide_ci::log::setup_logging; -use ide_ci::models::config::RepoContext; #[tokio::main] async fn main() -> Result { setup_logging()?; - let repo = RepoContext::from_str("enso-org/enso")?; let octo = setup_octocrab().await?; + let repo = Repo::from_str("enso-org/enso")?.handle(&octo); - let releases = repo.all_releases(&octo).await?; + let releases = repo.all_releases().await?; let draft_releases = releases.into_iter().filter(|r| r.draft); for release in draft_releases { let id = release.id; diff --git a/build/cli/src/ci_gen.rs b/build/cli/src/ci_gen.rs index 0a05434238de..c19d88c2845c 100644 --- a/build/cli/src/ci_gen.rs +++ b/build/cli/src/ci_gen.rs @@ -5,6 +5,7 @@ use crate::ci_gen::job::plain_job; use crate::ci_gen::job::plain_job_customized; use crate::ci_gen::job::RunsOn; +use enso_build::version; use ide_ci::actions::workflow::definition::checkout_repo_step; use ide_ci::actions::workflow::definition::is_non_windows_runner; use ide_ci::actions::workflow::definition::is_windows_runner; @@ -51,6 +52,8 @@ pub const TARGETED_SYSTEMS: [OS; 3] = [OS::Windows, OS::Linux, OS::MacOS]; pub const DEFAULT_BRANCH_NAME: &str = "develop"; +pub const RELEASE_CONCURRENCY_GROUP: &str = "release"; + /// Secrets set up in our organization. /// /// To manage, see: https://github.com/organizations/enso-org/settings/secrets/actions @@ -71,7 +74,6 @@ pub mod secret { pub const APPLE_NOTARIZATION_USERNAME: &str = "APPLE_NOTARIZATION_USERNAME"; pub const APPLE_NOTARIZATION_PASSWORD: &str = "APPLE_NOTARIZATION_PASSWORD"; - // === Windows Code Signing === /// Name of the GitHub Actions secret that stores path to the Windows code signing certificate /// within the runner. @@ -80,8 +82,15 @@ pub mod secret { /// Name of the GitHub Actions secret that stores password to the Windows code signing /// certificate. pub const WINDOWS_CERT_PASSWORD: &str = "MICROSOFT_CODE_SIGNING_CERT_PASSWORD"; + + // === Github Token === + /// A token created for the `enso-ci` user. + pub const CI_PRIVATE_TOKEN: &str = "CI_PRIVATE_TOKEN"; } +pub fn release_concurrency() -> Concurrency { + Concurrency::new(RELEASE_CONCURRENCY_GROUP) +} impl RunsOn for DeluxeRunner { fn runs_on(&self) -> Vec { @@ -247,23 +256,24 @@ pub fn nightly() -> Result { ..default() }; - let linux_only = OS::Linux; - - let concurrency_group = "release"; let mut workflow = Workflow { on, name: "Nightly Release".into(), - concurrency: Some(Concurrency::new(concurrency_group)), + concurrency: Some(release_concurrency()), ..default() }; - let prepare_job_id = workflow.add::(linux_only); - let build_wasm_job_id = workflow.add::(linux_only); + add_release_steps(&mut workflow, version::Kind::Nightly)?; + Ok(workflow) +} + +fn add_release_steps(workflow: &mut Workflow, kind: version::Kind) -> Result { + let prepare_job_id = workflow.add::(PRIMARY_OS); + let build_wasm_job_id = workflow.add::(PRIMARY_OS); let mut packaging_job_ids = vec![]; // Assumed, because Linux is necessary to deploy ECR runtime image. 
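// Aside: `add_release_steps` is the job graph shared by the nightly and
// release-candidate workflows; the assertion below guards its assumption that
// a Linux runner is among the targeted systems, since the ECR runtime
// deployment and the GUI upload jobs are only ever scheduled on Linux.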
assert!(TARGETED_SYSTEMS.contains(&OS::Linux)); - for os in TARGETED_SYSTEMS { let backend_job_id = workflow.add_dependent::(os, [&prepare_job_id]); let build_ide_job_id = workflow.add_dependent::(os, [ @@ -271,12 +281,20 @@ pub fn nightly() -> Result { &backend_job_id, &build_wasm_job_id, ]); - packaging_job_ids.push(build_ide_job_id); + packaging_job_ids.push(build_ide_job_id.clone()); + // Deploying our release to cloud needs to be done only once. + // We could do this on any platform, but we choose Linux, because it's most easily + // available and performant. if os == OS::Linux { - let upload_runtime_job_id = workflow - .add_dependent::(os, [&prepare_job_id, &backend_job_id]); + let runtime_requirements = [&prepare_job_id, &backend_job_id]; + let upload_runtime_job_id = + workflow.add_dependent::(os, runtime_requirements); packaging_job_ids.push(upload_runtime_job_id); + + let gui_requirements = [build_ide_job_id]; + let deploy_gui_job_id = workflow.add_dependent::(os, gui_requirements); + packaging_job_ids.push(deploy_gui_job_id); } } @@ -285,11 +303,28 @@ pub fn nightly() -> Result { packaging_job_ids }; - let _publish_job_id = workflow.add_dependent::(linux_only, publish_deps); - let global_env = [("ENSO_BUILD_KIND", "nightly"), ("RUST_BACKTRACE", "full")]; + + let _publish_job_id = workflow.add_dependent::(PRIMARY_OS, publish_deps); + + let global_env = [(*crate::ENSO_BUILD_KIND, kind.as_ref()), ("RUST_BACKTRACE", "full")]; for (var_name, value) in global_env { workflow.env(var_name, value); } + + Ok(()) +} + +pub fn release_candidate() -> Result { + let on = Event { workflow_dispatch: Some(default()), ..default() }; + + let mut workflow = Workflow { + on, + name: "Release Candidate".into(), + concurrency: Some(release_concurrency()), + ..default() + }; + + add_release_steps(&mut workflow, version::Kind::Rc)?; Ok(workflow) } @@ -378,5 +413,6 @@ pub fn generate(repo_root: &enso_build::paths::generated::RepoRootGithubWorkflow repo_root.scala_new_yml.write_as_yaml(&backend()?)?; repo_root.gui_yml.write_as_yaml(&gui()?)?; repo_root.benchmark_yml.write_as_yaml(&benchmark()?)?; + repo_root.release_yml.write_as_yaml(&release_candidate()?)?; Ok(()) } diff --git a/build/cli/src/ci_gen/job.rs b/build/cli/src/ci_gen/job.rs index e959c25c2352..730464759de5 100644 --- a/build/cli/src/ci_gen/job.rs +++ b/build/cli/src/ci_gen/job.rs @@ -149,11 +149,12 @@ impl JobArchetype for UploadBackend { } #[derive(Clone, Copy, Debug)] -pub struct UploadRuntimeToEcr; -impl JobArchetype for UploadRuntimeToEcr { +pub struct DeployRuntime; +impl JobArchetype for DeployRuntime { fn job(os: OS) -> Job { - plain_job_customized(&os, "Upload Runtime to ECR", "release deploy-to-ecr", |step| { + plain_job_customized(&os, "Upload Runtime to ECR", "release deploy-runtime", |step| { let step = step + .with_secret_exposed_as("CI_PRIVATE_TOKEN", "GITHUB_TOKEN") .with_env("ENSO_BUILD_ECR_REPOSITORY", enso_build::aws::ecr::runtime::NAME) .with_secret_exposed_as(secret::ECR_PUSH_RUNTIME_ACCESS_KEY_ID, "AWS_ACCESS_KEY_ID") .with_secret_exposed_as( @@ -166,6 +167,23 @@ impl JobArchetype for UploadRuntimeToEcr { } } +#[derive(Clone, Copy, Debug)] +pub struct DeployGui; +impl JobArchetype for DeployGui { + fn job(os: OS) -> Job { + plain_job_customized(&os, "Upload GUI to S3", "release deploy-gui", |step| { + let step = step + .with_secret_exposed_as("CI_PRIVATE_TOKEN", "GITHUB_TOKEN") + .with_secret_exposed_as(secret::ARTEFACT_S3_ACCESS_KEY_ID, "AWS_ACCESS_KEY_ID") + .with_secret_exposed_as( + 
secret::ARTEFACT_S3_SECRET_ACCESS_KEY, + "AWS_SECRET_ACCESS_KEY", + ); + vec![step] + }) + } +} + pub fn expose_os_specific_signing_secret(os: OS, step: Step) -> Step { match os { OS::Windows => step @@ -178,11 +196,23 @@ pub fn expose_os_specific_signing_secret(os: OS, step: Step) -> Step { &enso_build::ide::web::env::WIN_CSC_KEY_PASSWORD, ), OS::MacOS => step - .with_secret_exposed_as(secret::APPLE_CODE_SIGNING_CERT, "CSC_LINK") - .with_secret_exposed_as(secret::APPLE_CODE_SIGNING_CERT_PASSWORD, "CSC_KEY_PASSWORD") - .with_secret_exposed_as(secret::APPLE_NOTARIZATION_USERNAME, "APPLEID") - .with_secret_exposed_as(secret::APPLE_NOTARIZATION_PASSWORD, "APPLEIDPASS") - .with_env("CSC_IDENTITY_AUTO_DISCOVERY", "true"), + .with_secret_exposed_as( + secret::APPLE_CODE_SIGNING_CERT, + &enso_build::ide::web::env::CSC_LINK, + ) + .with_secret_exposed_as( + secret::APPLE_CODE_SIGNING_CERT_PASSWORD, + &enso_build::ide::web::env::CSC_KEY_PASSWORD, + ) + .with_secret_exposed_as( + secret::APPLE_NOTARIZATION_USERNAME, + &enso_build::ide::web::env::APPLEID, + ) + .with_secret_exposed_as( + secret::APPLE_NOTARIZATION_PASSWORD, + &enso_build::ide::web::env::APPLEIDPASS, + ) + .with_env(&enso_build::ide::web::env::CSC_IDENTITY_AUTO_DISCOVERY, "true"), _ => step, } } diff --git a/build/cli/src/lib.rs b/build/cli/src/lib.rs index 252e1e4abc86..e081bd925b38 100644 --- a/build/cli/src/lib.rs +++ b/build/cli/src/lib.rs @@ -33,8 +33,6 @@ pub mod prelude { use crate::prelude::*; use std::future::join; -use ide_ci::env::Variable; - use crate::arg::java_gen; use crate::arg::release::Action; use crate::arg::BuildJob; @@ -80,10 +78,9 @@ use enso_build::source::WithDestination; use futures_util::future::try_join; use ide_ci::actions::workflow::is_in_env; use ide_ci::cache::Cache; +use ide_ci::define_env_var; use ide_ci::fs::remove_if_exists; -use ide_ci::github::release::upload_asset; use ide_ci::global; -use ide_ci::log::setup_logging; use ide_ci::ok_ready_boxed; use ide_ci::programs::cargo; use ide_ci::programs::git::clean; @@ -99,11 +96,8 @@ fn resolve_artifact_name(input: Option, project: &impl IsTarget) -> Stri input.unwrap_or_else(|| project.artifact_name()) } -#[derive(Clone, Copy, Debug)] -pub struct BuildKind; -impl Variable for BuildKind { - const NAME: &'static str = "ENSO_BUILD_KIND"; - type Value = enso_build::version::BuildKind; +define_env_var! { + ENSO_BUILD_KIND, enso_build::version::Kind; } /// The basic, common information available in this application. @@ -125,22 +119,18 @@ impl Processor { /// Setup common build environment information based on command line input and local /// environment. 
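// Aside: `define_env_var!` (from `ide_ci`) replaces the hand-written
// `Variable` impl deleted above with a single declarative item. Its real
// expansion is not shown in this patch; the sketch below is only a guess at
// the general shape, with hypothetical names:

macro_rules! define_env_var_sketch {
    ($name:ident, $ty:ty;) => {
        /// Typed accessor for the environment variable of the same name.
        #[derive(Clone, Copy, Debug)]
        pub struct $name;

        impl $name {
            pub const NAME: &'static str = stringify!($name);

            /// Read the variable from the process environment and parse it.
            pub fn get() -> anyhow::Result<$ty> {
                let raw = std::env::var(Self::NAME)?;
                raw.parse().map_err(|_| anyhow::anyhow!("Cannot parse {}.", Self::NAME))
            }
        }
    };
}

define_env_var_sketch! { ENSO_BUILD_KIND_SKETCH, String; }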
pub async fn new(cli: &Cli) -> Result { - // let build_kind = match &cli.target { - // Target::Release(release) => release.kind, - // _ => enso_build::version::BuildKind::Dev, - // }; let absolute_repo_path = cli.repo_path.absolutize()?; let octocrab = setup_octocrab().await?; - let versions = enso_build::version::deduce_versions( - &octocrab, + let remote_repo = cli.repo_remote.handle(&octocrab); + let versions = enso_build::version::deduce_or_generate( + Ok(&remote_repo), cli.build_kind, - Ok(&cli.repo_remote), &absolute_repo_path, ) .await?; let mut triple = TargetTriple::new(versions); triple.os = cli.target_os; - triple.versions.publish()?; + triple.versions.publish().await?; let context = BuildContext { inner: project::Context { cache: Cache::new(&cli.cache_path).await?, @@ -215,16 +205,8 @@ impl Processor { let release = self.resolve_release_designator(designator); release .and_then_sync(move |release| { - Ok(ReleaseSource { - repository, - asset_id: target - .find_asset(release.assets) - .context(format!( - "Failed to find a relevant asset in the release '{}'.", - release.tag_name - ))? - .id, - }) + let asset = target.find_asset(&release)?; + Ok(ReleaseSource { repository, asset_id: asset.id }) }) .boxed() } @@ -487,13 +469,15 @@ impl Processor { arg::ide::Command::Build { params } => self.build_ide(params).void_ok().boxed(), arg::ide::Command::Upload { params, release_id } => { let build_job = self.build_ide(params); - let remote_repo = self.remote_repo.clone(); - let client = self.octocrab.client.clone(); + let release = ide_ci::github::release::ReleaseHandle::new( + &self.octocrab, + self.remote_repo.clone(), + release_id, + ); async move { let artifacts = build_job.await?; - upload_asset(&remote_repo, &client, release_id, &artifacts.image).await?; - upload_asset(&remote_repo, &client, release_id, &artifacts.image_checksum) - .await?; + release.upload_asset_file(&artifacts.image).await?; + release.upload_asset_file(&artifacts.image_checksum).await?; Ok(()) } .boxed() @@ -764,21 +748,32 @@ impl WatchResolvable for Gui { } } -#[tracing::instrument(err)] -pub async fn main_internal(config: enso_build::config::Config) -> Result { - setup_logging()?; +#[tracing::instrument(err, skip(config))] +pub async fn main_internal(config: Option) -> Result { + trace!("Starting the build process."); + let config = config.unwrap_or_else(|| { + warn!("No config provided, using default config."); + enso_build::config::Config::default() + }); + trace!("Creating the build context."); // Setup that affects Cli parser construction. if let Some(wasm_size_limit) = config.wasm_size_limit { crate::arg::wasm::initialize_default_wasm_size_limit(wasm_size_limit)?; } + debug!("Initial configuration for the CLI driver: {config:#?}"); + let cli = Cli::parse(); debug!("Parsed CLI arguments: {cli:#?}"); if !cli.skip_version_check { - config.check_programs().await?; + // Let's be helpful! + let error_message = "Program requirements were not fulfilled. Please do one of the \ + following:\n * Install the tools in the required versions.\n * Update the requirements in \ + `build-config.yaml`.\n * Run the build with `--skip-version-check` flag."; + config.check_programs().await.context(error_message)?; } // TRANSITION: Previous Engine CI job used to clone these both repositories side-by-side. 
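// Aside: the `.context(error_message)` call added above is anyhow's idiomatic
// way to layer an actionable, user-facing hint over whatever low-level error
// the check produced. A minimal sketch of the pattern (the function bodies are
// illustrative):

use anyhow::{bail, Context, Result};

fn check_program_version() -> Result<()> {
    bail!("cargo 1.63.0 found, but 1.65.0 or newer is required")
}

fn check_programs() -> Result<()> {
    // The hint is layered on top of the underlying error, so both are shown.
    check_program_version()
        .context("Program requirements were not fulfilled. Run with `--skip-version-check` to bypass.")
}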
@@ -800,15 +795,17 @@ pub async fn main_internal(config: enso_build::config::Config) -> Result { Target::Ide(ide) => ctx.handle_ide(ide).await?, // TODO: consider if out-of-source ./dist should be removed Target::GitClean(options) => { + let crate::arg::git_clean::Options { dry_run, cache, build_script } = options; let mut exclusions = vec![".idea"]; - if !options.build_script { + if !build_script { exclusions.push("target/enso-build"); } - let git_clean = clean::clean_except_for(&ctx.repo_root, exclusions); + let git_clean = clean::clean_except_for(&ctx.repo_root, exclusions, dry_run); let clean_cache = async { - if options.cache { - ide_ci::fs::tokio::remove_dir_if_exists(ctx.cache.path()).await?; + if cache { + ide_ci::fs::tokio::perhaps_remove_dir_if_exists(dry_run, ctx.cache.path()) + .await?; } Result::Ok(()) }; @@ -842,24 +839,26 @@ pub async fn main_internal(config: enso_build::config::Config) -> Result { let prettier = prettier::write(&ctx.repo_root); let our_formatter = enso_formatter::process_path(&ctx.repo_root, enso_formatter::Action::Format); - // our_formatter.await?; - // prettier.await?; let (r1, r2) = join!(prettier, our_formatter).await; r1?; r2?; } Target::Release(release) => match release.action { Action::CreateDraft => { - enso_build::release::create_release(&ctx).await?; + enso_build::release::draft_a_new_release(&ctx).await?; } - Action::DeployToEcr(args) => { + Action::DeployRuntime(args) => { enso_build::release::deploy_to_ecr(&ctx, args.ecr_repository).await?; - enso_build::release::dispatch_cloud_image_build_action( + enso_build::repo::cloud::build_image_workflow_dispatch_input( &ctx.octocrab, &ctx.triple.versions.version, ) .await?; } + Action::DeployGui(args) => { + let crate::arg::release::DeployGui {} = args; + enso_build::release::upload_gui_to_cloud_good(&ctx).await?; + } Action::Publish => { enso_build::release::publish_release(&ctx).await?; } @@ -893,39 +892,12 @@ pub async fn main_internal(config: enso_build::config::Config) -> Result { Ok(()) } -pub fn lib_main(config: enso_build::config::Config) -> Result { +pub fn lib_main(config: Option) -> Result { + trace!("Starting the tokio runtime."); let rt = tokio::runtime::Runtime::new()?; + trace!("Entering main."); rt.block_on(async { main_internal(config).await })?; rt.shutdown_timeout(Duration::from_secs(60 * 30)); info!("Successfully ending."); Ok(()) } - - -// #[cfg(test)] -// mod tests { -// use super::*; -// use enso_build::version::Versions; -// use ide_ci::models::config::RepoContext; -// -// #[tokio::test] -// async fn resolving_release() -> Result { -// setup_logging()?; -// let octocrab = Octocrab::default(); -// let context = Processor { -// context: BuildContext { -// remote_repo: RepoContext::from_str("enso-org/enso")?, -// triple: TargetTriple::new(Versions::new(Version::new(2022, 1, 1))), -// source_root: r"H:/NBO/enso5".into(), -// octocrab, -// cache: Cache::new_default().await?, -// }, -// }; -// -// dbg!( -// context.resolve_release_source(Backend { target_os: TARGET_OS }, -// "latest".into()).await )?; -// -// Ok(()) -// } -// } diff --git a/build/cli/src/main.rs b/build/cli/src/main.rs index ee7cd186f3d6..8705e8e3182a 100644 --- a/build/cli/src/main.rs +++ b/build/cli/src/main.rs @@ -6,8 +6,16 @@ use enso_build::prelude::*; +use enso_build::config::Config; +use enso_build::config::ConfigRaw; + fn main() -> Result { - enso_build_cli::lib_main(Default::default()) + setup_logging()?; + trace!("Starting CLI driver, cwd is {}", ide_ci::env::current_dir()?.display()); + let 
build_config_yaml = include_str!("../../../build-config.yaml"); + let build_config_raw = serde_yaml::from_str::(build_config_yaml)?; + let build_config = Config::try_from(build_config_raw)?; + enso_build_cli::lib_main(Some(build_config)) } diff --git a/build/macros/Cargo.toml b/build/macros/Cargo.toml new file mode 100644 index 000000000000..a437ae50c4d7 --- /dev/null +++ b/build/macros/Cargo.toml @@ -0,0 +1,19 @@ +[package] +name = "enso-build-macros" +version = "0.1.0" +edition = "2021" + +[lib] +proc-macro = true + +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html +[dependencies] +convert_case = "0.6.0" +enso-build-base = { path = "../base" } +itertools = "0.10.5" +proc-macro2 = "1.0.36" +quote = "1.0.15" +regex = "1.6.0" +serde_yaml = "0.9.14" +shrinkwraprs = "0.3.0" +syn = "1.0.86" diff --git a/build/macros/src/lib.rs b/build/macros/src/lib.rs new file mode 100644 index 000000000000..35b43130ca8d --- /dev/null +++ b/build/macros/src/lib.rs @@ -0,0 +1,41 @@ +// === Features === +#![feature(const_trait_impl)] +#![feature(string_remove_matches)] +#![feature(default_free_fn)] +#![feature(once_cell)] +#![feature(option_result_contains)] +// === Standard Linter Configuration === +#![deny(non_ascii_idents)] +#![warn(unsafe_code)] +#![allow(clippy::bool_to_int_with_if)] +#![allow(clippy::let_and_return)] + + + +mod prelude { + pub use enso_build_base::prelude::*; + + pub use convert_case::Case; + pub use convert_case::Casing; + pub use itertools::Itertools; + pub use proc_macro2::Span; + pub use proc_macro2::TokenStream; + pub use quote::quote; + pub use shrinkwraprs::Shrinkwrap; + pub use syn::parse::Parse; + pub use syn::Data; + pub use syn::DeriveInput; + pub use syn::Ident; +} + +use prelude::*; + +#[proc_macro_derive(Arg, attributes(arg))] +pub fn derive_answer_fn(item: proc_macro::TokenStream) -> proc_macro::TokenStream { + let input = syn::parse_macro_input!(item as DeriveInput); + program_args::arg(input) + .unwrap_or_else(|err| panic!("Failed to derive program argument: {err:?}")) + .into() +} + +mod program_args; diff --git a/build/macros/src/program_args.rs b/build/macros/src/program_args.rs new file mode 100644 index 000000000000..a499b60ce1fe --- /dev/null +++ b/build/macros/src/program_args.rs @@ -0,0 +1,178 @@ +use crate::prelude::*; + + + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Policy { + pub flag_case: Case, + pub format: Option, +} + +impl const Default for Policy { + fn default() -> Self { + Self { flag_case: Case::Kebab, format: None } + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct Generator<'a> { + pub input: &'a syn::DeriveInput, + pub policy: Vec, +} + +impl<'a> Generator<'a> { + pub fn current_policy(&self) -> &Policy { + static DEFAULT_POLICY: Policy = Policy::default(); + self.policy.last().unwrap_or(&DEFAULT_POLICY) + } + + pub fn new(input: &'a syn::DeriveInput) -> Self { + Self { input, policy: vec![Default::default()] } + } + + pub fn format_flag(&mut self, name: impl ToString) -> String { + format!("--{}", name.to_string().to_case(self.current_policy().flag_case)) + } + + pub fn generate(self) -> TokenStream { + // let name = &self.input.ident; + match &self.input.data { + Data::Enum(e) => EnumGenerator::new(self, e).generate(), + _ => unimplemented!(), + // Data::Struct(_) => {} + // Data::Union(_) => {} + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Shrinkwrap)] +#[shrinkwrap(mutable)] +pub struct EnumGenerator<'a> { + #[shrinkwrap(main_field)] + pub generator: Generator<'a>, + pub 
enum_data: &'a syn::DataEnum, +} + +impl<'a> EnumGenerator<'a> { + pub fn new(generator: Generator<'a>, enum_data: &'a syn::DataEnum) -> Self { + Self { generator, enum_data } + } + + /// Generate output for enum where all variants are units. + /// + /// In such case every variant can be converted to OsStr. + /// An iterator is just a single occurrence of the string. + pub fn generate_plain(&mut self) -> TokenStream { + let name = &self.generator.input.ident; + let variant_names = + self.enum_data.variants.iter().map(|variant| &variant.ident).collect_vec(); + let flags = variant_names.iter().map(|v| self.format_flag(v)).collect_vec(); + quote! { + impl AsRef for #name { + fn as_ref(&self) -> &std::ffi::OsStr { + match self { + #( #name::#variant_names => #flags, )* + }.as_ref() + } + } + + impl IntoIterator for #name { + type Item = std::ffi::OsString; + type IntoIter = std::iter::Once; + + fn into_iter(self) -> Self::IntoIter { + std::iter::once(self.as_ref().to_owned()) + } + } + } + } + + /// Generate arm that matches a variant with zero or one field and outputs `Vec`. + pub fn generate_arm_with_field(&mut self, variant: &syn::Variant) -> TokenStream { + let relevant_attrs = variant + .attrs + .iter() + .filter_map(|attr| attr.path.is_ident("arg").then_some(&attr.tokens)) + .collect_vec(); + // dbg!(&relevant_attrs.iter().map(|t| t.to_string()).collect_vec()); + let _relevant_attrs_as_expr = relevant_attrs + .iter() + .filter_map(|tokens| syn::parse2::((*tokens).clone()).ok()) + .collect_vec(); + // dbg!(relevant_attrs_as_expr); + + let name = &self.generator.input.ident; + let variant_name = &variant.ident; + let flag = self.format_flag(variant_name); + if let Some(_field) = variant.fields.iter().next() { + // let field_type = &field.ty; + quote! { + #name::#variant_name(field) => { + let mut result = Vec::new(); + result.push(#flag.into()); + let os_str: &OsStr = field.as_ref(); + result.push(os_str.into()); + result.into_iter() + } + } + } else { + quote! { + #name::#variant_name => vec![#flag.into()].into_iter() + } + } + } + + /// Generate output for enum where variant can have fields. + pub fn generate_with_fields(&mut self) -> TokenStream { + let name = &self.generator.input.ident; + let arms = self.enum_data.variants.iter().map(|v| self.generate_arm_with_field(v)); + quote! { + impl IntoIterator for #name { + type Item = std::ffi::OsString; + type IntoIter = std::vec::IntoIter; + + fn into_iter(self) -> Self::IntoIter { + match self { + #( #arms, )* + } + } + } + } + } + + pub fn generate(&mut self) -> TokenStream { + // If all variants are unit variants, we just pretty print their names. 
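// In other words, the derive emits the cheap `AsRef<OsStr>`-based
// implementation only when no variant carries a payload; a single payload
// field anywhere in the enum forces the `Vec<OsString>`-backed iterator
// produced by `generate_with_fields` instead.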
+ if self.enum_data.variants.iter().all(|v| v.fields.is_empty()) { + self.generate_plain() + } else { + self.generate_with_fields() + } + } +} + +pub fn arg(input: DeriveInput) -> Result { + let generator = Generator::new(&input); + Ok(generator.generate()) +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn foo() -> Result { + let code = "enum Foo { + #[arg] + Bar, + #[arg] + Baz, + #[arg] + Quux, + }"; + let token_stream = syn::parse_str::(code)?; + + + dbg!(token_stream); + Ok(()) + } +} diff --git a/build/macros/tests/plain.rs b/build/macros/tests/plain.rs new file mode 100644 index 000000000000..472b5ac2053b --- /dev/null +++ b/build/macros/tests/plain.rs @@ -0,0 +1,38 @@ +// === Non-Standard Linter Configuration === +#![allow(clippy::disallowed_names)] + +use enso_build_base::prelude::*; + +use itertools::Itertools; +use std::str::FromStr; + + + +#[derive(enso_build_macros::Arg)] +pub enum Foo { + Foo, + BarBaz, +} + +#[test] +fn hello() { + let foo = Foo::Foo; + assert_eq!(foo.as_ref(), OsStr::new("--foo")); + let args = foo.into_iter().collect_vec(); + assert_eq!(args, vec![OsString::from("--foo")]); + + let bar_baz = Foo::BarBaz; + assert_eq!(bar_baz.as_ref(), OsStr::new("--bar-baz")); + let args = bar_baz.into_iter().collect_vec(); + assert_eq!(args, vec![OsString::from("--bar-baz")]); +} + +#[test] +fn experiment_with_parsing() -> Result { + let code = "foo = ToString::to_string"; + let token_stream = proc_macro2::TokenStream::from_str(code).unwrap(); + dbg!(&token_stream); + let foo = syn::parse2::(token_stream).unwrap(); + dbg!(&foo); + Ok(()) +} diff --git a/build/macros/tests/with_arg.rs b/build/macros/tests/with_arg.rs new file mode 100644 index 000000000000..59bfe77ec897 --- /dev/null +++ b/build/macros/tests/with_arg.rs @@ -0,0 +1,32 @@ +use enso_build_base::prelude::*; + +use itertools::Itertools; + + + +#[derive(enso_build_macros::Arg)] +pub enum Foo { + Bar, + BarBaz(String), + HogeHoge(OsString), + // #[arg(format = ToString::to_string)] + // TaraPon(u32), +} + +#[test] +fn test_argument_formatting() { + let bar = Foo::Bar; + assert_eq!(bar.into_iter().collect_vec(), vec![OsString::from("--bar")]); + + let bar_baz = Foo::BarBaz("foo".into()); + assert_eq!(bar_baz.into_iter().collect_vec(), vec![ + OsString::from("--bar-baz"), + OsString::from("foo") + ]); + + let hoge_hoge = Foo::HogeHoge(OsString::from("foo")); + assert_eq!(hoge_hoge.into_iter().collect_vec(), vec![ + OsString::from("--hoge-hoge"), + OsString::from("foo") + ]); +} diff --git a/run.cmd b/run.cmd index eabf9433ef0e..379b94751e27 100644 --- a/run.cmd +++ b/run.cmd @@ -1,6 +1,6 @@ -pushd %~dp0 +@ pushd %~dp0 @ set TARGET_DIR=%~dp0target\enso-build @ set TARGET_EXE=%TARGET_DIR%\buildscript\enso-build-cli.exe cargo build --profile buildscript --target-dir "%TARGET_DIR%" --package enso-build-cli && "%TARGET_EXE%" %* -popd -exit /b %ERRORLEVEL% +@ popd +@ exit /b %ERRORLEVEL% diff --git a/run.ps1 b/run.ps1 new file mode 100644 index 000000000000..48809e07b81b --- /dev/null +++ b/run.ps1 @@ -0,0 +1,20 @@ +# Build script driver for the PowerShell. +# +# Having it in addition to CMD script allows better experience in some cases, +# like interrupting the build with Ctrl+C. +# +# This was developed and tested on Windows only, though there is no reason +# why it should not work on other platforms through PowerShell Core. 
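# In short, the script mirrors run.cmd: it builds the enso-build-cli package
# under the dedicated "buildscript" cargo profile into target/enso-build, then
# re-invokes the produced binary with the original arguments and forwards its
# exit code.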
+$ErrorActionPreference = "Stop" +$TargetDir = Join-Path $PSScriptRoot "target" "enso-build" +$BuildScriptProfile = "buildscript" +$BuildScriptBin = "enso-build-cli" + +$TargetExe = Join-Path $TargetDir $BuildScriptProfile $BuildScriptBin + +$BuildArgs = "build", "--profile", $BuildScriptProfile, "--target-dir", $TargetDir, "--package", $BuildScriptBin +Set-Location $PSScriptRoot +Start-Process cargo -NoNewWindow -Wait -ArgumentList $BuildArgs +if (!$?) { Exit $LASTEXITCODE } +Start-Process $TargetExe -NoNewWindow -Wait -ArgumentList $args +Exit $LASTEXITCODE From e8f3ad397949b8c791aa3d7a1cd12cc124b2b314 Mon Sep 17 00:00:00 2001 From: Kaz Wesley Date: Tue, 8 Nov 2022 18:57:40 -0800 Subject: [PATCH 4/4] Ensure parses of invalid inputs represent all tokens (#3860) Ensure all tokens from the input are represented in trees resulting from invalid inputs--tests now cover every reachable code line that creates an `Invalid` node. (Also implemented stricter validation, mainly of `import`/`export` statements.) See: https://www.pivotaltracker.com/story/show/183405907 --- lib/rust/parser/debug/tests/parse.rs | 94 +++++++++++++++++++++++++- lib/rust/parser/src/macros/built_in.rs | 74 ++++++++++++++------ lib/rust/parser/src/syntax/operator.rs | 14 ---- 3 files changed, 144 insertions(+), 38 deletions(-) diff --git a/lib/rust/parser/debug/tests/parse.rs b/lib/rust/parser/debug/tests/parse.rs index 7d187f1811f3..b42c81f525c9 100644 --- a/lib/rust/parser/debug/tests/parse.rs +++ b/lib/rust/parser/debug/tests/parse.rs @@ -42,9 +42,9 @@ macro_rules! test { -// ============= -// === Tests === -// ============= +// ================================ +// === Language Construct Tests === +// ================================ #[test] fn nothing() { @@ -1248,6 +1248,94 @@ fn multiline_annotations() { +// ========================== +// === Syntax Error Tests === +// ========================== + +#[test] +fn space_required() { + test_invalid("foo = if cond.x else.y"); +} + +#[test] +fn incomplete_type_definition() { + test_invalid("type"); +} + +#[test] +fn bad_case() { + test_invalid("foo = case x of\n 4"); + test_invalid("foo = case x of\n 4 ->"); + test_invalid("foo = case x of\n 4->"); +} + +#[test] +fn malformed_sequence() { + test_invalid("(1, )"); + test_invalid("foo = (1, )"); +} + +#[test] +fn unmatched_delimiter() { + test_invalid("("); + test_invalid(")"); + test_invalid("["); + test_invalid("]"); + test_invalid("foo = ("); + test_invalid("foo = )"); + test_invalid("foo = ["); + test_invalid("foo = ]"); +} + +#[test] +fn unexpected_special_operator() { + test_invalid("foo = 1, 2"); +} + +#[test] +fn malformed_import() { + test_invalid("import"); + test_invalid("import as Foo"); + test_invalid("import Foo as Foo, Bar"); + test_invalid("import Foo as Foo.Bar"); + test_invalid("import Foo as"); + test_invalid("import Foo as Bar.Baz"); + test_invalid("import Foo hiding"); + test_invalid("import Foo hiding X,"); + test_invalid("polyglot import Foo"); + test_invalid("polyglot java import"); + test_invalid("from import all"); + test_invalid("from Foo import all hiding"); + test_invalid("from Foo import all hiding X.Y"); + test_invalid("export"); + test_invalid("export as Foo"); + test_invalid("export Foo as Foo, Bar"); + test_invalid("export Foo as Foo.Bar"); + test_invalid("export Foo as"); + test_invalid("export Foo as Bar.Baz"); + test_invalid("export Foo hiding"); + test_invalid("export Foo hiding X,"); + test_invalid("from export all"); + test_invalid("from Foo export all hiding"); + test_invalid("from Foo 
export all hiding X.Y"); +} + +#[test] +fn invalid_token() { + test_invalid("`"); + test_invalid("splice_outside_text = `"); +} + +#[test] +fn illegal_foreign_body() { + test_invalid("foreign 4"); + test_invalid("foreign 4 * 4"); + test_invalid("foreign foo = \"4\""); + test_invalid("foreign js foo = 4"); +} + + + // ==================== // === Test Support === // ==================== diff --git a/lib/rust/parser/src/macros/built_in.rs b/lib/rust/parser/src/macros/built_in.rs index 568916291abe..4dd71981f7b1 100644 --- a/lib/rust/parser/src/macros/built_in.rs +++ b/lib/rust/parser/src/macros/built_in.rs @@ -44,11 +44,11 @@ fn register_import_macros(macros: &mut resolver::SegmentMap<'_>) { use crate::macro_definition; let defs = [ macro_definition! {("import", everything()) import_body}, - macro_definition! {("import", everything(), "as", identifier()) import_body}, + macro_definition! {("import", everything(), "as", everything()) import_body}, macro_definition! {("import", everything(), "hiding", everything()) import_body}, macro_definition! {("polyglot", everything(), "import", everything()) import_body}, macro_definition! { - ("polyglot", everything(), "import", everything(), "as", identifier()) import_body}, + ("polyglot", everything(), "import", everything(), "as", everything()) import_body}, macro_definition! { ("polyglot", everything(), "import", everything(), "hiding", everything()) import_body}, macro_definition! { @@ -73,17 +73,22 @@ fn import_body(segments: NonEmptyVec) -> syntax::Tree { let mut as_ = None; let mut hiding = None; let mut parser = operator::Precedence::new(); + let mut incomplete_import = false; for segment in segments { let header = segment.header; let tokens = segment.result.tokens(); let body; let field = match header.code.as_ref() { "polyglot" => { - body = parser.resolve(tokens).map(expect_ident); + body = Some( + parser.resolve(tokens).map(expect_ident).unwrap_or_else(expected_nonempty), + ); &mut polyglot } "from" => { - body = parser.resolve(tokens).map(expect_qualified); + body = Some( + parser.resolve(tokens).map(expect_qualified).unwrap_or_else(expected_nonempty), + ); &mut from } "import" => { @@ -92,34 +97,44 @@ fn import_body(segments: NonEmptyVec) -> syntax::Tree { None => expect_qualified, }; body = sequence_tree(&mut parser, tokens, expect); + incomplete_import = body.is_none(); &mut import } "all" => { debug_assert!(tokens.is_empty()); all = Some(into_ident(header)); + incomplete_import = false; continue; } "as" => { - body = parser.resolve(tokens).map(expect_ident); + body = Some( + parser.resolve(tokens).map(expect_ident).unwrap_or_else(expected_nonempty), + ); &mut as_ } "hiding" => { - body = sequence_tree(&mut parser, tokens, expect_ident); + body = Some( + sequence_tree(&mut parser, tokens, expect_ident) + .unwrap_or_else(expected_nonempty), + ); &mut hiding } _ => unreachable!(), }; *field = Some(syntax::tree::MultiSegmentAppSegment { header, body }); } - let import = import.unwrap(); - syntax::Tree::import(polyglot, from, import, all, as_, hiding) + let import = syntax::Tree::import(polyglot, from, import.unwrap(), all, as_, hiding); + if incomplete_import { + return import.with_error("Expected name or `all` keyword following `import` keyword."); + } + import } fn register_export_macros(macros: &mut resolver::SegmentMap<'_>) { use crate::macro_definition; let defs = [ macro_definition! {("export", everything()) export_body}, - macro_definition! {("export", everything(), "as", identifier()) export_body}, + macro_definition! 
{("export", everything(), "as", everything()) export_body}, macro_definition! {("from", everything(), "export", everything()) export_body}, macro_definition! { ("from", everything(), "export", nothing(), "all", nothing()) export_body}, @@ -129,7 +144,7 @@ fn register_export_macros(macros: &mut resolver::SegmentMap<'_>) { ("from", everything(), "export", nothing(), "all", nothing(), "hiding", everything()) export_body}, macro_definition! { - ("from", everything(), "as", identifier(), "export", everything()) export_body}, + ("from", everything(), "as", everything(), "export", everything()) export_body}, ]; for def in defs { macros.register(def); @@ -143,13 +158,16 @@ fn export_body(segments: NonEmptyVec) -> syntax::Tree { let mut as_ = None; let mut hiding = None; let mut parser = operator::Precedence::new(); + let mut incomplete_export = false; for segment in segments { let header = segment.header; let tokens = segment.result.tokens(); let body; let field = match header.code.as_ref() { "from" => { - body = parser.resolve(tokens).map(expect_qualified); + body = Some( + parser.resolve(tokens).map(expect_qualified).unwrap_or_else(expected_nonempty), + ); &mut from } "export" => { @@ -158,27 +176,37 @@ fn export_body(segments: NonEmptyVec) -> syntax::Tree { None => expect_qualified, }; body = sequence_tree(&mut parser, tokens, expect); + incomplete_export = body.is_none(); &mut export } "all" => { debug_assert!(tokens.is_empty()); all = Some(into_ident(header)); + incomplete_export = false; continue; } "as" => { - body = parser.resolve(tokens).map(expect_ident); + body = Some( + parser.resolve(tokens).map(expect_ident).unwrap_or_else(expected_nonempty), + ); &mut as_ } "hiding" => { - body = sequence_tree(&mut parser, tokens, expect_ident); + body = Some( + sequence_tree(&mut parser, tokens, expect_ident) + .unwrap_or_else(expected_nonempty), + ); &mut hiding } _ => unreachable!(), }; *field = Some(syntax::tree::MultiSegmentAppSegment { header, body }); } - let export = export.unwrap(); - syntax::Tree::export(from, export, all, as_, hiding) + let export = syntax::Tree::export(from, export.unwrap(), all, as_, hiding); + if incomplete_export { + return export.with_error("Expected name or `all` keyword following `export` keyword."); + } + export } /// If-then-else macro definition. 
@@ -254,11 +282,7 @@ fn type_def_body(matched_segments: NonEmptyVec<MatchedSegment>) -> syntax::Tree
             code,
             variant: syntax::token::Variant::Ident(ident),
         })) => syntax::Token(left_offset, code, ident),
-        _ => {
-            let placeholder =
-                Tree::ident(syntax::token::ident("", "", false, 0, false, false, false));
-            return placeholder.with_error("Expected identifier after `type` keyword.");
-        }
+        _ => return Tree::ident(header).with_error("Expected identifier after `type` keyword."),
     };
     let params = operator::Precedence::new()
         .resolve_non_section(tokens)
@@ -665,7 +689,7 @@ fn sequence_tree<'s>(
     let mut tree = first.map(&mut f);
     for OperatorDelimitedTree { operator, body } in rest {
         invalid = invalid || body.is_none();
-        tree = Tree::opr_app(tree, Ok(operator), body).into();
+        tree = Tree::opr_app(tree, Ok(operator), body.map(&mut f)).into();
     }
     if invalid {
         tree = tree.map(|tree| tree.with_error("Malformed comma-delimited sequence."));
@@ -727,6 +751,9 @@ fn try_foreign_body<'s>(
     if !equals.properties.is_assignment() {
         return Err(expected_function);
     };
+    if !matches!(body.variant, box syntax::tree::Variant::TextLiteral(_)) {
+        return Err("Expected a text literal as body of `foreign` declaration.");
+    }
     let (name, args) = crate::collect_arguments(lhs);
     let mut name = try_tree_into_ident(name).ok_or(expected_name)?;
     name.left_offset += function.span.left_offset;
@@ -792,3 +819,8 @@ fn expect_qualified(tree: syntax::Tree) -> syntax::Tree {
         tree.with_error("Expected qualified name.")
     }
 }
+
+fn expected_nonempty<'s>() -> syntax::Tree<'s> {
+    let empty = syntax::Tree::ident(syntax::token::ident("", "", false, 0, false, false, false));
+    empty.with_error("Expected tokens.")
+}
diff --git a/lib/rust/parser/src/syntax/operator.rs b/lib/rust/parser/src/syntax/operator.rs
index a80a079ad697..1ce3ad661247 100644
--- a/lib/rust/parser/src/syntax/operator.rs
+++ b/lib/rust/parser/src/syntax/operator.rs
@@ -90,20 +90,6 @@ impl<'s> Extend<syntax::Item<'s>> for Precedence<'s> {
     }
 }
 
-/// Annotate expressions that should use spacing, because otherwise they are misleading. For
-/// example, `if cond then.x else.y` is parsed as `if cond then .x else .y`, which after expansion
-/// translates to `if cond then (\t -> t.x) else (\t -> t.y)`. However, for some macros spacing is
-/// not needed. For example, `(.x)` is parsed as `(\t -> t.x)`, which is understandable.
-fn annotate_tokens_that_need_spacing(item: syntax::Item) -> syntax::Item {
-    use syntax::tree::Variant::*;
-    item.map_tree(|ast| match &*ast.variant {
-        MultiSegmentApp(data)
-            if !matches!(data.segments.first().header.variant, token::Variant::OpenSymbol(_)) =>
-            ast.with_error("This expression cannot be used in a non-spaced equation."),
-        _ => ast,
-    })
-}
-
 /// Take [`Item`] stream, resolve operator precedence and return the final AST.
 ///
 /// The precedence resolution algorithm is based on the Shunting yard algorithm[1], extended to