From fa81d4b455a3b9d4a01b95401ae1716b95ab8ef6 Mon Sep 17 00:00:00 2001
From: wbot <69343704+wtools-bot@users.noreply.github.com>
Date: Sat, 29 Jun 2024 20:39:03 +0300
Subject: [PATCH] NOT READY : refactoring willbe (#1379)

evolve macro_tools and willbe
---
 Cargo.toml | 6 +
 bash.exe.stackdump | 38 -
 .../instance_of/src/typing/implements_lib.rs | 2 +
 .../src/typing/inspect_type_lib.rs | 2 +
 .../instance_of/src/typing/is_slice_lib.rs | 2 +
 module/alias/instance_of/src/typing/typing.rs | 2 +
 module/alias/werror/Cargo.toml | 16 +-
 module/alias/winterval/Cargo.toml | 2 +-
 module/alias/wtest/Cargo.toml | 2 +-
 .../wtest_basic/src/_blank/standard_lib.rs | 2 +
 module/blank/willbe_old/Cargo.toml | 80 --
 module/blank/willbe_old/License | 22 -
 module/blank/willbe_old/Readme.md | 35 -
 .../src/willbe_old/commands/each.rs | 119 ---
 .../willbe_old/src/willbe_old/commands/end.rs | 61 --
 .../src/willbe_old/commands/init.rs | 43 -
 .../willbe_old/src/willbe_old/commands/mod.rs | 66 --
 .../src/willbe_old/commands/package/info.rs | 80 --
 .../src/willbe_old/commands/package/mod.rs | 8 -
 .../willbe_old/commands/package/publish.rs | 66 --
 .../src/willbe_old/core/entities/mod.rs | 10 -
 .../core/entities/package/metadata.rs | 119 ---
 .../willbe_old/core/entities/package/mod.rs | 10 -
 .../core/entities/package/package.rs | 53 --
 .../core/entities/package/verification.rs | 65 --
 .../src/willbe_old/core/entities/utility.rs | 124 ---
 .../src/willbe_old/core/entities/workspace.rs | 95 --
 .../src/willbe_old/core/iterators.rs | 36 -
 .../willbe_old/src/willbe_old/core/mod.rs | 9 -
 .../blank/willbe_old/src/willbe_old/files.rs | 34 -
 .../willbe_old/src/willbe_old/willbe_entry.rs | 34 -
 .../willbe_old/src/willbe_old/willbe_lib.rs | 36 -
 module/blank/willbe_old/tests/smoke_test.rs | 15 -
 .../tests/willbe_old/_asset/empty/.gitignore | 4 -
 .../willbe_old/_asset/package/Cargo.toml | 9 -
 .../tests/willbe_old/_asset/package/License | 0
 .../tests/willbe_old/_asset/package/Readme.md | 0
 .../willbe_old/_asset/package/src/lib.rs | 15 -
 .../willbe_old/_asset/package/src/main.rs | 3 -
 .../_asset/package_no_verified/Cargo.toml | 8 -
 .../_asset/package_no_verified/src/lib.rs | 8 -
 .../_asset/package_no_verified/src/main.rs | 3 -
 .../_asset/workspace_with_deps/Cargo.toml | 8 -
 .../module/module1/Cargo.toml | 8 -
 .../module/module1/src/lib.rs | 14 -
 .../module/module2/Cargo.toml | 9 -
 .../module/module2/src/lib.rs | 14 -
 .../module/module3/Cargo.toml | 9 -
 .../module/module3/src/lib.rs | 14 -
 .../willbe_old/_asset/workspaces/Cargo.toml | 9 -
 .../_asset/workspaces/workspace1/Cargo.toml | 8 -
 .../workspace1/module/module1/Cargo.toml | 8 -
 .../workspace1/module/module1/src/lib.rs | 14 -
 .../workspace1/module/module2/Cargo.toml | 8 -
 .../workspace1/module/module2/src/lib.rs | 14 -
 .../_asset/workspaces/workspace2/Cargo.toml | 8 -
 .../workspace2/module/module3/Cargo.toml | 8 -
 .../workspace2/module/module3/src/lib.rs | 14 -
 .../workspace2/module/module4/Cargo.toml | 8 -
 .../workspace2/module/module4/src/lib.rs | 14 -
 .../workspace2/module/module5/Cargo.toml | 8 -
 .../workspace2/module/module5/src/lib.rs | 14 -
 .../willbe_old/tests/willbe_old/tests/from.rs | 38 -
 .../willbe_old/tests/integration/each.rs | 130 ---
 .../willbe_old/tests/integration/info.rs | 78 --
 .../tests/willbe_old/tests/integration/mod.rs | 8 -
 .../tests/willbe_old/tests/iterator.rs | 149 ---
 .../tests/willbe_old/tests/metadata.rs | 24 -
 .../willbe_old/tests/willbe_old/tests/mod.rs | 11 -
 .../tests/willbe_old/tests/ordering/mod.rs | 6 -
 .../tests/ordering/through_workspaces.rs | 74
-- .../willbe_old/tests/ordering/workspace.rs | 103 -- .../tests/willbe_old/tests/verification.rs | 26 - .../willbe_old/tests/willbe_old/utility.rs | 71 -- .../tests/willbe_old/willbe_test.rs | 7 - module/blank/wlang/Cargo.toml | 2 +- module/blank/wlang/src/standard_lib.rs | 2 + module/core/clone_dyn/Readme.md | 2 + module/core/clone_dyn/src/lib.rs | 2 + module/core/clone_dyn_meta/Cargo.toml | 4 +- module/core/clone_dyn_types/src/lib.rs | 2 + module/core/collection_tools/Readme.md | 2 +- .../examples/collection_tools_trivial.rs | 2 +- module/core/collection_tools/src/lib.rs | 2 + module/core/data_type/Cargo.toml | 2 +- module/core/data_type/src/dt.rs | 2 + module/core/data_type/src/lib.rs | 4 +- module/core/derive_tools/Cargo.toml | 2 +- module/core/derive_tools/src/lib.rs | 2 + module/core/derive_tools/tests/inc/mod.rs | 96 +- module/core/derive_tools_meta/Cargo.toml | 7 +- module/core/derive_tools_meta/src/derive.rs | 1 - .../core/derive_tools_meta/src/derive/from.rs | 2 +- module/core/diagnostics_tools/Cargo.toml | 2 +- module/core/diagnostics_tools/src/diag/cta.rs | 2 + .../core/diagnostics_tools/src/diag/layout.rs | 2 + module/core/diagnostics_tools/src/diag/mod.rs | 2 + module/core/diagnostics_tools/src/diag/rta.rs | 2 + module/core/diagnostics_tools/src/lib.rs | 2 + module/core/error_tools/Cargo.toml | 14 +- module/core/error_tools/src/assert.rs | 2 + module/core/error_tools/src/error.rs | 3 + module/core/error_tools/src/lib.rs | 93 +- module/core/error_tools/src/typed.rs | 60 ++ module/core/error_tools/src/untyped.rs | 60 ++ .../error_tools/tests/inc/for_app_test.rs | 4 +- module/core/for_each/Cargo.toml | 2 +- module/core/for_each/src/lib.rs | 2 + module/core/format_tools/Cargo.toml | 2 +- module/core/format_tools/src/format.rs | 4 + .../core/format_tools/src/format/as_table.rs | 2 + module/core/format_tools/src/format/print.rs | 2 + module/core/format_tools/src/format/table.rs | 2 + .../core/format_tools/src/format/to_string.rs | 2 + .../src/format/to_string_with_fallback.rs | 2 + .../core/format_tools/src/format/wrapper.rs | 2 + module/core/format_tools/src/lib.rs | 2 + module/core/former/Cargo.toml | 19 +- module/core/former/src/lib.rs | 52 +- .../inc/components_tests/component_assign.rs | 1 + .../only_test/component_assign.rs | 7 + ...lision_former_hashmap_without_parameter.rs | 2 + ...llision_former_vector_without_parameter.rs | 2 + .../tests/inc/former_tests/name_collisions.rs | 2 + module/core/former_meta/Cargo.toml | 8 +- module/core/former_types/Cargo.toml | 11 +- module/core/former_types/Readme.md | 4 +- .../examples/former_types_trivial.rs | 4 +- module/core/former_types/src/collection.rs | 2 + .../src/collection/binary_heap.rs | 3 +- module/core/former_types/src/component.rs | 12 + module/core/former_types/src/lib.rs | 16 +- module/core/former_types/tests/inc/mod.rs | 3 +- module/core/fs_tools/src/fs/fs.rs | 2 + module/core/fs_tools/src/fs/lib.rs | 2 + module/core/implements/Cargo.toml | 2 +- module/core/implements/src/lib.rs | 2 + .../core/impls_index/src/impls_index/func.rs | 2 + .../core/impls_index/src/impls_index/impls.rs | 2 + .../core/impls_index/src/impls_index/mod.rs | 2 + module/core/impls_index/src/lib.rs | 2 + module/core/impls_index_meta/Cargo.toml | 2 +- module/core/include_md/Cargo.toml | 2 +- .../include_md/src/_blank/standard_lib.rs | 2 + module/core/inspect_type/Cargo.toml | 2 +- module/core/inspect_type/src/lib.rs | 2 + module/core/interval_adapter/Cargo.toml | 2 +- module/core/interval_adapter/src/lib.rs | 2 + module/core/is_slice/Cargo.toml | 2 +- 
module/core/is_slice/src/lib.rs | 2 + module/core/iter_tools/Cargo.toml | 9 +- module/core/iter_tools/src/iter.rs | 197 +++- module/core/iter_tools/src/lib.rs | 13 +- module/core/macro_tools/Cargo.toml | 66 +- module/core/macro_tools/Readme.md | 18 +- .../examples/macro_tools_attr_prop.rs | 18 +- module/core/macro_tools/src/attr.rs | 59 +- module/core/macro_tools/src/attr_prop.rs | 46 +- .../core/macro_tools/src/attr_prop/boolean.rs | 2 +- .../src/attr_prop/boolean_optional.rs | 3 +- .../macro_tools/src/attr_prop/singletone.rs | 2 +- .../src/attr_prop/singletone_optional.rs | 2 +- module/core/macro_tools/src/attr_prop/syn.rs | 2 +- .../macro_tools/src/attr_prop/syn_optional.rs | 2 +- module/core/macro_tools/src/components.rs | 70 ++ module/core/macro_tools/src/container_kind.rs | 6 +- module/core/macro_tools/src/ct.rs | 63 ++ module/core/macro_tools/src/ct/str.rs | 8 + module/core/macro_tools/src/derive.rs | 4 +- module/core/macro_tools/src/diag.rs | 15 +- module/core/macro_tools/src/drop.rs | 5 +- module/core/macro_tools/src/equation.rs | 5 +- module/core/macro_tools/src/generic_args.rs | 5 +- module/core/macro_tools/src/generic_params.rs | 7 +- module/core/macro_tools/src/item.rs | 8 +- module/core/macro_tools/src/item_struct.rs | 10 +- module/core/macro_tools/src/iter.rs | 168 +--- module/core/macro_tools/src/kw.rs | 69 ++ module/core/macro_tools/src/lib.rs | 352 ++++--- module/core/macro_tools/src/name.rs | 7 +- module/core/macro_tools/src/phantom.rs | 7 +- module/core/macro_tools/src/punctuated.rs | 7 +- module/core/macro_tools/src/quantifier.rs | 19 +- module/core/macro_tools/src/struct_like.rs | 9 +- module/core/macro_tools/src/tokens.rs | 7 +- module/core/macro_tools/src/typ.rs | 8 +- .../macro_tools/tests/inc/attr_prop_test.rs | 11 +- .../core/macro_tools/tests/inc/attr_test.rs | 66 ++ .../tests/inc/compile_time_test.rs | 40 + .../tests/inc/container_kind_test.rs | 1 + .../core/macro_tools/tests/inc/derive_test.rs | 2 +- .../core/macro_tools/tests/inc/diag_test.rs | 1 + .../macro_tools/tests/inc/equation_test.rs | 3 + .../tests/inc/generic_args_test.rs | 1 + .../tests/inc/generic_params_test.rs | 2 + module/core/macro_tools/tests/inc/mod.rs | 4 +- .../macro_tools/tests/inc/phantom_test.rs | 1 + .../macro_tools/tests/inc/quantifier_test.rs | 1 + .../core/macro_tools/tests/inc/syntax_test.rs | 83 -- .../core/macro_tools/tests/inc/tokens_test.rs | 1 + .../tests/inc/{typ_Test.rs => typ_test.rs} | 19 +- module/core/mem_tools/Cargo.toml | 2 +- module/core/mem_tools/src/lib.rs | 2 + module/core/mem_tools/src/mem.rs | 2 + module/core/meta_tools/Cargo.toml | 2 +- module/core/mod_interface/Cargo.toml | 2 +- module/core/mod_interface/Readme.md | 2 + module/core/mod_interface/src/lib.rs | 2 + .../tests/inc/derive/attr_debug/layer_a.rs | 2 + .../tests/inc/derive/layer/layer_a.rs | 2 + .../tests/inc/derive/layer/layer_b.rs | 2 + .../tests/inc/derive/layer_bad_vis/layer_a.rs | 2 + .../inc/derive/layer_unknown_vis/layer_a.rs | 2 + .../tests/inc/derive/use_as/layer_x.rs | 2 + .../tests/inc/derive/use_as/layer_y.rs | 2 + .../tests/inc/derive/use_as/manual_only.rs | 2 + .../tests/inc/derive/use_basic/layer_a.rs | 2 + .../tests/inc/derive/use_basic/layer_b.rs | 2 + .../tests/inc/manual/layer/layer_a.rs | 2 + .../tests/inc/manual/layer/layer_b.rs | 2 + .../tests/inc/manual/layer/mod.rs | 2 + .../tests/inc/manual/layer_use/layer_a.rs | 2 + .../tests/inc/manual/layer_use/layer_b.rs | 2 + .../tests/inc/manual/layer_use/mod.rs | 2 + .../tests/inc/manual/micro_modules/mod.rs | 2 + 
.../tests/inc/manual/micro_modules_two/mod.rs | 2 + module/core/mod_interface_meta/Cargo.toml | 2 +- module/core/mod_interface_meta/src/impls.rs | 18 +- module/core/mod_interface_meta/src/record.rs | 12 +- .../core/mod_interface_meta/src/use_tree.rs | 12 +- .../core/mod_interface_meta/src/visibility.rs | 22 +- module/core/process_tools/Cargo.toml | 4 +- module/core/program_tools/Cargo.toml | 2 +- module/core/program_tools/src/lib.rs | 2 +- module/core/proper_path_tools/Cargo.toml | 14 +- module/core/proper_path_tools/src/lib.rs | 7 + module/core/proper_path_tools/src/path.rs | 283 +++--- .../src/path/absolute_path.rs | 279 +++++- .../src/path/canonical_path.rs | 287 ++++++ .../src/path/current_path.rs | 62 ++ .../proper_path_tools/src/path/native_path.rs | 302 ++++++ .../core/proper_path_tools/src/transitive.rs | 201 ++++ .../tests/inc/absolute_path.rs | 40 +- .../tests/inc/current_path.rs | 33 + .../core/proper_path_tools/tests/inc/mod.rs | 3 + .../tests/inc/path_canonicalize.rs | 50 + .../proper_path_tools/tests/inc/path_join.rs | 420 ++++++--- .../tests/inc/path_relative.rs | 2 +- .../proper_path_tools/tests/inc/transitive.rs | 86 ++ module/core/reflect_tools/Cargo.toml | 2 +- module/core/reflect_tools/src/lib.rs | 2 + module/core/reflect_tools/src/reflect.rs | 2 + .../reflect_tools/src/reflect/axiomatic.rs | 2 + .../reflect_tools/src/reflect/entity_array.rs | 2 + .../src/reflect/entity_hashmap.rs | 6 +- .../src/reflect/entity_hashset.rs | 4 +- .../reflect_tools/src/reflect/entity_slice.rs | 14 +- .../reflect_tools/src/reflect/entity_vec.rs | 4 +- .../core/reflect_tools/src/reflect/fields.rs | 2 + .../reflect_tools/src/reflect/primitive.rs | 2 + .../core/reflect_tools/src/reflect/wrapper.rs | 2 + module/core/reflect_tools_meta/Cargo.toml | 2 +- module/core/strs_tools/Cargo.toml | 2 +- module/core/strs_tools/src/lib.rs | 2 + .../core/strs_tools/src/string/indentation.rs | 2 + module/core/strs_tools/src/string/isolate.rs | 2 + module/core/strs_tools/src/string/mod.rs | 2 + module/core/strs_tools/src/string/number.rs | 2 + .../strs_tools/src/string/parse_request.rs | 2 + module/core/strs_tools/src/string/split.rs | 2 + module/core/test_tools/Cargo.toml | 2 +- module/core/time_tools/Cargo.toml | 2 +- module/core/time_tools/src/lib.rs | 2 + module/core/typing_tools/Cargo.toml | 2 +- module/core/typing_tools/src/lib.rs | 2 + module/core/typing_tools/src/typing.rs | 2 + module/core/variadic_from/Cargo.toml | 2 +- module/core/variadic_from/src/lib.rs | 2 + module/core/variadic_from/src/variadic.rs | 2 + module/core/wtools/Cargo.toml | 14 +- module/core/wtools/src/lib.rs | 2 + module/move/assistant/src/client.rs | 2 + module/move/assistant/src/lib.rs | 2 + module/move/crates_tools/src/lib.rs | 2 + module/move/deterministic_rand/Cargo.toml | 2 +- module/move/graphs_tools/Cargo.toml | 2 +- module/move/plot_interface/Cargo.toml | 2 +- module/move/sqlx_query/Cargo.toml | 2 +- module/move/sqlx_query/src/lib.rs | 2 + module/move/willbe/Cargo.toml | 37 +- .../willbe/src/{description.md => Readme.md} | 0 module/move/willbe/src/action/cicd_renew.rs | 267 ++++-- module/move/willbe/src/action/deploy_renew.rs | 313 ++++--- module/move/willbe/src/action/features.rs | 42 +- module/move/willbe/src/action/list.rs | 679 ++++++++------ module/move/willbe/src/action/main_header.rs | 161 ++-- module/move/willbe/src/action/mod.rs | 2 - module/move/willbe/src/action/publish.rs | 177 ++-- module/move/willbe/src/action/publish_diff.rs | 113 ++- .../src/action/readme_health_table_renew.rs | 478 ++++++---- 
.../action/readme_modules_headers_renew.rs | 235 +++-- module/move/willbe/src/action/test.rs | 153 +-- .../move/willbe/src/action/workspace_renew.rs | 179 ++-- module/move/willbe/src/bin/cargo-will.rs | 2 +- module/move/willbe/src/bin/will.rs | 5 +- module/move/willbe/src/bin/willbe.rs | 2 +- module/move/willbe/src/command/cicd_renew.rs | 8 +- .../move/willbe/src/command/deploy_renew.rs | 13 +- module/move/willbe/src/command/features.rs | 24 +- module/move/willbe/src/command/list.rs | 52 +- module/move/willbe/src/command/main_header.rs | 13 +- module/move/willbe/src/command/publish.rs | 63 +- .../move/willbe/src/command/publish_diff.rs | 18 +- .../src/command/readme_headers_renew.rs | 52 +- .../src/command/readme_health_table_renew.rs | 8 +- .../command/readme_modules_headers_renew.rs | 10 +- module/move/willbe/src/command/test.rs | 91 +- .../willbe/src/command/workspace_renew.rs | 19 +- module/move/willbe/src/entity/channel.rs | 15 +- module/move/willbe/src/entity/code.rs | 43 + module/move/willbe/src/entity/dependency.rs | 285 ++++++ module/move/willbe/src/entity/diff.rs | 14 +- module/move/willbe/src/entity/features.rs | 26 +- module/move/willbe/src/entity/files.rs | 54 ++ .../move/willbe/src/entity/files/crate_dir.rs | 259 ++++++ module/move/willbe/src/entity/files/either.rs | 90 ++ .../willbe/src/entity/files/manifest_file.rs | 276 ++++++ .../willbe/src/entity/files/source_file.rs | 270 ++++++ module/move/willbe/src/entity/git.rs | 88 ++ module/move/willbe/src/entity/manifest.rs | 203 +--- module/move/willbe/src/entity/mod.rs | 87 +- module/move/willbe/src/entity/optimization.rs | 19 +- module/move/willbe/src/entity/package.rs | 877 ++---------------- .../willbe/src/entity/package_md_extension.rs | 152 +++ module/move/willbe/src/entity/packages.rs | 36 +- module/move/willbe/src/entity/packed_crate.rs | 9 +- module/move/willbe/src/entity/progress_bar.rs | 108 +++ module/move/willbe/src/entity/publish.rs | 441 +++++++++ module/move/willbe/src/entity/test.rs | 331 +++---- module/move/willbe/src/entity/version.rs | 160 ++-- module/move/willbe/src/entity/workspace.rs | 481 ++++------ .../move/willbe/src/entity/workspace_graph.rs | 38 + .../src/entity/workspace_md_extension.rs | 70 ++ .../willbe/src/entity/workspace_package.rs | 212 +++++ module/move/willbe/src/lib.rs | 10 +- module/move/willbe/src/tool/_path.rs | 170 ---- module/move/willbe/src/tool/cargo.rs | 77 +- module/move/willbe/src/tool/collection.rs | 12 + module/move/willbe/src/tool/error.rs | 55 ++ module/move/willbe/src/tool/files.rs | 8 +- module/move/willbe/src/tool/git.rs | 35 +- module/move/willbe/src/tool/graph.rs | 73 +- module/move/willbe/src/tool/http.rs | 5 +- module/move/willbe/src/tool/iter.rs | 16 + module/move/willbe/src/tool/macros.rs | 15 + module/move/willbe/src/tool/mod.rs | 58 +- module/move/willbe/src/tool/path.rs | 12 + module/move/willbe/src/tool/query.rs | 13 +- module/move/willbe/src/tool/repository.rs | 61 ++ module/move/willbe/src/tool/sha.rs | 26 - module/move/willbe/src/tool/template.rs | 277 ++++-- module/move/willbe/src/tool/tree.rs | 167 ++++ module/move/willbe/src/tool/url.rs | 12 +- module/move/willbe/src/wtools.rs | 56 +- .../move/willbe/template/deploy/Makefile.hbs | 128 +-- .../move/willbe/template/workspace/Makefile | 78 +- .../workspace/module/module1/Cargo.toml.x | 2 +- .../module1/examples/module1_example.rs | 4 +- .../workspace/module/module1/src/lib.rs | 2 +- .../module/module1/tests/hello_test.rs | 2 +- .../{action => action_tests}/cicd_renew.rs | 18 +- .../inc/{action => 
action_tests}/features.rs | 13 +- .../inc/{action => action_tests}/list.rs | 0 .../inc/{action => action_tests}/list/data.rs | 48 +- .../{action => action_tests}/list/format.rs | 89 +- .../{action => action_tests}/main_header.rs | 2 +- .../tests/inc/{action => action_tests}/mod.rs | 0 .../readme_health_table_renew.rs | 0 .../readme_modules_headers_renew.rs | 30 +- .../inc/{action => action_tests}/test.rs | 132 +-- .../workspace_renew.rs | 0 .../willbe/tests/inc/command/tests_run.rs | 9 +- .../willbe/tests/inc/entity/dependencies.rs | 129 ++- module/move/willbe/tests/inc/entity/diff.rs | 27 +- .../move/willbe/tests/inc/entity/features.rs | 28 +- .../move/willbe/tests/inc/entity/version.rs | 46 +- module/move/willbe/tests/inc/helper.rs | 116 +++ module/move/willbe/tests/inc/helpers.rs | 1 - module/move/willbe/tests/inc/mod.rs | 22 +- module/move/willbe/tests/inc/package.rs | 8 +- module/move/willbe/tests/inc/tool/graph.rs | 217 ----- .../move/willbe/tests/inc/tool/graph_test.rs | 215 +++++ module/move/willbe/tests/inc/tool/mod.rs | 9 +- module/move/willbe/tests/inc/tool/process.rs | 65 -- .../inc/tool/{query.rs => query_test.rs} | 11 +- module/move/wplot/Cargo.toml | 2 +- module/postponed/_video_experiment/Cargo.toml | 2 +- module/postponed/non_std/Cargo.toml | 12 +- module/postponed/std_tools/Cargo.toml | 12 +- module/postponed/std_x/Cargo.toml | 12 +- module/postponed/type_constructor/Cargo.toml | 2 +- module/postponed/type_constructor/src/lib.rs | 2 + .../src/type_constuctor/enumerable.rs | 2 + .../src/type_constuctor/helper.rs | 2 + .../src/type_constuctor/many.rs | 2 + .../src/type_constuctor/mod.rs | 2 + .../src/type_constuctor/no_many.rs | 2 + .../src/type_constuctor/pair.rs | 2 + .../src/type_constuctor/single.rs | 2 + .../src/type_constuctor/traits.rs | 2 + .../src/type_constuctor/types.rs | 2 + .../src/type_constuctor/vectorized_from.rs | 2 + module/postponed/wpublisher/Cargo.toml | 2 +- .../_template_procedural_macro/front/lib.rs | 2 + .../_template_procedural_macro/runtime/lib.rs | 2 + module/template/layer/layer.rs | 2 + step/Cargo.toml | 24 + step/src/bin/sources.rs | 66 ++ 418 files changed, 10147 insertions(+), 7195 deletions(-) delete mode 100644 bash.exe.stackdump delete mode 100644 module/blank/willbe_old/Cargo.toml delete mode 100644 module/blank/willbe_old/License delete mode 100644 module/blank/willbe_old/Readme.md delete mode 100644 module/blank/willbe_old/src/willbe_old/commands/each.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/commands/end.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/commands/init.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/commands/mod.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/commands/package/info.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/commands/package/mod.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/commands/package/publish.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/core/entities/mod.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/core/entities/package/metadata.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/core/entities/package/mod.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/core/entities/package/package.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/core/entities/package/verification.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/core/entities/utility.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/core/entities/workspace.rs delete mode 
100644 module/blank/willbe_old/src/willbe_old/core/iterators.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/core/mod.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/files.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/willbe_entry.rs delete mode 100644 module/blank/willbe_old/src/willbe_old/willbe_lib.rs delete mode 100644 module/blank/willbe_old/tests/smoke_test.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/empty/.gitignore delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/package/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/package/License delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/package/Readme.md delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/package/src/lib.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/package/src/main.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/src/lib.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/src/main.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module1/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module1/src/lib.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module2/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module2/src/lib.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module3/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module3/src/lib.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module1/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module1/src/lib.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module2/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module2/src/lib.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module3/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module3/src/lib.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module4/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module4/src/lib.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module5/Cargo.toml delete mode 100644 module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module5/src/lib.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/tests/from.rs delete mode 100644 
module/blank/willbe_old/tests/willbe_old/tests/integration/each.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/tests/integration/info.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/tests/integration/mod.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/tests/iterator.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/tests/metadata.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/tests/mod.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/tests/ordering/mod.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/tests/ordering/through_workspaces.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/tests/ordering/workspace.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/tests/verification.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/utility.rs delete mode 100644 module/blank/willbe_old/tests/willbe_old/willbe_test.rs create mode 100644 module/core/error_tools/src/typed.rs create mode 100644 module/core/error_tools/src/untyped.rs create mode 100644 module/core/macro_tools/src/components.rs create mode 100644 module/core/macro_tools/src/ct.rs create mode 100644 module/core/macro_tools/src/ct/str.rs create mode 100644 module/core/macro_tools/src/kw.rs create mode 100644 module/core/macro_tools/tests/inc/compile_time_test.rs delete mode 100644 module/core/macro_tools/tests/inc/syntax_test.rs rename module/core/macro_tools/tests/inc/{typ_Test.rs => typ_test.rs} (94%) create mode 100644 module/core/proper_path_tools/src/path/canonical_path.rs create mode 100644 module/core/proper_path_tools/src/path/current_path.rs create mode 100644 module/core/proper_path_tools/src/path/native_path.rs create mode 100644 module/core/proper_path_tools/src/transitive.rs create mode 100644 module/core/proper_path_tools/tests/inc/current_path.rs create mode 100644 module/core/proper_path_tools/tests/inc/path_canonicalize.rs create mode 100644 module/core/proper_path_tools/tests/inc/transitive.rs rename module/move/willbe/src/{description.md => Readme.md} (100%) create mode 100644 module/move/willbe/src/entity/code.rs create mode 100644 module/move/willbe/src/entity/dependency.rs create mode 100644 module/move/willbe/src/entity/files.rs create mode 100644 module/move/willbe/src/entity/files/crate_dir.rs create mode 100644 module/move/willbe/src/entity/files/either.rs create mode 100644 module/move/willbe/src/entity/files/manifest_file.rs create mode 100644 module/move/willbe/src/entity/files/source_file.rs create mode 100644 module/move/willbe/src/entity/git.rs create mode 100644 module/move/willbe/src/entity/package_md_extension.rs create mode 100644 module/move/willbe/src/entity/progress_bar.rs create mode 100644 module/move/willbe/src/entity/publish.rs create mode 100644 module/move/willbe/src/entity/workspace_graph.rs create mode 100644 module/move/willbe/src/entity/workspace_md_extension.rs create mode 100644 module/move/willbe/src/entity/workspace_package.rs delete mode 100644 module/move/willbe/src/tool/_path.rs create mode 100644 module/move/willbe/src/tool/collection.rs create mode 100644 module/move/willbe/src/tool/error.rs create mode 100644 module/move/willbe/src/tool/iter.rs create mode 100644 module/move/willbe/src/tool/macros.rs create mode 100644 module/move/willbe/src/tool/path.rs create mode 100644 module/move/willbe/src/tool/repository.rs delete mode 100644 module/move/willbe/src/tool/sha.rs create mode 100644 module/move/willbe/src/tool/tree.rs rename 
module/move/willbe/tests/inc/{action => action_tests}/cicd_renew.rs (92%) rename module/move/willbe/tests/inc/{action => action_tests}/features.rs (86%) rename module/move/willbe/tests/inc/{action => action_tests}/list.rs (100%) rename module/move/willbe/tests/inc/{action => action_tests}/list/data.rs (87%) rename module/move/willbe/tests/inc/{action => action_tests}/list/format.rs (82%) rename module/move/willbe/tests/inc/{action => action_tests}/main_header.rs (99%) rename module/move/willbe/tests/inc/{action => action_tests}/mod.rs (100%) rename module/move/willbe/tests/inc/{action => action_tests}/readme_health_table_renew.rs (100%) rename module/move/willbe/tests/inc/{action => action_tests}/readme_modules_headers_renew.rs (83%) rename module/move/willbe/tests/inc/{action => action_tests}/test.rs (68%) rename module/move/willbe/tests/inc/{action => action_tests}/workspace_renew.rs (100%) create mode 100644 module/move/willbe/tests/inc/helper.rs delete mode 100644 module/move/willbe/tests/inc/helpers.rs delete mode 100644 module/move/willbe/tests/inc/tool/graph.rs create mode 100644 module/move/willbe/tests/inc/tool/graph_test.rs delete mode 100644 module/move/willbe/tests/inc/tool/process.rs rename module/move/willbe/tests/inc/tool/{query.rs => query_test.rs} (97%) create mode 100644 step/Cargo.toml create mode 100644 step/src/bin/sources.rs diff --git a/Cargo.toml b/Cargo.toml index 1b52be3898..323238d738 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -6,6 +6,7 @@ members = [ "module/core/*", "module/move/*", "module/test/*", + "step", ] exclude = [ "-*", @@ -45,6 +46,11 @@ restriction = "warn" pedantic = "warn" # Denies undocumented unsafe blocks. undocumented_unsafe_blocks = "deny" +# xxx : check +# Warns if core could be used instead of std, but didn't +std_instead_of_core = "warn" +# Warns if alloc could be used instead of std, but didn't +std_instead_of_alloc = "warn" # xxx : document single_call_fn = "allow" inline_always = "allow" diff --git a/bash.exe.stackdump b/bash.exe.stackdump deleted file mode 100644 index ef9b31e0d7..0000000000 --- a/bash.exe.stackdump +++ /dev/null @@ -1,38 +0,0 @@ -Stack trace: -Frame Function Args -0000005FF160 00021006118E (0002102B5B12, 000210272B3E, 00000000005E, 0000005FACC0) msys-2.0.dll+0x2118E -0000005FF160 0002100469BA (000000000000, 000000000000, 000000000130, 000000001000) msys-2.0.dll+0x69BA -0000005FF160 0002100469F2 (000000000000, 0000000005AF, 00000000005E, 000000000000) msys-2.0.dll+0x69F2 -0000005FF160 0002101791E8 (0002102B5892, 000800000000, 00080000DFB8, 000000000000) msys-2.0.dll+0x1391E8 -0000005FF160 000210183B67 (000000000000, 0002102280C8, 0002102280B0, 0000005FD320) msys-2.0.dll+0x143B67 -0000005FF160 000210046DF4 (00021031C800, 0000005FD320, 000000000000, 000000000000) msys-2.0.dll+0x6DF4 -0000005FF160 00021004850F (00007FFE0384, 000000000000, 000000000000, 000000000000) msys-2.0.dll+0x850F -0000005FF160 00021007251C (000000000000, 000000713234, 000000000000, 000000000000) msys-2.0.dll+0x3251C -0000005FF400 7FFF332D869F (000210040000, 000000000001, 000000000000, 7FFF333E8A70) ntdll.dll+0x2869F -0000005FF400 7FFF3331D03D (0000005FF300, 000000000000, 00000071A078, 000000000001) ntdll.dll+0x6D03D -0000005FF400 7FFF3331CDEE (0000007130D0, 0000005FF400, 000000718450, 000000000000) ntdll.dll+0x6CDEE -0000005FF400 7FFF3331CE60 (7FFF333EB860, 000000000000, 000000213000, 7FFF00000000) ntdll.dll+0x6CE60 -000000000000 7FFF3338EA39 (000000000000, 000000000000, 000000000001, 000000000000) ntdll.dll+0xDEA39 -000000000000 7FFF3337A744 
(7FFF332B0000, 000000213050, 0000002157EE, 000000000000) ntdll.dll+0xCA744 -000000000000 7FFF33323EA3 (000000000000, 0000005FFAD0, 000000000000, 000000000000) ntdll.dll+0x73EA3 -000000000000 7FFF33323DCE (000000000000, 0000005FFAD0, 000000000000, 000000000000) ntdll.dll+0x73DCE -End of stack trace -Loaded modules: -000100400000 bash.exe -7FFF332B0000 ntdll.dll -7FFF32910000 KERNEL32.DLL -7FFF307E0000 KERNELBASE.dll -7FFF330C0000 USER32.dll -7FFF307B0000 win32u.dll -000210040000 msys-2.0.dll -7FFF328E0000 GDI32.dll -7FFF305D0000 gdi32full.dll -7FFF30ED0000 msvcp_win.dll -7FFF30C10000 ucrtbase.dll -7FFF31660000 advapi32.dll -7FFF326C0000 msvcrt.dll -7FFF31D90000 sechost.dll -7FFF30D30000 bcrypt.dll -7FFF30FE0000 RPCRT4.dll -7FFF2FE30000 CRYPTBASE.DLL -7FFF30B90000 bcryptPrimitives.dll diff --git a/module/alias/instance_of/src/typing/implements_lib.rs b/module/alias/instance_of/src/typing/implements_lib.rs index 4b7067310e..98ac9b7d4f 100644 --- a/module/alias/instance_of/src/typing/implements_lib.rs +++ b/module/alias/instance_of/src/typing/implements_lib.rs @@ -88,8 +88,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/alias/instance_of/src/typing/inspect_type_lib.rs b/module/alias/instance_of/src/typing/inspect_type_lib.rs index 26cb2b6a24..f5a5d1ae3c 100644 --- a/module/alias/instance_of/src/typing/inspect_type_lib.rs +++ b/module/alias/instance_of/src/typing/inspect_type_lib.rs @@ -87,8 +87,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/alias/instance_of/src/typing/is_slice_lib.rs b/module/alias/instance_of/src/typing/is_slice_lib.rs index a65f9f68c8..ba8686e2ba 100644 --- a/module/alias/instance_of/src/typing/is_slice_lib.rs +++ b/module/alias/instance_of/src/typing/is_slice_lib.rs @@ -92,8 +92,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/alias/instance_of/src/typing/typing.rs b/module/alias/instance_of/src/typing/typing.rs index 312d1bf158..29f3d6397b 100644 --- a/module/alias/instance_of/src/typing/typing.rs +++ b/module/alias/instance_of/src/typing/typing.rs @@ -29,8 +29,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/alias/werror/Cargo.toml b/module/alias/werror/Cargo.toml index 31884116ec..ca345c91c3 100644 --- a/module/alias/werror/Cargo.toml +++ b/module/alias/werror/Cargo.toml @@ -29,23 +29,23 @@ all-features = false [features] default = [ "enabled", - "error_for_lib", - "error_for_app" + "error_typed", + "error_untyped" ] full = [ "enabled", - "error_for_lib", - "error_for_app" + "error_typed", + "error_untyped" ] no_std = [ "error_tools/no_std" ] use_alloc = [ "error_tools/use_alloc" ] enabled = [ "error_tools/enabled" ] -error_for_lib = [ - "error_tools/error_for_lib" +error_typed = [ + "error_tools/error_typed" ] -error_for_app = [ - "error_tools/error_for_app" +error_untyped = [ + "error_tools/error_untyped" ] [dependencies] diff --git a/module/alias/winterval/Cargo.toml b/module/alias/winterval/Cargo.toml index 385471c227..8b523e9388 100644 --- a/module/alias/winterval/Cargo.toml +++ b/module/alias/winterval/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ "enabled" ] diff --git a/module/alias/wtest/Cargo.toml b/module/alias/wtest/Cargo.toml index 4cb3aad3a8..a3b92484b8 100644 --- a/module/alias/wtest/Cargo.toml +++ b/module/alias/wtest/Cargo.toml @@ -23,7 +23,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + include = [ "/rust/impl/test", "/Cargo.toml", diff --git a/module/alias/wtest_basic/src/_blank/standard_lib.rs b/module/alias/wtest_basic/src/_blank/standard_lib.rs index bd56ee14ed..52f572cffe 100644 --- a/module/alias/wtest_basic/src/_blank/standard_lib.rs +++ b/module/alias/wtest_basic/src/_blank/standard_lib.rs @@ -42,8 +42,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; } // #[ doc( inline ) ] diff --git a/module/blank/willbe_old/Cargo.toml b/module/blank/willbe_old/Cargo.toml deleted file mode 100644 index 56716163d5..0000000000 --- a/module/blank/willbe_old/Cargo.toml +++ /dev/null @@ -1,80 +0,0 @@ -[package] -name = "willbe_old" -version = "0.1.0" -edition = "2021" -authors = [ - "Kostiantyn Wandalen ", - "Dmytro Kryvoruchko ", -] -license = "MIT" -readme = "Readme.md" -documentation = "https://docs.rs/willbe" -repository = "https://github.com/Wandalen/wTools/tree/master/module/core/willbe_old" -homepage = "https://github.com/Wandalen/wTools/tree/master/module/core/willbe_old" -description = """ -___ -""" -categories = [ "algorithms", "development-tools" ] -keywords = [ "fundamental", "general-purpose" ] -publish = false - -[lints] -workspace = true - -[package.metadata.docs.rs] -features = [ "full" ] -all-features = false -# exclude = [ "/tests", "/examples", "-*" ] -include = [ - "/rust/impl/willbe_old", - "/Cargo.toml", - "/Readme.md", - "/License", -] - -[features] -default = [ "enabled" ] -full = [ "enabled" ] -no_std = [] -use_alloc = [ "no_std" ] -enabled = [] - -[lib] -name = "willbe_old" -path = "src/willbe_old/willbe_lib.rs" - -[[bin]] -name = "willbe_old" -path = "src/willbe_old/willbe_entry.rs" - -[[test]] -name = "willbe_test" -path = "tests/willbe_old/willbe_test.rs" - -# disable for now -# [[test]] -# name = "willbe_smoke_test" -# path = "tests/_integration_test/smoke_test.rs" -# -# [[example]] -# name = "willbe_trivial" -# path = "examples/willbe_trivial/src/main.rs" - -[dependencies] -wtools = { workspace = true } -wca = { workspace = true } -mod_interface = { workspace = true, features = [ "default" ] } -iter_tools = { workspace = true, features = [ "default" ] } -error_tools = { workspace = true, features = [ "default" ] } -toml = "0.5" -globwalk = "0.8" -cargo_metadata = "0.15" -path-absolutize = "3" -petgraph = "~0.6" -rand = "0.8.4" - -[dev-dependencies] -test_tools = { workspace = true } -tempfile = "3" -assert_cmd = "2.0" -predicates = "2.1" diff --git a/module/blank/willbe_old/License b/module/blank/willbe_old/License deleted file mode 100644 index 6d5ef8559f..0000000000 --- a/module/blank/willbe_old/License +++ /dev/null @@ -1,22 +0,0 @@ -Copyright Kostiantyn W and Out of the Box Systems (c) 2013-2024 - -Permission is hereby granted, free of charge, to any person -obtaining a copy of this software and associated documentation -files (the "Software"), to deal in the Software without -restriction, including without limitation the rights to use, -copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following -conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND -NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT -HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, -WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING -FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR -OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/module/blank/willbe_old/Readme.md b/module/blank/willbe_old/Readme.md deleted file mode 100644 index 7028ba383c..0000000000 --- a/module/blank/willbe_old/Readme.md +++ /dev/null @@ -1,35 +0,0 @@ - - -# Module :: willbe - - [![experimental](https://raster.shields.io/static/v1?label=&message=experimental&color=orange)](https://github.com/emersion/stability-badges#experimental) [![rust-status](https://github.com/Wandalen/wTools/actions/workflows/module_willbe_old_push.yml/badge.svg)](https://github.com/Wandalen/wTools/actions/workflows/module_willbe_old_push.yml) [![docs.rs](https://img.shields.io/docsrs/willbe_old?color=e3e8f0&logo=docs.rs)](https://docs.rs/willbe_old) [![discord](https://img.shields.io/discord/872391416519737405?color=eee&logo=discord&logoColor=eee&label=ask)](https://discord.gg/m3YfbXpUUY) - - -___ - -### Basic use-case - - - -```rust -use willbe_old::*; - -fn main() -{ -} -``` - -### To add to your project - -```bash -cargo add willbe -``` - -### Try out from the repository - -``` shell test -git clone https://github.com/Wandalen/wTools -cd wTools -cd examples/willbe_trivial -cargo run -``` diff --git a/module/blank/willbe_old/src/willbe_old/commands/each.rs b/module/blank/willbe_old/src/willbe_old/commands/each.rs deleted file mode 100644 index 21a81accae..0000000000 --- a/module/blank/willbe_old/src/willbe_old/commands/each.rs +++ /dev/null @@ -1,119 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use std::{ env, rc::Rc, cell::RefCell, }; - use wca:: - { - Args, Props, - Context, - }; - use error_tools::{ Result, BasicError }; - - use crate::protected::*; - use crate::commands::{ StartPointStack, EndPointStack }; - - #[ derive( Clone ) ] - struct PackagesIterator - ( - Rc< RefCell< dyn Iterator< Item = Package > > > - ); - - impl< I > From< I > for PackagesIterator - where - I : Iterator< Item = Package > + 'static - { - fn from( iter : I ) -> Self - { - Self( Rc::new( RefCell::new( iter ) ) ) - } - } - - impl PackagesIterator - { - fn next( &self ) -> Option< Package > - { - self.0.borrow_mut().next() - } - } - - /// Each command declaration - pub fn each_command() -> wca::Command - { - wca::Command::former() - .hint( "Iterate over packages" ) - .long_hint( "Iterates over all packages from current directory" ) - .phrase( "each" ) - .form() - } - - /// - /// Iterate over packages - /// - - pub fn each( _ : ( Args, Props ), mut ctx : Context ) -> Result< () > - { - println!( "[LOG] Called each command" ); - - // Already iterate - if let Some( iter ) = ctx.get_mut::< PackagesIterator >() - { - // It isn't end of iterator - let is_current_package_exists = ctx.get_ref::< Option< Package > >().and_then( | p | p.as_ref() ).is_some(); - let next_package = iter.next(); - if is_current_package_exists && next_package.is_some() - { - ctx.insert( next_package ); - } - else - { - ctx.remove::< Option< Package > >(); - ctx.remove::< PackagesIterator >(); - // At the end of each - go to first endpoint - // remove self from startpoints - ctx.get_mut::< StartPointStack >().and_then( | sp | sp.pop() ); - // go to endpoint - let prog_state = ctx.get_mut::< wca::RuntimeState >() - .ok_or_else( || BasicError::new( "Have no Program State" ) )?; - - ctx.get_mut::< EndPointStack >() - .and_then( | ep | ep.pop() ) - .map( | point | prog_state.pos = point ) - //? 
What is better - panic or go to the end of the program when endpoints doesn't exists for any reason - .unwrap_or_else( || prog_state.pos = usize::MAX ); - } - } - else - { - // Begin iteration - let current_path = env::current_dir().unwrap(); - let mut packages_iter = packages_iterate( current_path ); - - let package = packages_iter.next(); - - // But anyway program must found the end of `.each` - if package.is_none() - { - println!( "Any package was found at current directory" ); - } - - // Add current package and the iterator to context - ctx.insert( package ); - ctx.insert::< PackagesIterator >( packages_iter.into() ); - - // Start point to previous instruction( back to current ) - let prog_state = ctx.get_ref::< wca::RuntimeState >() - .ok_or_else( || BasicError::new( "Have no Program State" ) )?; - ctx.get_or_default::< StartPointStack >().push( prog_state.pos - 1 ); - } - - Ok( () ) - } -} - -// - -crate::mod_interface! -{ - prelude use each_command; - prelude use each; -} diff --git a/module/blank/willbe_old/src/willbe_old/commands/end.rs b/module/blank/willbe_old/src/willbe_old/commands/end.rs deleted file mode 100644 index b7931a06dd..0000000000 --- a/module/blank/willbe_old/src/willbe_old/commands/end.rs +++ /dev/null @@ -1,61 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use wca:: - { - Args, Props, - Context, - }; - use error_tools::{ Result, BasicError }; - - use crate::commands::{ StartPointStack, EndPointStack }; - - // ! TODO: Remove this command somehow - - /// End command declaration - pub fn end_command() -> wca::Command - { - wca::Command::former() - .hint( "Command that is end of a block or a program" ) - .phrase( "end" ) - .form() - } - - /// - /// End of loop/program - /// - - pub fn end( _ : ( Args, Props ), ctx : Context ) -> Result< () > - { - println!( "[LOG] end called" ); - - if let Some( startpoints ) = ctx.get_ref::< StartPointStack >() - { - if let Some( point ) = startpoints.last() - { - let prog_state = ctx.get_mut::< wca::RuntimeState >() - .ok_or_else( || BasicError::new( "Have no Program State" ) )?; - - let endpoints = ctx.get_or_default::< EndPointStack >(); - // if has no point at current instruction - push it - if endpoints.last() != Some( &( prog_state.pos - 1 ) ) - { - endpoints.push( prog_state.pos - 1 ); - } - - // Go to start point - prog_state.pos = *point; - } - } - - Ok( () ) - } -} - -// - -crate::mod_interface! -{ - prelude use end_command; - prelude use end; -} diff --git a/module/blank/willbe_old/src/willbe_old/commands/init.rs b/module/blank/willbe_old/src/willbe_old/commands/init.rs deleted file mode 100644 index ddfa273b42..0000000000 --- a/module/blank/willbe_old/src/willbe_old/commands/init.rs +++ /dev/null @@ -1,43 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use crate::commands; - - /// - /// Form CA commands grammar. - /// - - pub fn grammar_form() -> Vec< wca::Command > - { - vec! - [ - commands::each::each_command(), - commands::package::info::info_command(), - commands::package::publish::publish_command(), - commands::end::end_command(), - ] - } - - /// - /// Form CA commands executor. 
- /// - - pub fn executor_form() -> std::collections::HashMap< String, wca::Routine > - { - std::collections::HashMap::from( - [ - ( "each".to_owned(), wca::Routine::new_with_ctx( commands::each::each ) ), - ( "crate.info".to_owned(), wca::Routine::new_with_ctx( commands::package::info::info ) ), - ( "crate.publish".to_owned(), wca::Routine::new_with_ctx( commands::package::publish::publish ) ), - ( "end".to_owned(), wca::Routine::new_with_ctx( commands::end::end ) ), - ]) - } -} - -// - -crate::mod_interface! -{ - prelude use grammar_form; - prelude use executor_form; -} diff --git a/module/blank/willbe_old/src/willbe_old/commands/mod.rs b/module/blank/willbe_old/src/willbe_old/commands/mod.rs deleted file mode 100644 index 5f0d00ec1c..0000000000 --- a/module/blank/willbe_old/src/willbe_old/commands/mod.rs +++ /dev/null @@ -1,66 +0,0 @@ -crate::mod_interface! -{ - /// Init aggregator commands. - prelude mod init; - - /// Works with crates - prelude mod package; - - /// Iterate over subject - prelude mod each; - - /// End of loop/program - prelude mod end; - - protected use super::init::protected::*; - - protected use super::private::StartPointStack; - protected use super::private::EndPointStack; -} - -mod private -{ - /// Allow to go back to the iterator - #[ derive( Debug, Default, Clone ) ] - pub struct StartPointStack( Vec< usize > ); - - impl std::ops::Deref for StartPointStack - { - type Target = Vec< usize >; - - fn deref( &self ) -> &Self::Target - { - &self.0 - } - } - - impl std::ops::DerefMut for StartPointStack - { - fn deref_mut( &mut self ) -> &mut Self::Target - { - &mut self.0 - } - } - - /// Allow to go back to the end - #[ derive( Debug, Default, Clone ) ] - pub struct EndPointStack( Vec< usize > ); - - impl std::ops::Deref for EndPointStack - { - type Target = Vec< usize >; - - fn deref( &self ) -> &Self::Target - { - &self.0 - } - } - - impl std::ops::DerefMut for EndPointStack - { - fn deref_mut( &mut self ) -> &mut Self::Target - { - &mut self.0 - } - } -} diff --git a/module/blank/willbe_old/src/willbe_old/commands/package/info.rs b/module/blank/willbe_old/src/willbe_old/commands/package/info.rs deleted file mode 100644 index 30121caf50..0000000000 --- a/module/blank/willbe_old/src/willbe_old/commands/package/info.rs +++ /dev/null @@ -1,80 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use crate::protected::*; - use std::env; - use wca:: - { - Args, Props, - Context, - }; - use error_tools::{ Result, for_app::format_err }; - - /// Info command declaration - pub fn info_command() -> wca::Command - { - wca::Command::former() - .hint( "Prints information about package" ) - .long_hint( "Prints information about package at current directory" ) - .phrase( "crate.info" ) - .form() - } - - /// - /// Prints information about package - /// - - pub fn info( _ : ( Args, Props ), ctx : Context ) -> Result< () > - { - println!( "[LOG] Called info command" ); - - // Get package from context or try to read package at current directory - let package = match ctx.get_ref::< Option< Package > >() - { - Some( Some( package ) ) => package.to_owned(), - None => - { - let path = env::current_dir().unwrap().to_owned(); - Package::try_from( path ) - .map_err( | _ | format_err!( "Package not found at current directory" ) )? - } - _ => return Ok( () ) - }; - - let info = PackageMetadata::try_from( package ) - .map_err( | _ | format_err!( "Can not parse package metadata" ) )?; - let info = info.all().to_owned(); - - println! 
- ( - r#" -Name: "{}" -Version: "{}" -Description: "{}" -Documentation: "{}" -License: "{}" -Readme: "{}" -Dependencies: {:?} -Location: "{}" - "#, - info.name, - info.version, - info.description.unwrap_or_else( || "Not found".to_string() ), - info.documentation.unwrap_or_else( || "Not found".to_string() ), - info.license.unwrap_or_else( || "Not found".to_string() ), - info.readme.map( String::from ).unwrap_or_else( || "Not found".to_string() ), - info.dependencies.iter().map( | d | &d.name ).collect::< Vec< _ > >(), - info.manifest_path.parent().unwrap() - ); - - Ok( () ) - } -} - -// - -crate::mod_interface! -{ - prelude use info_command; - prelude use info; -} diff --git a/module/blank/willbe_old/src/willbe_old/commands/package/mod.rs b/module/blank/willbe_old/src/willbe_old/commands/package/mod.rs deleted file mode 100644 index 777f955a8c..0000000000 --- a/module/blank/willbe_old/src/willbe_old/commands/package/mod.rs +++ /dev/null @@ -1,8 +0,0 @@ -crate::mod_interface! -{ - /// Information about package - prelude mod info; - - /// Publish package - prelude mod publish; -} diff --git a/module/blank/willbe_old/src/willbe_old/commands/package/publish.rs b/module/blank/willbe_old/src/willbe_old/commands/package/publish.rs deleted file mode 100644 index 622f34bbfb..0000000000 --- a/module/blank/willbe_old/src/willbe_old/commands/package/publish.rs +++ /dev/null @@ -1,66 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use crate::protected::*; - use std::env; - use wca:: - { - Args, Props, - Context, - }; - use error_tools::{ Result, for_app::format_err }; - - /// Publish command declaration - pub fn publish_command() -> wca::Command - { - wca::Command::former() - .hint( "Publish a package" ) - .long_hint( "Validate, runs tests and publish a package" ) - .phrase( "crate.publish" ) - .form() - } - - /// - /// Verify and publish a package - /// - - pub fn publish( _ : ( Args, Props ), ctx : Context ) -> Result< () > - { - println!( "[LOG] Called publish command" ); - - // Get package from context or try to read package at current directory - let package = match ctx.get_ref::< Option< Package > >() - { - Some( Some( package ) ) => package.to_owned(), - None => - { - let path = env::current_dir().unwrap().to_owned(); - Package::try_from( path ) - .map_err( | _ | format_err!( "Package not found at current directory" ) )? - } - _ => return Ok( () ) - }; - - let info = PackageMetadata::try_from( package ) - .map_err( | _ | format_err!( "Can not parse package metadata" ) )?; - - println! - ( - "=== Verification ===\nLicense: {}\nReadme: {}\nDocumentation: {}\nTests: {}", - if info.has_license() { "Yes" } else { "No" }, - if info.has_readme() { "Yes" } else { "No" }, - if info.has_documentation() { "Yes" } else { "No" }, - if info.is_tests_passed() { "Passed" } else { "Failed" } - ); - - Ok( () ) - } -} - -// - -crate::mod_interface! -{ - prelude use publish_command; - prelude use publish; -} diff --git a/module/blank/willbe_old/src/willbe_old/core/entities/mod.rs b/module/blank/willbe_old/src/willbe_old/core/entities/mod.rs deleted file mode 100644 index 768053c884..0000000000 --- a/module/blank/willbe_old/src/willbe_old/core/entities/mod.rs +++ /dev/null @@ -1,10 +0,0 @@ - -crate::mod_interface! 
-{ - /// Package - layer package; - /// Workspace - layer workspace; - /// Auxiliary entities - layer utility; -} diff --git a/module/blank/willbe_old/src/willbe_old/core/entities/package/metadata.rs b/module/blank/willbe_old/src/willbe_old/core/entities/package/metadata.rs deleted file mode 100644 index 3ba8640877..0000000000 --- a/module/blank/willbe_old/src/willbe_old/core/entities/package/metadata.rs +++ /dev/null @@ -1,119 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use std::path::PathBuf; - - use cargo_metadata::MetadataCommand; - - use error_tools::{ BasicError, err }; - - use crate::Package; - - /// Package metadata - #[ derive( Debug ) ] - pub struct PackageMetadata - { - package : Package, - metadata : cargo_metadata::Package, - } - - impl TryFrom< PathBuf > for PackageMetadata - { - type Error = BasicError; - - fn try_from( value : PathBuf ) -> Result< Self, Self::Error > - { - let package = Package::try_from( value )?; - package.try_into() - } - } - - impl TryFrom< Package > for PackageMetadata - { - type Error = BasicError; - - fn try_from( value : Package ) -> Result< Self, Self::Error > - { - let path = value.path().join( "Cargo.toml" ); - let meta = MetadataCommand::new() - .manifest_path( &path ) - .no_deps() - .exec() - .map_err( | _ | err!( "Can not read metadata" ) )?; - - let metadata = meta.packages.iter() - .find( | p | p.manifest_path == path ) - .ok_or_else( || err!( "Can not parse metadata for current package" ) )? - .to_owned(); - Ok( Self - { - package : value, - metadata - }) - } - } - impl PackageMetadata - { - /// Returns name - pub fn name( &self ) -> &String - { - &self.metadata.name - } - - /// Returns version - pub fn version( &self ) -> String - { - self.metadata.version.to_string() - } - } - - impl PackageMetadata - { - /// Returns license from `Cargo.toml` - pub fn license( &self ) -> Option< String > - { - self.metadata.license.to_owned() - } - - /// Returns path to license file if exists - pub fn license_file( &self ) -> Option< PathBuf > - { - self.metadata.license_file.as_ref().map( | r | r.to_owned().into_std_path_buf() ) - } - - /// Returns path to Readme file - pub fn readme( &self ) -> Option< PathBuf > - { - self.metadata.readme.as_ref().map( | r | r.to_owned().into_std_path_buf() ) - } - - /// Returns url to documentation if it is exists - pub fn documentation( &self ) -> Option< String > - { - self.metadata.documentation.to_owned() - } - } - - impl PackageMetadata - { - /// Returns reference to Package - pub fn as_package( &self ) -> &Package - { - &self.package - } - - /// Returns all metadata - pub fn all( &self ) -> &cargo_metadata::Package - { - &self.metadata - } - } - -} - -// - -crate::mod_interface! -{ - prelude use PackageMetadata; -} diff --git a/module/blank/willbe_old/src/willbe_old/core/entities/package/mod.rs b/module/blank/willbe_old/src/willbe_old/core/entities/package/mod.rs deleted file mode 100644 index 62bf81332d..0000000000 --- a/module/blank/willbe_old/src/willbe_old/core/entities/package/mod.rs +++ /dev/null @@ -1,10 +0,0 @@ - -crate::mod_interface! 
-{ - /// Package - layer package; - /// Metadata - layer metadata; - /// Verification methods - layer verification; -} diff --git a/module/blank/willbe_old/src/willbe_old/core/entities/package/package.rs b/module/blank/willbe_old/src/willbe_old/core/entities/package/package.rs deleted file mode 100644 index a455cd1cc1..0000000000 --- a/module/blank/willbe_old/src/willbe_old/core/entities/package/package.rs +++ /dev/null @@ -1,53 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use std::path::PathBuf; - use toml::Value; - - use error_tools::{ BasicError, err }; - - /// Package - #[ derive( Debug, Clone ) ] - pub struct Package - { - path : PathBuf, - } - - impl TryFrom< PathBuf > for Package - { - type Error = BasicError; - - fn try_from( path : PathBuf ) -> Result< Self, Self::Error > - { - let config_str = std::fs::read_to_string( path.join( "Cargo.toml" ) ) - .map_err( | _ | err!( "Can not read \"Cargo.toml\"" ) )?; - let toml = config_str.parse::< Value >() - .map_err( | _ | err!( "Can not parse \"Cargo.toml\"" ) )?; - - if toml.get( "package" ).is_some() - { - Ok( Self{ path } ) - } - else - { - Err( err!( "\"package\" into \"Cargo.toml\" not found" ) ) - } - } - } - - impl Package - { - /// Gets path of package - pub fn path( &self ) -> &PathBuf - { - &self.path - } - } -} - -// - -crate::mod_interface! -{ - prelude use Package; -} diff --git a/module/blank/willbe_old/src/willbe_old/core/entities/package/verification.rs b/module/blank/willbe_old/src/willbe_old/core/entities/package/verification.rs deleted file mode 100644 index 0098940074..0000000000 --- a/module/blank/willbe_old/src/willbe_old/core/entities/package/verification.rs +++ /dev/null @@ -1,65 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use std::process::Command; - - use crate::PackageMetadata; - - /// All package verification methods - pub trait Verification - { - /// Do all checks - fn check_all( &self ) -> bool - { - self.has_readme() - && self.has_documentation() - && self.has_license() - && self.is_tests_passed() - } - /// Check if readme exists - fn has_readme( &self ) -> bool; - /// Check if documentation exists - fn has_documentation( &self ) -> bool; - /// Check if the package has a license - fn has_license( &self ) -> bool; - /// Check if all tests is passed - fn is_tests_passed( &self ) -> bool; - } - - impl Verification for PackageMetadata - { - fn has_readme( &self ) -> bool - { - self.readme().is_some() - } - - fn has_documentation( &self ) -> bool - { - self.documentation().is_some() - } - - fn has_license( &self ) -> bool - { - self.license().is_some() - || - self.license_file().is_some() - } - - fn is_tests_passed( &self ) -> bool - { - let tests_output = Command::new( "cargo" ) - .current_dir( self.as_package().path() ) - .args([ "test" ]) - .output().unwrap(); - - tests_output.status.success() - } - } -} - -// - -crate::mod_interface! -{ - prelude use Verification; -} diff --git a/module/blank/willbe_old/src/willbe_old/core/entities/utility.rs b/module/blank/willbe_old/src/willbe_old/core/entities/utility.rs deleted file mode 100644 index 3968aad1af..0000000000 --- a/module/blank/willbe_old/src/willbe_old/core/entities/utility.rs +++ /dev/null @@ -1,124 +0,0 @@ -/// Internal namespace. 
-pub( crate ) mod private -{ - use crate::{ Package, PackageMetadata }; - use iter_tools::prelude::*; - - /// Represent with which order strategy to iterate over packages - #[ derive( Debug, Clone, Copy ) ] - pub enum OrderStrategy - { - /// Alphabetical by package name - Alphabetical, - /// Based on their dependencies - Topological, - /// Shuffle packages - Random, - } - - /// This trait defines a method to sort packages by selected order strategy - pub trait Ordered : Iterator - where - Vec< Package > : FromIterator< < Self as Iterator >::Item > - { - /// Collect all iterator elements into a sorted vector - fn ordered( self, order : OrderStrategy ) -> Vec< Package > - where - Self : Sized - { - let v : Vec< Package > = self.collect(); - - match order - { - OrderStrategy::Alphabetical => alphabetical( v ), - OrderStrategy::Topological => toposort( v ), - OrderStrategy::Random => shuffle( v ), - } - } - - /// Returns iterator over sorted Packages - fn ordered_iter( self, order : OrderStrategy ) -> Box< dyn Iterator< Item = Package > > - where - Self : Sized - { - Box::new( self.ordered( order ).into_iter() ) - } - } - - fn alphabetical( packages : Vec< Package > ) -> Vec< Package > - { - packages.iter().cloned() - .filter_map( | p | - { - PackageMetadata::try_from( p ).ok() - }) - - .sorted_by_key( | meta | meta.name().to_owned() ) - - .map( | meta | meta.as_package().to_owned() ) - .collect_vec() - } - - fn toposort( packages : Vec< Package > ) -> Vec< Package > - { - use petgraph::Graph; - use cargo_metadata::DependencyKind; - use std::collections::HashMap; - - let ( deps, package_map ) = packages.iter() - .filter_map( | p | PackageMetadata::try_from( p.to_owned() ).ok() ) - .fold( ( Graph::new(), HashMap::new() ), | ( mut deps, mut packages ), meta | - { - packages.insert( meta.name().to_owned(), meta.as_package().to_owned() ); - - let root_node = if let Some( node ) = deps.node_indices().find( | i | deps[ *i ] == meta.name().to_owned() ) - { node } - else - { deps.add_node( meta.name().to_owned() ) }; - - for dep in &meta.all().dependencies - { - if dep.path.is_some() && dep.kind != DependencyKind::Development - { - let dep_node = if let Some( node ) = deps.node_indices().find( | i | deps[ *i ] == dep.name ) - { node } - else - { deps.add_node( dep.name.to_owned() ) }; - - deps.add_edge( root_node, dep_node, () ); - } - } - - ( deps, packages ) - }); - - let sorted = petgraph::algo::toposort( &deps, None ).unwrap(); - sorted.iter() - .rev() - .map( | &dep_idx | deps.node_weight( dep_idx ).unwrap().to_owned() ) - .filter_map( | name | package_map.get( &name ) ) - .cloned().collect::< Vec< Package > >() - } - - fn shuffle( mut packages : Vec< Package > ) -> Vec< Package > - { - use rand::seq::SliceRandom; - - let mut rng = rand::thread_rng(); - packages.shuffle( &mut rng ); - - packages - } - - - impl< T : ?Sized > Ordered for T - where T : Iterator, Vec< Package >: FromIterator< < T as Iterator >::Item > {} -} - -// - -crate::mod_interface! -{ - prelude use OrderStrategy; - prelude use Ordered; -} diff --git a/module/blank/willbe_old/src/willbe_old/core/entities/workspace.rs b/module/blank/willbe_old/src/willbe_old/core/entities/workspace.rs deleted file mode 100644 index 941f65753b..0000000000 --- a/module/blank/willbe_old/src/willbe_old/core/entities/workspace.rs +++ /dev/null @@ -1,95 +0,0 @@ -/// Internal namespace. 
-pub( crate ) mod private -{ - use std::path::PathBuf; - use toml::Value; - - use error_tools::{ BasicError, err }; - - use crate::{ Package, unique_walk }; - - /// Workspace - #[ derive( Debug, Clone ) ] - pub struct Workspace - { - path : PathBuf, - } - - impl TryFrom< PathBuf > for Workspace - { - type Error = BasicError; - - fn try_from( path : PathBuf ) -> Result< Self, Self::Error > - { - let config_str = std::fs::read_to_string( path.join( "Cargo.toml" ) ) - .map_err( | _ | err!( "Can not read \"Cargo.toml\"" ) )?; - let toml = config_str.parse::< Value >() - .map_err( | _ | err!( "Can not parse \"Cargo.toml\"" ) )?; - - if toml.get( "workspace" ).is_some() - { - Ok( Self{ path } ) - } - else - { - Err( err!( "\"workspace\" into \"Cargo.toml\" not found" ) ) - } - } - } - - impl Workspace - { - /// Gets list of packages into workspace - pub fn packages( &self ) -> Vec< Package > - { - let config_str = std::fs::read_to_string( self.path.join( "Cargo.toml" ) ).unwrap(); - let toml = config_str.parse::< Value >().unwrap(); - - // iterate over members into workspace - toml[ "workspace" ] - // members can be doesn't setted - .get( "members" ) - .unwrap_or( &Value::Array( vec![] ) ).as_array() - .unwrap_or( &vec![] ) - .iter() - // fold all packages from members - .fold( vec![], | mut acc, member | - { - let packages_paths = unique_walk - ( - self.path.to_owned(), - &[ member.as_str().unwrap().to_string() ] - ); - - packages_paths - .fold( &mut acc, | acc, package_path | - { - if let Ok( package ) = Package::try_from( package_path.to_owned() ) - { - acc.push( package ); - } - // workspaces into workspace - else if let Ok( workspace ) = Workspace::try_from( package_path ) - { - acc.extend( workspace.packages() ); - } - acc - }); - acc - }) - } - - /// iterate over packages into workspace - pub fn packages_iterate( &self ) -> impl Iterator< Item = Package > - { - self.packages().into_iter() - } - } -} - -// - -crate::mod_interface! -{ - prelude use Workspace; -} diff --git a/module/blank/willbe_old/src/willbe_old/core/iterators.rs b/module/blank/willbe_old/src/willbe_old/core/iterators.rs deleted file mode 100644 index 191d28c61f..0000000000 --- a/module/blank/willbe_old/src/willbe_old/core/iterators.rs +++ /dev/null @@ -1,36 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use std::path::PathBuf; - use crate::*; - - /// Iterate over all packages by PathBuf - pub fn packages_iterate( path : PathBuf ) -> Box< dyn Iterator< Item = Package > > - { - if let Ok( package ) = Package::try_from( path.to_owned() ) - { - return Box::new( Some( package ).into_iter() ) - } - - if let Ok( workspace ) = Workspace::try_from( path ) - { - return Box::new( workspace.packages_iterate() ) - } - - Box::new( None.into_iter() ) - } - - /// Iterate over workspaces iterator - pub fn workspaces_packages_iterate( workspaces : impl Iterator< Item = Workspace > ) -> impl Iterator< Item = Package > - { - workspaces.flat_map( move | workspace | workspace.packages_iterate() ) - } -} - -// - -crate::mod_interface! -{ - prelude use packages_iterate; - prelude use workspaces_packages_iterate; -} diff --git a/module/blank/willbe_old/src/willbe_old/core/mod.rs b/module/blank/willbe_old/src/willbe_old/core/mod.rs deleted file mode 100644 index 23c69db43e..0000000000 --- a/module/blank/willbe_old/src/willbe_old/core/mod.rs +++ /dev/null @@ -1,9 +0,0 @@ - -crate::mod_interface! 
-{ - /// Entities of the program - layer entities; - - /// Iterators over packages - layer iterators; -} diff --git a/module/blank/willbe_old/src/willbe_old/files.rs b/module/blank/willbe_old/src/willbe_old/files.rs deleted file mode 100644 index 19e480b1c7..0000000000 --- a/module/blank/willbe_old/src/willbe_old/files.rs +++ /dev/null @@ -1,34 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use std::path::{ Path, PathBuf }; - - use path_absolutize::*; - - use iter_tools::Itertools; - - /// - /// Iterate over unique files in directory using globs - /// - - pub fn unique_walk< P, S >( base_dir : P, patterns : &[ S ] ) -> impl Iterator< Item = PathBuf > - where - P: AsRef< Path >, - S: AsRef< str >, - { - globwalk::GlobWalkerBuilder::from_patterns( base_dir, patterns ) - .follow_links( true ) - .build().unwrap() - .into_iter() - .filter_map( Result::ok ) - .filter_map( | s | s.path().absolutize().map( | p | p.to_path_buf() ).ok() ) - .unique() - } -} - -// - -crate::mod_interface! -{ - prelude use unique_walk; -} diff --git a/module/blank/willbe_old/src/willbe_old/willbe_entry.rs b/module/blank/willbe_old/src/willbe_old/willbe_entry.rs deleted file mode 100644 index a3c1823a48..0000000000 --- a/module/blank/willbe_old/src/willbe_old/willbe_entry.rs +++ /dev/null @@ -1,34 +0,0 @@ -#![ cfg_attr( feature = "no_std", no_std ) ] -#![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] -#![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] -#![ doc( html_root_url = "https://docs.rs/wpublisher/" ) ] -// #![ deny( rust_2018_idioms ) ] -// #![ deny( missing_debug_implementations ) ] -// #![ deny( missing_docs ) ] - -//! -//! ___. -//! - -#![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] - -#[ allow( unused_imports ) ] -use ::willbe_old::*; - -// - -#[ cfg( not( feature = "no_std" ) ) ] -fn main() -> error_tools::Result< () > -{ - let args = std::env::args().skip( 1 ).collect::< Vec< String > >(); - - let ca = wca::CommandsAggregator::former() - .grammar( commands::grammar_form() ) - .executor( commands::executor_form() ) - .perform(); - - Ok( ca.perform( if args.is_empty() { "".to_owned() } else { args.join( " " ) + " .end" } )? ) -} - -#[ cfg( feature = "no_std" ) ] -fn main() {} diff --git a/module/blank/willbe_old/src/willbe_old/willbe_lib.rs b/module/blank/willbe_old/src/willbe_old/willbe_lib.rs deleted file mode 100644 index bfbcd09466..0000000000 --- a/module/blank/willbe_old/src/willbe_old/willbe_lib.rs +++ /dev/null @@ -1,36 +0,0 @@ -#![ cfg_attr( feature = "no_std", no_std ) ] -#![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] -#![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] -#![ doc( html_root_url = "https://docs.rs/_blank/latest/_blank/" ) ] -// #![ deny( rust_2018_idioms ) ] -// #![ deny( missing_debug_implementations ) ] -// #![ deny( missing_docs ) ] - -// #![ feature( type_name_of_val ) ] -// #![ feature( trace_macros ) ] - -//! -//! ___. -//! - -#![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] - -// -use mod_interface::mod_interface; - -mod_interface! 
-{ - /// Features of Application Programming Interface that 100% should be implemented - #[ cfg( not( feature = "no_std" ) ) ] - layer core; - - /// Library of utility to work with commands. - #[ cfg( not( feature = "no_std" ) ) ] - layer commands; - - /// Operate over files. - #[ cfg( not( feature = "no_std" ) ) ] - layer files; - - protected( crate ) use ::wtools::prelude::*; -} diff --git a/module/blank/willbe_old/tests/smoke_test.rs b/module/blank/willbe_old/tests/smoke_test.rs deleted file mode 100644 index 2678ac06fe..0000000000 --- a/module/blank/willbe_old/tests/smoke_test.rs +++ /dev/null @@ -1,15 +0,0 @@ - - -#[ test ] -fn local_smoke_test() -{ - ::test_tools::smoke_test_for_local_run(); -} - - -#[ test ] -#[ ignore ] -fn published_smoke_test() -{ - ::test_tools::smoke_test_for_published_run(); -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/empty/.gitignore b/module/blank/willbe_old/tests/willbe_old/_asset/empty/.gitignore deleted file mode 100644 index 44c5ea8fa7..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/empty/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -# Ignore everything in this directory -* -# Except this file -!.gitignore \ No newline at end of file diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/package/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/package/Cargo.toml deleted file mode 100644 index 7f16e6e19e..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/package/Cargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[package] -name = "willbe_verified_package" -version = "0.1.0" -edition = "2021" -documentation = "Documentation text" -license = "MIT" -readme = "Readme.md" - -[dependencies] diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/package/License b/module/blank/willbe_old/tests/willbe_old/_asset/package/License deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/package/Readme.md b/module/blank/willbe_old/tests/willbe_old/_asset/package/Readme.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/package/src/lib.rs b/module/blank/willbe_old/tests/willbe_old/_asset/package/src/lib.rs deleted file mode 100644 index 1ccf57060e..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/package/src/lib.rs +++ /dev/null @@ -1,15 +0,0 @@ -/// Adds two numbers and returns result -pub fn add(left: usize, right: usize) -> usize { - left + right -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/package/src/main.rs b/module/blank/willbe_old/tests/willbe_old/_asset/package/src/main.rs deleted file mode 100644 index 80a1832bfa..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/package/src/main.rs +++ /dev/null @@ -1,3 +0,0 @@ -fn main() { - println!("Hello, world!"); -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/Cargo.toml deleted file mode 100644 index 766bac3bee..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "willbe_no_verified_package" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] 
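Editor's note: the deleted `toposort` helper above orders packages so that dependencies come before their dependents (it builds a `petgraph` graph keyed by package name, with edges from each package to its path dependencies, then reverses the topological sort). A minimal, self-contained sketch of that technique follows; it is not part of the patch, it assumes `petgraph` as a dependency (as the deleted helper did), and the package names and edges are hypothetical.

```rust
// Sketch of the dependency-ordering technique used by the deleted `toposort` helper.
// Edges point from dependent to dependency, so the topological order is reversed
// to obtain a build/publish order with dependencies first.
use std::collections::HashMap;
use petgraph::Graph;

fn main()
{
  // name -> node index, so each package appears in the graph exactly once
  let mut graph : Graph< &str, () > = Graph::new();
  let mut nodes = HashMap::new();

  // hypothetical packages: module2 depends on module1, module3 depends on module2
  for name in [ "module1", "module2", "module3" ]
  {
    let idx = graph.add_node( name );
    nodes.insert( name, idx );
  }
  for ( dependent, dependency ) in [ ( "module2", "module1" ), ( "module3", "module2" ) ]
  {
    graph.add_edge( nodes[ dependent ], nodes[ dependency ], () );
  }

  // reversed toposort yields dependencies before dependents
  let order : Vec< &str > = petgraph::algo::toposort( &graph, None )
    .expect( "dependency graph should be acyclic" )
    .into_iter()
    .rev()
    .map( | idx | graph[ idx ] )
    .collect();

  assert_eq!( order, vec![ "module1", "module2", "module3" ] );
  println!( "{order:?}" );
}
```

The same reversal is what lets the deleted `Ordered::ordered( OrderStrategy::Topological )` produce a publish order for the `workspace_with_deps` test asset below, where `module3` depends on `module2`, which depends on `module1`.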
diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/src/lib.rs b/module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/src/lib.rs deleted file mode 100644 index 20cb385493..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/src/lib.rs +++ /dev/null @@ -1,8 +0,0 @@ -#[cfg(test)] -mod tests { - #[test] - fn failed() - { - assert!( false ); - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/src/main.rs b/module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/src/main.rs deleted file mode 100644 index 80a1832bfa..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/package_no_verified/src/main.rs +++ /dev/null @@ -1,3 +0,0 @@ -fn main() { - println!("Hello, world!"); -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/Cargo.toml deleted file mode 100644 index ad7fb73a88..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[workspace] -resolver = "2" -members = [ - "module/*", -] -exclude = [ - "*", -] diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module1/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module1/Cargo.toml deleted file mode 100644 index a183e1b93a..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module1/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "willbe_with_deps_module1" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module1/src/lib.rs b/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module1/src/lib.rs deleted file mode 100644 index 3b9acffd5a..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module1/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub fn add(left: usize, right: usize) -> usize { - left + right -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module2/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module2/Cargo.toml deleted file mode 100644 index e362e58a66..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module2/Cargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[package] -name = "willbe_with_deps_module2" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -willbe_with_deps_module1 = { version = "*", path = "../module1" } \ No newline at end of file diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module2/src/lib.rs b/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module2/src/lib.rs deleted file mode 100644 index 3b9acffd5a..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module2/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub fn add(left: usize, right: usize) -> usize { - left + right -} 
- -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module3/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module3/Cargo.toml deleted file mode 100644 index f0302e1626..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module3/Cargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[package] -name = "willbe_with_deps_module3" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] -willbe_with_deps_module2 = { version = "*", path = "../module2" } \ No newline at end of file diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module3/src/lib.rs b/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module3/src/lib.rs deleted file mode 100644 index 3b9acffd5a..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspace_with_deps/module/module3/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub fn add(left: usize, right: usize) -> usize { - left + right -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/Cargo.toml deleted file mode 100644 index c5fc8a2bc2..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/Cargo.toml +++ /dev/null @@ -1,9 +0,0 @@ -[workspace] -resolver = "2" -members = [ - "workspace1", - "workspace2", -] -exclude = [ - "*", -] diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/Cargo.toml deleted file mode 100644 index ad7fb73a88..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[workspace] -resolver = "2" -members = [ - "module/*", -] -exclude = [ - "*", -] diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module1/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module1/Cargo.toml deleted file mode 100644 index ca345d6e39..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module1/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "willbe_workspace1_module1" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module1/src/lib.rs b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module1/src/lib.rs deleted file mode 100644 index 3b9acffd5a..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module1/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub fn add(left: usize, right: usize) -> usize { - left + right -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module2/Cargo.toml 
b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module2/Cargo.toml deleted file mode 100644 index d7de79e725..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module2/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "willbe_workspace1_module2" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] \ No newline at end of file diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module2/src/lib.rs b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module2/src/lib.rs deleted file mode 100644 index 3b9acffd5a..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace1/module/module2/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub fn add(left: usize, right: usize) -> usize { - left + right -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/Cargo.toml deleted file mode 100644 index ad7fb73a88..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[workspace] -resolver = "2" -members = [ - "module/*", -] -exclude = [ - "*", -] diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module3/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module3/Cargo.toml deleted file mode 100644 index 1e5b37708b..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module3/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "willbe_workspace2_module3" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module3/src/lib.rs b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module3/src/lib.rs deleted file mode 100644 index 3b9acffd5a..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module3/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub fn add(left: usize, right: usize) -> usize { - left + right -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module4/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module4/Cargo.toml deleted file mode 100644 index ce7cc9e559..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module4/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "willbe_workspace2_module4" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module4/src/lib.rs b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module4/src/lib.rs deleted file mode 
100644 index 3b9acffd5a..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module4/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub fn add(left: usize, right: usize) -> usize { - left + right -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module5/Cargo.toml b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module5/Cargo.toml deleted file mode 100644 index ca174d8156..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module5/Cargo.toml +++ /dev/null @@ -1,8 +0,0 @@ -[package] -name = "willbe_workspace2_module5" -version = "0.1.0" -edition = "2021" - -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html - -[dependencies] diff --git a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module5/src/lib.rs b/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module5/src/lib.rs deleted file mode 100644 index 3b9acffd5a..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/_asset/workspaces/workspace2/module/module5/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -pub fn add(left: usize, right: usize) -> usize { - left + right -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn it_works() { - let result = add(2, 2); - assert_eq!(result, 4); - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/tests/from.rs b/module/blank/willbe_old/tests/willbe_old/tests/from.rs deleted file mode 100644 index 0b3a81d171..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/from.rs +++ /dev/null @@ -1,38 +0,0 @@ -use super::*; - -#[ test ] -fn from_empty_asset() -{ - let asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "empty" ) ); - let path = asset.path_buf(); - - let package = Package::try_from( path.to_owned() ); - assert!( package.is_err() ); - - let workspace = Workspace::try_from( path.to_owned() ); - assert!( workspace.is_err() ); -} - -#[ test ] -fn package_from_path() -{ - let asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "package" ) ); - let path = asset.path_buf(); - - let package = Package::try_from( path.to_owned() ); - - assert!( package.is_ok() ); - assert_eq!( *path, *package.unwrap().path() ); -} - -#[ test ] -fn workspace_from_path() -{ - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "package" ) ); - let package_path = package_asset.path_buf(); - assert!( Workspace::try_from( package_path.to_owned() ).is_err() ); - - let workspace_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1" ) ); - let workspace_path = workspace_asset.path_buf(); - assert!( Workspace::try_from( workspace_path.to_owned() ).is_ok() ); -} diff --git a/module/blank/willbe_old/tests/willbe_old/tests/integration/each.rs b/module/blank/willbe_old/tests/willbe_old/tests/integration/each.rs deleted file mode 100644 index 21ede7825a..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/integration/each.rs +++ /dev/null @@ -1,130 +0,0 @@ -use super::*; - -#[ test ] -fn many_workspaces_each_info() -> Result< (), Box< dyn std::error::Error > > -{ - let mut cmd = Command::cargo_bin( MODULE_NAME )?; - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces" ) ).copied(); - let package_path = package_asset.path_buf(); - - 
cmd.current_dir( package_path ); - cmd.arg( ".each .crate.info" ); - - cmd - .assert() - .success() - .stdout - ( - predicate::str::contains( "Name: \"willbe_workspace1_module1\"" ).count( 1 ) - .and - ( - predicate::str::contains( "Name: \"willbe_workspace1_module2\"" ).count( 1 ) - ) - .and - ( - predicate::str::contains( "Name: \"willbe_workspace2_module3\"" ).count( 1 ) - ) - .and - ( - predicate::str::contains( "Name: \"willbe_workspace2_module4\"" ).count( 1 ) - ) - .and - ( - predicate::str::contains( "Name: \"willbe_workspace2_module5\"" ).count( 1 ) - ) - ); - - Ok( () ) -} - -#[ test ] -fn workspace_each_info() -> Result< (), Box< dyn std::error::Error > > -{ - let mut cmd = Command::cargo_bin( MODULE_NAME )?; - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspace_with_deps" ) ).copied(); - let package_path = package_asset.path_buf(); - - cmd.current_dir( package_path ); - cmd.arg( ".each .crate.info" ); - - cmd - .assert() - .success() - .stdout - ( - predicate::str::contains( "Name: \"willbe_with_deps_module1\"" ).count( 1 ) - .and - ( - predicate::str::contains( "Name: \"willbe_with_deps_module2\"" ).count( 1 ) - ) - .and - ( - predicate::str::contains( "Name: \"willbe_with_deps_module3\"" ).count( 1 ) - ) - ); - - Ok( () ) -} - -#[ test ] -fn single_package_each_info() -> Result< (), Box< dyn std::error::Error > > -{ - let mut cmd = Command::cargo_bin( MODULE_NAME )?; - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "package" ) ).copied(); - let package_path = package_asset.path_buf(); - - cmd.current_dir( package_path ); - cmd.arg( ".each .crate.info" ); - - cmd - .assert() - .success() - .stdout - ( - predicate::str::contains( "Name: \"willbe_verified_package\"" ).count( 1 ) - ); - - Ok( () ) -} - -#[ test ] -fn empty_path_each_info() -> Result< (), Box< dyn std::error::Error > > -{ - let mut cmd = Command::cargo_bin( MODULE_NAME )?; - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "empty" ) ).copied(); - let package_path = package_asset.path_buf(); - - cmd.current_dir( package_path ); - cmd.arg( ".each .crate.info" ); - - cmd - .assert() - .success() - .stdout - ( - predicate::str::contains( "Any package was found at current directory" ) - ); - - Ok( () ) -} - -#[ test ] -fn another_command_after_each() -> Result< (), Box< dyn std::error::Error > > -{ - let mut cmd = Command::cargo_bin( MODULE_NAME )?; - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "package" ) ).copied(); - let package_path = package_asset.path_buf(); - - cmd.current_dir( package_path ); - cmd.arg( ".each .crate.info .end .crate.info" ); - - cmd - .assert() - .success() - .stdout - ( - predicate::str::contains( "Name: \"willbe_verified_package\"" ).count( 2 ) - ); - - Ok( () ) -} diff --git a/module/blank/willbe_old/tests/willbe_old/tests/integration/info.rs b/module/blank/willbe_old/tests/willbe_old/tests/integration/info.rs deleted file mode 100644 index f9957e9942..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/integration/info.rs +++ /dev/null @@ -1,78 +0,0 @@ -use super::*; - -#[ test ] -fn package_info() -> Result< (), Box< dyn std::error::Error > > -{ - let mut cmd = Command::cargo_bin( MODULE_NAME )?; - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "package" ) ).copied(); - let package_path = package_asset.path_buf(); - - cmd.current_dir( package_path ); - cmd.arg( ".crate.info" ); - - cmd - .assert() - .success() - .stdout - ( - predicate::str::contains( "Name: 
\"willbe_verified_package\"" ) - .and - ( - predicate::str::contains( "Version: \"0.1.0\"" ) - ) - .and - ( - predicate::str::contains( "Description: \"Not found\"" ) - ) - .and - ( - predicate::str::contains( "Documentation: \"Documentation text\"" ) - ) - .and - ( - predicate::str::contains( "License: \"MIT\"" ) - ) - .and - ( - predicate::str::contains( "Dependencies: []" ) - ) - ); - - Ok( () ) -} - -#[ test ] -fn workspace_path_info() -> Result< (), Box< dyn std::error::Error > > -{ - let mut cmd = Command::cargo_bin( MODULE_NAME )?; - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1" ) ).copied(); - let package_path = package_asset.path_buf(); - - cmd.current_dir( package_path ); - cmd.arg( ".crate.info" ); - - cmd - .assert() - .failure() - .stderr( predicate::str::contains( "Package not found at current directory" ) ); - - Ok( () ) -} - -#[ test ] -fn empty_path_info() -> Result< (), Box< dyn std::error::Error > > -{ - let mut cmd = Command::cargo_bin( MODULE_NAME )?; - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "empty" ) ).copied(); - let package_path = package_asset.path_buf(); - - cmd.current_dir( package_path ); - cmd.arg( ".crate.info" ); - - cmd - .assert() - .failure() - .stderr( predicate::str::contains( "Package not found at current directory" ) ); - - Ok( () ) -} diff --git a/module/blank/willbe_old/tests/willbe_old/tests/integration/mod.rs b/module/blank/willbe_old/tests/willbe_old/tests/integration/mod.rs deleted file mode 100644 index 6ac91b348b..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/integration/mod.rs +++ /dev/null @@ -1,8 +0,0 @@ -use super::*; -use assert_cmd::Command; -use predicates::prelude::*; - -const MODULE_NAME : &str = "willbe_old"; - -mod each; -mod info; diff --git a/module/blank/willbe_old/tests/willbe_old/tests/iterator.rs b/module/blank/willbe_old/tests/willbe_old/tests/iterator.rs deleted file mode 100644 index 38354cbc40..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/iterator.rs +++ /dev/null @@ -1,149 +0,0 @@ -use super::*; - -#[ test ] -fn over_workspace() -{ - use std::collections::HashSet; - - let workspace_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1" ) ); - let workspace_path = workspace_asset.path_buf(); - let workspace = Workspace::try_from( workspace_path.to_owned() ).unwrap(); - // `workspace.packages()` and `workspace.packages_iterate().collect::< Vec< _ > >()` is the same - let packages = workspace.packages(); - - let expected = HashSet::from([ "willbe_workspace1_module1".to_owned(), "willbe_workspace1_module2".to_owned() ]); - - assert_eq!( expected.len(), packages.len() ); - assert_eq! - ( - expected, - packages.iter().cloned() - .filter_map( | p | - { - PackageMetadata::try_from( p ).ok() - }) - .map( | meta | meta.name().to_owned() ) - .collect::< HashSet< _ > >() - ); -} - -#[ test ] -fn over_workspaces_iterator() -{ - use std::collections::HashSet; - - let assets = vec! 
- [ - Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1" ) ), - Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace2" ) ), - ]; - let workspaces = assets.iter() - .map( | asset | Workspace::try_from( asset.path_buf().to_owned() ) ) - .filter_map( Result::ok ) - .collect::< Vec< _ > >(); - - let packages = workspaces_packages_iterate( workspaces.into_iter() ) - .collect::< Vec< _ > >(); - - let expected = HashSet::from( - [ - "willbe_workspace1_module1".to_owned(), - "willbe_workspace1_module2".to_owned(), - - "willbe_workspace2_module3".to_owned(), - "willbe_workspace2_module4".to_owned(), - "willbe_workspace2_module5".to_owned(), - ]); - - assert_eq!( expected.len(), packages.len() ); - assert_eq! - ( - expected, - packages.iter().cloned() - .filter_map( | p | - { - PackageMetadata::try_from( p ).ok() - }) - .map( | meta | meta.name().to_owned() ) - .collect::< HashSet< _ > >() - ); -} - -#[ test ] -fn over_empty_path() -{ - let empty_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "empty" ) ); - let empty_path = empty_asset.path_buf(); - let packages = packages_iterate( empty_path.to_owned() ).collect::< Vec< _ > >(); - - assert!( packages.is_empty() ); -} - -#[ test ] -fn over_single_package_path() -{ - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "package" ) ).copied(); - let package_path = package_asset.path_buf(); - let package = packages_iterate( package_path.to_owned() ).collect::< Vec< _ > >(); - - assert_eq!( 1, package.len() ); - assert_eq!( "willbe_verified_package", PackageMetadata::try_from( package[ 0 ].clone() ).unwrap().all().name.as_str() ); -} - -#[ test ] -fn over_single_workspace_path() -{ - use std::collections::HashSet; - - let workspace_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1" ) ).copied(); - let workspace_path = workspace_asset.path_buf(); - let packages = packages_iterate( workspace_path.to_owned() ).collect::< Vec< _ > >(); - - let expected = HashSet::from([ "willbe_workspace1_module1".to_owned(), "willbe_workspace1_module2".to_owned() ]); - - assert_eq!( expected.len(), packages.len() ); - assert_eq! - ( - expected, - packages.iter().cloned() - .filter_map( | p | - { - PackageMetadata::try_from( p ).ok() - }) - .map( | meta | meta.name().to_owned() ) - .collect::< HashSet< _ > >() - ); -} - -#[ test ] -fn over_workspaces_root_path() -{ - use std::collections::HashSet; - - let many_workspaces_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces" ) ).copied(); - let many_workspaces_path = many_workspaces_asset.path_buf(); - let packages = packages_iterate( many_workspaces_path.to_owned() ).collect::< Vec< _ > >(); - - let expected = HashSet::from( - [ - "willbe_workspace1_module1".to_owned(), - "willbe_workspace1_module2".to_owned(), - - "willbe_workspace2_module3".to_owned(), - "willbe_workspace2_module4".to_owned(), - "willbe_workspace2_module5".to_owned(), - ]); - - assert_eq!( expected.len(), packages.len() ); - assert_eq! 
- ( - expected, - packages.iter().cloned() - .filter_map( | p | - { - PackageMetadata::try_from( p ).ok() - }) - .map( | meta | meta.name().to_owned() ) - .collect::< HashSet< _ > >() - ); -} diff --git a/module/blank/willbe_old/tests/willbe_old/tests/metadata.rs b/module/blank/willbe_old/tests/willbe_old/tests/metadata.rs deleted file mode 100644 index efd11d457c..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/metadata.rs +++ /dev/null @@ -1,24 +0,0 @@ -use super::*; - -#[ test ] -fn try_get_from_empty_asset() -{ - let empty_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "empty" ) ).copied(); - let empty_asset_path = empty_asset.path_buf(); - - let meta = PackageMetadata::try_from( empty_asset_path.to_owned() ); - - assert!( meta.is_err() ); -} - -#[ test ] -fn get_info() -{ - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "package" ) ).copied(); - let package_path = package_asset.path_buf(); - - let meta = PackageMetadata::try_from( package_path.to_owned() ).unwrap(); - - assert!( !meta.name().is_empty() ); - assert!( !meta.version().is_empty() ); -} diff --git a/module/blank/willbe_old/tests/willbe_old/tests/mod.rs b/module/blank/willbe_old/tests/willbe_old/tests/mod.rs deleted file mode 100644 index 20e46e0375..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/mod.rs +++ /dev/null @@ -1,11 +0,0 @@ -use super::*; -use utility::*; - -const ASSET_PATH : &str = concat!( env!("CARGO_MANIFEST_DIR"), "/tests/willbe_old/_asset" ); - -mod integration; -mod ordering; -mod from; -mod iterator; -mod verification; -mod metadata; diff --git a/module/blank/willbe_old/tests/willbe_old/tests/ordering/mod.rs b/module/blank/willbe_old/tests/willbe_old/tests/ordering/mod.rs deleted file mode 100644 index 683c5f15eb..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/ordering/mod.rs +++ /dev/null @@ -1,6 +0,0 @@ -use super::*; - -//? Write test for iteration over path has no reason because it delegates everything to another functions - -mod workspace; -mod through_workspaces; diff --git a/module/blank/willbe_old/tests/willbe_old/tests/ordering/through_workspaces.rs b/module/blank/willbe_old/tests/willbe_old/tests/ordering/through_workspaces.rs deleted file mode 100644 index 58ab7788a4..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/ordering/through_workspaces.rs +++ /dev/null @@ -1,74 +0,0 @@ -use super::*; - -#[ test ] -fn alphabetical() -{ - let workspace_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1" ) ).copied(); - let workspace_path = workspace_asset.path_buf(); - let first_workspace = Workspace::try_from( workspace_path.to_owned() ).unwrap(); - - let workspace_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace2" ) ).copied(); - let workspace_path = workspace_asset.path_buf(); - let second_workspace = Workspace::try_from( workspace_path.to_owned() ).unwrap(); - - let source = - [ - &second_workspace, // module3, module4, module 5 - &first_workspace, // module1, module2 - ]; - - let expected = - [ - "willbe_workspace1_module1", - "willbe_workspace1_module2", - - "willbe_workspace2_module3", - "willbe_workspace2_module4", - "willbe_workspace2_module5" - ]; - - assert_eq! 
- ( - expected.iter().map( | m | m.to_string() ).collect::< Vec< _ > >(), - workspaces_packages_iterate( source.into_iter().cloned() ) - - .ordered_iter( OrderStrategy::Alphabetical ) - - .filter_map( | p | PackageMetadata::try_from( p ).ok() ) - .map( | p | p.name().to_owned() ) - .collect::< Vec< _ > >() - ); -} - -#[ test ] -fn random() -{ - let workspace_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1" ) ).copied(); - let workspace_path = workspace_asset.path_buf(); - let first_workspace = Workspace::try_from( workspace_path.to_owned() ).unwrap(); - - let workspace_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace2" ) ).copied(); - let workspace_path = workspace_asset.path_buf(); - let second_workspace = Workspace::try_from( workspace_path.to_owned() ).unwrap(); - - let source = - [ - &second_workspace, // module3, module4, module 5 - &first_workspace, // module1, module2 - ]; - - dbg! - ( - workspaces_packages_iterate( source.into_iter().cloned() ) - - .ordered( OrderStrategy::Random ) - - .iter().cloned() - .filter_map( | p | PackageMetadata::try_from( p ).ok() ) - .map( | p | p.name().to_owned() ) - .collect::< Vec< _ > >() - ); - - // TODO: make some check. Eg: source is not equal to ordered, but... It may be equal because random - assert!( true ); -} diff --git a/module/blank/willbe_old/tests/willbe_old/tests/ordering/workspace.rs b/module/blank/willbe_old/tests/willbe_old/tests/ordering/workspace.rs deleted file mode 100644 index 644fef1118..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/ordering/workspace.rs +++ /dev/null @@ -1,103 +0,0 @@ -use super::*; - -#[ test ] -fn alphabetical() -{ - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1/module/module1" ) ).copied(); - let package_path = package_asset.path_buf(); - let first_package = Package::try_from( package_path.to_owned() ).unwrap(); - - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1/module/module2" ) ).copied(); - let package_path = package_asset.path_buf(); - let second_package = Package::try_from( package_path.to_owned() ).unwrap(); - - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace2/module/module3" ) ).copied(); - let package_path = package_asset.path_buf(); - let third_package = Package::try_from( package_path.to_owned() ).unwrap(); - - let source = - [ - &second_package, // module2 - &first_package, // module1 - &third_package, // module3 - ]; - - let expected = vec![ &first_package, &second_package, &third_package ]; - - assert_eq! 
- ( - expected.iter().map( | p | p.path().to_owned() ).collect::< Vec< _ > >(), - source.into_iter().cloned() - - .ordered_iter( OrderStrategy::Alphabetical ) - - .map( | p | p.path().to_owned() ).collect::< Vec< _ > >() - ); -} - -#[ test ] -fn topological() -{ - let workspace_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspace_with_deps" ) ).copied(); - let workspace_path = workspace_asset.path_buf(); - let first_package = Package::try_from( workspace_path.to_owned().join( "module/module1" ) ).unwrap(); - let second_package = Package::try_from( workspace_path.to_owned().join( "module/module2" ) ).unwrap(); - let third_package = Package::try_from( workspace_path.to_owned().join( "module/module3" ) ).unwrap(); - - let source = - [ - &third_package, // module3 dependent on module2 - &first_package, // module1 - &second_package, // module2 dependent on module1 - ]; - - let expected = vec![ &first_package, &second_package, &third_package ]; - - assert_eq! - ( - expected.iter().map( | p | p.path().to_owned() ).collect::< Vec< _ > >(), - source.into_iter().cloned() - - .ordered_iter( OrderStrategy::Topological ) - - .map( | p | p.path().to_owned() ).collect::< Vec< _ > >() - ); -} - -#[ test ] -fn random() -{ - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1/module/module1" ) ).copied(); - let package_path = package_asset.path_buf(); - let first_package = Package::try_from( package_path.to_owned() ).unwrap(); - - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace1/module/module2" ) ).copied(); - let package_path = package_asset.path_buf(); - let second_package = Package::try_from( package_path.to_owned() ).unwrap(); - - let package_asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "workspaces/workspace2/module/module3" ) ).copied(); - let package_path = package_asset.path_buf(); - let third_package = Package::try_from( package_path.to_owned() ).unwrap(); - - let source = - [ - &second_package, // module2 - &first_package, // module1 - &third_package, // module3 - ]; - - dbg! - ( - source.into_iter().cloned() - - .ordered( OrderStrategy::Random ) - - .iter().cloned() - .filter_map( | p | PackageMetadata::try_from( p ).ok() ) - .map( | p | p.name().to_owned() ) - .collect::< Vec< _ > >() - ); - - // TODO: make some check. Eg: source is not equal to ordered, but... 
It may be equal because random - assert!( true ); -} diff --git a/module/blank/willbe_old/tests/willbe_old/tests/verification.rs b/module/blank/willbe_old/tests/willbe_old/tests/verification.rs deleted file mode 100644 index 091a678ed3..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/tests/verification.rs +++ /dev/null @@ -1,26 +0,0 @@ -use super::*; - -#[ test ] -fn verified() -{ - let asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "package" ) ).copied(); - let path = asset.path_buf(); - - let meta = PackageMetadata::try_from( path.to_owned() ).unwrap(); - - assert!( meta.check_all() ); -} - -#[ test ] -fn no_verified() -{ - let asset = Asset::from( PathBuf::from( ASSET_PATH ).join( "package_no_verified" ) ).copied(); - let path = asset.path_buf(); - - let meta = PackageMetadata::try_from( path.to_owned() ).unwrap(); - - assert!( !meta.has_license() ); - assert!( !meta.has_readme() ); - assert!( !meta.has_documentation() ); - assert!( !meta.is_tests_passed() ); -} diff --git a/module/blank/willbe_old/tests/willbe_old/utility.rs b/module/blank/willbe_old/tests/willbe_old/utility.rs deleted file mode 100644 index 9b5a83b0ea..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/utility.rs +++ /dev/null @@ -1,71 +0,0 @@ -use std::{ io, fs }; -use std::path::Path; - -pub use std::path::PathBuf; - -#[ derive( Debug ) ] -pub struct Asset -{ - remove_after_use : bool, - path : PathBuf, -} - -impl From< PathBuf > for Asset -{ - fn from( path : PathBuf ) -> Self - { - Self{ remove_after_use : false, path } - } -} - -impl Asset -{ - pub fn path_buf( &self ) -> &PathBuf - { - &self.path - } - - pub fn copied( mut self ) -> Self - { - let tmp_dir = tempfile::tempdir().unwrap(); - - Self::copy_dir_all( &self.path, &tmp_dir ).unwrap(); - self.path = tmp_dir.into_path(); - self.remove_after_use = true; - - self - } -} - -impl Asset -{ - fn copy_dir_all( src : impl AsRef< Path >, dst : impl AsRef< Path > ) -> io::Result< () > - { - fs::create_dir_all(&dst)?; - for entry in fs::read_dir(src)? 
- { - let entry = entry?; - let ty = entry.file_type()?; - if ty.is_dir() - { - Asset::copy_dir_all( entry.path(), dst.as_ref().join( entry.file_name() ) )?; - } else - { - fs::copy( entry.path(), dst.as_ref().join( entry.file_name() ) )?; - } - } - Ok( () ) - } -} - -impl Drop for Asset -{ - fn drop( &mut self ) - { - if self.remove_after_use - { - fs::remove_dir_all( &self.path ) - .expect( &format!( "Can not delete \"{}\"", &self.path.display() ) ) - } - } -} diff --git a/module/blank/willbe_old/tests/willbe_old/willbe_test.rs b/module/blank/willbe_old/tests/willbe_old/willbe_test.rs deleted file mode 100644 index 2e70508245..0000000000 --- a/module/blank/willbe_old/tests/willbe_old/willbe_test.rs +++ /dev/null @@ -1,7 +0,0 @@ -use willbe_old::*; - -#[ cfg( not( feature = "no_std" ) ) ] -mod utility; - -#[ cfg( not( feature = "no_std" ) ) ] -mod tests; diff --git a/module/blank/wlang/Cargo.toml b/module/blank/wlang/Cargo.toml index 828d17a7c3..0b207714df 100644 --- a/module/blank/wlang/Cargo.toml +++ b/module/blank/wlang/Cargo.toml @@ -22,7 +22,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + include = [ "/rust/impl/_blank", "/Cargo.toml", diff --git a/module/blank/wlang/src/standard_lib.rs b/module/blank/wlang/src/standard_lib.rs index 656b3eb36e..0814be4ffc 100644 --- a/module/blank/wlang/src/standard_lib.rs +++ b/module/blank/wlang/src/standard_lib.rs @@ -36,8 +36,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; } /// Prelude to use essentials: `use my_module::prelude::*`. diff --git a/module/core/clone_dyn/Readme.md b/module/core/clone_dyn/Readme.md index a5fece766e..51f5e9d170 100644 --- a/module/core/clone_dyn/Readme.md +++ b/module/core/clone_dyn/Readme.md @@ -79,6 +79,8 @@ The main function demonstrates the overall usage by creating a vector, obtaining use clone_dyn::{ clone_dyn, CloneDyn }; /// Trait that encapsulates an iterator with specific characteristics, tailored for your needs. + // Uncomment to see what macro expand into + // #[ clone_dyn( debug ) ] #[ clone_dyn ] pub trait IterTrait< 'a, T > where diff --git a/module/core/clone_dyn/src/lib.rs b/module/core/clone_dyn/src/lib.rs index 251186abd6..a930d8f296 100644 --- a/module/core/clone_dyn/src/lib.rs +++ b/module/core/clone_dyn/src/lib.rs @@ -53,8 +53,10 @@ pub mod orphan /// Exposed namespace of the module. 
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/clone_dyn_meta/Cargo.toml b/module/core/clone_dyn_meta/Cargo.toml index e8b060725b..8865a2022a 100644 --- a/module/core/clone_dyn_meta/Cargo.toml +++ b/module/core/clone_dyn_meta/Cargo.toml @@ -23,7 +23,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [lib] proc-macro = true @@ -34,7 +34,7 @@ full = [ "enabled" ] enabled = [ "macro_tools/enabled", "former_types/enabled" ] [dependencies] -macro_tools = { workspace = true } # qqq : xxx : optimize set of features +macro_tools = { workspace = true, features = [ "attr", "diag", "generic_params", "punctuated", "phantom", "item_struct"] } # qqq : xxx : optimize set of features former_types = { workspace = true, features = [ "types_component_assign" ] } # xxx : incapsulate into macro_tools const_format = { version = "0.2.32" } diff --git a/module/core/clone_dyn_types/src/lib.rs b/module/core/clone_dyn_types/src/lib.rs index a6c60f2824..ea2cf45be3 100644 --- a/module/core/clone_dyn_types/src/lib.rs +++ b/module/core/clone_dyn_types/src/lib.rs @@ -235,8 +235,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/collection_tools/Readme.md b/module/core/collection_tools/Readme.md index 1fed2e48ad..39073acc98 100644 --- a/module/core/collection_tools/Readme.md +++ b/module/core/collection_tools/Readme.md @@ -75,7 +75,7 @@ When implementing a `no_std` environment with the `use_alloc` feature in your Ru You can do - + ```rust # #[ cfg( feature = "enabled" ) ] # #[ cfg( any( feature = "use_alloc", not( feature = "no_std" ) ) ) ] diff --git a/module/core/collection_tools/examples/collection_tools_trivial.rs b/module/core/collection_tools/examples/collection_tools_trivial.rs index b817a50c84..8a11bb85bf 100644 --- a/module/core/collection_tools/examples/collection_tools_trivial.rs +++ b/module/core/collection_tools/examples/collection_tools_trivial.rs @@ -27,7 +27,7 @@ )))] fn main(){} -// zzz : aaa : rid off `#[ cfg( not( feature = "use_alloc" ) ) ]` -- Rid of by not relying on std +// zzz : aaa : rid of `#[ cfg( not( feature = "use_alloc" ) ) ]` -- Rid of by not relying on std // #[ cfg( not( feature = "use_alloc" ) ) ] #[ cfg( all( feature = "enabled", feature = "collection_constructors" ) ) ] #[ cfg( any( not( feature = "no_std" ), feature = "use_alloc" ) ) ] diff --git a/module/core/collection_tools/src/lib.rs b/module/core/collection_tools/src/lib.rs index 6171c0e365..e49bd87f31 100644 --- a/module/core/collection_tools/src/lib.rs +++ b/module/core/collection_tools/src/lib.rs @@ -53,8 +53,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/data_type/Cargo.toml b/module/core/data_type/Cargo.toml index cbb30c67f3..c727c2cca7 100644 --- a/module/core/data_type/Cargo.toml +++ b/module/core/data_type/Cargo.toml @@ -55,7 +55,7 @@ use_alloc = [ "no_std" ] enabled = [] # dt_prelude = [ "collection_tools/reexports" ] -dt_prelude = [] # rid off maybe? +dt_prelude = [] # rid of maybe? 
dt_interval = [ "interval_adapter/enabled" ] dt_collections = [ "collection_tools/enabled" ] dt_either = [ "either" ] diff --git a/module/core/data_type/src/dt.rs b/module/core/data_type/src/dt.rs index 9fb884985f..3bb1802b81 100644 --- a/module/core/data_type/src/dt.rs +++ b/module/core/data_type/src/dt.rs @@ -24,8 +24,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/data_type/src/lib.rs b/module/core/data_type/src/lib.rs index 2ee15e40a1..622b7f82db 100644 --- a/module/core/data_type/src/lib.rs +++ b/module/core/data_type/src/lib.rs @@ -18,7 +18,7 @@ pub mod dependency #[ cfg( feature = "either" ) ] pub use ::either; // #[ cfg( feature = "type_constructor" ) ] - // pub use ::type_constructor; // xxx : rid off + // pub use ::type_constructor; // xxx : rid of #[ cfg( feature = "dt_interval" ) ] pub use ::interval_adapter; #[ cfg( feature = "dt_collection" ) ] @@ -49,8 +49,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/derive_tools/Cargo.toml b/module/core/derive_tools/Cargo.toml index 905ef0bcca..2d724361e8 100644 --- a/module/core/derive_tools/Cargo.toml +++ b/module/core/derive_tools/Cargo.toml @@ -25,7 +25,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] diff --git a/module/core/derive_tools/src/lib.rs b/module/core/derive_tools/src/lib.rs index 8cec034b59..303e909350 100644 --- a/module/core/derive_tools/src/lib.rs +++ b/module/core/derive_tools/src/lib.rs @@ -149,8 +149,10 @@ pub mod orphan /// Exposed namespace of the module. 
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/derive_tools/tests/inc/mod.rs b/module/core/derive_tools/tests/inc/mod.rs index 105aca981e..c0817b4d86 100644 --- a/module/core/derive_tools/tests/inc/mod.rs +++ b/module/core/derive_tools/tests/inc/mod.rs @@ -17,16 +17,16 @@ mod variadic_from_test; mod all_manual_test; #[ cfg ( - all - ( - feature = "derive_as_mut", - feature = "derive_as_ref", - feature = "derive_deref", - feature = "derive_deref_mut", - feature = "derive_from", - feature = "derive_inner_from", - feature = "derive_phantom", - ) + all + ( + feature = "derive_as_mut", + feature = "derive_as_ref", + feature = "derive_deref", + feature = "derive_deref_mut", + feature = "derive_from", + feature = "derive_inner_from", + feature = "derive_phantom" + ) )] mod all_test; @@ -162,8 +162,8 @@ mod deref_mut_tests #[ path = "new" ] mod new_tests { - #[ allow( unused_imports ) ] - use super::*; + #[ allow( unused_imports ) ] + use super::*; // qqq : for each branch add generic test @@ -171,14 +171,14 @@ mod new_tests mod basic_manual_test; mod basic_test; - mod unit_manual_test; - mod unit_test; - mod named_manual_test; - mod named_test; - mod multiple_named_manual_test; - mod multiple_named_test; - mod multiple_unnamed_manual_test; - // mod multiple_unnamed_test; + mod unit_manual_test; + mod unit_test; + mod named_manual_test; + mod named_test; + mod multiple_named_manual_test; + mod multiple_named_test; + mod multiple_unnamed_manual_test; + // mod multiple_unnamed_test; // xxx : continue // @@ -189,8 +189,8 @@ mod new_tests #[ path = "from" ] mod from_tests { - #[ allow( unused_imports ) ] - use super::*; + #[ allow( unused_imports ) ] + use super::*; // qqq : for each branch add generic test @@ -201,34 +201,34 @@ mod from_tests // - mod named_test; - mod named_manual_test; + mod named_test; + mod named_manual_test; - mod multiple_named_manual_test; - mod multiple_unnamed_manual_test; - mod unit_manual_test; - mod multiple_named_test; - mod unit_test; - mod multiple_unnamed_test; + mod multiple_named_manual_test; + mod multiple_unnamed_manual_test; + mod unit_manual_test; + mod multiple_named_test; + mod unit_test; + mod multiple_unnamed_test; - mod variants_manual; - mod variants_derive; + mod variants_manual; + mod variants_derive; - mod variants_duplicates_all_off; - mod variants_duplicates_some_off; - mod variants_duplicates_some_off_default_off; + mod variants_duplicates_all_off; + mod variants_duplicates_some_off; + mod variants_duplicates_some_off_default_off; - mod variants_generics; - mod variants_generics_where; - mod variants_collisions; + mod variants_generics; + mod variants_generics_where; + mod variants_collisions; } #[ cfg( feature = "derive_inner_from" ) ] #[ path = "inner_from" ] mod inner_from_tests { - #[ allow( unused_imports ) ] - use super::*; + #[ allow( unused_imports ) ] + use super::*; // @@ -237,14 +237,14 @@ mod inner_from_tests // - mod unit_test; - mod named_manual_test; - mod multiple_named_manual_test; - mod unit_manual_test; - mod named_test; - mod multiple_named_test; - mod multiple_unnamed_manual_test; - mod multiple_unnamed_test; + mod unit_test; + mod named_manual_test; + mod multiple_named_manual_test; + mod unit_manual_test; + mod named_test; + mod multiple_named_test; + mod multiple_unnamed_manual_test; + mod multiple_unnamed_test; } diff --git a/module/core/derive_tools_meta/Cargo.toml 
b/module/core/derive_tools_meta/Cargo.toml index ce8da61ae6..fc4959be4a 100644 --- a/module/core/derive_tools_meta/Cargo.toml +++ b/module/core/derive_tools_meta/Cargo.toml @@ -24,8 +24,6 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] - [lib] proc-macro = true @@ -68,10 +66,11 @@ derive_phantom = [] [dependencies] # zzz : qqq : optimize features set -macro_tools = { workspace = true, features = [ "full" ] } -iter_tools = { workspace = true, features = [ "full" ] } +macro_tools = { workspace = true, features = [ "attr", "attr_prop", "container_kind", "diag", "generic_args", "typ", "derive", "generic_params", "name", "struct_like", "quantifier" ] } +iter_tools = { workspace = true, features = [] } former_types = { workspace = true, features = [ "types_component_assign" ] } const_format = { version = "0.2.32" } +# xxx2 : continue [dev-dependencies] test_tools = { workspace = true } diff --git a/module/core/derive_tools_meta/src/derive.rs b/module/core/derive_tools_meta/src/derive.rs index 1cce024835..e86b79ff3c 100644 --- a/module/core/derive_tools_meta/src/derive.rs +++ b/module/core/derive_tools_meta/src/derive.rs @@ -5,7 +5,6 @@ #[ allow( unused_imports ) ] use macro_tools::prelude::*; -// pub use macro_tools::{ Result, Many }; pub use iter_tools as iter; #[ cfg( feature = "derive_as_mut" ) ] diff --git a/module/core/derive_tools_meta/src/derive/from.rs b/module/core/derive_tools_meta/src/derive/from.rs index 1e503d8480..314683d6d2 100644 --- a/module/core/derive_tools_meta/src/derive/from.rs +++ b/module/core/derive_tools_meta/src/derive/from.rs @@ -362,7 +362,7 @@ fn generate_multiple_fields_named< 'a > qt! { #field_name : src.#index } }); - // xxx : qqq : rid off collects + // xxx : qqq : rid of collects // let field_types : Vec< _ > = field_types.collect(); let field_types2 = field_types.clone(); qt! diff --git a/module/core/diagnostics_tools/Cargo.toml b/module/core/diagnostics_tools/Cargo.toml index 5053a7402c..c2b7d7e994 100644 --- a/module/core/diagnostics_tools/Cargo.toml +++ b/module/core/diagnostics_tools/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ diff --git a/module/core/diagnostics_tools/src/diag/cta.rs b/module/core/diagnostics_tools/src/diag/cta.rs index 60fc685938..914efc57aa 100644 --- a/module/core/diagnostics_tools/src/diag/cta.rs +++ b/module/core/diagnostics_tools/src/diag/cta.rs @@ -65,8 +65,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/diagnostics_tools/src/diag/layout.rs b/module/core/diagnostics_tools/src/diag/layout.rs index 174ed9bc0e..898fafc5f5 100644 --- a/module/core/diagnostics_tools/src/diag/layout.rs +++ b/module/core/diagnostics_tools/src/diag/layout.rs @@ -105,8 +105,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/diagnostics_tools/src/diag/mod.rs b/module/core/diagnostics_tools/src/diag/mod.rs index 821b020beb..7b66a16e2d 100644 --- a/module/core/diagnostics_tools/src/diag/mod.rs +++ b/module/core/diagnostics_tools/src/diag/mod.rs @@ -46,8 +46,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/diagnostics_tools/src/diag/rta.rs b/module/core/diagnostics_tools/src/diag/rta.rs index 38b9b5a0f3..0ea56a2425 100644 --- a/module/core/diagnostics_tools/src/diag/rta.rs +++ b/module/core/diagnostics_tools/src/diag/rta.rs @@ -248,8 +248,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/diagnostics_tools/src/lib.rs b/module/core/diagnostics_tools/src/lib.rs index cce36f97b7..43d15c537f 100644 --- a/module/core/diagnostics_tools/src/lib.rs +++ b/module/core/diagnostics_tools/src/lib.rs @@ -45,8 +45,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/error_tools/Cargo.toml b/module/core/error_tools/Cargo.toml index 1ef5cccee8..e7a87b3379 100644 --- a/module/core/error_tools/Cargo.toml +++ b/module/core/error_tools/Cargo.toml @@ -25,27 +25,25 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + # = features [features] default = [ "enabled", - "error_for_lib", - "error_for_app", + "error_typed", + "error_untyped", ] full = [ - "enabled", - "error_for_lib", - "error_for_app", + "default", ] no_std = [] use_alloc = [ "no_std" ] enabled = [] -error_for_lib = [ "thiserror" ] -error_for_app = [ "anyhow" ] +error_typed = [ "thiserror" ] +error_untyped = [ "anyhow" ] # = entry diff --git a/module/core/error_tools/src/assert.rs b/module/core/error_tools/src/assert.rs index 0fe7bcf1f6..6735ba23ca 100644 --- a/module/core/error_tools/src/assert.rs +++ b/module/core/error_tools/src/assert.rs @@ -135,8 +135,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/error_tools/src/error.rs b/module/core/error_tools/src/error.rs index 0b8fa734ba..53fe212f86 100644 --- a/module/core/error_tools/src/error.rs +++ b/module/core/error_tools/src/error.rs @@ -135,8 +135,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; @@ -150,3 +152,4 @@ pub mod prelude pub use super::private::ErrorInterface; pub use super::private::BasicError; } +// xxx : review \ No newline at end of file diff --git a/module/core/error_tools/src/lib.rs b/module/core/error_tools/src/lib.rs index b672032358..9263d383bc 100644 --- a/module/core/error_tools/src/lib.rs +++ b/module/core/error_tools/src/lib.rs @@ -21,36 +21,25 @@ pub mod dependency #[ doc( inline ) ] #[ allow( unused_imports ) ] - #[ cfg( feature = "error_for_lib" ) ] + #[ cfg( feature = "error_typed" ) ] pub use ::thiserror; #[ doc( inline ) ] #[ allow( unused_imports ) ] - #[ cfg( feature = "error_for_app" ) ] + #[ cfg( feature = "error_untyped" ) ] pub use ::anyhow; } #[ cfg( feature = "enabled" ) ] -/// Exceptions handling mechanism for libs. 
-pub mod for_lib -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - #[ cfg( feature = "error_for_lib" ) ] - pub use ::thiserror::*; -} +#[ cfg( feature = "error_typed" ) ] +/// Typed exceptions handling mechanism. +pub mod typed; #[ cfg( feature = "enabled" ) ] -// qqq : cover by simple test /* aaa : Dmytro : added trivial test routine `basic` */ -/// Exceptions handling mechanism for apps. -pub mod for_app -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - #[ cfg( feature = "error_for_app" ) ] - pub use ::anyhow::*; -} +#[ cfg( feature = "error_untyped" ) ] +/// Untyped exceptions handling mechanism. +pub mod untyped; #[ cfg( feature = "enabled" ) ] #[ doc( inline ) ] @@ -61,9 +50,28 @@ pub use protected::*; #[ cfg( feature = "enabled" ) ] pub mod protected { + #[ allow( unused_imports ) ] + use super::*; + #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::orphan::*; + pub use assert::orphan::*; + + #[ cfg( not( feature = "no_std" ) ) ] + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use error::orphan::*; + + #[ cfg( feature = "error_untyped" ) ] + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use untyped::orphan::*; + + #[ cfg( feature = "error_typed" ) ] + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use typed::orphan::*; + } /// Shared with parent namespace of the module @@ -75,26 +83,37 @@ pub mod orphan #[ allow( unused_imports ) ] pub use super::exposed::*; - #[ cfg( feature = "error_for_app" ) ] - pub use super::for_app::Result; - } /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; + #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::prelude::*; + pub use prelude::*; + #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::assert::exposed::*; + pub use assert::exposed::*; #[ cfg( not( feature = "no_std" ) ) ] #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::error::exposed::*; + pub use error::exposed::*; + + #[ cfg( feature = "error_untyped" ) ] + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use untyped::exposed::*; + + #[ cfg( feature = "error_typed" ) ] + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use typed::exposed::*; } @@ -102,4 +121,26 @@ pub mod exposed #[ cfg( feature = "enabled" ) ] pub mod prelude { + #[ allow( unused_imports ) ] + use super::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use assert::prelude::*; + + #[ cfg( not( feature = "no_std" ) ) ] + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use error::prelude::*; + + #[ cfg( feature = "error_untyped" ) ] + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use untyped::prelude::*; + + #[ cfg( feature = "error_typed" ) ] + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use typed::prelude::*; + } diff --git a/module/core/error_tools/src/typed.rs b/module/core/error_tools/src/typed.rs new file mode 100644 index 0000000000..2e36bcaa5c --- /dev/null +++ b/module/core/error_tools/src/typed.rs @@ -0,0 +1,60 @@ +/// Internal namespace. +pub( crate ) mod private +{ + +} + +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +pub use protected::*; + +/// Protected namespace of the module. 
+pub mod protected +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::orphan::*; +} + +/// Shared with parent namespace of the module +pub mod orphan +{ + pub use super::super::typed; + pub use super::super::typed as for_lib; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::exposed::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use ::thiserror::*; + // xxx : qqq : be specific + +} + +/// Exposed namespace of the module. +#[ allow( unused_imports ) ] +pub mod exposed +{ + use super::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::prelude::*; + + // #[ doc( inline ) ] + // #[ allow( unused_imports ) ] + // pub use ::thiserror::prelude::*; + +} + +/// Prelude to use essentials: `use my_module::prelude::*`. +pub mod prelude +{ + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use thiserror; + +} \ No newline at end of file diff --git a/module/core/error_tools/src/untyped.rs b/module/core/error_tools/src/untyped.rs new file mode 100644 index 0000000000..9c00619bc3 --- /dev/null +++ b/module/core/error_tools/src/untyped.rs @@ -0,0 +1,60 @@ +/// Internal namespace. +pub( crate ) mod private +{ + +} + +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +pub use protected::*; + +/// Protected namespace of the module. +pub mod protected +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::orphan::*; +} + +/// Shared with parent namespace of the module +pub mod orphan +{ + pub use super::super::untyped; + pub use super::super::untyped as for_app; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::exposed::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use ::anyhow::*; + + // xxx : qqq : be specific + +} + +/// Exposed namespace of the module. +#[ allow( unused_imports ) ] +pub mod exposed +{ + use super::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::prelude::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use ::anyhow::Result; + + // #[ doc( inline ) ] + // #[ allow( unused_imports ) ] + // pub use ::anyhow::prelude::*; + +} + +/// Prelude to use essentials: `use my_module::prelude::*`. +pub mod prelude +{ +} \ No newline at end of file diff --git a/module/core/error_tools/tests/inc/for_app_test.rs b/module/core/error_tools/tests/inc/for_app_test.rs index e2eb7601f6..b76e52a16b 100644 --- a/module/core/error_tools/tests/inc/for_app_test.rs +++ b/module/core/error_tools/tests/inc/for_app_test.rs @@ -3,7 +3,7 @@ use super::*; // -#[ cfg( feature = "error_for_app" ) ] +#[ cfg( feature = "error_untyped" ) ] tests_impls! { fn basic() @@ -19,7 +19,7 @@ tests_impls! // -#[ cfg( feature = "error_for_app" ) ] +#[ cfg( feature = "error_untyped" ) ] tests_index! { basic, diff --git a/module/core/for_each/Cargo.toml b/module/core/for_each/Cargo.toml index 4632085ca1..877ccbe184 100644 --- a/module/core/for_each/Cargo.toml +++ b/module/core/for_each/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + # include = [ # "/rust/impl/meta/for_each", # "/Cargo.toml", diff --git a/module/core/for_each/src/lib.rs b/module/core/for_each/src/lib.rs index c12e9a7513..a9acaba19f 100644 --- a/module/core/for_each/src/lib.rs +++ b/module/core/for_each/src/lib.rs @@ -494,8 +494,10 @@ pub mod orphan /// Exposed namespace of the module. 
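// Illustrative sketch ( not from the patch ) of migrating to the renamed error_tools API shown
// above : `error_for_app` / `for_app` becomes `error_untyped` / `untyped`, and
// `error_for_lib` / `for_lib` becomes `error_typed` / `typed`. Assumes the new features are
// enabled; `fetch`, `url`, and `ConfigError` are hypothetical names used only for illustration.

// Untyped ( anyhow-backed ) errors, formerly reached through `error_tools::for_app`.
use error_tools::untyped::{ Result, Error };
// The typed ( thiserror-backed ) derive, reached here through the explicit dependency re-export.
use error_tools::dependency::thiserror;

fn fetch( url : &str ) -> Result< String >
{
  if url.is_empty()
  {
    return Err( Error::msg( "empty url" ) );
  }
  Ok( format!( "fetched {}", url ) )
}

#[ derive( Debug, thiserror::Error ) ]
enum ConfigError
{
  #[ error( "missing field `{0}`" ) ]
  Missing( String ),
}

fn main()
{
  assert!( fetch( "" ).is_err() );
  let _err = ConfigError::Missing( "path".into() );
}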
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/format_tools/Cargo.toml b/module/core/format_tools/Cargo.toml index f3a45c9d8f..e13c738f11 100644 --- a/module/core/format_tools/Cargo.toml +++ b/module/core/format_tools/Cargo.toml @@ -22,7 +22,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] diff --git a/module/core/format_tools/src/format.rs b/module/core/format_tools/src/format.rs index 845cfd33d2..b97f82657c 100644 --- a/module/core/format_tools/src/format.rs +++ b/module/core/format_tools/src/format.rs @@ -15,6 +15,8 @@ pub mod as_table; pub mod print; pub mod table; +// xxx2 : continue + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use protected::*; @@ -55,8 +57,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/format_tools/src/format/as_table.rs b/module/core/format_tools/src/format/as_table.rs index cc6892a2c8..02e1b53c82 100644 --- a/module/core/format_tools/src/format/as_table.rs +++ b/module/core/format_tools/src/format/as_table.rs @@ -204,8 +204,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/format_tools/src/format/print.rs b/module/core/format_tools/src/format/print.rs index 72c6e7a53f..f39f63bc81 100644 --- a/module/core/format_tools/src/format/print.rs +++ b/module/core/format_tools/src/format/print.rs @@ -238,8 +238,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/format_tools/src/format/table.rs b/module/core/format_tools/src/format/table.rs index c847586e41..0b6f48b630 100644 --- a/module/core/format_tools/src/format/table.rs +++ b/module/core/format_tools/src/format/table.rs @@ -184,8 +184,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/format_tools/src/format/to_string.rs b/module/core/format_tools/src/format/to_string.rs index 7b6dad85b3..148f2a099d 100644 --- a/module/core/format_tools/src/format/to_string.rs +++ b/module/core/format_tools/src/format/to_string.rs @@ -79,8 +79,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::private:: diff --git a/module/core/format_tools/src/format/to_string_with_fallback.rs b/module/core/format_tools/src/format/to_string_with_fallback.rs index 0d551b2a82..cee18104a7 100644 --- a/module/core/format_tools/src/format/to_string_with_fallback.rs +++ b/module/core/format_tools/src/format/to_string_with_fallback.rs @@ -152,8 +152,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::private:: diff --git a/module/core/format_tools/src/format/wrapper.rs b/module/core/format_tools/src/format/wrapper.rs index 4cd134650f..36acb18449 100644 --- a/module/core/format_tools/src/format/wrapper.rs +++ b/module/core/format_tools/src/format/wrapper.rs @@ -31,8 +31,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super:: diff --git a/module/core/format_tools/src/lib.rs b/module/core/format_tools/src/lib.rs index af0ae3ae89..a33026fccc 100644 --- a/module/core/format_tools/src/lib.rs +++ b/module/core/format_tools/src/lib.rs @@ -46,8 +46,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/former/Cargo.toml b/module/core/former/Cargo.toml index ca855af42a..8393ef382f 100644 --- a/module/core/former/Cargo.toml +++ b/module/core/former/Cargo.toml @@ -29,7 +29,6 @@ all-features = false no_std = [ "former_types/no_std", "collection_tools/no_std" ] use_alloc = [ "no_std", "former_types/use_alloc", "collection_tools/use_alloc" ] - # no_std = [ "collection_tools/no_std" ] # use_alloc = [ "no_std", "collection_tools/use_alloc" ] @@ -41,6 +40,8 @@ default = [ "derive_component_assign", "derive_components_assign", "derive_from_components", + "types_former", + "types_component_assign", ] full = [ "enabled", @@ -50,16 +51,20 @@ full = [ "derive_component_assign", "derive_components_assign", "derive_from_components", + "types_former", + "types_component_assign", ] enabled = [ "former_meta/enabled", "former_types/enabled" ] -derive_former = [ "former_meta/derive_former", "former_types/derive_former" ] -derive_components = [ "former_meta/derive_components", "former_types/types_components" ] -derive_component_assign = [ "derive_components", "former_meta/derive_component_assign", "former_types/types_component_assign" ] -derive_components_assign = [ "derive_components", "derive_component_assign", "former_meta/derive_components_assign" ] -derive_component_from = [ "derive_components", "former_meta/derive_component_from" ] -derive_from_components = [ "derive_components", "former_meta/derive_from_components" ] +derive_former = [ "former_meta/derive_former", "types_former" ] +derive_components = [ "former_meta/derive_components", "derive_component_assign", "derive_components_assign", "derive_component_from", "derive_from_components" ] +derive_component_assign = [ "former_meta/derive_component_assign", "types_component_assign" ] +derive_components_assign = [ "derive_component_assign", "former_meta/derive_components_assign" ] +derive_component_from = [ "former_meta/derive_component_from" ] +derive_from_components = [ "former_meta/derive_from_components" ] +types_former = [ "former_types/types_former" ] +types_component_assign = [ "former_types/types_component_assign" ] [dependencies] former_meta = { workspace = true } diff --git a/module/core/former/src/lib.rs b/module/core/former/src/lib.rs index 2eb4e674d2..7d5b86817e 100644 --- a/module/core/former/src/lib.rs +++ b/module/core/former/src/lib.rs @@ -4,34 +4,6 @@ #![ doc( html_root_url = "https://docs.rs/former/latest/former/" ) ] #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] -// /// 
Axiomatic things. -// #[ cfg( feature = "enabled" ) ] -// #[ cfg( feature = "derive_former" ) ] -// mod axiomatic; -// /// Forming process. -// #[ cfg( feature = "enabled" ) ] -// #[ cfg( feature = "derive_former" ) ] -// mod definition; -// /// Forming process. -// #[ cfg( feature = "enabled" ) ] -// #[ cfg( feature = "derive_former" ) ] -// mod forming; -// /// Storage. -// #[ cfg( feature = "enabled" ) ] -// #[ cfg( feature = "derive_former" ) ] -// mod storage; -// -// /// Interface for collections. -// #[ cfg( feature = "enabled" ) ] -// #[ cfg( any( not( feature = "no_std" ), feature = "use_alloc" ) ) ] -// #[ cfg( feature = "derive_former" ) ] -// mod collection; -// -// /// Component-based forming. -// #[ cfg( feature = "enabled" ) ] -// #[ cfg( any( feature = "derive_component_from", feature = "derive_component_assign" ) ) ] -// mod component; - /// Namespace with dependencies. #[ cfg( feature = "enabled" ) ] pub mod dependency @@ -68,8 +40,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] @@ -83,23 +57,6 @@ pub mod exposed #[ allow( unused_imports ) ] pub use former_types::exposed::*; -// #[ doc( inline ) ] -// #[ allow( unused_imports ) ] -// #[ cfg( feature = "derive_former" ) ] -// pub use super:: -// { -// axiomatic::*, -// definition::*, -// forming::*, -// storage::*, -// }; -// -// #[ doc( inline ) ] -// #[ allow( unused_imports ) ] -// #[ cfg( any( not( feature = "no_std" ), feature = "use_alloc" ) ) ] -// #[ cfg( feature = "derive_former" ) ] -// pub use super::collection::*; - } /// Prelude to use essentials: `use my_module::prelude::*`. @@ -107,11 +64,6 @@ pub mod exposed pub mod prelude { - // #[ doc( inline ) ] - // #[ allow( unused_imports ) ] - // #[ cfg( any( feature = "derive_component_from", feature = "derive_component_assign" ) ) ] - // pub use super::component::*; - #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use former_types::prelude::*; diff --git a/module/core/former/tests/inc/components_tests/component_assign.rs b/module/core/former/tests/inc/components_tests/component_assign.rs index 546cb3852b..cf02ef8935 100644 --- a/module/core/former/tests/inc/components_tests/component_assign.rs +++ b/module/core/former/tests/inc/components_tests/component_assign.rs @@ -3,6 +3,7 @@ use super::*; #[ allow( unused_imports ) ] use former::Assign; +// #[ derive( Default, PartialEq, Debug, former::Assign ) ] // #[ debug ] diff --git a/module/core/former/tests/inc/components_tests/only_test/component_assign.rs b/module/core/former/tests/inc/components_tests/only_test/component_assign.rs index 0adb4ed674..0da82e46a7 100644 --- a/module/core/former/tests/inc/components_tests/only_test/component_assign.rs +++ b/module/core/former/tests/inc/components_tests/only_test/component_assign.rs @@ -9,4 +9,11 @@ fn component_assign() got.assign( "John" ); assert_eq!( got, Person { age : 13, name : "John".to_string() } ); + let mut got : Person = Default::default(); + got = got + .impute( 13 ) + .impute( "John" ) + ; + assert_eq!( got, Person { age : 13, name : "John".to_string() } ); + } diff --git a/module/core/former/tests/inc/former_tests/name_collision_former_hashmap_without_parameter.rs b/module/core/former/tests/inc/former_tests/name_collision_former_hashmap_without_parameter.rs index 31df1f43e6..a1396f3ba2 100644 --- a/module/core/former/tests/inc/former_tests/name_collision_former_hashmap_without_parameter.rs +++ 
b/module/core/former/tests/inc/former_tests/name_collision_former_hashmap_without_parameter.rs @@ -1,3 +1,5 @@ +#![ allow( dead_code ) ] + use super::*; use the_module::Former; diff --git a/module/core/former/tests/inc/former_tests/name_collision_former_vector_without_parameter.rs b/module/core/former/tests/inc/former_tests/name_collision_former_vector_without_parameter.rs index c79d0e8ba3..605bf9f4f4 100644 --- a/module/core/former/tests/inc/former_tests/name_collision_former_vector_without_parameter.rs +++ b/module/core/former/tests/inc/former_tests/name_collision_former_vector_without_parameter.rs @@ -1,3 +1,5 @@ +#![ allow( dead_code ) ] + use super::*; use the_module::Former; diff --git a/module/core/former/tests/inc/former_tests/name_collisions.rs b/module/core/former/tests/inc/former_tests/name_collisions.rs index e23adcdd45..2dba7ce21f 100644 --- a/module/core/former/tests/inc/former_tests/name_collisions.rs +++ b/module/core/former/tests/inc/former_tests/name_collisions.rs @@ -1,3 +1,5 @@ +#![ allow( dead_code ) ] + #[ allow( unused_imports ) ] use super::*; diff --git a/module/core/former_meta/Cargo.toml b/module/core/former_meta/Cargo.toml index 32b6323ecc..a6d39e082c 100644 --- a/module/core/former_meta/Cargo.toml +++ b/module/core/former_meta/Cargo.toml @@ -47,11 +47,11 @@ full = [ enabled = [ "macro_tools/enabled", "iter_tools/enabled", "former_types/enabled" ] derive_former = [ "convert_case" ] -derive_components = [] +derive_components = [ "derive_component_assign", "derive_components_assign", "derive_component_from", "derive_from_components" ] derive_component_assign = [] -derive_components_assign = [ "derive_components", "derive_component_assign", "convert_case" ] -derive_component_from = [ "derive_components" ] -derive_from_components = [ "derive_components" ] +derive_components_assign = [ "derive_component_assign", "convert_case" ] +derive_component_from = [] +derive_from_components = [] [lib] proc-macro = true diff --git a/module/core/former_types/Cargo.toml b/module/core/former_types/Cargo.toml index 371a31a76a..35d239aa9f 100644 --- a/module/core/former_types/Cargo.toml +++ b/module/core/former_types/Cargo.toml @@ -30,21 +30,18 @@ use_alloc = [ "no_std", "collection_tools/use_alloc" ] default = [ "enabled", - "derive_former", - "types_components", + "types_former", "types_component_assign", ] full = [ "enabled", - "derive_former", - "types_components", + "types_former", "types_component_assign", ] enabled = [ "collection_tools/enabled" ] -derive_former = [] -types_components = [] -types_component_assign = [ "types_components" ] +types_former = [] +types_component_assign = [] [dependencies] diff --git a/module/core/former_types/Readme.md b/module/core/former_types/Readme.md index ac57ecfc04..30e14aaf08 100644 --- a/module/core/former_types/Readme.md +++ b/module/core/former_types/Readme.md @@ -17,10 +17,10 @@ and implements the `Assign` trait for its fields. It shows how to use these impl instance using different types that can be converted into the required types. 
```rust -#[ cfg( any( not( feature = "derive_former" ), not( feature = "enabled" ) ) ) ] +#[ cfg( any( not( feature = "types_former" ), not( feature = "enabled" ) ) ) ] fn main() {} -#[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +#[ cfg( all( feature = "types_former", feature = "enabled" ) ) ] fn main() { use former_types::Assign; diff --git a/module/core/former_types/examples/former_types_trivial.rs b/module/core/former_types/examples/former_types_trivial.rs index c379293640..41c937b73a 100644 --- a/module/core/former_types/examples/former_types_trivial.rs +++ b/module/core/former_types/examples/former_types_trivial.rs @@ -20,10 +20,10 @@ //! - `got.assign( "John" )`: Assigns the string `"John"` to the `name` field. //! -#[ cfg( any( not( feature = "derive_former" ), not( feature = "enabled" ) ) ) ] +#[ cfg( any( not( feature = "types_former" ), not( feature = "enabled" ) ) ) ] fn main() {} -#[ cfg( all( feature = "derive_former", feature = "enabled" ) ) ] +#[ cfg( all( feature = "types_former", feature = "enabled" ) ) ] fn main() { use former_types::Assign; diff --git a/module/core/former_types/src/collection.rs b/module/core/former_types/src/collection.rs index eac724c018..f59ca28d52 100644 --- a/module/core/former_types/src/collection.rs +++ b/module/core/former_types/src/collection.rs @@ -534,8 +534,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/former_types/src/collection/binary_heap.rs b/module/core/former_types/src/collection/binary_heap.rs index 6652fe0c4e..ae76f5e4f8 100644 --- a/module/core/former_types/src/collection/binary_heap.rs +++ b/module/core/former_types/src/collection/binary_heap.rs @@ -125,8 +125,7 @@ where /// Holds the generic parameters for the `BinaryHeapDefinition`. /// /// This struct acts as a companion to `BinaryHeapDefinition`, providing a concrete definition of types used -/// in the formation process. It is crucial for linking the type parameters with the operational mechanics -/// of the formation and ensuring type safety and correctness throughout the formation lifecycle. +/// in the formation process. /// /// # Type Parameters /// diff --git a/module/core/former_types/src/component.rs b/module/core/former_types/src/component.rs index 21398497d8..9e846e2673 100644 --- a/module/core/former_types/src/component.rs +++ b/module/core/former_types/src/component.rs @@ -47,6 +47,18 @@ where /// This method takes ownership of the given value (`component`), which is of type `IntoT`. /// `component` is then converted into type `T` and set as the component of the object. fn assign( &mut self, component : IntoT ); + + /// Sets or replaces the component on the object with the given value. + /// Unlike function (`assing`) function (`impute`) also consumes self and return it what is useful for builder pattern. + #[ inline( always ) ] + fn impute( mut self, component : IntoT ) -> Self + where + Self : Sized, + { + self.assign( component ); + self + } + } /// Extension trait to provide a method for setting a component on an `Option` diff --git a/module/core/former_types/src/lib.rs b/module/core/former_types/src/lib.rs index 51cfc78eab..018dde3ed7 100644 --- a/module/core/former_types/src/lib.rs +++ b/module/core/former_types/src/lib.rs @@ -6,25 +6,25 @@ /// Axiomatic things. 
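// Illustrative sketch ( not from the patch ) of the `impute` method the hunk above adds to the
// `Assign` trait : unlike `assign`, which takes `&mut self`, `impute` consumes `self` and
// returns it, so component assignments chain builder-style. `Person` and its fields are
// hypothetical, mirroring the structure used by the crate's own component tests.
use former_types::Assign;

#[ derive( Default, PartialEq, Debug ) ]
struct Person
{
  age : i32,
  name : String,
}

impl< IntoT > Assign< i32, IntoT > for Person
where
  IntoT : Into< i32 >,
{
  fn assign( &mut self, component : IntoT )
  {
    self.age = component.into();
  }
}

impl< IntoT > Assign< String, IntoT > for Person
where
  IntoT : Into< String >,
{
  fn assign( &mut self, component : IntoT )
  {
    self.name = component.into();
  }
}

fn main()
{
  let got = Person::default()
  .impute( 13 )
  .impute( "John" )
  ;
  assert_eq!( got, Person { age : 13, name : "John".to_string() } );
}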
#[ cfg( feature = "enabled" ) ] -#[ cfg( feature = "derive_former" ) ] +#[ cfg( feature = "types_former" ) ] mod axiomatic; /// Definition of former. #[ cfg( feature = "enabled" ) ] -#[ cfg( feature = "derive_former" ) ] +#[ cfg( feature = "types_former" ) ] mod definition; /// Forming process. #[ cfg( feature = "enabled" ) ] -#[ cfg( feature = "derive_former" ) ] +#[ cfg( feature = "types_former" ) ] mod forming; /// Storage. #[ cfg( feature = "enabled" ) ] -#[ cfg( feature = "derive_former" ) ] +#[ cfg( feature = "types_former" ) ] mod storage; /// Interface for collections. #[ cfg( feature = "enabled" ) ] #[ cfg( any( not( feature = "no_std" ), feature = "use_alloc" ) ) ] -#[ cfg( feature = "derive_former" ) ] +#[ cfg( feature = "types_former" ) ] mod collection; /// Component-based forming. @@ -64,8 +64,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] @@ -73,7 +75,7 @@ pub mod exposed #[ doc( inline ) ] #[ allow( unused_imports ) ] - #[ cfg( feature = "derive_former" ) ] + #[ cfg( feature = "types_former" ) ] pub use super:: { axiomatic::*, @@ -85,7 +87,7 @@ pub mod exposed #[ doc( inline ) ] #[ allow( unused_imports ) ] #[ cfg( any( not( feature = "no_std" ), feature = "use_alloc" ) ) ] - #[ cfg( feature = "derive_former" ) ] + #[ cfg( feature = "types_former" ) ] pub use super::collection::*; } diff --git a/module/core/former_types/tests/inc/mod.rs b/module/core/former_types/tests/inc/mod.rs index 59fea4b027..79269a3c6f 100644 --- a/module/core/former_types/tests/inc/mod.rs +++ b/module/core/former_types/tests/inc/mod.rs @@ -3,7 +3,7 @@ #[ allow( unused_imports ) ] use super::*; -#[ cfg( feature = "derive_former" ) ] +#[ cfg( feature = "types_former" ) ] #[ path = "../../../former/tests/inc/former_tests" ] mod former_tests { @@ -27,7 +27,6 @@ mod former_tests } -#[ cfg( feature = "types_components" ) ] #[ path = "../../../former/tests/inc/components_tests" ] mod components_tests { diff --git a/module/core/fs_tools/src/fs/fs.rs b/module/core/fs_tools/src/fs/fs.rs index eb6f6508de..98da9a36dc 100644 --- a/module/core/fs_tools/src/fs/fs.rs +++ b/module/core/fs_tools/src/fs/fs.rs @@ -73,8 +73,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/fs_tools/src/fs/lib.rs b/module/core/fs_tools/src/fs/lib.rs index 1789116600..a4f7f72697 100644 --- a/module/core/fs_tools/src/fs/lib.rs +++ b/module/core/fs_tools/src/fs/lib.rs @@ -38,8 +38,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/implements/Cargo.toml b/module/core/implements/Cargo.toml index 77f0b3ac2d..44a51ad680 100644 --- a/module/core/implements/Cargo.toml +++ b/module/core/implements/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ "enabled" ] diff --git a/module/core/implements/src/lib.rs b/module/core/implements/src/lib.rs index 92cdcaa96e..2472c1e0a2 100644 --- a/module/core/implements/src/lib.rs +++ b/module/core/implements/src/lib.rs @@ -94,8 +94,10 @@ pub mod orphan /// Exposed namespace of the module. 
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/impls_index/src/impls_index/func.rs b/module/core/impls_index/src/impls_index/func.rs index cf9bed4e69..33da44bbcb 100644 --- a/module/core/impls_index/src/impls_index/func.rs +++ b/module/core/impls_index/src/impls_index/func.rs @@ -233,8 +233,10 @@ pub( crate ) mod private } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/impls_index/src/impls_index/impls.rs b/module/core/impls_index/src/impls_index/impls.rs index 2d07e37d9c..ac34c9f5f1 100644 --- a/module/core/impls_index/src/impls_index/impls.rs +++ b/module/core/impls_index/src/impls_index/impls.rs @@ -376,8 +376,10 @@ pub( crate ) mod private } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/impls_index/src/impls_index/mod.rs b/module/core/impls_index/src/impls_index/mod.rs index 3fa0ed3c93..236dd9f083 100644 --- a/module/core/impls_index/src/impls_index/mod.rs +++ b/module/core/impls_index/src/impls_index/mod.rs @@ -42,8 +42,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/impls_index/src/lib.rs b/module/core/impls_index/src/lib.rs index f70d8cc177..ad03bdb776 100644 --- a/module/core/impls_index/src/lib.rs +++ b/module/core/impls_index/src/lib.rs @@ -44,8 +44,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/impls_index_meta/Cargo.toml b/module/core/impls_index_meta/Cargo.toml index d3b1c21a9f..562eff0f95 100644 --- a/module/core/impls_index_meta/Cargo.toml +++ b/module/core/impls_index_meta/Cargo.toml @@ -23,7 +23,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ "enabled" ] diff --git a/module/core/include_md/Cargo.toml b/module/core/include_md/Cargo.toml index 410d16c21b..ad29aa3f81 100644 --- a/module/core/include_md/Cargo.toml +++ b/module/core/include_md/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + include = [ "/rust/impl/_blank", "/Cargo.toml", diff --git a/module/core/include_md/src/_blank/standard_lib.rs b/module/core/include_md/src/_blank/standard_lib.rs index d335841385..95cfbde4da 100644 --- a/module/core/include_md/src/_blank/standard_lib.rs +++ b/module/core/include_md/src/_blank/standard_lib.rs @@ -42,8 +42,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; } diff --git a/module/core/inspect_type/Cargo.toml b/module/core/inspect_type/Cargo.toml index 8c55f439ac..c5055f1110 100644 --- a/module/core/inspect_type/Cargo.toml +++ b/module/core/inspect_type/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ "enabled" ] diff --git a/module/core/inspect_type/src/lib.rs b/module/core/inspect_type/src/lib.rs index b0e8da7f37..17cfd16a0a 100644 --- a/module/core/inspect_type/src/lib.rs +++ b/module/core/inspect_type/src/lib.rs @@ -84,8 +84,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/interval_adapter/Cargo.toml b/module/core/interval_adapter/Cargo.toml index f08b7f12a0..49ac72262d 100644 --- a/module/core/interval_adapter/Cargo.toml +++ b/module/core/interval_adapter/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ "enabled" ] diff --git a/module/core/interval_adapter/src/lib.rs b/module/core/interval_adapter/src/lib.rs index 8391104eda..e43040b817 100644 --- a/module/core/interval_adapter/src/lib.rs +++ b/module/core/interval_adapter/src/lib.rs @@ -611,8 +611,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/is_slice/Cargo.toml b/module/core/is_slice/Cargo.toml index b3cb822caa..afcff35d65 100644 --- a/module/core/is_slice/Cargo.toml +++ b/module/core/is_slice/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ "enabled" ] diff --git a/module/core/is_slice/src/lib.rs b/module/core/is_slice/src/lib.rs index eea3ea5978..a17856812a 100644 --- a/module/core/is_slice/src/lib.rs +++ b/module/core/is_slice/src/lib.rs @@ -96,8 +96,10 @@ pub mod orphan /// Exposed namespace of the module. 
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/iter_tools/Cargo.toml b/module/core/iter_tools/Cargo.toml index 0675b432e9..8e288271af 100644 --- a/module/core/iter_tools/Cargo.toml +++ b/module/core/iter_tools/Cargo.toml @@ -24,18 +24,23 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ "enabled" ] full = [ "enabled" ] no_std = [] use_alloc = [ "itertools/use_alloc" ] -enabled = [] +enabled = [ "clone_dyn_types/enabled" ] [dependencies] + +# external itertools = { version = "~0.11.0", features = [ "use_std" ] } # qqq : update +# internal +clone_dyn_types = { workspace = true, features = [] } + [dev-dependencies] test_tools = { workspace = true } diff --git a/module/core/iter_tools/src/iter.rs b/module/core/iter_tools/src/iter.rs index 3a9218186f..28990645b5 100644 --- a/module/core/iter_tools/src/iter.rs +++ b/module/core/iter_tools/src/iter.rs @@ -4,23 +4,151 @@ pub( crate ) mod private { use ::itertools::process_results; - // fn collect_results< I, T, E >( iter : I ) -> core::result::Result< Vec< T >, E > - // where - // I : Iterator< Item = core::result::Result< T, E > > + Clone, - // E : core::fmt::Debug, - // { - // for e in iter.clone() - // { - // if e.is_err() - // { - // e?; - // } - // } - // Ok( iter.map( | e | e.unwrap() ).collect() ) - // } + use clone_dyn_types::CloneDyn; + + /// Trait that encapsulates an iterator with specific characteristics and implemetning `CloneDyn`. + /// + /// The `_IterTrait` trait is designed to represent iterators that may yield references to items ( `&'a T` ). + /// These iterators must also implement the `ExactSizeIterator` and `DoubleEndedIterator` traits. + /// This combination ensures that the iterator can: + /// - Provide an exact size hint ( `ExactSizeIterator` ), + /// - Be traversed from both ends ( `DoubleEndedIterator` ). + /// + /// Additionally, the iterator must implement the `CloneDyn` trait, which allows cloning of trait objects. + /// + /// # Example + /// ```rust + /// use iter_tools::_IterTrait; + /// + /// // Example struct that implements Iterator, ExactSizeIterator, DoubleEndedIterator, and CloneDyn. + /// #[ derive( Clone ) ] + /// struct MyIterator + /// { + /// // internal fields + /// } + /// + /// impl Iterator for MyIterator + /// { + /// type Item = i32; + /// + /// fn next( &mut self ) -> Option< Self::Item > + /// { + /// // implementation + /// Some( 1 ) + /// } + /// } + /// + /// impl ExactSizeIterator for MyIterator + /// { + /// fn len( &self ) -> usize + /// { + /// // implementation + /// 1 + /// } + /// } + /// + /// impl DoubleEndedIterator for MyIterator + /// { + /// fn next_back( &mut self ) -> Option< Self::Item > + /// { + /// // implementation + /// Some( 1 ) + /// } + /// } + /// + /// ``` + + pub trait _IterTrait< 'a, T > + where + T : 'a, + Self : Iterator< Item = T > + ExactSizeIterator< Item = T > + DoubleEndedIterator, + Self : CloneDyn, + { + } + + impl< 'a, T, I > _IterTrait< 'a, T > for I + where + T : 'a, + Self : Iterator< Item = T > + ExactSizeIterator< Item = T > + DoubleEndedIterator, + Self : CloneDyn, + { + } + + /// Trait that encapsulates a clonable iterator with specific characteristics, tailored for use with the `syn` crate. 
+ /// + /// The `IterTrait` trait is designed to represent iterators that may yield references to items ( `&'a T` ) within the `syn` crate. + /// These iterators must also implement the `ExactSizeIterator`, `DoubleEndedIterator`, and `Clone` traits. + /// This combination ensures that the iterator can: + /// - Provide an exact size hint ( `ExactSizeIterator` ), + /// - Be traversed from both ends ( `DoubleEndedIterator` ), + /// - Be clonable ( `Clone` ). + /// + pub trait IterTrait< 'a, T > + where + T : 'a, + Self : _IterTrait< 'a, T > + Clone, + { + } + + impl< 'a, T, I > IterTrait< 'a, T > for I + where + T : 'a, + Self : _IterTrait< 'a, T > + Clone, + { + } + + /// Implement `Clone` for boxed `_IterTrait` trait objects. + /// + /// This allows cloning of boxed iterators that implement `_IterTrait`. + #[ allow( non_local_definitions ) ] + impl< 'c, T > Clone for Box< dyn _IterTrait< 'c, T > + 'c > + { + #[ inline ] + fn clone( &self ) -> Self + { + clone_dyn_types::clone_into_box( &**self ) + } + } + + #[ allow( non_local_definitions ) ] + impl< 'c, T > Clone for Box< dyn _IterTrait< 'c, T > + Send + 'c > + { + #[ inline ] + fn clone( &self ) -> Self + { + clone_dyn_types::clone_into_box( &**self ) + } + } + + #[ allow( non_local_definitions ) ] + impl< 'c, T > Clone for Box< dyn _IterTrait< 'c, T > + Sync + 'c > + { + #[ inline ] + fn clone( &self ) -> Self + { + clone_dyn_types::clone_into_box( &**self ) + } + } + + #[ allow( non_local_definitions ) ] + impl< 'c, T > Clone for Box< dyn _IterTrait< 'c, T > + Send + Sync + 'c > + { + #[ inline ] + fn clone( &self ) -> Self + { + clone_dyn_types::clone_into_box( &**self ) + } + } + + /// Type alias for boxed `_IterTrait` trait objects. + /// + /// Prefer `BoxedIter` over `impl _IterTrait` when using trait objects ( `dyn _IterTrait` ) because the concrete type in return is less restrictive than `impl _IterTrait`. + /// + pub type BoxedIter< 'a, T > = Box< dyn _IterTrait< 'a, T > + 'a >; /// Extension of iterator. + // xxx : review pub trait IterExt where Self : core::iter::Iterator, @@ -29,11 +157,8 @@ pub( crate ) mod private fn map_result< F, RE, El >( self, f : F ) -> core::result::Result< Vec< El >, RE > where Self : Sized + Clone, - // Self : Sized, F : FnMut( < Self as core::iter::Iterator >::Item ) -> core::result::Result< El, RE >, RE : core::fmt::Debug, - // El : Clone, - // core::result::Result< El, RE > : Clone, ; } @@ -46,16 +171,10 @@ pub( crate ) mod private Self : Sized + Clone, F : FnMut( < Self as core::iter::Iterator >::Item ) -> core::result::Result< El, RE >, RE : core::fmt::Debug, - // El : Clone, - // core::result::Result< El, RE > : Clone, { let vars_maybe = self.map( f ); let vars : Vec< _ > = process_results( vars_maybe, | iter | iter.collect() )?; - // let vars = collect_results( vars_maybe.clone() )?; Ok( vars ) - // let result : ( Vec< _ >, Vec< _ >, Vec< _ > ) - // = vars.into_iter().multiunzip(); - // Ok( result ) } } @@ -73,23 +192,18 @@ pub mod protected #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::orphan::*; + } /// Orphan namespace of the module. #[ cfg( feature = "enabled" ) ] pub mod orphan { + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::exposed::*; -} -/// Exposed namespace of the module. 
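// Illustrative sketch ( not from the patch ) of the `BoxedIter` alias introduced above : a
// function can return a boxed `_IterTrait` object instead of `impl Iterator`, and the manual
// `Clone` impls for the boxed trait object let the caller duplicate it. `numbers` is a
// hypothetical function used only for illustration.
use iter_tools::BoxedIter;

fn numbers() -> BoxedIter< 'static, i32 >
{
  Box::new( vec![ 1, 2, 3 ].into_iter() )
}

fn main()
{
  let iter = numbers();
  // works because the patch adds `Clone` for the boxed `_IterTrait` trait object
  let copy = iter.clone();
  // `ExactSizeIterator` is part of the trait bundle
  assert_eq!( iter.len(), 3 );
  // `DoubleEndedIterator` is too
  assert_eq!( copy.rev().collect::< Vec< _ > >(), vec![ 3, 2, 1 ] );
}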
-pub mod exposed -{ - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::prelude::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] @@ -132,6 +246,7 @@ pub mod exposed unfold, // zip, zip_eq, + Itertools, }; #[ cfg( not( feature = "no_std" ) ) ] @@ -141,6 +256,27 @@ pub mod exposed } +/// Exposed namespace of the module. +#[ allow( unused_imports ) ] +pub mod exposed +{ + use super::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::prelude::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + _IterTrait, + IterTrait, + BoxedIter, + }; + +} + /// Prelude to use essentials: `use my_module::prelude::*`. pub mod prelude { @@ -156,7 +292,6 @@ pub mod prelude MinMaxResult, Position, Itertools, - /*MultiUnzip,*/ PeekingNext, }; diff --git a/module/core/iter_tools/src/lib.rs b/module/core/iter_tools/src/lib.rs index 043285af78..a65398090b 100644 --- a/module/core/iter_tools/src/lib.rs +++ b/module/core/iter_tools/src/lib.rs @@ -2,14 +2,6 @@ #![ doc( html_logo_url = "https://raw.githubusercontent.com/Wandalen/wTools/master/asset/img/logo_v3_trans_square.png" ) ] #![ doc( html_favicon_url = "https://raw.githubusercontent.com/Wandalen/wTools/alpha/asset/img/logo_v3_trans_square_icon_small_v2.ico" ) ] #![ doc( html_root_url = "https://docs.rs/iter_tools/latest/iter_tools/" ) ] -// #![ deny( rust_2018_idioms ) ] -// #![ deny( missing_debug_implementations ) ] -// #![ deny( missing_docs ) ] - -//! -//! Collection of general purpose tools to iterate. Currently it simply reexports itertools. -//! - #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] /// Core module. @@ -32,12 +24,15 @@ pub use protected::*; #[ cfg( feature = "enabled" ) ] pub mod protected { + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::orphan::*; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::iter::orphan::*; + } /// Orphan namespace of the module. @@ -51,8 +46,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/Cargo.toml b/module/core/macro_tools/Cargo.toml index 5071cdb8d0..c0506c8eea 100644 --- a/module/core/macro_tools/Cargo.toml +++ b/module/core/macro_tools/Cargo.toml @@ -23,18 +23,69 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] -default = [ "enabled" ] -full = [ "enabled" ] +default = [ + "enabled", + "attr", + "attr_prop", + "components", + "ct", + "container_kind", + "derive", + "diag", + "drop", + "equation", + "generic_args", + "generic_params", + "item", + "item_struct", + "name", + "kw", + "phantom", + "punctuated", + "quantifier", + "struct_like", + "tokens", + "typ", +] +full = [ + "default", +] + enabled = [ "former_types/enabled", "interval_adapter/enabled", "clone_dyn_types/enabled", + "iter_tools/enabled", ] -# qqq : put all files under features: macro_attr, macro_container_kind, ... 
+attr = [ "diag" ] +attr_prop = [ "components" ] +components = [] +ct = [] +container_kind = [ "typ" ] +derive = [] +diag = [] +drop = [] +equation = [] +generic_args = [] +generic_params = [] +item = [] +item_struct = [] +iter = [] +name = [] +kw = [] +phantom = [ "item" ] +punctuated = [] +quantifier = [] +struct_like = [] +tokens = [] +typ = [] + +# qqq : zzz : put all files under features: macro_attr, macro_container_kind, ... +# qqq : zzz : optimize features list [dependencies] @@ -42,14 +93,13 @@ enabled = [ proc-macro2 = { version = "~1.0.78", features = [] } quote = { version = "~1.0.35", features = [] } syn = { version = "~2.0.52", features = [ "full", "extra-traits" ] } - -# qqq : optimize features list +const_format = { version = "0.2.32", features = [] } ## internal interval_adapter = { workspace = true, features = [] } +iter_tools = { workspace = true, features = [] } +clone_dyn_types = { workspace = true, features = [] } former_types = { workspace = true, features = [ "types_component_assign" ] } -clone_dyn_types = { workspace = true, features = [] } # xxx : qqq : optimize features [dev-dependencies] test_tools = { workspace = true } -const_format = { version = "0.2.32" } diff --git a/module/core/macro_tools/Readme.md b/module/core/macro_tools/Readme.md index 62e2e0013a..1121555152 100644 --- a/module/core/macro_tools/Readme.md +++ b/module/core/macro_tools/Readme.md @@ -89,6 +89,7 @@ fn main() use macro_tools:: { attr, + ct, syn_err, return_syn_err, qt, @@ -97,8 +98,8 @@ fn main() AttributePropertyComponent, AttributePropertyBoolean, AttributePropertySingletone, + Assign, }; - use former_types::Assign; /// Represents the attributes of a struct. Aggregates all its attributes. #[ derive( Debug, Default ) ] @@ -121,12 +122,11 @@ fn main() // Closure to generate an error message for unknown attributes. let error = | attr : & syn::Attribute | -> syn::Error { - let known_attributes = const_format::concatcp! + let known_attributes = ct::str::format! ( - "Known attributes are: ", + "Known attributes are: {}, {}.", "debug", - ", ", AttributeMutator::KEYWORD, - "." + AttributeMutator::KEYWORD, ); syn_err! ( @@ -241,12 +241,12 @@ fn main() let error = | ident : & syn::Ident | -> syn::Error { - let known = const_format::concatcp! + let known = ct::str::format! ( - "Known entries of attribute ", AttributeMutator::KEYWORD, " are: ", + "Known entries of attribute {} are: {}, {}.", + AttributeMutator::KEYWORD, AttributePropertyCustom::KEYWORD, - ", ", AttributePropertyDebug::KEYWORD, - "." + AttributePropertyDebug::KEYWORD, ); syn_err! ( diff --git a/module/core/macro_tools/examples/macro_tools_attr_prop.rs b/module/core/macro_tools/examples/macro_tools_attr_prop.rs index 8a5f62d22c..3c18928fd1 100644 --- a/module/core/macro_tools/examples/macro_tools_attr_prop.rs +++ b/module/core/macro_tools/examples/macro_tools_attr_prop.rs @@ -34,6 +34,7 @@ fn main() use macro_tools:: { + ct, syn_err, return_syn_err, qt, @@ -42,8 +43,8 @@ fn main() AttributePropertyComponent, AttributePropertyBoolean, AttributePropertySingletone, + Assign, }; - use former_types::Assign; /// Represents the attributes of a struct. Aggregates all its attributes. #[ derive( Debug, Default ) ] @@ -66,12 +67,11 @@ fn main() // Closure to generate an error message for unknown attributes. let error = | attr : & syn::Attribute | -> syn::Error { - let known_attributes = const_format::concatcp! + let known_attributes = ct::str::format! 
( - "Known attributes are: ", + "Known attributes are: {}, {}.", "debug", - ", ", AttributeMutator::KEYWORD, - "." + AttributeMutator::KEYWORD, ); syn_err! ( @@ -191,12 +191,12 @@ fn main() let error = | ident : & syn::Ident | -> syn::Error { - let known = const_format::concatcp! + let known = ct::str::format! ( - "Known entries of attribute ", AttributeMutator::KEYWORD, " are: ", + "Known entries of attribute {} are: {}, {}.", + AttributeMutator::KEYWORD, AttributePropertyCustom::KEYWORD, - ", ", AttributePropertyDebug::KEYWORD, - "." + AttributePropertyDebug::KEYWORD, ); syn_err! ( diff --git a/module/core/macro_tools/src/attr.rs b/module/core/macro_tools/src/attr.rs index 4524ded53e..fb442d6133 100644 --- a/module/core/macro_tools/src/attr.rs +++ b/module/core/macro_tools/src/attr.rs @@ -50,7 +50,7 @@ pub( crate ) mod private /// ``` /// - pub fn has_debug< 'a >( attrs : impl Iterator< Item = &'a syn::Attribute > ) -> Result< bool > + pub fn has_debug< 'a >( attrs : impl Iterator< Item = &'a syn::Attribute > ) -> syn::Result< bool > { for attr in attrs { @@ -211,7 +211,7 @@ pub( crate ) mod private impl syn::parse::Parse for AttributesInner { - fn parse( input : ParseStream< '_ > ) -> Result< Self > + fn parse( input : ParseStream< '_ > ) -> syn::Result< Self > { // let mut result : Self = from!(); let mut result : Self = Default::default(); @@ -286,7 +286,7 @@ pub( crate ) mod private impl syn::parse::Parse for AttributesOuter { - fn parse( input : ParseStream< '_ > ) -> Result< Self > + fn parse( input : ParseStream< '_ > ) -> syn::Result< Self > { let mut result : Self = Default::default(); loop @@ -335,7 +335,7 @@ pub( crate ) mod private /// # Example /// /// ```rust - /// use macro_tools::{ AttributeComponent, Result }; + /// use macro_tools::{ AttributeComponent, syn::Result }; /// use syn::{ Attribute, Error }; /// /// struct MyComponent; @@ -344,7 +344,7 @@ pub( crate ) mod private /// { /// const KEYWORD : &'static str = "my_component"; /// - /// fn from_meta( attr : &Attribute ) -> Result + /// fn from_meta( attr : &Attribute ) -> syn::Result /// { /// // Parsing logic here /// // Return Ok(MyComponent) if parsing is successful @@ -360,7 +360,7 @@ pub( crate ) mod private /// /// # Returns /// - /// A `Result` containing the constructed component if successful, or an error if the parsing fails. + /// A `syn::Result` containing the constructed component if successful, or an error if the parsing fails. /// pub trait AttributeComponent where @@ -384,42 +384,9 @@ pub( crate ) mod private /// /// # Returns /// - /// A `Result` containing the constructed component if successful, or an error if the parsing fails. - fn from_meta( attr : &syn::Attribute ) -> Result< Self >; - } - - /// Trait for properties of an attribute component that can be identified by a keyword. - /// - /// The `AttributePropertyComponent` trait defines the interface for attribute properties - /// that can be identified by a specific keyword. Implementors of this trait are required - /// to define a constant `KEYWORD` that identifies the type of the property. - /// - /// This trait is useful in scenarios where attributes may have multiple properties - /// that need to be parsed and handled separately. By defining a unique keyword for each property, - /// the parsing logic can accurately identify and process each property. 
- /// - /// # Example - /// - /// ```rust - /// use macro_tools::AttributePropertyComponent; - /// - /// struct MyProperty; - /// - /// impl AttributePropertyComponent for MyProperty - /// { - /// const KEYWORD : &'static str = "my_property"; - /// } - /// ``` - /// - pub trait AttributePropertyComponent - where - Self : Sized, - { - /// The keyword that identifies the component. - /// - /// This constant is used to match the attribute to the corresponding property. - /// Each implementor of this trait must provide a unique keyword for its type. - const KEYWORD : &'static str; + /// A `syn::Result` containing the constructed component if successful, or an error if the parsing fails. + fn from_meta( attr : &syn::Attribute ) -> syn::Result< Self >; + // xxx : redo } } @@ -453,9 +420,13 @@ pub mod orphan } /// Exposed namespace of the module. +// xxx2 : continue +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as attr; + use super::*; + pub use super::super::attr; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; @@ -467,7 +438,7 @@ pub mod exposed AttributesOuter, AttributeComponent, - AttributePropertyComponent, + // AttributePropertyComponent, }; } diff --git a/module/core/macro_tools/src/attr_prop.rs b/module/core/macro_tools/src/attr_prop.rs index 4e5020eff2..1942ea9e05 100644 --- a/module/core/macro_tools/src/attr_prop.rs +++ b/module/core/macro_tools/src/attr_prop.rs @@ -107,6 +107,40 @@ pub( crate ) mod private { // use crate::*; + /// Trait for properties of an attribute component that can be identified by a keyword. + /// + /// The `AttributePropertyComponent` trait defines the interface for attribute properties + /// that can be identified by a specific keyword. Implementors of this trait are required + /// to define a constant `KEYWORD` that identifies the type of the property. + /// + /// This trait is useful in scenarios where attributes may have multiple properties + /// that need to be parsed and handled separately. By defining a unique keyword for each property, + /// the parsing logic can accurately identify and process each property. + /// + /// # Example + /// + /// ```rust + /// use macro_tools::AttributePropertyComponent; + /// + /// struct MyProperty; + /// + /// impl AttributePropertyComponent for MyProperty + /// { + /// const KEYWORD : &'static str = "my_property"; + /// } + /// ``` + /// + pub trait AttributePropertyComponent + where + Self : Sized, + { + /// The keyword that identifies the component. + /// + /// This constant is used to match the attribute to the corresponding property. + /// Each implementor of this trait must provide a unique keyword for its type. + const KEYWORD : &'static str; + } + } #[ doc( inline ) ] @@ -135,25 +169,35 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as attr_prop; + use super::*; + pub use super::super::attr_prop; + + // pub use super::protected as attr_prop; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super:: { + private::AttributePropertyComponent, + singletone::AttributePropertySingletone, singletone::AttributePropertySingletoneMarker, singletone_optional::AttributePropertyOptionalSingletone, singletone_optional::AttributePropertyOptionalSingletoneMarker, + boolean::AttributePropertyBoolean, boolean::AttributePropertyBooleanMarker, boolean_optional::AttributePropertyOptionalBoolean, boolean_optional::AttributePropertyOptionalBooleanMarker, + syn::AttributePropertySyn, syn::AttributePropertySynMarker, syn_optional::AttributePropertyOptionalSyn, diff --git a/module/core/macro_tools/src/attr_prop/boolean.rs b/module/core/macro_tools/src/attr_prop/boolean.rs index 4472b3ae42..fa786b990d 100644 --- a/module/core/macro_tools/src/attr_prop/boolean.rs +++ b/module/core/macro_tools/src/attr_prop/boolean.rs @@ -4,7 +4,7 @@ //! use crate::*; -use former_types::Assign; +// use former_types::Assign; /// Default marker for `AttributePropertyBoolean`. /// Used if no marker is defined as parameter. diff --git a/module/core/macro_tools/src/attr_prop/boolean_optional.rs b/module/core/macro_tools/src/attr_prop/boolean_optional.rs index 680803f4c8..35f6f4f081 100644 --- a/module/core/macro_tools/src/attr_prop/boolean_optional.rs +++ b/module/core/macro_tools/src/attr_prop/boolean_optional.rs @@ -4,7 +4,8 @@ //! use crate::*; -use former_types::Assign; +use components::Assign; +//xxx /// Default marker for `AttributePropertyOptionalSingletone`. /// Used if no marker is defined as parameter. diff --git a/module/core/macro_tools/src/attr_prop/singletone.rs b/module/core/macro_tools/src/attr_prop/singletone.rs index 1d55d9ac7c..0e6970bdd0 100644 --- a/module/core/macro_tools/src/attr_prop/singletone.rs +++ b/module/core/macro_tools/src/attr_prop/singletone.rs @@ -12,7 +12,7 @@ //! This is useful for attributes that need to enable or disable features or flags. use crate::*; -use former_types::Assign; +// use former_types::Assign; /// Default marker for `AttributePropertySingletone`. /// Used if no marker is defined as parameter. diff --git a/module/core/macro_tools/src/attr_prop/singletone_optional.rs b/module/core/macro_tools/src/attr_prop/singletone_optional.rs index 39c3dd9940..7d500cc94f 100644 --- a/module/core/macro_tools/src/attr_prop/singletone_optional.rs +++ b/module/core/macro_tools/src/attr_prop/singletone_optional.rs @@ -14,7 +14,7 @@ //! This is useful for attributes that need to enable or disable features or flags. use crate::*; -use former_types::Assign; +// use former_types::Assign; /// Default marker for `AttributePropertyOptionalSingletone`. /// Used if no marker is defined as parameter. diff --git a/module/core/macro_tools/src/attr_prop/syn.rs b/module/core/macro_tools/src/attr_prop/syn.rs index c60a21cfdd..183ead1a3a 100644 --- a/module/core/macro_tools/src/attr_prop/syn.rs +++ b/module/core/macro_tools/src/attr_prop/syn.rs @@ -3,7 +3,7 @@ //! use crate::*; -use former_types::Assign; +// use former_types::Assign; /// Default marker for `AttributePropertySyn`. /// Used if no marker is defined as parameter. 
diff --git a/module/core/macro_tools/src/attr_prop/syn_optional.rs b/module/core/macro_tools/src/attr_prop/syn_optional.rs index d595e9496a..4e5bba2783 100644 --- a/module/core/macro_tools/src/attr_prop/syn_optional.rs +++ b/module/core/macro_tools/src/attr_prop/syn_optional.rs @@ -3,7 +3,7 @@ //! use crate::*; -use former_types::Assign; +// use former_types::Assign; /// Default marker for `AttributePropertyOptionalSyn`. /// Used if no marker is defined as parameter. diff --git a/module/core/macro_tools/src/components.rs b/module/core/macro_tools/src/components.rs new file mode 100644 index 0000000000..c4341873d2 --- /dev/null +++ b/module/core/macro_tools/src/components.rs @@ -0,0 +1,70 @@ +//! +//! Compile-time const expressions. +//! + +/// Internal namespace. +pub( crate ) mod private +{ +} + +/// Compile-time const expressions. +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +pub use protected::*; + +/// Protected namespace of the module. +pub mod protected +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::orphan::*; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + }; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use ::former_types::protected::*; +} + +/// Orphan namespace of the module. +pub mod orphan +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::exposed::*; +} + +/// Exposed namespace of the module. +#[ allow( unused_imports ) ] +pub mod exposed +{ + use super::*; + pub use super::super::components; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::prelude::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use ::former_types::exposed::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + }; + +} + +/// Prelude to use essentials: `use my_module::prelude::*`. +pub mod prelude +{ + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use ::former_types::prelude::*; + +} diff --git a/module/core/macro_tools/src/container_kind.rs b/module/core/macro_tools/src/container_kind.rs index a516594e47..8d0886a026 100644 --- a/module/core/macro_tools/src/container_kind.rs +++ b/module/core/macro_tools/src/container_kind.rs @@ -127,9 +127,13 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as container_kind; + use super::*; + pub use super::super::container_kind; + + // pub use super::protected as container_kind; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/macro_tools/src/ct.rs b/module/core/macro_tools/src/ct.rs new file mode 100644 index 0000000000..bb4420ad8b --- /dev/null +++ b/module/core/macro_tools/src/ct.rs @@ -0,0 +1,63 @@ +//! +//! Compile-time tools. +//! + +/// Internal namespace. +pub( crate ) mod private +{ +} + +/// Compile-time const expressions for strings. +pub mod str; + +/// Compile-time tools. +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +pub use protected::*; + +/// Protected namespace of the module. +pub mod protected +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::orphan::*; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + }; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use ::const_format::*; +} + +/// Orphan namespace of the module. +pub mod orphan +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::exposed::*; +} + +/// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] +pub mod exposed +{ + use super::*; + pub use super::super::ct; + + // pub use super::protected as ct; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::prelude::*; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + }; +} + +/// Prelude to use essentials: `use my_module::prelude::*`. +pub mod prelude +{ +} diff --git a/module/core/macro_tools/src/ct/str.rs b/module/core/macro_tools/src/ct/str.rs new file mode 100644 index 0000000000..f0fd4271e2 --- /dev/null +++ b/module/core/macro_tools/src/ct/str.rs @@ -0,0 +1,8 @@ + +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +pub use ::const_format:: +{ + concatcp as concat, + formatcp as format, +}; diff --git a/module/core/macro_tools/src/derive.rs b/module/core/macro_tools/src/derive.rs index 13db46db90..8d0ab26396 100644 --- a/module/core/macro_tools/src/derive.rs +++ b/module/core/macro_tools/src/derive.rs @@ -75,9 +75,11 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as derive; + use super::*; + pub use super::super::derive; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/core/macro_tools/src/diag.rs b/module/core/macro_tools/src/diag.rs index 10a7e9e0a5..851c6d13b2 100644 --- a/module/core/macro_tools/src/diag.rs +++ b/module/core/macro_tools/src/diag.rs @@ -389,9 +389,11 @@ pub use protected::*; /// Protected namespace of the module. pub mod protected { + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::orphan::*; + } /// Parented namespace of the module. @@ -400,12 +402,22 @@ pub mod orphan #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::exposed::*; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + Result, + }; + } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as diag; + use super::*; + pub use super::super::diag; #[ doc( inline ) ] #[ allow( unused_imports ) ] @@ -415,7 +427,6 @@ pub mod exposed #[ allow( unused_imports ) ] pub use super::private:: { - Result, indentation, report_format, report_print, diff --git a/module/core/macro_tools/src/drop.rs b/module/core/macro_tools/src/drop.rs index f6e5814e48..61268d454c 100644 --- a/module/core/macro_tools/src/drop.rs +++ b/module/core/macro_tools/src/drop.rs @@ -82,9 +82,12 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as drop; + use super::*; + pub use super::super::drop; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/src/equation.rs b/module/core/macro_tools/src/equation.rs index 36bab1ccab..2756bcf51f 100644 --- a/module/core/macro_tools/src/equation.rs +++ b/module/core/macro_tools/src/equation.rs @@ -140,9 +140,12 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as equation; + use super::*; + pub use super::super::equation; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/src/generic_args.rs b/module/core/macro_tools/src/generic_args.rs index a2a175989b..6d74a0a5ff 100644 --- a/module/core/macro_tools/src/generic_args.rs +++ b/module/core/macro_tools/src/generic_args.rs @@ -178,9 +178,12 @@ pub mod orphan } /// Exposed namespace of the module. 
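A minimal sketch of what the new `ct::str` helpers ( re-exports of `const_format::concatcp` and `formatcp` ) produce; this is the machinery behind the reworked error-hint strings earlier in the patch. It assumes the `ct` feature is enabled; the `KEYWORD` constant is illustrative:

```rust
use macro_tools::ct;

const KEYWORD : &'static str = "debug";

// Both macros evaluate at compile time and yield a `&'static str`.
const HINT_CONCAT : &str = ct::str::concat!( "Known attributes are : ", KEYWORD, "." );
const HINT_FORMAT : &str = ct::str::format!( "Known attributes are : {}.", KEYWORD );

fn main()
{
  assert_eq!( HINT_CONCAT, "Known attributes are : debug." );
  assert_eq!( HINT_CONCAT, HINT_FORMAT );
}
```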
+#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as generic_args; + use super::*; + pub use super::super::generic_args; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super:: diff --git a/module/core/macro_tools/src/generic_params.rs b/module/core/macro_tools/src/generic_params.rs index 3729237876..710aea054c 100644 --- a/module/core/macro_tools/src/generic_params.rs +++ b/module/core/macro_tools/src/generic_params.rs @@ -6,6 +6,8 @@ pub( crate ) mod private { use crate::*; + use crate::IterTrait; + // use iter_tools::IterTrait; /// A `GenericsWithWhere` struct to handle the parsing of Rust generics with an explicit `where` clause. /// @@ -542,9 +544,12 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as generic_params; + use super::*; + pub use super::super::generic_params; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super:: diff --git a/module/core/macro_tools/src/item.rs b/module/core/macro_tools/src/item.rs index 2bd19814b2..55e43aee78 100644 --- a/module/core/macro_tools/src/item.rs +++ b/module/core/macro_tools/src/item.rs @@ -86,7 +86,7 @@ pub( crate ) mod private #[ allow( unused_imports ) ] pub use protected::*; -// qqq : zzz : make sure documentation look good. generate, review and fix every file +// qqq : xxx : make sure documentation look good. generate, review and fix every file /// This module provides various utilities and namespaces for working with `syn::Item`, specifically focusing on /// ensuring syntactical correctness and managing different visibility levels within the code. It includes functions /// to manipulate the structure of items, handle different kinds of fields, and provide a structured approach to @@ -118,10 +118,12 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - // pub use super::protected as item; - pub use super::protected as item; + use super::*; + pub use super::super::item; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super:: diff --git a/module/core/macro_tools/src/item_struct.rs b/module/core/macro_tools/src/item_struct.rs index f5a33a21c2..45ec77b724 100644 --- a/module/core/macro_tools/src/item_struct.rs +++ b/module/core/macro_tools/src/item_struct.rs @@ -6,10 +6,12 @@ pub( crate ) mod private { use crate::*; + // use iter_tools::{ IterTrait, BoxedIter }; /// Extracts the types of each field into a vector. pub fn field_types< 'a >( t : &'a syn::ItemStruct ) - -> impl IterTrait< 'a, &'a syn::Type > + -> + impl IterTrait< 'a, &'a syn::Type > // -> std::iter::Map // < // syn::punctuated::Iter< 'a, syn::Field >, @@ -88,7 +90,6 @@ pub mod protected #[ allow( unused_imports ) ] pub use super::private:: { - // fields_many, field_types, field_names, first_field_type, @@ -105,9 +106,12 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as item_struct; + use super::*; + pub use super::super::item_struct; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/src/iter.rs b/module/core/macro_tools/src/iter.rs index f3553f773b..6b5e5851ce 100644 --- a/module/core/macro_tools/src/iter.rs +++ b/module/core/macro_tools/src/iter.rs @@ -1,158 +1,17 @@ //! -//! Iterators. +//! Tailored iterator. //! /// Internal namespace. 
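Since `field_types` now advertises `impl IterTrait< 'a, &'a syn::Type >`, a minimal usage sketch follows. It assumes the `item_struct` feature is enabled and that `syn` is built with the features `macro_tools` already requires; the `Example` struct is illustrative:

```rust
use macro_tools::prelude::*;
use macro_tools::item_struct;

fn main()
{
  let item : syn::ItemStruct = parse_qt!
  {
    struct Example
    {
      a : i32,
      b : String,
    }
  };

  // The returned iterator is exact-size, double-ended and clonable ( `IterTrait` ).
  let types : Vec< &syn::Type > = item_struct::field_types( &item ).collect();
  assert_eq!( types.len(), 2 );
}
```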
pub( crate ) mod private { - use clone_dyn_types::CloneDyn; - - /// Trait that encapsulates an iterator with specific characteristics, tailored for use with the `syn` crate and implemetning `CloneDyn`. - /// - /// The `_IterTrait` trait is designed to represent iterators that may yield references to items ( `&'a T` ) within the `syn` crate. - /// These iterators must also implement the `ExactSizeIterator` and `DoubleEndedIterator` traits. - /// This combination ensures that the iterator can: - /// - Provide an exact size hint ( `ExactSizeIterator` ), - /// - Be traversed from both ends ( `DoubleEndedIterator` ). - /// - /// Additionally, the iterator must implement the `CloneDyn` trait, which allows cloning of trait objects. - /// - /// # Example - /// ```rust - /// use macro_tools::_IterTrait; - /// - /// // Example struct that implements Iterator, ExactSizeIterator, DoubleEndedIterator, and CloneDyn. - /// #[ derive( Clone ) ] - /// struct MyIterator - /// { - /// // internal fields - /// } - /// - /// impl Iterator for MyIterator - /// { - /// type Item = i32; - /// - /// fn next( &mut self ) -> Option< Self::Item > - /// { - /// // implementation - /// Some( 1 ) - /// } - /// } - /// - /// impl ExactSizeIterator for MyIterator - /// { - /// fn len( &self ) -> usize - /// { - /// // implementation - /// 1 - /// } - /// } - /// - /// impl DoubleEndedIterator for MyIterator - /// { - /// fn next_back( &mut self ) -> Option< Self::Item > - /// { - /// // implementation - /// Some( 1 ) - /// } - /// } - /// - /// ``` - pub trait _IterTrait< 'a, T > - where - T : 'a, - Self : Iterator< Item = T > + ExactSizeIterator< Item = T > + DoubleEndedIterator, - Self : CloneDyn, - { - } - - impl< 'a, T, I > _IterTrait< 'a, T > for I - where - T : 'a, - Self : Iterator< Item = T > + ExactSizeIterator< Item = T > + DoubleEndedIterator, - Self : CloneDyn, - { - } - - /// Implement `Clone` for boxed `_IterTrait` trait objects. - /// - /// This allows cloning of boxed iterators that implement `_IterTrait`. - #[ allow( non_local_definitions ) ] - impl< 'c, T > Clone for Box< dyn _IterTrait< 'c, T > + 'c > - { - #[ inline ] - fn clone( &self ) -> Self - { - clone_dyn_types::clone_into_box( &**self ) - } - } - - #[ allow( non_local_definitions ) ] - impl< 'c, T > Clone for Box< dyn _IterTrait< 'c, T > + Send + 'c > - { - #[ inline ] - fn clone( &self ) -> Self - { - clone_dyn_types::clone_into_box( &**self ) - } - } - - #[ allow( non_local_definitions ) ] - impl< 'c, T > Clone for Box< dyn _IterTrait< 'c, T > + Sync + 'c > - { - #[ inline ] - fn clone( &self ) -> Self - { - clone_dyn_types::clone_into_box( &**self ) - } - } - - #[ allow( non_local_definitions ) ] - impl< 'c, T > Clone for Box< dyn _IterTrait< 'c, T > + Send + Sync + 'c > - { - #[ inline ] - fn clone( &self ) -> Self - { - clone_dyn_types::clone_into_box( &**self ) - } - } - - /// Type alias for boxed `_IterTrait` trait objects. - /// - /// Prefer `BoxedIter` over `impl _IterTrait` when using trait objects ( `dyn _IterTrait` ) because the concrete type in return is less restrictive than `impl _IterTrait`. - /// - pub type BoxedIter< 'a, T > = Box< dyn _IterTrait< 'a, T > + 'a >; - - /// Trait that encapsulates a clonable iterator with specific characteristics, tailored for use with the `syn` crate. - /// - /// The `IterTrait` trait is designed to represent iterators that may yield references to items ( `&'a T` ) within the `syn` crate. - /// These iterators must also implement the `ExactSizeIterator`, `DoubleEndedIterator`, and `Clone` traits. 
- /// This combination ensures that the iterator can: - /// - Provide an exact size hint ( `ExactSizeIterator` ), - /// - Be traversed from both ends ( `DoubleEndedIterator` ), - /// - Be clonable ( `Clone` ). - /// - pub trait IterTrait< 'a, T > - where - T : 'a, - Self : _IterTrait< 'a, T > + Clone, - { - } - - impl< 'a, T, I > IterTrait< 'a, T > for I - where - T : 'a, - Self : _IterTrait< 'a, T > + Clone, - { - } - } #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use protected::*; -/// Protected namespace of the module. +/// Tailoted iterator. pub mod protected { #[ doc( inline ) ] @@ -160,9 +19,7 @@ pub mod protected pub use super::orphan::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::private:: - { - }; + pub use iter_tools::protected::*; } /// Orphan namespace of the module. @@ -174,23 +31,28 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as iter; + use super::*; + // pub use super::super::iter; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; + #[ doc( inline ) ] #[ allow( unused_imports ) ] - pub use super::private:: - { - _IterTrait, - IterTrait, - BoxedIter, - }; + pub use iter_tools::exposed::*; + } /// Prelude to use essentials: `use my_module::prelude::*`. pub mod prelude { + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use iter_tools::prelude::*; + } diff --git a/module/core/macro_tools/src/kw.rs b/module/core/macro_tools/src/kw.rs new file mode 100644 index 0000000000..f7eb1c9c05 --- /dev/null +++ b/module/core/macro_tools/src/kw.rs @@ -0,0 +1,69 @@ +//! +//! Keywords +//! + +/// Internal namespace. +pub( crate ) mod private +{ + // use crate::*; + + const KEYWORDS : &[ &str ] = + &[ + "as", "break", "const", "continue", "crate", "else", "enum", "extern", "false", "fn", + "for", "if", "impl", "in", "let", "loop", "match", "mod", "move", "mut", "pub", "ref", + "return", "self", "Self", "static", "struct", "super", "trait", "true", "type", "unsafe", + "use", "where", "while", "async", "await", "dyn", + ]; + + // qqq : zzz : cover by tests + /// Check is string a keyword. + pub fn is( src : &str ) -> bool + { + KEYWORDS.contains( &src ) + } + +} + +#[ doc( inline ) ] +#[ allow( unused_imports ) ] +pub use protected::*; + +/// Protected namespace of the module. +pub mod protected +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::orphan::*; +} + +/// Orphan namespace of the module. +pub mod orphan +{ + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::exposed::*; +} + +/// Exposed namespace of the module. +#[ allow( unused_imports ) ] +pub mod exposed +{ + use super::*; + pub use super::super::kw; + + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::prelude::*; + #[ doc( inline ) ] + #[ allow( unused_imports ) ] + pub use super::private:: + { + is, + }; +} + +/// Prelude to use essentials: `use my_module::prelude::*`. +pub mod prelude +{ +} + diff --git a/module/core/macro_tools/src/lib.rs b/module/core/macro_tools/src/lib.rs index 85bafc8eb4..6efba82cb5 100644 --- a/module/core/macro_tools/src/lib.rs +++ b/module/core/macro_tools/src/lib.rs @@ -20,44 +20,66 @@ // exposed // Exposed namespace of the module. -/// Modular files. -#[ cfg( feature = "enabled" ) ] -#[ path = "." 
] -mod file -{ - // use super::*; - pub mod attr; - pub mod attr_prop; - pub mod container_kind; - pub mod derive; - pub mod diag; - pub mod drop; - pub mod equation; - pub mod generic_args; - pub mod generic_params; - pub mod item; - pub mod item_struct; - pub mod iter; - pub mod name; - pub mod phantom; - pub mod punctuated; - pub mod quantifier; - pub mod struct_like; - pub mod tokens; - pub mod typ; -} +#[ cfg( all( feature = "enabled", feature = "attr" ) ) ] +pub mod attr; +#[ cfg( all( feature = "enabled", feature = "attr_prop" ) ) ] +pub mod attr_prop; +#[ cfg( all( feature = "enabled", feature = "components" ) ) ] +pub mod components; +#[ cfg( all( feature = "enabled", feature = "ct" ) ) ] +pub mod ct; +#[ cfg( all( feature = "enabled", feature = "container_kind" ) ) ] +pub mod container_kind; +#[ cfg( all( feature = "enabled", feature = "derive" ) ) ] +pub mod derive; +#[ cfg( all( feature = "enabled", feature = "diag" ) ) ] +pub mod diag; +#[ cfg( all( feature = "enabled", feature = "drop" ) ) ] +pub mod drop; +#[ cfg( all( feature = "enabled", feature = "equation" ) ) ] +pub mod equation; +#[ cfg( all( feature = "enabled", feature = "generic_args" ) ) ] +pub mod generic_args; +#[ cfg( all( feature = "enabled", feature = "generic_params" ) ) ] +pub mod generic_params; +#[ cfg( all( feature = "enabled", feature = "item" ) ) ] +pub mod item; +#[ cfg( all( feature = "enabled", feature = "item_struct" ) ) ] +pub mod item_struct; +#[ cfg( all( feature = "enabled", feature = "name" ) ) ] +pub mod name; +#[ cfg( all( feature = "enabled", feature = "kw" ) ) ] +pub mod kw; +#[ cfg( all( feature = "enabled", feature = "phantom" ) ) ] +pub mod phantom; +#[ cfg( all( feature = "enabled", feature = "punctuated" ) ) ] +pub mod punctuated; +#[ cfg( all( feature = "enabled", feature = "quantifier" ) ) ] +pub mod quantifier; +#[ cfg( all( feature = "enabled", feature = "struct_like" ) ) ] +pub mod struct_like; +#[ cfg( all( feature = "enabled", feature = "tokens" ) ) ] +pub mod tokens; +#[ cfg( all( feature = "enabled", feature = "typ" ) ) ] +pub mod typ; + +#[ cfg( all( feature = "enabled" ) ) ] +pub mod iter; /// /// Dependencies of the module. /// #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod dependency { pub use ::syn; pub use ::quote; pub use ::proc_macro2; pub use ::interval_adapter; + pub use ::clone_dyn_types; + pub use ::former_types; } #[ doc( inline ) ] @@ -65,110 +87,218 @@ pub mod dependency #[ cfg( feature = "enabled" ) ] pub use protected::*; +// qqq : put every file of the first level under feature + /// Protected namespace of the module. 
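The new `kw` module is small; a usage sketch, assuming the `kw` feature is enabled:

```rust
use macro_tools::kw;

fn main()
{
  // `kw::is` reports whether a string is a Rust keyword.
  assert!( kw::is( "fn" ) );
  assert!( kw::is( "match" ) );
  assert!( !kw::is( "not_a_keyword" ) );
}
```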
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod protected { - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super:: + + mod _all { - orphan::*, - }; + use super::super::*; + pub use orphan::*; + + #[ cfg( feature = "attr" ) ] + pub use attr::orphan::*; + #[ cfg( feature = "attr_prop" ) ] + pub use attr_prop::orphan::*; + #[ cfg( feature = "components" ) ] + pub use components::orphan::*; + #[ cfg( feature = "container_kind" ) ] + pub use container_kind::orphan::*; + #[ cfg( feature = "ct" ) ] + pub use ct::orphan::*; + #[ cfg( feature = "derive" ) ] + pub use derive::orphan::*; + #[ cfg( feature = "diag" ) ] + pub use diag::orphan::*; + #[ cfg( feature = "drop" ) ] + pub use drop::orphan::*; + #[ cfg( feature = "equation" ) ] + pub use equation::orphan::*; + #[ cfg( feature = "generic_args" ) ] + pub use generic_args::orphan::*; + #[ cfg( feature = "generic_params" ) ] + pub use generic_params::orphan::*; + #[ cfg( feature = "item" ) ] + pub use item::orphan::*; + #[ cfg( feature = "item_struct" ) ] + pub use item_struct::orphan::*; + #[ cfg( feature = "name" ) ] + pub use name::orphan::*; + #[ cfg( feature = "kw" ) ] + pub use kw::orphan::*; + #[ cfg( feature = "phantom" ) ] + pub use phantom::orphan::*; + #[ cfg( feature = "punctuated" ) ] + pub use punctuated::orphan::*; + #[ cfg( feature = "quantifier" ) ] + pub use quantifier::orphan::*; + #[ cfg( feature = "struct_like" ) ] + pub use struct_like::orphan::*; + #[ cfg( feature = "tokens" ) ] + pub use tokens::orphan::*; + #[ cfg( feature = "typ" ) ] + pub use typ::orphan::*; + + pub use iter::orphan::*; + + } + #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::file:: - { - attr::orphan::*, - attr_prop::orphan::*, - container_kind::orphan::*, - derive::orphan::*, - diag::orphan::*, - drop::orphan::*, - equation::orphan::*, - generic_args::orphan::*, - generic_params::orphan::*, - item::orphan::*, - item_struct::orphan::*, - iter::orphan::*, - name::orphan::*, - phantom::orphan::*, - punctuated::orphan::*, - quantifier::orphan::*, - struct_like::orphan::*, - tokens::orphan::*, - typ::orphan::*, - }; + pub use _all::*; + } /// Parented namespace of the module. #[ cfg( feature = "enabled" ) ] + #[ allow( unused_imports ) ] pub mod orphan { + + // xxx : remove pub? + mod _all + { + use super::super::*; + pub use exposed::*; + } + #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::exposed::*; + pub use _all::*; + } /// Exposed namespace of the module. 
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use quote:: + mod _all { - format_ident, - quote, - quote_spanned, - }; + use super::super::*; + pub use prelude::*; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super:: - { - prelude::*, - }; + #[ cfg( feature = "attr" ) ] + pub use attr::exposed::*; + #[ cfg( feature = "attr_prop" ) ] + pub use attr_prop::exposed::*; + #[ cfg( feature = "components" ) ] + pub use components::exposed::*; + #[ cfg( feature = "container_kind" ) ] + pub use container_kind::exposed::*; + #[ cfg( feature = "ct" ) ] + pub use ct::exposed::*; + #[ cfg( feature = "derive" ) ] + pub use derive::exposed::*; + #[ cfg( feature = "diag" ) ] + pub use diag::exposed::*; + #[ cfg( feature = "drop" ) ] + pub use drop::exposed::*; + #[ cfg( feature = "equation" ) ] + pub use equation::exposed::*; + #[ cfg( feature = "generic_args" ) ] + pub use generic_args::exposed::*; + #[ cfg( feature = "generic_params" ) ] + pub use generic_params::exposed::*; + #[ cfg( feature = "item" ) ] + pub use item::exposed::*; + #[ cfg( feature = "item_struct" ) ] + pub use item_struct::exposed::*; + #[ cfg( feature = "name" ) ] + pub use name::exposed::*; + #[ cfg( feature = "kw" ) ] + pub use kw::exposed::*; + #[ cfg( feature = "phantom" ) ] + pub use phantom::exposed::*; + #[ cfg( feature = "punctuated" ) ] + pub use punctuated::exposed::*; + #[ cfg( feature = "quantifier" ) ] + pub use quantifier::exposed::*; + #[ cfg( feature = "struct_like" ) ] + pub use struct_like::exposed::*; + #[ cfg( feature = "tokens" ) ] + pub use tokens::exposed::*; + #[ cfg( feature = "typ" ) ] + pub use typ::exposed::*; + + pub use iter::exposed::*; + + } #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::file:: - { - attr::exposed::*, - attr_prop::exposed::*, - container_kind::exposed::*, - derive::orphan::*, - diag::exposed::*, - drop::exposed::*, - equation::exposed::*, - generic_args::exposed::*, - generic_params::exposed::*, - item::exposed::*, - item_struct::exposed::*, - iter::exposed::*, - name::exposed::*, - phantom::exposed::*, - punctuated::exposed::*, - quantifier::exposed::*, - struct_like::exposed::*, - tokens::exposed::*, - typ::exposed::*, - }; + pub use _all::*; } /// Prelude to use essentials: `use my_module::prelude::*`. 
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod prelude { + mod _all + { + use super::super::*; + // pub use prelude::*; + + #[ cfg( feature = "attr" ) ] + pub use attr::prelude::*; + #[ cfg( feature = "attr_prop" ) ] + pub use attr_prop::prelude::*; + #[ cfg( feature = "components" ) ] + pub use components::prelude::*; + #[ cfg( feature = "container_kind" ) ] + pub use container_kind::prelude::*; + #[ cfg( feature = "ct" ) ] + pub use ct::prelude::*; + #[ cfg( feature = "derive" ) ] + pub use derive::prelude::*; + #[ cfg( feature = "diag" ) ] + pub use diag::prelude::*; + #[ cfg( feature = "drop" ) ] + pub use drop::prelude::*; + #[ cfg( feature = "equation" ) ] + pub use equation::prelude::*; + #[ cfg( feature = "generic_args" ) ] + pub use generic_args::prelude::*; + #[ cfg( feature = "generic_params" ) ] + pub use generic_params::prelude::*; + #[ cfg( feature = "item" ) ] + pub use item::prelude::*; + #[ cfg( feature = "item_struct" ) ] + pub use item_struct::prelude::*; + #[ cfg( feature = "name" ) ] + pub use name::prelude::*; + #[ cfg( feature = "kw" ) ] + pub use kw::exposed::*; + #[ cfg( feature = "phantom" ) ] + pub use phantom::prelude::*; + #[ cfg( feature = "punctuated" ) ] + pub use punctuated::prelude::*; + #[ cfg( feature = "quantifier" ) ] + pub use quantifier::prelude::*; + #[ cfg( feature = "struct_like" ) ] + pub use struct_like::prelude::*; + #[ cfg( feature = "tokens" ) ] + pub use tokens::prelude::*; + #[ cfg( feature = "typ" ) ] + pub use typ::prelude::*; + + pub use iter::prelude::*; + + } + + #[ doc( inline ) ] + pub use _all::*; + #[ doc( inline ) ] - #[ allow( unused_imports ) ] pub use ::interval_adapter::prelude::*; #[ doc( inline ) ] - #[ allow( unused_imports ) ] pub use ::syn; #[ doc( inline ) ] @@ -183,13 +313,15 @@ pub mod prelude #[ allow( unused_imports ) ] pub use ::quote:: { + quote, quote as qt, + quote_spanned, format_ident, }; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use ::syn::spanned::Spanned; + // #[ doc( inline ) ] + // #[ allow( unused_imports ) ] + // pub use ::syn::spanned::Spanned; #[ doc( inline ) ] #[ allow( unused_imports ) ] @@ -197,6 +329,7 @@ pub mod prelude { parse::ParseStream, Token, + spanned::Spanned, braced, bracketed, custom_keyword, @@ -209,31 +342,6 @@ pub mod prelude parse_quote_spanned as parse_qt_spanned, }; - #[ doc( inline ) ] - #[ allow( unused_imports ) ] - pub use super::file:: - { - attr::prelude::*, - attr_prop::prelude::*, - container_kind::prelude::*, - derive::orphan::*, - diag::prelude::*, - drop::prelude::*, - equation::prelude::*, - generic_args::prelude::*, - generic_params::prelude::*, - item::prelude::*, - item_struct::prelude::*, - iter::prelude::*, - name::prelude::*, - phantom::prelude::*, - punctuated::prelude::*, - quantifier::prelude::*, - struct_like::prelude::*, - tokens::prelude::*, - typ::prelude::*, - }; - } // qqq : introduce features. make it smart. discuss list of features before implementing diff --git a/module/core/macro_tools/src/name.rs b/module/core/macro_tools/src/name.rs index b88fc03f0a..b7e33aa49d 100644 --- a/module/core/macro_tools/src/name.rs +++ b/module/core/macro_tools/src/name.rs @@ -245,9 +245,14 @@ pub mod orphan } /// Exposed namespace of the module. 
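With the prelude now re-exporting `quote`, `qt`, `quote_spanned`, `format_ident` and the common `syn` items directly, downstream code can stay on a single glob import. A minimal sketch, assuming the default feature set:

```rust
use macro_tools::prelude::*;

fn main()
{
  let ts = qt!
  {
    struct A;
  };
  let item : syn::ItemStruct = syn::parse2( ts ).unwrap();
  assert_eq!( item.ident.to_string(), "A" );
}
```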
+#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as name; + use super::*; + + pub use super::super::name; + // pub use super::protected as name; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/src/phantom.rs b/module/core/macro_tools/src/phantom.rs index 8f4a24900f..2e45c64928 100644 --- a/module/core/macro_tools/src/phantom.rs +++ b/module/core/macro_tools/src/phantom.rs @@ -229,9 +229,14 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as phantom; + use super::*; + + pub use super::super::phantom; + // pub use super::protected as phantom; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super:: diff --git a/module/core/macro_tools/src/punctuated.rs b/module/core/macro_tools/src/punctuated.rs index 0eb1eb50dd..838e74cd5d 100644 --- a/module/core/macro_tools/src/punctuated.rs +++ b/module/core/macro_tools/src/punctuated.rs @@ -52,9 +52,14 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as punctuated; + use super::*; + + pub use super::super::punctuated; + // pub use super::protected as punctuated; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super:: diff --git a/module/core/macro_tools/src/quantifier.rs b/module/core/macro_tools/src/quantifier.rs index 15901b7f89..0f81f9f0b3 100644 --- a/module/core/macro_tools/src/quantifier.rs +++ b/module/core/macro_tools/src/quantifier.rs @@ -75,7 +75,7 @@ pub( crate ) mod private T1 : Element + syn::parse::Parse, T2 : Element + syn::parse::Parse, { - fn parse( input : ParseStream< '_ > ) -> Result< Self > + fn parse( input : ParseStream< '_ > ) -> syn::Result< Self > { Ok( Self( input.parse()?, input.parse()? ) ) } @@ -195,7 +195,7 @@ pub( crate ) mod private where T : Element + syn::parse::Parse + AsMuchAsPossibleNoDelimiter, { - fn parse( input : syn::parse::ParseStream< '_ > ) -> Result< Self > + fn parse( input : syn::parse::ParseStream< '_ > ) -> syn::Result< Self > { let mut items = vec![]; while !input.is_empty() @@ -214,7 +214,7 @@ pub( crate ) mod private // where // T : Element + WhileDelimiter, // { -// fn parse( input : syn::parse::ParseStream< '_ > ) -> Result< Self > +// fn parse( input : syn::parse::ParseStream< '_ > ) -> syn::Result< Self > // { // let mut result = Self::new(); // loop @@ -245,7 +245,7 @@ pub( crate ) mod private impl syn::parse::Parse for Many< AttributesInner > { - fn parse( input : ParseStream< '_ > ) -> Result< Self > + fn parse( input : ParseStream< '_ > ) -> syn::Result< Self > { let mut result = Self::new(); loop @@ -264,7 +264,7 @@ pub( crate ) mod private impl syn::parse::Parse for Many< AttributesOuter > { - fn parse( input : ParseStream< '_ > ) -> Result< Self > + fn parse( input : ParseStream< '_ > ) -> syn::Result< Self > { let mut result = Self::new(); loop @@ -285,7 +285,7 @@ pub( crate ) mod private // impl syn::parse::Parse // for Many< syn::Item > // { - // fn parse( input : syn::parse::ParseStream< '_ > ) -> Result< Self > + // fn parse( input : syn::parse::ParseStream< '_ > ) -> syn::Result< Self > // { // let mut items = vec![]; // while !input.is_empty() @@ -321,9 +321,14 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as quantifier; + use super::*; + + pub use super::super::quantifier; + // pub use super::protected as quantifier; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/src/struct_like.rs b/module/core/macro_tools/src/struct_like.rs index 5a58f5df7d..b851049402 100644 --- a/module/core/macro_tools/src/struct_like.rs +++ b/module/core/macro_tools/src/struct_like.rs @@ -6,6 +6,7 @@ pub( crate ) mod private { use crate::*; + // use iter_tools::{ IterTrait, BoxedIter }; // use interval_adapter::BoundExt; /// Enum to encapsulate either a field from a struct or a variant from an enum. @@ -465,9 +466,15 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as struct_like; + use super::*; + + pub use super::super::struct_like; + + // pub use super::protected as struct_like; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/src/tokens.rs b/module/core/macro_tools/src/tokens.rs index 7a09fc4689..48109a5b21 100644 --- a/module/core/macro_tools/src/tokens.rs +++ b/module/core/macro_tools/src/tokens.rs @@ -94,9 +94,14 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as tokens; + use super::*; + + pub use super::super::tokens; + // pub use super::protected as tokens; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/src/typ.rs b/module/core/macro_tools/src/typ.rs index 609cc717e3..fce30e9aae 100644 --- a/module/core/macro_tools/src/typ.rs +++ b/module/core/macro_tools/src/typ.rs @@ -185,9 +185,15 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { - pub use super::protected as typ; + use super::*; + + pub use super::super::typ; + + // pub use super::protected as typ; + #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/macro_tools/tests/inc/attr_prop_test.rs b/module/core/macro_tools/tests/inc/attr_prop_test.rs index e2fdb5ecb6..4621031b8a 100644 --- a/module/core/macro_tools/tests/inc/attr_prop_test.rs +++ b/module/core/macro_tools/tests/inc/attr_prop_test.rs @@ -4,6 +4,7 @@ use quote::ToTokens; #[ test ] fn attr_prop_test() { + use the_module::{ AttributePropertyComponent, AttributePropertyBoolean, AttributePropertyOptionalSingletone }; #[ derive( Debug, Default, Clone, Copy ) ] pub struct DebugMarker; @@ -11,10 +12,10 @@ fn attr_prop_test() #[ derive( Debug, Default, Clone, Copy ) ] pub struct EnabledMarker; - pub trait AttributePropertyComponent - { - const KEYWORD : &'static str; - } + // pub trait AttributePropertyComponent + // { + // const KEYWORD : &'static str; + // } impl AttributePropertyComponent for DebugMarker { @@ -104,6 +105,8 @@ fn attr_prop_test() #[ test ] fn attribute_property_enabled() { + use the_module::AttributePropertyOptionalSingletone; + // Test default value let attr : AttributePropertyOptionalSingletone = Default::default(); assert_eq!( attr.internal(), None ); diff --git a/module/core/macro_tools/tests/inc/attr_test.rs b/module/core/macro_tools/tests/inc/attr_test.rs index dab489f65d..ff787e8f00 100644 --- a/module/core/macro_tools/tests/inc/attr_test.rs +++ b/module/core/macro_tools/tests/inc/attr_test.rs @@ -1,5 +1,6 @@ use super::*; +use the_module::{ attr, qt, Result }; // @@ -35,6 +36,7 @@ fn is_standard_edge_cases() #[ test ] fn attribute_component_from_meta() { + use the_module::AttributeComponent; struct MyComponent; impl AttributeComponent for MyComponent @@ -74,3 +76,67 @@ fn attribute_component_from_meta() // Assert that the construction failed assert!( result.is_err() ); } + +#[ test ] +fn attribute_basic() -> Result< () > +{ + use macro_tools::syn::parse::Parser; + + // test.case( "AttributesOuter" ); + let code = qt! + { + #[ derive( Copy ) ] + #[ derive( Clone ) ] + #[ derive( Debug ) ] + }; + let got = syn::parse2::< the_module::AttributesOuter >( code ).unwrap(); + let exp = the_module::AttributesOuter::from( syn::Attribute::parse_outer.parse2( qt! + { + #[ derive( Copy ) ] + #[ derive( Clone ) ] + #[ derive( Debug ) ] + } )? ); + a_id!( got, exp ); + + // test.case( "AttributesInner" ); + let code = qt! + { + // #![ deny( missing_docs ) ] + #![ warn( something ) ] + }; + let got = syn::parse2::< the_module::AttributesInner >( code ).unwrap(); + let exp = the_module::AttributesInner::from( syn::Attribute::parse_inner.parse2( qt! + { + // #![ deny( missing_docs ) ] + #![ warn( something ) ] + } )? ); + a_id!( got, exp ); + + // test.case( "AttributesInner" ); + let code = qt! + { + #![ warn( missing_docs1 ) ] + #![ warn( missing_docs2 ) ] + #[ warn( something1 ) ] + #[ warn( something2 ) ] + }; + let got = syn::parse2::< the_module::Pair< the_module::AttributesInner, the_module::AttributesOuter > >( code ).unwrap(); + let exp = the_module::Pair::from + (( + the_module::AttributesInner::from( syn::Attribute::parse_inner.parse2( qt! + { + #![ warn( missing_docs1 ) ] + #![ warn( missing_docs2 ) ] + } )? ), + the_module::AttributesOuter::from( syn::Attribute::parse_outer.parse2( qt! + { + #[ warn( something1 ) ] + #[ warn( something2 ) ] + } )? 
), + )); + a_id!( got, exp ); + + // + + Ok( () ) +} diff --git a/module/core/macro_tools/tests/inc/compile_time_test.rs b/module/core/macro_tools/tests/inc/compile_time_test.rs new file mode 100644 index 0000000000..90e9dd7fca --- /dev/null +++ b/module/core/macro_tools/tests/inc/compile_time_test.rs @@ -0,0 +1,40 @@ + +use super::*; + +// + +#[ test ] +fn concat() +{ + use the_module::ct; + + const KEYWORD : &'static str = "keyword"; + let got = ct::str::concat! + ( + "Known attirbutes are : ", + KEYWORD, + ".", + ); + let exp = "Known attirbutes are : keyword."; + a_id!( got, exp ); + +} + +// + +#[ test ] +fn format() +{ + use the_module::ct; + + const KEYWORD : &'static str = "keyword"; + let got = ct::str::format! + ( + "Known attirbutes are : {}{}", + KEYWORD, + ".", + ); + let exp = "Known attirbutes are : keyword."; + a_id!( got, exp ); + +} diff --git a/module/core/macro_tools/tests/inc/container_kind_test.rs b/module/core/macro_tools/tests/inc/container_kind_test.rs index e6669410a1..b88fae9b22 100644 --- a/module/core/macro_tools/tests/inc/container_kind_test.rs +++ b/module/core/macro_tools/tests/inc/container_kind_test.rs @@ -1,5 +1,6 @@ use super::*; +use the_module::qt; // diff --git a/module/core/macro_tools/tests/inc/derive_test.rs b/module/core/macro_tools/tests/inc/derive_test.rs index bd36f417cf..9142c0cadd 100644 --- a/module/core/macro_tools/tests/inc/derive_test.rs +++ b/module/core/macro_tools/tests/inc/derive_test.rs @@ -6,7 +6,7 @@ use super::*; #[test] fn named_fields_with_named_fields() { - use syn::{parse_quote, punctuated::Punctuated, Field, token::Comma}; + use syn::{ parse_quote, punctuated::Punctuated, Field, token::Comma }; use the_module::derive; let ast: syn::DeriveInput = parse_quote! diff --git a/module/core/macro_tools/tests/inc/diag_test.rs b/module/core/macro_tools/tests/inc/diag_test.rs index e39db7d824..6ac8786a9b 100644 --- a/module/core/macro_tools/tests/inc/diag_test.rs +++ b/module/core/macro_tools/tests/inc/diag_test.rs @@ -1,5 +1,6 @@ use super::*; +use the_module::{ qt, tree_print }; // diff --git a/module/core/macro_tools/tests/inc/equation_test.rs b/module/core/macro_tools/tests/inc/equation_test.rs index 735d8261fc..6ae0e9c806 100644 --- a/module/core/macro_tools/tests/inc/equation_test.rs +++ b/module/core/macro_tools/tests/inc/equation_test.rs @@ -1,5 +1,6 @@ use super::*; +use the_module::{ parse_quote, qt, code_to_str, tree_print, Result }; // @@ -10,6 +11,7 @@ tests_impls! fn equation_test() -> Result< () > { use syn::spanned::Spanned; + use the_module::equation; // test.case( "basic" ); let input = qt! @@ -77,6 +79,7 @@ tests_impls! 
fn equation_from_meta_test() { + use the_module::equation; let attr1 : syn::Attribute = syn::parse_quote!( #[ default( 31 ) ] ); tree_print!( attr1 ); diff --git a/module/core/macro_tools/tests/inc/generic_args_test.rs b/module/core/macro_tools/tests/inc/generic_args_test.rs index 8076737930..56cbe65c50 100644 --- a/module/core/macro_tools/tests/inc/generic_args_test.rs +++ b/module/core/macro_tools/tests/inc/generic_args_test.rs @@ -1,5 +1,6 @@ use super::*; +use the_module::parse_quote; // diff --git a/module/core/macro_tools/tests/inc/generic_params_test.rs b/module/core/macro_tools/tests/inc/generic_params_test.rs index 12e82142a9..18904d5d8f 100644 --- a/module/core/macro_tools/tests/inc/generic_params_test.rs +++ b/module/core/macro_tools/tests/inc/generic_params_test.rs @@ -1,5 +1,7 @@ use super::*; +use the_module::parse_quote; +// xxx : move `parse_quote` under a namespace `typed` // diff --git a/module/core/macro_tools/tests/inc/mod.rs b/module/core/macro_tools/tests/inc/mod.rs index 9ed0a80bee..198ab61378 100644 --- a/module/core/macro_tools/tests/inc/mod.rs +++ b/module/core/macro_tools/tests/inc/mod.rs @@ -3,8 +3,6 @@ use super::*; #[ allow( unused_imports ) ] use test_tools::exposed::*; -#[ allow( unused_imports ) ] -use the_module::protected::*; #[ allow( unused_imports ) ] #[ cfg( feature = "enabled" ) ] @@ -17,6 +15,7 @@ mod if_enabled mod attr_test; mod attr_prop_test; mod basic_test; + mod compile_time_test; mod container_kind_test; mod derive_test; mod diag_test; @@ -29,7 +28,6 @@ mod if_enabled mod phantom_test; mod quantifier_test; mod struct_like_test; - mod syntax_test; mod tokens_test; mod typ_test; diff --git a/module/core/macro_tools/tests/inc/phantom_test.rs b/module/core/macro_tools/tests/inc/phantom_test.rs index 44c3610e66..65ad2e653a 100644 --- a/module/core/macro_tools/tests/inc/phantom_test.rs +++ b/module/core/macro_tools/tests/inc/phantom_test.rs @@ -1,5 +1,6 @@ use super::*; +use the_module::{ tree_print }; #[ test ] fn phantom_add_basic() diff --git a/module/core/macro_tools/tests/inc/quantifier_test.rs b/module/core/macro_tools/tests/inc/quantifier_test.rs index 5a6c59bcf3..a0e3a52ad8 100644 --- a/module/core/macro_tools/tests/inc/quantifier_test.rs +++ b/module/core/macro_tools/tests/inc/quantifier_test.rs @@ -1,5 +1,6 @@ use super::*; +use the_module::{ qt, Result }; // diff --git a/module/core/macro_tools/tests/inc/syntax_test.rs b/module/core/macro_tools/tests/inc/syntax_test.rs deleted file mode 100644 index adddd1285d..0000000000 --- a/module/core/macro_tools/tests/inc/syntax_test.rs +++ /dev/null @@ -1,83 +0,0 @@ - -use super::*; - -// - -tests_impls! -{ - - // - - fn attribute() -> Result< () > - { - use macro_tools::syn::parse::Parser; - - // test.case( "AttributesOuter" ); - let code = qt! - { - #[ derive( Copy ) ] - #[ derive( Clone ) ] - #[ derive( Debug ) ] - }; - let got = syn::parse2::< the_module::AttributesOuter >( code ).unwrap(); - let exp = the_module::AttributesOuter::from( syn::Attribute::parse_outer.parse2( qt! - { - #[ derive( Copy ) ] - #[ derive( Clone ) ] - #[ derive( Debug ) ] - } )? ); - a_id!( got, exp ); - - // test.case( "AttributesInner" ); - let code = qt! - { - // #![ deny( missing_docs ) ] - #![ warn( something ) ] - }; - let got = syn::parse2::< the_module::AttributesInner >( code ).unwrap(); - let exp = the_module::AttributesInner::from( syn::Attribute::parse_inner.parse2( qt! - { - // #![ deny( missing_docs ) ] - #![ warn( something ) ] - } )? 
); - a_id!( got, exp ); - - // test.case( "AttributesInner" ); - let code = qt! - { - #![ warn( missing_docs1 ) ] - #![ warn( missing_docs2 ) ] - #[ warn( something1 ) ] - #[ warn( something2 ) ] - }; - let got = syn::parse2::< the_module::Pair< the_module::AttributesInner, the_module::AttributesOuter > >( code ).unwrap(); - let exp = the_module::Pair::from - (( - the_module::AttributesInner::from( syn::Attribute::parse_inner.parse2( qt! - { - #![ warn( missing_docs1 ) ] - #![ warn( missing_docs2 ) ] - } )? ), - the_module::AttributesOuter::from( syn::Attribute::parse_outer.parse2( qt! - { - #[ warn( something1 ) ] - #[ warn( something2 ) ] - } )? ), - )); - a_id!( got, exp ); - - // - - Ok( () ) - } - - // - -} - -// - -tests_index! -{ - attribute, -} diff --git a/module/core/macro_tools/tests/inc/tokens_test.rs b/module/core/macro_tools/tests/inc/tokens_test.rs index fcae746f5d..8e26e06d57 100644 --- a/module/core/macro_tools/tests/inc/tokens_test.rs +++ b/module/core/macro_tools/tests/inc/tokens_test.rs @@ -1,5 +1,6 @@ use super::*; +use the_module::{ tree_print }; // diff --git a/module/core/macro_tools/tests/inc/typ_Test.rs b/module/core/macro_tools/tests/inc/typ_test.rs similarity index 94% rename from module/core/macro_tools/tests/inc/typ_Test.rs rename to module/core/macro_tools/tests/inc/typ_test.rs index 174c2c243b..cd3a08a7a0 100644 --- a/module/core/macro_tools/tests/inc/typ_Test.rs +++ b/module/core/macro_tools/tests/inc/typ_test.rs @@ -1,5 +1,6 @@ use super::*; +use the_module::qt; // @@ -7,7 +8,7 @@ use super::*; fn is_optional_with_option_type() { use syn::parse_str; - use macro_tools::typ::is_optional; + use the_module::typ::is_optional; let type_string = "Option"; let parsed_type : syn::Type = parse_str( type_string ).expect( "Type should parse correctly" ); @@ -19,7 +20,7 @@ fn is_optional_with_option_type() fn is_optional_with_non_option_type() { use syn::parse_str; - use macro_tools::typ::is_optional; + use the_module::typ::is_optional; let type_string = "Vec"; let parsed_type : syn::Type = parse_str( type_string ).expect( "Type should parse correctly" ); @@ -31,7 +32,7 @@ fn is_optional_with_non_option_type() fn is_optional_with_nested_option_type() { use syn::parse_str; - use macro_tools::typ::is_optional; + use the_module::typ::is_optional; let type_string = "Option>"; let parsed_type : syn::Type = parse_str( type_string ).expect( "Type should parse correctly" ); @@ -43,7 +44,7 @@ fn is_optional_with_nested_option_type() fn is_optional_with_similar_name_type() { use syn::parse_str; - use macro_tools::typ::is_optional; + use the_module::typ::is_optional; let type_string = "OptionalValue"; let parsed_type : syn::Type = parse_str( type_string ).expect( "Type should parse correctly" ); @@ -55,7 +56,7 @@ fn is_optional_with_similar_name_type() fn is_optional_with_empty_input() { use syn::{ parse_str, Type }; - use macro_tools::typ::is_optional; + use the_module::typ::is_optional; let type_string = ""; let parsed_type_result = parse_str::< Type >( type_string ); @@ -69,7 +70,7 @@ fn is_optional_with_empty_input() fn parameter_first_with_multiple_generics() { use syn::{ parse_str, Type }; - use macro_tools::typ::parameter_first; + use the_module::typ::parameter_first; let type_string = "Result, Error>"; let parsed_type : Type = parse_str( type_string ).expect( "Type should parse correctly" ); @@ -84,7 +85,7 @@ fn parameter_first_with_multiple_generics() fn parameter_first_with_no_generics() { use syn::{ parse_str, Type }; - use macro_tools::typ::parameter_first; + use 
the_module::typ::parameter_first; let type_string = "i32"; let parsed_type : Type = parse_str( type_string ).expect( "Type should parse correctly" ); @@ -101,7 +102,7 @@ fn parameter_first_with_no_generics() fn parameter_first_with_single_generic() { use syn::{ parse_str, Type }; - use macro_tools::typ::parameter_first; + use the_module::typ::parameter_first; let type_string = "Vec< i32 >"; let parsed_type : Type = parse_str( type_string ).expect( "Type should parse correctly" ); @@ -116,7 +117,7 @@ fn parameter_first_with_single_generic() fn parameter_first_with_deeply_nested_generics() { use syn::{ parse_str, Type }; - use macro_tools::typ::parameter_first; + use the_module::typ::parameter_first; let type_string = "Vec< HashMap< String, Option< i32 > > >"; let parsed_type : Type = parse_str( type_string ).expect( "Type should parse correctly" ); diff --git a/module/core/mem_tools/Cargo.toml b/module/core/mem_tools/Cargo.toml index 08bc56fc1a..fd90f9d727 100644 --- a/module/core/mem_tools/Cargo.toml +++ b/module/core/mem_tools/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + include = [ "/rust/impl/mem", "/Cargo.toml", diff --git a/module/core/mem_tools/src/lib.rs b/module/core/mem_tools/src/lib.rs index fffffc6fdd..6e02217854 100644 --- a/module/core/mem_tools/src/lib.rs +++ b/module/core/mem_tools/src/lib.rs @@ -51,8 +51,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mem_tools/src/mem.rs b/module/core/mem_tools/src/mem.rs index b0c7e199f5..37de32b6c1 100644 --- a/module/core/mem_tools/src/mem.rs +++ b/module/core/mem_tools/src/mem.rs @@ -95,8 +95,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/meta_tools/Cargo.toml b/module/core/meta_tools/Cargo.toml index 1fc7ff399b..282db2f43f 100644 --- a/module/core/meta_tools/Cargo.toml +++ b/module/core/meta_tools/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ diff --git a/module/core/mod_interface/Cargo.toml b/module/core/mod_interface/Cargo.toml index bc096312e0..2dbf5da6c3 100644 --- a/module/core/mod_interface/Cargo.toml +++ b/module/core/mod_interface/Cargo.toml @@ -23,7 +23,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ "enabled" ] diff --git a/module/core/mod_interface/Readme.md b/module/core/mod_interface/Readme.md index 1115dea469..337053a260 100644 --- a/module/core/mod_interface/Readme.md +++ b/module/core/mod_interface/Readme.md @@ -114,8 +114,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; pub use super::prelude::*; pub use super::inner::exposed::*; } diff --git a/module/core/mod_interface/src/lib.rs b/module/core/mod_interface/src/lib.rs index 582ad8e8c9..bccf35a07b 100644 --- a/module/core/mod_interface/src/lib.rs +++ b/module/core/mod_interface/src/lib.rs @@ -42,8 +42,10 @@ pub mod orphan /// Exposed namespace of the module. 
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/derive/attr_debug/layer_a.rs b/module/core/mod_interface/tests/inc/derive/attr_debug/layer_a.rs index f1e9f256fb..e0a06be337 100644 --- a/module/core/mod_interface/tests/inc/derive/attr_debug/layer_a.rs +++ b/module/core/mod_interface/tests/inc/derive/attr_debug/layer_a.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/derive/layer/layer_a.rs b/module/core/mod_interface/tests/inc/derive/layer/layer_a.rs index f1e9f256fb..e0a06be337 100644 --- a/module/core/mod_interface/tests/inc/derive/layer/layer_a.rs +++ b/module/core/mod_interface/tests/inc/derive/layer/layer_a.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/derive/layer/layer_b.rs b/module/core/mod_interface/tests/inc/derive/layer/layer_b.rs index 8051e73936..5367a38023 100644 --- a/module/core/mod_interface/tests/inc/derive/layer/layer_b.rs +++ b/module/core/mod_interface/tests/inc/derive/layer/layer_b.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/derive/layer_bad_vis/layer_a.rs b/module/core/mod_interface/tests/inc/derive/layer_bad_vis/layer_a.rs index f1e9f256fb..e0a06be337 100644 --- a/module/core/mod_interface/tests/inc/derive/layer_bad_vis/layer_a.rs +++ b/module/core/mod_interface/tests/inc/derive/layer_bad_vis/layer_a.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/derive/layer_unknown_vis/layer_a.rs b/module/core/mod_interface/tests/inc/derive/layer_unknown_vis/layer_a.rs index f1e9f256fb..e0a06be337 100644 --- a/module/core/mod_interface/tests/inc/derive/layer_unknown_vis/layer_a.rs +++ b/module/core/mod_interface/tests/inc/derive/layer_unknown_vis/layer_a.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/derive/use_as/layer_x.rs b/module/core/mod_interface/tests/inc/derive/use_as/layer_x.rs index f1e9f256fb..e0a06be337 100644 --- a/module/core/mod_interface/tests/inc/derive/use_as/layer_x.rs +++ b/module/core/mod_interface/tests/inc/derive/use_as/layer_x.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/derive/use_as/layer_y.rs b/module/core/mod_interface/tests/inc/derive/use_as/layer_y.rs index 8051e73936..5367a38023 100644 --- a/module/core/mod_interface/tests/inc/derive/use_as/layer_y.rs +++ b/module/core/mod_interface/tests/inc/derive/use_as/layer_y.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/derive/use_as/manual_only.rs b/module/core/mod_interface/tests/inc/derive/use_as/manual_only.rs index 816287978c..8b2f13d663 100644 --- a/module/core/mod_interface/tests/inc/derive/use_as/manual_only.rs +++ b/module/core/mod_interface/tests/inc/derive/use_as/manual_only.rs @@ -31,8 +31,10 @@ pub mod orphan } #[doc = r" Exposed namespace of the module."] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[doc(inline)] #[allow(unused_imports)] pub use super :: prelude :: * ; diff --git a/module/core/mod_interface/tests/inc/derive/use_basic/layer_a.rs b/module/core/mod_interface/tests/inc/derive/use_basic/layer_a.rs index f1e9f256fb..e0a06be337 100644 --- a/module/core/mod_interface/tests/inc/derive/use_basic/layer_a.rs +++ b/module/core/mod_interface/tests/inc/derive/use_basic/layer_a.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/derive/use_basic/layer_b.rs b/module/core/mod_interface/tests/inc/derive/use_basic/layer_b.rs index 8051e73936..5367a38023 100644 --- a/module/core/mod_interface/tests/inc/derive/use_basic/layer_b.rs +++ b/module/core/mod_interface/tests/inc/derive/use_basic/layer_b.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/manual/layer/layer_a.rs b/module/core/mod_interface/tests/inc/manual/layer/layer_a.rs index f1e9f256fb..e0a06be337 100644 --- a/module/core/mod_interface/tests/inc/manual/layer/layer_a.rs +++ b/module/core/mod_interface/tests/inc/manual/layer/layer_a.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/manual/layer/layer_b.rs b/module/core/mod_interface/tests/inc/manual/layer/layer_b.rs index 8051e73936..5367a38023 100644 --- a/module/core/mod_interface/tests/inc/manual/layer/layer_b.rs +++ b/module/core/mod_interface/tests/inc/manual/layer/layer_b.rs @@ -35,8 +35,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/manual/layer/mod.rs b/module/core/mod_interface/tests/inc/manual/layer/mod.rs index ea4b8b33bc..5dbc62d0ba 100644 --- a/module/core/mod_interface/tests/inc/manual/layer/mod.rs +++ b/module/core/mod_interface/tests/inc/manual/layer/mod.rs @@ -38,8 +38,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/manual/layer_use/layer_a.rs b/module/core/mod_interface/tests/inc/manual/layer_use/layer_a.rs index 7e0b8bd94b..5585c1db64 100644 --- a/module/core/mod_interface/tests/inc/manual/layer_use/layer_a.rs +++ b/module/core/mod_interface/tests/inc/manual/layer_use/layer_a.rs @@ -56,8 +56,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/manual/layer_use/layer_b.rs b/module/core/mod_interface/tests/inc/manual/layer_use/layer_b.rs index 9a68ea64fd..fa85219843 100644 --- a/module/core/mod_interface/tests/inc/manual/layer_use/layer_b.rs +++ b/module/core/mod_interface/tests/inc/manual/layer_use/layer_b.rs @@ -56,8 +56,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/manual/layer_use/mod.rs b/module/core/mod_interface/tests/inc/manual/layer_use/mod.rs index ea4b8b33bc..5dbc62d0ba 100644 --- a/module/core/mod_interface/tests/inc/manual/layer_use/mod.rs +++ b/module/core/mod_interface/tests/inc/manual/layer_use/mod.rs @@ -38,8 +38,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/manual/micro_modules/mod.rs b/module/core/mod_interface/tests/inc/manual/micro_modules/mod.rs index f052ac97cc..9c32dd6afe 100644 --- a/module/core/mod_interface/tests/inc/manual/micro_modules/mod.rs +++ b/module/core/mod_interface/tests/inc/manual/micro_modules/mod.rs @@ -34,8 +34,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface/tests/inc/manual/micro_modules_two/mod.rs b/module/core/mod_interface/tests/inc/manual/micro_modules_two/mod.rs index a48af7bc73..7a28028526 100644 --- a/module/core/mod_interface/tests/inc/manual/micro_modules_two/mod.rs +++ b/module/core/mod_interface/tests/inc/manual/micro_modules_two/mod.rs @@ -41,8 +41,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface_meta/Cargo.toml b/module/core/mod_interface_meta/Cargo.toml index 8364db34d6..10201d821f 100644 --- a/module/core/mod_interface_meta/Cargo.toml +++ b/module/core/mod_interface_meta/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + include = [ "/rust/impl/meta/mod_interface_meta_lib.rs", "/rust/impl/meta/mod_interface/meta", diff --git a/module/core/mod_interface_meta/src/impls.rs b/module/core/mod_interface_meta/src/impls.rs index 0f4608e420..fa314cbb6c 100644 --- a/module/core/mod_interface_meta/src/impls.rs +++ b/module/core/mod_interface_meta/src/impls.rs @@ -110,7 +110,7 @@ pub( crate ) mod private // clauses_map : &mut HashMap< u32, Vec< proc_macro2::TokenStream > >, ) -> - Result< () > + syn::Result< () > { let attrs1 = &record.attrs; @@ -183,7 +183,7 @@ pub( crate ) mod private // clauses_map : &mut HashMap< u32, Vec< proc_macro2::TokenStream > >, ) -> - Result< () > + syn::Result< () > { let attrs1 = &record.attrs; let path = record.use_elements.as_ref().unwrap(); @@ -233,7 +233,7 @@ pub( crate ) mod private c : &'_ mut RecordContext< '_ >, ) -> - Result< () > + syn::Result< () > { let attrs1 = &record.attrs; let attrs2 = &element.0; @@ -280,7 +280,7 @@ pub( crate ) mod private c : &'_ mut RecordContext< '_ >, ) -> - Result< () > + syn::Result< () > { let attrs1 = &record.attrs; let attrs2 = &element.0; @@ -337,7 +337,7 @@ pub( crate ) mod private /// Protocol of modularity unifying interface of a module and introducing layers. /// #[ allow ( dead_code ) ] - pub fn mod_interface( input : proc_macro::TokenStream ) -> Result< proc_macro2::TokenStream > + pub fn mod_interface( input : proc_macro::TokenStream ) -> syn::Result< proc_macro2::TokenStream > { use ElementType::*; @@ -384,7 +384,7 @@ pub( crate ) mod private }, _ => { - record.elements.iter().try_for_each( | element | -> Result::< () > + record.elements.iter().try_for_each( | element | -> syn::Result::< () > { match record.element_type { @@ -400,12 +400,12 @@ pub( crate ) mod private { }, } - Result::Ok( () ) + syn::Result::Ok( () ) })?; } }; - Result::Ok( () ) + syn::Result::Ok( () ) })?; let immediates_clause = clauses_map.get( &ClauseImmediates::Kind() ).unwrap(); @@ -489,8 +489,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; pub use super::prelude::*; #[ allow( unused_imports ) ] pub use super::private:: diff --git a/module/core/mod_interface_meta/src/record.rs b/module/core/mod_interface_meta/src/record.rs index b078dcf6c6..31d42633c5 100644 --- a/module/core/mod_interface_meta/src/record.rs +++ b/module/core/mod_interface_meta/src/record.rs @@ -31,7 +31,7 @@ pub( crate ) mod private impl syn::parse::Parse for ElementType { - fn parse( input : ParseStream< '_ > ) -> Result< Self > + fn parse( input : ParseStream< '_ > ) -> syn::Result< Self > { let lookahead = input.lookahead1(); let element_type = match() @@ -94,7 +94,7 @@ pub( crate ) mod private impl syn::parse::Parse for Record { - fn parse( input : ParseStream< '_ > ) -> Result< Self > + fn parse( input : ParseStream< '_ > ) -> syn::Result< Self > { let attrs = input.parse()?; @@ -187,7 +187,7 @@ pub( crate ) mod private { /// Validate each inner attribute of the thesis. 
#[ allow ( dead_code ) ] - pub fn inner_attributes_validate( &self ) -> Result< () > + pub fn inner_attributes_validate( &self ) -> syn::Result< () > { self.head.iter().try_for_each( | attr | { @@ -210,7 +210,7 @@ pub( crate ) mod private )); } - Result::Ok( () ) + syn::Result::Ok( () ) })?; Ok( () ) } @@ -229,7 +229,7 @@ pub( crate ) mod private impl syn::parse::Parse for Thesis { - fn parse( input : ParseStream< '_ > ) -> Result< Self > + fn parse( input : ParseStream< '_ > ) -> syn::Result< Self > { let head = input.parse()?; // let head = Default::default(); @@ -273,8 +273,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ allow( unused_imports ) ] pub use super::prelude::*; #[ allow( unused_imports ) ] diff --git a/module/core/mod_interface_meta/src/use_tree.rs b/module/core/mod_interface_meta/src/use_tree.rs index a7c1397e96..55779072d7 100644 --- a/module/core/mod_interface_meta/src/use_tree.rs +++ b/module/core/mod_interface_meta/src/use_tree.rs @@ -2,7 +2,7 @@ pub( crate ) mod private { use macro_tools::prelude::*; - use macro_tools::Result; + // use macro_tools::syn::Result; // use macro_tools::err; #[ derive( Debug, PartialEq, Eq, Clone ) ] @@ -44,7 +44,7 @@ pub( crate ) mod private } /// Get pure path, cutting off `as module2` from `use module1 as module2`. - pub fn pure_path( &self ) -> Result< syn::punctuated::Punctuated< syn::Ident, Token![::] > > + pub fn pure_path( &self ) -> syn::Result< syn::punctuated::Punctuated< syn::Ident, Token![::] > > { use syn::UseTree::*; @@ -89,7 +89,7 @@ pub( crate ) mod private /// Pure path without super. /// Get pure path, cutting off `as module2` from `use module1 as module2`. /// Strip first `super::` in `super::some::module` - pub fn pure_without_super_path( &self ) -> Result< syn::punctuated::Punctuated< syn::Ident, Token![::] > > + pub fn pure_without_super_path( &self ) -> syn::Result< syn::punctuated::Punctuated< syn::Ident, Token![::] > > { let path = self.pure_path()?; if path.len() < 1 @@ -107,7 +107,7 @@ pub( crate ) mod private /// Adjusted path. /// Add `super::private::` to path unless it starts from `::` or `super` or `crate`. - pub fn adjsuted_implicit_path( &self ) -> Result< syn::punctuated::Punctuated< syn::Ident, Token![::] > > + pub fn adjsuted_implicit_path( &self ) -> syn::Result< syn::punctuated::Punctuated< syn::Ident, Token![::] > > { // use syn::UseTree::*; let pure_path = self.pure_path()?; @@ -144,7 +144,7 @@ pub( crate ) mod private impl syn::parse::Parse for UseTree { - fn parse( input : ParseStream< '_ > ) -> Result< Self > + fn parse( input : ParseStream< '_ > ) -> syn::Result< Self > { use syn::UseTree::*; let leading_colon = input.parse()?; @@ -224,8 +224,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/mod_interface_meta/src/visibility.rs b/module/core/mod_interface_meta/src/visibility.rs index 10d8a68c48..9e551429ea 100644 --- a/module/core/mod_interface_meta/src/visibility.rs +++ b/module/core/mod_interface_meta/src/visibility.rs @@ -2,7 +2,7 @@ pub( crate ) mod private { use macro_tools::prelude::*; - use macro_tools::Result; + // use macro_tools::syn::Result; use core::hash::{ Hash, Hasher }; pub const VALID_VISIBILITY_LIST_STR : &str = "[ private, protected, orphan, exposed, prelude ]"; @@ -298,37 +298,37 @@ pub( crate ) mod private impl Visibility { - fn parse_protected( input : ParseStream< '_ > ) -> Result< Self > + fn parse_protected( input : ParseStream< '_ > ) -> syn::Result< Self > { Self::_parse_vis::< VisProtected >( input ) } - fn parse_orphan( input : ParseStream< '_ > ) -> Result< Self > + fn parse_orphan( input : ParseStream< '_ > ) -> syn::Result< Self > { Self::_parse_vis::< VisOrphan >( input ) } - fn parse_exposed( input : ParseStream< '_ > ) -> Result< Self > + fn parse_exposed( input : ParseStream< '_ > ) -> syn::Result< Self > { Self::_parse_vis::< VisExposed >( input ) } - fn parse_prelude( input : ParseStream< '_ > ) -> Result< Self > + fn parse_prelude( input : ParseStream< '_ > ) -> syn::Result< Self > { Self::_parse_vis::< VisPrelude >( input ) } - fn parse_pub( input : ParseStream< '_ > ) -> Result< Self > + fn parse_pub( input : ParseStream< '_ > ) -> syn::Result< Self > { Self::_parse_vis::< VisPublic >( input ) } - // fn parse_pub( input : ParseStream< '_ > ) -> Result< Self > + // fn parse_pub( input : ParseStream< '_ > ) -> syn::Result< Self > // { // Ok( Visibility::Public( syn::VisPublic { pub_token : input.parse()? } ) ) // } - fn _parse_vis< Vis >( input : ParseStream< '_ > ) -> Result< Self > + fn _parse_vis< Vis >( input : ParseStream< '_ > ) -> syn::Result< Self > where Vis : Into< Visibility > + VisibilityInterface, { @@ -380,7 +380,7 @@ pub( crate ) mod private ).into() ) } - // fn parse_in_crate( input : ParseStream< '_ > ) -> Result< Self > + // fn parse_in_crate( input : ParseStream< '_ > ) -> syn::Result< Self > // { // if input.peek2( Token![ :: ] ) // { @@ -435,7 +435,7 @@ pub( crate ) mod private impl syn::parse::Parse for Visibility { - fn parse( input : ParseStream< '_ > ) -> Result< Self > + fn parse( input : ParseStream< '_ > ) -> syn::Result< Self > { // Recognize an empty None-delimited group, as produced by a $:vis // matcher that matched no tokens. @@ -531,8 +531,10 @@ pub mod orphan } /// Exposed namespace of the module. 
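The hunks above in impls.rs, record.rs, use_tree.rs and visibility.rs all swap the former `Result` alias re-exported from `macro_tools` for `syn::Result`, so the parser code names `syn`'s error type directly. A self-contained sketch of a `syn::parse::Parse` impl written in that style, assuming syn 2.x as a dependency; `KeywordOnly` is an invented type, not one from the crate:

// Sketch only, assuming syn 2.x; `KeywordOnly` is hypothetical.
use syn::parse::{ Parse, ParseStream };

#[ allow( dead_code ) ]
struct KeywordOnly
{
  ident : syn::Ident,
}

impl Parse for KeywordOnly
{
  fn parse( input : ParseStream< '_ > ) -> syn::Result< Self >
  {
    // `syn::Result< T >` is `Result< T, syn::Error >`, so `?` and `Ok` work as usual.
    let ident : syn::Ident = input.parse()?;
    if ident != "keyword"
    {
      return Err( syn::Error::new( ident.span(), "expected `keyword`" ) );
    }
    syn::Result::Ok( KeywordOnly { ident } )
  }
}
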
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/process_tools/Cargo.toml b/module/core/process_tools/Cargo.toml index f3e45a836d..926b981a38 100644 --- a/module/core/process_tools/Cargo.toml +++ b/module/core/process_tools/Cargo.toml @@ -27,7 +27,7 @@ all-features = false [features] default = [ "enabled", "process_environment_is_cicd" ] -full = [ "enabled", "process_environment_is_cicd" ] +full = [ "default" ] enabled = [ "mod_interface/enabled", "former/enabled", @@ -43,7 +43,7 @@ process_environment_is_cicd = [] mod_interface = { workspace = true } former = { workspace = true, features = [ "derive_former" ] } proper_path_tools = { workspace = true } -error_tools = { workspace = true, features = [ "error_for_app" ] } # qqq : xxx : rid off error_for_app +error_tools = { workspace = true, features = [ "error_untyped" ] } iter_tools = { workspace = true } ## external diff --git a/module/core/program_tools/Cargo.toml b/module/core/program_tools/Cargo.toml index 30faf2edc3..1ba2675334 100644 --- a/module/core/program_tools/Cargo.toml +++ b/module/core/program_tools/Cargo.toml @@ -45,7 +45,7 @@ enabled = [ mod_interface = { workspace = true } former = { workspace = true, features = [ "derive_former" ] } proper_path_tools = { workspace = true } -error_tools = { workspace = true, features = [ "error_for_app" ] } # qqq : xxx : rid off error_for_app +error_tools = { workspace = true, features = [ "error_untyped" ] } # qqq : xxx : rid of error_untyped iter_tools = { workspace = true } # ## external diff --git a/module/core/program_tools/src/lib.rs b/module/core/program_tools/src/lib.rs index 71c19f2ab4..19f55a0993 100644 --- a/module/core/program_tools/src/lib.rs +++ b/module/core/program_tools/src/lib.rs @@ -4,7 +4,7 @@ #![ doc( html_root_url = "https://docs.rs/program_tools/latest/program_tools/" ) ] #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] -#![ allow( unused_imports, dead_code, missing_docs ) ] // xxx : rid off +#![ allow( unused_imports, dead_code, missing_docs ) ] // xxx : rid of #[ cfg( feature = "enabled" ) ] use mod_interface::mod_interface; diff --git a/module/core/proper_path_tools/Cargo.toml b/module/core/proper_path_tools/Cargo.toml index 1571d43e0f..1480bf782b 100644 --- a/module/core/proper_path_tools/Cargo.toml +++ b/module/core/proper_path_tools/Cargo.toml @@ -24,19 +24,29 @@ features = [ "full" ] all-features = false [features] -default = [ "enabled", "path_unique_folder_name" ] -full = [ "enabled", "path_unique_folder_name", "derive_serde" ] +default = [ + "enabled", + "path_unique_folder_name", + "path_utf8", +] +full = [ + "default", + "derive_serde", + "path_utf8", +] no_std = [] use_alloc = [ "no_std" ] enabled = [ "mod_interface/enabled" ] path_unique_folder_name = [] derive_serde = [ "serde" ] +path_utf8 = [ "camino" ] [dependencies] regex = { version = "1.10.3" } mod_interface = { workspace = true } serde = { version = "1.0.197", optional = true, features = [ "derive" ] } +camino = { version = "1.1.7", optional = true, features = [] } [dev-dependencies] test_tools = { workspace = true } diff --git a/module/core/proper_path_tools/src/lib.rs b/module/core/proper_path_tools/src/lib.rs index d437f55081..96b91321c3 100644 --- a/module/core/proper_path_tools/src/lib.rs +++ b/module/core/proper_path_tools/src/lib.rs @@ -18,4 +18,11 @@ mod_interface! /// Basic functionality. layer path; + /// Transitive TryFrom and TryInto. 
+ layer transitive; + + #[ cfg( feature = "path_utf8" ) ] + protected use ::camino::{ Utf8Path, Utf8PathBuf }; + protected use ::std::path::{ PathBuf, Path }; + } diff --git a/module/core/proper_path_tools/src/path.rs b/module/core/proper_path_tools/src/path.rs index 7e00e31d19..b7f1b0e277 100644 --- a/module/core/proper_path_tools/src/path.rs +++ b/module/core/proper_path_tools/src/path.rs @@ -5,20 +5,6 @@ pub( crate ) mod private #[ cfg( feature = "no_std" ) ] extern crate std; - // use std:: - // { - // path::{ Component, Path, PathBuf }, - // time::{ SystemTime, UNIX_EPOCH, SystemTimeError }, - // }; - // use cargo_metadata::camino::{ Utf8Path, Utf8PathBuf }; - - // // xxx : it's not path, but file - // /// Check if path is valid. - // pub fn valid_is( path : &str ) -> bool - // { - // std::fs::metadata( path ).is_ok() - // } - /// Determines if a given path string contains unescaped glob pattern characters. /// /// # Parameters: @@ -48,42 +34,43 @@ pub( crate ) mod private /// assert_eq!( path::is_glob( "file\\[0-9].txt" ), false ); // Escaped brackets, not a glob pattern /// ``` - pub fn is_glob( path : &str ) -> bool + // qqq : xxx : should probably be Path + pub fn is_glob( path : &str ) -> bool { let mut chars = path.chars().peekable(); let mut is_escaped = false; let mut in_brackets = false; let mut in_braces = false; - while let Some( c ) = chars.next() + while let Some( c ) = chars.next() { - if is_escaped + if is_escaped { // If the character is escaped, ignore its special meaning in the next iteration is_escaped = false; continue; } - match c + match c { - '\\' => + '\\' => { is_escaped = !is_escaped; } '*' | '?' if !in_brackets && !in_braces => return true, - '[' if !in_brackets && !in_braces && !is_escaped => + '[' if !in_brackets && !in_braces && !is_escaped => { // Enter a bracket block, indicating potential glob pattern in_brackets = true; // continue; // Ensure we don't immediately exit on the next char if it's ']' } - ']' if in_brackets => + ']' if in_brackets => { // in_brackets = false; return true; } '{' if !in_braces && !is_escaped => in_braces = true, - '}' if in_braces => + '}' if in_braces => { // in_braces = false; return true; @@ -152,33 +139,33 @@ pub( crate ) mod private let mut starts_with_dot = false; let mut iter = path.as_ref().components().peekable(); - if let Some( first ) = iter.peek() + if let Some( first ) = iter.peek() { starts_with_dot = matches!( first, Component::CurDir ); - if matches!( first, Component::RootDir ) + if matches!( first, Component::RootDir ) { components.push( Component::RootDir ); iter.next(); // Skip the root component in further processing } } - for component in iter + for component in iter { match component { - Component::ParentDir => + Component::ParentDir => { - match components.last() + match components.last() { - Some( Component::Normal( _ ) ) => + Some( Component::Normal( _ ) ) => { components.pop(); } - Some( Component::RootDir ) => + Some( Component::RootDir ) => { components.push( Component::ParentDir ); } - Some( Component::ParentDir ) | None => + Some( Component::ParentDir ) | None => { components.push( Component::ParentDir ); } @@ -191,12 +178,12 @@ pub( crate ) mod private } let mut normalized = PathBuf::new(); - if starts_with_dot || components.is_empty() + if starts_with_dot || components.is_empty() { normalized.push( "." 
); } - for component in components.iter() + for component in components.iter() { normalized.push( component.as_os_str() ); } @@ -226,19 +213,22 @@ pub( crate ) mod private // println!( "b" ); let path = normalize( path ); - // In Windows the regular/legacy paths (C :\foo) are supported by all programs, but have lots of bizarre restrictions for backwards compatibility with MS-DOS. - // And there are Windows NT UNC paths (\\?\C :\foo), which are more robust and with fewer gotchas, but are rarely supported by Windows programs. Even Microsoft’s own! + // In Windows the regular/legacy paths (C:\foo) are supported by all programs, but have lots of bizarre restrictions for backwards compatibility with MS-DOS. + // And there are Windows NT UNC paths (\\?\C:\foo), which are more robust and with fewer gotchas, but are rarely supported by Windows programs. Even Microsoft’s own! // // https://github.com/rust-lang/rust/issues/42869 #[ cfg( target_os = "windows" ) ] - let path = + let path = { - const VERBATIM_PREFIX: &str = r#"\\?\"#; + const VERBATIM_PREFIX : &str = r#"\\?\"#; + // is necessary because of the normalization step that replaces the backslash with a slash. + const VERBATIM_PREFIX_MIRRORS_EDGE : &str = "//?/"; let p = path.display().to_string(); - if p.starts_with( VERBATIM_PREFIX ) + if p.starts_with( VERBATIM_PREFIX ) || p.starts_with( VERBATIM_PREFIX_MIRRORS_EDGE ) { PathBuf::from( &p[ VERBATIM_PREFIX.len().. ] ) - } else + } + else { path.into() } @@ -291,7 +281,7 @@ pub( crate ) mod private } // Increment and get the current value of the counter safely - let count = COUNTER.with( | counter | + let count = COUNTER.with( | counter | { let val = counter.get(); counter.set( val + 1 ); @@ -317,23 +307,24 @@ pub( crate ) mod private /// Examples: /// /// ``` + /// use std::path::PathBuf; + /// use proper_path_tools::path; /// - /// let paths = vec![ "a/b/c", "/d/e", "f/g" ]; - /// let joined = proper_path_tools::path::join_paths( paths.into_iter() ); + /// let paths = vec![ PathBuf::from( "a/b/c" ), PathBuf::from( "/d/e" ), PathBuf::from( "f/g" ) ]; + /// let joined = path::join_paths( paths.iter().map( | p | p.as_path() ) ); /// assert_eq!( joined, std::path::PathBuf::from( "/d/e/f/g" ) ); /// - /// let paths = vec![]; - /// let joined = proper_path_tools::path::join_paths( paths.into_iter() ); - /// assert_eq!( joined, std::path::PathBuf::from( "" ) ); - /// - /// let paths = vec![ "", "a/b", "", "c", "" ]; - /// let joined = proper_path_tools::path::join_paths( paths.into_iter() ); - /// assert_eq!( joined, std::path::PathBuf::from( "/a/b/c" ) ); + /// let paths = vec![ PathBuf::from( "" ), PathBuf::from( "a/b" ), PathBuf::from( "" ), PathBuf::from( "c" ), PathBuf::from( "" ) ]; + /// let joined = path::join_paths( paths.iter().map( | p | p.as_path() ) ); + /// assert_eq!( joined, std::path::PathBuf::from( PathBuf::from( "/a/b/c" ) ) ); /// /// ``` + // qqq : make macro paths_join!( ... 
) pub fn join_paths< 'a, I >( paths : I ) -> std::path::PathBuf where - I : Iterator< Item = &'a str >, + // AsPath : AsRef< std::path::Path >, + // I : Iterator< Item = AsPath >, + I : Iterator< Item = &'a std::path::Path >, { #[ cfg( feature = "no_std" ) ] extern crate alloc; @@ -344,37 +335,39 @@ pub( crate ) mod private let mut result = String::new(); - for path in paths { - let mut path = path.replace( '\\', "/" ); + for path in paths + { + let mut path = path.to_string_lossy().replace( '\\', "/" ); path = path.replace( ':', "" ); + // qqq : this is a bug let mut added_slah = false; // If the path is empty, skip it - if path.is_empty() + if path.is_empty() { continue; } // If the path starts with '/', clear the result and set it to '/' - if path.starts_with('/') + if path.starts_with( '/' ) { result.clear(); result.push( '/' ); } // If the result doesn't end with '/', append '/' - else if !result.ends_with( '/' ) + else if !result.ends_with( '/' ) { added_slah = true; result.push( '/' ); } let components: Vec<&str> = path.split( '/' ).collect(); // Split the path into components - for ( idx, component ) in components.clone().into_iter().enumerate() + for ( idx, component ) in components.clone().into_iter().enumerate() { - match component + match component { - "." => + "." => { if ( result.ends_with( '/' ) && components.len() > idx + 1 && components[ idx + 1 ].is_empty() ) || components.len() == idx + 1 @@ -382,48 +375,48 @@ pub( crate ) mod private result.pop(); } } - ".." => + ".." => { - if result != "/" + if result != "/" { - if added_slah + if added_slah { result.pop(); added_slah = false; } let mut parts : Vec< _ > = result.split( '/' ).collect(); parts.pop(); - if let Some( part ) = parts.last() + if let Some( part ) = parts.last() { - if part.is_empty() + if part.is_empty() { parts.push( "" ); } } result = parts.join( "/" ); - if result.is_empty() + if result.is_empty() { result.push( '/' ); } - } else + } else { result.push_str( &components[ idx.. ].to_vec().join( "/" ) ); break; } } - _ => + _ => { - if !component.is_empty() + if !component.is_empty() { - if result.ends_with( '/' ) + if result.ends_with( '/' ) { result.push_str( component ); - } else + } else { result.push( '/' ); result.push_str( component ); } - } else if components.len() > idx + 1 && components[ idx + 1 ].is_empty() && path != "/" + } else if components.len() > idx + 1 && components[ idx + 1 ].is_empty() && path != "/" { result.push( '/' ); } @@ -431,7 +424,7 @@ pub( crate ) mod private } } - if path.ends_with( '/' ) && result != "/" + if path.ends_with( '/' ) && result != "/" { result.push( '/' ); } @@ -472,23 +465,25 @@ pub( crate ) mod private /// assert_eq!( extensions, expected ); /// ``` /// - pub fn exts( path : impl AsRef< std::path::Path > ) -> std::vec::Vec< std::string::String > + + // qqq : xxx : should return iterator + pub fn exts( path : impl AsRef< std::path::Path > ) -> std::vec::Vec< std::string::String > { #[ cfg( feature = "no_std" ) ] extern crate alloc; #[ cfg( feature = "no_std" ) ] use alloc::string::ToString; - if let Some( file_name ) = std::path::Path::new( path.as_ref() ).file_name() + if let Some( file_name ) = std::path::Path::new( path.as_ref() ).file_name() { - if let Some( file_name_str ) = file_name.to_str() + if let Some( file_name_str ) = file_name.to_str() { let mut file_name_str = file_name_str.to_string(); if file_name_str.starts_with( '.' ) { file_name_str.remove( 0 ); } - if let Some( dot_index ) = file_name_str.find( '.' 
) + if let Some( dot_index ) = file_name_str.find( '.' ) { let extensions = &file_name_str[ dot_index + 1.. ]; @@ -533,7 +528,7 @@ pub( crate ) mod private /// assert_eq!(modified_path, None); /// ``` /// - pub fn without_ext( path : impl AsRef< std::path::Path > ) -> core::option::Option< std::path::PathBuf > + pub fn without_ext( path : impl AsRef< std::path::Path > ) -> core::option::Option< std::path::PathBuf > { use std::path::{ Path, PathBuf }; #[ cfg( feature = "no_std" ) ] @@ -548,17 +543,17 @@ pub( crate ) mod private let path_buf = Path::new( path.as_ref() ); - let parent = match path_buf.parent() + let parent = match path_buf.parent() { Some( parent ) => parent, None => return None, }; - let file_stem = match path_buf.file_stem() + let file_stem = match path_buf.file_stem() { - Some( name ) => + Some( name ) => { let ends = format!( "{}/", name.to_string_lossy() ); - if path.as_ref().to_string_lossy().ends_with( &ends ) + if path.as_ref().to_string_lossy().ends_with( &ends ) { ends } @@ -612,19 +607,19 @@ pub( crate ) mod private /// assert_eq!( modified_path, None ); /// ``` /// - pub fn change_ext( path : impl AsRef< std::path::Path >, ext : &str ) -> Option< std::path::PathBuf > + pub fn change_ext( path : impl AsRef< std::path::Path >, ext : &str ) -> Option< std::path::PathBuf > { use std::path::PathBuf; - if path.as_ref().to_string_lossy().is_empty() || !path.as_ref().to_string_lossy().is_ascii() || !ext.is_ascii() + if path.as_ref().to_string_lossy().is_empty() || !path.as_ref().to_string_lossy().is_ascii() || !ext.is_ascii() { return None; } let without_ext = without_ext( path )?; - if ext.is_empty() + if ext.is_empty() { Some( without_ext ) - } else + } else { Some( PathBuf::from( format!( "{}.{}", without_ext.to_string_lossy(), ext ) ) ) } @@ -654,9 +649,11 @@ pub( crate ) mod private /// assert_eq!( common_path, Some( "/a/b/".to_string() ) ); /// ``` /// - pub fn path_common< 'a, I >( paths : I ) -> Option< std::string::String > + + // xxx : qqq : should probably be PathBuf? + pub fn path_common< 'a, I >( paths : I ) -> Option< std::string::String > where - I: Iterator, + I: Iterator< Item = &'a str >, { use std::collections::HashMap; #[ cfg( feature = "no_std" ) ] @@ -665,18 +662,18 @@ pub( crate ) mod private use alloc::{ string::{ String, ToString }, vec::Vec }; let orig_paths : Vec< String > = paths.map( | path | path.to_string() ).collect(); - - if orig_paths.is_empty() + + if orig_paths.is_empty() { return None; } // Create a map to store directory frequencies let mut dir_freqs : HashMap< String, usize > = HashMap::new(); - + let mut paths = orig_paths.clone(); // Iterate over paths to count directory frequencies - for path in paths.iter_mut() + for path in paths.iter_mut() { path_remove_dots( path ); path_remove_double_dots( path ); @@ -686,11 +683,11 @@ pub( crate ) mod private // Iterate over directories for i in 0..dirs.len() { - + // Construct directory path let mut dir_path = dirs[ 0..i + 1 ].join( "/" ); - + // Increment frequency count *dir_freqs.entry( dir_path.clone() ).or_insert( 0 ) += 1; @@ -712,10 +709,10 @@ pub( crate ) mod private let mut result = common_dir.to_string(); - if result.is_empty() + if result.is_empty() { if orig_paths.iter().any( | path | path.starts_with( '/' ) ) - { + { result.push( '/' ); } else if orig_paths.iter().any( | path | path.starts_with( ".." ) ) @@ -743,10 +740,12 @@ pub( crate ) mod private /// /// * `path` - A mutable reference to a string representing the path to be cleaned. 
/// - fn path_remove_dots( path : &mut std::string::String ) + + // xxx : qqq : should probably be Path? + fn path_remove_dots( path : &mut std::string::String ) { let mut cleaned_parts = vec![]; - for part in path.split( '/' ) + for part in path.split( '/' ) { if part == "." { @@ -766,7 +765,9 @@ pub( crate ) mod private /// /// * `path` - A mutable reference to a string representing the path to be cleaned. /// - fn path_remove_double_dots( path : &mut std::string::String ) + + // xxx : qqq : should probably be Path? + fn path_remove_double_dots( path : &mut std::string::String ) { #[ cfg( feature = "no_std" ) ] extern crate alloc; @@ -775,9 +776,9 @@ pub( crate ) mod private let mut cleaned_parts: Vec< &str > = Vec::new(); let mut delete_empty_part = false; - for part in path.split( '/' ) + for part in path.split( '/' ) { - if part == ".." + if part == ".." { if let Some( pop ) = cleaned_parts.pop() { @@ -795,8 +796,8 @@ pub( crate ) mod private { cleaned_parts.push( ".." ); } - } - else + } + else { cleaned_parts.push( part ); } @@ -809,7 +810,7 @@ pub( crate ) mod private { *path = cleaned_parts.join( "/" ); } - + } /// Rebase the file path relative to a new base path, optionally removing a common prefix. @@ -849,7 +850,13 @@ pub( crate ) mod private /// assert_eq!( rebased_path, PathBuf::from( "/mnt/storage/documents/file.txt" ) ); /// ``` /// - pub fn rebase< T : AsRef< std::path::Path > >( file_path : T, new_path : T, old_path : Option< T > ) -> Option< std::path::PathBuf > + pub fn rebase< T : AsRef< std::path::Path > > + ( + file_path : T, + new_path : T, + old_path : Option< T > + ) + -> Option< std::path::PathBuf > { use std::path::Path; use std::path::PathBuf; @@ -858,7 +865,7 @@ pub( crate ) mod private if old_path.is_some() { let common = path_common( vec![ file_path.as_ref().to_str().unwrap(), old_path.unwrap().as_ref().to_str().unwrap() ].into_iter() )?; - + main_file_path = match main_file_path.strip_prefix( common ) { Ok( rel ) => rel, @@ -896,7 +903,7 @@ pub( crate ) mod private /// let relative_path = proper_path_tools::path::path_relative( from, to ); /// assert_eq!( relative_path, PathBuf::from( "../c/d" ) ); /// ``` - pub fn path_relative< T : AsRef< std::path::Path > >( from : T, to : T ) -> std::path::PathBuf + pub fn path_relative< T : AsRef< std::path::Path > >( from : T, to : T ) -> std::path::PathBuf { use std::path::PathBuf; #[ cfg( feature = "no_std" ) ] @@ -921,7 +928,7 @@ pub( crate ) mod private path_remove_double_dots( &mut to ); path_remove_dots( &mut from ); path_remove_dots( &mut to ); - + let mut from_parts: Vec< &str > = from.split( '/' ).collect(); let mut to_parts: Vec< &str > = to.split( '/' ).collect(); if from_parts.len() == 1 && from_parts[ 0 ].is_empty() @@ -933,9 +940,9 @@ pub( crate ) mod private to_parts.pop(); } let mut common_prefix = 0; - for ( idx, ( f, t ) ) in from_parts.iter().zip( to_parts.iter() ).enumerate() + for ( idx, ( f, t ) ) in from_parts.iter().zip( to_parts.iter() ).enumerate() { - if f != t + if f != t { break; } @@ -943,13 +950,13 @@ pub( crate ) mod private } let mut result = Vec::new(); // Add ".." 
for each directory not in common - for i in common_prefix..from_parts.len() + for i in common_prefix..from_parts.len() { - if from_parts[ common_prefix ].is_empty() || - ( - i == from_parts.len() - 1 + if from_parts[ common_prefix ].is_empty() || + ( + i == from_parts.len() - 1 && from_parts[ i ].is_empty() - && !to_parts.last().unwrap_or( &"" ).is_empty() + && !to_parts.last().unwrap_or( &"" ).is_empty() ) { continue; @@ -957,7 +964,7 @@ pub( crate ) mod private result.push( ".." ); } // Add the remaining directories from 'to' - for part in to_parts.iter().skip( common_prefix ) + for part in to_parts.iter().skip( common_prefix ) { result.push( *part ); } @@ -965,11 +972,11 @@ pub( crate ) mod private let mut relative_path = result.join( "/" ); // If the relative path is empty or the 'to' path is the same as the 'from' path, // set the relative path to "." - if relative_path.is_empty() || from == to + if relative_path.is_empty() || from == to { relative_path = ".".to_string(); } - + if to.ends_with( '/' ) && !relative_path.ends_with( '/' ) && to != "/" { relative_path.push( '/' ); @@ -985,9 +992,6 @@ pub( crate ) mod private PathBuf::from( relative_path ) } - - - /// Extracts the extension from the given path. /// /// This function takes a path and returns a string representing the extension of the file. @@ -1019,7 +1023,7 @@ pub( crate ) mod private /// assert_eq!( extension, "" ); /// ``` /// - pub fn ext( path : impl AsRef< std::path::Path > ) -> std::string::String + pub fn ext( path : impl AsRef< std::path::Path > ) -> std::string::String { use std::path::Path; #[ cfg( feature = "no_std" ) ] @@ -1027,12 +1031,12 @@ pub( crate ) mod private #[ cfg( feature = "no_std" ) ] use alloc::string::{ String, ToString }; - if path.as_ref().to_string_lossy().is_empty() + if path.as_ref().to_string_lossy().is_empty() { return String::new(); } let path_buf = Path::new( path.as_ref() ); - match path_buf.extension() + match path_buf.extension() { Some( ext ) => ext.to_string_lossy().to_string(), None => String::new(), @@ -1040,22 +1044,31 @@ pub( crate ) mod private } } -crate::mod_interface! { - protected use ext; - protected use exts; - protected use change_ext; - protected use path_relative; - protected use rebase; - protected use path_common; - protected use join_paths; - protected use without_ext; - protected use is_glob; - protected use normalize; - protected use canonicalize; +crate::mod_interface! +{ + + orphan use ext; + orphan use exts; + orphan use change_ext; + orphan use path_relative; + orphan use rebase; + orphan use path_common; + orphan use join_paths; + orphan use without_ext; + orphan use is_glob; + orphan use normalize; + orphan use canonicalize; + #[ cfg( feature = "path_unique_folder_name" ) ] - protected use unique_folder_name; + orphan use unique_folder_name; - /// Describe absolute path. Prefer using absolute path instead of relative when ever possible. + /// Describe absolute path. Prefer using absolute path instead of relative paths when ever possible. layer absolute_path; + /// Describe canonical path. Prefer using canonical path instead of native paths when ever possible. + layer canonical_path; + /// A type to symbolyze the crruent path. + layer current_path; + /// Describe native path. Use to pass path to the platfrom. 
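Together with `transitive`, proper_path_tools gains `canonical_path`, `current_path` and `native_path` layers next to the existing `absolute_path`, each a `PathBuf` newtype constructed through canonicalizing `TryFrom` impls. A usage sketch under default features; the import paths and the use of "." as an existing directory are assumptions, not taken from the patch:

// Usage sketch only. Module paths are assumptions; adjust `use` lines to the crate layout.
use proper_path_tools::path::{ AbsolutePath, CurrentPath };
use std::path::PathBuf;

fn main() -> Result< (), std::io::Error >
{
  // CurrentPath is a unit marker convertible into the process working directory.
  let cwd : PathBuf = PathBuf::try_from( CurrentPath )?;

  // AbsolutePath canonicalizes on construction and rejects non-absolute results.
  let abs = AbsolutePath::try_from( "." )?;

  // The wrappers deref to Path, so ordinary Path methods remain available.
  assert!( abs.is_absolute() );
  println!( "cwd = {}, abs = {}", cwd.display(), abs );
  Ok( () )
}
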
+ layer native_path; } diff --git a/module/core/proper_path_tools/src/path/absolute_path.rs b/module/core/proper_path_tools/src/path/absolute_path.rs index 115c71d37b..ae9542d615 100644 --- a/module/core/proper_path_tools/src/path/absolute_path.rs +++ b/module/core/proper_path_tools/src/path/absolute_path.rs @@ -1,48 +1,120 @@ /// Internal namespace. pub( crate ) mod private { - #[cfg(feature="no_std")] - extern crate std; + use crate::*; + use std:: { - borrow::Cow, - fmt, + // borrow::Cow, path::{ Path, PathBuf }, + io, + }; + + use core:: + { + fmt, + ops:: + { + Deref, + DerefMut, + }, }; + + #[cfg(feature="no_std")] + extern crate std; + #[ cfg( feature = "derive_serde" ) ] use serde::{ Serialize, Deserialize }; + #[ cfg( feature = "path_utf8" ) ] + use camino::{ Utf8Path, Utf8PathBuf }; + /// Absolute path. #[ cfg_attr( feature = "derive_serde", derive( Serialize, Deserialize ) ) ] #[ derive( Debug, Default, Clone, Ord, PartialOrd, Eq, PartialEq, Hash ) ] pub struct AbsolutePath( PathBuf ); + impl AbsolutePath + { + + /// Returns the Path without its final component, if there is one. + /// Returns None if the path terminates in a root or prefix, or if it's the empty string. + #[ inline ] + pub fn parent( &self ) -> Option< AbsolutePath > + { + self.0.parent().map( PathBuf::from ).map( AbsolutePath ) + } + + /// Creates an owned `AbsolutePath` with path adjoined to self. + #[ inline ] + pub fn join< P >( &self, path : P ) -> AbsolutePath + where + P : AsRef< Path >, + { + Self::try_from( self.0.join( path ) ).unwrap() + } + + // /// Converts a `AbsolutePath` to a `Cow` + // pub fn to_string_lossy( &self ) -> Cow< '_, str > + // { + // self.0.to_string_lossy() + // } + + /// Determines whether base is a prefix of self. + /// + /// Only considers whole path components to match. + #[ inline ] + pub fn starts_with< P : AsRef< Path > >( &self, base : P ) -> bool + { + self.0.starts_with( base ) + } + + /// Returns inner type which is PathBuf. + #[ inline( always ) ] + pub fn inner( self ) -> PathBuf + { + self.0 + } + + } + impl fmt::Display for AbsolutePath { + #[ inline ] fn fmt( &self, f : &mut fmt::Formatter<'_> ) -> fmt::Result { write!( f, "{}", self.0.display() ) } } - impl< 'a > TryFrom< &'a str > for AbsolutePath + #[ inline ] + fn is_absolute( path : &Path ) -> bool + { + // None - not absolute + // with `.` or `..` at the first component - not absolute + !path.components().next().is_some_and( | c | c.as_os_str() == "." || c.as_os_str() == ".." ) + } + + impl TryFrom< PathBuf > for AbsolutePath { type Error = std::io::Error; - fn try_from( value : &'a str ) -> Result< Self, Self::Error > + #[ inline ] + fn try_from( src : PathBuf ) -> Result< Self, Self::Error > { - Ok( Self( path::canonicalize( value )? ) ) + < Self as TryFrom< &Path > >::try_from( &src.as_path() ) } } - impl TryFrom< PathBuf > for AbsolutePath + impl TryFrom< &PathBuf > for AbsolutePath { type Error = std::io::Error; - fn try_from( value : PathBuf ) -> Result< Self, Self::Error > + #[ inline ] + fn try_from( src : &PathBuf ) -> Result< Self, Self::Error > { - Ok( Self( path::canonicalize( value )? 
) ) + < Self as TryFrom< &Path > >::try_from( &src.as_path() ) } } @@ -51,28 +123,123 @@ pub( crate ) mod private { type Error = std::io::Error; - fn try_from( value : &Path ) -> Result< Self, Self::Error > + #[ inline ] + fn try_from( src : &Path ) -> Result< Self, Self::Error > + { + // < Self as TryFrom< &str > >::try_from( src.to_string_lossy() ) + let path = path::canonicalize( src )?; + + // xxx + if !is_absolute( &path ) + { + return Err( io::Error::new( io::ErrorKind::InvalidData, "Path expected to be absolute, but it's not {path}" ) ) + } + + Ok( Self( path ) ) + } + } + + impl< 'a > TryFrom< &'a str > for AbsolutePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( src : &'a str ) -> Result< Self, Self::Error > + { + < Self as TryFrom< &Path > >::try_from( src.as_ref() ) + } + } + +// impl TryFrom< &str > for AbsolutePath +// { +// type Error = std::io::Error; +// // type Error = PathError; +// +// #[ inline( always ) ] +// fn try_from( src : &str ) -> Result< Self, Self::Error > +// { +// Self::try_from( AbsolutePath::try_from( src )? ) +// } +// } + + #[ cfg( feature = "path_utf8" ) ] + impl TryFrom< Utf8PathBuf > for AbsolutePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( src : Utf8PathBuf ) -> Result< Self, Self::Error > { - Ok( Self( path::canonicalize( value )? ) ) + AbsolutePath::try_from( src.as_std_path() ) } } - impl From< AbsolutePath > for PathBuf + #[ cfg( feature = "path_utf8" ) ] + impl TryFrom< &Utf8PathBuf > for AbsolutePath { - fn from( abs_path: AbsolutePath ) -> Self + type Error = std::io::Error; + + #[ inline ] + fn try_from( src : &Utf8PathBuf ) -> Result< Self, Self::Error > { - abs_path.0 + AbsolutePath::try_from( src.as_std_path() ) } } + #[ cfg( feature = "path_utf8" ) ] + impl TryFrom< &Utf8Path > for AbsolutePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( src : &Utf8Path ) -> Result< Self, Self::Error > + { + AbsolutePath::try_from( src.as_std_path() ) + } + } + + impl From< AbsolutePath > for PathBuf + { + #[ inline ] + fn from( src : AbsolutePath ) -> Self + { + src.0 + } + } + + impl< 'a > TryFrom< &'a AbsolutePath > for &'a str + { + type Error = std::io::Error; + #[ inline ] + fn try_from( src : &'a AbsolutePath ) -> Result< &'a str, Self::Error > + { + src + .to_str() + .ok_or_else + ( + move || io::Error::new( io::ErrorKind::Other, format!( "Can't convert &PathBuf into &str {src}" ) ) + ) + } + } + + impl TryFrom< &AbsolutePath > for String + { + type Error = std::io::Error; + #[ inline ] + fn try_from( src : &AbsolutePath ) -> Result< String, Self::Error > + { + let src2 : &str = src.try_into()?; + Ok( src2.into() ) + } + } // impl TryFrom< Utf8PathBuf > for AbsolutePath // { // type Error = std::io::Error; // -// fn try_from( value : Utf8PathBuf ) -> Result< Self, Self::Error > +// fn try_from( src : Utf8PathBuf ) -> Result< Self, Self::Error > // { -// AbsolutePath::try_from( value.as_std_path() ) +// AbsolutePath::try_from( src.as_std_path() ) // } // } @@ -80,49 +247,93 @@ pub( crate ) mod private // { // type Error = std::io::Error; // -// fn try_from( value : &Utf8Path ) -> Result< Self, Self::Error > +// fn try_from( src : &Utf8Path ) -> Result< Self, Self::Error > // { -// AbsolutePath::try_from( value.as_std_path() ) +// AbsolutePath::try_from( src.as_std_path() ) // } // } - // xxx : use derives + // // xxx : use derives + // impl AsRef< Path > for AbsolutePath + // { + // fn as_ref( &self ) -> &Path + // { + // self.0.as_ref() + // } + // } + impl AsRef< Path > for 
AbsolutePath { + #[ inline ] fn as_ref( &self ) -> &Path { self.0.as_ref() } } - impl AbsolutePath + impl AsMut< Path > for AbsolutePath { - /// Returns the Path without its final component, if there is one. - /// Returns None if the path terminates in a root or prefix, or if it's the empty string. - pub fn parent( &self ) -> Option< AbsolutePath > + #[ inline ] + fn as_mut( &mut self ) -> &mut Path { - self.0.parent().map( PathBuf::from ).map( AbsolutePath ) + &mut self.0 } + } - /// Creates an owned `AbsolutePath` with path adjoined to self. - pub fn join< P >( &self, path : P ) -> AbsolutePath - where - P : AsRef< Path >, + impl Deref for AbsolutePath + { + type Target = Path; + #[ inline ] + fn deref( &self ) -> &Self::Target { - Self::try_from( self.0.join( path ) ).unwrap() + &self.0 } + } - /// Converts a `AbsolutePath` to a `Cow` - pub fn to_string_lossy( &self ) -> Cow< '_, str > + impl DerefMut for AbsolutePath + { + #[ inline ] + fn deref_mut( &mut self ) -> &mut Self::Target { - self.0.to_string_lossy() + &mut self.0 } - } +// /// Convertable into absolute path entity should implement the trait. +// pub trait TryIntoAbsolutePath +// { +// /// Error returned if conversion is failed. +// type Error; +// /// Method to convert the type into absolute path. +// fn into_absolute_path( self ) -> Result< AbsolutePath, Self::Error >; +// } +// +// // impl TryIntoAbsolutePath for AbsolutePath +// // { +// // type Error = std::io::Error; +// // #[ inline ] +// // fn into_absolute_path( self ) -> Result< AbsolutePath, Self::Error > +// // { +// // Ok( self ) +// // } +// // } +// +// impl< TryIntoAbsolutePathType > TryIntoAbsolutePath for TryIntoAbsolutePathType +// where +// TryIntoAbsolutePathType : TryInto< AbsolutePath >, +// { +// type Error = < Self as TryInto< AbsolutePath > >::Error; +// #[ inline ] +// fn into_absolute_path( self ) -> Result< AbsolutePath, Self::Error > +// { +// self.try_into() +// } +// } + } crate::mod_interface! { exposed use AbsolutePath; + // exposed use TryIntoAbsolutePath; } diff --git a/module/core/proper_path_tools/src/path/canonical_path.rs b/module/core/proper_path_tools/src/path/canonical_path.rs new file mode 100644 index 0000000000..b02c2dccbb --- /dev/null +++ b/module/core/proper_path_tools/src/path/canonical_path.rs @@ -0,0 +1,287 @@ +/// Internal namespace. +pub( crate ) mod private +{ + + use crate::*; + + use std:: + { + // borrow::Cow, + path::{ Path, PathBuf }, + io, + }; + + use core:: + { + fmt, + ops:: + { + Deref, + DerefMut, + }, + }; + + #[cfg(feature="no_std")] + extern crate std; + + #[ cfg( feature = "derive_serde" ) ] + use serde::{ Serialize, Deserialize }; + + #[ cfg( feature = "path_utf8" ) ] + use camino::{ Utf8Path, Utf8PathBuf }; + + /// Caninical path. + #[ cfg_attr( feature = "derive_serde", derive( Serialize, Deserialize ) ) ] + #[ derive( Debug, Default, Clone, Ord, PartialOrd, Eq, PartialEq, Hash ) ] + pub struct CanonicalPath( PathBuf ); + + impl CanonicalPath + { + + /// Returns the Path without its final component, if there is one. + /// Returns None if the path terminates in a root or prefix, or if it's the empty string. + #[ inline ] + pub fn parent( &self ) -> Option< CanonicalPath > + { + self.0.parent().map( PathBuf::from ).map( CanonicalPath ) + } + + /// Creates an owned `CanonicalPath` with path adjoined to self. 
+ #[ inline ] + pub fn join< P >( &self, path : P ) -> CanonicalPath + where + P : AsRef< Path >, + { + Self::try_from( self.0.join( path ) ).unwrap() + } + + // /// Converts a `CanonicalPath` to a `Cow` + // pub fn to_string_lossy( &self ) -> Cow< '_, str > + // { + // self.0.to_string_lossy() + // } + + /// Determines whether base is a prefix of self. + /// + /// Only considers whole path components to match. + #[ inline ] + pub fn starts_with< P : AsRef< Path > >( &self, base : P ) -> bool + { + self.0.starts_with( base ) + } + + /// Returns inner type which is PathBuf. + #[ inline( always ) ] + pub fn inner( self ) -> PathBuf + { + self.0 + } + + } + + impl fmt::Display for CanonicalPath + { + #[ inline ] + fn fmt( &self, f : &mut fmt::Formatter<'_> ) -> fmt::Result + { + write!( f, "{}", self.0.display() ) + } + } + + // fn is_absolute( path : &Path ) -> bool + // { + // // None - not absolute + // // with `.` or `..` at the first component - not absolute + // !path.components().next().is_some_and( | c | c.as_os_str() == "." || c.as_os_str() == ".." ) + // } + + impl< 'a > TryFrom< &'a str > for CanonicalPath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : &'a str ) -> Result< Self, Self::Error > + { + let path = path::canonicalize( value )?; + // if !is_absolute( &path ) + // { + // return Err( io::Error::new( io::ErrorKind::InvalidData, "Path expected to be absolute" ) ) + // } + Ok( Self( path ) ) + } + } + + impl TryFrom< PathBuf > for CanonicalPath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : PathBuf ) -> Result< Self, Self::Error > + { + let path = path::canonicalize( value )?; + + // if !is_absolute( &path ) { return Err( io::Error::new( io::ErrorKind::InvalidData, "Path expected to be absolute" ) ) } + + Ok( Self( path ) ) + } + } + + // xxx : qqq : use Into< Path > + impl TryFrom< &Path > for CanonicalPath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : &Path ) -> Result< Self, Self::Error > + { + let path = path::canonicalize( value )?; + + // if !is_absolute( &path ) { return Err( io::Error::new( io::ErrorKind::InvalidData, "Path expected to be absolute" ) ) } + + Ok( Self( path ) ) + } + } + + #[ cfg( feature = "path_utf8" ) ] + impl TryFrom< Utf8PathBuf > for CanonicalPath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : Utf8PathBuf ) -> Result< Self, Self::Error > + { + CanonicalPath::try_from( value.as_std_path() ) + } + } + + #[ cfg( feature = "path_utf8" ) ] + impl TryFrom< &Utf8PathBuf > for CanonicalPath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : &Utf8PathBuf ) -> Result< Self, Self::Error > + { + CanonicalPath::try_from( value.as_std_path() ) + } + } + + #[ cfg( feature = "path_utf8" ) ] + impl TryFrom< &Utf8Path > for CanonicalPath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : &Utf8Path ) -> Result< Self, Self::Error > + { + CanonicalPath::try_from( value.as_std_path() ) + } + } + + impl From< CanonicalPath > for PathBuf + { + #[ inline ] + fn from( src : CanonicalPath ) -> Self + { + src.0 + } + } + + impl< 'a > TryFrom< &'a CanonicalPath > for &'a str + { + type Error = std::io::Error; + #[ inline ] + fn try_from( src : &'a CanonicalPath ) -> Result< &'a str, Self::Error > + { + src + .to_str() + .ok_or_else + ( + move || io::Error::new( io::ErrorKind::Other, format!( "Can't convert &PathBuf into &str {src}" ) ) + ) + } + } + + impl TryFrom< &CanonicalPath > for String + { + type Error = 
std::io::Error; + #[ inline ] + fn try_from( src : &CanonicalPath ) -> Result< String, Self::Error > + { + let src2 : &str = src.try_into()?; + Ok( src2.into() ) + } + } + +// impl TryFrom< Utf8PathBuf > for CanonicalPath +// { +// type Error = std::io::Error; +// +// fn try_from( value : Utf8PathBuf ) -> Result< Self, Self::Error > +// { +// CanonicalPath::try_from( value.as_std_path() ) +// } +// } + +// impl TryFrom< &Utf8Path > for CanonicalPath +// { +// type Error = std::io::Error; +// +// fn try_from( value : &Utf8Path ) -> Result< Self, Self::Error > +// { +// CanonicalPath::try_from( value.as_std_path() ) +// } +// } + + // // xxx : use derives + // impl AsRef< Path > for CanonicalPath + // { + // fn as_ref( &self ) -> &Path + // { + // self.0.as_ref() + // } + // } + + impl AsRef< Path > for CanonicalPath + { + #[ inline ] + fn as_ref( &self ) -> &Path + { + self.0.as_ref() + } + } + + impl AsMut< Path > for CanonicalPath + { + #[ inline ] + fn as_mut( &mut self ) -> &mut Path + { + &mut self.0 + } + } + + impl Deref for CanonicalPath + { + type Target = Path; + #[ inline ] + fn deref( &self ) -> &Self::Target + { + &self.0 + } + } + + impl DerefMut for CanonicalPath + { + #[ inline ] + fn deref_mut( &mut self ) -> &mut Self::Target + { + &mut self.0 + } + } + +} + +crate::mod_interface! +{ + exposed use CanonicalPath; +} diff --git a/module/core/proper_path_tools/src/path/current_path.rs b/module/core/proper_path_tools/src/path/current_path.rs new file mode 100644 index 0000000000..fe5c164242 --- /dev/null +++ b/module/core/proper_path_tools/src/path/current_path.rs @@ -0,0 +1,62 @@ +/// Internal namespace. +pub( crate ) mod private +{ + + use crate::*; + use std::env; + + /// Symbolize current path. + #[ derive( Clone, Copy, Debug, Default, PartialEq, Eq ) ] + pub struct CurrentPath; + + #[ cfg( feature = "path_utf8" ) ] + impl TryFrom< CurrentPath > for Utf8PathBuf + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( src : CurrentPath ) -> Result< Self, Self::Error > + { + Utf8PathBuf::try_from( PathBuf::try_from( src )? ) + .map_err + ( + | err | + { + std::io::Error::new + ( + std::io::ErrorKind::NotFound, + format!( "Cant convert to utf8 {}", err ), + ) + } + ) + } + } + + impl TryFrom< CurrentPath > for PathBuf + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( _ : CurrentPath ) -> Result< Self, Self::Error > + { + env::current_dir() + } + } + + impl TryFrom< CurrentPath > for AbsolutePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( src : CurrentPath ) -> Result< Self, Self::Error > + { + AbsolutePath::try_from( PathBuf::try_from( src )? ) + } + } + +} + +crate::mod_interface! +{ + exposed use CurrentPath; +} diff --git a/module/core/proper_path_tools/src/path/native_path.rs b/module/core/proper_path_tools/src/path/native_path.rs new file mode 100644 index 0000000000..d192cc6966 --- /dev/null +++ b/module/core/proper_path_tools/src/path/native_path.rs @@ -0,0 +1,302 @@ +/// Internal namespace. +pub( crate ) mod private +{ + + use crate::*; + + use std:: + { + // borrow::Cow, + path::{ Path, PathBuf }, + io, + }; + + use core:: + { + fmt, + ops:: + { + Deref, + DerefMut, + }, + }; + + #[cfg(feature="no_std")] + extern crate std; + + #[ cfg( feature = "derive_serde" ) ] + use serde::{ Serialize, Deserialize }; + + #[ cfg( feature = "path_utf8" ) ] + use camino::{ Utf8Path, Utf8PathBuf }; + + /// Caninical path. 
+ #[ cfg_attr( feature = "derive_serde", derive( Serialize, Deserialize ) ) ] + #[ derive( Debug, Default, Clone, Ord, PartialOrd, Eq, PartialEq, Hash ) ] + pub struct NativePath( PathBuf ); + + impl NativePath + { + + /// Returns the Path without its final component, if there is one. + /// Returns None if the path terminates in a root or prefix, or if it's the empty string. + #[ inline ] + pub fn parent( &self ) -> Option< NativePath > + { + self.0.parent().map( PathBuf::from ).map( NativePath ) + } + + /// Creates an owned `NativePath` with path adjoined to self. + #[ inline ] + pub fn join< P >( &self, path : P ) -> NativePath + where + P : AsRef< Path >, + { + Self::try_from( self.0.join( path ) ).unwrap() + } + + // /// Converts a `NativePath` to a `Cow` + // pub fn to_string_lossy( &self ) -> Cow< '_, str > + // { + // self.0.to_string_lossy() + // } + + /// Determines whether base is a prefix of self. + /// + /// Only considers whole path components to match. + #[ inline ] + pub fn starts_with< P : AsRef< Path > >( &self, base : P ) -> bool + { + self.0.starts_with( base ) + } + + /// Returns inner type which is PathBuf. + #[ inline( always ) ] + pub fn inner( self ) -> PathBuf + { + self.0 + } + + } + + impl fmt::Display for NativePath + { + #[ inline ] + fn fmt( &self, f : &mut fmt::Formatter<'_> ) -> fmt::Result + { + write!( f, "{}", self.0.display() ) + } + } + + // fn is_absolute( path : &Path ) -> bool + // { + // // None - not absolute + // // with `.` or `..` at the first component - not absolute + // !path.components().next().is_some_and( | c | c.as_os_str() == "." || c.as_os_str() == ".." ) + // } + + impl< 'a > TryFrom< &'a str > for NativePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : &'a str ) -> Result< Self, Self::Error > + { + let path = path::canonicalize( value )?; + // if !is_absolute( &path ) + // { + // return Err( io::Error::new( io::ErrorKind::InvalidData, "Path expected to be absolute" ) ) + // } + Ok( Self( path ) ) + } + } + + impl TryFrom< PathBuf > for NativePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : PathBuf ) -> Result< Self, Self::Error > + { + let path = path::canonicalize( value )?; + + // if !is_absolute( &path ) { return Err( io::Error::new( io::ErrorKind::InvalidData, "Path expected to be absolute" ) ) } + + Ok( Self( path ) ) + } + } + + impl TryFrom< &PathBuf > for NativePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : &PathBuf ) -> Result< Self, Self::Error > + { + let path = path::canonicalize( value )?; + + // if !is_absolute( &path ) { return Err( io::Error::new( io::ErrorKind::InvalidData, "Path expected to be absolute" ) ) } + + Ok( Self( path ) ) + } + } + + // xxx : qqq : use Into< Path > + impl TryFrom< &Path > for NativePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : &Path ) -> Result< Self, Self::Error > + { + let path = path::canonicalize( value )?; + + // if !is_absolute( &path ) { return Err( io::Error::new( io::ErrorKind::InvalidData, "Path expected to be absolute" ) ) } + + Ok( Self( path ) ) + } + } + + #[ cfg( feature = "path_utf8" ) ] + impl TryFrom< Utf8PathBuf > for NativePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : Utf8PathBuf ) -> Result< Self, Self::Error > + { + NativePath::try_from( value.as_std_path() ) + } + } + + #[ cfg( feature = "path_utf8" ) ] + impl TryFrom< &Utf8PathBuf > for NativePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( 
value : &Utf8PathBuf ) -> Result< Self, Self::Error > + { + NativePath::try_from( value.as_std_path() ) + } + } + + #[ cfg( feature = "path_utf8" ) ] + impl TryFrom< &Utf8Path > for NativePath + { + type Error = std::io::Error; + + #[ inline ] + fn try_from( value : &Utf8Path ) -> Result< Self, Self::Error > + { + NativePath::try_from( value.as_std_path() ) + } + } + + impl From< NativePath > for PathBuf + { + #[ inline ] + fn from( src : NativePath ) -> Self + { + src.0 + } + } + + impl< 'a > TryFrom< &'a NativePath > for &'a str + { + type Error = std::io::Error; + #[ inline ] + fn try_from( src : &'a NativePath ) -> Result< &'a str, Self::Error > + { + src + .to_str() + .ok_or_else + ( + move || io::Error::new( io::ErrorKind::Other, format!( "Can't convert &PathBuf into &str {src}" ) ) + ) + } + } + + impl TryFrom< &NativePath > for String + { + type Error = std::io::Error; + #[ inline ] + fn try_from( src : &NativePath ) -> Result< String, Self::Error > + { + let src2 : &str = src.try_into()?; + Ok( src2.into() ) + } + } + +// impl TryFrom< Utf8PathBuf > for NativePath +// { +// type Error = std::io::Error; +// +// fn try_from( value : Utf8PathBuf ) -> Result< Self, Self::Error > +// { +// NativePath::try_from( value.as_std_path() ) +// } +// } + +// impl TryFrom< &Utf8Path > for NativePath +// { +// type Error = std::io::Error; +// +// fn try_from( value : &Utf8Path ) -> Result< Self, Self::Error > +// { +// NativePath::try_from( value.as_std_path() ) +// } +// } + + // // xxx : use derives + // impl AsRef< Path > for NativePath + // { + // fn as_ref( &self ) -> &Path + // { + // self.0.as_ref() + // } + // } + + impl AsRef< Path > for NativePath + { + #[ inline ] + fn as_ref( &self ) -> &Path + { + self.0.as_ref() + } + } + + impl AsMut< Path > for NativePath + { + #[ inline ] + fn as_mut( &mut self ) -> &mut Path + { + &mut self.0 + } + } + + impl Deref for NativePath + { + type Target = Path; + #[ inline ] + fn deref( &self ) -> &Self::Target + { + &self.0 + } + } + + impl DerefMut for NativePath + { + #[ inline ] + fn deref_mut( &mut self ) -> &mut Self::Target + { + &mut self.0 + } + } + +} + +crate::mod_interface! +{ + exposed use NativePath; +} diff --git a/module/core/proper_path_tools/src/transitive.rs b/module/core/proper_path_tools/src/transitive.rs new file mode 100644 index 0000000000..a5bdd22627 --- /dev/null +++ b/module/core/proper_path_tools/src/transitive.rs @@ -0,0 +1,201 @@ +/// Internal namespace. +pub( crate ) mod private +{ + // xxx : move to derive_tools + + // qqq : write tests, lool into example + // + // impl< Initial > TransitiveTryFrom< AbsolutePath, PathError, Initial > + // for CrateDir + // where + // AbsolutePath : TryFrom< Initial >, + // PathError : From< < AbsolutePath as TryFrom< Initial > >::Error >, + // { + // } + + // qqq : implement transitive_from + // qqq : implement transitive_into + + // qqq : move to derive_tools + // qqq : write tests, look into example + // + // impl< Initial > TransitiveTryFrom< AbsolutePath, PathError, Initial > + // for CrateDir + // where + // AbsolutePath : TryFrom< Initial >, + // PathError : From< < AbsolutePath as TryFrom< Initial > >::Error >, + // { + // } + // qqq : implement transitive_try_into + // qqq : implement transitive_from + // qqq : implement transitive_into + + /// A trait to perform a transitive `try_from` conversion. 
+ /// + /// This trait allows for a two-step conversion process where an initial type `Initial` + /// is first converted to an intermediate type `Transitive`, and then to the final type `Self`. + /// + /// # Type Parameters + /// + /// - `Error`: The error type that can be produced during the conversion. + /// - `Initial`: The initial type from which the conversion starts. + /// + /// # Requirements + /// + /// - `Transitive` must implement `TryFrom<Initial>`. + /// - `Self` must implement `TryFrom<Transitive>` with the same error type. + /// - `Error` must implement `From<<Transitive as TryFrom<Initial>>::Error>`. + /// + /// # Example + /// + /// ```rust + /// use proper_path_tools::TransitiveTryFrom; + /// use std::convert::TryFrom; + /// + /// struct InitialType; + /// struct IntermediateType; + /// struct FinalType; + /// struct ConversionError; + /// + /// impl TryFrom< InitialType > for IntermediateType + /// { + /// type Error = ConversionError; + /// fn try_from( value : InitialType ) -> Result< Self, Self::Error > + /// { + /// // Conversion logic here + /// Ok( IntermediateType ) + /// } + /// } + /// + /// impl TryFrom< IntermediateType > for FinalType + /// { + /// type Error = ConversionError; + /// fn try_from( value : IntermediateType ) -> Result< Self, Self::Error > + /// { + /// // Conversion logic here + /// Ok( FinalType ) + /// } + /// } + /// + /// let initial = InitialType; + /// let final_result : Result< FinalType, ConversionError > = FinalType::transitive_try_from::< IntermediateType >( initial ); + /// ``` + pub trait TransitiveTryFrom< Error, Initial > + { + /// Performs a transitive `try_from` conversion. + /// + /// This method first converts the `src` of type `Initial` to the intermediate type `Transitive`, + /// and then converts the intermediate type to the final type `Self`. + /// + /// # Arguments + /// + /// - `src`: The initial value to be converted. + /// + /// # Returns + /// + /// - `Ok(Self)`: If both conversions succeed. + /// - `Err(Error)`: If either conversion fails. + /// + /// # Example + /// + /// See the trait-level documentation for an example. + #[ inline( always ) ] + fn transitive_try_from< Transitive >( src : Initial ) -> Result< Self, Error > + where + Transitive : TryFrom< Initial >, + Self : TryFrom< Transitive, Error = Error >, + Error : From< < Transitive as TryFrom< Initial > >::Error >, + { + let src2 = TryFrom::< Initial >::try_from( src )?; + TryFrom::< Transitive >::try_from( src2 ) + } + } + + impl< Initial, Error, Final > TransitiveTryFrom< Error, Initial > for Final {} + + /// A trait to perform a transitive `try_into` conversion. + /// + /// This trait allows for a two-step conversion process where an initial type `Self` + /// is first converted to an intermediate type `Transitive`, and then to the final type `Final`. + /// + /// # Type Parameters + /// + /// - `Error`: The error type that can be produced during the conversion. + /// - `Final`: The final type to which `Transitive` is converted. + /// + /// # Requirements + /// + /// - `Self` must implement `TryInto<Transitive>`. + /// - `Transitive` must implement `TryInto<Final>` with the same error type. + /// - `Error` must implement `From<<Self as TryInto<Transitive>>::Error>`.
+ /// + /// # Example + /// + /// ```rust + /// use proper_path_tools::TransitiveTryInto; + /// use std::convert::TryInto; + /// + /// struct InitialType; + /// struct IntermediateType; + /// struct FinalType; + /// struct ConversionError; + /// + /// impl TryInto< IntermediateType > for InitialType + /// { + /// type Error = ConversionError; + /// fn try_into( self ) -> Result< IntermediateType, Self::Error > + /// { + /// // Conversion logic here + /// Ok( IntermediateType ) + /// } + /// } + /// + /// impl TryInto< FinalType > for IntermediateType + /// { + /// type Error = ConversionError; + /// fn try_into( self ) -> Result< FinalType, Self::Error > + /// { + /// // Conversion logic here + /// Ok( FinalType ) + /// } + /// } + /// + /// let initial = InitialType; + /// let final_result : Result< FinalType, ConversionError > = initial.transitive_try_into::< IntermediateType >(); + /// ``` + pub trait TransitiveTryInto< Error, Final > : Sized + { + /// Performs a transitive `try_into` conversion. + /// + /// This method first converts `self` to the intermediate type `Transitive`, + /// and then converts the intermediate type to the final type `Final`. + /// + /// # Returns + /// + /// - `Ok(Final)`: If both conversions succeed. + /// - `Err(Error)`: If either conversion fails. + /// + /// # Example + /// + /// See the trait-level documentation for an example. + #[ inline( always ) ] + fn transitive_try_into< Transitive >( self ) -> Result< Final, Error > + where + Self : TryInto< Transitive >, + Transitive : TryInto< Final, Error = Error >, + Error : From< < Self as TryInto< Transitive > >::Error >, + { + let src2 = TryInto::< Transitive >::try_into( self )?; + TryInto::< Final >::try_into( src2 ) + } + } + + impl< Error, Final, Initial > TransitiveTryInto< Error, Final > for Initial {} + +} + +crate::mod_interface! 
+{ + exposed use TransitiveTryFrom; + exposed use TransitiveTryInto; +} diff --git a/module/core/proper_path_tools/tests/inc/absolute_path.rs b/module/core/proper_path_tools/tests/inc/absolute_path.rs index 9ef2c83a5f..247be8c4b4 100644 --- a/module/core/proper_path_tools/tests/inc/absolute_path.rs +++ b/module/core/proper_path_tools/tests/inc/absolute_path.rs @@ -1,8 +1,15 @@ #[ allow( unused_imports ) ] use super::*; -use the_module::AbsolutePath; -use std::path::Path; -use std::path::PathBuf; + +use the_module:: +{ + AbsolutePath, + Path, + PathBuf, +}; + +// #[ cfg( feature = "path_utf8" ) ] +// use the_module::Utf8PathBuf; #[ test ] fn basic() @@ -15,32 +22,31 @@ fn basic() } #[ test ] -fn test_to_string_lossy() +fn test_to_string_lossy() { let path : AbsolutePath = "/path/to/file.txt".try_into().unwrap(); let result = path.to_string_lossy(); assert_eq!( result, "/path/to/file.txt" ); } #[test] -fn test_to_string_lossy_hard() +fn test_to_string_lossy_hard() { let abs_path : AbsolutePath = "/path/with/😀/unicode.txt".try_into().unwrap(); let string_lossy = abs_path.to_string_lossy(); assert_eq!( string_lossy, "/path/with/\u{1F600}/unicode.txt" ); } - #[test] -fn test_try_from_pathbuf() +fn test_try_from_pathbuf() { - + let path_buf = PathBuf::from( "/path/to/some/file.txt" ); let abs_path : AbsolutePath = path_buf.try_into().unwrap(); assert_eq!( abs_path.to_string_lossy(), "/path/to/some/file.txt" ); } #[test] -fn test_try_from_path() +fn test_try_from_path() { let path = Path::new( "/path/to/some/file.txt" ); let abs_path : AbsolutePath = path.try_into().unwrap(); @@ -48,7 +54,7 @@ fn test_try_from_path() } #[test] -fn test_parent() +fn test_parent() { let abs_path : AbsolutePath = "/path/to/some/file.txt".try_into().unwrap(); let parent_path = abs_path.parent().unwrap(); @@ -56,7 +62,7 @@ fn test_parent() } #[test] -fn test_join() +fn test_join() { let abs_path : AbsolutePath = "/path/to/some".try_into().unwrap(); let joined_path = abs_path.join( "file.txt" ); @@ -65,7 +71,7 @@ fn test_join() #[test] -fn test_relative_path_try_from_str() +fn test_relative_path_try_from_str() { let rel_path_str = "src/main.rs"; let rel_path = AbsolutePath::try_from( rel_path_str ).unwrap(); @@ -73,7 +79,7 @@ fn test_relative_path_try_from_str() } #[test] -fn test_relative_path_try_from_pathbuf() +fn test_relative_path_try_from_pathbuf() { let rel_path_buf = PathBuf::from( "src/main.rs" ); let rel_path = AbsolutePath::try_from( rel_path_buf.clone() ).unwrap(); @@ -81,7 +87,7 @@ fn test_relative_path_try_from_pathbuf() } #[test] -fn test_relative_path_try_from_path() +fn test_relative_path_try_from_path() { let rel_path = Path::new( "src/main.rs" ); let rel_path_result = AbsolutePath::try_from( rel_path ); @@ -90,7 +96,7 @@ fn test_relative_path_try_from_path() } #[test] -fn test_relative_path_parent() +fn test_relative_path_parent() { let rel_path = AbsolutePath::try_from( "src/main.rs" ).unwrap(); let parent_path = rel_path.parent().unwrap(); @@ -98,9 +104,9 @@ fn test_relative_path_parent() } #[test] -fn test_relative_path_join() +fn test_relative_path_join() { let rel_path = AbsolutePath::try_from( "src" ).unwrap(); let joined = rel_path.join( "main.rs" ); assert_eq!( joined.to_string_lossy(), "src/main.rs" ); -} \ No newline at end of file +} diff --git a/module/core/proper_path_tools/tests/inc/current_path.rs b/module/core/proper_path_tools/tests/inc/current_path.rs new file mode 100644 index 0000000000..628873a346 --- /dev/null +++ b/module/core/proper_path_tools/tests/inc/current_path.rs @@ -0,0 
+1,33 @@ +#[ allow( unused_imports ) ] +use super::*; + +use the_module:: +{ + AbsolutePath, + // Path, + PathBuf, +}; + +#[ cfg( feature = "path_utf8" ) ] +use the_module::Utf8PathBuf; + +#[ test ] +fn basic() +{ + + let cd = the_module::CurrentPath; + let cd_path : PathBuf = cd.try_into().unwrap(); + println!( "cd_path : {cd_path:?}" ); + + let cd = the_module::CurrentPath; + let absolute_path : AbsolutePath = cd.try_into().unwrap(); + println!( "absolute_path : {absolute_path:?}" ); + + #[ cfg( feature = "path_utf8" ) ] + { + let cd = the_module::CurrentPath; + let utf8_path : Utf8PathBuf = cd.try_into().unwrap(); + println!( "utf8_path : {utf8_path:?}" ); + } + +} diff --git a/module/core/proper_path_tools/tests/inc/mod.rs b/module/core/proper_path_tools/tests/inc/mod.rs index 31872cad50..58e8721710 100644 --- a/module/core/proper_path_tools/tests/inc/mod.rs +++ b/module/core/proper_path_tools/tests/inc/mod.rs @@ -3,6 +3,8 @@ use super::*; mod absolute_path; +mod current_path; +mod path_canonicalize; mod path_change_ext; mod path_common; mod path_ext; @@ -12,6 +14,7 @@ mod path_join; mod path_normalize; mod path_relative; mod rebase_path; +mod transitive; mod without_ext; #[cfg(feature = "path_unique_folder_name")] diff --git a/module/core/proper_path_tools/tests/inc/path_canonicalize.rs b/module/core/proper_path_tools/tests/inc/path_canonicalize.rs new file mode 100644 index 0000000000..64cd4665f2 --- /dev/null +++ b/module/core/proper_path_tools/tests/inc/path_canonicalize.rs @@ -0,0 +1,50 @@ +#[ allow( unused_imports ) ] +use super::*; +use std::path::PathBuf; +use the_module::path; + +#[ test ] +fn assumptions() +{ + + assert_eq!( PathBuf::from( "c:/src/" ).is_absolute(), true ); + assert_eq!( PathBuf::from( "/c/src/" ).is_absolute(), false ); + assert_eq!( PathBuf::from( "/c:/src/" ).is_absolute(), false ); + assert_eq!( PathBuf::from( "/c/src/" ).is_absolute(), false ); + +} + +#[ test ] +fn basic() +{ + + let got = path::canonicalize( PathBuf::from( "src" ) ); + let exp = PathBuf::from( "src" ); + assert_eq!( got.unwrap(), exp ); + + let got = path::canonicalize( PathBuf::from( "\\src" ) ); + let exp = PathBuf::from( "/src" ); + assert_eq!( got.unwrap(), exp ); + + let got = path::canonicalize( PathBuf::from( "\\src\\" ) ); + let exp = PathBuf::from( "/src/" ); + assert_eq!( got.unwrap(), exp ); + + let got = path::canonicalize( PathBuf::from( "/src" ) ); + let exp = PathBuf::from( "/src" ); + assert_eq!( got.unwrap(), exp ); + + let got = path::canonicalize( PathBuf::from( "/src/" ) ); + let exp = PathBuf::from( "/src/" ); + assert_eq!( got.unwrap(), exp ); + + let got = path::canonicalize( PathBuf::from( "./src/" ) ); + let exp = PathBuf::from( "./src/" ); + assert_eq!( got.unwrap(), exp ); + + // xxx : qqq : does not work + // let got = path::canonicalize( PathBuf::from( "c:/src/" ) ); + // let exp = PathBuf::from( "/c/src/" ); + // assert_eq!( got.unwrap(), exp ); + +} diff --git a/module/core/proper_path_tools/tests/inc/path_join.rs b/module/core/proper_path_tools/tests/inc/path_join.rs index 59217c1fe5..fa526ee19d 100644 --- a/module/core/proper_path_tools/tests/inc/path_join.rs +++ b/module/core/proper_path_tools/tests/inc/path_join.rs @@ -1,252 +1,450 @@ use super::*; - +use std::path::PathBuf; #[ test ] fn join_empty() { - let ( expected, paths ) = ( "", vec![ "" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. 
Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "".into(), vec![ "".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } #[ test ] fn join_several_empties() { - let ( expected, paths ) = ( "", vec![ "", "" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "".into(), vec![ "".into(), "".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn root_with_absolute() { - let ( expected, paths ) = ( "/a/b", vec![ "/", "/a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b".into(), vec![ "/".into(), "/a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn root_with_relative() { - let ( expected, paths ) = ( "/a/b", vec![ "/", "a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b".into(), vec![ "/".into(), "a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn dir_with_absolute() { - let ( expected, paths ) = ( "/a/b", vec![ "/dir", "/a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b".into(), vec![ "/dir".into(), "/a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - - #[ test ] fn dir_with_relative() { - let ( expected, paths ) = ( "/dir/a/b", vec![ "/dir", "a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. 
Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/dir/a/b".into(), vec![ "/dir".into(), "a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn trailed_dir_with_absolute() { - let ( expected, paths ) = ( "/a/b", vec![ "/dir/", "/a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b".into(), vec![ "/dir/".into(), "/a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } #[ test ] fn trailed_dir_with_relative() { - let ( expected, paths ) = ( "/dir/a/b", vec![ "/dir/", "a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/dir/a/b".into(), vec![ "/dir/".into(), "a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn dir_with_down() { - let ( expected, paths ) = ( "/a/b", vec![ "/dir", "../a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b".into(), vec![ "/dir".into(), "../a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn trailed_dir_with_down() { - let ( expected, paths ) = ( "/dir/a/b", vec![ "/dir/", "../a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/dir/a/b".into(), vec![ "/dir/".into(), "../a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. 
Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - - #[ test ] fn dir_with_several_down() { - let ( expected, paths ) = ( "/a/b", vec![ "/dir/dir2", "../../a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b".into(), vec![ "/dir/dir2".into(), "../../a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn trailed_dir_with_several_down() { - let ( expected, paths ) = ( "/a/b", vec![ "/dir/", "../../a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b".into(), vec![ "/dir/".into(), "../../a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn dir_with_several_down_go_out_of_root() { - let ( expected, paths ) = ( "/../a/b", vec![ "/dir", "../../a/b" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/../a/b".into(), vec![ "/dir".into(), "../../a/b".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } #[ test ] fn trailed_absolute_with_trailed_down() { - let ( expected, paths ) = ( "/a/b/", vec![ "/a/b/", "../" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b/".into(), vec![ "/a/b/".into(), "../".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn absolute_with_trailed_down() { - let ( expected, paths ) = ( "/a/", vec![ "/a/b", "../" ]) ; - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. 
Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/".into(), vec![ "/a/b".into(), "../".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn trailed_absolute_with_down() { - let ( expected, paths ) = ( "/a/b", vec![ "/a/b/", ".." ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b".into(), vec![ "/a/b/".into(), "..".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn trailed_absolute_with_trailed_here() { - let ( expected, paths ) = ( "/a/b/", vec![ "/a/b/", "./" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b/".into(), vec![ "/a/b/".into(), "./".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - - #[ test ] fn absolute_with_trailed_here() { - let ( expected, paths ) = ( "/a/b/", vec![ "/a/b", "./" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b/".into(), vec![ "/a/b".into(), "./".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn trailed_absolute_with_here() { - let ( expected, paths ) = ( "/a/b", vec![ "/a/b/", "." ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b".into(), vec![ "/a/b/".into(), ".".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn join_with_empty() { - let ( expected, paths ) = ( "/a/b/c", vec![ "", "a/b", "", "c", "" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. 
Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/a/b/c".into(), vec![ "".into(), "a/b".into(), "".into(), "c".into(), "".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } #[ test ] fn join_windows_os_paths() { - let ( expected, paths ) = ( "/c/foo/bar/", vec![ "c :\\", "foo\\", "bar\\" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/c/foo/bar/".into(), vec![ "c:\\".into(), "foo\\".into(), "bar\\".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn join_unix_os_paths() { - let ( expected, paths ) = ( "/baz/foo", vec![ "/bar/", "/baz", "foo/", "." ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/baz/foo".into(), vec![ "/bar/".into(), "/baz".into(), "foo/".into(), ".".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn join_unix_os_paths_2() { - let ( expected, paths ) = ( "/baz/foo/z", vec![ "/bar/", "/baz", "foo/", ".", "z" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/baz/foo/z".into(), vec![ "/bar/".into(), "/baz".into(), "foo/".into(), ".".into(), "z".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn more_complicated_cases_1() { - let ( expected, paths ) = ( "/aa/bb//cc", vec![ "/aa", "bb//", "cc" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/aa/bb//cc".into(), vec![ "/aa".into(), "bb//".into(), "cc".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. 
Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - - #[ test ] fn more_complicated_cases_2() { - let ( expected, paths ) = ( "/bb/cc", vec![ "/aa", "/bb", "cc" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/bb/cc".into(), vec![ "/aa".into(), "/bb".into(), "cc".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn more_complicated_cases_3() { - let ( expected, paths ) = ( "//aa/bb//cc//", vec![ "//aa", "bb//", "cc//" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "//aa/bb//cc//".into(), vec![ "//aa".into(), "bb//".into(), "cc//".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } - #[ test ] fn more_complicated_cases_4() { - let ( expected, paths ) = ( "/aa/bb//cc", vec![ "/aa", "bb//", "cc", "." ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "/aa/bb//cc".into(), vec![ "/aa".into(), "bb//".into(), "cc".into(), ".".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } #[ test ] fn more_complicated_cases_5() { - let ( expected, paths ) = ( "//b//d/..e", vec![ "/", "a", "//b//", "././c", "../d", "..e" ] ); - let result = the_module::path::join_paths( paths.clone().into_iter() ); - assert_eq!( result, std::path::PathBuf::from( expected ), "Test failed. Paths: '{:?}', Expected: '{}', Got: '{}'", paths, expected, result.to_string_lossy() ); + let ( expected, paths ) : ( PathBuf, Vec< PathBuf > ) = ( "//b//d/..e".into(), vec![ "/".into(), "a".into(), "//b//".into(), "././c".into(), "../d".into(), "..e".into() ] ); + let result = the_module::path::join_paths( paths.iter().map( |p| p.as_path() ) ); + assert_eq! + ( + result, + expected, + "Test failed. 
Paths: '{:?}', Expected: '{}', Got: '{}'", + paths, + expected.display(), + result.to_string_lossy(), + ); } \ No newline at end of file diff --git a/module/core/proper_path_tools/tests/inc/path_relative.rs b/module/core/proper_path_tools/tests/inc/path_relative.rs index f94fcc5ed6..7d5f0536c7 100644 --- a/module/core/proper_path_tools/tests/inc/path_relative.rs +++ b/module/core/proper_path_tools/tests/inc/path_relative.rs @@ -155,7 +155,7 @@ fn test_absolute_relative_root_to_root() fn test_windows_disks() { let from = "d:/"; - let to = "c :/x/y"; + let to = "c:/x/y"; let expected = "../c/x/y"; assert_eq!( the_module::path::path_relative( from, to ), PathBuf::from( expected ) ); } diff --git a/module/core/proper_path_tools/tests/inc/transitive.rs b/module/core/proper_path_tools/tests/inc/transitive.rs new file mode 100644 index 0000000000..e0b2da7acc --- /dev/null +++ b/module/core/proper_path_tools/tests/inc/transitive.rs @@ -0,0 +1,86 @@ +#[ allow( unused_imports ) ] +use super::*; + +#[ test ] +fn basic_from() +{ + use proper_path_tools::TransitiveTryFrom; + use std::convert::TryFrom; + + struct InitialType; + struct IntermediateType; + struct FinalType; + struct ConversionError; + + impl TryFrom< InitialType > for IntermediateType + { + type Error = ConversionError; + fn try_from( _value : InitialType ) -> Result< Self, Self::Error > + { + // Conversion logic here + Ok( IntermediateType ) + } + } + + impl TryFrom< IntermediateType > for FinalType + { + type Error = ConversionError; + fn try_from( _value : IntermediateType ) -> Result< Self, Self::Error > + { + // Conversion logic here + Ok( FinalType ) + } + } + + // impl TransitiveTryFrom< IntermediateType, ConversionError, InitialType > for FinalType {} + + let initial = InitialType; + let _final_result : Result< FinalType, ConversionError > = FinalType::transitive_try_from::< IntermediateType >( initial ); + +} + +#[ test ] +fn test_transitive_try_into() +{ + use proper_path_tools::TransitiveTryInto; + + // Define NewType1 wrapping a String + #[ derive( Debug, PartialEq ) ] + struct NewType1( String ); + + // Define NewType2 wrapping NewType1 + #[ derive( Debug, PartialEq ) ] + struct NewType2( NewType1 ); + + // Define an error type for conversion + #[ derive( Debug, PartialEq ) ] + struct ConversionError; + + // Implement TryInto for converting String to NewType1 + impl TryInto< NewType1 > for String + { + type Error = ConversionError; + fn try_into( self ) -> Result< NewType1, Self::Error > + { + Ok( NewType1( self ) ) + } + } + + // Implement TryInto for converting NewType1 to NewType2 + impl TryInto< NewType2 > for NewType1 + { + type Error = ConversionError; + fn try_into( self ) -> Result< NewType2, Self::Error > + { + Ok( NewType2( self ) ) + } + } + + let initial = String::from( "Hello, world!" ); + let final_result : Result< NewType2, ConversionError > = initial.transitive_try_into::< NewType1 >(); + assert_eq!( final_result, Ok( NewType2( NewType1( String::from( "Hello, world!" ) ) ) ) ); + + let initial = String::from( "Hello, world!" 
); + let _final_result : NewType2 = initial.transitive_try_into::< NewType1 >().unwrap(); + +} diff --git a/module/core/reflect_tools/Cargo.toml b/module/core/reflect_tools/Cargo.toml index 42a0b8e4e7..8fb902cee9 100644 --- a/module/core/reflect_tools/Cargo.toml +++ b/module/core/reflect_tools/Cargo.toml @@ -22,7 +22,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] diff --git a/module/core/reflect_tools/src/lib.rs b/module/core/reflect_tools/src/lib.rs index 50f0a2231f..de402a175f 100644 --- a/module/core/reflect_tools/src/lib.rs +++ b/module/core/reflect_tools/src/lib.rs @@ -46,8 +46,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/reflect_tools/src/reflect.rs b/module/core/reflect_tools/src/reflect.rs index 9d3c8a288e..6fe9078c51 100644 --- a/module/core/reflect_tools/src/reflect.rs +++ b/module/core/reflect_tools/src/reflect.rs @@ -130,8 +130,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/reflect_tools/src/reflect/axiomatic.rs b/module/core/reflect_tools/src/reflect/axiomatic.rs index 24c9a8ed68..91ac436e4f 100644 --- a/module/core/reflect_tools/src/reflect/axiomatic.rs +++ b/module/core/reflect_tools/src/reflect/axiomatic.rs @@ -527,8 +527,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/reflect_tools/src/reflect/entity_array.rs b/module/core/reflect_tools/src/reflect/entity_array.rs index 5c171783e4..1e2b86439b 100644 --- a/module/core/reflect_tools/src/reflect/entity_array.rs +++ b/module/core/reflect_tools/src/reflect/entity_array.rs @@ -97,8 +97,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/reflect_tools/src/reflect/entity_hashmap.rs b/module/core/reflect_tools/src/reflect/entity_hashmap.rs index 696f644db5..23297d4c12 100644 --- a/module/core/reflect_tools/src/reflect/entity_hashmap.rs +++ b/module/core/reflect_tools/src/reflect/entity_hashmap.rs @@ -32,7 +32,7 @@ pub mod private KeyedCollectionDescriptor::< Self >::new( 0, Vec::new() ) } } - + impl< K, V > Entity for KeyedCollectionDescriptor< HashMap< K, V > > where K : 'static + Instance + IsScalar + Clone, @@ -69,7 +69,7 @@ pub mod private let mut result : Vec< KeyVal > = ( 0 .. self.len() ) .map( | k | KeyVal { key : Primitive::usize( k ), val : Box::new( < V as Instance >::Reflect() ) } ) .collect(); - + for i in 0..self.len() { result[ i ] = KeyVal { key : self.keys[ i ].clone(), val : Box::new( < V as Instance >::Reflect() ) } @@ -104,8 +104,10 @@ pub mod orphan } /// Exposed namespace of the module. 
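The hunks in this area repeat one housekeeping change: every `exposed` namespace module gains `#[ allow( unused_imports ) ]` and a `use super::*;`, so the layer can see its parent scope without tripping lints when a given build re-imports nothing. The snippet below is a minimal, self-contained sketch of that layering; the module and item names are placeholders chosen for illustration and are not taken from this patch.

mod example_layering
{
  mod private
  {
    pub fn placeholder_item() {}
  }

  /// Exposed namespace of the module.
  #[ allow( unused_imports ) ]
  pub mod exposed
  {
    // Bring the parent scope in, exactly as the hunks above do.
    use super::*;
    #[ doc( inline ) ]
    #[ allow( unused_imports ) ]
    pub use super::prelude::*;
    pub use super::private::placeholder_item;
  }

  /// Prelude to use essentials.
  pub mod prelude {}
}

fn main()
{
  // Callers reach items through the exposed layer.
  example_layering::exposed::placeholder_item();
}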
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/reflect_tools/src/reflect/entity_hashset.rs b/module/core/reflect_tools/src/reflect/entity_hashset.rs index d51fda1030..c04bcd9e36 100644 --- a/module/core/reflect_tools/src/reflect/entity_hashset.rs +++ b/module/core/reflect_tools/src/reflect/entity_hashset.rs @@ -27,7 +27,7 @@ pub mod private CollectionDescriptor::< Self >::new( 0 ) } } - + impl< T > Entity for CollectionDescriptor< HashSet< T > > where T : 'static + Instance, @@ -93,8 +93,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/reflect_tools/src/reflect/entity_slice.rs b/module/core/reflect_tools/src/reflect/entity_slice.rs index 90416afcbc..5cab0850bb 100644 --- a/module/core/reflect_tools/src/reflect/entity_slice.rs +++ b/module/core/reflect_tools/src/reflect/entity_slice.rs @@ -37,33 +37,33 @@ pub mod private { true } - + #[ inline( always ) ] fn len( &self ) -> usize { self.len } - + #[ inline( always ) ] fn type_name( &self ) -> &'static str { core::any::type_name::< &'static [ T ] >() } - + #[ inline( always ) ] fn type_id( &self ) -> core::any::TypeId { core::any::TypeId::of::< &'static [ T ] >() } - + #[ inline( always ) ] fn elements( &self ) -> Box< dyn Iterator< Item = KeyVal > > { - + let result : Vec< KeyVal > = ( 0 .. self.len() ) .map( | k | KeyVal { key : Primitive::usize( k ), val : Box::new( < T as Instance >::Reflect() ) } ) .collect(); - + Box::new( result.into_iter() ) } } @@ -93,8 +93,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/reflect_tools/src/reflect/entity_vec.rs b/module/core/reflect_tools/src/reflect/entity_vec.rs index 997e32b18c..5d565abc72 100644 --- a/module/core/reflect_tools/src/reflect/entity_vec.rs +++ b/module/core/reflect_tools/src/reflect/entity_vec.rs @@ -26,7 +26,7 @@ pub mod private CollectionDescriptor::< Self >::new( 0 ) } } - + impl< T > Entity for CollectionDescriptor< Vec< T > > where T : 'static + Instance, @@ -92,8 +92,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/reflect_tools/src/reflect/fields.rs b/module/core/reflect_tools/src/reflect/fields.rs index 2956c8180f..a867713780 100644 --- a/module/core/reflect_tools/src/reflect/fields.rs +++ b/module/core/reflect_tools/src/reflect/fields.rs @@ -114,8 +114,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::private:: diff --git a/module/core/reflect_tools/src/reflect/primitive.rs b/module/core/reflect_tools/src/reflect/primitive.rs index f696eccf75..51abd2ddcb 100644 --- a/module/core/reflect_tools/src/reflect/primitive.rs +++ b/module/core/reflect_tools/src/reflect/primitive.rs @@ -247,8 +247,10 @@ pub mod orphan } /// Exposed namespace of the module. 
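The `Entity` implementations for hash sets, slices and the other containers in these hunks follow one recipe: the descriptor records the collection length, and `elements()` yields one `KeyVal` per index whose value comes from `< T as Instance >::Reflect()`. A rough consumption sketch follows; the import path and the assumption that the value returned by `Reflect()` exposes the `Entity` methods directly are inferred from these hunks, not guaranteed by them.

use reflect_tools::{ Entity, Instance };

// Print what a reflected value reports about itself. Works for any `T` the crate
// gives an `Instance` implementation; the method names called here ( `type_name`,
// `is_container`, `len`, `elements` ) are the ones visible in the hunks above.
fn describe< T : Instance >()
{
  let descriptor = T::Reflect();
  println!( "type : {}", descriptor.type_name() );
  println!( "container : {}", descriptor.is_container() );
  println!( "len : {}", descriptor.len() );
  println!( "elements : {}", descriptor.elements().count() );
}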
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/reflect_tools/src/reflect/wrapper.rs b/module/core/reflect_tools/src/reflect/wrapper.rs index 4cd134650f..36acb18449 100644 --- a/module/core/reflect_tools/src/reflect/wrapper.rs +++ b/module/core/reflect_tools/src/reflect/wrapper.rs @@ -31,8 +31,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super:: diff --git a/module/core/reflect_tools_meta/Cargo.toml b/module/core/reflect_tools_meta/Cargo.toml index 75a842e9a1..0ace8e3a22 100644 --- a/module/core/reflect_tools_meta/Cargo.toml +++ b/module/core/reflect_tools_meta/Cargo.toml @@ -22,7 +22,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [lib] proc-macro = true diff --git a/module/core/strs_tools/Cargo.toml b/module/core/strs_tools/Cargo.toml index 09308568ad..8fbb59a2de 100644 --- a/module/core/strs_tools/Cargo.toml +++ b/module/core/strs_tools/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ diff --git a/module/core/strs_tools/src/lib.rs b/module/core/strs_tools/src/lib.rs index 1c5bc9300e..cd3d26ffb2 100644 --- a/module/core/strs_tools/src/lib.rs +++ b/module/core/strs_tools/src/lib.rs @@ -32,8 +32,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; pub use super::string::exposed::*; } diff --git a/module/core/strs_tools/src/string/indentation.rs b/module/core/strs_tools/src/string/indentation.rs index 16f7208663..a84ce726fa 100644 --- a/module/core/strs_tools/src/string/indentation.rs +++ b/module/core/strs_tools/src/string/indentation.rs @@ -103,8 +103,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; pub use super::protected as indentation; #[ allow( unused_imports ) ] diff --git a/module/core/strs_tools/src/string/isolate.rs b/module/core/strs_tools/src/string/isolate.rs index 78d23f6658..5ad02635ba 100644 --- a/module/core/strs_tools/src/string/isolate.rs +++ b/module/core/strs_tools/src/string/isolate.rs @@ -195,8 +195,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; pub use super::protected as isolate; use super::private as i; diff --git a/module/core/strs_tools/src/string/mod.rs b/module/core/strs_tools/src/string/mod.rs index d15f35b69e..4c2443df0a 100644 --- a/module/core/strs_tools/src/string/mod.rs +++ b/module/core/strs_tools/src/string/mod.rs @@ -54,8 +54,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ cfg( all( feature = "string_indentation", not( feature = "no_std" ) ) ) ] #[ allow( unused_imports ) ] pub use super::indentation::exposed::*; diff --git a/module/core/strs_tools/src/string/number.rs b/module/core/strs_tools/src/string/number.rs index 29da7a5520..cd6696aea3 100644 --- a/module/core/strs_tools/src/string/number.rs +++ b/module/core/strs_tools/src/string/number.rs @@ -34,8 +34,10 @@ pub mod orphan } /// Exposed namespace of the module. 
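Alongside the `use super::*;` addition, the strs_tools hunks keep the crate's convention of re-exporting each submodule's `protected` namespace under a short alias ( `pub use super::protected as indentation;`, `as isolate`, and so on ), which lets callers reach the whole namespace by that name. A minimal sketch of the mechanism, with placeholder names rather than real strs_tools items:

mod isolate_like
{
  /// Protected namespace of the module.
  pub mod protected
  {
    pub fn placeholder() {}
  }

  /// Exposed namespace of the module.
  #[ allow( unused_imports ) ]
  pub mod exposed
  {
    use super::*;
    // The alias makes the whole protected namespace reachable by a short name.
    pub use super::protected as isolate;
  }
}

fn main()
{
  // Both paths resolve to the same item.
  isolate_like::protected::placeholder();
  isolate_like::exposed::isolate::placeholder();
}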
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; pub use super::protected as number; #[ allow( unused_imports ) ] diff --git a/module/core/strs_tools/src/string/parse_request.rs b/module/core/strs_tools/src/string/parse_request.rs index e3c68de8f9..25f0f581d3 100644 --- a/module/core/strs_tools/src/string/parse_request.rs +++ b/module/core/strs_tools/src/string/parse_request.rs @@ -506,8 +506,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; pub use super::protected as parse_request; pub use super::private:: diff --git a/module/core/strs_tools/src/string/split.rs b/module/core/strs_tools/src/string/split.rs index fc9135fdf5..0594f21c8e 100644 --- a/module/core/strs_tools/src/string/split.rs +++ b/module/core/strs_tools/src/string/split.rs @@ -670,8 +670,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; pub use super::protected as split; pub use super::private:: diff --git a/module/core/test_tools/Cargo.toml b/module/core/test_tools/Cargo.toml index 18c79678fc..3c928e30e1 100644 --- a/module/core/test_tools/Cargo.toml +++ b/module/core/test_tools/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + # = features diff --git a/module/core/time_tools/Cargo.toml b/module/core/time_tools/Cargo.toml index e12847049c..6625bd17ca 100644 --- a/module/core/time_tools/Cargo.toml +++ b/module/core/time_tools/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + # include = [ # "/rust/impl/time", # "/Cargo.toml", diff --git a/module/core/time_tools/src/lib.rs b/module/core/time_tools/src/lib.rs index 9af60e8522..25563d52ba 100644 --- a/module/core/time_tools/src/lib.rs +++ b/module/core/time_tools/src/lib.rs @@ -50,8 +50,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/typing_tools/Cargo.toml b/module/core/typing_tools/Cargo.toml index 22f2d3f982..5b128ba0e5 100644 --- a/module/core/typing_tools/Cargo.toml +++ b/module/core/typing_tools/Cargo.toml @@ -25,7 +25,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] diff --git a/module/core/typing_tools/src/lib.rs b/module/core/typing_tools/src/lib.rs index 0b54e804ec..43b070d362 100644 --- a/module/core/typing_tools/src/lib.rs +++ b/module/core/typing_tools/src/lib.rs @@ -57,8 +57,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/typing_tools/src/typing.rs b/module/core/typing_tools/src/typing.rs index 2e434594ae..f3016d04ef 100644 --- a/module/core/typing_tools/src/typing.rs +++ b/module/core/typing_tools/src/typing.rs @@ -32,8 +32,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/variadic_from/Cargo.toml b/module/core/variadic_from/Cargo.toml index 3f41686cfe..bddb31aeaf 100644 --- a/module/core/variadic_from/Cargo.toml +++ b/module/core/variadic_from/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] diff --git a/module/core/variadic_from/src/lib.rs b/module/core/variadic_from/src/lib.rs index e05fa1bee2..23616661d1 100644 --- a/module/core/variadic_from/src/lib.rs +++ b/module/core/variadic_from/src/lib.rs @@ -45,8 +45,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/variadic_from/src/variadic.rs b/module/core/variadic_from/src/variadic.rs index ed30e42677..6929bb9679 100644 --- a/module/core/variadic_from/src/variadic.rs +++ b/module/core/variadic_from/src/variadic.rs @@ -401,8 +401,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/core/wtools/Cargo.toml b/module/core/wtools/Cargo.toml index 3d2ec71314..8488ccd02b 100644 --- a/module/core/wtools/Cargo.toml +++ b/module/core/wtools/Cargo.toml @@ -25,7 +25,7 @@ features = [ "full" ] all-features = false # rustdoc-args = [] -# exclude = [ "/tests", "/examples", "-*" ] + # = features @@ -173,22 +173,22 @@ string_split = [ "string", "strs_tools/string_split", "strs_tools/string_parse_r error = [ "error_tools" ] error_default = [ "error", - "error_for_lib", - "error_for_app", + "error_typed", + "error_untyped", # "error_use_std", ] error_full = [ "error", - "error_for_lib", - "error_for_app", + "error_typed", + "error_untyped", # "error_use_std", ] # error_use_std = [ "error", "error_tools/use_std" ] error_no_std = [ "error", "error_tools/no_std" ] #error_use_alloc = [ "error", "error_tools/use_alloc" ] -error_for_lib = [ "error", "error_tools/error_for_lib" ] -error_for_app = [ "error", "error_tools/error_for_app" ] +error_typed = [ "error", "error_tools/error_typed" ] +error_untyped = [ "error", "error_tools/error_untyped" ] # derive diff --git a/module/core/wtools/src/lib.rs b/module/core/wtools/src/lib.rs index 6f8412f139..9c9b37b6c3 100644 --- a/module/core/wtools/src/lib.rs +++ b/module/core/wtools/src/lib.rs @@ -121,8 +121,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/move/assistant/src/client.rs b/module/move/assistant/src/client.rs index 4cd02b5829..0adc107f96 100644 --- a/module/move/assistant/src/client.rs +++ b/module/move/assistant/src/client.rs @@ -60,8 +60,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/move/assistant/src/lib.rs b/module/move/assistant/src/lib.rs index 4d5468f9f7..d2c66f4231 100644 --- a/module/move/assistant/src/lib.rs +++ b/module/move/assistant/src/lib.rs @@ -41,8 +41,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] diff --git a/module/move/crates_tools/src/lib.rs b/module/move/crates_tools/src/lib.rs index 20a89cd7cf..b797cb5e50 100644 --- a/module/move/crates_tools/src/lib.rs +++ b/module/move/crates_tools/src/lib.rs @@ -149,8 +149,10 @@ pub mod orphan /// Exposed namespace of the module. #[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/move/deterministic_rand/Cargo.toml b/module/move/deterministic_rand/Cargo.toml index d0cc6d1fe5..1a469f1249 100644 --- a/module/move/deterministic_rand/Cargo.toml +++ b/module/move/deterministic_rand/Cargo.toml @@ -23,7 +23,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ "enabled", "determinism" ] diff --git a/module/move/graphs_tools/Cargo.toml b/module/move/graphs_tools/Cargo.toml index 64e17ebbd0..f0eeb97831 100644 --- a/module/move/graphs_tools/Cargo.toml +++ b/module/move/graphs_tools/Cargo.toml @@ -23,7 +23,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ diff --git a/module/move/plot_interface/Cargo.toml b/module/move/plot_interface/Cargo.toml index 39ad1ab4f8..655513f31d 100644 --- a/module/move/plot_interface/Cargo.toml +++ b/module/move/plot_interface/Cargo.toml @@ -23,7 +23,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + include = [ "/rust/impl/_blank", "/Cargo.toml", diff --git a/module/move/sqlx_query/Cargo.toml b/module/move/sqlx_query/Cargo.toml index 08a3ff8a98..fbccba1f74 100644 --- a/module/move/sqlx_query/Cargo.toml +++ b/module/move/sqlx_query/Cargo.toml @@ -21,7 +21,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] default = [ "enabled" ] diff --git a/module/move/sqlx_query/src/lib.rs b/module/move/sqlx_query/src/lib.rs index 1285b5a3de..1a30944275 100644 --- a/module/move/sqlx_query/src/lib.rs +++ b/module/move/sqlx_query/src/lib.rs @@ -114,8 +114,10 @@ pub mod orphan /// Exposed namespace of the module. 
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/move/willbe/Cargo.toml b/module/move/willbe/Cargo.toml index 63c66bd6c0..0fbd4cefdc 100644 --- a/module/move/willbe/Cargo.toml +++ b/module/move/willbe/Cargo.toml @@ -24,16 +24,37 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + [features] -default = [ "enabled", "progress_bar" ] -full = [ "enabled", "progress_bar" ] -enabled = [] +default = [ + "enabled", + "progress_bar", +] +full = [ + "default" +] +enabled = [ + "crates_tools/enabled", + "error_tools/enabled", + "former/enabled", + "iter_tools/enabled", + "mod_interface/enabled", + "wca/enabled", + "proper_path_tools/enabled", + "process_tools/enabled", + "derive_tools/enabled", + "data_type/enabled", + "collection_tools/enabled", + "macro_tools/enabled", +] + tracing = [ "dep:tracing", "dep:tracing-subscriber" ] progress_bar = [ "dep:indicatif" ] [dependencies] + +## external cargo_metadata = "~0.18.1" convert_case = "0.6.0" flate2 = "~1.0" @@ -57,6 +78,8 @@ indicatif = { version = "0.17", optional = true } prettytable-rs = "0.10" serde_json = "1.0" # for CargoMetadata::Package::metadata (need serde_json::Value) serde = "1.0" # for CargoMetadata::Package +parse-display = "0.9" # need because derive_tools don't reexport this correctly +walkdir = "2.3" ## internal crates_tools = { workspace = true } @@ -65,8 +88,12 @@ former = { workspace = true, features = [ "default" ] } iter_tools = { workspace = true, features = [ "default" ] } mod_interface = { workspace = true, features = [ "default" ] } wca = { workspace = true, features = [ "default" ] } -proper_path_tools = { workspace = true, features = [ "default" ] } +proper_path_tools = { workspace = true, features = [ "default", "path_utf8" ] } process_tools = { workspace = true, features = [ "default" ] } +derive_tools = { workspace = true, features = [ "derive_display", "derive_from_str", "derive_deref", "derive_from", "derive_as_ref" ] } +data_type = { workspace = true, features = [ "either" ] } +collection_tools = { workspace = true, features = [ "collection_constructors", "collection_into_constructors" ] } +macro_tools = { workspace = true, features = [ "default" ] } [dev-dependencies] test_tools = { workspace = true } diff --git a/module/move/willbe/src/description.md b/module/move/willbe/src/Readme.md similarity index 100% rename from module/move/willbe/src/description.md rename to module/move/willbe/src/Readme.md diff --git a/module/move/willbe/src/action/cicd_renew.rs b/module/move/willbe/src/action/cicd_renew.rs index 573a2a13bc..989b9238d1 100644 --- a/module/move/willbe/src/action/cicd_renew.rs +++ b/module/move/willbe/src/action/cicd_renew.rs @@ -4,27 +4,22 @@ mod private use std:: { - path::Path, fs::File, io::{ Write, Read }, - collections::BTreeMap }; - // aaa : for Petro : don't use cargo_metadata and Package directly, use facade - // aaa : ✅ + use path::{ Path }; + use collection::BTreeMap; use convert_case::{ Casing, Case }; use handlebars::{ RenderError, TemplateError }; use toml_edit::Document; - use _path::AbsolutePath; - use crate::manifest::private::CrateDirError; - use error_tools::for_lib::Error; - use error_tools::dependency::*; - use workspace::WorkspacePackage; + use entity::PathError; + use error::typed::Error; - use wtools::error::for_app::{ Result, Error as wError }; - use 
entity::WorkspaceError; - use error_tools::err; + use error::untyped::{ Result, Error as wError }; + use entity::WorkspaceInitError; + use error::err; #[ derive( Debug, Error ) ] pub enum CiCdGenerateError @@ -34,9 +29,9 @@ mod private #[ error( "I/O error: {0}" ) ] IO( #[ from ] std::io::Error ), #[ error( "Crate directory error: {0}" ) ] - CrateDir( #[ from ] CrateDirError ), + CrateDir( #[ from ] PathError ), #[ error( "Workspace error: {0}" ) ] - Workspace( #[ from ] WorkspaceError), + Workspace( #[ from ] WorkspaceInitError ), #[ error( "Template error: {0}" ) ] Template( #[ from ] TemplateError ), #[ error( "Render error: {0}" ) ] @@ -47,69 +42,155 @@ mod private /// Generate workflows for modules in .github/workflows directory. pub fn cicd_renew( base_path : &Path ) -> Result< (), CiCdGenerateError > { - let workspace_cache = Workspace::with_crate_dir( AbsolutePath::try_from( base_path )?.try_into()? )?; - let packages = workspace_cache.packages()?; - let username_and_repository = &username_and_repository( &workspace_cache.workspace_root()?.join( "Cargo.toml" ).try_into()?, packages.as_slice() )?; - let workspace_root = workspace_cache.workspace_root()?; + let workspace_cache = Workspace::try_from( CrateDir::try_from( base_path )? )?; + let packages = workspace_cache.packages(); + let username_and_repository = &username_and_repository + ( + &workspace_cache.workspace_root().join( "Cargo.toml" ).to_path_buf().try_into()?, // qqq + packages.clone(), + // packages.as_slice(), + )?; + let workspace_root : &Path = &workspace_cache.workspace_root(); // find directory for workflows let workflow_root = workspace_root.join( ".github" ).join( "workflows" ); // map packages name's to naming standard - // aaa : for Petro : avoid calling packages_get twice - // aaa : remove it - let names = packages.iter().map( | p | p.name() ).collect::< Vec< _ > >(); - // map packages path to relative paths fom workspace root, for example D :/work/wTools/module/core/iter_tools => module/core/iter_tools - let relative_paths = - packages - .iter() - .map( | p | p.manifest_path() ) - .filter_map( | p | p.strip_prefix( workspace_root ).ok() ) - .map( | p | p.with_file_name( "" ) ) - .collect::< Vec< _ > >(); + // let names = packages.map( | p | p.name() ).collect::< Vec< _ > >(); + let names = packages.clone().map( | p | p.name().to_string() ); + + // dbg!( &workflow_root ); + + // map packages path to relative paths from workspace root, + // for example D:/work/wTools/module/core/iter_tools => module/core/iter_tools + let relative_paths = packages + .map( | p | p.manifest_file().ok() ) // aaa : rid of unwrap : removed + .filter_map( | p | + { + // dbg!( &workspace_root ); + Some( path::normalize( workspace_root ) ).and_then( | root_str | + { + + dbg!( &root_str ); + dbg!( &p ); + + Some( p?.strip_prefix( root_str ).ok()?.to_path_buf() ) + //.map( | s | s.display().to_string() ).ok() + }) + }) + .map( | p | + { + path::normalize( p.parent().unwrap() ) + // dbg!( &p ); + // let mut path = PathBuf::from( p ); + // path.set_file_name( "" ); + // path + }); // preparing templates let mut handlebars = handlebars::Handlebars::new(); - handlebars.register_template_string( "auto_pr_to", include_str!( "../../template/workflow/auto_pr_to.hbs" ) )?; - handlebars.register_template_string( "appropraite_branch_for", include_str!( "../../template/workflow/appropraite_branch_for.hbs" ) )?; - handlebars.register_template_string( "auto_merge_to", include_str!( "../../template/workflow/auto_merge_to.hbs" ) )?; - 
handlebars.register_template_string( "standard_rust_pull_request", include_str!( "../../template/workflow/standard_rust_pull_request.hbs" ) )?; - handlebars.register_template_string( "module_push", include_str!( "../../template/workflow/module_push.hbs" ) )?; - + handlebars.register_template_string + ( + "auto_pr_to", + include_str!( "../../template/workflow/auto_pr_to.hbs" ) + )?; + handlebars.register_template_string + ( + "appropraite_branch_for", + include_str!( "../../template/workflow/appropraite_branch_for.hbs" ) + )?; + handlebars.register_template_string + ( + "auto_merge_to", + include_str!( "../../template/workflow/auto_merge_to.hbs" ) + )?; + handlebars.register_template_string + ( + "standard_rust_pull_request", + include_str!( "../../template/workflow/standard_rust_pull_request.hbs" ) + )?; + handlebars.register_template_string + ( + "module_push", + include_str!( "../../template/workflow/module_push.hbs" ) + )?; // qqq : for Petro : instead of iterating each file manually, iterate each file in loop + // use similar::DiffableStr; + // creating workflow for each module - for ( name, relative_path ) in names.iter().zip( relative_paths.iter() ) + for ( name, relative_path ) in names.zip( relative_paths ) { // generate file names - let workflow_file_name = workflow_root.join( format!( "module_{}_push.yml", name.to_case( Case::Snake ) ) ); - let path = relative_path.join( "Cargo.toml" ); - let mut data = BTreeMap::new(); + let workflow_file_name = workflow_root + .join( format!( "module_{}_push.yml", name.to_case( Case::Snake ) ) ); + let manifest_file = relative_path.join( "Cargo.toml" ); + let mut data : BTreeMap< &str, &str > = BTreeMap::new(); data.insert( "name", name.as_str() ); data.insert( "username_and_repository", username_and_repository.0.as_str() ); data.insert( "branch", "alpha" ); - let path = path.as_str().replace( "\\", "/" ); - data.insert( "manifest_path", path.as_str() ); + let manifest_file = manifest_file.to_string_lossy().replace( "\\", "/" ); + let manifest_file = manifest_file.trim_start_matches( '/' ); + data.insert( "manifest_path", manifest_file ); let content = handlebars.render( "module_push", &data )?; file_write( &workflow_file_name, &content )?; + + println!( "file_write : {:?}", &workflow_file_name ) } - file_write( &workflow_root.join( "appropriate_branch.yml" ), include_str!( "../../template/workflow/appropriate_branch.yml" ) )?; + dbg!( &workflow_root ); + + file_write + ( + &workflow_root + .join("appropriate_branch.yml" ), + include_str!( "../../template/workflow/appropriate_branch.yml" ) + )?; + + let data = map_prepare_for_appropriative_branch + ( + "- beta", + username_and_repository.0.as_str(), + "alpha", + "alpha", + "beta" + ); + file_write + ( + &workflow_root.join( "appropriate_branch_beta.yml" ), + &handlebars.render( "appropraite_branch_for", &data )? + )?; - let data = map_prepare_for_appropriative_branch( "- beta", username_and_repository.0.as_str(), "alpha", "alpha", "beta" ); - file_write( &workflow_root.join( "appropriate_branch_beta.yml" ), &handlebars.render( "appropraite_branch_for", &data )? )?; + let data = map_prepare_for_appropriative_branch + ( + "- main\n - master", + username_and_repository.0.as_str(), + "alpha", + "beta", + "master" + ); - let data = map_prepare_for_appropriative_branch( "- main\n - master", username_and_repository.0.as_str(), "alpha", "beta", "master" ); - file_write( &workflow_root.join( "appropriate_branch_master.yml" ), &handlebars.render( "appropraite_branch_for", &data )? 
)?; + file_write + ( + &workflow_root.join( "appropriate_branch_master.yml" ), + &handlebars.render( "appropraite_branch_for", &data )? + )?; let mut data = BTreeMap::new(); data.insert( "name", "beta" ); data.insert( "group_branch", "beta" ); data.insert( "branch", "alpha" ); - file_write( &workflow_root.join( "auto_merge_to_beta.yml" ), &handlebars.render( "auto_merge_to", &data )? )?; - - file_write( &workflow_root.join( "auto_pr.yml" ), include_str!( "../../template/workflow/auto_pr.yml" ) )?; + file_write + ( + &workflow_root.join( "auto_merge_to_beta.yml" ), + &handlebars.render( "auto_merge_to", &data )? + )?; + file_write + ( + &workflow_root.join( "auto_pr.yml" ), + include_str!( "../../template/workflow/auto_pr.yml" ) + )?; let mut data = BTreeMap::new(); data.insert( "name", "alpha" ); @@ -135,7 +216,11 @@ mod private data.insert( "src_branch", "${{ github.ref_name }}" ); data.insert( "dest_branch", "alpha" ); - file_write( &workflow_root.join( "auto_pr_to_alpha.yml" ), &handlebars.render( "auto_pr_to", &data )? )?; + file_write + ( + &workflow_root.join( "auto_pr_to_alpha.yml" ), + &handlebars.render( "auto_pr_to", &data )? + )?; let mut data = BTreeMap::new(); data.insert( "name", "beta" ); @@ -145,7 +230,11 @@ mod private data.insert( "src_branch", "alpha" ); data.insert( "dest_branch", "beta" ); - file_write( &workflow_root.join( "auto_pr_to_beta.yml" ), &handlebars.render( "auto_pr_to", &data )? )?; + file_write + ( + &workflow_root.join( "auto_pr_to_beta.yml" ), + &handlebars.render( "auto_pr_to", &data )? + )?; let mut data = BTreeMap::new(); data.insert( "name", "master" ); @@ -155,27 +244,63 @@ mod private data.insert( "src_branch", "beta" ); data.insert( "dest_branch", "master" ); - file_write( &workflow_root.join( "auto_pr_to_master.yml" ), &handlebars.render( "auto_pr_to", &data )? )?; + file_write + ( + &workflow_root.join( "auto_pr_to_master.yml" ), + &handlebars.render( "auto_pr_to", &data )? + )?; - file_write( &workflow_root.join( "runs_clean.yml" ), include_str!( "../../template/workflow/rust_clean.yml" ) )?; + file_write + ( + &workflow_root.join( "runs_clean.yml" ), + include_str!( "../../template/workflow/rust_clean.yml" ) + )?; let mut data = BTreeMap::new(); data.insert( "username_and_repository", username_and_repository.0.as_str() ); - file_write( &workflow_root.join( "standard_rust_pull_request.yml" ), &handlebars.render( "standard_rust_pull_request", &data )? )?; + file_write + ( + &workflow_root.join( "standard_rust_pull_request.yml" ), + &handlebars.render( "standard_rust_pull_request", &data )? 
+ )?; - file_write( &workflow_root.join( "standard_rust_push.yml" ), include_str!( "../../template/workflow/standard_rust_push.yml" ) )?; + file_write + ( + &workflow_root.join( "standard_rust_push.yml" ), + include_str!( "../../template/workflow/standard_rust_push.yml" ) + )?; - file_write( &workflow_root.join( "for_pr_rust_push.yml" ), include_str!( "../../template/workflow/for_pr_rust_push.yml" ) )?; + file_write + ( + &workflow_root.join( "for_pr_rust_push.yml" ), + include_str!( "../../template/workflow/for_pr_rust_push.yml" ) + )?; + + file_write + ( + &workflow_root.join( "standard_rust_scheduled.yml" ), + include_str!( "../../template/workflow/standard_rust_scheduled.yml" ) + )?; - file_write( &workflow_root.join( "standard_rust_scheduled.yml" ), include_str!( "../../template/workflow/standard_rust_scheduled.yml" ) )?; + file_write + ( + &workflow_root.join( "standard_rust_status.yml" ), + include_str!( "../../template/workflow/standard_rust_status.yml" ) + )?; - file_write( &workflow_root.join( "standard_rust_status.yml" ), include_str!( "../../template/workflow/standard_rust_status.yml" ) )?; + file_write + ( + &workflow_root.join( "status_checks_rules_update.yml" ), + include_str!( "../../template/workflow/status_checks_rules_update.yml" ) + )?; - file_write( &workflow_root.join( "status_checks_rules_update.yml" ), include_str!( "../../template/workflow/status_checks_rules_update.yml" ) )?; + file_write + ( + &workflow_root.join( "Readme.md" ), + include_str!( "../../template/workflow/Readme.md" ) + )?; - file_write( &workflow_root.join( "Readme.md" ), include_str!( "../../template/workflow/Readme.md" ) )?; - Ok( () ) } @@ -220,18 +345,17 @@ mod private #[derive( Debug ) ] struct UsernameAndRepository( String ); - // aaa : for Petro : not clear how output should look - // aaa : add to documentation - // aaa : for Petro : newtype? - // aaa : replace to AbsolutePath - // aaa : for Petro : why mut? - // aaa : change signature /// Searches and extracts the username and repository name from the repository URL. /// The repository URL is first sought in the Cargo.toml file of the workspace; /// if not found there, it is then searched in the Cargo.toml file of the module. /// If it is still not found, the search continues in the GitHub remotes. /// Result looks like this: `Wandalen/wTools` - fn username_and_repository( cargo_toml_path : &AbsolutePath, packages : &[ WorkspacePackage ] ) -> Result< UsernameAndRepository > + fn username_and_repository< 'a > + ( + cargo_toml_path : &AbsolutePath, + packages : impl Iterator< Item = WorkspacePackageRef< 'a > >, + ) + -> Result< UsernameAndRepository > { let mut contents = String::new(); File::open( cargo_toml_path )?.read_to_string( &mut contents )?; @@ -245,7 +369,7 @@ mod private .map( String::from ); if let Some( url ) = url { - return url::extract_repo_url( &url ) + return url::repo_url_extract( &url ) .and_then( | url | url::git_info_extract( &url ).ok() ) .map( UsernameAndRepository ) .ok_or_else( || err!( "Fail to parse repository url from workspace Cargo.toml")) @@ -255,14 +379,15 @@ mod private let mut url = None; for package in packages { - if let Ok( wu ) = manifest::private::repo_url( package.manifest_path().parent().unwrap().as_std_path() ) + // if let Ok( wu ) = manifest::private::repo_url( package.manifest_file().parent().unwrap().as_std_path() ) + if let Ok( wu ) = manifest::repo_url( &package.crate_dir()? 
) { url = Some( wu ); break; } } return url - .and_then( | url | url::extract_repo_url( &url ) ) + .and_then( | url | url::repo_url_extract( &url ) ) .and_then( | url | url::git_info_extract( &url ).ok() ) .map( UsernameAndRepository ) .ok_or_else( || err!( "Fail to extract repository url") ) diff --git a/module/move/willbe/src/action/deploy_renew.rs b/module/move/willbe/src/action/deploy_renew.rs index 2bd04f640c..8c45a714cb 100644 --- a/module/move/willbe/src/action/deploy_renew.rs +++ b/module/move/willbe/src/action/deploy_renew.rs @@ -2,153 +2,149 @@ mod private { use crate::*; use std::path::Path; - use error_tools::{for_app::Context, Result}; + use error::{ untyped::Context, Result }; use tool::template::*; - /// Template for creating deploy files. - /// - /// Includes terraform deploy options to GCP, and Hetzner, - /// a Makefile for useful commands, and a key directory. - #[ derive( Debug ) ] - pub struct DeployTemplate - { - files : DeployTemplateFiles, - parameters : TemplateParameters, - values : TemplateValues, - } - - // qqq : for Viktor : why DeployTemplate can't be part of template.rs? - - impl Template< DeployTemplateFiles > for DeployTemplate - { - fn create_all( self, path : &Path ) -> Result< () > - { - self.files.create_all( path, &self.values ) - } - - fn parameters( &self ) -> &TemplateParameters - { - &self.parameters - } - - fn set_values( &mut self, values : TemplateValues ) - { - self.values = values - } - - fn get_values( &self ) -> &TemplateValues - { - &self.values - } - - fn get_values_mut( &mut self ) -> &mut TemplateValues - { - &mut self.values - } - - fn parameter_storage( &self ) -> &Path { - "./.deploy_template.toml".as_ref() - } - - fn template_name( &self ) -> &'static str { - "deploy" - } - } - - impl Default for DeployTemplate - { - fn default() -> Self - { - let parameters = TemplateParameters::former() - .parameter( "gcp_project_id" ).is_mandatory( true ).end() - .parameter( "gcp_region" ).end() - .parameter( "gcp_artifact_repo_name" ).end() - .parameter( "docker_image_name" ).end() - .form(); - - Self - { - files : Default::default(), - parameters, - values : Default::default(), - } - } - } - - // qqq : for Viktor : is that structure required? - /// Files for the deploy template. - /// - /// Default implementation contains all required files. 
- #[ derive( Debug ) ] - pub struct DeployTemplateFiles( Vec< TemplateFileDescriptor > ); - - impl Default for DeployTemplateFiles - { - fn default() -> Self - { - let formed = TemplateFilesBuilder::former() - // root - .file().data( include_str!( "../../template/deploy/.deploy_template.toml.hbs" ) ).path( "./.deploy_template.toml" ).mode( WriteMode::TomlExtend ).is_template( true ).end() - .file().data( include_str!( "../../template/deploy/Makefile.hbs" ) ).path( "./Makefile" ).is_template( true ).end() - // /key - .file().data( include_str!( "../../template/deploy/key/pack.sh" ) ).path( "./key/pack.sh" ).end() - .file().data( include_str!( "../../template/deploy/key/Readme.md" ) ).path( "./key/Readme.md" ).end() - // /deploy/ - .file().data( include_str!( "../../template/deploy/deploy/Dockerfile" ) ).path( "./deploy/Dockerfile" ).end() - .file().data( include_str!( "../../template/deploy/deploy/Readme.md" ) ).path( "./deploy/Readme.md" ).end() - // /deploy/gar - .file().data( include_str!( "../../template/deploy/deploy/gar/Readme.md" ) ).path( "./deploy/gar/Readme.md" ).end() - .file().data( include_str!( "../../template/deploy/deploy/gar/main.tf" ) ).path( "./deploy/gar/main.tf" ).end() - .file().data( include_str!( "../../template/deploy/deploy/gar/outputs.tf" ) ).path( "./deploy/gar/outputs.tf" ).end() - .file().data( include_str!( "../../template/deploy/deploy/gar/variables.tf" ) ).path( "./deploy/gar/variables.tf" ).end() - // /deploy/gce - .file().data( include_str!( "../../template/deploy/deploy/gce/Readme.md" ) ).path( "./deploy/gce/Readme.md" ).end() - .file().data( include_str!( "../../template/deploy/deploy/gce/main.tf" ) ).path( "./deploy/gce/main.tf" ).end() - .file().data( include_str!( "../../template/deploy/deploy/gce/outputs.tf" ) ).path( "./deploy/gce/outputs.tf" ).end() - .file().data( include_str!( "../../template/deploy/deploy/gce/variables.tf" ) ).path( "./deploy/gce/variables.tf" ).end() - // /deploy/gce/templates - .file().data( include_str!( "../../template/deploy/deploy/gce/templates/cloud-init.tpl" ) ).path( "./deploy/gce/templates/cloud-init.tpl" ).end() - // /deploy/gcs - .file().data( include_str!( "../../template/deploy/deploy/gcs/main.tf" ) ).path( "./deploy/gcs/main.tf" ).end() - // /deploy/hetzner - .file().data( include_str!( "../../template/deploy/deploy/hetzner/main.tf" ) ).path( "./deploy/hetzner/main.tf" ).end() - .file().data( include_str!( "../../template/deploy/deploy/hetzner/outputs.tf" ) ).path( "./deploy/hetzner/outputs.tf" ).end() - .file().data( include_str!( "../../template/deploy/deploy/hetzner/variables.tf" ) ).path( "./deploy/hetzner/variables.tf" ).end() - // /deploy/hetzner/templates - .file().data( include_str!( "../../template/deploy/deploy/hetzner/templates/cloud-init.tpl" ) ).path( "./deploy/hetzner/templates/cloud-init.tpl" ).end() - // /deploy/aws - .file().data( include_str!( "../../template/deploy/deploy/aws/main.tf" ) ).path( "./deploy/aws/main.tf" ).end() - .file().data( include_str!( "../../template/deploy/deploy/aws/outputs.tf" ) ).path( "./deploy/aws/outputs.tf" ).end() - .file().data( include_str!( "../../template/deploy/deploy/aws/variables.tf" ) ).path( "./deploy/aws/variables.tf" ).end() - // /deploy/aws/templates - .file().data( include_str!( "../../template/deploy/deploy/aws/templates/cloud-init.tpl" ) ).path( "./deploy/aws/templates/cloud-init.tpl" ).end() - .form(); - - Self( formed.files ) - } - } - - // qqq : for Viktor : should not be required - impl TemplateFiles for DeployTemplateFiles {} - // qqq : for Viktor 
: should not be required - impl IntoIterator for DeployTemplateFiles - { - type Item = TemplateFileDescriptor; - - type IntoIter = std::vec::IntoIter< Self::Item >; - - fn into_iter( self ) -> Self::IntoIter - { - self.0.into_iter() - } - } - - fn get_dir_name() -> Result< String > - { - let current_dir = std::env::current_dir()?; - let current_dir = current_dir.components().last().context( "Invalid current directory" )?; - Ok( current_dir.as_os_str().to_string_lossy().into() ) - } + // /// Template for creating deploy files. + // /// + // /// Includes terraform deploy options to GCP, and Hetzner, + // /// a Makefile for useful commands, and a key directory. + // #[ derive( Debug ) ] + // pub struct DeployTemplate + // { + // files : DeployTemplateFiles, + // parameters : TemplateParameters, + // values : TemplateValues, + // } + + // // qqq : for Viktor : why DeployTemplate can't be part of template.rs? + + // impl Template< DeployTemplateFiles > for DeployTemplate + // { + // fn create_all( self, path : &Path ) -> Result< () > + // { + // self.files.create_all( path, &self.values ) + // } + + // fn parameters( &self ) -> &TemplateParameters + // { + // &self.parameters + // } + + // fn set_values( &mut self, values : TemplateValues ) + // { + // self.values = values + // } + + // fn get_values( &self ) -> &TemplateValues + // { + // &self.values + // } + + // fn get_values_mut( &mut self ) -> &mut TemplateValues + // { + // &mut self.values + // } + + // fn parameter_storage( &self ) -> &Path { + // "./.deploy_template.toml".as_ref() + // } + + // fn template_name( &self ) -> &'static str { + // "deploy" + // } + // } + + // impl Default for DeployTemplate + // { + // fn default() -> Self + // { + // let parameters = TemplateParameters::former() + // .parameter( "gcp_project_id" ).is_mandatory( true ).end() + // .parameter( "gcp_region" ).end() + // .parameter( "gcp_artifact_repo_name" ).end() + // .parameter( "docker_image_name" ).end() + // .form(); + + // Self + // { + // files : Default::default(), + // parameters, + // values : Default::default(), + // } + // } + // } + + // // qqq : for Viktor : is that structure required? + // /// Files for the deploy template. + // /// + // /// Default implementation contains all required files. 
+ // #[ derive( Debug ) ] + // pub struct DeployTemplateFiles( Vec< TemplateFileDescriptor > ); + + // impl Default for DeployTemplateFiles + // { + // fn default() -> Self + // { + // let formed = TemplateFilesBuilder::former() + // // root + // .file().data( include_str!( "../../template/deploy/.deploy_template.toml.hbs" ) ).path( "./.deploy_template.toml" ).mode( WriteMode::TomlExtend ).is_template( true ).end() + // .file().data( include_str!( "../../template/deploy/Makefile.hbs" ) ).path( "./Makefile" ).is_template( true ).end() + // // /key + // .file().data( include_str!( "../../template/deploy/key/pack.sh" ) ).path( "./key/pack.sh" ).end() + // .file().data( include_str!( "../../template/deploy/key/Readme.md" ) ).path( "./key/Readme.md" ).end() + // // /deploy/ + // .file().data( include_str!( "../../template/deploy/deploy/Dockerfile" ) ).path( "./deploy/Dockerfile" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/Readme.md" ) ).path( "./deploy/Readme.md" ).end() + // // /deploy/gar + // .file().data( include_str!( "../../template/deploy/deploy/gar/Readme.md" ) ).path( "./deploy/gar/Readme.md" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/gar/main.tf" ) ).path( "./deploy/gar/main.tf" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/gar/outputs.tf" ) ).path( "./deploy/gar/outputs.tf" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/gar/variables.tf" ) ).path( "./deploy/gar/variables.tf" ).end() + // // /deploy/gce + // .file().data( include_str!( "../../template/deploy/deploy/gce/Readme.md" ) ).path( "./deploy/gce/Readme.md" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/gce/main.tf" ) ).path( "./deploy/gce/main.tf" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/gce/outputs.tf" ) ).path( "./deploy/gce/outputs.tf" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/gce/variables.tf" ) ).path( "./deploy/gce/variables.tf" ).end() + // // /deploy/gce/templates + // .file().data( include_str!( "../../template/deploy/deploy/gce/templates/cloud-init.tpl" ) ).path( "./deploy/gce/templates/cloud-init.tpl" ).end() + // // /deploy/gcs + // .file().data( include_str!( "../../template/deploy/deploy/gcs/main.tf" ) ).path( "./deploy/gcs/main.tf" ).end() + // // /deploy/hetzner + // .file().data( include_str!( "../../template/deploy/deploy/hetzner/main.tf" ) ).path( "./deploy/hetzner/main.tf" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/hetzner/outputs.tf" ) ).path( "./deploy/hetzner/outputs.tf" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/hetzner/variables.tf" ) ).path( "./deploy/hetzner/variables.tf" ).end() + // // /deploy/hetzner/templates + // .file().data( include_str!( "../../template/deploy/deploy/hetzner/templates/cloud-init.tpl" ) ).path( "./deploy/hetzner/templates/cloud-init.tpl" ).end() + // // /deploy/aws + // .file().data( include_str!( "../../template/deploy/deploy/aws/main.tf" ) ).path( "./deploy/aws/main.tf" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/aws/outputs.tf" ) ).path( "./deploy/aws/outputs.tf" ).end() + // .file().data( include_str!( "../../template/deploy/deploy/aws/variables.tf" ) ).path( "./deploy/aws/variables.tf" ).end() + // // /deploy/aws/templates + // .file().data( include_str!( "../../template/deploy/deploy/aws/templates/cloud-init.tpl" ) ).path( "./deploy/aws/templates/cloud-init.tpl" ).end() + // .form(); + + // Self( 
formed.files ) + // } + // } + + // // qqq : for Viktor : should not be required + // impl TemplateFiles for DeployTemplateFiles {} + // // qqq : for Viktor : should not be required + // impl IntoIterator for DeployTemplateFiles + // { + // type Item = TemplateFileDescriptor; + + // type IntoIter = std::vec::IntoIter< Self::Item >; + + // fn into_iter( self ) -> Self::IntoIter + // { + // self.0.into_iter() + // } + // } + + // aaa : for Petro : redundant function + // aaa : this function is not mine, but ok, I'll remove it. fn dir_name_to_formatted( dir_name : &str, separator : &str ) -> String { @@ -162,17 +158,32 @@ mod private pub fn deploy_renew ( path : &Path, - mut template : DeployTemplate + mut template : TemplateHolder ) -> Result< () > { if let None = template.load_existing_params( path ) { - let current_dir = get_dir_name()?; + let current_dir = std::env::current_dir()?; + // qqq : for Petro : use file_name + // qqq : for Kos : bad description + let current_dir = current_dir + .components() + .last() + .context( "Invalid current directory" )?; + + let current_dir = current_dir.as_os_str().to_string_lossy(); let artifact_repo_name = dir_name_to_formatted( &current_dir, "-" ); let docker_image_name = dir_name_to_formatted( &current_dir, "_" ); - template.values.insert_if_empty( "gcp_artifact_repo_name", wca::Value::String( artifact_repo_name ) ); - template.values.insert_if_empty( "docker_image_name", wca::Value::String( docker_image_name ) ); - template.values.insert_if_empty( "gcp_region", wca::Value::String( "europe-central2".into() ) ); + template + .values + .insert_if_empty( "gcp_artifact_repo_name", wca::Value::String( artifact_repo_name ) ); + + template + .values + .insert_if_empty( "docker_image_name", wca::Value::String( docker_image_name ) ); + template + .values + .insert_if_empty( "gcp_region", wca::Value::String( "europe-central2".into() ) ); } template.create_all( path )?; Ok( () ) @@ -183,5 +194,5 @@ mod private crate::mod_interface! { orphan use deploy_renew; - orphan use DeployTemplate; + //orphan use DeployTemplate; } diff --git a/module/move/willbe/src/action/features.rs b/module/move/willbe/src/action/features.rs index 47b9e98f44..6560d1982b 100644 --- a/module/move/willbe/src/action/features.rs +++ b/module/move/willbe/src/action/features.rs @@ -4,20 +4,21 @@ mod private use std:: { - collections::{ BTreeMap, HashMap }, fmt }; + use collection::{ BTreeMap, HashMap }; - use _path::AbsolutePath; + // // use path::AbsolutePath; use former::Former; - use error_tools::{ for_app::Context, Result }; - use workspace::Workspace; + use error::{ untyped::Context, Result }; + // use workspace::Workspace; /// Options available for the .features command #[ derive( Debug, Former ) ] pub struct FeaturesOptions { - manifest_dir : AbsolutePath, + // crate_dir : AbsolutePath, + crate_dir : CrateDir, with_features_deps : bool, } @@ -62,23 +63,38 @@ mod private ) } ) - } + } } /// List features - pub fn features( FeaturesOptions { manifest_dir, with_features_deps } : FeaturesOptions ) -> Result< FeaturesReport > + pub fn features( FeaturesOptions { crate_dir, with_features_deps } : FeaturesOptions ) + -> Result< FeaturesReport > { - let workspace = Workspace::with_crate_dir( CrateDir::try_from( manifest_dir.clone() )? 
).context( "Failed to find workspace" )?; - let packages = workspace.packages()?.into_iter().filter - ( | package | - package.manifest_path().as_str().starts_with( manifest_dir.as_ref().as_os_str().to_str().unwrap() ) - ).collect::< Vec< _ > >(); + let workspace = Workspace::try_from( crate_dir.clone() ).context( "Failed to find workspace" )?; + let packages = workspace.packages().filter + ( + | package | + { + if let Ok( manifest_file ) = package.manifest_file() + { + manifest_file.inner().starts_with(crate_dir.clone().absolute_path()) + } + else + { + false + } + } // aaa : remove unwrap + // aaa : done + ); + // ).collect::< Vec< _ > >(); qqq : rid of. put type at var let mut report = FeaturesReport { with_features_deps, ..Default::default() }; - packages.iter().for_each + packages + // .iter() + .for_each ( | package | { let features = package.features(); diff --git a/module/move/willbe/src/action/list.rs b/module/move/willbe/src/action/list.rs index 6e4bd01dc0..73e50679e4 100644 --- a/module/move/willbe/src/action/list.rs +++ b/module/move/willbe/src/action/list.rs @@ -2,35 +2,25 @@ mod private { use crate::*; - use std:: - { - fmt::{ Formatter, Write }, - path::PathBuf, - collections::HashSet, - }; - use std::collections::HashMap; + + use std::{ fmt, str }; use petgraph:: { - prelude::*, + prelude::{ Dfs, EdgeRef }, algo::toposort, visit::Topo, + Graph, }; - use std::str::FromStr; - use packages::FilterMapOptions; - use wtools::error:: + use { - for_app::{ Error, Context }, - err + error::{ Context, untyped, format_err, err }, + error::ErrWith, }; - // aaa : for Petro : don't use cargo_metadata and Package directly, use facade - // aaa : ✅ - - use petgraph::prelude::{ Dfs, EdgeRef }; - use former::Former; + use tool::{ TreePrinter, ListNodeReport }; - use workspace::Workspace; - use _path::AbsolutePath; - use workspace::WorkspacePackage; +// use petgraph::prelude::{ Dfs, EdgeRef }; +// use former::Former; +// use workspace::Workspace; /// Args for `list` action. #[ derive( Debug, Default, Copy, Clone ) ] @@ -43,9 +33,9 @@ mod private Topological, } - impl FromStr for ListFormat + impl str::FromStr for ListFormat { - type Err = Error; + type Err = untyped::Error; fn from_str( s : &str ) -> Result< Self, Self::Err > { @@ -109,9 +99,9 @@ mod private Local, } - impl FromStr for ListFilter + impl str::FromStr for ListFilter { - type Err = Error; + type Err = untyped::Error; fn from_str( s : &str ) -> Result< Self, Self::Err > { @@ -144,144 +134,143 @@ mod private /// - `path_to_manifest`: A `CrateDir` representing the path to the manifest of the crates. /// - `format`: A `ListFormat` enum representing the desired format of the output. /// - `dependency_sources`: A `HashSet` of `DependencySource` representing the sources of the dependencies. - #[ derive( Debug, Former ) ] + #[ derive( Debug, former::Former ) ] pub struct ListOptions { path_to_manifest : CrateDir, format : ListFormat, - info : HashSet< PackageAdditionalInfo >, - dependency_sources : HashSet< DependencySource >, - dependency_categories : HashSet< DependencyCategory >, - } - - struct Symbols - { - down : &'static str, - tee : &'static str, - ell : &'static str, - right : &'static str, - } - - // qqq : fro Bohdan : abstract and move out tree printing. or reuse ready solution for tree printing - // stick to single responsibility - const UTF8_SYMBOLS : Symbols = Symbols - { - down : "│", - tee : "├", - ell : "└", - right : "─", - }; - - /// Represents a node in a dependency graph. 
- /// It holds essential information about the project dependencies. It is also capable - /// of holding any nested dependencies in a recursive manner, allowing the modeling - /// of complex dependency structures. - #[ derive( Debug, Clone, Eq, PartialEq ) ] - pub struct ListNodeReport - { - /// This could be the name of the library or crate. - pub name : String, - /// Ihe version of the crate. - pub version : Option< String >, - /// The path to the node's source files in the local filesystem. This is - /// optional as not all nodes may have a local presence (e.g., nodes representing remote crates). - pub path : Option< PathBuf >, - /// This field is a flag indicating whether the Node is a duplicate or not. - pub duplicate : bool, - /// A list that stores normal dependencies. - /// Each element in the list is also of the same 'ListNodeReport' type to allow - /// storage of nested dependencies. - pub normal_dependencies : Vec< ListNodeReport >, - /// A list that stores dev dependencies(dependencies required for tests or examples). - /// Each element in the list is also of the same 'ListNodeReport' type to allow - /// storage of nested dependencies. - pub dev_dependencies : Vec< ListNodeReport >, - /// A list that stores build dependencies. - /// Each element in the list is also of the same 'ListNodeReport' type to allow - /// storage of nested dependencies. - pub build_dependencies : Vec< ListNodeReport >, + info : collection::HashSet< PackageAdditionalInfo >, + dependency_sources : collection::HashSet< DependencySource >, + dependency_categories : collection::HashSet< DependencyCategory >, } - impl ListNodeReport - { - /// Displays the name, version, path, and dependencies of a package with appropriate indentation and spacing. - /// - /// # Arguments - /// - /// * `spacer` - A string used for indentation. - /// - /// # Returns - /// - /// * A `Result` containing the formatted string or a `std::fmt::Error` if formatting fails. - pub fn display_with_spacer( &self, spacer : &str ) -> Result< String, std::fmt::Error > - { - let mut f = String::new(); - - write!( f, "{}", self.name )?; - if let Some( version ) = &self.version { write!( f, " {version}" )? } - if let Some( path ) = &self.path { write!( f, " {}", path.display() )? } - if self.duplicate { write!( f, "(*)" )? } - write!( f, "\n" )?; - - let mut new_spacer = format!( "{spacer}{} ", if self.normal_dependencies.len() < 2 { " " } else { UTF8_SYMBOLS.down } ); - - let mut normal_dependencies_iter = self.normal_dependencies.iter(); - let last = normal_dependencies_iter.next_back(); - - for dep in normal_dependencies_iter - { - write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.tee, UTF8_SYMBOLS.right, dep.display_with_spacer( &new_spacer )? )?; - } - if let Some( last ) = last - { - new_spacer = format!( "{spacer} " ); - write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.ell, UTF8_SYMBOLS.right, last.display_with_spacer( &new_spacer )? )?; - } - if !self.dev_dependencies.is_empty() - { - let mut dev_dependencies_iter = self.dev_dependencies.iter(); - let last = dev_dependencies_iter.next_back(); - write!( f, "{spacer}[dev-dependencies]\n" )?; - for dep in dev_dependencies_iter - { - write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.tee, UTF8_SYMBOLS.right, dep.display_with_spacer( &new_spacer )? )?; - } - // unwrap - safe because `is_empty` check - write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.ell, UTF8_SYMBOLS.right, last.unwrap().display_with_spacer( &new_spacer )? 
)?; - } - if !self.build_dependencies.is_empty() - { - let mut build_dependencies_iter = self.build_dependencies.iter(); - let last = build_dependencies_iter.next_back(); - write!( f, "{spacer}[build-dependencies]\n" )?; - for dep in build_dependencies_iter - { - write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.tee, UTF8_SYMBOLS.right, dep.display_with_spacer( &new_spacer )? )?; - } - // unwrap - safe because `is_empty` check - write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.ell, UTF8_SYMBOLS.right, last.unwrap().display_with_spacer( &new_spacer )? )?; - } - - Ok( f ) - } - } - - impl std::fmt::Display for ListNodeReport - { - fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result - { - write!( f, "{}", self.display_with_spacer( "" )? )?; - - Ok( () ) - } - } + // struct Symbols + // { + // down : &'static str, + // tee : &'static str, + // ell : &'static str, + // right : &'static str, + // } + + // // qqq : for Mykyta : make facade, abstract and move out tree printing. or reuse ready solution for tree printing + // // stick to single responsibility + // const UTF8_SYMBOLS : Symbols = Symbols + // { + // down : "│", + // tee : "├", + // ell : "└", + // right : "─", + // }; + + // /// Represents a node in a dependency graph. + // /// It holds essential information about the project dependencies. It is also capable + // /// of holding any nested dependencies in a recursive manner, allowing the modeling + // /// of complex dependency structures. + // #[ derive( Debug, Clone, Eq, PartialEq ) ] + // pub struct ListNodeReport + // { + // /// This could be the name of the library or crate. + // pub name : String, + // /// Ihe version of the crate. + // pub version : Option< String >, + // /// The path to the node's source files in the local filesystem. This is + // /// optional as not all nodes may have a local presence (e.g., nodes representing remote crates). + // pub crate_dir : Option< CrateDir >, + // /// This field is a flag indicating whether the Node is a duplicate or not. + // pub duplicate : bool, + // /// A list that stores normal dependencies. + // /// Each element in the list is also of the same 'ListNodeReport' type to allow + // /// storage of nested dependencies. + // pub normal_dependencies : Vec< ListNodeReport >, + // /// A list that stores dev dependencies(dependencies required for tests or examples). + // /// Each element in the list is also of the same 'ListNodeReport' type to allow + // /// storage of nested dependencies. + // pub dev_dependencies : Vec< ListNodeReport >, + // /// A list that stores build dependencies. + // /// Each element in the list is also of the same 'ListNodeReport' type to allow + // /// storage of nested dependencies. + // pub build_dependencies : Vec< ListNodeReport >, + // } + + // impl ListNodeReport + // { + // /// Displays the name, version, path, and dependencies of a package with appropriate indentation and spacing. + // /// + // /// # Arguments + // /// + // /// * `spacer` - A string used for indentation. + // /// + // /// # Returns + // /// + // /// * A `Result` containing the formatted string or a `std::fmt::Error` if formatting fails. + // pub fn display_with_spacer( &self, spacer : &str ) -> Result< String, std::fmt::Error > + // { + // let mut f = String::new(); + + // write!( f, "{}", self.name )?; + // if let Some( version ) = &self.version { write!( f, " {version}" )? } + // if let Some( crate_dir ) = &self.crate_dir { write!( f, " {}", crate_dir )? } + // if self.duplicate { write!( f, "(*)" )? 
} + // write!( f, "\n" )?; + + // let mut new_spacer = format!( "{spacer}{} ", if self.normal_dependencies.len() < 2 { " " } else { UTF8_SYMBOLS.down } ); + // let mut normal_dependencies_iter = self.normal_dependencies.iter(); + // let last = normal_dependencies_iter.next_back(); + + // for dep in normal_dependencies_iter + // { + // write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.tee, UTF8_SYMBOLS.right, dep.display_with_spacer( &new_spacer )? )?; + // } + // if let Some( last ) = last + // { + // new_spacer = format!( "{spacer} " ); + // write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.ell, UTF8_SYMBOLS.right, last.display_with_spacer( &new_spacer )? )?; + // } + // if !self.dev_dependencies.is_empty() + // { + // let mut dev_dependencies_iter = self.dev_dependencies.iter(); + // let last = dev_dependencies_iter.next_back(); + // write!( f, "{spacer}[dev-dependencies]\n" )?; + // for dep in dev_dependencies_iter + // { + // write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.tee, UTF8_SYMBOLS.right, dep.display_with_spacer( &new_spacer )? )?; + // } + // // unwrap - safe because `is_empty` check + // write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.ell, UTF8_SYMBOLS.right, last.unwrap().display_with_spacer( &new_spacer )? )?; + // } + // if !self.build_dependencies.is_empty() + // { + // let mut build_dependencies_iter = self.build_dependencies.iter(); + // let last = build_dependencies_iter.next_back(); + // write!( f, "{spacer}[build-dependencies]\n" )?; + // for dep in build_dependencies_iter + // { + // write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.tee, UTF8_SYMBOLS.right, dep.display_with_spacer( &new_spacer )? )?; + // } + // // unwrap - safe because `is_empty` check + // write!( f, "{spacer}{}{} {}", UTF8_SYMBOLS.ell, UTF8_SYMBOLS.right, last.unwrap().display_with_spacer( &new_spacer )? )?; + // } + + // Ok( f ) + // } + // } + + // impl std::fmt::Display for ListNodeReport + // { + // fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result + // { + // write!( f, "{}", self.display_with_spacer( "" )? )?; + + // Ok( () ) + // } + // } /// Represents the different report formats for the `list` action. #[ derive( Debug, Default, Clone ) ] pub enum ListReport { /// Represents a tree-like report format. - Tree( Vec< ListNodeReport > ), + Tree( Vec< tool::TreePrinter > ), /// Represents a standard list report format in topological order. List( Vec< String > ), /// Represents an empty report format. @@ -289,42 +278,100 @@ mod private Empty, } - impl std::fmt::Display for ListReport + impl fmt::Display for ListReport { - fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result + fn fmt( &self, f : &mut fmt::Formatter< '_ > ) -> fmt::Result { match self { - Self::Tree( v ) => write!( f, "{}", v.iter().map( | l | l.to_string() ).collect::< Vec< _ > >().join( "\n" ) ), - Self::List( v ) => write!( f, "{}", v.iter().enumerate().map( |( i, v )| format!( "[{i}] {v}" ) ).collect::< Vec< _ > >().join( "\n" ) ), + Self::Tree( v ) => + write! + ( + f, + "{}", + v.iter().map( | l | l.to_string() ).collect::< Vec< _ > >().join( "\n" ) + ), + + Self::List( v ) => + write! + ( + f, + "{}", + v.iter().enumerate().map( |( i, v )| format!( "[{i}] {v}" ) ).collect::< Vec< _ > >().join( "\n" ) + ), + + Self::Empty => write!( f, "Nothing" ), } } } - fn process_package_dependency + // aaa : for Bohdan : description // aaa : done + /// The `DependencyId` struct encapsulates the essential attributes of a dependency. + #[ derive( Debug, Clone, PartialEq, Eq, Hash ) ] + pub struct DependencyId + { + /// The name of the dependency. 
+ /// + /// This is typically the name of the library or package that the package relies on. + pub name : String, + /// The version requirements for the dependency. + /// + /// Note: This will be compared to other dependencies and packages to build the tree + pub version : semver::VersionReq, + /// An optional path to the manifest file of the dependency. + /// + /// This field may contain a path to the manifest file when the dependency is a local package + /// or when specific path information is needed to locate the dependency's manifest. + pub path : Option< ManifestFile >, + } + + fn process_package_dependency< 'a > ( workspace : &Workspace, - package : &WorkspacePackage, + package : &WorkspacePackageRef< 'a >, args : &ListOptions, - dep_rep : &mut ListNodeReport, - visited : &mut HashSet< String > + dep_rep : &mut tool::ListNodeReport, + visited : &mut collection::HashSet< DependencyId > ) { - for dependency in &package.dependencies() + for dependency in package.dependencies() { - if dependency.path().is_some() && !args.dependency_sources.contains( &DependencySource::Local ) { continue; } - if dependency.path().is_none() && !args.dependency_sources.contains( &DependencySource::Remote ) { continue; } - let dep_id = format!( "{}+{}+{}", dependency.name(), dependency.req(), dependency.path().as_ref().map( | p | p.join( "Cargo.toml" ) ).unwrap_or_default() ); - let mut temp_vis = visited.clone(); - let dependency_rep = process_dependency( workspace, dependency, args, &mut temp_vis ); + // aaa : for Bohdan : bad : suboptimal + // aaa : Is that what you had in mind? + let dep_crate_dir = dependency.crate_dir(); + if dep_crate_dir.is_some() && !args.dependency_sources.contains( &DependencySource::Local ) { continue; } + if dep_crate_dir.is_none() && !args.dependency_sources.contains( &DependencySource::Remote ) { continue; } + // aaa : extend test coverage. NewType. Description + // aaa : NewType ✅ Description ✅ test coverage ❌ how to test structure without logic? + // qqq : extend test coverage. NewType. 
Description + let dep_id = DependencyId + { + name : dependency.name(), + // unwrap should be safe because of `semver::VersionReq` + version : dependency.req(), + path : dependency.crate_dir().map( | p | p.manifest_file() ), + }; + // format!( "{}+{}+{}", dependency.name(), dependency.req(), dependency.crate_dir().unwrap().manifest_file() ); + // let dep_id = format!( "{}+{}+{}", dependency.name(), dependency.req(), dependency.path().as_ref().map( | p | p.join( "Cargo.toml" ) ).unwrap_or_default() ); + + let mut temp_vis = visited.clone(); + let dependency_rep = process_dependency + ( + workspace, + dependency, + args, + &mut temp_vis + ); match dependency.kind() { - workspace::DependencyKind::Normal if args.dependency_categories.contains( &DependencyCategory::Primary ) => dep_rep.normal_dependencies.push( dependency_rep ), - workspace::DependencyKind::Development if args.dependency_categories.contains( &DependencyCategory::Dev ) => dep_rep.dev_dependencies.push( dependency_rep ), - workspace::DependencyKind::Build if args.dependency_categories.contains( &DependencyCategory::Build ) => dep_rep.build_dependencies.push( dependency_rep ), + DependencyKind::Normal if args.dependency_categories.contains( &DependencyCategory::Primary ) => + dep_rep.normal_dependencies.push( dependency_rep ), + DependencyKind::Development if args.dependency_categories.contains( &DependencyCategory::Dev ) => + dep_rep.dev_dependencies.push( dependency_rep ), + DependencyKind::Build if args.dependency_categories.contains( &DependencyCategory::Build ) => + dep_rep.build_dependencies.push( dependency_rep ), _ => { visited.remove( &dep_id ); std::mem::swap( &mut temp_vis, visited ); } } @@ -332,20 +379,35 @@ mod private } } - fn process_dependency( workspace : &Workspace, dep : &workspace::Dependency, args : &ListOptions, visited : &mut HashSet< String > ) -> ListNodeReport + fn process_dependency + ( + workspace : &Workspace, + dep : DependencyRef< '_ >, + args : &ListOptions, + visited : &mut collection::HashSet< DependencyId > + ) + -> tool::ListNodeReport { - let mut dep_rep = ListNodeReport + let mut dep_rep = tool::ListNodeReport { name : dep.name().clone(), version : if args.info.contains( &PackageAdditionalInfo::Version ) { Some( dep.req().to_string() ) } else { None }, - path : if args.info.contains( &PackageAdditionalInfo::Path ) { dep.path().as_ref().map( | p | p.clone().into_std_path_buf() ) } else { None }, + // manifest_file : if args.info.contains( &PackageAdditionalInfo::Path ) { dep.manifest_file().as_ref().map( | p | p.clone().into_std_path_buf() ) } else { None }, + crate_dir : if args.info.contains( &PackageAdditionalInfo::Path ) { dep.crate_dir() } else { None }, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], build_dependencies : vec![], }; - let dep_id = format!( "{}+{}+{}", dep.name(), dep.req(), dep.path().as_ref().map( | p | p.join( "Cargo.toml" ) ).unwrap_or_default() ); + // let dep_id = format!( "{}+{}+{}", dep.name(), dep.req(), dep.crate_dir().as_ref().map( | p | p.join( "Cargo.toml" ) ).unwrap_or_default() ); + let dep_id = DependencyId + { + name : dep.name(), + // unwrap should be safe because of `semver::VersionReq` + version : dep.req(), + path : dep.crate_dir().map( | p | p.manifest_file() ), + }; // if this is a cycle (we have visited this node before) if visited.contains( &dep_id ) { @@ -356,9 +418,9 @@ mod private // if we have not visited this node before, mark it as visited visited.insert( dep_id ); - if let Some( path ) = &dep.path() + if let Some( 
crate_dir ) = &dep.crate_dir() { - if let Some( package ) = workspace.package_find_by_manifest( path.as_std_path().join( "Cargo.toml" ) ) + if let Some( package ) = workspace.package_find_by_manifest( crate_dir.clone().manifest_file() ) { process_package_dependency( workspace, &package, args, &mut dep_rep, visited ); } @@ -367,19 +429,6 @@ mod private dep_rep } - trait ErrWith< T, T1, E > - { - fn err_with( self, v : T ) -> std::result::Result< T1, ( T, E ) >; - } - - impl< T, T1, E > ErrWith< T, T1, E > for Result< T1, E > - { - fn err_with( self, v : T ) -> Result< T1, ( T, E ) > - { - self.map_err( | e | ( v, e ) ) - } - } - /// Retrieve a list of packages based on the given arguments. /// /// # Arguments @@ -391,106 +440,177 @@ mod private /// - `Result` - A result containing the list report if successful, /// or a tuple containing the list report and error if not successful. #[ cfg_attr( feature = "tracing", tracing::instrument ) ] - pub fn list( args : ListOptions ) -> Result< ListReport, ( ListReport, Error ) > + pub fn list( args : ListOptions ) -> ResultWithReport< ListReport, untyped::Error > // qqq : should be specific error { let mut report = ListReport::default(); - let manifest = manifest::open( args.path_to_manifest.absolute_path() ).context( "List of packages by specified manifest path" ).err_with( report.clone() )?; - let metadata = Workspace::with_crate_dir( manifest.crate_dir() ).err_with( report.clone() )?; + // let manifest = Manifest::try_from( args.path_to_manifest.absolute_path() ) + dbg!( &args.path_to_manifest ); + let manifest = Manifest::try_from( args.path_to_manifest.clone() ) + .context( "List of packages by specified manifest path" ) + .err_with( || report.clone() )?; - let is_package = manifest.package_is().context( "try to identify manifest type" ).err_with( report.clone() )?; + let workspace = Workspace::try_from( manifest.crate_dir() ) + .context( "Reading workspace" ) + .err_with( || report.clone() )?; - let tree_package_report = | path : AbsolutePath, report : &mut ListReport, visited : &mut HashSet< String > | + let is_package = manifest.package_is(); + // let is_package = manifest.package_is().context( "try to identify manifest type" ).err_with( report.clone() )?; + + let tree_package_report = + | manifest_file : ManifestFile, report : &mut ListReport, visited : &mut HashSet< DependencyId > | { - let package = metadata.package_find_by_manifest( path ).unwrap(); - let mut package_report = ListNodeReport + + // aaa : is it safe to use unwrap here? // aaa : done + let package = workspace + .package_find_by_manifest( manifest_file ) + .ok_or_else( || format_err!( "Package not found in the workspace" ) ) + .err_with( || report.clone() )?; + let mut package_report = tool::ListNodeReport { name : package.name().to_string(), version : if args.info.contains( &PackageAdditionalInfo::Version ) { Some( package.version().to_string() ) } else { None }, - path : if args.info.contains( &PackageAdditionalInfo::Path ) { Some( package.manifest_path().as_std_path().to_path_buf() ) } else { None }, + crate_dir : if args.info.contains( &PackageAdditionalInfo::Path ) + { Some( package.crate_dir() ).transpose() } + else + { Ok( None ) } + .err_with( || report.clone() )?, + // aaa : is it safe to use unwrap here? 
// aaa : now returns an error duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], build_dependencies : vec![], }; - process_package_dependency( &metadata, &package, &args, &mut package_report, visited ); + process_package_dependency( &workspace, &package, &args, &mut package_report, visited ); + let printer = TreePrinter::new( &package_report ); *report = match report { - ListReport::Tree( ref mut v ) => ListReport::Tree( { v.extend([ package_report ]); v.clone() } ), - ListReport::Empty => ListReport::Tree( vec![ package_report ] ), + ListReport::Tree( ref mut v ) => ListReport::Tree + ( { v.extend([ printer ]); v.clone() } ), + ListReport::Empty => ListReport::Tree( vec![ printer ] ), ListReport::List( _ ) => unreachable!(), }; + Ok( () ) }; + match args.format { ListFormat::Tree if is_package => { - let mut visited = HashSet::new(); - tree_package_report( manifest.manifest_path, &mut report, &mut visited ); + let mut visited = collection::HashSet::new(); + tree_package_report( manifest.manifest_file, &mut report, &mut visited )?; let ListReport::Tree( tree ) = report else { unreachable!() }; - let tree = rearrange_duplicates( merge_dev_dependencies( merge_build_dependencies( tree ) ) ); + let printer = merge_build_dependencies( tree ); + let rep : Vec< ListNodeReport > = printer + .iter() + .map( | printer | printer.info.clone() ) + .collect(); + let tree = rearrange_duplicates( rep ); report = ListReport::Tree( tree ); } ListFormat::Tree => { - let packages = metadata.packages().context( "workspace packages" ).err_with( report.clone() )?; - let mut visited = packages.iter().map( | p | format!( "{}+{}+{}", p.name(), p.version().to_string(), p.manifest_path() ) ).collect(); + let packages = workspace.packages(); + let mut visited = packages + .clone() + .map + ( + // aaa : is it safe to use unwrap here + // unwrap is safe because Version has less information than VersionReq + | p | + DependencyId + { + name : p.name().into(), + version : semver::VersionReq::parse( &p.version().to_string() ).unwrap(), + path : p.manifest_file().ok() + } + ) + .collect(); for package in packages { - tree_package_report( package.manifest_path().as_std_path().try_into().unwrap(), &mut report, &mut visited ) + tree_package_report( package.manifest_file().unwrap(), &mut report, &mut visited )? 
} let ListReport::Tree( tree ) = report else { unreachable!() }; - let tree = merge_dev_dependencies( merge_build_dependencies( tree ) ); + let printer = merge_build_dependencies( tree ); + let rep : Vec< ListNodeReport > = printer + .iter() + .map( | printer | printer.info.clone() ) + .collect(); + let tree = merge_dev_dependencies( rep ); report = ListReport::Tree( tree ); } ListFormat::Topological => { - let root_crate = manifest - .manifest_data - .as_ref() - .and_then( | m | m.get( "package" ) ) + + let root_crate = manifest.data.get( "package" ) .map( | m | m[ "name" ].to_string().trim().replace( '\"', "" ) ) .unwrap_or_default(); - let dep_filter = move | _p : &WorkspacePackage, d : &workspace::Dependency | + // let root_crate = manifest + // .data + // // .as_ref() + // .and_then( | m | m.get( "package" ) ) + // .map( | m | m[ "name" ].to_string().trim().replace( '\"', "" ) ) + // .unwrap_or_default(); + + let dep_filter = move | _p : WorkspacePackageRef< '_ >, d : DependencyRef< '_ > | { ( - args.dependency_categories.contains( &DependencyCategory::Primary ) && d.kind() == workspace::DependencyKind::Normal - || args.dependency_categories.contains( &DependencyCategory::Dev ) && d.kind() == workspace::DependencyKind::Development - || args.dependency_categories.contains( &DependencyCategory::Build ) && d.kind() == workspace::DependencyKind::Build + args.dependency_categories.contains( &DependencyCategory::Primary ) && d.kind() == DependencyKind::Normal + || args.dependency_categories.contains( &DependencyCategory::Dev ) && d.kind() == DependencyKind::Development + || args.dependency_categories.contains( &DependencyCategory::Build ) && d.kind() == DependencyKind::Build ) && ( - args.dependency_sources.contains( &DependencySource::Remote ) && d.path().is_none() - || args.dependency_sources.contains( &DependencySource::Local ) && d.path().is_some() + args.dependency_sources.contains( &DependencySource::Remote ) && d.crate_dir().is_none() + || args.dependency_sources.contains( &DependencySource::Local ) && d.crate_dir().is_some() ) }; - let packages = metadata.packages().context( "workspace packages" ).err_with( report.clone() )?; - let packages_map = packages::filter + let packages = workspace.packages(); + let packages_map : collection::HashMap< package::PackageName, collection::HashSet< package::PackageName > > = packages::filter ( - packages.as_slice(), - FilterMapOptions { dependency_filter : Some( Box::new( dep_filter ) ), ..Default::default() } + packages.clone(), + packages::FilterMapOptions + { + dependency_filter : Some( Box::new( dep_filter ) ), + ..Default::default() + } ); let graph = graph::construct( &packages_map ); - let sorted = toposort( &graph, None ).map_err( | e | { use std::ops::Index; ( report.clone(), err!( "Failed to process toposort for package : {:?}", graph.index( e.node_id() ) ) ) } )?; - let packages_info = packages.iter().map( | p | ( p.name().clone(), p ) ).collect::< HashMap< _, _ > >(); + let sorted = toposort( &graph, None ) + .map_err + ( + | e | + { + use std::ops::Index; + format_err! 
+ ( + "Failed to process toposort for package : {:?}", + graph.index( e.node_id() ) + ) + } + ) + .err_with( || report.clone() )?; + let packages_info : collection::HashMap< String, WorkspacePackageRef< '_ > > = + packages.map( | p | ( p.name().to_string(), p ) ).collect(); if root_crate.is_empty() { - let names = sorted - .iter() + let names : Vec< String > = sorted + .into_iter() .rev() - .map( | dep_idx | graph.node_weight( *dep_idx ).unwrap().to_string() ) + .map( | dep_idx | graph.node_weight( dep_idx ).unwrap() ) .map ( - | mut name | + | name : &&package::PackageName | { - if let Some( p ) = packages_info.get( &name ) + let mut name : String = name.to_string(); + if let Some( p ) = packages_info.get( &name[ .. ] ) { if args.info.contains( &PackageAdditionalInfo::Version ) { @@ -500,22 +620,27 @@ mod private if args.info.contains( &PackageAdditionalInfo::Path ) { name.push_str( " " ); - name.push_str( &p.manifest_path().to_string() ); + name.push_str( &p.manifest_file()?.to_string() ); + // aaa : is it safe to use unwrap here? // aaa : should be safe, but now returns an error } } - name + Ok::< String, PathError >( name ) } ) - .collect::< Vec< String > >(); + .collect::< Result< _, _ >>() + .err_with( || report.clone() )?; report = ListReport::List( names ); } else { - let node = graph.node_indices().find( | n | graph.node_weight( *n ).unwrap() == &&root_crate ).unwrap(); + let node = graph + .node_indices() + .find( | n | graph.node_weight( *n ).unwrap().as_str() == root_crate ) + .unwrap(); let mut dfs = Dfs::new( &graph, node ); let mut subgraph = Graph::new(); - let mut node_map = HashMap::new(); + let mut node_map = collection::HashMap::new(); while let Some( n )= dfs.next( &graph ) { node_map.insert( n, subgraph.add_node( graph[ n ] ) ); @@ -523,7 +648,11 @@ mod private for e in graph.edge_references() { - if let ( Some( &s ), Some( &t ) ) = ( node_map.get( &e.source() ), node_map.get( &e.target() ) ) + if let ( Some( &s ), Some( &t ) ) = + ( + node_map.get( &e.source() ), + node_map.get( &e.target() ) + ) { subgraph.add_edge( s, t, () ); } @@ -533,8 +662,8 @@ mod private let mut names = Vec::new(); while let Some( n ) = topo.next( &subgraph ) { - let mut name = subgraph[ n ].clone(); - if let Some( p ) = packages_info.get( &name ) + let mut name : String = subgraph[ n ].to_string(); + if let Some( p ) = packages_info.get( &name[ .. 
] ) { if args.info.contains( &PackageAdditionalInfo::Version ) { @@ -544,7 +673,7 @@ mod private if args.info.contains( &PackageAdditionalInfo::Path ) { name.push_str( " " ); - name.push_str( &p.manifest_path().to_string() ); + name.push_str( &p.manifest_file().unwrap().to_string() ); } } names.push( name ); @@ -559,26 +688,35 @@ mod private Ok( report ) } - fn merge_build_dependencies( mut report: Vec< ListNodeReport > ) -> Vec< ListNodeReport > + fn merge_build_dependencies( mut report: Vec< tool::TreePrinter > ) -> Vec< tool::TreePrinter > { let mut build_dependencies = vec![]; for node_report in &mut report { - build_dependencies = merge_build_dependencies_impl( node_report, build_dependencies ); + build_dependencies = merge_build_dependencies_impl + ( + &mut node_report.info, + build_dependencies + ); } if let Some( last_report ) = report.last_mut() { - last_report.build_dependencies = build_dependencies; + last_report.info.build_dependencies = build_dependencies; } report } - - fn merge_build_dependencies_impl( report : &mut ListNodeReport, mut build_deps_acc : Vec< ListNodeReport > ) -> Vec< ListNodeReport > + + fn merge_build_dependencies_impl + ( + report : &mut tool::ListNodeReport, + mut build_deps_acc : Vec< tool::ListNodeReport > + ) + -> Vec< tool::ListNodeReport > { for dep in report.normal_dependencies.iter_mut() - .chain( report.dev_dependencies.iter_mut() ) - .chain( report.build_dependencies.iter_mut() ) + .chain( report.dev_dependencies.iter_mut() ) + .chain( report.build_dependencies.iter_mut() ) { build_deps_acc = merge_build_dependencies_impl(dep, build_deps_acc ); } @@ -593,8 +731,8 @@ mod private build_deps_acc } - - fn merge_dev_dependencies( mut report: Vec< ListNodeReport > ) -> Vec< ListNodeReport > + + fn merge_dev_dependencies( mut report : Vec< tool::ListNodeReport > ) -> Vec< tool::TreePrinter > { let mut dev_dependencies = vec![]; for node_report in &mut report @@ -605,11 +743,18 @@ mod private { last_report.dev_dependencies = dev_dependencies; } - - report + let printer : Vec< TreePrinter > = report + .iter() + .map( | rep | TreePrinter::new( rep ) ) + .collect(); + printer } - fn merge_dev_dependencies_impl( report : &mut ListNodeReport, mut dev_deps_acc : Vec< ListNodeReport > ) -> Vec< ListNodeReport > + fn merge_dev_dependencies_impl + ( + report : &mut ListNodeReport, + mut dev_deps_acc : Vec< ListNodeReport > + ) -> Vec< ListNodeReport > { for dep in report.normal_dependencies.iter_mut() .chain( report.dev_dependencies.iter_mut() ) @@ -628,27 +773,41 @@ mod private dev_deps_acc } - - fn rearrange_duplicates( mut report : Vec< ListNodeReport > ) -> Vec< ListNodeReport > + + fn rearrange_duplicates( mut report : Vec< tool::ListNodeReport > ) -> Vec< tool::TreePrinter > { - let mut required_normal : HashMap< usize, Vec< ListNodeReport > > = HashMap::new(); + let mut required_normal : collection::HashMap< usize, Vec< tool::ListNodeReport > > = collection::HashMap::new(); for i in 0 .. 
report.len() { - let ( required, exist ) : ( Vec< _ >, Vec< _ > ) = std::mem::take( &mut report[ i ].normal_dependencies ).into_iter().partition( | d | d.duplicate ); + let ( required, exist ) : ( Vec< _ >, Vec< _ > ) = std::mem::take + ( + &mut report[ i ].normal_dependencies + ) + .into_iter() + .partition( | d | d.duplicate ); report[ i ].normal_dependencies = exist; required_normal.insert( i, required ); } - + rearrange_duplicates_resolver( &mut report, &mut required_normal ); for ( i, deps ) in required_normal { report[ i ].normal_dependencies.extend( deps ); } - - report + + let printer : Vec< TreePrinter > = report + .iter() + .map( | rep | TreePrinter::new( rep ) ) + .collect(); + + printer } - - fn rearrange_duplicates_resolver( report : &mut [ ListNodeReport ], required : &mut HashMap< usize, Vec< ListNodeReport > > ) + + fn rearrange_duplicates_resolver + ( + report : &mut [ ListNodeReport ], + required : &mut HashMap< usize, Vec< ListNodeReport > > + ) { for node in report { @@ -659,7 +818,11 @@ mod private if !node.duplicate { if let Some( r ) = required.iter_mut().flat_map( |( _, v )| v ) - .find( | r | r.name == node.name && r.version == node.version && r.path == node.path ) + .find + ( + | r | + r.name == node.name && r.version == node.version && r.crate_dir == node.crate_dir + ) { std::mem::swap( r, node ); } @@ -687,7 +850,7 @@ crate::mod_interface! /// Contains output of the action. protected use ListReport; /// Contains output of a single node of the action. - protected use ListNodeReport; + // protected use ListNodeReport; /// List packages in workspace. orphan use list; } diff --git a/module/move/willbe/src/action/main_header.rs b/module/move/willbe/src/action/main_header.rs index e1b5a7a16e..1f4359961a 100644 --- a/module/move/willbe/src/action/main_header.rs +++ b/module/move/willbe/src/action/main_header.rs @@ -15,37 +15,33 @@ mod private }; use std::path::PathBuf; use regex::Regex; - use wca::wtools::anyhow::Error; - use action::readme_health_table_renew:: + use { CrateDir, query, url, Workspace }; + use entity::{ PathError, WorkspaceInitError }; + use error:: { - readme_path, - workspace_root - }; - use _path::AbsolutePath; - use { CrateDir, query, url, Workspace, wtools }; - use entity::{ CrateDirError, WorkspaceError }; - use wtools::error:: - { - anyhow::format_err, err, - for_app:: + Result, + untyped:: { - Result, - Error as wError, + Error, Context, }, }; - use error_tools:: - { - dependency::*, - for_lib::Error, - }; - + use workspace_md_extension::WorkspaceMdExtension; + // use error::ErrWith; + static TAGS_TEMPLATE : std::sync::OnceLock< Regex > = std::sync::OnceLock::new(); fn regexes_initialize() { - TAGS_TEMPLATE.set( Regex::new( r"(.|\n|\r\n)+" ).unwrap() ).ok(); + TAGS_TEMPLATE.set + ( + Regex::new + ( + r"(.|\n|\r\n)+" + ) + .unwrap() + ).ok(); } /// Report. @@ -67,14 +63,17 @@ mod private { writeln!( f, "File successful changed : {file_path}." )?; } - else + else { writeln!( f, "File successful changed but contains non-UTF-8 characters." )?; } } - else + else { - if let Some( Some( file_path ) ) = self.found_file.as_ref().map( | p | p.to_str() ) + if let Some( Some( file_path ) ) = self + .found_file + .as_ref() + .map( | p | p.to_str() ) { writeln!( f, "File found but not changed : {file_path}." )?; } @@ -87,17 +86,23 @@ mod private } } - #[ derive( Debug, Error ) ] + /// The `MainHeaderRenewError` enum represents the various errors that can occur during + /// the renewal of the main header. 
+ #[ derive( Debug, error::Error ) ] pub enum MainHeaderRenewError { + /// Represents a common error. #[ error( "Common error: {0}" ) ] - Common(#[ from ] wError ), + Common(#[ from ] Error ), + /// Represents an I/O error. #[ error( "I/O error: {0}" ) ] IO( #[ from ] std::io::Error ), + /// Represents an error related to workspace initialization. #[ error( "Workspace error: {0}" ) ] - Workspace( #[ from ] WorkspaceError), + Workspace( #[ from ] WorkspaceInitError ), + /// Represents an error related to directory paths. #[ error( "Directory error: {0}" ) ] - Directory( #[ from ] CrateDirError ), + Directory( #[ from ] PathError ), } /// The `HeaderParameters` structure represents a set of parameters, used for creating url for header. @@ -112,12 +117,22 @@ mod private impl HeaderParameters { /// Create `HeaderParameters` instance from the folder where Cargo.toml is stored. - fn from_cargo_toml( workspace : Workspace ) -> Result< Self, MainHeaderRenewError > + fn from_cargo_toml( workspace : &Workspace ) -> Result< Self, MainHeaderRenewError > { - let repository_url = workspace.repository_url()?.ok_or_else::< Error, _ >( || err!( "repo_url not found in workspace Cargo.toml" ) )?; - let master_branch = workspace.master_branch()?.unwrap_or( "master".into() ); - let workspace_name = workspace.workspace_name()?.ok_or_else::< Error, _ >( || err!( "workspace_name not found in workspace Cargo.toml" ) )?; - let discord_url = workspace.discord_url()?; + // aaa : for Petro : too long lines, review all files + // aaa : done + let repository_url = workspace + .repository_url() + .ok_or_else::< Error, _ > + ( || err!( "repo_url not found in workspace Cargo.toml" ) )?; + + let master_branch = workspace.master_branch().unwrap_or( "master".into() ); + let workspace_name = workspace + .workspace_name() + .ok_or_else::< Error, _ > + ( || err!( "workspace_name not found in workspace Cargo.toml" ) )?; + + let discord_url = workspace.discord_url(); Ok ( @@ -134,8 +149,15 @@ mod private /// Convert `Self`to header. fn to_header( self ) -> Result< String, MainHeaderRenewError > { - let discord = self.discord_url.map( | discord | - format!( "\n[![discord](https://img.shields.io/discord/872391416519737405?color=eee&logo=discord&logoColor=eee&label=ask)]({discord})" ) + let discord = self.discord_url + .map + ( + | discord | + format! + ( + "\n[![discord](https://img.shields.io/discord/872391416519737405?color=eee&logo=discord&logoColor=eee&label=ask)]({})", + discord + ) ) .unwrap_or_default(); @@ -178,40 +200,40 @@ mod private /// [![docs.rs](https://raster.shields.io/static/v1?label=docs&message=online&color=eee&logo=docsdotrs&logoColor=eee)](https://docs.rs/wtools) /// /// ``` - pub fn readme_header_renew( path : AbsolutePath ) -> Result< MainHeaderRenewReport, ( MainHeaderRenewReport, MainHeaderRenewError ) > + pub fn readme_header_renew( crate_dir : CrateDir ) + -> Result< MainHeaderRenewReport, ( MainHeaderRenewReport, MainHeaderRenewError ) > { let mut report = MainHeaderRenewReport::default(); regexes_initialize(); - let mut cargo_metadata = Workspace::with_crate_dir - ( - CrateDir::try_from( path ) - .map_err( | e | ( report.clone(), e.into() ) )? 
- ).map_err( | e | ( report.clone(), e.into() ) )?; - - let workspace_root = workspace_root( &mut cargo_metadata ) - .map_err( | e | ( report.clone(), e.into() ) )?; - - let header_param = HeaderParameters::from_cargo_toml( cargo_metadata ) - .map_err( | e | ( report.clone(), e.into() ) )?; - + let workspace = Workspace::try_from + ( + crate_dir + ) + .err_with( || report.clone() )?; + + let workspace_root = workspace + .workspace_root(); + + let header_param = HeaderParameters::from_cargo_toml( &workspace ) + .err_with( || report.clone() )?; + let read_me_path = workspace_root.join - ( - readme_path( &workspace_root ) - .ok_or_else( || format_err!( "Fail to find README.md" ) ) - .map_err( | e | ( report.clone(), e.into() ) )? + ( + repository::readme_path( &workspace_root ) + .err_with( || report.clone() )? ); - - report.found_file = Some( read_me_path.clone() ); - + + report.found_file = Some( read_me_path.clone().to_path_buf() ); + let mut file = OpenOptions::new() .read( true ) .write( true ) .open( &read_me_path ) - .map_err( | e | ( report.clone(), e.into() ) )?; + .err_with( || report.clone() )?; let mut content = String::new(); - file.read_to_string( &mut content ).map_err( | e | ( report.clone(), e.into() ) )?; + file.read_to_string( &mut content ).err_with( || report.clone() )?; let raw_params = TAGS_TEMPLATE .get() @@ -223,17 +245,22 @@ mod private _ = query::parse( raw_params ).context( "Fail to parse arguments" ); - let header = header_param.to_header().map_err( | e | ( report.clone(), e.into() ) )?; + let header = header_param.to_header().err_with( || report.clone() )?; let content : String = TAGS_TEMPLATE.get().unwrap().replace - ( - &content, - &format!( "\n{header}\n" ) + ( + &content, + &format! + ( + "\n{}\n", + raw_params, + header, + ) ).into(); - - file.set_len( 0 ).map_err( | e | ( report.clone(), e.into() ) )?; - file.seek( SeekFrom::Start( 0 ) ).map_err( | e | ( report.clone(), e.into() ) )?; - file.write_all( content.as_bytes() ).map_err( | e | ( report.clone(), e.into() ) )?; - report.touched_file = read_me_path; + + file.set_len( 0 ).err_with( || report.clone() )?; + file.seek( SeekFrom::Start( 0 ) ).err_with( || report.clone() )?; + file.write_all( content.as_bytes() ).err_with( || report.clone() )?; + report.touched_file = read_me_path.to_path_buf(); report.success = true; Ok( report ) } @@ -245,4 +272,6 @@ crate::mod_interface! orphan use readme_header_renew; /// Report. orphan use MainHeaderRenewReport; + /// Error. + orphan use MainHeaderRenewError; } \ No newline at end of file diff --git a/module/move/willbe/src/action/mod.rs b/module/move/willbe/src/action/mod.rs index 0c66bb41ce..728271c2a5 100644 --- a/module/move/willbe/src/action/mod.rs +++ b/module/move/willbe/src/action/mod.rs @@ -11,8 +11,6 @@ crate::mod_interface! /// Return the differences between a local and remote package versions. layer publish_diff; /// Generates health table in main Readme.md file of workspace. - // aaa : for Petro : give high quality explanations - // aaa : add more details to description layer readme_health_table_renew; /// Module headers. 
layer readme_modules_headers_renew; diff --git a/module/move/willbe/src/action/publish.rs b/module/move/willbe/src/action/publish.rs index c426e01fd3..a0b6aa0675 100644 --- a/module/move/willbe/src/action/publish.rs +++ b/module/move/willbe/src/action/publish.rs @@ -3,15 +3,21 @@ mod private { use crate::*; - use std::collections::{ HashSet, HashMap }; - use core::fmt::Formatter; - use std::{ env, fs }; + use std::{ env, fmt, fs }; + use + { + error::untyped, + error::ErrWith, + }; + +// use collection::{ HashSet, HashMap }; +// use core::fmt::Formatter; +// use std::{ env, fs }; - use wtools::error::for_app::{ Error, anyhow }; - use _path::AbsolutePath; - use workspace::Workspace; - use package::Package; - use channel::Channel; +// use error::untyped::Error; +// use workspace::Workspace; +// use package::Package; +// use channel::Channel; /// Represents a report of publishing packages #[ derive( Debug, Default, Clone ) ] @@ -19,14 +25,14 @@ mod private { /// Represents the absolute path to the root directory of the workspace. pub workspace_root_dir : Option< AbsolutePath >, - pub plan : Option< package::PublishPlan >, + pub plan : Option< publish::PublishPlan >, /// Represents a collection of packages and their associated publishing reports. - pub packages : Vec<( AbsolutePath, package::PublishReport )> + pub packages : Vec<( AbsolutePath, publish::PublishReport )> } - impl std::fmt::Display for PublishReport + impl fmt::Display for PublishReport { - fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result + fn fmt( &self, f : &mut fmt::Formatter< '_ > ) -> fmt::Result { if self.packages.is_empty() { @@ -52,12 +58,20 @@ mod private { if !plan.dry { - let expected_to_publish = plan + let expected_to_publish : Vec< _ > = plan .plans .iter() - .map( | p | ( p.version_bump.crate_dir.absolute_path(), p.package_name.clone(), p.version_bump.clone() ) ) - .collect::< Vec< _ > >(); - let mut actually_published = self.packages.iter() + .map + ( + | p | + ( + p.bump.crate_dir.clone().absolute_path(), + p.package_name.clone(), + p.bump.clone() + ) + ) + .collect(); + let mut actually_published : Vec< _ > = self.packages.iter() .filter_map ( |( path, repo )| @@ -70,12 +84,14 @@ mod private None } ) - .collect::< Vec< _ > >(); + .collect(); writeln!( f, "Status :" )?; - for ( path, name, version ) in expected_to_publish + for ( path, name, version ) in expected_to_publish { - if let Some( pos ) = actually_published.iter().position( | p | p == &path ) + if let Some( pos ) = actually_published + .iter() + .position( | p | p == &path ) { writeln!( f, "✅ {name} {}", version.new_version )?; // want to check that only expected packages actually published @@ -108,45 +124,74 @@ mod private /// # Returns /// A Result containing a `PublishPlan` if successful, or an `Error` otherwise. #[ cfg_attr( feature = "tracing", tracing::instrument ) ] - pub fn publish_plan( patterns : Vec< String >, channel : Channel, dry : bool, temp : bool ) -> Result< package::PublishPlan, Error > + pub fn publish_plan + ( + patterns : Vec< String >, + channel : channel::Channel, + dry : bool, + temp : bool + ) -> Result< publish::PublishPlan, untyped::Error > { - let mut paths = HashSet::new(); + let mut paths = collection::HashSet::new(); // find all packages by specified folders for pattern in &patterns { - let current_path = AbsolutePath::try_from( std::path::PathBuf::from( pattern ) )?; + let current_path = AbsolutePath::try_from + ( + fs::canonicalize( pattern.as_str() )? 
+ )?; + // let current_path = AbsolutePath::try_from( std::path::PathBuf::from( pattern ) )?; // let current_paths = files::find( current_path, &[ "Cargo.toml" ] ); paths.extend( Some( current_path ) ); } - let mut metadata = if paths.is_empty() + let workspace = if paths.is_empty() { - Workspace::from_current_path()? + Workspace::try_from( CurrentPath )? } else { - // FIX : patterns can point to different workspaces. Current solution take first random path from list + // qqq : patterns can point to different workspaces. Current solution take first random path from list. + // A problem may arise if a user provides paths to packages from different workspaces + // and we do not check whether all packages are within the same workspace + // In the current solution, we'll choose the workspace related to the first package let current_path = paths.iter().next().unwrap().clone(); let dir = CrateDir::try_from( current_path )?; - - Workspace::with_crate_dir( dir )? + Workspace::try_from( dir )? }; - let workspace_root_dir : AbsolutePath = metadata - .workspace_root()? + + let workspace_root_dir : AbsolutePath = workspace + .workspace_root() .try_into()?; - let packages = metadata.load()?.packages()?; - let packages_to_publish : Vec< _ > = packages - .iter() - .filter( | &package | paths.contains( &AbsolutePath::try_from( package.manifest_path().as_std_path().parent().unwrap() ).unwrap() ) ) - .map( | p | p.name().clone() ) + + let packages = workspace.packages(); + let packages_to_publish : Vec< String > = packages + .clone() + .filter( | &package | paths.contains( &package.crate_dir().unwrap().into() ) ) + .map( | p | p.name().to_string() ) + .collect(); + let package_map : collection::HashMap< String, package::Package< '_ > > = packages + .map( | p | ( p.name().to_string(), package::Package::from( p ) ) ) .collect(); - let package_map = packages.into_iter().map( | p | ( p.name().clone(), Package::from( p.clone() ) ) ).collect::< HashMap< _, _ > >(); - let graph = metadata.graph(); - let subgraph_wanted = graph::subgraph( &graph, &packages_to_publish ); - let tmp = subgraph_wanted.map( | _, n | graph[ *n ].clone(), | _, e | graph[ *e ].clone() ); + let graph = workspace_graph::graph( &workspace ); + let subgraph_wanted = graph::subgraph + ( + &graph, + &packages_to_publish[ .. ] + ); + let tmp = subgraph_wanted + .map + ( + | _, n | + graph[ *n ].clone(), | _, e | graph[ *e ].clone() + ); - let mut unique_name = format!( "temp_dir_for_publish_command_{}", path_tools::path::unique_folder_name()? ); + let mut unique_name = format! + ( + "temp_dir_for_publish_command_{}", + path::unique_folder_name()? + ); let dir = if temp { @@ -154,7 +199,11 @@ mod private while temp_dir.exists() { - unique_name = format!( "temp_dir_for_publish_command_{}", path_tools::path::unique_folder_name()? ); + unique_name = format! + ( + "temp_dir_for_publish_command_{}", + path::unique_folder_name()? 
+ ); temp_dir = env::temp_dir().join( unique_name ); } @@ -166,14 +215,28 @@ mod private None }; - let subgraph = graph::remove_not_required_to_publish( &package_map, &tmp, &packages_to_publish, dir.clone() )?; - let subgraph = subgraph.map( | _, n | n, | _, e | e ); + let subgraph = graph::remove_not_required_to_publish + ( + &package_map, + &tmp, + &packages_to_publish, + dir.clone() + )?; + let subgraph = subgraph + .map( | _, n | n, | _, e | e ); - let queue = graph::toposort( subgraph ).unwrap().into_iter().map( | n | package_map.get( &n ).unwrap() ).cloned().collect::< Vec< _ > >(); + let queue : Vec< _ > = graph::toposort( subgraph ) + .unwrap() + .into_iter() + .map( | n | package_map.get( &n ).unwrap() ) + .cloned() + .collect(); - let roots = packages_to_publish.iter().map( | p | package_map.get( p ).unwrap().crate_dir() ).collect::< Vec< _ > >(); + let roots : Vec< _ > = packages_to_publish + .iter() + .map( | p | package_map.get( p ).unwrap().crate_dir() ).collect(); - let plan = package::PublishPlan::former() + let plan = publish::PublishPlan::former() .channel( channel ) .workspace_dir( CrateDir::try_from( workspace_root_dir ).unwrap() ) .option_base_temp_dir( dir.clone() ) @@ -190,13 +253,13 @@ mod private /// #[ cfg_attr( feature = "tracing", tracing::instrument ) ] - pub fn publish( plan : package::PublishPlan ) -> Result< PublishReport, ( PublishReport, Error ) > + pub fn publish( plan : publish::PublishPlan ) -> ResultWithReport< PublishReport, untyped::Error > { let mut report = PublishReport::default(); let temp = plan.base_temp_dir.clone(); report.plan = Some( plan.clone() ); - for package_report in package::perform_packages_publish( plan ).err_with( || report.clone() )? + for package_report in publish::perform_packages_publish( plan ).err_with( || report.clone() )? 
{ let path : &std::path::Path = package_report.get_info.as_ref().unwrap().current_path.as_ref(); report.packages.push(( AbsolutePath::try_from( path ).unwrap(), package_report )); @@ -209,26 +272,6 @@ mod private Ok( report ) } - - - trait ErrWith< T, T1, E > - { - fn err_with< F >( self, f : F ) -> std::result::Result< T1, ( T, E ) > - where - F : FnOnce() -> T; - } - - impl< T, T1, E > ErrWith< T, T1, Error > for Result< T1, E > - where - E : std::fmt::Debug + std::fmt::Display + Send + Sync + 'static, - { - fn err_with< F >( self, f : F ) -> Result< T1, ( T, Error ) > - where - F : FnOnce() -> T, - { - self.map_err( | e | ( f(), anyhow!( e ) ) ) - } - } } // diff --git a/module/move/willbe/src/action/publish_diff.rs b/module/move/willbe/src/action/publish_diff.rs index 1bf0241e15..d27920c7bc 100644 --- a/module/move/willbe/src/action/publish_diff.rs +++ b/module/move/willbe/src/action/publish_diff.rs @@ -3,17 +3,19 @@ mod private { use crate::*; - use std::path::PathBuf; - use std::collections::HashMap; - use std::fmt::Formatter; + use path::PathBuf; + use collection::HashMap; + use std::fmt; use colored::Colorize; use crates_tools::CrateArchive; - use action::list::{ ListReport, ListNodeReport }; - use _path::AbsolutePath; - use wtools::error::for_app::Result; + use action::list::ListReport; + use error::untyped::Result; + // qqq : group dependencies use diff::{ DiffReport, crate_diff }; - use error_tools::for_app::format_err; + use error::untyped::format_err; + use tool::ListNodeReport; + use tool::TreePrinter; /// Options for `publish_diff` command #[ derive( Debug, former::Former ) ] @@ -22,7 +24,7 @@ mod private path : PathBuf, keep_archive : Option< PathBuf >, } - + #[ derive( Debug ) ] pub struct PublishDiffReport { @@ -30,29 +32,46 @@ mod private pub root_path : AbsolutePath, pub tree : ListNodeReport, } - + impl std::fmt::Display for PublishDiffReport { - fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result + fn fmt( &self, f : &mut fmt::Formatter< '_ > ) -> std::fmt::Result { let mut tree = self.tree.clone(); - let root_path = tree.path.as_ref().unwrap().clone(); + let root_path = tree.crate_dir.as_ref().unwrap().clone(); let root_name = tree.name.clone(); let root_version = tree.version.as_ref().unwrap().clone(); - + fn modify( diffs : &HashMap< AbsolutePath, DiffReport >, tree : &mut ListNodeReport ) { - let path = tree.path.take().unwrap(); - let path = path.as_path().to_string_lossy(); - let path = path.strip_suffix( "Cargo.toml" ).unwrap_or( &path ); - let root = AbsolutePath::try_from( path ).unwrap(); + let path = tree.crate_dir.take().unwrap(); + let root = AbsolutePath::from( path ); let diff = diffs.get( &root ).unwrap(); let has_changes = diff.has_changes(); - tree.name = if has_changes { format!( "{}", tree.name.yellow() ) } else { tree.name.clone() }; - tree.version.as_mut().map( | v | *v = format!( "{} {}", if has_changes { v.yellow() } else { v.as_str().into() }, if has_changes { "MODIFIED" } else { "" } ) ); - + tree.name = if has_changes + { + format!( "{}", tree.name.yellow() ) + } + else + { + tree.name.clone() + }; + tree + .version + .as_mut() + .map + ( + | v | + *v = format! 
+ ( + "{} {}", + if has_changes { v.yellow() } else { v.as_str().into() }, + if has_changes { "MODIFIED" } else { "" } + ) + ); + for dep in &mut tree.normal_dependencies { modify( diffs, dep ) @@ -60,12 +79,10 @@ mod private } modify( &self.diffs, &mut tree ); - let path = root_path.as_path().to_string_lossy(); - let path = path.strip_suffix( "Cargo.toml" ).unwrap_or( &path ); - let root = AbsolutePath::try_from( path ).unwrap(); + let root = AbsolutePath::from( root_path ); let diff = self.diffs.get( &root ).unwrap(); - - writeln!( f, "Tree:\n{}", tree )?; + let printer = TreePrinter::new( &tree ); + writeln!( f, "Tree:\n{}", printer )?; if diff.has_changes() { writeln!( f, "Changes detected in `{root_name} {root_version}`:" )?; @@ -75,7 +92,7 @@ mod private writeln!( f, "No changes found in `{root_name} {root_version}`. Files:" )?; } write!( f, "{}", diff )?; - + Ok( () ) } } @@ -83,10 +100,11 @@ mod private /// Return the differences between a local and remote package versions. #[ cfg_attr( feature = "tracing", tracing::instrument ) ] pub fn publish_diff( o : PublishDiffOptions ) -> Result< PublishDiffReport > + // qqq : don't use 1-prameter Result { let path = AbsolutePath::try_from( o.path )?; let dir = CrateDir::try_from( path.clone() )?; - + let list = action::list ( action::list::ListOptions::former() @@ -98,22 +116,39 @@ mod private .form() ) .unwrap(); - let ListReport::Tree( mut tree ) = list else { return Err( format_err!( "Logical error. Unexpected list format" ) ) }; + let ListReport::Tree( tree ) = list + else + { + return Err( format_err!( "Logical error. Unexpected list format" ) ) + }; let mut tasks = vec![ tree[ 0 ].clone() ]; let mut diffs = HashMap::new(); let mut current_idx = 0; while current_idx < tasks.len() { - let path = tasks[ current_idx ].path.as_ref().unwrap().to_string_lossy(); - let path = path.strip_suffix( "Cargo.toml" ).unwrap_or( &path ); - let path = AbsolutePath::try_from( path )?; + // let path = tasks[ current_idx ].crate_dir.as_ref().unwrap().to_string_lossy(); + let path = tasks[ current_idx ] + .info + .crate_dir + .as_ref() + .unwrap() + .clone() + .absolute_path(); + // aaa : looks bad. use ready newtypes // aaa : removed let dir = CrateDir::try_from( path.clone() )?; let package = package::Package::try_from( dir.clone() )?; let name = &package.name()?; let version = &package.version()?; - _ = cargo::pack( cargo::PackOptions::former().path( dir.as_ref() ).allow_dirty( true ).no_verify( true ).dry( false ).form() )?; + _ = cargo::pack + ( + cargo::PackOptions::former() + .path( dir.as_ref() ) + .allow_dirty( true ) + .checking_consistency( false ) + .dry( false ).form() + )?; let l = CrateArchive::read( packed_crate::local_path( name, version, dir )? 
)?; let r = CrateArchive::download_crates_io( name, version ).unwrap(); @@ -133,15 +168,25 @@ mod private } } diffs.insert( path, crate_diff( &l, &r ).exclude( diff::PUBLISH_IGNORE_LIST ) ); - tasks.extend( tasks[ current_idx ].normal_dependencies.clone() ); - + let report = tasks[ current_idx ].info.normal_dependencies.clone(); + let printer : Vec< TreePrinter > = report + .iter() + .map( | rep | TreePrinter::new( rep ) ) + .collect(); + tasks.extend( printer ); + current_idx += 1; } + let printer = tree; + let mut rep : Vec< ListNodeReport > = printer + .iter() + .map( | printer | printer.info.clone() ) + .collect(); let report = PublishDiffReport { root_path : path.clone(), diffs, - tree : tree.remove( 0 ), + tree : rep.remove( 0 ), }; Ok( report ) diff --git a/module/move/willbe/src/action/readme_health_table_renew.rs b/module/move/willbe/src/action/readme_health_table_renew.rs index e9a9bb72e6..b7a7736b03 100644 --- a/module/move/willbe/src/action/readme_health_table_renew.rs +++ b/module/move/willbe/src/action/readme_health_table_renew.rs @@ -4,50 +4,71 @@ mod private use std:: { - str::FromStr, - fs::{ OpenOptions, File, read_dir }, - path::{ Path, PathBuf }, + fs::{ OpenOptions, File }, io::{ Write, Read, Seek, SeekFrom }, - collections::HashMap, }; - - // aaa : for Petro : don't use cargo_metadata and Package directly, use facade - // aaa : ✅ - - - use convert_case::{ Case, Casing }; + use path::{ Path, PathBuf }; + use convert_case::Casing; use toml_edit::Document; use regex::bytes::Regex; + use collection::HashMap; - use wtools::error:: + use error:: { - err, - for_app:: + Error, + untyped:: { - Error, + Error as wError, Result, Context, format_err, - bail, } }; - use manifest::private::repo_url; - use _path::AbsolutePath; + use manifest::repo_url; + // use path::AbsolutePath; static TAG_TEMPLATE: std::sync::OnceLock< Regex > = std::sync::OnceLock::new(); static CLOSE_TAG: std::sync::OnceLock< Regex > = std::sync::OnceLock::new(); - /// Initializes two global regular expressions that are used to match tags. fn regexes_initialize() { - TAG_TEMPLATE.set( regex::bytes::Regex::new( r#""# ).unwrap() ).ok(); - CLOSE_TAG.set( regex::bytes::Regex::new( r#""# ).unwrap() ).ok(); + TAG_TEMPLATE.set + ( + regex::bytes::Regex::new + ( + r#""# + ).unwrap() + ).ok(); + CLOSE_TAG.set + ( + regex::bytes::Regex::new + ( + r#""# + ).unwrap() + ).ok(); } + #[ derive( Debug, Error )] + pub enum HealthTableRenewError + { + #[ error( "Common error: {0}" ) ] + Common( #[ from ] wError ), + #[ error( "I/O error: {0}" ) ] + IO( #[ from ] std::io::Error ), + #[ error( "Path error: {0}" ) ] + Path( #[ from ] PathError ), + #[ error( "Workspace error: {0}" ) ] + Workspace( #[ from ] WorkspaceInitError ), + #[ error( "Utf8Error error: {0}" ) ] + Utf8Error( #[ from ] std::str::Utf8Error ), + #[ error( "Toml edit error: {0}" ) ] + Toml( #[ from ] toml_edit::TomlError ) + } /// `Stability` is an enumeration that represents the stability level of a feature. - #[ derive( Debug ) ] + #[ derive( Debug, derive_tools::FromStr ) ] + #[ display( style = "snake_case" ) ] pub enum Stability { /// The feature is still being tested and may change. 
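The typed error enums introduced in these hunks (HealthTableRenewError above, MainHeaderRenewError and ModulesHeadersRenewError in the sibling files) all follow the same derive-based pattern. A minimal standalone sketch of that pattern follows, using the thiserror crate directly; the patch itself goes through willbe's own error re-exports, so the import paths, enum name, and variants below are illustrative assumptions rather than code from this PR.

// Hedged sketch only: a simplified analogue of the typed error enums in this patch.
use std::{ fs, path::Path };
use thiserror::Error;

#[ derive( Debug, Error ) ]
pub enum RenewError
{
  /// Converted automatically from `std::io::Error` thanks to `#[ from ]`.
  #[ error( "I/O error: {0}" ) ]
  Io( #[ from ] std::io::Error ),
  /// A domain-specific failure with its own display message.
  #[ error( "Cannot find Cargo.toml in {0}" ) ]
  MissingManifest( String ),
}

fn read_manifest( dir : &Path ) -> Result< String, RenewError >
{
  let path = dir.join( "Cargo.toml" );
  if !path.exists()
  {
    return Err( RenewError::MissingManifest( dir.display().to_string() ) );
  }
  // `?` converts the io::Error into RenewError::Io via the derived From impl.
  Ok( fs::read_to_string( path )? )
}

This is why the refactored functions can return, for example, Result< (), HealthTableRenewError > and still use `?` directly on plain I/O calls.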
@@ -62,26 +83,11 @@ mod private Deprecated, } - impl FromStr for Stability - { - type Err = Error; - - fn from_str( s : &str ) -> Result< Self, Self::Err > - { - match s - { - "experimental" => Ok( Stability::Experimental ), - "unstable" => Ok( Stability::Unstable ), - "stable" => Ok( Stability::Stable ), - "frozen" => Ok( Stability::Frozen ), - "deprecated" => Ok( Stability::Deprecated ), - _ => Err( err!( "Fail to parse stability" ) ), - } - } - } + // aaa : qqq : derive? + // aaa : add /// Retrieves the stability level of a package from its `Cargo.toml` file. - fn stability_get( package_path: &Path ) -> Result< Stability > + fn stability_get( package_path : &Path ) -> Result< Stability, HealthTableRenewError > { let path = package_path.join( "Cargo.toml" ); if path.exists() @@ -101,48 +107,66 @@ mod private } else { - Err( err!( "No Cargo.toml found" ) ) + Err( HealthTableRenewError::Common( wError::msg( "Cannot find Cargo.toml" ))) } } /// Represents parameters that are common for all tables #[ derive( Debug ) ] - struct GlobalTableParameters + struct GlobalTableOptions { /// Path to the root repository. - core_url: String, + core_url : String, /// User and repository name, written through '/'. - user_and_repo: String, + user_and_repo : String, /// List of branches in the repository. - branches: Option< Vec< String > >, + branches : Option< Vec< String > >, /// workspace root - workspace_root : String, + workspace_root : PathBuf, + // aaa : for Petro : is not that path? + // aaa : done } /// Structure that holds the parameters for generating a table. #[ derive( Debug ) ] - struct TableParameters + struct TableOptions { // Relative path from workspace root to directory with modules - base_path: String, + base_path : String, // include branches column flag - include_branches: bool, + include_branches : bool, // include stability column flag - include_stability: bool, + include_stability : bool, // include docs column flag - include_docs: bool, + include_docs : bool, // include sample column flag - include: bool, + include : bool, } - impl From< HashMap< String, query::Value > > for TableParameters + impl From< HashMap< String, query::Value > > for TableOptions { - fn from( value : HashMap< String, query::Value >) -> Self + fn from( value : HashMap< String, query::Value > ) -> Self { - let include_branches = value.get( "with_branches" ).map( | v | bool::from( v ) ).unwrap_or( true ); - let include_stability = value.get( "with_stability" ).map( | v | bool::from( v ) ).unwrap_or( true ); - let include_docs = value.get( "with_docs" ).map( | v | bool::from( v ) ).unwrap_or( true ); - let include = value.get( "with_gitpod" ).map( | v | bool::from( v ) ).unwrap_or( true ); + let include_branches = value + .get( "with_branches" ) + .map( | v | bool::from( v ) ) + .unwrap_or( true ); + + let include_stability = value + .get( "with_stability" ) + .map( | v | bool::from( v ) ) + .unwrap_or( true ); + + let include_docs = value + .get( "with_docs" ) + .map( | v | bool::from( v ) ) + .unwrap_or( true ); + + let include = value + .get( "with_gitpod" ) + .map( | v | bool::from( v ) ) + .unwrap_or( true ); + let b_p = value.get( "1" ); let base_path = if let Some( query::Value::String( path ) ) = value.get( "path" ).or( b_p ) { @@ -152,19 +176,27 @@ mod private { "./" }; - Self { base_path: base_path.to_string(), include_branches, include_stability, include_docs, include } + Self + { + base_path: base_path.to_string(), + include_branches, + include_stability, + include_docs, + include + } } } - impl 
GlobalTableParameters + impl GlobalTableOptions { /// Initializes the struct's fields from a `Cargo.toml` file located at a specified path. - fn initialize_from_path( path: &Path ) -> Result< Self > + fn initialize_from_path( path : &Path ) -> Result< Self, HealthTableRenewError > { + let cargo_toml_path = path.join( "Cargo.toml" ); if !cargo_toml_path.exists() { - bail!( "Cannot find Cargo.toml" ) + return Err( HealthTableRenewError::Common( wError::msg( "Cannot find Cargo.toml" ))) } else { @@ -200,7 +232,16 @@ mod private { user_and_repo = url::git_info_extract( core_url )?; } - Ok( Self { core_url: core_url.unwrap_or_default(), user_and_repo, branches, workspace_root: path.to_string_lossy().to_string() } ) + Ok + ( + Self + { + core_url : core_url.unwrap_or_default(), + user_and_repo, + branches, + workspace_root : path.to_path_buf() + } + ) } } @@ -216,15 +257,20 @@ mod private /// will mean that at this place the table with modules located in the directory module/core will be generated. /// The tags do not disappear after generation. /// Anything between the opening and closing tag will be destroyed. - pub fn readme_health_table_renew( path : &Path ) -> Result< () > + // aaa : for Petro : typed errors + // aaa : done + pub fn readme_health_table_renew( path : &Path ) -> Result< (), HealthTableRenewError > { regexes_initialize(); - let absolute_path = AbsolutePath::try_from( path )?; - let mut cargo_metadata = Workspace::with_crate_dir( CrateDir::try_from( absolute_path )? )?; - let workspace_root = workspace_root( &mut cargo_metadata )?; - let mut parameters = GlobalTableParameters::initialize_from_path( &workspace_root )?; + let workspace = Workspace::try_from( CrateDir::try_from( path )? )?; + let workspace_root = workspace.workspace_root(); + let mut parameters = GlobalTableOptions::initialize_from_path + ( + &workspace_root + )?; - let read_me_path = workspace_root.join( readme_path(&workspace_root ).ok_or_else( || format_err!( "Fail to find README.md" ) )?); + let read_me_path = workspace_root + .join( repository::readme_path( &workspace_root )? ); let mut file = OpenOptions::new() .read( true ) .write( true ) @@ -253,8 +299,18 @@ mod private .ok_or( format_err!( "Fail to parse group" ) )? .as_bytes() )?; - let params: TableParameters = query::parse( raw_table_params ).unwrap().into_map( vec![] ).into(); - let table = package_readme_health_table_generate( &mut cargo_metadata, ¶ms, &mut parameters )?; + let params: TableOptions = query::parse + ( + raw_table_params + ).unwrap() + .into_map( vec![] ) + .into(); + let table = package_readme_health_table_generate + ( + &workspace, + ¶ms, + &mut parameters + )?; tables.push( table ); tags_closures.push( ( open.end(), close.start() ) ); } @@ -266,11 +322,22 @@ mod private } /// Writes tables into a file at specified positions. 
- fn tables_write_into_file( tags_closures : Vec< ( usize, usize ) >, tables: Vec< String >, contents: Vec< u8 >, mut file: File ) -> Result< () >
+ fn tables_write_into_file
+ (
+ tags_closures : Vec< ( usize, usize ) >,
+ tables: Vec< String >,
+ contents: Vec< u8 >,
+ mut file: File
+ ) -> Result< (), HealthTableRenewError >
{
let mut buffer: Vec< u8 > = vec![];
let mut start: usize = 0;
- for ( ( end_of_start_tag, start_of_end_tag ), con ) in tags_closures.iter().zip( tables.iter() )
+ for
+ (
+ ( end_of_start_tag, start_of_end_tag ),
+ con
+ )
+ in tags_closures.iter().zip( tables.iter() )
{
range_to_target_copy( &*contents, &mut buffer, start, *end_of_start_tag )?;
range_to_target_copy( con.as_bytes(), &mut buffer, 0,con.len() - 1 )?;
@@ -285,24 +352,33 @@ mod private
/// Generate table from `table_parameters`.
/// Generate header, iterate over all modules in package (from table_parameters) and append row.
- fn package_readme_health_table_generate( cache : &mut Workspace, table_parameters: &TableParameters, parameters: & mut GlobalTableParameters ) -> Result< String, Error >
+ fn package_readme_health_table_generate
+ (
+ workspace : &Workspace,
+ table_parameters: &TableOptions,
+ parameters: &mut GlobalTableOptions,
+ ) -> Result< String, HealthTableRenewError >
{
let directory_names = directory_names
(
- cache
- .workspace_root()?
- .join( &table_parameters.base_path ),
- &cache
- .load()?
+ workspace
+ .workspace_root()
+ .join( &table_parameters.base_path ).to_path_buf(),
+ workspace
.packages()
- .map_err( | err | format_err!( err ) )?
)?;
let mut table = table_header_generate( parameters, &table_parameters );
for package_name in directory_names
{
let stability = if table_parameters.include_stability
{
- Some( stability_get( &cache.workspace_root()?.join( &table_parameters.base_path ).join( &package_name ) )? )
+ Some
+ (
+ stability_get
+ (
+ &workspace.workspace_root().join( &table_parameters.base_path ).join( &package_name )
+ )?
+ )
}
else
{
@@ -310,63 +386,125 @@ mod private
};
if parameters.core_url == ""
{
- let module_path = &cache.workspace_root()?.join( &table_parameters.base_path ).join( &package_name );
- parameters.core_url = repo_url( &module_path )
+ let module_path = workspace
+ .workspace_root()
+ .join( &table_parameters.base_path )
+ .join( &package_name );
+ // parameters.core_url = repo_url( &module_path )
+ parameters.core_url = repo_url( &module_path.clone().try_into()? )
.context
(
- format_err!( "Can not find Cargo.toml in {} or Fail to extract repository url from git remote.\n specify the correct path to the main repository in Cargo.toml of workspace (in the [workspace.metadata] section named repo_url) in {} OR in Cargo.toml of each module (in the [package] section named repository, specify the full path to the module) for example {} OR ensure that at least one remotest is present in git. ", module_path.display(), cache.workspace_root()?.join( "Cargo.toml" ).display(), module_path.join( "Cargo.toml" ).display() )
+ // aaa : for Petro : unreadable : check other lines of code which are long
+ // aaa : done
+ format_err!
+ (
+ "Cannot find Cargo.toml in {} or failed to extract repository url from git remote.\n\
Specify the correct path to the main repository in Cargo.toml of workspace (in the [workspace.metadata] section named repo_url) in {} \
OR in Cargo.toml of each module (in the [package] section named repository, specify the full path to the module) for example {} OR \
ensure that at least one remote is present in git. 
", + module_path.display(), + workspace.workspace_root().join( "Cargo.toml" ).display(), + module_path.join( "Cargo.toml" ).display() + ) )?; parameters.user_and_repo = url::git_info_extract( ¶meters.core_url )?; } - table.push_str( &row_generate(&package_name, stability.as_ref(), parameters, &table_parameters) ); + table.push_str + ( + &row_generate + ( + &package_name, + stability.as_ref(), + parameters, + &table_parameters + ) + ); } Ok( table ) } /// Return topologically sorted modules name, from packages list, in specified directory. - fn directory_names( path : PathBuf, packages : &[ workspace::WorkspacePackage ] ) -> Result< Vec< String > > + // fn directory_names( path : PathBuf, packages : &[ WorkspacePackageRef< '_ > ] ) -> Result< Vec< String > > + fn directory_names< 'a > + ( + path : PathBuf, + packages : impl Iterator< Item = WorkspacePackageRef< 'a > >, + ) -> Result< Vec< String >, HealthTableRenewError > { let path_clone = path.clone(); - let module_package_filter: Option< Box< dyn Fn( &workspace::WorkspacePackage ) -> bool > > = Some + let module_package_filter : Option< Box< dyn Fn( WorkspacePackageRef< '_ > ) -> bool > > = Some ( Box::new ( move | p | - p.publish().is_none() && p.manifest_path().starts_with( &path ) + { + let manifest_file = p.manifest_file(); + if let Ok( pa ) = manifest_file + { + p.publish().is_none() && pa.starts_with( &path ) + } + else + { + false + } + } // aaa : rid of unwraps + // aaa : done ) ); - let module_dependency_filter: Option< Box< dyn Fn( &workspace::WorkspacePackage, &workspace::Dependency ) -> bool > > = Some + let module_dependency_filter : Option< Box< dyn Fn( WorkspacePackageRef< '_ >, DependencyRef< '_ > ) -> bool > > = Some ( Box::new ( move | _, d | - d.path().is_some() && d.kind() != workspace::DependencyKind::Development && d.path().as_ref().unwrap().starts_with( &path_clone ) + d.crate_dir().is_some() && + d.kind() != + DependencyKind::Development && + d.crate_dir().as_ref().unwrap().starts_with( &path_clone ) ) ); let module_packages_map = packages::filter ( packages, - packages::FilterMapOptions { package_filter: module_package_filter, dependency_filter: module_dependency_filter }, + packages::FilterMapOptions + { + package_filter : module_package_filter, + dependency_filter : module_dependency_filter + }, ); let module_graph = graph::construct( &module_packages_map ); - let names = graph::topological_sort_with_grouping( module_graph ) + let names : Vec< String > = graph::topological_sort_with_grouping( module_graph ) .into_iter() .map - ( - | mut group | + ( + | mut group | { group.sort(); - group - } - ).flatten().collect::< Vec< _ > >(); - - Ok(names) + group + } + ) + .flatten() + .map( | n | n.to_string() ) + .collect(); + + Ok( names ) } /// Generate row that represents a module, with a link to it in the repository and optionals for stability, branches, documentation and links to the gitpod. - fn row_generate( module_name : &str, stability : Option< &Stability >, parameters : &GlobalTableParameters, table_parameters : &TableParameters ) -> String + fn row_generate + ( + module_name : &str, + stability : Option< &Stability >, + parameters : &GlobalTableOptions, + table_parameters : &TableOptions + ) -> String { - let mut rou = format!( "| [{}]({}/{}) |", &module_name, &table_parameters.base_path, &module_name ); + let mut rou = format! 
+ ( + "| [{}]({}/{}) |", + &module_name, + &table_parameters.base_path, + &module_name + ); if table_parameters.include_stability { let mut stability = stability_generate( &stability.as_ref().unwrap() ); @@ -379,7 +517,14 @@ mod private } if table_parameters.include_docs { - rou.push_str( &format!( " [![docs.rs](https://raster.shields.io/static/v1?label=&message=docs&color=eee)](https://docs.rs/{}) |", &module_name ) ); + rou.push_str + ( + &format! + ( + " [![docs.rs](https://raster.shields.io/static/v1?label=&message=docs&color=eee)](https://docs.rs/{}) |", + &module_name + ) + ); } if table_parameters.include { @@ -389,12 +534,19 @@ mod private let example = if let Some( name ) = find_example_file( p.as_path(), &module_name ) { let path = path.to_string_lossy().replace( '\\', "/" ).replace( "/", "%2F" ); - let tmp = name.replace( '\\', "/" ); + let tmp = name.to_string_lossy().replace( '\\', "/" ); let file_name = tmp.split( '/' ).last().unwrap(); let name = file_name.strip_suffix( ".rs" ).unwrap(); - format!( "[![Open in Gitpod](https://raster.shields.io/static/v1?label=&message=try&color=eee)](https://gitpod.io/#RUN_PATH=.,SAMPLE_FILE={path}%2Fexamples%2F{file_name},RUN_POSTFIX=--example%20{name}/{})", parameters.core_url ) + format! + ( + "[![Open in Gitpod](https://raster.shields.io/static/v1?label=&message=try&color=eee)](https://gitpod.io/#RUN_PATH=.,SAMPLE_FILE={}%2Fexamples%2F{},RUN_POSTFIX=--example%20{}/{})", + path, + file_name, + name, + parameters.core_url, + ) } - else + else { "".into() }; @@ -402,26 +554,26 @@ mod private } format!( "{rou}\n" ) } - + /// todo - pub fn find_example_file(base_path : &Path, module_name : &str ) -> Option< String > + pub fn find_example_file( base_path : &Path, module_name : &str ) -> Option< PathBuf > { let examples_dir = base_path.join("examples" ); if examples_dir.exists() && examples_dir.is_dir() { - if let Ok( entries ) = std::fs::read_dir( &examples_dir ) + if let Ok( entries ) = std::fs::read_dir( &examples_dir ) { - for entry in entries + for entry in entries { - if let Ok( entry ) = entry + if let Ok( entry ) = entry { let file_name = entry.file_name(); - if let Some( file_name_str ) = file_name.to_str() + if let Some( file_name_str ) = file_name.to_str() { - if file_name_str == format!( "{module_name}_trivial.rs" ) + if file_name_str == format!( "{module_name}_trivial.rs" ) { - return Some( entry.path().to_string_lossy().into() ) + return Some( entry.path() ) } } } @@ -430,18 +582,18 @@ mod private } // If module_trivial.rs doesn't exist, return any other file in the examples directory - if let Ok( entries ) = std::fs::read_dir( &examples_dir ) + if let Ok( entries ) = std::fs::read_dir( &examples_dir ) { - for entry in entries + for entry in entries { - if let Ok( entry ) = entry + if let Ok( entry ) = entry { let file_name = entry.file_name(); - if let Some( file_name_str ) = file_name.to_str() + if let Some( file_name_str ) = file_name.to_str() { - if file_name_str.ends_with( ".rs" ) + if file_name_str.ends_with( ".rs" ) { - return Some( entry.path().to_string_lossy().into() ) + return Some( entry.path() ) } } } @@ -465,7 +617,11 @@ mod private } /// Generate table header - fn table_header_generate( parameters : &GlobalTableParameters, table_parameters : &TableParameters ) -> String + fn table_header_generate + ( + parameters : &GlobalTableOptions, + table_parameters : &TableOptions + ) -> String { let mut header = String::from( "| Module |" ); let mut separator = String::from( "|--------|" ); @@ -504,7 +660,7 @@ mod private } 
/// Generate cells for each branch - fn branch_cells_generate( table_parameters: &GlobalTableParameters, module_name: &str ) -> String + fn branch_cells_generate( table_parameters : &GlobalTableOptions, module_name : &str ) -> String { let cells = table_parameters .branches @@ -514,20 +670,29 @@ mod private .map ( | b | - format!( "[![rust-status](https://img.shields.io/github/actions/workflow/status/{}/module_{}_push.yml?label=&branch={b})]({}/actions/workflows/module_{}_push.yml?query=branch%3A{})", table_parameters.user_and_repo, &module_name.to_case( Case::Snake ), table_parameters.core_url, &module_name.to_case( Case::Snake ), b ) + format! + ( + "[![rust-status](https://img.shields.io/github/actions/workflow/status/{}/module_{}_push.yml?label=&branch={})]({}/actions/workflows/module_{}_push.yml?query=branch%3A{})", + table_parameters.user_and_repo, + &module_name.to_case( convert_case::Case::Snake ), + b, + table_parameters.core_url, + &module_name.to_case( convert_case::Case::Snake ), + b, + ) ) .collect::< Vec< String > >() .join( " | " ); format!( " {cells} |" ) } - /// Return workspace root - pub fn workspace_root( metadata : &mut Workspace ) -> Result< PathBuf > - { - Ok( metadata.load()?.workspace_root()?.to_path_buf() ) - } - - fn range_to_target_copy< T : Clone >( source : &[ T ], target : &mut Vec< T >, from : usize, to : usize ) -> Result< () > + fn range_to_target_copy< T : Clone > + ( + source : &[ T ], + target : &mut Vec< T >, + from : usize, + to : usize + ) -> Result< (), HealthTableRenewError > { if from < source.len() && to < source.len() && from <= to { @@ -536,66 +701,15 @@ mod private } else { - bail!( "Incorrect indexes" ) - } - } - - /// Searches for a README file in specific subdirectories of the given directory path. - /// - /// This function attempts to find a README file in the following subdirectories: ".github", - /// the root directory, and "./docs". It returns the path to the first found README file, or - /// `None` if no README file is found in any of these locations. - pub fn readme_path( dir_path : &Path ) -> Option< PathBuf > - { - if let Some( path ) = readme_in_dir_find( &dir_path.join( ".github" ) ) - { - Some( path ) - } - else if let Some( path ) = readme_in_dir_find( dir_path ) - { - Some( path ) - } - else if let Some( path ) = readme_in_dir_find( &dir_path.join( "docs" ) ) - { - Some( path ) - } - else - { - None + Err( HealthTableRenewError::Common( wError::msg( "Incorrect indexes" ))) } } - - /// Searches for a file named "readme.md" in the specified directory path. - /// - /// Given a directory path, this function searches for a file named "readme.md" in the specified - /// directory. - fn readme_in_dir_find( path : &Path ) -> Option< PathBuf > - { - read_dir( path ) - .ok()? - .filter_map( Result::ok ) - .filter( | p | p.path().is_file() ) - .filter_map( | f | - { - let l_f = f.file_name().to_ascii_lowercase(); - if l_f == "readme.md" - { - return Some( f.file_name() ) - } - None - }) - .max() - .map( PathBuf::from ) - } - } crate::mod_interface! 
{ - /// Return workspace root - protected use workspace_root; - /// Find readme.md file in directory - protected use readme_path; + // /// Return workspace root + // protected use workspace_root; /// Stability protected use Stability; /// Generate Stability badge diff --git a/module/move/willbe/src/action/readme_modules_headers_renew.rs b/module/move/willbe/src/action/readme_modules_headers_renew.rs index 5809369b10..b44e6de2d5 100644 --- a/module/move/willbe/src/action/readme_modules_headers_renew.rs +++ b/module/move/willbe/src/action/readme_modules_headers_renew.rs @@ -1,62 +1,84 @@ mod private { use crate::*; - use _path::AbsolutePath; - use action::readme_health_table_renew::{ readme_path, Stability, stability_generate, find_example_file }; + use std:: + { + borrow::Cow, + fs::OpenOptions, + fmt, + io:: + { + Read, + Seek, + Write, + SeekFrom, + } + }; + use collection::BTreeSet; + // use path::AbsolutePath; + use action::readme_health_table_renew::{ Stability, stability_generate, find_example_file }; use package::Package; - use wtools::error:: + use error:: { err, - for_app:: - { - Result, + untyped:: + { + Result, Error as wError, Context, }, }; - use std::borrow::Cow; - use std::collections::BTreeSet; - use std::fmt::{Display, Formatter}; - use std::fs::{ OpenOptions }; - use std::io::{ Read, Seek, SeekFrom, Write }; + // aaa : for Petro : group properly, don't repeat std:: + // aaa : done use std::path::PathBuf; use convert_case::{ Case, Casing }; + // use rayon::scope_fifo; use regex::Regex; - use entity::WorkspaceError; - use manifest::private::CrateDirError; + use entity::{ WorkspaceInitError, PathError }; use package::PackageError; - use error_tools::for_lib::Error; - use error_tools::dependency::*; - // aaa : for Petro : rid off crate::x. ask - // aaa : add `use crate::*` first + use error::typed::Error; + use workspace_md_extension::WorkspaceMdExtension; + // use error::ErrWith; static TAGS_TEMPLATE : std::sync::OnceLock< Regex > = std::sync::OnceLock::new(); fn regexes_initialize() { - TAGS_TEMPLATE.set( Regex::new( r"(.|\n|\r\n)+" ).unwrap() ).ok(); + TAGS_TEMPLATE.set + ( + Regex::new + ( + r"(.|\n|\r\n)+" + ).unwrap() + ).ok(); } /// Report. #[ derive( Debug, Default, Clone ) ] pub struct ModulesHeadersRenewReport - { - found_files : BTreeSet< PathBuf >, + { + found_files : BTreeSet< PathBuf >, touched_files : BTreeSet< PathBuf >, } - impl Display for ModulesHeadersRenewReport + impl fmt::Display for ModulesHeadersRenewReport { - fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result + fn fmt( &self, f : &mut fmt::Formatter< '_ > ) -> fmt::Result { - if self.touched_files.len() < self.found_files.len() + if self.touched_files.len() < self.found_files.len() { - writeln!( f, "Something went wrong.\n{}/{} was touched.", self.found_files.len(), self.touched_files.len() )?; + writeln! + ( + f, + "Something went wrong.\n{}/{} was touched.", + self.found_files.len(), + self.touched_files.len() + )?; return Ok(()) } writeln!( f, "Touched files :" )?; let mut count = self.found_files.len(); - for path in &self.touched_files + for path in &self.touched_files { if let Some( file_path ) = path.to_str() { @@ -72,19 +94,26 @@ mod private } } + /// The `ModulesHeadersRenewError` enum represents the various errors that can occur during + /// the renewal of module headers. #[ derive( Debug, Error ) ] pub enum ModulesHeadersRenewError { + /// Represents a common error. #[ error( "Common error: {0}" ) ] Common(#[ from ] wError ), + /// Represents an I/O error. 
#[ error( "I/O error: {0}" ) ] IO( #[ from ] std::io::Error ), + /// Represents an error related to workspace initialization. #[ error( "Workspace error: {0}" ) ] - Workspace( #[ from ] WorkspaceError), + Workspace( #[ from ] WorkspaceInitError ), + /// Represents an error related to a package. #[ error( "Package error: {0}" ) ] - Package( #[ from ] PackageError), + Package( #[ from ] PackageError ), + /// Represents an error related to directory paths. #[ error( "Directory error: {0}" ) ] - Directory( #[ from ] CrateDirError ), + Directory( #[ from ] PathError ), } /// The `ModuleHeader` structure represents a set of parameters, used for creating url for header. @@ -101,22 +130,28 @@ mod private { /// Create `ModuleHeader` instance from the folder where Cargo.toml is stored. - fn from_cargo_toml( package : Package, default_discord_url : &Option< String > ) -> Result< Self, ModulesHeadersRenewError > + fn from_cargo_toml< 'a > + ( + package : Package< 'a >, + default_discord_url : &Option< String >, + ) + -> Result< Self, ModulesHeadersRenewError > { let stability = package.stability()?; - let module_name = package.name()?; + let repository_url = package.repository()? + .ok_or_else::< wError, _ >( || err!( "Fail to find repository_url in module`s Cargo.toml" ) )?; - let repository_url = package.repository()?.ok_or_else::< wError, _ >( || err!( "Fail to find repository_url in module`s Cargo.toml" ) )?; - - let discord_url = package.discord_url()?.or_else( || default_discord_url.clone() ); + let discord_url = package + .discord_url()? + .or_else( || default_discord_url.clone() ); Ok ( Self { - module_path: package.manifest_path().parent().unwrap().as_ref().to_path_buf(), + module_path: package.manifest_file().parent().unwrap().as_ref().to_path_buf(), stability, - module_name, + module_name : module_name.to_string(), repository_url, discord_url, } @@ -127,18 +162,42 @@ mod private fn to_header( self, workspace_path : &str ) -> Result< String, ModulesHeadersRenewError > { let discord = self.discord_url.map( | discord_url | - format!( " [![discord](https://img.shields.io/discord/872391416519737405?color=eee&logo=discord&logoColor=eee&label=ask)]({discord_url})" ) + format! + ( + " [![discord](https://img.shields.io/discord/872391416519737405?color=eee&logo=discord&logoColor=eee&label=ask)]({})", + discord_url + ) ) .unwrap_or_default(); - let repo_url = url::extract_repo_url( &self.repository_url ).and_then( | r | url::git_info_extract( &r ).ok() ).ok_or_else::< wError, _ >( || err!( "Fail to parse repository url" ) )?; - let example = if let Some( name ) = find_example_file( self.module_path.as_path(), &self.module_name ) + let repo_url = url::repo_url_extract( &self.repository_url ) + .and_then( | r | url::git_info_extract( &r ).ok() ) + .ok_or_else::< wError, _ >( || err!( "Fail to parse repository url" ) )?; + let example= if let Some( name ) = find_example_file + ( + self.module_path.as_path(), + &self.module_name + ) { - // qqq : for Petro : Hardcoded Strings, would be better to use `PathBuf` to avoid separator mismatch on Windows and Unix - let p = name.strip_prefix( workspace_path ).unwrap().get( 1.. ).unwrap().replace( "\\","%2F" ); - let name = name.replace( "/", "\\" ); - let name = name.split( "\\" ).last().unwrap().split( "." 
).next().unwrap(); - format!( " [![Open in Gitpod](https://raster.shields.io/static/v1?label=try&message=online&color=eee&logo=gitpod&logoColor=eee)](https://gitpod.io/#RUN_PATH=.,SAMPLE_FILE={p},RUN_POSTFIX=--example%20{}/https://github.com/{})", name, repo_url ) + let relative_path = proper_path_tools::path::path_relative + ( + workspace_path.try_into().unwrap(), + name + ) + .to_string_lossy() + .to_string(); + #[ cfg( target_os = "windows" ) ] + let relative_path = relative_path.replace( "\\", "/" ); + // aaa : for Petro : use path_toools + // aaa : used + let p = relative_path.replace( "/","%2F" ); + format! + ( + " [![Open in Gitpod](https://raster.shields.io/static/v1?label=try&message=online&color=eee&logo=gitpod&logoColor=eee)](https://gitpod.io/#RUN_PATH=.,SAMPLE_FILE={},RUN_POSTFIX=--example%20{}/https://github.com/{})", + p, + p, + repo_url + ) } else { @@ -180,32 +239,64 @@ mod private /// [![experimental](https://raster.shields.io/static/v1?label=&message=experimental&color=orange)](https://github.com/emersion/stability-badges#experimental) | [![rust-status](https://github.com/Username/test/actions/workflows/ModuleChainOfPackagesAPush.yml/badge.svg)](https://github.com/Username/test/actions/workflows/ModuleChainOfPackagesAPush.yml)[![docs.rs](https://img.shields.io/docsrs/_chain_of_packages_a?color=e3e8f0&logo=docs.rs)](https://docs.rs/_chain_of_packages_a)[![Open in Gitpod](https://raster.shields.io/static/v1?label=try&message=online&color=eee&logo=gitpod&logoColor=eee)](https://gitpod.io/#RUN_PATH=.,SAMPLE_FILE=sample%2Frust%2F_chain_of_packages_a_trivial%2Fsrc%2Fmain.rs,RUN_POSTFIX=--example%20_chain_of_packages_a_trivial/https://github.com/Username/test) /// /// ``` - pub fn readme_modules_headers_renew( path : AbsolutePath ) -> Result< ModulesHeadersRenewReport, ( ModulesHeadersRenewReport, ModulesHeadersRenewError ) > + pub fn readme_modules_headers_renew( crate_dir : CrateDir ) + -> ResultWithReport< ModulesHeadersRenewReport, ModulesHeadersRenewError > + // -> Result< ModulesHeadersRenewReport, ( ModulesHeadersRenewReport, ModulesHeadersRenewError ) > + // xxx : newtype { let mut report = ModulesHeadersRenewReport::default(); regexes_initialize(); - let cargo_metadata = Workspace::with_crate_dir( CrateDir::try_from( path ).map_err( | e | ( report.clone(), e.into() ) )? ).map_err( | e | ( report.clone(), e.into() ) )?; - let discord_url = cargo_metadata.discord_url().map_err( | e | ( report.clone(), e.into() ) )?; - let paths = cargo_metadata.packages().map_err( | e | ( report.clone(), e.into() ) )?.into_iter().filter_map( | p | AbsolutePath::try_from( p.manifest_path() ).ok()).collect::< Vec< _ > >(); - report.found_files = paths.iter().map( | ap | ap.as_ref().to_path_buf() ).collect(); + let workspace = Workspace::try_from( crate_dir ) + .err_with( || report.clone() )?; // xxx : qqq : use trait. everywhere + let discord_url = workspace.discord_url(); + + // qqq : inspect each collect in willbe and rid of most of them + + let paths : Vec< AbsolutePath > = workspace + .packages() + .filter_map( | p | p.manifest_file().ok().and_then( | a | Some( a.inner() ) ) ) + .collect(); + + report.found_files = paths + .iter() + .map( | ap | ap.as_ref().to_path_buf() ) + .collect(); + for path in paths { let read_me_path = path .parent() .unwrap() - .join( readme_path( path.parent().unwrap().as_ref() ).ok_or_else::< wError, _ >( || err!( "Fail to find README.md" ) ).map_err( | e | ( report.clone(), e.into() ) )? 
); + .join + ( + repository::readme_path( path.parent().unwrap().as_ref() ) + // .ok_or_else::< wError, _ >( || err!( "Fail to find README.md at {}", &path ) ) + .err_with( || report.clone() )? + ); - let pakage = Package::try_from( path.clone() ).map_err( | e | ( report.clone(), e.into() ) )?; - let header = ModuleHeader::from_cargo_toml( pakage.into(), &discord_url ).map_err( | e | ( report.clone(), e.into() ) )?; + let pakage = Package::try_from + ( + CrateDir::try_from + ( + &path + .parent() + .unwrap() + ) + .err_with( || report.clone() )? + ) + .err_with( || report.clone() )?; + + let header = ModuleHeader::from_cargo_toml( pakage.into(), &discord_url ) + .err_with( || report.clone() )?; let mut file = OpenOptions::new() .read( true ) .write( true ) .open( &read_me_path ) - .map_err( | e | ( report.clone(), e.into() ) )?; + .err_with( || report.clone() )?; let mut content = String::new(); - file.read_to_string( &mut content ).map_err( | e | ( report.clone(), e.into() ) )?; + file.read_to_string( &mut content ).err_with( || report.clone() )?; let raw_params = TAGS_TEMPLATE .get() @@ -217,20 +308,44 @@ mod private _ = query::parse( raw_params ).context( "Fail to parse raw params." ); - let content = header_content_generate( &content, header, raw_params, cargo_metadata.workspace_root().map_err( | e | ( report.clone(), e.into() ) )?.to_str().unwrap() ).map_err( | e | ( report.clone(), e.into() ) )?; + let content = header_content_generate + ( + &content, + header, + raw_params, + workspace.workspace_root().to_str().unwrap() + ).err_with( || report.clone() )?; - file.set_len( 0 ).map_err( | e | ( report.clone(), e.into() ) )?; - file.seek( SeekFrom::Start( 0 ) ).map_err( | e | ( report.clone(), e.into() ) )?; - file.write_all( content.as_bytes() ).map_err( | e | ( report.clone(), e.into() ) )?; + file.set_len( 0 ).err_with( || report.clone() )?; + file.seek( SeekFrom::Start( 0 ) ).err_with( || report.clone() )?; + file.write_all( content.as_bytes() ).err_with( || report.clone() )?; report.touched_files.insert( path.as_ref().to_path_buf() ); } Ok( report ) } - fn header_content_generate< 'a >( content : &'a str, header : ModuleHeader, raw_params : &str, workspace_root : &str ) -> Result< Cow< 'a, str > > + fn header_content_generate< 'a > + ( + content : &'a str, + header : ModuleHeader, + raw_params : &str, + workspace_root : &str + ) -> Result< Cow< 'a, str > > { let header = header.to_header( workspace_root )?; - let result = TAGS_TEMPLATE.get().unwrap().replace( &content, &format!( "\n{header}\n" ) ); + let result = TAGS_TEMPLATE + .get() + .unwrap() + .replace + ( + &content, + &format! + ( + "\n{}\n", + raw_params, + header + ) + ); Ok( result ) } } @@ -241,4 +356,6 @@ crate::mod_interface! orphan use readme_modules_headers_renew; /// report orphan use ModulesHeadersRenewReport; + /// Error. 
+ orphan use ModulesHeadersRenewError; } \ No newline at end of file diff --git a/module/move/willbe/src/action/test.rs b/module/move/willbe/src/action/test.rs index 7ad9a532a4..86d9a09f95 100644 --- a/module/move/willbe/src/action/test.rs +++ b/module/move/willbe/src/action/test.rs @@ -2,31 +2,26 @@ mod private { use crate::*; - use test::*; - use _path::AbsolutePath; + use entity::test::{ TestPlan, TestOptions, TestsReport, tests_run }; - use std::collections::HashSet; + // use test::*; + // qqq : for Petro : no asterisks imports + // qqq : for Petro : bad : not clear what is imported, there are multiple filles with name test + use collection::HashSet; use std::{ env, fs }; - #[ cfg( feature = "progress_bar" ) ] - use indicatif::{ MultiProgress, ProgressStyle }; - use former::Former; - use wtools:: + use error:: { - error:: + untyped:: { - for_app:: - { - Error, - format_err - }, - Result + Error, + format_err }, - iter::Itertools, + Result }; - use workspace::WorkspacePackage; + use iter::Itertools; /// Used to store arguments for running tests. /// @@ -68,27 +63,36 @@ mod private /// It is possible to enable and disable various features of the crate. /// The function also has the ability to run tests in parallel using `Rayon` crate. /// The result of the tests is written to the structure `TestsReport` and returned as a result of the function execution. - pub fn test( args : TestsCommandOptions, dry : bool ) -> Result< TestsReport, ( TestsReport, Error ) > + // zzz : it probably should not be here + // xxx : use newtype + pub fn test( o : TestsCommandOptions, dry : bool ) -> Result< TestsReport, ( TestsReport, Error ) > { - #[ cfg( feature = "progress_bar" ) ] - let multiprocess = MultiProgress::new(); - #[ cfg( feature = "progress_bar" ) ] - let style = ProgressStyle::with_template - ( - "[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}", - ) - .unwrap() - .progress_chars( "##-" ); - let mut reports = TestsReport::default(); + // qqq : incapsulate progress bar logic into some function of struct. don't keep it here + // aaa : done + + let mut report = TestsReport::default(); // fail fast if some additional installations required - let channels = channel::available_channels( args.dir.as_ref() ).map_err( | e | ( reports.clone(), e ) )?; - let channels_diff = args.channels.difference( &channels ).collect::< Vec< _ > >(); + let channels = channel::available_channels( o.dir.as_ref() ) + .err_with( || report.clone() )?; + let channels_diff : Vec< _ > = o.channels.difference( &channels ).collect(); if !channels_diff.is_empty() { - return Err(( reports, format_err!( "Missing toolchain(-s) that was required : [{}]. Try to install it with `rustup install {{toolchain name}}` command(-s)", channels_diff.into_iter().join( ", " ) ) )) + // aaa : for Petro : non readable + // aaa : readable and with actual command + return Err + (( + report, + format_err! + ( + "Missing toolchain(-s) that was required : [{}]. \ +Try to install it with `rustup install {}` command(-s)", + channels_diff.iter().join( ", " ), + channels_diff.iter().join( " " ) + ) + )) } - reports.dry = dry; + report.dry = dry; let TestsCommandOptions { dir : _ , @@ -104,13 +108,47 @@ mod private optimizations, variants_cap, with_progress, - } = args; + } = o; - let packages = needed_packages( args.dir.clone() ).map_err( | e | ( reports.clone(), e ) )?; + // zzz : watch and review after been ready + // aaa : for Petro : use relevant entity. 
use either, implement TryFrom< Either< CrateDir, ManifestFile > > + // aaa : done + // qqq : for Petro : nonsense + let path = match EitherDirOrFile::try_from( o.dir.as_ref() ).map_err( | e | ( report.clone(), e.into() ) )?.inner() + { + data_type::Either::Left( crate_dir ) => crate_dir, + data_type::Either::Right( manifest ) => CrateDir::from( manifest ) + }; + + let workspace = Workspace + ::try_from( CrateDir::try_from( path.clone() ).err_with( || report.clone() )? ) + .err_with( || report.clone() )? + // xxx : clone? + // aaa : for Petro : use trait !everywhere! + // aaa : !When I wrote this solution, pr with this changes was not yet ready.! + ; + + // let packages = needed_packages( &workspace ); + let packages = workspace + .packages() + .filter + ( + move | p | + p + .manifest_file() + .is_ok() && + p. + manifest_file() + .unwrap() + .starts_with( path.as_ref() ) + ) + // aaa : for Petro : too long line + // aaa : done + ; let plan = TestPlan::try_from ( - &packages, + packages, &channels, power, include_features, @@ -120,16 +158,18 @@ mod private with_all_features, with_none_features, variants_cap, - ).map_err( | e | ( reports.clone(), e ) )?; + ).err_with( || report.clone() )?; println!( "{plan}" ); + // aaa : split on two functions for create plan and for execute + // aaa : it's already separated, look line: 203 : let result = tests_run( &options ); let temp_path = if temp { let mut unique_name = format! ( "temp_dir_for_test_command_{}", - path_tools::path::unique_folder_name().map_err( | e | ( reports.clone(), e.into() ) )? + path::unique_folder_name().err_with( || report.clone() )? ); let mut temp_dir = env::temp_dir().join( unique_name ); @@ -139,12 +179,12 @@ mod private unique_name = format! ( "temp_dir_for_test_command_{}", - path_tools::path::unique_folder_name().map_err( | e | ( reports.clone(), e.into() ) )? + path::unique_folder_name().err_with( || report.clone() )? ); temp_dir = env::temp_dir().join( unique_name ); } - fs::create_dir( &temp_dir ).map_err( | e | ( reports.clone(), e.into() ) )?; + fs::create_dir( &temp_dir ).err_with( || report.clone() )?; Some( temp_dir ) } else @@ -156,49 +196,20 @@ mod private .concurrent( concurrent ) .plan( plan ) .option_temp( temp_path ) - .dry( dry ); - - #[ cfg( feature = "progress_bar" ) ] - let test_options_former = if with_progress - { - let test_options_former = test_options_former.feature( TestOptionsProgressBarFeature{ multiprocess, style } ); - test_options_former - } - else - { - test_options_former - }; + .dry( dry ) + .with_progress( with_progress ); let options = test_options_former.form(); let result = tests_run( &options ); if temp { - fs::remove_dir_all( options.temp_path.unwrap() ).map_err( | e | ( reports.clone(), e.into() ) )?; + fs::remove_dir_all( options.temp_path.unwrap() ).err_with( || report.clone() )?; } - result + result.map_err( | ( report, e) | ( report, e.into() ) ) } - fn needed_packages( path : AbsolutePath ) -> Result< Vec< WorkspacePackage > > - { - let path = if path.as_ref().file_name() == Some( "Cargo.toml".as_ref() ) - { - path.parent().unwrap() - } - else - { - path - }; - let metadata = Workspace::with_crate_dir( CrateDir::try_from( path.clone() )? )?; - - let result = metadata - .packages()? - .into_iter() - .filter( move | x | x.manifest_path().starts_with( path.as_ref() ) ) - .collect(); - Ok( result ) - } } crate::mod_interface! 
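The `.err_with( || report.clone() )` calls in the hunk above replace the repetitive `.map_err( | e | ( report.clone(), e.into() ) )` pattern: on failure, a lazily built snapshot of the report is paired with the converted error, so callers receive `( TestsReport, Error )`. Below is a minimal sketch of what such an extension trait could look like, assuming this shape; the actual trait ships with willbe's error module, and its exact name, bounds, and location are not shown in this patch.

pub trait ErrWith< ReportT, T, E >
{
  /// Pair the error with a lazily built report, converting the error type on the way.
  fn err_with< F >( self, f : F ) -> core::result::Result< T, ( ReportT, E ) >
  where
    F : FnOnce() -> ReportT;
}

impl< ReportT, T, E1, E2 > ErrWith< ReportT, T, E2 > for core::result::Result< T, E1 >
where
  E1 : Into< E2 >,
{
  fn err_with< F >( self, f : F ) -> core::result::Result< T, ( ReportT, E2 ) >
  where
    F : FnOnce() -> ReportT,
  {
    self.map_err( | e | ( f(), e.into() ) )
  }
}

// Usage mirroring the calls above ( illustrative only ) :
// let channels = channel::available_channels( o.dir.as_ref() ).err_with( || report.clone() )?;
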
diff --git a/module/move/willbe/src/action/workspace_renew.rs b/module/move/willbe/src/action/workspace_renew.rs index d03fcda04d..707ea7423d 100644 --- a/module/move/willbe/src/action/workspace_renew.rs +++ b/module/move/willbe/src/action/workspace_renew.rs @@ -3,10 +3,14 @@ mod private use crate::*; use std::fs; use std::path::Path; - use error_tools::for_app::bail; - use error_tools::Result; - use wtools::iter::Itertools; - use crate::template::{Template, TemplateFileDescriptor, TemplateFiles, TemplateFilesBuilder, TemplateParameters, TemplateValues}; + use error::untyped::bail; + use error::Result; + // qqq : group dependencies + use iter::Itertools; + use template:: + { + TemplateFileDescriptor, TemplateFiles, TemplateFilesBuilder, TemplateParameters, TemplateValues + }; /// Template for creating workspace files. #[ derive( Debug ) ] @@ -17,45 +21,54 @@ mod private values : TemplateValues, } - impl Template for WorkspaceTemplate + impl WorkspaceTemplate { - fn create_all( self, path : &Path ) -> Result< () > - { - self.files.create_all( path, &self.values ) - } - - fn parameters( &self ) -> &TemplateParameters + /// Returns template parameters + pub fn get_parameters( &self ) -> &TemplateParameters { &self.parameters } + } - fn set_values( &mut self, values : TemplateValues ) - { - self.values = values - } + // impl Template for WorkspaceTemplate + // { + // fn create_all( self, path : &Path ) -> Result< () > + // { + // self.files.create_all( path, &self.values ) + // } - fn parameter_storage( &self ) -> &Path - { - "./.workspace_template.toml".as_ref() - } + // fn parameters( &self ) -> &TemplateParameters + // { + // &self.parameters + // } - fn template_name( &self ) -> &'static str - { - "workspace" - } + // fn set_values( &mut self, values : TemplateValues ) + // { + // self.values = values + // } - fn get_values( &self ) -> &TemplateValues - { - &self.values - } + // fn parameter_storage( &self ) -> &Path + // { + // "./.workspace_template.toml".as_ref() + // } - fn get_values_mut( &mut self ) -> &mut TemplateValues - { - &mut self.values - } + // fn template_name( &self ) -> &'static str + // { + // "workspace" + // } + // fn get_values( &self ) -> &TemplateValues + // { + // &self.values + // } - } + // fn get_values_mut( &mut self ) -> &mut TemplateValues + // { + // &mut self.values + // } + + + // } impl Default for WorkspaceTemplate { @@ -79,25 +92,65 @@ mod private /// /// Default implementation contains all required files. 
#[ derive( Debug ) ] - pub struct WorkspaceTemplateFiles(Vec< TemplateFileDescriptor > ); + pub struct WorkspaceTemplateFiles( Vec< TemplateFileDescriptor > ); impl Default for WorkspaceTemplateFiles { fn default() -> Self { let formed = TemplateFilesBuilder::former() - .file().data( include_str!( "../../template/workspace/.gitattributes" ) ).path( "./.gitattributes" ).end() - .file().data( include_str!( "../../template/workspace/.gitignore1" ) ).path( "./.gitignore" ).end() - .file().data( include_str!( "../../template/workspace/.gitpod.yml" ) ).path( "./.gitpod.yml" ).end() - .file().data( include_str!( "../../template/workspace/Cargo.hbs" ) ).path( "./Cargo.toml" ).is_template( true ).end() - .file().data( include_str!( "../../template/workspace/Makefile" ) ).path( "./Makefile" ).end() - .file().data( include_str!( "../../template/workspace/Readme.md" ) ).path( "./Readme.md" ).end() - .file().data( include_str!( "../../template/workspace/.cargo/config.toml" ) ).path( "./.cargo/config.toml" ).end() - .file().data( include_str!( "../../template/workspace/module/module1/Cargo.toml.x" ) ).path( "./module/Cargo.toml" ).end() - .file().data( include_str!( "../../template/workspace/module/module1/Readme.md" ) ).path( "./module/module1/Readme.md" ).end() - .file().data( include_str!( "../../template/workspace/module/module1/examples/module1_example.rs" ) ).path( "./module/module1/examples/module1_example.rs" ).end() - .file().data( include_str!( "../../template/workspace/module/module1/src/lib.rs" ) ).path( "./module/module1/src/lib.rs" ).end() - .file().data( include_str!( "../../template/workspace/module/module1/tests/hello_test.rs" ) ).path( "./module/module1/tests/hello_test.rs" ).end() + .file() + .data( include_str!( "../../template/workspace/.gitattributes" ) ) + .path( "./.gitattributes" ) + .end() + .file() + .data( include_str!( "../../template/workspace/.gitignore1" ) ) + .path( "./.gitignore" ) + .end() + .file() + .data( include_str!( "../../template/workspace/.gitpod.yml" ) ) + .path( "./.gitpod.yml" ) + .end() + .file() + .data( include_str!( "../../template/workspace/Cargo.hbs" ) ) + .path( "./Cargo.toml" ) + .is_template( true ) + .end() + .file() + .data( include_str!( "../../template/workspace/Makefile" ) ) + .path( "./Makefile" ) + .end() + .file() + .data( include_str!( "../../template/workspace/Readme.md" ) ) + .path( "./Readme.md" ) + .end() + .file() + .data( include_str!( "../../template/workspace/.cargo/config.toml" ) ) + .path( "./.cargo/config.toml" ) + .end() + .file() + .data( include_str!( "../../template/workspace/module/module1/Cargo.toml.x" ) ) + .path( "./module/Cargo.toml" ) + .end() + .file() + .data( include_str!( "../../template/workspace/module/module1/Readme.md" ) ) + .path( "./module/module1/Readme.md" ) + .end() + .file() + .data + ( + include_str!( "../../template/workspace/module/module1/examples/module1_example.rs" ) + ) + .path( "./module/module1/examples/module1_example.rs" ) + .end() + .file() + .data( include_str!( "../../template/workspace/module/module1/src/lib.rs" ) ) + .path( "./module/module1/src/lib.rs" ) + .end() + .file() + .data( include_str!( "../../template/workspace/module/module1/tests/hello_test.rs" ) ) + .path( "./module/module1/tests/hello_test.rs" ) + .end() .form(); Self( formed.files ) @@ -117,21 +170,43 @@ mod private } } + // zzz // qqq : for Petro : should return report // qqq : for Petro : should have typed error - // aaa : parametrized templates?? 
- // aaa : use Viktor lib /// Creates workspace template - pub fn workspace_renew( path : &Path, mut template : WorkspaceTemplate, repository_url : String, branches : Vec< String > ) -> Result< () > + pub fn workspace_renew + ( + path : &Path, + mut template : WorkspaceTemplate, + repository_url : String, + branches : Vec< String > + ) + -> Result< () > + // qqq : don't use 1-prameter Result { - if fs::read_dir(path)?.count() != 0 + if fs::read_dir( path )?.count() != 0 { bail!( "Directory should be empty" ) } - template.values.insert_if_empty( "project_name", wca::Value::String( path.file_name().unwrap().to_string_lossy().into() ) ); + template + .values + .insert_if_empty + ( + "project_name", + wca::Value::String( path.file_name().unwrap().to_string_lossy().into() ) + ); template.values.insert_if_empty( "url", wca::Value::String( repository_url ) ); - template.values.insert_if_empty( "branches", wca::Value::String( branches.into_iter().map( | b | format!( r#""{}""#, b ) ).join( ", " ) ) ); - template.create_all( path )?; + template + .values + .insert_if_empty + ( + "branches", + wca::Value::String + ( + branches.into_iter().map( | b | format!( r#""{}""#, b ) ).join( ", " ) + ) + ); + template.files.create_all( path, &template.values )?; Ok( () ) } } diff --git a/module/move/willbe/src/bin/cargo-will.rs b/module/move/willbe/src/bin/cargo-will.rs index 3e4e4ffeaf..53aa39e51e 100644 --- a/module/move/willbe/src/bin/cargo-will.rs +++ b/module/move/willbe/src/bin/cargo-will.rs @@ -6,7 +6,7 @@ #[ allow( unused_imports ) ] use::willbe::*; -fn main() -> Result< (), wtools::error::for_app::Error > +fn main() -> Result< (), error::untyped::Error > { let args = std::env::args().skip( 1 ).collect(); Ok( willbe::run( args )? ) diff --git a/module/move/willbe/src/bin/will.rs b/module/move/willbe/src/bin/will.rs index e514a10bc4..cbaad31299 100644 --- a/module/move/willbe/src/bin/will.rs +++ b/module/move/willbe/src/bin/will.rs @@ -9,7 +9,10 @@ #[ allow( unused_imports ) ] use::willbe::*; -fn main() -> Result< (), wtools::error::for_app::Error > +fn main() -> Result< (), error::untyped::Error > { Ok( willbe::run( std::env::args().collect() )? ) } + +// cargo_subcommand_metadata::description!( "xxx" ); +// xxx : use \ No newline at end of file diff --git a/module/move/willbe/src/bin/willbe.rs b/module/move/willbe/src/bin/willbe.rs index 4fe12f8995..5943573a67 100644 --- a/module/move/willbe/src/bin/willbe.rs +++ b/module/move/willbe/src/bin/willbe.rs @@ -6,7 +6,7 @@ #[ allow( unused_imports ) ] use::willbe::*; -fn main() -> Result< (), wtools::error::for_app::Error > +fn main() -> Result< (), error::untyped::Error > { Ok( willbe::run( std::env::args().collect() )? ) } diff --git a/module/move/willbe/src/command/cicd_renew.rs b/module/move/willbe/src/command/cicd_renew.rs index f87569e9a8..70d824c40c 100644 --- a/module/move/willbe/src/command/cicd_renew.rs +++ b/module/move/willbe/src/command/cicd_renew.rs @@ -2,14 +2,18 @@ mod private { use crate::*; - use wtools::error::{ anyhow::Context, Result }; + use error::{ untyped::Context, Result }; /// /// Generate table. /// pub fn cicd_renew() -> Result< () > { - action::cicd_renew( &std::env::current_dir()? ).context( "Fail to generate workflow" ) + action::cicd_renew + ( + &std::env::current_dir()? 
+ ) + .context( "Fail to generate workflow" ) } } diff --git a/module/move/willbe/src/command/deploy_renew.rs b/module/move/willbe/src/command/deploy_renew.rs index 7fe0ebf13e..4013cb4d05 100644 --- a/module/move/willbe/src/command/deploy_renew.rs +++ b/module/move/willbe/src/command/deploy_renew.rs @@ -3,9 +3,10 @@ mod private use crate::*; use wca::VerifiedCommand; - use wtools::error::{ anyhow::Context, Result }; - use tool::template::Template; - use action::deploy_renew::*; + use error::{ untyped::Context, Result }; + use tool::TemplateHolder; + //use tool::template::Template; + // use action::deploy_renew::*; /// /// Create new deploy. @@ -14,7 +15,8 @@ mod private pub fn deploy_renew( o : VerifiedCommand ) -> Result< () > { let current_dir = std::env::current_dir()?; - let mut template = DeployTemplate::default(); + + let mut template = TemplateHolder::default(); _ = template.load_existing_params( ¤t_dir ); let parameters = template.parameters(); let mut values = parameters.values_from_props( &o.props ); @@ -23,7 +25,8 @@ mod private values.interactive_if_empty( mandatory ); } template.set_values( values ); - action::deploy_renew( ¤t_dir, template ).context( "Fail to create deploy template" ) + action::deploy_renew( ¤t_dir, template ) + .context( "Fail to create deploy template" ) } } diff --git a/module/move/willbe/src/command/features.rs b/module/move/willbe/src/command/features.rs index ad69897935..50a7356dfc 100644 --- a/module/move/willbe/src/command/features.rs +++ b/module/move/willbe/src/command/features.rs @@ -3,29 +3,35 @@ mod private use crate::*; use action::features::FeaturesOptions; + use std::fs; use std::path::PathBuf; - use _path::AbsolutePath; + // // use path::AbsolutePath; use wca::VerifiedCommand; - use wtools::error::Result; + use error::Result; + // qqq : group dependencies /// /// List features of a package. /// + // qqq : don't use 1-prameter Result pub fn features( o : VerifiedCommand ) -> Result< () > { let path : PathBuf = o.args.get_owned( 0 ).unwrap_or_else( || "./".into() ); - let path = AbsolutePath::try_from( path )?; - let with_features_deps = o.props.get_owned( "with_features_deps" ).unwrap_or( false ); - let options = FeaturesOptions::former() - .manifest_dir( path ) + let crate_dir = CrateDir::try_from( fs::canonicalize( path )? 
)?; + let with_features_deps = o + .props + .get_owned( "with_features_deps" ) + .unwrap_or( false ); + let o = FeaturesOptions::former() + .crate_dir( crate_dir ) .with_features_deps( with_features_deps ) .form(); - let report = action::features( options ); + let report = action::features( o ); match report { - Ok(success) => println!("{success}"), - Err(failure) => eprintln!("{failure}"), + Ok( success ) => println!( "{success}" ), + Err( failure ) => eprintln!( "{failure}" ), } Ok( () ) } diff --git a/module/move/willbe/src/command/list.rs b/module/move/willbe/src/command/list.rs index 154317904c..0e1fd8a653 100644 --- a/module/move/willbe/src/command/list.rs +++ b/module/move/willbe/src/command/list.rs @@ -3,20 +3,20 @@ mod private { use crate::*; - use { action, wtools }; - use std:: { str::FromStr, path::PathBuf, - collections::HashSet, }; - use wca::VerifiedCommand; - use wtools::error::{ for_app::Context, Result }; + use error::{ untyped::Context, Result }; + use collection::HashSet; - use _path::AbsolutePath; - use action::{ list as l, list::{ ListFormat, ListOptions } }; + use action:: + { + list as l, + list::{ ListFormat, ListOptions }, + }; use former::Former; #[ derive( Former ) ] @@ -49,8 +49,10 @@ mod private pub fn list( o : VerifiedCommand ) -> Result< () > { - let path_to_workspace : PathBuf = o.args.get_owned( 0 ).unwrap_or( std::env::current_dir().context( "Workspace list command without subject" )? ); - let path_to_workspace = AbsolutePath::try_from( path_to_workspace )?; + let path_to_workspace : PathBuf = o.args + .get_owned( 0 ) + .unwrap_or( std::env::current_dir().context( "Workspace list command without subject" )? ); + // let path_to_workspace = AbsolutePath::try_from( fs::canonicalize( path_to_workspace )? )?; let ListProperties { format, with_version, with_path, with_local, with_remote, with_primary, with_dev, with_build } = o.props.try_into()?; @@ -69,7 +71,7 @@ mod private if with_dev { categories.insert( l::DependencyCategory::Dev ); } if with_build { categories.insert( l::DependencyCategory::Build ); } - let args = ListOptions::former() + let o = ListOptions::former() .path_to_manifest( crate_dir ) .format( format ) .info( additional_info ) @@ -77,7 +79,7 @@ mod private .dependency_categories( categories ) .form(); - match action::list( args ) + match action::list( o ) { Ok( report ) => { @@ -96,19 +98,29 @@ mod private impl TryFrom< wca::Props > for ListProperties { - type Error = wtools::error::for_app::Error; + type Error = error::untyped::Error; fn try_from( value : wca::Props ) -> Result< Self, Self::Error > { let mut this = Self::former(); - this = if let Some( v ) = value.get_owned( "format" ).map( ListFormat::from_str ) { this.format( v? 
) } else { this }; - this = if let Some( v ) = value.get_owned( "with_version" ) { this.with_version::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_path" ) { this.with_path::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_local" ) { this.with_local::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_remote" ) { this.with_remote::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_primary" ) { this.with_primary::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_dev" ) { this.with_dev::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_build" ) { this.with_build::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "format" ) + .map( ListFormat::from_str ) { this.format( v? ) } else { this }; + + this = if let Some( v ) = value + .get_owned( "with_version" ) { this.with_version::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_path" ) { this.with_path::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_local" ) { this.with_local::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_remote" ) { this.with_remote::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_primary" ) { this.with_primary::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_dev" ) { this.with_dev::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_build" ) { this.with_build::< bool >( v ) } else { this }; Ok( this.form() ) } diff --git a/module/move/willbe/src/command/main_header.rs b/module/move/willbe/src/command/main_header.rs index 558f2a625b..5a957a79d1 100644 --- a/module/move/willbe/src/command/main_header.rs +++ b/module/move/willbe/src/command/main_header.rs @@ -2,17 +2,18 @@ mod private { use crate::*; use action; - use _path::AbsolutePath; - use error_tools::Result; - use wtools::error::anyhow::Error; + use error::untyped::{ Error, Result }; /// Generates header to main Readme.md file. pub fn readme_header_renew() -> Result< () > { - match action::readme_header_renew( AbsolutePath::try_from( std::env::current_dir()? )? ) + match action::readme_header_renew + ( + CrateDir::transitive_try_from::< AbsolutePath >( CurrentPath )? + ) { - Ok( report ) => - { + Ok( report ) => + { println!( "{report}" ); Ok( () ) } diff --git a/module/move/willbe/src/command/publish.rs b/module/move/willbe/src/command/publish.rs index ca085f3b31..dfc8bb4a61 100644 --- a/module/move/willbe/src/command/publish.rs +++ b/module/move/willbe/src/command/publish.rs @@ -5,7 +5,7 @@ mod private use colored::Colorize; use wca::VerifiedCommand; - use wtools::error::{ Result, for_app::Context }; + use error::{ Result, untyped::Context }; use former::Former; use std::fmt::Write; use channel::Channel; @@ -27,18 +27,41 @@ mod private pub fn publish( o : VerifiedCommand ) -> Result< () > { - let args_line = format!( "{}", o.args.get_owned( 0 ).unwrap_or( std::path::PathBuf::from( "" ) ).display() ); - let prop_line = format!( "{}", o.props.iter().map( | p | format!( "{}:{}", p.0, p.1.to_string() ) ).collect::< Vec< _ > >().join(" ") ); + let args_line = format! + ( + "{}", + o + .args + .get_owned( 0 ) + .unwrap_or( std::path::PathBuf::from( "" ) ).display() + ); + let prop_line = format! 
+ ( + "{}", + o + .props + .iter() + .map( | p | format!( "{}:{}", p.0, p.1.to_string() ) ) + .collect::< Vec< _ > >().join(" ") ); - let patterns : Vec< _ > = o.args.get_owned( 0 ).unwrap_or_else( || vec![ "./".into() ] ); + let patterns : Vec< _ > = o + .args + .get_owned( 0 ) + .unwrap_or_else( || vec![ "./".into() ] ); - let PublishProperties { channel, dry, temp } = o.props.try_into()?; - let plan = action::publish_plan( patterns, channel, dry, temp ).context( "Failed to plan the publication process" )?; + let PublishProperties + { + channel, + dry, + temp + } = o.props.try_into()?; + let plan = action::publish_plan( patterns, channel, dry, temp ) + .context( "Failed to plan the publication process" )?; let mut formatted_plan = String::new(); writeln!( &mut formatted_plan, "Tree :" )?; plan.write_as_tree( &mut formatted_plan )?; - + if !plan.plans.is_empty() { writeln!( &mut formatted_plan, "The following packages are pending for publication :" )?; @@ -52,13 +75,14 @@ mod private { println!( "{report}" ); - if dry && report.packages.iter().find( |( _, p )| p.publish_required ).is_some() + if dry && !report.packages.is_empty() { let args = if args_line.is_empty() { String::new() } else { format!(" {}", args_line) }; let prop = if prop_line.is_empty() { String::new() } else { format!(" {}", prop_line) }; - let line = format!("will .publish{}{} dry:0", args, prop); - println!("To apply plan, call the command `{}`", line.blue()); - // qqq : for Petro : for Bohdan : bad. should be exact command with exact parameters + let line = format!("will .publish{}{} dry:0", args, prop ); + println!("To apply plan, call the command `{}`", line.blue() ); + // aaa : for Petro : for Bohdan : bad. should be exact command with exact parameters + // aaa : it`s already works } Ok( () ) @@ -73,14 +97,23 @@ mod private impl TryFrom< wca::Props > for PublishProperties { - type Error = wtools::error::for_app::Error; + type Error = error::untyped::Error; fn try_from( value : wca::Props ) -> Result< Self, Self::Error > { let mut this = Self::former(); - this = if let Some( v ) = value.get_owned( "channel" ) { this.channel::< Channel >( { let v : String = v; Channel::try_from( v )? } ) } else { this }; - this = if let Some( v ) = value.get_owned( "dry" ) { this.dry::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "temp" ) { this.temp::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "channel" ) + { + this.channel::< Channel >( { let v : String = v; Channel::try_from( v )? } ) + } + else + { this }; + + this = if let Some( v ) = value + .get_owned( "dry" ) { this.dry::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "temp" ) { this.temp::< bool >( v ) } else { this }; Ok( this.form() ) } diff --git a/module/move/willbe/src/command/publish_diff.rs b/module/move/willbe/src/command/publish_diff.rs index 961ba818c4..d34d72968b 100644 --- a/module/move/willbe/src/command/publish_diff.rs +++ b/module/move/willbe/src/command/publish_diff.rs @@ -2,11 +2,13 @@ mod private { use crate::*; + use std::fs; use std::path::PathBuf; use wca::VerifiedCommand; - use wtools::error::Result; - use _path::AbsolutePath; + use error::Result; + // qqq : group dependencies + // use path::AbsolutePath; #[ derive( former::Former ) ] struct PublishDiffProperties @@ -27,6 +29,8 @@ mod private /// # Errors /// /// Returns an error if there is an issue with the command. 
+ + // qqq : don't use 1-prameter Result pub fn publish_diff( o : VerifiedCommand ) -> Result< () > { let path : PathBuf = o.args.get_owned( 0 ).unwrap_or( std::env::current_dir()? ); @@ -40,7 +44,7 @@ mod private println!( "{}", action::publish_diff( o )? ); if let Some( keep ) = keep_archive { - let keep = AbsolutePath::try_from( keep ).unwrap(); + let keep = AbsolutePath::try_from( fs::canonicalize( keep )? ).unwrap(); println!( "Remote version of the package was saved at `{}`", keep.as_ref().display() ); } @@ -49,12 +53,16 @@ mod private impl TryFrom< wca::Props > for PublishDiffProperties { - type Error = wtools::error::for_app::Error; + type Error = error::untyped::Error; fn try_from( value : wca::Props ) -> Result< Self, Self::Error > { let mut this = Self::former(); - this = if let Some( v ) = value.get_owned( "keep_archive" ) { this.keep_archive::< PathBuf >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "keep_archive" ) + { this.keep_archive::< PathBuf >( v ) } + else + { this }; Ok( this.form() ) } diff --git a/module/move/willbe/src/command/readme_headers_renew.rs b/module/move/willbe/src/command/readme_headers_renew.rs index d8b4edd5c5..015399ae9d 100644 --- a/module/move/willbe/src/command/readme_headers_renew.rs +++ b/module/move/willbe/src/command/readme_headers_renew.rs @@ -1,34 +1,35 @@ mod private { use crate::*; - use _path::AbsolutePath; + // use path::AbsolutePath; use action; - use wtools::error::anyhow::Error; - use error_tools::{ Result, err }; + // use error::untyped::Error; + use error::{ Result, err }; use std::fmt::{ Display, Formatter }; #[ derive( Debug, Default ) ] struct ReadmeHeadersRenewReport { main_header_renew_report : action::MainHeaderRenewReport, - main_header_renew_error : Option< Error >, + main_header_renew_error : Option< action::MainHeaderRenewError >, // aaa : for Petro : typed error // aaa : done modules_headers_renew_report : action::ModulesHeadersRenewReport, - modules_headers_renew_error : Option< Error >, + modules_headers_renew_error : Option< action::ModulesHeadersRenewError >, // aaa : for Petro : typed error // aaa : done } impl Display for ReadmeHeadersRenewReport { fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result { - match ( &self.main_header_renew_error, &self.modules_headers_renew_error ) - { - ( Some( main ), Some( modules ) ) => + match ( &self.main_header_renew_error, &self.modules_headers_renew_error ) + { + ( Some( main ), Some( modules ) ) => { writeln! - ( - f, - "Main header renew report : \n{}\nError : \n{:?}\nModules headers renew report : \n{}\nError : \n{:?}", - self.main_header_renew_report, main, self.modules_headers_renew_report, modules + ( + f, + "Main header renew report : \ +{}\nError : \n{:?}\nModules headers renew report : \n{}\nError : \n{:?}", + self.main_header_renew_report, main, self.modules_headers_renew_report, modules )?; } ( Some( main ), None ) => @@ -36,7 +37,8 @@ mod private writeln! ( f, - "Main header renew report : \n{}\nError : \n{:?}\nModules headers renew report : \n{}", + "Main header renew report : \ +{}\nError : \n{:?}\nModules headers renew report : \n{}", self.main_header_renew_report, main, self.modules_headers_renew_report )?; } @@ -45,7 +47,8 @@ mod private writeln! 
( f, - "Main header renew report : \n{}\nModules headers renew report : \n{}\nError : \n{:?}\n", + "Main header renew report : \ +{}\nModules headers renew report : \n{}\nError : \n{:?}\n", self.main_header_renew_report, self.modules_headers_renew_report, modules )?; } @@ -62,29 +65,30 @@ mod private Ok( () ) } } - + /// Aggregates two commands: `generate_modules_headers` & `generate_main_header` pub fn readme_headers_renew() -> Result< () > { let mut report = ReadmeHeadersRenewReport::default(); - let absolute_path = AbsolutePath::try_from( std::env::current_dir()? )?; + // let absolute_path = AbsolutePath::try_from( std::env::current_dir()? )?; + let crate_dir = CrateDir::transitive_try_from::< AbsolutePath >( CurrentPath )?; let mut fail = false; - - match action::readme_header_renew( absolute_path.clone() ) + + match action::readme_header_renew( crate_dir.clone() ) { - Ok( r ) => + Ok( r ) => { report.main_header_renew_report = r; } - Err( ( r, error ) ) => + Err( ( r, error ) ) => { fail = true; report.main_header_renew_report = r; - report.main_header_renew_error = Some( Error::from( error ) ); + report.main_header_renew_error = Some( error ); } }; - match action::readme_modules_headers_renew( absolute_path ) + match action::readme_modules_headers_renew( crate_dir ) { Ok( r ) => { @@ -94,10 +98,10 @@ mod private { fail = true; report.modules_headers_renew_report = r; - report.modules_headers_renew_error = Some( Error::from( error ) ); + report.modules_headers_renew_error = Some( error ); } } - + if fail { eprintln!( "{report}" ); diff --git a/module/move/willbe/src/command/readme_health_table_renew.rs b/module/move/willbe/src/command/readme_health_table_renew.rs index 945490062f..a1b100dd4b 100644 --- a/module/move/willbe/src/command/readme_health_table_renew.rs +++ b/module/move/willbe/src/command/readme_health_table_renew.rs @@ -2,14 +2,18 @@ mod private { use crate::*; - use wtools::error::{ for_app::Context, Result }; + use error::{ untyped::Context, Result }; /// /// Generate table. /// pub fn readme_health_table_renew() -> Result< () > { - action::readme_health_table_renew( &std::env::current_dir()? ).context( "Fail to create table" ) + action::readme_health_table_renew + ( + &std::env::current_dir()? + ) + .context( "Fail to create table" ) } } diff --git a/module/move/willbe/src/command/readme_modules_headers_renew.rs b/module/move/willbe/src/command/readme_modules_headers_renew.rs index e959c12365..f71e4cf916 100644 --- a/module/move/willbe/src/command/readme_modules_headers_renew.rs +++ b/module/move/willbe/src/command/readme_modules_headers_renew.rs @@ -1,20 +1,20 @@ mod private { use crate::*; - use _path::AbsolutePath; - use wtools::error::{ for_app::Error, Result }; + // use path::AbsolutePath; + use error::{ untyped::Error, Result }; /// Generate headers for workspace members pub fn readme_modules_headers_renew() -> Result< () > { - match action::readme_modules_headers_renew( AbsolutePath::try_from( std::env::current_dir()? )? ) + match action::readme_modules_headers_renew( CrateDir::transitive_try_from::< AbsolutePath >( CurrentPath )? ) { - Ok( report ) => + Ok( report ) => { println!( "{report}" ); Ok( () ) } - Err( ( report, e ) ) => + Err( ( report, e ) ) => { eprintln!( "{report}" ); Err( Error::from( e ).context( "Fail to generate modules headers." 
) ) diff --git a/module/move/willbe/src/command/test.rs b/module/move/willbe/src/command/test.rs index 25019344fb..0a65952d20 100644 --- a/module/move/willbe/src/command/test.rs +++ b/module/move/willbe/src/command/test.rs @@ -3,16 +3,17 @@ mod private { use crate::*; - use std::collections::HashSet; - use std::path::PathBuf; + use collection::HashSet; + use std::fs; use colored::Colorize; use wca::VerifiedCommand; - use wtools::error::Result; - use _path::AbsolutePath; + use error::Result; + // qqq : group dependencies + use path::{ AbsolutePath, PathBuf }; use action::test::TestsCommandOptions; use former::Former; use channel::Channel; - use error_tools::for_app::bail; + use error::untyped::bail; use optimization::Optimization; #[ derive( Former, Debug ) ] @@ -47,13 +48,30 @@ mod private } /// run tests in specified crate + // qqq : don't use 1-prameter Result pub fn test( o : VerifiedCommand ) -> Result< () > { - let args_line = format!( "{}", o.args.get_owned( 0 ).unwrap_or( std::path::PathBuf::from( "" ) ).display() ); - let prop_line = format!( "{}", o.props.iter().map( | p | format!( "{}:{}", p.0, p.1.to_string() ) ).collect::< Vec< _ > >().join(" ") ); + let args_line = format! + ( + "{}", + o + .args + .get_owned( 0 ) + .unwrap_or( std::path::PathBuf::from( "" ) ) + .display() + ); + let prop_line = format! + ( + "{}", + o + .props + .iter() + .map( | p | format!( "{}:{}", p.0, p.1.to_string() ) ) + .collect::< Vec< _ > >().join(" ") + ); let path : PathBuf = o.args.get_owned( 0 ).unwrap_or_else( || "./".into() ); - let path = AbsolutePath::try_from( path )?; + let path = AbsolutePath::try_from( fs::canonicalize( path )? )?; let TestsProperties { dry, @@ -68,7 +86,7 @@ mod private with_all_features, with_none_features, with_debug, - with_release, + with_release, with_progress } = o.props.try_into()?; @@ -82,7 +100,8 @@ mod private if optimizations.is_empty() { - bail!( "Cannot run tests if with_debug and with_release are both false. Set at least one of them to true." ); + bail!( "Cannot run tests if with_debug and with_release are both false. \ +Set at least one of them to true." 
); } @@ -103,7 +122,7 @@ mod private match action::test( args, dry ) { - + Ok( report ) => { if dry @@ -113,11 +132,11 @@ mod private let line = format!("will .publish{}{} dry:0", args, prop); println!("To apply plan, call the command `{}`", line.blue()); } - else - { + else + { println!( "{report} "); } - + Ok( () ) } Err( ( report, e ) ) => @@ -130,25 +149,39 @@ mod private impl TryFrom< wca::Props > for TestsProperties { - type Error = wtools::error::for_app::Error; + type Error = error::untyped::Error; fn try_from( value : wca::Props ) -> Result< Self, Self::Error > { let mut this = Self::former(); - this = if let Some( v ) = value.get_owned( "dry" ) { this.dry::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "temp" ) { this.temp::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_stable" ) { this.with_stable::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_nightly" ) { this.with_nightly::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "concurrent" ) { this.concurrent::< u32 >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "power" ) { this.power::< u32 >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "include" ) { this.include::< Vec< String > >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "exclude" ) { this.exclude::< Vec< String > >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_debug" ) { this.with_debug::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_release" ) { this.with_release::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_all_features" ) { this.with_all_features::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_none_features" ) { this.with_none_features::< bool >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "always" ) { this.enabled_features::< Vec< String > >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "with_progress" ) { this.with_progress::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "dry" ) { this.dry::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "temp" ) { this.temp::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_stable" ) { this.with_stable::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_nightly" ) { this.with_nightly::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "concurrent" ) { this.concurrent::< u32 >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "power" ) { this.power::< u32 >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "include" ) { this.include::< Vec< String > >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "exclude" ) { this.exclude::< Vec< String > >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_debug" ) { this.with_debug::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_release" ) { this.with_release::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_all_features" ) { this.with_all_features::< bool >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_none_features" ) { this.with_none_features::< bool >( v ) } else { this }; + this = if let 
Some( v ) = value + .get_owned( "always" ) { this.enabled_features::< Vec< String > >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "with_progress" ) { this.with_progress::< bool >( v ) } else { this }; Ok( this.form() ) } diff --git a/module/move/willbe/src/command/workspace_renew.rs b/module/move/willbe/src/command/workspace_renew.rs index 26cc520bf4..ea28f5d631 100644 --- a/module/move/willbe/src/command/workspace_renew.rs +++ b/module/move/willbe/src/command/workspace_renew.rs @@ -4,7 +4,7 @@ mod private use former::Former; use wca::VerifiedCommand; - use wtools::error::{ anyhow::Context, Result }; + use error::{ untyped::Context, Result }; use action::WorkspaceTemplate; #[ derive( Former ) ] @@ -22,19 +22,28 @@ mod private { let WorkspaceNewProperties { repository_url, branches } = o.props.try_into()?; let template = WorkspaceTemplate::default(); - action::workspace_renew( &std::env::current_dir()?, template, repository_url, branches ).context( "Fail to create workspace" ) + action::workspace_renew + ( + &std::env::current_dir()?, + template, + repository_url, + branches + ) + .context( "Fail to create workspace" ) } impl TryFrom< wca::Props > for WorkspaceNewProperties { - type Error = wtools::error::for_app::Error; + type Error = error::untyped::Error; fn try_from( value : wca::Props ) -> std::result::Result< Self, Self::Error > { let mut this = Self::former(); - this = if let Some( v ) = value.get_owned( "repository_url" ) { this.repository_url::< String >( v ) } else { this }; - this = if let Some( v ) = value.get_owned( "branches" ) { this.branches::< Vec< String > >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "repository_url" ) { this.repository_url::< String >( v ) } else { this }; + this = if let Some( v ) = value + .get_owned( "branches" ) { this.branches::< Vec< String > >( v ) } else { this }; Ok( this.form() ) } diff --git a/module/move/willbe/src/entity/channel.rs b/module/move/willbe/src/entity/channel.rs index a266890aab..57e1eede8a 100644 --- a/module/move/willbe/src/entity/channel.rs +++ b/module/move/willbe/src/entity/channel.rs @@ -4,12 +4,11 @@ mod private use std:: { fmt::Formatter, - path::Path, - collections::HashSet, + ffi::OsString, }; - use std::ffi::OsString; - use error_tools::for_app::Error; - use wtools::error::Result; + use path::Path; + use collection::HashSet; + use error::untyped::{ Error, Result }; use process_tools::process::*; /// The `Channel` enum represents different release channels for rust. @@ -34,17 +33,17 @@ mod private } } } - + impl TryFrom< String > for Channel { - type Error = error::for_app::Error; + type Error = error::untyped::Error; fn try_from( value : String ) -> Result< Self, Self::Error > { Ok( match value.as_ref() { "stable" => Self::Stable, "nightly" => Self::Nightly, - other => error::for_app::bail!( "Unexpected channel value. Expected [stable, channel]. Got: `{other}`" ), + other => error::untyped::bail!( "Unexpected channel value. Expected [stable, channel]. Got: `{other}`" ), }) } } diff --git a/module/move/willbe/src/entity/code.rs b/module/move/willbe/src/entity/code.rs new file mode 100644 index 0000000000..5c8418bad8 --- /dev/null +++ b/module/move/willbe/src/entity/code.rs @@ -0,0 +1,43 @@ +mod private +{ + use crate::*; + + use std:: + { + borrow::Cow, + }; + + /// A trait for converting an object to its code representation. + /// + /// The `AsCode` trait defines a method for converting an object into a code representation, + /// typically as a string. 
This can be useful for generating code from various data structures + /// or objects. + /// + /// ``` + pub trait AsCode + { + /// Converts the object to its code representation. + fn as_code< 'a >( &'a self ) -> std::io::Result< Cow< 'a, str > >; + } + + /// A trait for retrieving an iterator over items of a source file. + /// + /// The `CodeItems` trait is used to represent objects that can provide an iterator over their + /// contained source files. This is useful in scenarios where you need to access or process + /// all source files associated with an object, such as a workspace or a package. + pub trait CodeItems + { + /// Returns an iterator over the source files. + fn items( &self ) -> impl IterTrait< '_, syn::Item >; + } +} + +// + +crate::mod_interface! +{ + + exposed use AsCode; + exposed use CodeItems; + +} diff --git a/module/move/willbe/src/entity/dependency.rs b/module/move/willbe/src/entity/dependency.rs new file mode 100644 index 0000000000..d3e807f374 --- /dev/null +++ b/module/move/willbe/src/entity/dependency.rs @@ -0,0 +1,285 @@ +mod private +{ + + use crate::*; + + // use crates_tools::CrateArchive; + // use workspace::Workspace; + use error:: + { + // untyped::Result, + // typed::Error, + untyped::format_err, + }; + + /// A dependency of the main crate + #[ derive( Debug, Clone, Copy ) ] + #[ repr( transparent ) ] + pub struct DependencyRef< 'a > + { + inner : &'a cargo_metadata::Dependency, + } + + impl< 'a > DependencyRef< 'a > + { + + /// The file system path for a local path dependency. + /// Only produced on cargo 1.51+ + pub fn crate_dir( &self ) -> Option< CrateDir > + { + match &self.inner.path + { + Some( path ) => path.as_path().try_into().ok(), + None => None, + } + } + + /// Name as given in the Cargo.toml. + pub fn name( &self ) -> String + { + self.inner.name.clone() + } + + /// The kind of dependency this is. + pub fn kind( &self ) -> DependencyKind + { + match self.inner.kind + { + cargo_metadata::DependencyKind::Normal => DependencyKind::Normal, + cargo_metadata::DependencyKind::Development => DependencyKind::Development, + cargo_metadata::DependencyKind::Build => DependencyKind::Build, + cargo_metadata::DependencyKind::Unknown => DependencyKind::Unknown, + } + } + + /// Required version + pub fn req( &self ) -> semver::VersionReq + { + self.inner.req.clone() + } + } + + impl< 'a > From< &'a cargo_metadata::Dependency > for DependencyRef< 'a > + { + #[ inline( always ) ] + fn from( inner : &'a cargo_metadata::Dependency ) -> Self + { + Self { inner } + } + } + + /// Dependencies can come in three kinds + #[ derive( Eq, PartialEq, Debug, Clone, Copy ) ] + pub enum DependencyKind + { + /// The 'normal' kind + Normal, + /// Those used in tests only + Development, + /// Those used in build scripts only + Build, + /// The 'unknown' kind + Unknown, + } + + // + + /// Identifier of any crate (local and remote). + #[ derive( Debug, Clone, Hash, Eq, PartialEq ) ] + pub struct CrateId + { + /// The name of the crate. + pub name : String, // qqq : that should be Arc< str > + /// The absolute path to the crate, if available. 
+ pub crate_dir : Option< CrateDir >, // qqq : that should be Option< Arc< CrateDir > > + // pub path : Option< AbsolutePath >, + } + + impl< 'a > From< &WorkspacePackageRef< 'a > > for CrateId + { + fn from( value : &WorkspacePackageRef< 'a > ) -> Self + { + Self + { + name : value.name().into(), + crate_dir : Some( value.crate_dir().unwrap() ) + // path : Some( AbsolutePath::try_from( value.manifest_file().parent().unwrap() ).unwrap() ), + } + } + } + + impl From< &DependencyRef< '_ > > for CrateId + { + fn from( value : &DependencyRef< '_ > ) -> Self + { + Self + { + name : value.name().into(), + crate_dir : value.crate_dir(), + // path : value.path().clone().map( | path | AbsolutePath::try_from( path ).unwrap() ), + } + } + } + + /// Sorting variants for dependencies. + #[ derive( Debug, Copy, Clone ) ] + pub enum DependenciesSort + { + /// List will be topologically sorted. + Topological, + /// List will be unsorted. + Unordered, + } + + #[ derive( Debug, Clone ) ] + /// Args for `local_dependencies` function. + pub struct DependenciesOptions + { + /// With dependencies of dependencies. + pub recursive : bool, + /// With sorting. + pub sort : DependenciesSort, + /// Include dev dependencies. + pub with_dev : bool, + /// Include remote dependencies. + pub with_remote : bool, + } + + impl Default for DependenciesOptions + { + fn default() -> Self + { + Self + { + recursive : true, + sort : DependenciesSort::Unordered, + with_dev : false, + with_remote : false, + } + } + } + + // qqq : for Bohdan : poor description + /// Recursive implementation of the `list` function + pub fn _list< 'a > + ( + workspace : &Workspace, // aaa : for Bohdan : no mut // aaa : no mut + package : &Package< 'a >, + graph : &mut collection::HashMap< CrateId, collection::HashSet< CrateId > >, + opts : DependenciesOptions + ) + -> Result< CrateId > + { + let DependenciesOptions + { + recursive, + sort : _, + with_dev, + with_remote, + } = opts; + if recursive && with_remote { unimplemented!( "`recursive` + `with_remote` options") } + + let manifest_file = &package.manifest_file(); + + let package = workspace + .package_find_by_manifest( &manifest_file ) + .ok_or( format_err!( "Package not found in the workspace with path : `{}`", manifest_file.as_ref().display() ) )?; + + let deps : collection::HashSet< _ > = package + .dependencies() + // .iter() + .filter( | dep | ( with_remote || dep.crate_dir().is_some() ) && ( with_dev || dep.kind() != DependencyKind::Development ) ) + .map( | dep | CrateId::from( &dep ) ) + .collect(); + + let package = CrateId::from( &package ); + graph.insert( package.clone(), deps.clone() ); + + if recursive + { + for dep in deps + { + if graph.get( &dep ).is_none() + { + // unwrap because `recursive` + `with_remote` not yet implemented + _list + ( + workspace, + &dep.crate_dir.unwrap().try_into()?, + // &dep.path.as_ref().unwrap().join( "Cargo.toml" ).try_into().unwrap(), + graph, + opts.clone(), + )?; + } + } + } + + Ok( package ) + } + + /// Returns local dependencies of a specified package by its package path from a workspace. + /// + /// # Arguments + /// + /// - `workspace` - holds cached information about the workspace, such as the packages it contains and their dependencies. By passing it as a mutable reference, function can update the cache as needed. + /// - `package` - The package package file contains package about the package such as its name, version, and dependencies. + /// - `opts` - used to specify options or configurations for fetching local dependencies. 
+ /// + /// # Returns + /// + /// If the operation is successful, returns a vector of `PathBuf` objects, where each `PathBuf` represents the path to a local dependency of the specified package. + // qqq : typed error? + pub fn list< 'a > + ( + workspace : &mut Workspace, + package : &Package< 'a >, + opts : DependenciesOptions + ) + -> Result< Vec< CrateId > > + { + let mut graph = collection::HashMap::new(); + let root = _list( workspace, package, &mut graph, opts.clone() )?; + + let output = match opts.sort + { + DependenciesSort::Unordered => + { + graph + .into_iter() + .flat_map( | ( id, dependency ) | + { + dependency + .into_iter() + .chain( Some( id ) ) + }) + .unique() + .filter( | x | x != &root ) + .collect() + } + DependenciesSort::Topological => + { + // qqq : too long line + graph::toposort( graph::construct( &graph ) ).map_err( | err | format_err!( "{}", err ) )?.into_iter().filter( | x | x != &root ).collect() + }, + }; + + Ok( output ) + } + +} + +// + +crate::mod_interface! +{ + + exposed use DependencyRef; + exposed use DependencyKind; + + protected use CrateId; + protected use DependenciesSort; + protected use DependenciesOptions; + protected use _list; + protected use list; + +} diff --git a/module/move/willbe/src/entity/diff.rs b/module/move/willbe/src/entity/diff.rs index 49ef63b29a..7ddd7f65f1 100644 --- a/module/move/willbe/src/entity/diff.rs +++ b/module/move/willbe/src/entity/diff.rs @@ -4,17 +4,17 @@ mod private use std:: { - collections::HashSet, fmt::Formatter, - path::PathBuf, }; - use std::collections::HashMap; + use path::PathBuf; + use collection::HashMap; use colored::Colorize; use crates_tools::CrateArchive; - use similar::*; + use collection::HashSet; + use similar::{ TextDiff, ChangeTag }; + + // use similar::*; // qqq : for Bohdan : bad - use wtools::iter::Itertools; - /// These files are ignored because they can be safely changed without affecting functionality /// /// - `.cargo_vcs_info.json` - contains the git sha1 hash that varies between different commits @@ -78,7 +78,7 @@ mod private Is : Into< HashSet< I > >, I : AsRef< std::path::Path >, { - let current = self.0.keys().cloned().collect::< HashSet< _ > >(); + let current : HashSet< _ > = self.0.keys().cloned().collect(); let Some( key ) = current.iter().next() else { return self }; let crate_part = std::path::Path::new( key.components().next().unwrap().as_os_str() ); diff --git a/module/move/willbe/src/entity/features.rs b/module/move/willbe/src/entity/features.rs index 54f38b2d22..1e4b80746a 100644 --- a/module/move/willbe/src/entity/features.rs +++ b/module/move/willbe/src/entity/features.rs @@ -1,12 +1,9 @@ mod private { use crate::*; - use std::collections::{ BTreeSet, HashSet }; - // aaa : for Petro : don't use cargo_metadata and Package directly, use facade - // aaa : ✅ - use error_tools::for_app::{ bail, Result }; - use wtools::iter::Itertools; - use workspace::WorkspacePackage; + use collection::{ BTreeSet, HashSet }; + use error::untyped::{ bail, Result }; + use iter::Itertools; /// Generates a powerset of the features available in the given `package`, /// filtered according to specified inclusion and exclusion criteria, @@ -42,12 +39,9 @@ mod private /// // Use `feature_combinations` as needed. 
/// ``` - // aaa : for Petro : bad, don't use ignore with need - // aaa : I have to ignore this test because the function accepts &Package as input, and to mock it requires a lot of lines - pub fn features_powerset ( - package : &WorkspacePackage, + package : WorkspacePackageRef< '_ >, power : usize, exclude_features : &[ String ], include_features : &[ String ], @@ -109,14 +103,14 @@ mod private with_none_features : bool, enabled_features : &[ String ], total_features : usize - ) - -> usize + ) + -> usize { let mut estimate = 0; let mut binom = 1; let power = power.min( n ); - for k in 0..=power + for k in 0..=power { estimate += binom; binom = binom * ( n - k ) / ( k + 1 ); @@ -125,13 +119,13 @@ mod private if with_all_features { estimate += 1; } if with_none_features { estimate += 1; } - if !enabled_features.is_empty() + if !enabled_features.is_empty() { let len = enabled_features.len(); - let combinations = ( 0..=len.min( total_features ) ).map( | k | + let combinations = ( 0..=len.min( total_features ) ).map( | k | { let mut binom = 1; - for i in 0..k + for i in 0..k { binom = binom * ( len - i ) / ( i + 1 ); } diff --git a/module/move/willbe/src/entity/files.rs b/module/move/willbe/src/entity/files.rs new file mode 100644 index 0000000000..96efecf308 --- /dev/null +++ b/module/move/willbe/src/entity/files.rs @@ -0,0 +1,54 @@ +/// Internal namespace. +pub( crate ) mod private +{ + use crate::*; + + use std:: + { + io, + }; + use error:: + { + typed::Error, + }; + + /// `PathError` enum represents errors when creating a `CrateDir` object. + #[ derive( Debug, Error ) ] + pub enum PathError + { + /// Indicates a validation error with a descriptive message. + #[ error( "Failed to create a `CrateDir` object due to `{0}`" ) ] + Validation( String ), + /// Try to read or write + #[ error( "IO operation failed. Details : {0}" ) ] + Io( #[ from ] io::Error ), + } + +} + +// + +mod crate_dir; +mod manifest_file; +mod source_file; +mod either; + +// + +crate::mod_interface! +{ + exposed use super:: + { + crate_dir::CrateDir, + manifest_file::ManifestFile, + source_file:: + { + SourceFile, + Entries, + Sources, + // Items, + }, + either::EitherDirOrFile + }; + exposed use PathError; +} diff --git a/module/move/willbe/src/entity/files/crate_dir.rs b/module/move/willbe/src/entity/files/crate_dir.rs new file mode 100644 index 0000000000..1c0165947b --- /dev/null +++ b/module/move/willbe/src/entity/files/crate_dir.rs @@ -0,0 +1,259 @@ +use crate::*; + +use entity:: +{ + PathError, + ManifestFile, +}; +use core:: +{ + fmt, + ops:: + { + Deref, + DerefMut, + }, +}; +use std:: +{ + path::{ Path, PathBuf }, + io, +}; +use error:: +{ + Result, +}; +use path::{ AbsolutePath, Utf8Path, Utf8PathBuf }; + +/// Path to crate directory +#[ derive( Clone, Ord, PartialOrd, Eq, PartialEq, Hash ) ] +pub struct CrateDir( AbsolutePath ); + +impl CrateDir +{ + + /// Returns inner type which is an absolute path. + #[ inline( always ) ] + pub fn absolute_path( self ) -> AbsolutePath + { + self.0 + } + + /// Returns path to manifest aka cargo file. 
+ #[ inline( always ) ] + pub fn manifest_file( self ) -> ManifestFile + { + self.into() + } + +} + +impl fmt::Display for CrateDir +{ + fn fmt( &self, f : &mut fmt::Formatter<'_> ) -> fmt::Result + { + write!( f, "{}", self.0.display() ) + } +} + +impl fmt::Debug for CrateDir +{ + fn fmt( &self, f : &mut fmt::Formatter<'_> ) -> fmt::Result + { + write!( f, "crate dir :: {}", self.0.display() ) + } +} + +impl From< ManifestFile > for CrateDir +{ + fn from( src : ManifestFile ) -> Self + { + Self ( src.inner().parent().unwrap() ) + } +} + +impl From< CrateDir > for AbsolutePath +{ + fn from( src : CrateDir ) -> Self + { + src.absolute_path() + } +} + +impl From< CrateDir > for PathBuf +{ + fn from( src : CrateDir ) -> Self + { + src.absolute_path().inner() + } +} + +impl< 'a > TryFrom< &'a CrateDir > for &'a str +{ + type Error = std::io::Error; + fn try_from( src : &'a CrateDir ) -> Result< &'a str, Self::Error > + { + ( &src.0 ).try_into() + } +} + +impl TryFrom< &CrateDir > for String +{ + type Error = std::io::Error; + fn try_from( src : &CrateDir ) -> Result< String, Self::Error > + { + let src2 : &str = src.try_into()?; + Ok( src2.into() ) + } +} + +// impl< IntoPath : TryInto< PathBuf > > TryFrom< ( IntoPath, ) > +// for CrateDir +// where +// PathError : From< < IntoPath as TryInto< PathBuf > >::Error >, +// { +// type Error = PathError; +// +// #[ inline( always ) ] +// fn try_from( ( crate_dir_path, ) : ( IntoPath, ) ) -> Result< Self, Self::Error > +// { +// Self::try_from( AbsolutePath::try_from( crate_dir_path.try_into()? )? ) +// } +// } + +impl TryFrom< &AbsolutePath > for CrateDir +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( crate_dir_path : &AbsolutePath ) -> Result< Self, Self::Error > + { + crate_dir_path.clone().try_into() + } +} + +impl TryFrom< AbsolutePath > for CrateDir +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( crate_dir_path : AbsolutePath ) -> Result< Self, Self::Error > + { + if !crate_dir_path.as_ref().join( "Cargo.toml" ).is_file() + { + let err = io::Error::new( io::ErrorKind::InvalidData, format!( "Cannot find crate dir at {crate_dir_path:?}" ) ); + return Err( PathError::Io( err ) ); + } + Ok( Self( crate_dir_path ) ) + } +} + +impl TryFrom< &PathBuf > for CrateDir +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( crate_dir_path : &PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( crate_dir_path )? ) + } +} + +impl TryFrom< PathBuf > for CrateDir +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( crate_dir_path : PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( crate_dir_path )? ) + } +} + +impl TryFrom< &Path > for CrateDir +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( crate_dir_path : &Path ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( crate_dir_path )? ) + } +} + +impl TryFrom< &str > for CrateDir +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( crate_dir_path : &str ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( crate_dir_path )? ) + } +} + +impl TryFrom< Utf8PathBuf > for CrateDir +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( crate_dir_path : Utf8PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( crate_dir_path )? 
) + } +} + +impl TryFrom< &Utf8PathBuf > for CrateDir +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( crate_dir_path : &Utf8PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( crate_dir_path )? ) + } +} + +impl TryFrom< &Utf8Path > for CrateDir +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( crate_dir_path : &Utf8Path ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( crate_dir_path )? ) + } +} + +impl AsRef< Path > for CrateDir +{ + fn as_ref( &self ) -> &Path + { + self.0.as_ref() + } +} + +impl AsMut< Path > for CrateDir +{ + fn as_mut( &mut self ) -> &mut Path + { + self.0.as_mut() + } +} + +impl Deref for CrateDir +{ + type Target = AbsolutePath; + fn deref( &self ) -> &Self::Target + { + &self.0 + } +} + +impl DerefMut for CrateDir +{ + fn deref_mut( &mut self ) -> &mut Self::Target + { + &mut self.0 + } +} diff --git a/module/move/willbe/src/entity/files/either.rs b/module/move/willbe/src/entity/files/either.rs new file mode 100644 index 0000000000..922110964e --- /dev/null +++ b/module/move/willbe/src/entity/files/either.rs @@ -0,0 +1,90 @@ +use crate::*; +use core:: +{ + ops:: + { + Deref, + DerefMut, + }, +}; +use std:: +{ + path::Path, +}; +use error:: +{ + Result, +}; + +/// Wrapper over `data_type::Either< CrateDir, ManifestFile >` with utils methods. +#[ derive( Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug ) ] +pub struct EitherDirOrFile( data_type::Either< CrateDir, ManifestFile > ); + +impl EitherDirOrFile +{ + /// Returns inner type which is an data_type::Either< CrateDir, ManifestFile >. + pub fn inner( self ) -> data_type::Either< CrateDir, ManifestFile > + { + self.0 + } + +} + +impl TryFrom< &Path > for EitherDirOrFile +{ + type Error = PathError; + + fn try_from( value : &Path ) -> Result< Self, Self::Error > + { + if value.file_name() == Some( "Cargo.toml".as_ref() ) + { + Ok( Self( data_type::Either::Right( ManifestFile::try_from( value )? ) ) ) + } + else + { + Ok( Self( data_type::Either::Left( CrateDir::try_from( value )? 
) ) ) + } + } +} + +impl AsRef< Path > for EitherDirOrFile +{ + fn as_ref( &self ) -> &Path + { + match &self.0 + { + data_type::Either::Left( crate_dir ) => crate_dir.as_ref(), + data_type::Either::Right( manifest_path ) => manifest_path.as_ref(), + } + } +} + +impl AsMut< Path > for EitherDirOrFile +{ + fn as_mut( &mut self ) -> &mut Path + { + match &mut self.0 + { + data_type::Either::Left( crate_dir ) => crate_dir.as_mut(), + data_type::Either::Right( manifest_path ) => manifest_path.as_mut(), + } + } +} + +impl Deref for EitherDirOrFile +{ + type Target = Path; + + fn deref( &self ) -> &Self::Target + { + self.0.deref() + } +} + +impl DerefMut for EitherDirOrFile +{ + fn deref_mut( &mut self ) -> &mut Self::Target + { + self.0.deref_mut() + } +} \ No newline at end of file diff --git a/module/move/willbe/src/entity/files/manifest_file.rs b/module/move/willbe/src/entity/files/manifest_file.rs new file mode 100644 index 0000000000..d0a82fe907 --- /dev/null +++ b/module/move/willbe/src/entity/files/manifest_file.rs @@ -0,0 +1,276 @@ +use crate::*; + +use entity:: +{ + PathError, + CrateDir, +}; +use core:: +{ + fmt, + ops:: + { + Deref, + DerefMut, + }, +}; +use std:: +{ + path::{ Path, PathBuf }, + io, +}; + +use path::{ AbsolutePath, Utf8Path, Utf8PathBuf }; + +use error:: +{ + Result, +}; + +/// Path to crate directory +#[ derive( Clone, Ord, PartialOrd, Eq, PartialEq, Hash ) ] +pub struct ManifestFile( AbsolutePath ); + +impl ManifestFile +{ + // aaa : bad : for Petro : why clone? + // /// Returns an absolute path. + // pub fn absolute_path( &self ) -> AbsolutePath + // { + // self.0.clone() + // } + + /// Returns inner type whicj is an absolute path. + #[ inline( always ) ] + pub fn inner( self ) -> AbsolutePath + { + self.0 + } + + /// Returns path to crate dir. 
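+  ///
+  /// A minimal sketch of the inverse conversion ( the path below is hypothetical, so the example is not compiled as a doctest ) :
+  ///
+  /// ```ignore
+  /// let manifest_file = ManifestFile::try_from( "/path/to/crate/Cargo.toml" )?;
+  /// let crate_dir = manifest_file.crate_dir();
+  /// ```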
+ #[ inline( always ) ] + pub fn crate_dir( self ) -> CrateDir + { + self.into() + } + +} + +impl fmt::Display for ManifestFile +{ + fn fmt( &self, f : &mut fmt::Formatter<'_> ) -> fmt::Result + { + write!( f, "{}", self.0.display() ) + } +} + +impl fmt::Debug for ManifestFile +{ + fn fmt( &self, f : &mut fmt::Formatter<'_> ) -> fmt::Result + { + write!( f, "manifest file :: {}", self.0.display() ) + } +} + +// impl AsRef< AbsolutePath > for ManifestFile +// { +// fn as_ref( &self ) -> &AbsolutePath +// { +// &self.0 +// } +// } + +impl From< CrateDir > for ManifestFile +{ + fn from( src : CrateDir ) -> Self + { + Self( src.absolute_path().join( "Cargo.toml" ) ) + } +} + +impl From< ManifestFile > for AbsolutePath +{ + fn from( src : ManifestFile ) -> Self + { + src.inner() + } +} + +impl From< ManifestFile > for PathBuf +{ + fn from( src : ManifestFile ) -> Self + { + src.inner().inner() + } +} + +// impl From< &ManifestFile > for &str +// { +// fn from( src : &ManifestFile ) -> Self +// { +// src.to_str() +// } +// } + +impl< 'a > TryFrom< &'a ManifestFile > for &'a str +{ + type Error = std::io::Error; + fn try_from( src : &'a ManifestFile ) -> Result< &'a str, Self::Error > + { + ( &src.0 ).try_into() + } +} + +impl TryFrom< &ManifestFile > for String +{ + type Error = std::io::Error; + fn try_from( src : &ManifestFile ) -> Result< String, Self::Error > + { + let src2 : &str = src.try_into()?; + Ok( src2.into() ) + } +} + +impl TryFrom< &AbsolutePath > for ManifestFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( manifest_file : &AbsolutePath ) -> Result< Self, Self::Error > + { + manifest_file.clone().try_into() + } +} + +impl TryFrom< AbsolutePath > for ManifestFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( manifest_file : AbsolutePath ) -> Result< Self, Self::Error > + { + + if !manifest_file.as_ref().ends_with( "Cargo.toml" ) + { + let err = io::Error::new( io::ErrorKind::Other, format!( "File path does not end with Cargo.toml as it should {manifest_file:?}" ) ); + return Err( PathError::Io( err ) ); + } + + if !manifest_file.as_ref().is_file() + { + let err = io::Error::new( io::ErrorKind::InvalidData, format!( "Cannot find crate dir at {manifest_file:?}" ) ); + return Err( PathError::Io( err ) ); + } + Ok( Self( manifest_file ) ) + } +} + +impl TryFrom< PathBuf > for ManifestFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( manifest_file : PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( manifest_file )? ) + } +} + +impl TryFrom< &PathBuf > for ManifestFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( manifest_file : &PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( manifest_file )? ) + } +} + +impl TryFrom< &Path > for ManifestFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( manifest_file : &Path ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( manifest_file )? ) + } +} + +impl TryFrom< &str > for ManifestFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( crate_dir_path : &str ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( crate_dir_path )? ) + } +} + +impl TryFrom< Utf8PathBuf > for ManifestFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( manifest_file : Utf8PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( manifest_file )? 
) + } +} + +impl TryFrom< &Utf8PathBuf > for ManifestFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( manifest_file : &Utf8PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( manifest_file )? ) + } +} + +impl TryFrom< &Utf8Path > for ManifestFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( manifest_file : &Utf8Path ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( manifest_file )? ) + } +} + +impl AsRef< Path > for ManifestFile +{ + fn as_ref( &self ) -> &Path + { + self.0.as_ref() + } +} + +impl AsMut< Path > for ManifestFile +{ + fn as_mut( &mut self ) -> &mut Path + { + self.0.as_mut() + } +} + +impl Deref for ManifestFile +{ + type Target = AbsolutePath; + fn deref( &self ) -> &Self::Target + { + &self.0 + } +} + +impl DerefMut for ManifestFile +{ + fn deref_mut( &mut self ) -> &mut Self::Target + { + &mut self.0 + } +} diff --git a/module/move/willbe/src/entity/files/source_file.rs b/module/move/willbe/src/entity/files/source_file.rs new file mode 100644 index 0000000000..0ffba70092 --- /dev/null +++ b/module/move/willbe/src/entity/files/source_file.rs @@ -0,0 +1,270 @@ +use crate::*; + +use entity:: +{ + PathError, + ManifestFile, +}; +use core:: +{ + fmt, + ops:: + { + Deref, + DerefMut, + }, +}; +use std:: +{ + fs, + path::{ Path, PathBuf }, + borrow::Cow, +}; +use error:: +{ + Result, +}; +use path::{ AbsolutePath, Utf8Path, Utf8PathBuf }; + +/// Path to a source file +#[ derive( Clone, Ord, PartialOrd, Eq, PartialEq, Hash ) ] +pub struct SourceFile( AbsolutePath ); + +impl SourceFile +{ + + /// Returns inner type which is an absolute path. + #[ inline( always ) ] + pub fn inner( self ) -> AbsolutePath + { + self.0 + } + +} + +impl fmt::Display for SourceFile +{ + fn fmt( &self, f : &mut fmt::Formatter<'_> ) -> fmt::Result + { + write!( f, "{}", self.0.display() ) + } +} + +impl fmt::Debug for SourceFile +{ + fn fmt( &self, f : &mut fmt::Formatter<'_> ) -> fmt::Result + { + write!( f, "source file :: {}", self.0.display() ) + } +} + +impl From< ManifestFile > for SourceFile +{ + fn from( src : ManifestFile ) -> Self + { + Self ( src.inner().parent().unwrap() ) + } +} + +impl From< SourceFile > for AbsolutePath +{ + fn from( src : SourceFile ) -> Self + { + src.inner() + } +} + +impl From< SourceFile > for PathBuf +{ + fn from( src : SourceFile ) -> Self + { + src.inner().inner() + } +} + +impl< 'a > TryFrom< &'a SourceFile > for &'a str +{ + type Error = std::io::Error; + fn try_from( src : &'a SourceFile ) -> Result< &'a str, Self::Error > + { + ( &src.0 ).try_into() + } +} + +impl TryFrom< &SourceFile > for String +{ + type Error = std::io::Error; + fn try_from( src : &SourceFile ) -> Result< String, Self::Error > + { + let src2 : &str = src.try_into()?; + Ok( src2.into() ) + } +} + +impl TryFrom< &AbsolutePath > for SourceFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( src : &AbsolutePath ) -> Result< Self, Self::Error > + { + src.clone().try_into() + } +} + +impl TryFrom< AbsolutePath > for SourceFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( src : AbsolutePath ) -> Result< Self, Self::Error > + { + Ok( Self( src ) ) + } +} + +impl TryFrom< PathBuf > for SourceFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( src : PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( src )? 
) + } +} + +impl TryFrom< &Path > for SourceFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( src : &Path ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( src )? ) + } +} + +impl TryFrom< &str > for SourceFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( src : &str ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( src )? ) + } +} + +impl TryFrom< Utf8PathBuf > for SourceFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( src : Utf8PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( src )? ) + } +} + +impl TryFrom< &Utf8PathBuf > for SourceFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( src : &Utf8PathBuf ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( src )? ) + } +} + +impl TryFrom< &Utf8Path > for SourceFile +{ + type Error = PathError; + + #[ inline( always ) ] + fn try_from( src : &Utf8Path ) -> Result< Self, Self::Error > + { + Self::try_from( AbsolutePath::try_from( src )? ) + } +} + +impl AsRef< Path > for SourceFile +{ + fn as_ref( &self ) -> &Path + { + self.0.as_ref() + } +} + +impl AsMut< Path > for SourceFile +{ + fn as_mut( &mut self ) -> &mut Path + { + self.0.as_mut() + } +} + +impl Deref for SourceFile +{ + type Target = AbsolutePath; + fn deref( &self ) -> &Self::Target + { + &self.0 + } +} + +impl DerefMut for SourceFile +{ + fn deref_mut( &mut self ) -> &mut Self::Target + { + &mut self.0 + } +} + +// = + +impl CodeItems for SourceFile +{ + fn items( &self ) -> impl IterTrait< '_, syn::Item > + { + // xxx : use closures instead of expect + let content = fs::read_to_string( self.as_ref() ).expect( &format!( "Failed to parse file {self}" ) ); + let parsed : syn::File = syn::parse_file( &content ).expect( &format!( "Failed to parse file {self}" ) ); + parsed.items.into_iter() + } +} + +impl AsCode for SourceFile +{ + fn as_code< 'a >( &'a self ) -> std::io::Result< Cow< 'a, str > > + { + Ok( Cow::Owned( std::fs::read_to_string( self.as_ref() )? ) ) + } +} + +// = + +/// A trait that defines a method for retrieving an iterator over entries. +/// +/// The `Entries` trait is used to represent objects that can provide an iterator over their +/// contained entries, which are represented as source files. This can be useful in scenarios +/// where you need to access or process all entries associated with an object. +pub trait Entries +{ + /// Returns an iterator over the entries. + fn entries( &self ) -> impl IterTrait< '_, SourceFile >; +} + +/// A trait that defines a method for retrieving an iterator over source files. +/// +/// The `Sources` trait is used to represent objects that can provide an iterator over their +/// contained source files. This can be useful in scenarios where you need to access or process +/// all source files associated with an object. +pub trait Sources +{ + /// Returns an iterator over the source files. 
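+  ///
+  /// A sketch of typical consumption, assuming `entity` is some implementor of `Sources` :
+  ///
+  /// ```ignore
+  /// for source_file in entity.sources()
+  /// {
+  ///   println!( "{source_file}" );
+  /// }
+  /// ```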
+ fn sources( &self ) -> impl IterTrait< '_, SourceFile >; +} + +// = diff --git a/module/move/willbe/src/entity/git.rs b/module/move/willbe/src/entity/git.rs new file mode 100644 index 0000000000..3e9d0239d0 --- /dev/null +++ b/module/move/willbe/src/entity/git.rs @@ -0,0 +1,88 @@ +mod private +{ + use crate::*; + + use std::fmt; + use process_tools::process; + use error:: + { + Result, + untyped::{ format_err, Context }, + }; + + #[ derive( Debug, Default, Clone ) ] + /// Represents an extended Git report with optional process reports. + pub struct ExtendedGitReport + { + /// Optional report for the `git add` process. + pub add : Option< process::Report >, + /// Optional report for the `git commit` process. + pub commit : Option< process::Report >, + /// Optional report for the `git push` process. + pub push : Option< process::Report >, + } + + impl fmt::Display for ExtendedGitReport + { + fn fmt( &self, f : &mut fmt::Formatter<'_> ) -> fmt::Result + { + let Self { add, commit, push } = &self; + + if let Some( add ) = add { writeln!( f, "{add}" )? } + if let Some( commit ) = commit { writeln!( f, "{commit}" )? } + if let Some( push ) = push { writeln!( f, "{push}" )? } + + Ok( () ) + } + } + + // aaa : for Bohdan : should not be here // aaa : done + // aaa : for Bohdan : documentation // aaa : done + /// The `GitOptions` struct represents a set of options used to perform a Git commit operation. + #[ derive( Debug, Clone ) ] + pub struct GitOptions + { + /// An absolute path to the root directory of the Git repository. + pub git_root : AbsolutePath, + /// A vector of absolute paths to the files or directories that should be committed. + pub items : Vec< AbsolutePath >, + /// A string containing the commit message. + pub message : String, + /// A boolean flag indicating whether the commit should be performed in dry run mode + /// (i.e., no changes are actually made to the repository) + pub dry : bool, + } + + // aaa : for Bohdan : should not be here // aaa : done + // aaa : for Bohdan : documentation // aaa : done + /// Performs a Git commit operation using the provided options + pub fn perform_git_commit( o : GitOptions ) -> Result< ExtendedGitReport > + { + let mut report = ExtendedGitReport::default(); + if o.items.is_empty() { return Ok( report ); } + let items = o + .items + .iter() + .map + ( + | item | item.as_ref().strip_prefix( o.git_root.as_ref() ).map( std::path::Path::to_string_lossy ) + .with_context( || format!("git_root: {}, item: {}", o.git_root.as_ref().display(), item.as_ref().display() ) ) + ) + .collect::< Result< Vec< _ > > >()?; + let res = tool::git::add( &o.git_root, &items, o.dry ).map_err( | e | format_err!( "{report}\n{e}" ) )?; + report.add = Some( res ); + let res = tool::git::commit( &o.git_root, &o.message, o.dry ).map_err( | e | format_err!( "{report}\n{e}" ) )?; + report.commit = Some( res ); + + Ok( report ) + } +} + +// + +crate::mod_interface! 
+{ + protected use ExtendedGitReport; + protected use GitOptions; + protected use perform_git_commit; +} diff --git a/module/move/willbe/src/entity/manifest.rs b/module/move/willbe/src/entity/manifest.rs index d167175177..75a4f3f8f0 100644 --- a/module/move/willbe/src/entity/manifest.rs +++ b/module/move/willbe/src/entity/manifest.rs @@ -7,82 +7,14 @@ pub( crate ) mod private { io::{ self, Read }, fs, - path::{ Path, PathBuf }, }; - use wtools::error:: + use error:: { - Result, - thiserror, - for_lib::Error, - for_app::format_err, + typed::Error, + untyped::{ Result, format_err }, }; - use _path::AbsolutePath; - /// `CrateDirError` enum represents errors when creating a `CrateDir` object. - #[ derive( Debug, Error ) ] - pub enum CrateDirError - { - /// Indicates a validation error with a descriptive message. - #[ error( "Failed to create a `CrateDir` object due to `{0}`" ) ] - Validation( String ), - } - - /// Path to crate directory - #[ derive( Debug, Clone ) ] - pub struct CrateDir( AbsolutePath ); - - impl AsRef< Path > for CrateDir - { - fn as_ref( &self ) -> &Path - { - self.0.as_ref() - } - } - - impl TryFrom< AbsolutePath > for CrateDir - { - // aaa : make better errors - // aaa : use `CrateDirError` for it - type Error = CrateDirError; - - fn try_from( crate_dir_path : AbsolutePath ) -> Result< Self, Self::Error > - { - if !crate_dir_path.as_ref().join( "Cargo.toml" ).exists() - { - return Err( CrateDirError::Validation( "The path is not a crate directory path".into() ) ); - } - - Ok( Self( crate_dir_path ) ) - } - } - - impl TryFrom< PathBuf > for CrateDir - { - type Error = CrateDirError; - - fn try_from( crate_dir_path : PathBuf ) -> Result< Self, Self::Error > - { - if !crate_dir_path.join( "Cargo.toml" ).exists() - { - return Err( CrateDirError::Validation( "The path is not a crate directory path".into() ) ); - } - - Ok( Self( AbsolutePath::try_from( crate_dir_path ).unwrap() ) ) - } - } - - impl CrateDir - { - /// Returns an absolute path. - pub fn absolute_path( &self ) -> AbsolutePath - { - self.0.clone() - } - } - - - -/// Represents errors related to manifest data processing. + /// Represents errors related to manifest data processing. #[ derive( Debug, Error ) ] pub enum ManifestError { @@ -110,45 +42,43 @@ pub( crate ) mod private pub struct Manifest { /// Path to `Cargo.toml` - pub manifest_path : AbsolutePath, + // pub manifest_file : AbsolutePath, + pub manifest_file : ManifestFile, + // aaa : for Bohdan : for Petro : why not ManifestFile? /// Strict type of `Cargo.toml` manifest. 
- pub manifest_data : Option< toml_edit::Document >, + pub data : toml_edit::Document, + // pub data : Option< toml_edit::Document >, } - impl TryFrom< AbsolutePath > for Manifest + impl TryFrom< ManifestFile > for Manifest { - // aaa : make better errors - // aaa : return `ManifestError` type Error = ManifestError; - fn try_from( manifest_path : AbsolutePath ) -> Result< Self, Self::Error > + fn try_from( manifest_file : ManifestFile ) -> Result< Self, Self::Error > { - if !manifest_path.as_ref().ends_with( "Cargo.toml" ) - { - let err = io::Error::new( io::ErrorKind::NotFound, "Cannot find manifest" ); - return Err( ManifestError::Io( err ) ); - } + + let read = fs::read_to_string( &manifest_file )?; + let data = read.parse::< toml_edit::Document >() + .map_err( | e | io::Error::new( io::ErrorKind::InvalidData, e ) )?; Ok ( Manifest { - manifest_path, - manifest_data : None, + manifest_file, + data, } ) } } - impl From< CrateDir > for Manifest + impl TryFrom< CrateDir > for Manifest { - fn from( value : CrateDir ) -> Self + type Error = ManifestError; + + fn try_from( src : CrateDir ) -> Result< Self, Self::Error > { - Self - { - manifest_path : value.0.join( "Cargo.toml" ), - manifest_data : None, - } + Self::try_from( src.manifest_file() ) } } @@ -164,99 +94,65 @@ pub( crate ) mod private /// A mutable reference to the TOML document. pub fn data( &mut self ) -> &mut toml_edit::Document { - if self.manifest_data.is_none() { self.load().unwrap() } - - self.manifest_data.as_mut().unwrap() + // if self.data.is_none() { self.load().unwrap() } + // self.data.as_mut().unwrap() + &mut self.data } - + /// Returns path to `Cargo.toml`. - pub fn manifest_path( &self ) -> &AbsolutePath + pub fn manifest_file( &self ) -> &AbsolutePath { - &self.manifest_path + &self.manifest_file } /// Path to directory where `Cargo.toml` located. pub fn crate_dir( &self ) -> CrateDir { - CrateDir( self.manifest_path.parent().unwrap() ) - } - - /// Load manifest from path. - pub fn load( &mut self ) -> Result< (), ManifestError > - { - let read = fs::read_to_string( &self.manifest_path )?; - let result = read.parse::< toml_edit::Document >().map_err( | e | io::Error::new( io::ErrorKind::InvalidData, e ) )?; - self.manifest_data = Some( result ); - - Ok( () ) + self.manifest_file.parent().unwrap().try_into().unwrap() + // CrateDir( self.manifest_file.parent().unwrap() ) } - // aaa : for Bohdan : don't abuse anyhow - // aaa : return `io` error /// Store manifest. pub fn store( &self ) -> io::Result< () > { - // If the `manifest_data` doesn't contain any data, then there's no point in attempting to write - if let Some( data ) = &self.manifest_data - { - fs::write( &self.manifest_path, data.to_string() )?; - } + fs::write( &self.manifest_file, self.data.to_string() )?; Ok( () ) } /// Check that the current manifest is the manifest of the package (can also be a virtual workspace). - pub fn package_is( &self ) -> Result< bool, ManifestError> + pub fn package_is( &self ) -> bool { - let data = self.manifest_data.as_ref().ok_or_else( || ManifestError::EmptyManifestData )?; - if data.get( "package" ).is_some() && data[ "package" ].get( "name" ).is_some() - { - return Ok( true ); - } - Ok( false ) + // let data = self.data.as_ref().ok_or_else( || ManifestError::EmptyManifestData )?; + let data = &self.data; + data.get( "package" ).is_some() && data[ "package" ].get( "name" ).is_some() } /// Check that module is local. /// The package is defined as local if the `publish` field is set to `false' or the registers are specified. 
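+  ///
+  /// A sketch, assuming `manifest` was loaded from a crate whose `Cargo.toml` sets `publish = false` :
+  ///
+  /// ```ignore
+  /// assert!( manifest.local_is() );
+  /// ```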
- pub fn local_is( &self ) -> Result + pub fn local_is( &self ) -> bool { - let data = self.manifest_data.as_ref().ok_or_else( || ManifestError::EmptyManifestData )?; + // let data = self.data.as_ref().ok_or_else( || ManifestError::EmptyManifestData )?; + let data = &self.data; if data.get( "package" ).is_some() && data[ "package" ].get( "name" ).is_some() { let remote = data[ "package" ].get( "publish" ).is_none() - || data[ "package" ][ "publish" ].as_bool().ok_or_else( || ManifestError::CannotFindValue( "[package], [publish]".into() ) )?; - return Ok(!remote); - } - Ok(true) - } - } + || data[ "package" ][ "publish" ].as_bool().or( Some( true ) ).unwrap(); - /// Create and load manifest by specified path - // aaa : for Bohdan : use newtype, add proper errors handing - // aaa : return `ManifestError` - pub fn open( path : AbsolutePath ) -> Result< Manifest, ManifestError > - { - let mut manifest = if let Ok( dir ) = CrateDir::try_from( path.clone() ) - { - Manifest::from( dir ) + return !remote; + } + true } - else - { - Manifest::try_from( path )? - }; - - manifest.load()?; - - Ok( manifest ) } /// Retrieves the repository URL of a package from its `Cargo.toml` file. - pub fn repo_url( package_path : &Path ) -> Result< String > + pub fn repo_url( crate_dir : &CrateDir ) -> Result< String > { - let path = package_path.join( "Cargo.toml" ); + let path = crate_dir.clone().manifest_file().inner().inner(); if path.exists() { let mut contents = String::new(); + // qqq : zzz : for Petro : redundant read and parse fs::File::open( path )?.read_to_string( &mut contents )?; let doc = contents.parse::< toml_edit::Document >()?; @@ -266,12 +162,12 @@ pub( crate ) mod private .and_then( | i | i.as_str() ); if let Some( repo_url ) = repo_url { - url::extract_repo_url( repo_url ).ok_or_else( || format_err!( "Fail to extract repository url ") ) + url::repo_url_extract( repo_url ).ok_or_else( || format_err!( "Fail to extract repository url ") ) } else { - let report = git::ls_remote_url( package_path )?; - url::extract_repo_url( &report.out.trim() ).ok_or_else( || format_err!( "Fail to extract repository url from git remote.") ) + let report = tool::git::ls_remote_url( crate_dir.clone().absolute_path() )?; + url::repo_url_extract( &report.out.trim() ).ok_or_else( || format_err!( "Fail to extract repository url from git remote.") ) } } else @@ -287,9 +183,6 @@ pub( crate ) mod private crate::mod_interface! { exposed use Manifest; - exposed use CrateDir; orphan use ManifestError; - orphan use CrateDirError; - protected use open; protected use repo_url; } diff --git a/module/move/willbe/src/entity/mod.rs b/module/move/willbe/src/entity/mod.rs index 24342f1c3b..6f26700128 100644 --- a/module/move/willbe/src/entity/mod.rs +++ b/module/move/willbe/src/entity/mod.rs @@ -1,5 +1,16 @@ crate::mod_interface! { + /// Rust toolchain channel: stable/nightly. + layer channel; + orphan use super::channel; + + /// Source code. + layer code; + orphan use super::code; + + /// Dependency of a package. + layer dependency; + orphan use super::dependency; /// Compare two crate archives and create a difference report. layer diff; @@ -9,43 +20,71 @@ crate::mod_interface! layer features; orphan use super::features; + /// Paths and files. + layer files; + orphan use super::files; + + /// Git. + layer git; + orphan use super::git; + + /// To manipulate manifest data. 
+ layer manifest; + orphan use super::manifest; + + /// Rust build optimization: debug/release + layer optimization; + orphan use super::optimization; + + /// Offers capabilities for package management, facilitating the handling and organization of packages. + layer package; + orphan use super::package; + + /// Md's extension for workspace. + layer package_md_extension; + orphan use super::package_md_extension; + + /// Provides a set of functionalities for handling and manipulating packages. + layer packages; + orphan use super::packages; + /// Handles operations related to packed Rust crates layer packed_crate; orphan use super::packed_crate; - + + /// Progress bar staff. + layer progress_bar; + orphan use super::progress_bar; + + /// Publish. + layer publish; + orphan use super::publish; + /// Facade for `preatytable` crate. layer table; orphan use super::table; - /// Provides a set of functionalities for handling and manipulating packages. - layer packages; - orphan use super::packages; + /// Operations with tests + layer test; + orphan use super::test; - /// Offers capabilities for package management, facilitating the handling and organization of packages. - layer package; - orphan use super::package; + /// Provides an opportunity to work with versions. + layer version; + orphan use super::version; /// It features the ability to interact with workspaces, manage their participants, and other functionalities. layer workspace; orphan use super::workspace; - /// To manipulate manifest data. - layer manifest; - orphan use super::manifest; + /// Workspace' graph. + layer workspace_graph; + orphan use super::workspace_graph; - /// Provides an opportunity to work with versions. - layer version; - orphan use super::version; + /// Md's extension for workspace. + layer workspace_md_extension; + orphan use super::workspace_md_extension; - /// Operations with tests - layer test; - orphan use super::test; - - /// Rust toolchain channel: stable/nightly. - layer channel; - orphan use super::channel; - - /// Rust build optimization: debug/release - layer optimization; - orphan use super::optimization; + /// Packages of workspace. + layer workspace_package; + orphan use super::workspace_package; } diff --git a/module/move/willbe/src/entity/optimization.rs b/module/move/willbe/src/entity/optimization.rs index a2c1a58241..56bdaf1385 100644 --- a/module/move/willbe/src/entity/optimization.rs +++ b/module/move/willbe/src/entity/optimization.rs @@ -1,9 +1,8 @@ mod private { - use std::fmt::Formatter; - /// Rust optimization - #[ derive( Debug, Default, Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd ) ] + #[ derive( Debug, Default, Copy, Clone, Hash, Eq, PartialEq, Ord, PartialOrd, derive_tools::Display ) ] + #[ display( style = "snake_case" ) ] pub enum Optimization { /// Debug @@ -12,21 +11,7 @@ mod private /// Release Release, } - - // qqq : use derive - impl std::fmt::Display for Optimization - { - fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result - { - match self - { - Optimization::Debug => write!( f, "debug" ), - Optimization::Release => write!( f, "release" ), - } - } - } } -// qqq : for Petro : why is it here? crate::mod_interface! 
{ diff --git a/module/move/willbe/src/entity/package.rs b/module/move/willbe/src/entity/package.rs index 19940502b6..6639904f2b 100644 --- a/module/move/willbe/src/entity/package.rs +++ b/module/move/willbe/src/entity/package.rs @@ -4,47 +4,38 @@ mod private use std:: { - path::Path, - collections::{ HashMap, HashSet }, + hash::Hash, }; - use std::fmt::Formatter; - use std::hash::Hash; - use std::path::PathBuf; - use process_tools::process; - use manifest::{ Manifest, ManifestError }; use crates_tools::CrateArchive; - - use workspace::Workspace; - use _path::AbsolutePath; - - use wtools:: + use error:: { - iter::Itertools, - error:: - { - thiserror, - Result, - for_lib::Error, - for_app::{ format_err, Context }, - } + Result, + typed::Error, }; - use action::readme_health_table_renew::Stability; - use former::Former; - use workspace::WorkspacePackage; - use diff::crate_diff; - use version::version_revert; - use error_tools::for_app::Error; - use channel::Channel; + /// A wrapper type for representing the name of a package. /// + /// This struct encapsulates a `String` that holds the name of a package. + #[ derive + ( + Debug, Default, Clone, Hash, Ord, PartialOrd, Eq, PartialEq, + derive_tools::Display, derive_tools::Deref, derive_tools::From, derive_tools::AsRef, + ) ] + pub struct PackageName( String ); + + // + /// Represents different types of packages in a Cargo workspace. + /// + /// It is designed to accommodate the two primary types of package + /// representations within a Cargo workspace. #[ derive( Debug, Clone ) ] - pub enum Package + pub enum Package< 'a > { /// `Cargo.toml` file. Manifest( Manifest ), - /// Cargo metadata package. - Metadata( WorkspacePackage ), + /// Cargo package package. + WorkspacePackageRef( WorkspacePackageRef< 'a > ), } /// Represents errors related to package handling. @@ -53,10 +44,10 @@ mod private { /// Manifest error. #[ error( "Manifest error. Reason : {0}." ) ] - Manifest( #[ from ] ManifestError ), - /// Fail to load metadata. - #[ error( "Fail to load metadata." ) ] - Metadata, + Manifest( #[ from ] manifest::ManifestError ), + /// Fail to load package. + #[ error( "Fail to load package." ) ] + WorkspacePackageRef, /// Fail to load remote package. #[ error( "Fail to load remote package." ) ] LoadRemotePackage, @@ -71,49 +62,45 @@ mod private NotAPackage, } - impl TryFrom< AbsolutePath > for Package + impl< 'a > TryFrom< ManifestFile > for Package< 'a > { - // aaa : make better errors - // aaa : return `PackageError` instead of `anohow` message type Error = PackageError; - fn try_from( value : AbsolutePath ) -> Result< Self, Self::Error > + fn try_from( value : ManifestFile ) -> Result< Self, Self::Error > { - let manifest = manifest::open( value.clone() )?; - if !manifest.package_is()? + let package = Manifest::try_from( value )?; + if !package.package_is() { return Err( PackageError::NotAPackage ); } - Ok( Self::Manifest( manifest ) ) + Ok( Self::Manifest( package ) ) } } - impl TryFrom< CrateDir > for Package + impl< 'a > TryFrom< CrateDir > for Package< 'a > { type Error = PackageError; fn try_from( value : CrateDir ) -> Result< Self, Self::Error > { - let manifest = manifest::open( value.absolute_path().join( "Cargo.toml" ) )?; - if !manifest.package_is()? 
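+      // Only manifests that define a `[package]` table are accepted here;
+      // virtual workspace manifests are rejected with `PackageError::NotAPackage`.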
+ let package = Manifest::try_from( value )?; + if !package.package_is() { return Err( PackageError::NotAPackage ); } - Ok( Self::Manifest( manifest ) ) + Ok( Self::Manifest( package ) ) } } - impl TryFrom< Manifest > for Package + impl< 'a > TryFrom< Manifest > for Package< 'a > { - // aaa : make better errors - // aaa : return `PackageError` instead of `anohow` message type Error = PackageError; fn try_from( value : Manifest ) -> Result< Self, Self::Error > { - if !value.package_is()? + if !value.package_is() { return Err( PackageError::NotAPackage ); } @@ -122,23 +109,24 @@ mod private } } - impl From< WorkspacePackage > for Package + impl< 'a > From< WorkspacePackageRef< 'a > > for Package< 'a > { - fn from( value : WorkspacePackage ) -> Self + fn from( value : WorkspacePackageRef< 'a > ) -> Self { - Self::Metadata( value ) + Self::WorkspacePackageRef( value ) } } - impl Package + impl< 'a > Package< 'a > { + /// Path to `Cargo.toml` - pub fn manifest_path( &self ) -> AbsolutePath + pub fn manifest_file( &self ) -> ManifestFile { match self { - Self::Manifest( manifest ) => manifest.manifest_path.clone(), - Self::Metadata( metadata ) => AbsolutePath::try_from( metadata.manifest_path().as_std_path().to_path_buf() ).unwrap(), + Self::Manifest( package ) => package.manifest_file.clone(), + Self::WorkspacePackageRef( package ) => package.manifest_file().unwrap(), } } @@ -147,33 +135,8 @@ mod private { match self { - Self::Manifest( manifest ) => manifest.crate_dir(), - Self::Metadata( metadata ) => - { - let path = metadata.manifest_path().parent().unwrap().as_std_path().to_path_buf(); - let absolute = AbsolutePath::try_from( path ).unwrap(); - - CrateDir::try_from( absolute ).unwrap() - }, - } - } - - /// Package name - pub fn name( &self ) -> Result< String, PackageError > - { - match self - { - Self::Manifest( manifest ) => - { - let data = manifest.manifest_data.as_ref().ok_or_else( || PackageError::Manifest( ManifestError::EmptyManifestData ) )?; - - // Unwrap safely because of the `Package` type guarantee - Ok( data[ "package" ][ "name" ].as_str().unwrap().to_string() ) - } - Self::Metadata( metadata ) => - { - Ok( metadata.name().clone() ) - } + Self::Manifest( package ) => package.crate_dir(), + Self::WorkspacePackageRef( package ) => package.crate_dir().unwrap(), } } @@ -182,89 +145,35 @@ mod private { match self { - Self::Manifest( manifest ) => + Self::Manifest( package ) => { - let data = manifest.manifest_data.as_ref().ok_or_else( || PackageError::Manifest( ManifestError::EmptyManifestData ) )?; + // let data = package.data.as_ref().ok_or_else( || PackageError::Manifest( ManifestError::EmptyManifestData ) )?; + let data = &package.data; // Unwrap safely because of the `Package` type guarantee Ok( data[ "package" ][ "version" ].as_str().unwrap().to_string() ) } - Self::Metadata( metadata ) => - { - Ok( metadata.version().to_string() ) - } - } - } - - /// Stability - pub fn stability( &self ) -> Result< Stability, PackageError > - { - match self - { - Self::Manifest( manifest ) => - { - let data = manifest.manifest_data.as_ref().ok_or_else( || PackageError::Manifest( ManifestError::EmptyManifestData ) )?; - - // Unwrap safely because of the `Package` type guarantee - Ok( data[ "package" ].get( "metadata" ).and_then( | m | m.get( "stability" ) ).and_then( | s | s.as_str() ).and_then( | s | s.parse::< Stability >().ok() ).unwrap_or( Stability::Experimental) ) - } - Self::Metadata( metadata ) => + Self::WorkspacePackageRef( package ) => { - Ok( metadata.metadata()[ "stability" 
].as_str().and_then( | s | s.parse::< Stability >().ok() ).unwrap_or( Stability::Experimental) ) - } - } - } - - /// Repository - pub fn repository( &self ) -> Result< Option< String >, PackageError > - { - match self - { - Self::Manifest( manifest ) => - { - let data = manifest.manifest_data.as_ref().ok_or_else( || PackageError::Manifest( ManifestError::EmptyManifestData ) )?; - - // Unwrap safely because of the `Package` type guarantee - Ok( data[ "package" ].get( "repository" ).and_then( | r | r.as_str() ).map( | r | r.to_string()) ) - } - Self::Metadata( metadata ) => - { - Ok( metadata.repository().cloned() ) - } - } - } - - /// Discord url - pub fn discord_url( &self ) -> Result< Option< String >, PackageError > - { - match self - { - Self::Manifest( manifest ) => - { - let data = manifest.manifest_data.as_ref().ok_or_else( || PackageError::Manifest( ManifestError::EmptyManifestData ) )?; - - Ok( data[ "package" ].get( "metadata" ).and_then( | m | m.get( "discord_url" ) ).and_then( | url | url.as_str() ).map( | r | r.to_string() ) ) - } - Self::Metadata( metadata ) => - { - Ok( metadata.metadata()[ "discord_url" ].as_str().map( | url | url.to_string() ) ) + Ok( package.version().to_string() ) } } } /// Check that module is local. - pub fn local_is( &self ) -> Result< bool, ManifestError > + pub fn local_is( &self ) -> bool { match self { - Self::Manifest( manifest ) => + Self::Manifest( package ) => { - // verify that manifest not empty - manifest.local_is() + // verify that package not empty + package.local_is() } - Self::Metadata( metadata ) => + Self::WorkspacePackageRef( package ) => { - Ok( !( metadata.publish().is_none() || metadata.publish().as_ref().is_some_and( | p | p.is_empty() ) ) ) + !( package.publish().is_none() || package.publish().as_ref().is_some_and( | p | p.is_empty() ) ) + // Ok( !( package.publish().is_none() || package.publish().as_ref().is_some_and( | p | p.is_empty() ) ) ) } } } @@ -274,654 +183,15 @@ mod private { match self { - Package::Manifest( manifest ) => Ok( manifest.clone() ), - Package::Metadata( metadata ) => manifest::open + Package::Manifest( package ) => Ok( package.clone() ), + Package::WorkspacePackageRef( package ) => Manifest::try_from ( - AbsolutePath::try_from( metadata.manifest_path() ).map_err( | _ | PackageError::LocalPath )? + package.manifest_file().map_err( | _ | PackageError::LocalPath )? // qqq : use trait ) - .map_err( | _ | PackageError::Metadata ), - } - } - - /// Returns the `Metadata` - pub fn metadata( &self ) -> Result< WorkspacePackage, PackageError > - { - match self - { - Package::Manifest( manifest ) => - Workspace::with_crate_dir( manifest.crate_dir() ).map_err( | _ | PackageError::Metadata )? - .package_find_by_manifest( &manifest.manifest_path ) - .ok_or_else( || PackageError::Metadata ), - Package::Metadata( metadata ) => Ok( metadata.clone() ), - } - } - } - - #[ derive( Debug, Default, Clone ) ] - pub struct ExtendedGitReport - { - pub add : Option< process::Report >, - pub commit : Option< process::Report >, - pub push : Option< process::Report >, - } - - impl std::fmt::Display for ExtendedGitReport - { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - let Self { add, commit, push } = &self; - if let Some( add ) = add { writeln!( f, "{add}" )? } - if let Some( commit ) = commit { writeln!( f, "{commit}" )? } - if let Some( push ) = push { writeln!( f, "{push}" )? 
} - - Ok( () ) - } - } - - #[ derive( Debug, Clone ) ] - pub struct GitOptions - { - pub git_root : AbsolutePath, - pub items : Vec< AbsolutePath >, - pub message : String, - pub dry : bool, - } - - fn perform_git_commit( o : GitOptions ) -> Result< ExtendedGitReport > - { - let mut report = ExtendedGitReport::default(); - if o.items.is_empty() { return Ok( report ); } - let items = o - .items - .iter() - .map - ( - | item | item.as_ref().strip_prefix( o.git_root.as_ref() ).map( Path::to_string_lossy ) - .with_context( || format!("git_root: {}, item: {}", o.git_root.as_ref().display(), item.as_ref().display() ) ) - ) - .collect::< Result< Vec< _ > > >()?; - let res = git::add( &o.git_root, &items, o.dry ).map_err( | e | format_err!( "{report}\n{e}" ) )?; - report.add = Some( res ); - let res = git::commit( &o.git_root, &o.message, o.dry ).map_err( | e | format_err!( "{report}\n{e}" ) )?; - report.commit = Some( res ); - - Ok( report ) - } - - #[ derive( Debug, Clone ) ] - pub struct PackagePublishInstruction - { - pub package_name : String, - pub pack : cargo::PackOptions, - pub version_bump : version::BumpOptions, - pub git_options : GitOptions, - pub publish : cargo::PublishOptions, - pub dry : bool, - } - - /// Represents a planner for publishing a single package. - #[ derive( Debug, Former ) ] - #[ perform( fn build() -> PackagePublishInstruction ) ] - pub struct PublishSinglePackagePlanner - { - workspace_dir : CrateDir, - package : Package, - channel : Channel, - base_temp_dir : Option< PathBuf >, - #[ former( default = true ) ] - dry : bool, - } - - impl PublishSinglePackagePlanner - { - fn build( self ) -> PackagePublishInstruction - { - let crate_dir = self.package.crate_dir(); - let workspace_root : AbsolutePath = self.workspace_dir.absolute_path(); - let pack = cargo::PackOptions - { - path : crate_dir.as_ref().into(), - channel : self.channel, - allow_dirty : self.dry, - no_verify : self.dry, - temp_path : self.base_temp_dir.clone(), - dry : self.dry, - }; - let old_version : version::Version = self.package.version().as_ref().unwrap().try_into().unwrap(); - let new_version = old_version.clone().bump(); - // bump the package version in dependents (so far, only workspace) - let dependencies = vec![ CrateDir::try_from( workspace_root.clone() ).unwrap() ]; - let version_bump = version::BumpOptions - { - crate_dir : crate_dir.clone(), - old_version : old_version.clone(), - new_version : new_version.clone(), - dependencies : dependencies.clone(), - dry : self.dry, - }; - let git_options = GitOptions - { - git_root : workspace_root, - items : dependencies.iter().chain([ &crate_dir ]).map( | d | d.absolute_path().join( "Cargo.toml" ) ).collect(), - message : format!( "{}-v{}", self.package.name().unwrap(), new_version ), - dry : self.dry, - }; - let publish = cargo::PublishOptions - { - path : crate_dir.as_ref().into(), - temp_path : self.base_temp_dir.clone(), - retry_count : 2, - dry : self.dry, - }; - - PackagePublishInstruction - { - package_name : self.package.name().unwrap(), - pack, - version_bump, - git_options, - publish, - dry : self.dry, - } - } - } - - /// Performs package publishing based on the given arguments. - /// - /// # Arguments - /// - /// * `args` - The package publishing instructions. - /// - /// # Returns - /// - /// * `Result` - The result of the publishing operation, including information about the publish, version bump, and git operations. 
- pub fn perform_package_publish( instruction : PackagePublishInstruction ) -> Result< PublishReport, ( PublishReport, Error ) > - { - let mut report = PublishReport::default(); - let PackagePublishInstruction - { - package_name: _, - mut pack, - mut version_bump, - mut git_options, - mut publish, - dry, - } = instruction; - pack.dry = dry; - version_bump.dry = dry; - git_options.dry = dry; - publish.dry = dry; - - report.get_info = Some( cargo::pack( pack ).map_err( | e | ( report.clone(), e ) )? ); - // qqq : redundant field? - report.publish_required = true; - let bump_report = version::version_bump( version_bump ).map_err( | e | ( report.clone(), e ) )?; - report.bump = Some( bump_report.clone() ); - let git_root = git_options.git_root.clone(); - let git = match perform_git_commit( git_options ) - { - Ok( git ) => git, - Err( e ) => - { - version_revert( &bump_report ) - .map_err( | le | - ( - report.clone(), - format_err!( "Base error:\n{}\nRevert error:\n{}", e.to_string().replace( '\n', "\n\t" ), le.to_string().replace( '\n', "\n\t" ) ) - ))?; - return Err(( report, e )); - } - }; - report.add = git.add; - report.commit = git.commit; - report.publish = match cargo::publish( publish ) - { - Ok( publish ) => Some( publish ), - Err( e ) => - { - git::reset( git_root.as_ref(), true, 1, false ) - .map_err( | le | - ( - report.clone(), - format_err!( "Base error:\n{}\nRevert error:\n{}", e.to_string().replace( '\n', "\n\t" ), le.to_string().replace( '\n', "\n\t" ) ) - ))?; - return Err(( report, e )); - } - }; - - let res = git::push( &git_root, dry ).map_err( | e | ( report.clone(), e ) )?; - report.push = Some( res ); - - Ok( report ) - } - - /// `PublishPlan` manages the overall publication process for multiple packages. - /// It organizes the necessary details required for publishing each individual package. - /// This includes the workspace root directory, any temporary directories used during the process, - /// and the set of specific instructions for publishing each package. - #[ derive( Debug, Former, Clone ) ] - pub struct PublishPlan - { - /// `workspace_dir` - This is the root directory of your workspace, containing all the Rust crates - /// that make up your package. It is used to locate the packages within your workspace that are meant - /// to be published. The value here is represented by `CrateDir` which indicates the directory of the crate. - pub workspace_dir : CrateDir, - - /// `base_temp_dir` - This is used for any temporary operations during the publication process, like - /// building the package or any other processes that might require the storage of transient data. It's - /// optional as not all operations will require temporary storage. The type used is `PathBuf` which allows - /// manipulation of the filesystem paths. - pub base_temp_dir : Option< PathBuf >, - - /// Release channels for rust. - pub channel : Channel, - - /// `dry` - A boolean value indicating whether to do a dry run. If set to `true`, the application performs - /// a simulated run without making any actual changes. If set to `false`, the operations are actually executed. - /// This property is optional and defaults to `true`. - #[ former( default = true ) ] - pub dry : bool, - - /// Required for tree view only - pub roots : Vec< CrateDir >, - - /// `plans` - This is a vector containing the instructions for publishing each package. Each item - /// in the `plans` vector indicates a `PackagePublishInstruction` set for a single package. 
It outlines - /// how to build and where to publish the package amongst other instructions. The `#[setter( false )]` - /// attribute indicates that there is no setter method for the `plans` variable and it can only be modified - /// within the struct. - #[ scalar( setter = false ) ] - pub plans : Vec< PackagePublishInstruction >, - } - - impl PublishPlan - { - /// Displays a tree-like structure of crates and their dependencies. - /// - /// # Arguments - /// - /// * `f` - A mutable reference to a `Formatter` used for writing the output. - /// - /// # Errors - /// - /// Returns a `std::fmt::Error` if there is an error writing to the formatter. - pub fn write_as_tree< W >( &self, f : &mut W ) -> std::fmt::Result - where - W : std::fmt::Write - { - let name_bump_report = self - .plans - .iter() - .map( | x | ( &x.package_name, ( x.version_bump.old_version.to_string(), x.version_bump.new_version.to_string() ) ) ) - .collect::< HashMap< _, _ > >(); - for wanted in &self.roots - { - let list = action::list - ( - action::list::ListOptions::former() - .path_to_manifest( wanted.clone() ) - .format( action::list::ListFormat::Tree ) - .dependency_sources([ action::list::DependencySource::Local ]) - .dependency_categories([ action::list::DependencyCategory::Primary ]) - .form() - ) - .map_err( |( _, _e )| std::fmt::Error )?; - let action::list::ListReport::Tree( list ) = list else { unreachable!() }; - - fn callback( name_bump_report : &HashMap< &String, ( String, String ) >, mut r : action::list::ListNodeReport ) -> action::list::ListNodeReport - { - if let Some(( old, new )) = name_bump_report.get( &r.name ) - { - r.version = Some( format!( "({old} -> {new})" ) ); - } - r.normal_dependencies = r.normal_dependencies.into_iter().map( | r | callback( name_bump_report, r ) ).collect(); - r.dev_dependencies = r.dev_dependencies.into_iter().map( | r | callback( name_bump_report, r ) ).collect(); - r.build_dependencies = r.build_dependencies.into_iter().map( | r | callback( name_bump_report, r ) ).collect(); - - r - } - let list = list.into_iter().map( | r | callback( &name_bump_report, r ) ).collect(); - - let list = action::list::ListReport::Tree( list ); - writeln!( f, "{}", list )?; - } - - Ok( () ) - } - - /// Format and display the list of packages and their version bumps in a formatted way. - /// - /// # Arguments - /// - /// - `f`: A mutable reference to a `Formatter` where the output will be written to. - /// - /// # Errors - /// - /// Returns a `std::fmt::Error` if there is an error writing to the formatter. 
- pub fn write_as_list< W >( &self, f : &mut W ) -> std::fmt::Result - where - W : std::fmt::Write - { - for ( idx, package ) in self.plans.iter().enumerate() - { - let bump = &package.version_bump; - writeln!( f, "[{idx}] {} ({} -> {})", package.package_name, bump.old_version, bump.new_version )?; + .map_err( | _ | PackageError::WorkspacePackageRef ), } - - Ok( () ) - } - } - - impl PublishPlanFormer - { - pub fn option_base_temp_dir( mut self, path : Option< PathBuf > ) -> Self - { - self.storage.base_temp_dir = path; - self - } - - pub fn package< IntoPackage >( mut self, package : IntoPackage ) -> Self - where - IntoPackage : Into< Package >, - { - let channel = self.storage.channel.unwrap_or_default(); - let mut plan = PublishSinglePackagePlanner::former(); - if let Some( workspace ) = &self.storage.workspace_dir - { - plan = plan.workspace_dir( workspace.clone() ); - } - if let Some( base_temp_dir ) = &self.storage.base_temp_dir - { - plan = plan.base_temp_dir( base_temp_dir.clone() ); - } - if let Some( dry ) = self.storage.dry - { - plan = plan.dry( dry ); - } - let plan = plan - .channel( channel ) - .package( package ) - .perform(); - let mut plans = self.storage.plans.unwrap_or_default(); - plans.push( plan ); - - self.storage.plans = Some( plans ); - - self - } - - pub fn packages< IntoPackageIter, IntoPackage >( mut self, packages : IntoPackageIter ) -> Self - where - IntoPackageIter : IntoIterator< Item = IntoPackage >, - IntoPackage : Into< Package >, - { - for package in packages - { - self = self.package( package ); - } - - self - } - } - - - /// Perform publishing of multiple packages based on the provided publish plan. - /// - /// # Arguments - /// - /// * `plan` - The publish plan with details of packages to be published. - /// - /// # Returns - /// - /// Returns a `Result` containing a vector of `PublishReport` if successful, else an error. - pub fn perform_packages_publish( plan : PublishPlan ) -> Result< Vec< PublishReport > > - { - let mut report = vec![]; - for package in plan.plans - { - let res = perform_package_publish( package ).map_err( |( current_rep, e )| format_err!( "{}\n{current_rep}\n{e}", report.iter().map( | r | format!( "{r}" ) ).join( "\n" ) ) )?; - report.push( res ); } - Ok( report ) - } - - /// Holds information about the publishing process. - #[ derive( Debug, Default, Clone ) ] - pub struct PublishReport - { - /// Retrieves information about the package. - pub get_info : Option< process::Report >, - /// Indicates whether publishing is required for the package. - pub publish_required : bool, - /// Bumps the version of the package. - pub bump : Option< version::ExtendedBumpReport >, - /// Report of adding changes to the Git repository. - pub add : Option< process::Report >, - /// Report of committing changes to the Git repository. - pub commit : Option< process::Report >, - /// Report of pushing changes to the Git repository. - pub push : Option< process::Report >, - /// Report of publishes the package using the `cargo publish` command. 
- pub publish : Option< process::Report >, - } - - impl std::fmt::Display for PublishReport - { - fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result - { - let PublishReport - { - get_info, - publish_required, - bump, - add, - commit, - push, - publish, - } = self; - - if get_info.is_none() - { - f.write_str( "Empty report" )?; - return Ok( () ) - } - let info = get_info.as_ref().unwrap(); - write!( f, "{}", info )?; - - if !publish_required - { - f.write_str( "The package has no changes, so no publishing is required" )?; - return Ok( () ) - } - - if let Some( bump ) = bump - { - writeln!( f, "{}", bump )?; - } - if let Some( add ) = add - { - write!( f, "{add}" )?; - } - if let Some( commit ) = commit - { - write!( f, "{commit}" )?; - } - if let Some( push ) = push - { - write!( f, "{push}" )?; - } - if let Some( publish ) = publish - { - write!( f, "{publish}" )?; - } - - Ok( () ) - } - } - - /// Sorting variants for dependencies. - #[ derive( Debug, Copy, Clone ) ] - pub enum DependenciesSort - { - /// List will be topologically sorted. - Topological, - /// List will be unsorted. - Unordered, - } - - #[ derive( Debug, Clone ) ] - /// Args for `local_dependencies` function. - pub struct DependenciesOptions - { - /// With dependencies of dependencies. - pub recursive : bool, - /// With sorting. - pub sort : DependenciesSort, - /// Include dev dependencies. - pub with_dev : bool, - /// Include remote dependencies. - pub with_remote : bool, - } - - impl Default for DependenciesOptions - { - fn default() -> Self - { - Self - { - recursive : true, - sort : DependenciesSort::Unordered, - with_dev : false, - with_remote : false, - } - } - } - - // - - /// Identifier of any crate(local and remote) - #[ derive( Debug, Clone, Hash, Eq, PartialEq ) ] - pub struct CrateId - { - /// TODO : make it private - pub name : String, - /// TODO : make it private - pub path : Option< AbsolutePath >, - } - - impl From< &WorkspacePackage > for CrateId - { - fn from( value : &WorkspacePackage ) -> Self - { - Self - { - name : value.name().clone(), - path : Some( AbsolutePath::try_from( value.manifest_path().parent().unwrap() ).unwrap() ), - } - } - } - - impl From< &workspace::Dependency > for CrateId - { - fn from( value : &workspace::Dependency ) -> Self - { - Self - { - name : value.name().clone(), - path : value.path().clone().map( | path | AbsolutePath::try_from( path ).unwrap() ), - } - } - } - - /// Recursive implementation of the `dependencies` function - pub fn _dependencies - ( - workspace : &mut Workspace, - manifest : &Package, - graph : &mut HashMap< CrateId, HashSet< CrateId > >, - opts : DependenciesOptions - ) -> Result< CrateId > - { - let DependenciesOptions - { - recursive, - sort : _, - with_dev, - with_remote, - } = opts; - if recursive && with_remote { unimplemented!( "`recursive` + `with_remote` options") } - - let manifest_path = &manifest.manifest_path(); - - let package = workspace - .load()? 
- .package_find_by_manifest( &manifest_path ) - .ok_or( format_err!( "Package not found in the workspace with path : `{}`", manifest_path.as_ref().display() ) )?; - - let deps = package - .dependencies() - .iter() - .filter( | dep | ( with_remote || dep.path().is_some() ) && ( with_dev || dep.kind() != workspace::DependencyKind::Development ) ) - .map( CrateId::from ) - .collect::< HashSet< _ > >(); - - let package = CrateId::from( &package ); - graph.insert( package.clone(), deps.clone() ); - - if recursive - { - for dep in deps - { - if graph.get( &dep ).is_none() - { - // unwrap because `recursive` + `with_remote` not yet implemented - _dependencies( workspace, &dep.path.as_ref().unwrap().join( "Cargo.toml" ).try_into().unwrap(), graph, opts.clone() )?; - } - } - } - - Ok( package ) - } - - /// Returns local dependencies of a specified package by its manifest path from a workspace. - /// - /// # Arguments - /// - /// - `workspace` - holds cached information about the workspace, such as the packages it contains and their dependencies. By passing it as a mutable reference, function can update the cache as needed. - /// - `manifest` - The package manifest file contains metadata about the package such as its name, version, and dependencies. - /// - `opts` - used to specify options or configurations for fetching local dependencies. - /// - /// # Returns - /// - /// If the operation is successful, returns a vector of `PathBuf` objects, where each `PathBuf` represents the path to a local dependency of the specified package. - pub fn dependencies( workspace : &mut Workspace, manifest : &Package, opts : DependenciesOptions ) -> Result< Vec< CrateId > > - { - let mut graph = HashMap::new(); - let root = _dependencies( workspace, manifest, &mut graph, opts.clone() )?; - - let output = match opts.sort - { - DependenciesSort::Unordered => - { - graph - .into_iter() - .flat_map( | ( id, dependency ) | - { - dependency - .into_iter() - .chain( Some( id ) ) - }) - .unique() - .filter( | x | x != &root ) - .collect() - } - DependenciesSort::Topological => - { - graph::toposort( graph::construct( &graph ) ).map_err( | err | format_err!( "{}", err ) )?.into_iter().filter( | x | x != &root ).collect() - }, - }; - - Ok( output ) } // @@ -934,9 +204,9 @@ mod private /// - `true` if the package needs to be published. /// - `false` if there is no need to publish the package. /// - /// Panics if the manifest is not loaded or local package is not packed. + /// Panics if the package is not loaded or local package is not packed. - pub fn publish_need( package : &Package, path : Option< PathBuf > ) -> Result< bool, PackageError > + pub fn publish_need< 'a >( package : &Package< 'a >, path : Option< path::PathBuf > ) -> Result< bool, PackageError > { let name = package.name()?; let version = package.version()?; @@ -944,8 +214,6 @@ mod private .map( | p | p.join( format!( "package/{0}-{1}.crate", name, version ) ) ) .unwrap_or( packed_crate::local_path( &name, &version, package.crate_dir() ).map_err( | _ | PackageError::LocalPath )? 
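// The dependency listing above can be returned either unordered or topologically
// sorted, so that every crate appears after the crates it depends on, which is the
// order a publish has to follow. The real code goes through `graph::construct` and
// `graph::toposort`; the function below is only a self-contained sketch of the same
// idea, with invented crate names.

use std::collections::{ HashMap, HashSet };

fn topo_order< 'a >( graph : &HashMap< &'a str, HashSet< &'a str > > ) -> Vec< &'a str >
{
  let mut remaining = graph.clone();
  let mut order = Vec::new();
  while !remaining.is_empty()
  {
    // a crate is ready once none of its dependencies are still waiting
    let ready : Vec< &str > = remaining
    .iter()
    .filter( | ( _, deps ) | deps.iter().all( | dep | !remaining.contains_key( dep ) ) )
    .map( | ( name, _ ) | *name )
    .collect();
    assert!( !ready.is_empty(), "dependency cycle" );
    for name in ready
    {
      remaining.remove( name );
      order.push( name );
    }
  }
  order
}

fn main()
{
  let mut graph = HashMap::new();
  graph.insert( "demo_app", HashSet::from([ "demo_core", "demo_tools" ]) );
  graph.insert( "demo_tools", HashSet::from([ "demo_core" ]) );
  graph.insert( "demo_core", HashSet::new() );
  assert_eq!( topo_order( &graph ), vec![ "demo_core", "demo_tools", "demo_app" ] );
}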
); - // aaa : for Bohdan : bad, properly handle errors - // aaa : return result instead of panic let local_package = CrateArchive::read( local_package_path ).map_err( | _ | PackageError::ReadArchive )?; let remote_package = match CrateArchive::download_crates_io( name, version ) { @@ -955,8 +223,9 @@ mod private _ => return Err( PackageError::LoadRemotePackage ), }; - Ok( crate_diff( &local_package, &remote_package ).exclude( diff::PUBLISH_IGNORE_LIST ).has_changes() ) + Ok( diff::crate_diff( &local_package, &remote_package ).exclude( diff::PUBLISH_IGNORE_LIST ).has_changes() ) } + } // @@ -964,20 +233,10 @@ mod private crate::mod_interface! { - protected use PublishSinglePackagePlanner; - protected use PublishPlan; - protected use perform_package_publish; - protected use perform_packages_publish; - - protected use PublishReport; - protected use Package; + exposed use Package; + protected use PackageName; protected use PackageError; protected use publish_need; - protected use CrateId; - protected use DependenciesSort; - protected use DependenciesOptions; - protected use dependencies; - } diff --git a/module/move/willbe/src/entity/package_md_extension.rs b/module/move/willbe/src/entity/package_md_extension.rs new file mode 100644 index 0000000000..1cc03998c7 --- /dev/null +++ b/module/move/willbe/src/entity/package_md_extension.rs @@ -0,0 +1,152 @@ +/// Internal namespace. +pub( crate ) mod private +{ + use crate::*; + + /// Md's extension for workspace + pub trait PackageMdExtension + { + /// Package name + fn name( &self ) -> Result< &str, package::PackageError >; + + /// Stability + fn stability( &self ) -> Result< action::readme_health_table_renew::Stability, package::PackageError >; + + /// Repository + fn repository( &self ) -> Result< Option< String >, package::PackageError >; + + /// Discord url + fn discord_url( &self ) -> Result< Option< String >, package::PackageError >; + } + + impl < 'a > package::Package< 'a > + { + /// Package name + pub fn name( &self ) -> Result< &str, package::PackageError > + { + match self + { + Self::Manifest( manifest ) => + { + // let data = manifest.data.as_ref().ok_or_else( || PackageError::Manifest( ManifestError::EmptyManifestData ) )?; + let data = &manifest.data; + + // Unwrap safely because of the `Package` type guarantee + // Ok( data[ "package" ][ "name" ].as_str().unwrap().to_string() ) + Ok( data[ "package" ][ "name" ].as_str().unwrap() ) + } + Self::WorkspacePackageRef( package ) => + { + Ok( package.name() ) + } + } + } + + /// Stability + pub fn stability( &self ) -> Result< action::readme_health_table_renew::Stability, package::PackageError > + { + // aaa : for Petro : bad : first of all it should be in trait. 
also there is duplicated code + // aaa : done + // aaa : for Petro : review other similar places + // aaa : done + match self + { + Self::Manifest( _ ) => + { + // Unwrap safely because of the `Package` type guarantee + Ok + ( + self.package_metadata() + .and_then( | m | m.get( "stability" ) ) + .and_then( | s | s.as_str() ) + .and_then( | s | s.parse::< action::readme_health_table_renew::Stability >().ok() ) + .unwrap_or( action::readme_health_table_renew::Stability::Experimental ) + ) + } + Self::WorkspacePackageRef( package ) => + { + Ok + ( + package + .metadata()[ "stability" ] + .as_str() + .and_then( | s | s.parse::< action::readme_health_table_renew::Stability >().ok() ) + .unwrap_or( action::readme_health_table_renew::Stability::Experimental) + ) + } + } + } + + /// Repository + pub fn repository( &self ) -> Result< Option< String >, package::PackageError > + { + match self + { + Self::Manifest( manifest ) => + { + // let data = manifest.data.as_ref().ok_or_else( || PackageError::Manifest( ManifestError::EmptyManifestData ) )?; + let data = &manifest.data; + + // Unwrap safely because of the `Package` type guarantee + Ok + ( + data[ "package" ] + .get( "repository" ) + .and_then( | r | r.as_str() ) + .map( | r | r.to_string()) + ) + } + Self::WorkspacePackageRef( package ) => + { + Ok( package.repository().cloned() ) + } + } + } + + /// Discord url + pub fn discord_url( &self ) -> Result< Option< String >, package::PackageError > + { + match self + { + Self::Manifest( _ ) => + { + // let data = manifest.data.as_ref().ok_or_else( || PackageError::Manifest( ManifestError::EmptyManifestData ) )?; + Ok + ( + self.package_metadata() + .and_then( | m | m.get( "discord_url" ) ) + .and_then( | url | url.as_str() ) + .map( | r | r.to_string() ) + ) + } + Self::WorkspacePackageRef( package ) => + { + Ok( package.metadata()[ "discord_url" ].as_str().map( | url | url.to_string() ) ) + } + } + } + + fn package_metadata( &self ) -> Option< &toml_edit::Item > + { + match self { + package::Package::Manifest( manifest ) => + { + let data = &manifest.data; + + data[ "package" ] + .get( "metadata" ) + } + package::Package::WorkspacePackageRef(_) => + { + None + } + } + } + } +} + + +crate::mod_interface! +{ + protected use PackageMdExtension; +} diff --git a/module/move/willbe/src/entity/packages.rs b/module/move/willbe/src/entity/packages.rs index 5f48b09b3f..b1a4e0d141 100644 --- a/module/move/willbe/src/entity/packages.rs +++ b/module/move/willbe/src/entity/packages.rs @@ -4,13 +4,14 @@ mod private use std:: { fmt::Formatter, - collections::{ HashMap, HashSet }, }; - use workspace::WorkspacePackage; - use crate::workspace::Dependency; + use package::PackageName; + use collection::{ HashMap, HashSet }; - /// Type aliasing for String - pub type PackageName = String; + // use workspace::WorkspacePackageRef< '_ >; + // use Dependency; + + // aaa : poor description // aaa : removed /// A configuration struct for specifying optional filters when using the /// `filter` function. It allows users to provide custom filtering @@ -22,13 +23,13 @@ mod private /// applied to each package, and only packages that satisfy the condition /// are included in the final result. If not provided, a default filter that /// accepts all packages is used. - pub package_filter : Option< Box< dyn Fn( &WorkspacePackage ) -> bool > >, + pub package_filter : Option< Box< dyn Fn( WorkspacePackageRef< '_ > ) -> bool > >, /// An optional dependency filtering function. 
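// The accessors above read optional fields from the package's `[ package.metadata ]`
// table, while `repository` comes from the regular `[ package ]` table. A standalone
// sketch of the lookup with toml_edit; the manifest fragment and values are invented,
// and depending on the toml_edit version the document type is `Document` or
// `DocumentMut`.

let manifest : toml_edit::DocumentMut = r#"
[package]
name = "demo"

[package.metadata]
stability = "experimental"
discord_url = "https://discord.gg/example"
"#.parse().unwrap();

let stability = manifest[ "package" ][ "metadata" ]
.get( "stability" )
.and_then( | s | s.as_str() );
assert_eq!( stability, Some( "experimental" ) );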
If provided, this function /// is applied to each dependency of each package, and only dependencies /// that satisfy the condition are included in the final result. If not /// provided, a default filter that accepts all dependencies is used. - pub dependency_filter : Option< Box< dyn Fn( &WorkspacePackage, &Dependency ) -> bool > >, + pub dependency_filter : Option< Box< dyn Fn( WorkspacePackageRef< '_ >, DependencyRef< '_ > ) -> bool > >, } impl std::fmt::Debug for FilterMapOptions @@ -71,25 +72,31 @@ mod private /// which dependencies should be included in the return for that package. If not provided, all /// dependencies for a package are included. - // qqq : for Bohdan : for Petro : bad. don't use PackageMetadata directly, use its abstraction only! + // aaa : for Bohdan : for Petro : bad. don't use PackageMetadata directly, use its abstraction only! - pub fn filter( packages : &[ WorkspacePackage ], options : FilterMapOptions ) -> HashMap< PackageName, HashSet< PackageName > > + pub fn filter< 'a > + ( + // packages : &[ WorkspacePackageRef< '_ > ], + packages : impl Iterator< Item = WorkspacePackageRef< 'a > >, + options : FilterMapOptions, + ) + -> HashMap< PackageName, HashSet< PackageName > > { let FilterMapOptions { package_filter, dependency_filter } = options; let package_filter = package_filter.unwrap_or_else( || Box::new( | _ | true ) ); let dependency_filter = dependency_filter.unwrap_or_else( || Box::new( | _, _ | true ) ); packages - .iter() + // .iter() .filter( | &p | package_filter( p ) ) .map ( | package | ( - package.name().clone(), + package.name().to_string().into(), package.dependencies() - .iter() - .filter( | &d | dependency_filter( package, d ) ) - .map( | d | d.name().clone() ) + // .iter() + .filter( | d | dependency_filter( package, *d ) ) + .map( | d | d.name().into() ) .collect::< HashSet< _ > >() ) ) @@ -102,7 +109,6 @@ mod private crate::mod_interface! { - protected use PackageName; protected use FilterMapOptions; protected use filter; diff --git a/module/move/willbe/src/entity/packed_crate.rs b/module/move/willbe/src/entity/packed_crate.rs index 9bf462d782..f75db6b0c7 100644 --- a/module/move/willbe/src/entity/packed_crate.rs +++ b/module/move/willbe/src/entity/packed_crate.rs @@ -9,7 +9,7 @@ mod private time::Duration, path::PathBuf, }; - use wtools::error::{ for_app::Context, Result }; + use error::{ untyped::Context, Result }; use ureq::Agent; /// Returns the local path of a packed `.crate` file based on its name, version, and manifest path. @@ -17,18 +17,17 @@ mod private /// # Args : /// - `name` - the name of the package. /// - `version` - the version of the package. - /// - `manifest_path` - path to the package `Cargo.toml` file. + /// - `manifest_file` - path to the package `Cargo.toml` file. /// /// # Returns : /// The local packed `.crate` file of the package pub fn local_path< 'a >( name : &'a str, version : &'a str, crate_dir : CrateDir ) -> Result< PathBuf > { let buf = format!( "package/{0}-{1}.crate", name, version ); - - let workspace = Workspace::with_crate_dir( crate_dir )?; + let workspace = Workspace::try_from( crate_dir )?; let mut local_package_path = PathBuf::new(); - local_package_path.push( workspace.target_directory()? 
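// `local_path` above resolves to `<target dir>/package/<name>-<version>.crate`,
// i.e. the archive that `cargo package` leaves behind. A standalone illustration
// with invented values :

use std::path::PathBuf;

let target_dir = PathBuf::from( "/tmp/demo_workspace/target" );
let packed = target_dir.join( format!( "package/{}-{}.crate", "demo_crate", "0.1.0" ) );
assert_eq!( packed, PathBuf::from( "/tmp/demo_workspace/target/package/demo_crate-0.1.0.crate" ) );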
); + local_package_path.push( workspace.target_directory() ); local_package_path.push( buf ); Ok( local_package_path ) diff --git a/module/move/willbe/src/entity/progress_bar.rs b/module/move/willbe/src/entity/progress_bar.rs new file mode 100644 index 0000000000..52e0460f48 --- /dev/null +++ b/module/move/willbe/src/entity/progress_bar.rs @@ -0,0 +1,108 @@ +mod private +{ + /// The `ProgressBar` structure is used to display progress indicators in the terminal. + /// It wraps the functionality of the `indicatif` library. + /// + /// This structure is only available when the `progress_bar` feature is enabled. + #[ cfg( feature = "progress_bar" ) ] + pub struct ProgressBar< 'a > + { + /// A reference to the `MultiProgress` object from the `indicatif` library, which + /// allows managing multiple progress bars simultaneously. This object is necessary + /// for coordinating the display of multiple progress bars. + pub( crate ) multi_progress: &'a indicatif::MultiProgress, + /// The `ProgressBar` object from the `indicatif` library, which represents + /// an individual progress indicator. It is used to update the progress state + /// and display it in the terminal. + pub( crate ) progress_bar: indicatif::ProgressBar, + } + + #[ cfg( feature = "progress_bar" ) ] + impl < 'a > std::fmt::Debug for ProgressBar< 'a > + { + fn fmt( &self, f : &mut std::fmt::Formatter< '_ > ) -> std::fmt::Result + { + f.debug_struct( "ProgressBar" ) + .finish() + } + } + + /// The `MultiProgress` structure is used to manage and display multiple progress + /// indicators simultaneously in the terminal. It utilizes the `indicatif` library. + /// + /// This structure is only available when the `progress_bar` feature is enabled. + #[ cfg( feature = "progress_bar" ) ] + pub struct MultiProgress + { + multi_progress: indicatif::MultiProgress, + progress_style: indicatif::ProgressStyle, + } + + #[ cfg( feature = "progress_bar" ) ] + impl MultiProgress + { + /// Creates a new `ProgressBar` instance tied to the `MultiProgress` manager. + /// This function initializes a new progress bar with a specified length and applies + /// the defined style to it. + /// + /// # Parameters + /// + /// - `variants_len`: The total length or count that the progress bar will track. + /// + /// # Returns + /// + /// A `ProgressBar` instance that can be used to update and display progress. + pub fn progress_bar< 'a >( &'a self, variants_len : u64 ) -> ProgressBar< 'a > + { + let progress_bar = + { + let pb = self.multi_progress.add( indicatif::ProgressBar::new( variants_len ) ); + pb.set_style( self.progress_style.clone() ); + pb.inc( 0 ); + pb + }; + ProgressBar + { + multi_progress : &self.multi_progress, + progress_bar, + } + } + } + + #[ cfg( feature = "progress_bar" ) ] + impl std::fmt::Debug for MultiProgress + { + fn fmt( &self, f : &mut std::fmt::Formatter< '_ > ) -> std::fmt::Result + { + f.debug_struct( "MultiprogressProgress" ) + .finish() + } + } + + + #[ cfg( feature = "progress_bar" ) ] + impl Default for MultiProgress + { + fn default() -> Self + { + Self + { + multi_progress: indicatif::MultiProgress::new(), + progress_style: indicatif::ProgressStyle::with_template + ( + "[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}", + ) + .unwrap() + .progress_chars( "##-" ), + } + } + } +} + +crate::mod_interface! 
+{ + #[ cfg( feature = "progress_bar" ) ] + protected use ProgressBar; + #[ cfg( feature = "progress_bar" ) ] + protected use MultiProgress; +} \ No newline at end of file diff --git a/module/move/willbe/src/entity/publish.rs b/module/move/willbe/src/entity/publish.rs new file mode 100644 index 0000000000..ad5632782b --- /dev/null +++ b/module/move/willbe/src/entity/publish.rs @@ -0,0 +1,441 @@ +mod private +{ + use crate::*; + + use std::fmt; + use process_tools::process; + use + { + iter::Itertools, + error:: + { + Result, + untyped::{ format_err, Error }, + } + }; + use error::ErrWith; + + /// Represents instructions for publishing a package. + #[ derive( Debug, Clone ) ] + pub struct PackagePublishInstruction + { + /// The name of the package. + pub package_name : package::PackageName, + /// Options for packing the package using Cargo. + pub pack : cargo::PackOptions, + /// Options for bumping the package version. + pub bump : version::BumpOptions, + /// Git options related to the package. + pub git_options : entity::git::GitOptions, + /// Options for publishing the package using Cargo. + pub publish : cargo::PublishOptions, + /// Indicates whether the process should be dry-run (no actual publishing). + pub dry : bool, + } + + /// Represents a planner for publishing a single package. + #[ derive( Debug, former::Former ) ] + #[ perform( fn build() -> PackagePublishInstruction ) ] + pub struct PublishSinglePackagePlanner< 'a > + { + workspace_dir : CrateDir, + package : package::Package< 'a >, + channel : channel::Channel, + base_temp_dir : Option< path::PathBuf >, + #[ former( default = true ) ] + dry : bool, + } + + impl< 'a > PublishSinglePackagePlanner< 'a > + { + fn build( self ) -> PackagePublishInstruction + { + let crate_dir = self.package.crate_dir(); + let workspace_root : AbsolutePath = self.workspace_dir.clone().absolute_path(); + let pack = cargo::PackOptions + { + path : crate_dir.clone().absolute_path().inner(), + channel : self.channel, + allow_dirty : self.dry, + checking_consistency : !self.dry, + temp_path : self.base_temp_dir.clone(), + dry : self.dry, + }; + let old_version : Version = self.package.version().as_ref().unwrap().try_into().unwrap(); + let new_version = old_version.clone().bump(); + // bump the package version in dependents (so far, only workspace) + let dependencies = vec![ CrateDir::try_from( workspace_root.clone() ).unwrap() ]; + let bump = version::BumpOptions + { + crate_dir : crate_dir.clone(), + old_version : old_version.clone(), + new_version : new_version.clone(), + dependencies : dependencies.clone(), + dry : self.dry, + }; + let git_options = entity::git::GitOptions + { + git_root : workspace_root, + items : dependencies.iter().chain([ &crate_dir ]).map( | d | d.clone().absolute_path().join( "Cargo.toml" ) ).collect(), + message : format!( "{}-v{}", self.package.name().unwrap(), new_version ), + dry : self.dry, + }; + let publish = cargo::PublishOptions + { + path : crate_dir.clone().absolute_path().inner(), + temp_path : self.base_temp_dir.clone(), + retry_count : 2, + dry : self.dry, + }; + + PackagePublishInstruction + { + package_name : self.package.name().unwrap().to_string().into(), + pack, + bump, + git_options, + publish, + dry : self.dry, + } + } + } + + /// `PublishPlan` manages the overall publication process for multiple packages. + /// It organizes the necessary details required for publishing each individual package. 
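// `build` above plans one commit per published package, with the message
// `<package name>-v<new version>`, and bumps the crate itself plus the workspace
// manifest that depends on it. With invented values the commit message becomes :

let message = format!( "{}-v{}", "demo_crate", "0.2.1" );
assert_eq!( message, "demo_crate-v0.2.1" );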
+ /// This includes the workspace root directory, any temporary directories used during the process, + /// and the set of specific instructions for publishing each package. + #[ derive( Debug, former::Former, Clone ) ] + pub struct PublishPlan + { + /// `workspace_dir` - This is the root directory of your workspace, containing all the Rust crates + /// that make up your package. It is used to locate the packages within your workspace that are meant + /// to be published. The value here is represented by `CrateDir` which indicates the directory of the crate. + pub workspace_dir : CrateDir, + + /// `base_temp_dir` - This is used for any temporary operations during the publication process, like + /// building the package or any other processes that might require the storage of transient data. It's + /// optional as not all operations will require temporary storage. The type used is `PathBuf` which allows + /// manipulation of the filesystem paths. + pub base_temp_dir : Option< path::PathBuf >, + + /// Release channels for rust. + pub channel : channel::Channel, + + /// `dry` - A boolean value indicating whether to do a dry run. If set to `true`, the application performs + /// a simulated run without making any actual changes. If set to `false`, the operations are actually executed. + /// This property is optional and defaults to `true`. + #[ former( default = true ) ] + pub dry : bool, + + /// Required for tree view only + pub roots : Vec< CrateDir >, + + /// `plans` - This is a vector containing the instructions for publishing each package. Each item + /// in the `plans` vector indicates a `PackagePublishInstruction` set for a single package. It outlines + /// how to build and where to publish the package amongst other instructions. The `#[setter( false )]` + /// attribute indicates that there is no setter method for the `plans` variable and it can only be modified + /// within the struct. + #[ scalar( setter = false ) ] + pub plans : Vec< PackagePublishInstruction >, + } + + impl PublishPlan + { + /// Displays a tree-like structure of crates and their dependencies. + /// + /// # Arguments + /// + /// * `f` - A mutable reference to a `Formatter` used for writing the output. + /// + /// # Errors + /// + /// Returns a `std::fmt::Error` if there is an error writing to the formatter. 
+ pub fn write_as_tree< W >( &self, f : &mut W ) -> fmt::Result + where + W : fmt::Write + { + let name_bump_report : collection::HashMap< _, _ > = self + .plans + .iter() + .map( | x | ( x.package_name.as_ref(), ( x.bump.old_version.to_string(), x.bump.new_version.to_string() ) ) ) + .collect(); + for wanted in &self.roots + { + let list = action::list + ( + action::list::ListOptions::former() + .path_to_manifest( wanted.clone() ) + .format( action::list::ListFormat::Tree ) + .dependency_sources([ action::list::DependencySource::Local ]) + .dependency_categories([ action::list::DependencyCategory::Primary ]) + .form() + ) + .map_err( |( _, _e )| fmt::Error )?; + let action::list::ListReport::Tree( list ) = list else { unreachable!() }; + + fn callback( name_bump_report : &collection::HashMap< &String, ( String, String ) >, mut r : tool::ListNodeReport ) -> tool::ListNodeReport + { + if let Some(( old, new )) = name_bump_report.get( &r.name ) + { + r.version = Some( format!( "({old} -> {new})" ) ); + } + r.normal_dependencies = r.normal_dependencies.into_iter().map( | r | callback( name_bump_report, r ) ).collect(); + r.dev_dependencies = r.dev_dependencies.into_iter().map( | r | callback( name_bump_report, r ) ).collect(); + r.build_dependencies = r.build_dependencies.into_iter().map( | r | callback( name_bump_report, r ) ).collect(); + + r + } + let printer = list; + let rep : Vec< tool::ListNodeReport > = printer.iter().map( | printer | printer.info.clone() ).collect(); + let list: Vec< tool::ListNodeReport > = rep.into_iter().map( | r | callback( &name_bump_report, r ) ).collect(); + let printer : Vec< tool::TreePrinter > = list.iter().map( | rep | tool::TreePrinter::new( rep ) ).collect(); + + let list = action::list::ListReport::Tree( printer ); + writeln!( f, "{}", list )?; + } + + Ok( () ) + } + + /// Format and display the list of packages and their version bumps in a formatted way. + /// + /// # Arguments + /// + /// - `f`: A mutable reference to a `Formatter` where the output will be written to. + /// + /// # Errors + /// + /// Returns a `std::fmt::Error` if there is an error writing to the formatter. 
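// In the tree view above every planned crate has its version annotated as
// `(<old> -> <new>)`, while the list view defined just below prints one
// `[index] name (old -> new)` line per package. With invented versions :

let annotation = format!( "({} -> {})", "0.1.0", "0.1.1" );
assert_eq!( annotation, "(0.1.0 -> 0.1.1)" );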
+ pub fn write_as_list< W >( &self, f : &mut W ) -> fmt::Result + where + W : fmt::Write + { + for ( idx, package ) in self.plans.iter().enumerate() + { + let bump = &package.bump; + writeln!( f, "[{idx}] {} ({} -> {})", package.package_name, bump.old_version, bump.new_version )?; + } + + Ok( () ) + } + } + + impl< 'a > PublishPlanFormer + { + pub fn option_base_temp_dir( mut self, path : Option< path::PathBuf > ) -> Self + { + self.storage.base_temp_dir = path; + self + } + + pub fn package< IntoPackage >( mut self, package : IntoPackage ) -> Self + where + IntoPackage : Into< package::Package< 'a > >, + { + let channel = self.storage.channel.unwrap_or_default(); + let mut plan = PublishSinglePackagePlanner::former(); + if let Some( workspace ) = &self.storage.workspace_dir + { + plan = plan.workspace_dir( workspace.clone() ); + } + if let Some( base_temp_dir ) = &self.storage.base_temp_dir + { + plan = plan.base_temp_dir( base_temp_dir.clone() ); + } + if let Some( dry ) = self.storage.dry + { + plan = plan.dry( dry ); + } + let plan = plan + .channel( channel ) + .package( package ) + .perform(); + let mut plans = self.storage.plans.unwrap_or_default(); + plans.push( plan ); + + self.storage.plans = Some( plans ); + + self + } + + pub fn packages< IntoPackageIter, IntoPackage >( mut self, packages : IntoPackageIter ) -> Self + where + IntoPackageIter : IntoIterator< Item = IntoPackage >, + IntoPackage : Into< package::Package< 'a > >, + { + for package in packages + { + self = self.package( package ); + } + + self + } + + } + + /// Holds information about the publishing process. + #[ derive( Debug, Default, Clone ) ] + pub struct PublishReport + { + /// Retrieves information about the package. + pub get_info : Option< process::Report >, + /// Bumps the version of the package. + pub bump : Option< version::ExtendedBumpReport >, + /// Report of adding changes to the Git repository. + pub add : Option< process::Report >, + /// Report of committing changes to the Git repository. + pub commit : Option< process::Report >, + /// Report of pushing changes to the Git repository. + pub push : Option< process::Report >, + /// Report of publishes the package using the `cargo publish` command. + pub publish : Option< process::Report >, + } + + impl fmt::Display for PublishReport + { + fn fmt( &self, f : &mut fmt::Formatter< '_ > ) -> fmt::Result + { + let PublishReport + { + get_info, + bump, + add, + commit, + push, + publish, + } = self; + + if get_info.is_none() + { + f.write_str( "Empty report" )?; + return Ok( () ) + } + let info = get_info.as_ref().unwrap(); + write!( f, "{}", info )?; + + if let Some( bump ) = bump + { + writeln!( f, "{}", bump )?; + } + if let Some( add ) = add + { + write!( f, "{add}" )?; + } + if let Some( commit ) = commit + { + write!( f, "{commit}" )?; + } + if let Some( push ) = push + { + write!( f, "{push}" )?; + } + if let Some( publish ) = publish + { + write!( f, "{publish}" )?; + } + + Ok( () ) + } + } + + /// Performs package publishing based on the given arguments. + /// + /// # Arguments + /// + /// * `args` - The package publishing instructions. + /// + /// # Returns + /// + /// * `Result` - The result of the publishing operation, including information about the publish, version bump, and git operations. 
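// Taken together, the former-based API above composes roughly as follows. This is a
// sketch only : it needs a real workspace on disk, and `workspace_crate_dir` and
// `packages` are stand-ins for values obtained from that workspace.
//
//   let plan = PublishPlan::former()
//   .workspace_dir( workspace_crate_dir )
//   .option_base_temp_dir( Some( std::env::temp_dir() ) )
//   .channel( channel::Channel::default() )
//   .dry( true )
//   .packages( packages )
//   .form();
//
//   let mut rendered = String::new();
//   plan.write_as_list( &mut rendered )?;
//   let reports = perform_packages_publish( plan )?; // defined just below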
+ + pub fn perform_package_publish( instruction : PackagePublishInstruction ) -> ResultWithReport< PublishReport, Error > + { + let mut report = PublishReport::default(); + let PackagePublishInstruction + { + package_name: _, + mut pack, + mut bump, + mut git_options, + mut publish, + dry, + } = instruction; + pack.dry = dry; + bump.dry = dry; + git_options.dry = dry; + publish.dry = dry; + + report.get_info = Some( cargo::pack( pack ).err_with( || report.clone() )? ); + // aaa : redundant field? // aaa : removed + let bump_report = version::bump( bump ).err_with( || report.clone() )?; + report.bump = Some( bump_report.clone() ); + let git_root = git_options.git_root.clone(); + let git = match entity::git::perform_git_commit( git_options ) + { + Ok( git ) => git, + Err( e ) => + { + version::revert( &bump_report ) + .map_err( | le | format_err!( "Base error:\n{}\nRevert error:\n{}", e.to_string().replace( '\n', "\n\t" ), le.to_string().replace( '\n', "\n\t" ) ) ) + .err_with( || report.clone() )?; + return Err(( report, e )); + } + }; + report.add = git.add; + report.commit = git.commit; + report.publish = match cargo::publish( publish ) + { + Ok( publish ) => Some( publish ), + Err( e ) => + { + tool::git::reset( git_root.as_ref(), true, 1, false ) + .map_err + ( + | le | + format_err!( "Base error:\n{}\nRevert error:\n{}", e.to_string().replace( '\n', "\n\t" ), le.to_string().replace( '\n', "\n\t" ) ) + ) + .err_with( || report.clone() )?; + return Err(( report, e )); + } + }; + + let res = tool::git::push( &git_root, dry ).err_with( || report.clone() )?; + report.push = Some( res ); + + Ok( report ) + } + + /// Perform publishing of multiple packages based on the provided publish plan. + /// + /// # Arguments + /// + /// * `plan` - The publish plan with details of packages to be published. + /// + /// # Returns + /// + /// Returns a `Result` containing a vector of `PublishReport` if successful, else an error. + pub fn perform_packages_publish( plan : PublishPlan ) -> Result< Vec< PublishReport > > + { + let mut report = vec![]; + for package in plan.plans + { + let res = perform_package_publish( package ).map_err( |( current_rep, e )| format_err!( "{}\n{current_rep}\n{e}", report.iter().map( | r | format!( "{r}" ) ).join( "\n" ) ) )?; + report.push( res ); + } + + Ok( report ) + } + +} + +// + +crate::mod_interface! 
+{ + protected use PublishPlan; + protected use PackagePublishInstruction; + protected use PublishSinglePackagePlanner; + protected use PublishReport; + protected use perform_package_publish; + protected use perform_packages_publish; +} diff --git a/module/move/willbe/src/entity/test.rs b/module/move/willbe/src/entity/test.rs index b8298d7f4e..0e4afc5c2d 100644 --- a/module/move/willbe/src/entity/test.rs +++ b/module/move/willbe/src/entity/test.rs @@ -1,63 +1,48 @@ mod private { - // qqq : for Petro : use https://github.com/console-rs/indicatif - use crate::*; use table::*; + // qqq : for Bohdan no asterisk imports, but in special cases use std:: { - collections::{ BTreeMap, BTreeSet, HashSet }, - fmt::Formatter, - sync::{ Arc, Mutex }, - path::Path, + // collections, + fmt, + sync, }; - use std::collections::HashMap; - use std::ffi::OsString; - use std::fmt::{ Debug, Display }; /* qqq : import only fmt here and everywhere */ - use std::marker::PhantomData; - use std::path::PathBuf; - // aaa : for Petro : don't use cargo_metadata directly, use facade - // aaa : ✅ - use colored::Colorize; - // qqq : for Petro : don't do micro imports - // qqq : for Petro : don't do micro imports - #[ cfg( feature = "progress_bar" ) ] - use indicatif:: + use colored::Colorize as _; + use process_tools::process::*; + use error:: { - MultiProgress, - ProgressBar, - ProgressStyle + Error, + untyped::format_err, }; - use rayon::ThreadPoolBuilder; - use process_tools::process::*; - use wtools::error::anyhow::{ Error, format_err }; - use wtools::iter::Itertools; - use wtools::error::Result; - use former::Former; - use channel::Channel; - use optimization::Optimization; - use workspace::WorkspacePackage; - - /// Newtype for package name - #[ derive( Debug, Default, Clone ) ] - pub struct PackageName( String ); + use package::PackageName; + + #[ derive( Debug, Error ) ] + pub enum TestError + { + #[ error( "Common error: {0}" ) ] + Common( #[ from ] error::untyped::Error ), + #[ error( "Path error: {0}" ) ] + Path( #[ from ] PathError ), + } /// Represents a variant for testing purposes. - #[ derive( Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Former ) ] + #[ derive( Debug, Clone, Eq, PartialEq, Ord, PartialOrd, former::Former ) ] pub struct TestVariant { /// Represents the channel for the test variant. - channel : Channel, + channel : channel::Channel, /// Represents the optimization setting for the test variant. - optimization : Optimization, + optimization : optimization::Optimization, /// Contains additional features or characteristics of the test variant. - features : BTreeSet< String >, + features : collection::BTreeSet, } - impl Display for TestVariant + impl fmt::Display for TestVariant { - fn fmt( &self, f : &mut Formatter< '_ >) -> std::fmt::Result + fn fmt( &self, f : &mut fmt::Formatter< '_ >) -> fmt::Result { let features = if self.features.is_empty() { " ".to_string() } else { self.features.iter().join( " " ) }; writeln!( f, "{} {} {}", self.optimization, self.channel, features )?; @@ -72,9 +57,9 @@ mod private packages_plan : Vec< TestPackagePlan >, } - impl Display for TestPlan + impl fmt::Display for TestPlan { - fn fmt( &self, f : &mut Formatter< '_ >) -> std::fmt::Result + fn fmt( &self, f : &mut fmt::Formatter< '_ >) -> std::fmt::Result { writeln!( f, "Plan: " )?; for plan in &self.packages_plan @@ -98,19 +83,20 @@ mod private /// `with_all_features` - If it's true - add to powerset one subset which contains all features. 
/// `with_none_features` - If it's true - add to powerset one empty subset. /// `variants_cap` - Maximum of subset in powerset - pub fn try_from + pub fn try_from< 'a > ( - packages : &[ WorkspacePackage ], - channels : &HashSet< Channel >, + packages : impl core::iter::Iterator< Item = WorkspacePackageRef< 'a > >, + channels : &collection::HashSet< channel::Channel >, power : u32, include_features : Vec< String >, exclude_features : Vec< String >, - optimizations : &HashSet< Optimization >, + optimizations : &collection::HashSet< optimization::Optimization >, enabled_features : Vec< String >, with_all_features : bool, with_none_features : bool, variants_cap : u32, - ) -> Result< Self > + ) + -> Result< Self, TestError > { let mut packages_plan = vec![]; for package in packages @@ -142,17 +128,18 @@ mod private #[ derive( Debug ) ] pub struct TestPackagePlan { - enabled_features : BTreeSet< String >, - package : PathBuf, - test_variants : BTreeSet< TestVariant >, + enabled_features : collection::BTreeSet< String >, + // package : PathBuf, + crate_dir : CrateDir, + test_variants : collection::BTreeSet< TestVariant >, } - impl Display for TestPackagePlan + impl fmt::Display for TestPackagePlan { - fn fmt( &self, f : &mut Formatter< '_ >) -> std::fmt::Result + fn fmt( &self, f : &mut fmt::Formatter< '_ >) -> std::fmt::Result { - writeln!( f, "Package : {}\nThe tests will be executed using the following configurations :", self.package.file_name().unwrap().to_string_lossy() )?; - let mut all_features = BTreeSet::new(); + writeln!( f, "Package : {}\nThe tests will be executed using the following configurations :", self.crate_dir.clone().absolute_path() )?; + let mut all_features = collection::BTreeSet::new(); for variant in &self.test_variants { let features = variant.features.iter().cloned(); @@ -216,22 +203,24 @@ mod private /// `with_all_features` - If it's true - add to powerset one subset which contains all features. /// `with_none_features` - If it's true - add to powerset one empty subset. 
/// `variants_cap` - Maximum of subset in powerset - fn try_from + fn try_from< 'a > ( - package : &WorkspacePackage, - channels : &HashSet< Channel >, + package : WorkspacePackageRef< 'a >, + channels : &collection::HashSet< channel::Channel >, power : u32, include_features : &[ String ], exclude_features : &[ String ], - optimizations : &HashSet< Optimization >, + optimizations : &collection::HashSet< optimization::Optimization >, enabled_features : &[ String ], with_all_features : bool, with_none_features : bool, variants_cap : u32, - ) -> Result< Self > + ) + -> Result< Self, TestError > { - let dir = package.manifest_path().parent().unwrap().as_std_path().to_path_buf(); - let mut test_variants = BTreeSet::new(); + // let crate_dir = package.manifest_file().parent().unwrap().as_std_path().to_path_buf(); + let crate_dir = package.crate_dir()?; + let mut test_variants = collection::BTreeSet::new(); let features_powerset = features::features_powerset ( package, @@ -266,14 +255,22 @@ mod private Self { enabled_features: enabled_features.iter().cloned().collect(), - package : dir, + crate_dir, test_variants, } ) } } - fn generate_features_cells( ff : &mut Vec< String >, variant : &TestVariant, row : &mut Row, mut counter : usize, mut flag : bool, enabled_features : &BTreeSet< String > ) + fn generate_features_cells + ( + ff : &mut Vec< String >, + variant : &TestVariant, + row : &mut Row, + mut counter : usize, + mut flag : bool, + enabled_features : &collection::BTreeSet< String > + ) { for feature in ff { @@ -296,29 +293,20 @@ mod private } } - #[ derive( Debug, Former ) ] + #[ derive( Debug, former::Former ) ] pub struct PackageTestOptions< 'a > { - temp_path : Option< PathBuf >, + temp_path : Option< path::PathBuf >, plan : &'a TestPackagePlan, dry : bool, - progress_bar_feature : Option< PackageTestOptionsProgressBarFeature< 'a > >, - } - - #[ derive( Debug ) ] - struct PackageTestOptionsProgressBarFeature< 'a > - { - phantom : PhantomData< &'a () >, - #[ cfg( feature = "progress_bar" ) ] - multi_progress : &'a Option< &'a MultiProgress >, + with_progress : bool, #[ cfg( feature = "progress_bar" ) ] - progress_bar : &'a Option< ProgressBar > + progress_bar : progress_bar::ProgressBar< 'a > } - impl PackageTestOptionsFormer< '_ > { - pub fn option_temp( mut self, value : impl Into< Option< PathBuf > > ) -> Self + pub fn option_temp( mut self, value : impl Into< Option< path::PathBuf > > ) -> Self { self.storage.temp_path = value.into(); self @@ -326,14 +314,14 @@ mod private } /// Represents the options for the test. - #[ derive( Debug, Former, Clone ) ] + #[ derive( Debug, former::Former, Clone ) ] pub struct SingleTestOptions { /// Specifies the release channels for rust. /// More details : https://rust-lang.github.io/rustup/concepts/channels.html#:~:text=Rust%20is%20released%20to%20three,releases%20are%20made%20every%20night. - channel : Channel, + channel : channel::Channel, /// Specifies the optimization for rust. - optimization : Optimization, + optimization : optimization::Optimization, /// Determines whether to use default features in the test. /// Enabled by default. #[ former( default = true ) ] @@ -343,9 +331,9 @@ mod private #[ former( default = false ) ] with_all_features : bool, /// Specifies a list of features to be enabled in the test. 
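// The plan above is built from a powerset of the package's features, limited by
// `power` and `variants_cap` and optionally extended with the empty and the full
// feature set. The real logic lives in `features::features_powerset`; below is only
// a standalone illustration of the core idea, assuming the itertools crate and using
// invented feature names.

use itertools::Itertools;

let features = [ "enabled", "full", "tracing" ];
let power = 2;
let subsets : Vec< Vec< &str > > = features
.iter()
.copied()
.powerset()
.filter( | subset | subset.len() <= power )
.collect();
// -> [], [ enabled ], [ full ], [ tracing ], [ enabled, full ], [ enabled, tracing ], [ full, tracing ]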
- enable_features : BTreeSet< String >, + enable_features : collection::BTreeSet< String >, /// Temp directory path - temp_directory_path : Option< PathBuf >, + temp_directory_path : Option< path::PathBuf >, /// A boolean indicating whether to perform a dry run or not. dry : bool, /// RUST_BACKTRACE @@ -357,15 +345,17 @@ mod private { fn as_rustup_args( &self ) -> Vec< String > { - debug_assert!( !self.with_default_features ); // qqq : remove later - debug_assert!( !self.with_all_features ); // qqq : remove later + debug_assert!( !self.with_default_features ); // aaa : remove later + debug_assert!( !self.with_all_features ); // aaa : remove later [ "run".into(), self.channel.to_string(), "cargo".into(), "test".into() ] .into_iter() - .chain( if self.optimization == Optimization::Release { Some( "--release".into() ) } else { None } ) + .chain( if self.optimization == optimization::Optimization::Release { Some( "--release".into() ) } else { None } ) .chain( if self.with_default_features { None } else { Some( "--no-default-features".into() ) } ) - // qqq : for Petro : bad, --no-default-features is always enabled! + // aaa : for Petro : bad, --no-default-features is always enabled! + // aaa : add `debug_assert!( !self.with_default_features )` .chain( if self.with_all_features { Some( "--all-features".into() ) } else { None } ) - // qqq : for Petro : bad, --all-features is always disabled! + // aaa : for Petro : bad, --all-features is always disabled! + // aaa : add `debug_assert!( !self.with_all_features )` .chain( if self.enable_features.is_empty() { None } else { Some([ "--features".into(), self.enable_features.iter().join( "," ) ]) }.into_iter().flatten() ) .chain( self.temp_directory_path.clone().map( | p | vec![ "--target-dir".to_string(), p.to_string_lossy().into() ] ).into_iter().flatten() ) .collect() @@ -385,13 +375,11 @@ mod private /// Returns a `Result` containing a `Report` if the command is executed successfully, /// or an error if the command fails to execute. pub fn _run< P >( path : P, options : SingleTestOptions ) -> Result< Report, Report > + // xxx where - P : AsRef< Path > + P : AsRef< path::Path > { let ( program, args ) = ( "rustup", options.as_rustup_args() ); - // qqq : for Petro : rustup ??? - // aaa : for Petro : RUST_BACKTRACE=1 ?? // add to SingleTestOptions, by default true - // aaa : add if options.dry { @@ -409,10 +397,10 @@ mod private } else { - let envs = if options.backtrace { [( "RUST_BACKTRACE".to_string(), "full".to_string() )].into_iter().collect() } else { HashMap::new() }; + let envs = if options.backtrace { [( "RUST_BACKTRACE".to_string(), "full".to_string() )].into_iter().collect() } else { collection::HashMap::new() }; Run::former() .bin_path( program ) - .args( args.into_iter().map( OsString::from ).collect::< Vec< _ > >() ) + .args( args.into_iter().map( std::ffi::OsString::from ).collect::< Vec< _ > >() ) .current_path( path.as_ref().to_path_buf() ) .joining_streams( true ) .env_variable( envs ) @@ -421,7 +409,7 @@ mod private } /// `TestOptions` is a structure used to store the arguments for tests. - #[ derive( Former ) ] + #[ derive( former::Former ) ] pub struct TestOptions { /// Plan for testing @@ -431,40 +419,22 @@ mod private pub concurrent : u32, /// `temp_path` - path to temp directory. - pub temp_path : Option< PathBuf >, + pub temp_path : Option< path::PathBuf >, /// A boolean indicating whether to perform a dry run or not. 
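// For one variant the arguments above expand to a rustup invocation, so the
// toolchain can differ per variant. A hypothetical nightly + release run with the
// features "a" and "b" is equivalent to :
//
//   rustup run nightly cargo test --release --no-default-features --features a,b
//
// plus `--target-dir <tmp>` when a temp directory is set, and `RUST_BACKTRACE=full`
// in the environment when `backtrace` is enabled.

let features : Vec< &str > = vec![ "a", "b" ];
assert_eq!( features.join( "," ), "a,b" ); // as passed to `--features`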
pub dry : bool, - /// This field contains fields for progress_bar feature - pub feature : Option< TestOptionsProgressBarFeature >, + /// Progress bar flag. + pub with_progress : bool, } - // qqq : for Petro : remove after Former fix - /// Structure for progress bar feature field - pub struct TestOptionsProgressBarFeature - { - #[ cfg( feature = "progress_bar" ) ] - /// Base progress bar - pub multiprocess : MultiProgress, - - #[ cfg( feature = "progress_bar" ) ] - /// Style for progress bar - pub style : ProgressStyle, - } + // aaa : for Petro : remove after Former fix + // aaa : done - impl Debug for TestOptionsProgressBarFeature + impl fmt::Debug for TestOptions { - fn fmt( &self, f : &mut Formatter< '_ >) -> std::fmt::Result + fn fmt( &self, f : &mut fmt::Formatter< '_ > ) -> std::fmt::Result { - f.debug_struct( "TestOptionsProgressBarFeature" ) - .finish() - } - } - - impl Debug for TestOptions - { - fn fmt( &self, f : &mut Formatter< '_ >) -> std::fmt::Result { f.debug_struct( "TestOptions" ) .field( "plan", &self.plan) .field( "concurrent", &self.concurrent) @@ -476,7 +446,7 @@ mod private impl TestOptionsFormer { - pub fn option_temp( mut self, value : impl Into< Option< PathBuf > > ) -> Self + pub fn option_temp( mut self, value : impl Into< Option< path::PathBuf > > ) -> Self { self.storage.temp_path = value.into(); self @@ -505,15 +475,14 @@ mod private /// for which the tests were run, and the values are nested `BTreeMap` where the keys are /// feature names and the values are `Report` structs representing the test results for /// the specific feature and channel. - pub tests : BTreeMap< TestVariant, Result< Report, Report > > , + pub tests : collection::BTreeMap< TestVariant, Result< Report, Report > >, /// Enabled features - pub enabled_features : BTreeSet< String >, - // qqq : for Petro : rid off map of map of map, keep flat map + pub enabled_features : collection::BTreeSet, } - impl Display for TestReport + impl fmt::Display for TestReport { - fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result + fn fmt( &self, f : &mut fmt::Formatter< '_ > ) -> std::fmt::Result { if self.dry { @@ -521,7 +490,7 @@ mod private } let mut failed = 0; let mut success = 0; - let mut all_features = BTreeSet::new(); + let mut all_features = collection::BTreeSet::new(); for variant in self.tests.keys() { let features = variant.features.iter().cloned(); @@ -550,7 +519,7 @@ mod private } table.set_header( header_row ); - writeln!( f, "{} {}\n", "\n=== Module".bold(), self.package_name.0.bold() )?; + writeln!( f, "{} {}\n", "\n=== Module".bold(), self.package_name.bold() )?; if self.tests.is_empty() { writeln!( f, "unlucky" )?; @@ -628,16 +597,17 @@ mod private pub failure_reports : Vec< TestReport >, } - impl Display for TestsReport + impl fmt::Display for TestsReport { - fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result + fn fmt( &self, f : &mut fmt::Formatter< '_ > ) -> std::fmt::Result { - if self.dry - { - writeln!( f, "\nYou can execute the plan with 'will .test dry : 0'." )?; - // qqq : for Petro : bad. should be exact command with exact parameters / при виклику зовнішніх команд повинен бути вивід у консоль про цей виклик і його аргументи за виключенням коли ційлий блок виводу прихований (у моєму випадку при фейлі) - return Ok( () ) - } + // if self.dry + // { + // writeln!( f, "\nYou can execute the plan with 'will .test dry : 0'." )?; + // // aaa : for Petro : bad. 
should be exact command with exact parameters / при виклику зовнішніх команд повинен бути вивід у консоль про цей виклик і його аргументи за виключенням коли ційлий блок виводу прихований (у моєму випадку при фейлі) + // // aaa : coment in because its redundant, this behavior already implemented + // return Ok( () ) + // } if self.success_reports.is_empty() && self.failure_reports.is_empty() { writeln!( f, "The tests have not been run." )?; @@ -668,13 +638,13 @@ mod private /// `tests_run` is a function that runs tests on a given package with specified arguments. /// It returns a `TestReport` on success, or a `TestReport` and an `Error` on failure. - pub fn run( options : &PackageTestOptions< '_ > ) -> Result< TestReport, ( TestReport, Error ) > + pub fn run( options : &PackageTestOptions< '_ > ) -> Result< TestReport, ( TestReport, TestError ) > { let mut report = TestReport::default(); report.dry = options.dry; report.enabled_features = options.plan.enabled_features.clone(); - let report = Arc::new( Mutex::new( report ) ); - let dir = options.plan.package.clone(); + let report = sync::Arc::new( sync::Mutex::new( report ) ); + let crate_dir = options.plan.crate_dir.clone(); rayon::scope ( @@ -684,7 +654,7 @@ mod private { let TestVariant{ channel, optimization, features } = variant; let r = report.clone(); - let dir = dir.clone(); + let crate_dir = crate_dir.clone(); s.spawn ( move | _ | @@ -698,33 +668,29 @@ mod private if let Some( p ) = options.temp_path.clone() { - let path = p.join( path_tools::path::unique_folder_name().unwrap() ); - // qqq : for Petro : rid off unwrap + let path = p.join( path::unique_folder_name().unwrap() ); std::fs::create_dir_all( &path ).unwrap(); args_t = args_t.temp_directory_path( path ); } #[ cfg( feature = "progress_bar" ) ] - let _s = + if options.with_progress { - let s = if let Some( multi_progress ) = options.progress_bar_feature.as_ref().and_then( | f | f.multi_progress.as_ref() ) + let _s = { - let s = multi_progress.add( ProgressBar::new_spinner().with_message( format!( "{}", variant ) ) ); + let s = options.progress_bar.multi_progress.add( indicatif::ProgressBar::new_spinner().with_message( format!( "{}", variant ) ) ); s.enable_steady_tick( std::time::Duration::from_millis( 100 ) ); - Some( s ) - } - else - { - None + s }; - // spinner.enable_steady_tick( std::time::Duration::from_millis( 100 ) ); - s - }; + } let args = args_t.form(); let temp_dir = args.temp_directory_path.clone(); - let cmd_rep = _run( dir, args ); + let cmd_rep = _run( crate_dir, args ); r.lock().unwrap().tests.insert( variant.clone(), cmd_rep ); #[ cfg( feature = "progress_bar" ) ] - options.progress_bar_feature.as_ref().unwrap().progress_bar.as_ref().map( | b | b.inc( 1 ) ); + if options.with_progress + { + options.progress_bar.progress_bar.inc( 1 ); + } if let Some( path ) = temp_dir { std::fs::remove_dir_all( path ).unwrap(); @@ -736,21 +702,25 @@ mod private ); // unpack. all tasks must be completed until now - let report = Mutex::into_inner( Arc::into_inner( report ).unwrap() ).unwrap(); + let report = sync::Mutex::into_inner( sync::Arc::into_inner( report ).unwrap() ).unwrap(); let at_least_one_failed = report .tests .iter() .any( | ( _, result ) | result.is_err() ); - if at_least_one_failed { Err( ( report, format_err!( "Some tests was failed" ) ) ) } else { Ok( report ) } + if at_least_one_failed { Err( ( report, format_err!( "Some tests was failed" ).into() ) ) } else { Ok( report ) } } /// Run tests for given packages. 
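// The progress reporting in `run` above builds on indicatif : one bar per package
// (added through the `MultiProgress` wrapper) and one spinner per test variant. A
// minimal standalone sketch of the same pattern, with invented counts and messages;
// the template string is the one used by `MultiProgress::default` earlier in this
// patch.

use indicatif::{ MultiProgress, ProgressBar, ProgressStyle };

let multi = MultiProgress::new();
let bar = multi.add( ProgressBar::new( 4 ) );
bar.set_style
(
  ProgressStyle::with_template( "[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}" )
  .unwrap()
  .progress_chars( "##-" )
);
let spinner = multi.add( ProgressBar::new_spinner().with_message( "nightly debug feature_a" ) );
spinner.enable_steady_tick( std::time::Duration::from_millis( 100 ) );
bar.inc( 1 ); // one variant finished
spinner.finish_and_clear();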
- pub fn tests_run( args : &TestOptions ) -> Result< TestsReport, ( TestsReport, Error ) > + pub fn tests_run( args : &TestOptions ) -> Result< TestsReport, ( TestsReport, TestError ) > { + #[ cfg( feature = "progress_bar" ) ] + let multi_progress = progress_bar::MultiProgress::default(); + #[ cfg( feature = "progress_bar" ) ] + let mm = &multi_progress; let mut report = TestsReport::default(); report.dry = args.dry; - let report = Arc::new( Mutex::new( report ) ); - let pool = ThreadPoolBuilder::new().use_current_thread().num_threads( args.concurrent as usize ).build().unwrap(); + let report = sync::Arc::new( sync::Mutex::new( report ) ); + let pool = rayon::ThreadPoolBuilder::new().use_current_thread().num_threads( args.concurrent as usize ).build().unwrap(); pool.scope ( | s | @@ -762,35 +732,17 @@ mod private ( move | _ | { + let test_package_options = PackageTestOptions::former() + .option_temp( args.temp_path.clone() ) + .plan( plan ) + .dry( args.dry ) + .with_progress( args.with_progress ); + #[ cfg( feature = "progress_bar" ) ] - let pb = + let test_package_options = { - let pb = if let Some( feature ) = args.feature.as_ref() - { - let pb = feature.multiprocess.add(ProgressBar::new(plan.test_variants.len() as u64)); - pb.set_style( args.feature.as_ref().unwrap().style.clone() ); - pb.inc( 0 ); - Some( pb ) - } - else - { - None - }; - pb + test_package_options.progress_bar( mm.progress_bar( plan.test_variants.len() as u64 ) ) }; - #[ cfg( feature = "progress_bar" ) ] - let multi_progress = args.feature.as_ref().map( | f | &f.multiprocess ); - let test_package_options = PackageTestOptions::former().option_temp( args.temp_path.clone() ).plan( plan ).dry( args.dry ); - #[ cfg( feature = "progress_bar" ) ] - let test_package_options = test_package_options.progress_bar_feature - ( - PackageTestOptionsProgressBarFeature - { - phantom : PhantomData, - multi_progress : &multi_progress, - progress_bar : &pb, - } - ); let options = test_package_options.form(); match run( &options ) { @@ -808,14 +760,14 @@ mod private } } ); - let report = Arc::into_inner( report ).unwrap().into_inner().unwrap(); + let report = sync::Arc::into_inner( report ).unwrap().into_inner().unwrap(); if report.failure_reports.is_empty() { Ok( report ) } else { - Err(( report, format_err!( "Some tests was failed" ) )) + Err(( report, format_err!( "Some tests was failed" ).into() )) } } } @@ -828,12 +780,9 @@ crate::mod_interface! protected use _run; protected use TestPlan; - protected use TestOptions; protected use TestReport; protected use TestsReport; protected use run; protected use tests_run; - - protected use TestOptionsProgressBarFeature; -} \ No newline at end of file +} diff --git a/module/move/willbe/src/entity/version.rs b/module/move/willbe/src/entity/version.rs index e3900e1d57..c911a5b7e1 100644 --- a/module/move/willbe/src/entity/version.rs +++ b/module/move/willbe/src/entity/version.rs @@ -12,11 +12,10 @@ mod private use toml_edit::value; use semver::Version as SemVersion; - use wtools::error::for_app::Result; + use error::untyped::Result; use manifest::Manifest; - use _path::AbsolutePath; use package::Package; - use wtools::{ error::anyhow::format_err, iter::Itertools }; + use { error::untyped::format_err, iter::Itertools }; /// Wrapper for a SemVer structure #[ derive( Debug, Clone, Eq, PartialEq, Ord, PartialOrd ) ] @@ -109,61 +108,6 @@ mod private } } - /// Bump version by manifest. - /// It takes data from the manifest and increments the version number according to the semantic versioning scheme. 
- /// It then writes the updated manifest file back to the same path, unless the flag is set to true, in which case it only returns the new version number as a string. - /// - /// # Args : - /// - `manifest` - a manifest mutable reference - /// - `dry` - a flag that indicates whether to apply the changes or not - /// - `true` - does not modify the manifest file, but only returns the new version; - /// - `false` - overwrites the manifest file with the new version. - /// - /// # Returns : - /// - `Ok` - the new version number as a string; - /// - `Err` - if the manifest file cannot be read, written, parsed. - pub fn bump( manifest : &mut Manifest, dry : bool ) -> Result< BumpReport, manifest::ManifestError > - { - let mut report = BumpReport::default(); - - let version= - { - if manifest.manifest_data.is_none() - { - manifest.load()?; - } - let data = manifest.manifest_data.as_ref().unwrap(); - if !manifest.package_is()? - { - return Err( manifest::ManifestError::NotAPackage ); - } - let package = data.get( "package" ).unwrap(); - - let version = package.get( "version" ); - if version.is_none() - { - return Err( manifest::ManifestError::CannotFindValue( "version".into() ) ); - } - let version = version.unwrap().as_str().unwrap(); - report.name = Some( package[ "name" ].as_str().unwrap().to_string() ); - report.old_version = Some( version.to_string() ); - - Version::from_str( version ).map_err( | e | manifest::ManifestError::InvalidValue( e.to_string() ) )? - }; - - let new_version = version.bump().to_string(); - report.new_version = Some( new_version.clone() ); - - if !dry - { - let data = manifest.manifest_data.as_mut().unwrap(); - data[ "package" ][ "version" ] = value( &new_version ); - manifest.store()?; - } - - Ok( report ) - } - // qqq : we have to replace the implementation above with the implementation below, don't we? // qqq : for Bohdan : duplication? @@ -206,7 +150,7 @@ mod private /// Package new version. pub new_version : Option< String >, /// Files that should(already) changed for bump. - pub changed_files : Vec< AbsolutePath > + pub changed_files : Vec< ManifestFile > } impl std::fmt::Display for ExtendedBumpReport @@ -237,19 +181,22 @@ mod private /// /// # Arguments /// - /// * `args` - The options for version bumping. + /// * `o` - The options for version bumping. /// /// # Returns /// /// Returns a result containing the extended bump report if successful. 
/// - pub fn version_bump( o : BumpOptions ) -> Result< ExtendedBumpReport > + // qqq : should be typed error, apply err_with + // qqq : don't use 1-prameter Result + pub fn bump( o : BumpOptions ) -> Result< ExtendedBumpReport > { let mut report = ExtendedBumpReport::default(); - let package_path = o.crate_dir.absolute_path().join( "Cargo.toml" ); - let package = Package::try_from( package_path.clone() ).map_err( | e | format_err!( "{report:?}\n{e:#?}" ) )?; + // let manifest_file = o.crate_dir.inner().join( "Cargo.toml" ); + let manifest_file = o.crate_dir.manifest_file(); + let package = Package::try_from( manifest_file.clone() ).map_err( | e | format_err!( "{report:?}\n{e:#?}" ) )?; let name = package.name().map_err( | e | format_err!( "{report:?}\n{e:#?}" ) )?; - report.name = Some( name.clone() ); + report.name = Some( name.into() ); let package_version = package.version().map_err( | e | format_err!( "{report:?}\n{e:#?}" ) )?; let current_version = version::Version::try_from( package_version.as_str() ).map_err( | e | format_err!( "{report:?}\n{e:#?}" ) )?; if current_version > o.new_version @@ -262,17 +209,20 @@ mod private let mut package_manifest = package.manifest().map_err( | e | format_err!( "{report:?}\n{e:#?}" ) )?; if !o.dry { - let data = package_manifest.manifest_data.as_mut().unwrap(); + // let data = package_manifest.data.as_mut().unwrap(); + let data = &mut package_manifest.data; data[ "package" ][ "version" ] = value( &o.new_version.to_string() ); package_manifest.store()?; } - report.changed_files = vec![ package_path ]; + report.changed_files = vec![ manifest_file ]; let new_version = &o.new_version.to_string(); for dep in &o.dependencies { - let manifest_path = dep.absolute_path().join( "Cargo.toml" ); - let mut manifest = manifest::open( manifest_path.clone() ).map_err( | e | format_err!( "{report:?}\n{e:#?}" ) )?; - let data = manifest.manifest_data.as_mut().unwrap(); + // let manifest_file = dep.absolute_path().join( "Cargo.toml" ); + let manifest_file = dep.clone().manifest_file(); + let mut manifest = Manifest::try_from( manifest_file.clone() ).map_err( | e | format_err!( "{report:?}\n{e:#?}" ) )?; + // let data = manifest.data.as_mut().unwrap(); + let data = &mut manifest.data; let item = if let Some( item ) = data.get_mut( "package" ) { item } else if let Some( item ) = data.get_mut( "workspace" ) { item } else { return Err( format_err!( "{report:?}\nThe manifest nor the package and nor the workspace" ) ); }; @@ -291,7 +241,7 @@ mod private } } if !o.dry { manifest.store().map_err( | e | format_err!( "{report:?}\n{e:#?}" ) )?; } - report.changed_files.push( manifest_path ); + report.changed_files.push( manifest_file ); } Ok( report ) @@ -306,7 +256,8 @@ mod private /// # Returns /// /// Returns `Ok(())` if the version is reverted successfully. Returns `Err` with an error message if there is any issue with reverting the version. 
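// At its core the bump above rewrites `package.version` in the crate manifest with
// toml_edit, then patches the dependency entries of each dependent manifest the same
// way. A standalone sketch of that single edit; the manifest text and versions are
// invented, and depending on the toml_edit version the document type is `Document`
// or `DocumentMut`.

use toml_edit::value;

let mut doc : toml_edit::DocumentMut = "[package]\nname = \"demo\"\nversion = \"0.1.0\"\n"
.parse()
.unwrap();
doc[ "package" ][ "version" ] = value( "0.1.1" );
assert_eq!( doc[ "package" ][ "version" ].as_str(), Some( "0.1.1" ) );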
- pub fn version_revert( report : &ExtendedBumpReport ) -> Result< () > + // qqq : don't use 1-prameter Result + pub fn revert( report : &ExtendedBumpReport ) -> Result< () > { let Some( name ) = report.name.as_ref() else { return Ok( () ) }; let Some( old_version ) = report.old_version.as_ref() else { return Ok( () ) }; @@ -337,7 +288,7 @@ mod private for path in &report.changed_files { - let mut manifest = manifest::open( path.clone() )?; + let mut manifest = Manifest::try_from( path.clone() )?; let data = manifest.data(); if let Some( workspace ) = data.get_mut( "workspace" ) { @@ -361,6 +312,58 @@ mod private Ok( () ) } + + // qqq : for Bohdan : not used? why is it needed? + /// Bump version by manifest. + /// It takes data from the manifest and increments the version number according to the semantic versioning scheme. + /// It then writes the updated manifest file back to the same path, unless the flag is set to true, in which case it only returns the new version number as a string. + /// + /// # Args : + /// - `manifest` - a manifest mutable reference + /// - `dry` - a flag that indicates whether to apply the changes or not + /// - `true` - does not modify the manifest file, but only returns the new version; + /// - `false` - overwrites the manifest file with the new version. + /// + /// # Returns : + /// - `Ok` - the new version number as a string; + /// - `Err` - if the manifest file cannot be read, written, parsed. + pub fn manifest_bump( manifest : &mut Manifest, dry : bool ) -> Result< BumpReport, manifest::ManifestError > + { + let mut report = BumpReport::default(); + + let version= + { + let data = &manifest.data; + if !manifest.package_is() + { + return Err( manifest::ManifestError::NotAPackage ); + } + let package = data.get( "package" ).unwrap(); + + let version = package.get( "version" ); + if version.is_none() + { + return Err( manifest::ManifestError::CannotFindValue( "version".into() ) ); + } + let version = version.unwrap().as_str().unwrap(); + report.name = Some( package[ "name" ].as_str().unwrap().to_string() ); + report.old_version = Some( version.to_string() ); + + Version::from_str( version ).map_err( | e | manifest::ManifestError::InvalidValue( e.to_string() ) )? + }; + + let new_version = version.bump().to_string(); + report.new_version = Some( new_version.clone() ); + + if !dry + { + let data = &mut manifest.data; + data[ "package" ][ "version" ] = value( &new_version ); + manifest.store()?; + } + + Ok( report ) + } } // @@ -368,20 +371,21 @@ mod private crate::mod_interface! { /// Version entity. - protected use Version; + exposed use Version; /// Report for bump operation. protected use BumpReport; - /// Bump version. - protected use bump; - /// Options for version bumping. protected use BumpOptions; /// Report about a changing version with list of files that was changed. protected use ExtendedBumpReport; + /// Bumps the version of a package and its dependencies. - protected use version_bump; + protected use manifest_bump; + /// Bump version. + protected use bump; + /// Reverts the version of a package. 
- protected use version_revert; + protected use revert; } diff --git a/module/move/willbe/src/entity/workspace.rs b/module/move/willbe/src/entity/workspace.rs index b477aa3c97..3fc37828fd 100644 --- a/module/move/willbe/src/entity/workspace.rs +++ b/module/move/willbe/src/entity/workspace.rs @@ -1,367 +1,296 @@ mod private { - use std::collections::BTreeMap; use crate::*; - use std::path::Path; - use cargo_metadata::camino::{ Utf8Path, Utf8PathBuf }; - use petgraph::Graph; - use serde::Deserialize; - use serde_json::Value; - use wtools::error:: + // qqq : for Bohdan : bad + // use std::*; + + use std::slice; + use former::{ Former }; + + /// Stores information about the current workspace. + #[ derive( Debug, Clone ) ] + pub struct Workspace { - for_app::Context, - for_lib::Error, - Result - }; - use _path::AbsolutePath; - - /// Facade for cargo_metadata::Package - #[ derive( Debug, Clone, Deserialize ) ] - pub struct WorkspacePackage + /// Metadata of the workspace, containing detailed information about the packages, dependencies, and other workspace-related data. + pub metadata : cargo_metadata::Metadata, + /// The directory containing the manifest file (`Cargo.toml`) of the workspace. + pub crate_dir : CrateDir, + } + + /// Represents errors related to workspace operations. + #[ derive( Debug, error::typed::Error ) ] + pub enum WorkspaceInitError { - #[ serde( flatten ) ] - inner : cargo_metadata::Package + /// Something went wrong with path to a workspace. + #[ error( "Path error. Details: {0}" ) ] + Path( #[ from ] PathError ), + /// Something went wrong with the workspace' data + #[ error( "Can not load workspace data. Details: {0}" ) ] + Metadata( #[ from ] cargo_metadata::Error ), + /// Files error + #[ error( "I/O error: {0}" ) ] + IO( #[ from ] std::io::Error ), } - - impl From< cargo_metadata::Package > for WorkspacePackage + + impl TryFrom< CrateDir > for Workspace { - fn from( inner : cargo_metadata::Package) -> Self + type Error = WorkspaceInitError; + + /// Load data from current directory + fn try_from( mut crate_dir : CrateDir ) -> Result< Self, Self::Error > { - Self + let metadata = cargo_metadata::MetadataCommand::new() + .current_dir( crate_dir.as_ref() ) + .no_deps() + .exec()?; + // inout crate dir may refer on crate's manifest dir, not workspace's manifest dir + crate_dir = ( &metadata.workspace_root ).try_into()?; + Ok( Self { - inner - } + metadata, + crate_dir, + }) } + } - - impl WorkspacePackage + + impl TryFrom< CurrentPath > for Workspace { - /// The name field as given in the Cargo.toml - pub fn name( &self ) -> &String - { - &self.inner.name - } - - /// List of dependencies of this particular package - pub fn dependencies( &self ) -> Vec< Dependency > - { - self.inner.dependencies.iter().cloned().map( Dependency::from ).collect() - } - - /// Path containing the Cargo.toml - pub fn manifest_path( &self ) -> &Utf8Path - { - self.inner.manifest_path.as_path() - } - - /// The version field as specified in the Cargo.toml - pub fn version( &self ) -> semver::Version - { - self.inner.version.clone() - } - - /// List of registries to which this package may be published (derived from the publish field). - /// Publishing is unrestricted if None, and forbidden if the Vec is empty. - /// This is always None if running with a version of Cargo older than 1.39. - pub fn publish( &self ) -> Option< &Vec< String > > - { - self.inner.publish.as_ref() - } - - ///Contents of the free form package.metadata section. 
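The new `TryFrom< CrateDir >` constructor boils down to a single `cargo_metadata` invocation; a self-contained sketch of that call (hypothetical function name):

```rust
// Run `cargo metadata` for a crate directory, without resolving the dependency graph.
fn load_workspace_metadata( crate_dir : &std::path::Path )
-> Result< cargo_metadata::Metadata, cargo_metadata::Error >
{
  cargo_metadata::MetadataCommand::new()
  .current_dir( crate_dir )
  .no_deps()
  .exec()
}
// let metadata = load_workspace_metadata( std::path::Path::new( "." ) )?;
// println!( "workspace root : {}", metadata.workspace_root );
```

Because `cargo metadata` reports the workspace root even when invoked from a member crate, the constructor re-derives `crate_dir` from `metadata.workspace_root` after the call.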
- /// This contents can be serialized to a struct using serde: - /// ``` rust - /// use serde::Deserialize; - /// use serde_json::json; - /// - /// #[ derive( Debug, Deserialize ) ] - /// struct SomePackageMetadata - /// { - /// some_value : i32, - /// } - /// - /// fn main() - /// { - /// let value = json! - /// ({ - /// "some_value" : 42, - /// }); - /// - /// let package_metadata : SomePackageMetadata = serde_json::from_value( value ).unwrap(); - /// assert_eq!( package_metadata.some_value, 42 ); - /// } - /// ``` - pub fn metadata( &self ) -> &Value - { - &self.inner.metadata - } - - /// The repository URL as specified in the Cargo.toml - pub fn repository( &self ) -> Option< &String > - { - self.inner.repository.as_ref() - } - - /// Features provided by the crate, mapped to the features required by that feature. - pub fn features( &self ) -> &BTreeMap< String, Vec< String > > + type Error = WorkspaceInitError; + + /// Load data from current directory + fn try_from( cd : CurrentPath ) -> Result< Self, Self::Error > { - &self.inner.features + Self::try_from( CrateDir::transitive_try_from::< AbsolutePath >( cd )? ) } + } - - /// A dependency of the main crate - #[ derive( Debug ) ] - pub struct Dependency + + impl From< cargo_metadata::Metadata > for Workspace { - inner : cargo_metadata::Dependency, + fn from( metadata : cargo_metadata::Metadata ) -> Self + { + // SAFE: `workspace_root` is a path to a`Cargo.toml` file, therefor the parent is the directory + let path = metadata.workspace_root.as_std_path().parent().unwrap().to_path_buf(); + let crate_dir = CrateDir::try_from( path ).unwrap(); + Self + { + metadata, + crate_dir, + } + } } - - impl Dependency + + impl Workspace { - /// The file system path for a local path dependency. - /// Only produced on cargo 1.51+ - pub fn path( &self ) -> Option< Utf8PathBuf > + + /// Returns list of all packages + pub fn packages< 'a >( &'a self ) + -> core::iter::Map + < + slice::Iter< 'a, cargo_metadata::Package >, + impl Fn( &'a cargo_metadata::Package ) -> WorkspacePackageRef< 'a > + Clone, + > { - self.inner.path.clone() + self.metadata.packages.iter().map( WorkspacePackageRef::from ) } - - /// Name as given in the Cargo.toml. - pub fn name( &self ) -> String + + /// Returns the path to workspace root + pub fn workspace_root( &self ) -> CrateDir { - self.inner.name.clone() + // Safe because workspace_root.as_std_path() is always a path to a directory + CrateDir::try_from( self.metadata.workspace_root.as_std_path() ).unwrap() } - - /// The kind of dependency this is. 
- pub fn kind( &self ) -> DependencyKind + + /// Returns the path to target directory + pub fn target_directory( &self ) -> &std::path::Path { - match self.inner.kind - { - cargo_metadata::DependencyKind::Normal => DependencyKind::Normal, - cargo_metadata::DependencyKind::Development => DependencyKind::Development, - cargo_metadata::DependencyKind::Build => DependencyKind::Build, - cargo_metadata::DependencyKind::Unknown => DependencyKind::Unknown, - } + self.metadata.target_directory.as_std_path() } - - /// he required version - pub fn req( &self ) -> semver::VersionReq + + /// Find a package by its manifest file path + pub fn package_find_by_manifest< 'a, P >( &'a self, manifest_file : P ) -> Option< WorkspacePackageRef< 'a > > + where + P : AsRef< std::path::Path >, { - self.inner.req.clone() + self + .packages() + .find( | &p | p.manifest_file().unwrap().as_ref() == manifest_file.as_ref() ) } - } - - impl From< cargo_metadata::Dependency > for Dependency - { - fn from( inner : cargo_metadata::Dependency ) -> Self + + /// Filter of packages. + pub fn packages_which< 'a >( &'a self ) -> PackagesFilterFormer< 'a > { - Self - { - inner - } + // PackagesFilter::new( self ) + PackagesFilter::former().workspace( self ) } - } - /// Dependencies can come in three kinds - #[ derive( Eq, PartialEq, Debug ) ] - pub enum DependencyKind - { - /// The 'normal' kind - Normal, - /// Those used in tests only - Development, - /// Those used in build scripts only - Build, - /// The 'unknown' kind - Unknown, } - - /// Stores information about current workspace. - #[ derive( Debug, Clone ) ] - pub struct Workspace + + + #[ derive( Former ) ] + // #[ debug ] + #[ allow( missing_debug_implementations ) ] + pub struct PackagesFilter< 'a > { - metadata : Option< cargo_metadata::Metadata >, - manifest_dir : CrateDir, + workspace : &'a Workspace, + crate_dir : Box< dyn PackageFilter >, + manifest_file : Box< dyn PackageFilter >, } - /// Represents errors related to workspace operations. - #[ derive( Debug, Error ) ] - pub enum WorkspaceError + pub trait PackageFilter { - /// Metadata is non. - #[ error( "Metadata is non " ) ] - MetadataError, + fn include( &self, package : WorkspacePackageRef< '_ > ) -> bool; } - impl Workspace + impl Default for Box< dyn PackageFilter > { - /// Load data from current directory - pub fn from_current_path() -> Result< Self > + fn default() -> Self { - let current_path = AbsolutePath::try_from( std::env::current_dir().unwrap_or_default() )?; - let metadata = cargo_metadata::MetadataCommand::new().no_deps().exec().context("fail to load CargoMetadata")?; - Ok( Self - { - metadata : Some( metadata ), - manifest_dir : CrateDir::try_from( current_path )?, - }) + Box::new( PackageFilterAll ) } + } - /// Load data from current directory - pub fn with_crate_dir( crate_dir : CrateDir ) -> Result< Self > + pub struct PackageFilterAll; + impl PackageFilter for PackageFilterAll + { + #[ inline( always ) ] + fn include( &self, _package : WorkspacePackageRef< '_ > ) -> bool { - Ok - ( - Self - { - metadata : Some( cargo_metadata::MetadataCommand::new().current_dir( crate_dir.as_ref() ).no_deps().exec().context( "fail to load CargoMetadata" )? 
), - manifest_dir : crate_dir, - } - ) + true } } - impl From< cargo_metadata::Metadata > for Workspace + pub struct PackageFilterCrateDir( CrateDir ); + impl PackageFilter for PackageFilterCrateDir { - fn from( value : cargo_metadata::Metadata ) -> Self + #[ inline( always ) ] + fn include( &self, package : WorkspacePackageRef< '_ > ) -> bool { - let path = value.workspace_root.as_std_path().parent().unwrap().to_path_buf(); - let path = AbsolutePath::try_from( path ).unwrap(); - Self - { - metadata : Some( value ), - manifest_dir : CrateDir::try_from( path ).unwrap(), - } + self.0 == package.crate_dir().unwrap() } } - impl Workspace + impl From< CrateDir > for Box< dyn PackageFilter > { - /// Load data from the current location or from cache - // FIX : Maybe unsafe. Take metadata of workspace in current dir. - pub fn load( &mut self ) -> Result< &mut Self > + #[ inline( always ) ] + fn from( src : CrateDir ) -> Self { - if self.metadata.is_none() - { - let metadata = Self::with_crate_dir( self.manifest_dir.clone() )?.metadata.unwrap(); - _ = self.metadata.insert( metadata ); - } - - Ok( self ) + Box::new( PackageFilterCrateDir( src ) ) } + } - /// Force loads data from the current location - // FIX : Maybe unsafe. Take metadata of workspace in current dir. - pub fn force_reload( &mut self ) -> Result< &mut Self > + pub struct PackageFilterManifestFile( ManifestFile ); + impl PackageFilter for PackageFilterManifestFile + { + #[ inline( always ) ] + fn include( &self, package : WorkspacePackageRef< '_ > ) -> bool { - let metadata = Self::with_crate_dir( self.manifest_dir.clone() )?.metadata.unwrap(); - _ = self.metadata.insert( metadata ); - - Ok( self ) + self.0 == package.manifest_file().unwrap() } } - impl Workspace + impl From< ManifestFile > for Box< dyn PackageFilter > { - /// Returns list of all packages - pub fn packages( &self ) -> Result< Vec< WorkspacePackage >, WorkspaceError > + #[ inline( always ) ] + fn from( src : ManifestFile ) -> Self { - self - .metadata - .as_ref() - .ok_or_else( || WorkspaceError::MetadataError ) - .map( | metadata | metadata.packages.clone() ) - .map( | p | p.into_iter().map( WorkspacePackage::from ).collect() ) + Box::new( PackageFilterManifestFile( src ) ) } + } + impl< 'a > PackagesFilter< 'a > + { - /// Returns the path to workspace root - pub fn workspace_root( &self ) -> Result< &Path, WorkspaceError > + pub fn new( workspace : &'a Workspace ) -> Self { - Ok( self.metadata.as_ref().ok_or_else( || WorkspaceError::MetadataError )?.workspace_root.as_std_path() ) + Self + { + workspace, + crate_dir : Default::default(), + manifest_file : Default::default(), + } } - /// Returns the path to target directory - pub fn target_directory( &self ) -> Result< &Path, WorkspaceError > + #[ inline( always ) ] + pub fn iter( &'a self ) -> impl Iterator< Item = WorkspacePackageRef< 'a > > + Clone { - Ok( self.metadata.as_ref().ok_or_else( || WorkspaceError::MetadataError )?.target_directory.as_std_path() ) - } - /// Return discord url - pub fn discord_url( &self ) -> Result< Option< String >, WorkspaceError > - { - Ok( self.metadata.as_ref().ok_or_else( || WorkspaceError::MetadataError )?.workspace_metadata[ "discord_url" ].as_str().map( | url | url.to_string() ) ) - } + // self + // .workspace + // .packages() + // .find( | &p | p.manifest_file().unwrap().as_ref() == manifest_file.as_ref() ) - /// Return the master branch - pub fn master_branch( &self ) -> Result< Option< String >, WorkspaceError > - { - Ok( self.metadata.as_ref().ok_or_else( || 
WorkspaceError::MetadataError )?.workspace_metadata.get( "master_branch" ).and_then( | b | b.as_str() ).map( | b | b.to_string() ) ) - } + // let filter_crate_dir = if Some( crate_dir ) = self.crate_dir + // { + // | p | p.manifest_file().unwrap().as_ref() == manifest_file.as_ref() + // } - /// Return the repository url - pub fn repository_url( &self ) -> Result< Option< String >, WorkspaceError > - { - Ok( self.metadata.as_ref().ok_or_else( || WorkspaceError::MetadataError )?.workspace_metadata.get( "repo_url" ).and_then( | b | b.as_str() ).map( | b | b.to_string() ) ) + std::iter::empty() } - /// Return the workspace_name - pub fn workspace_name( &self ) -> Result< Option< String >, WorkspaceError > + } + + impl< 'a > PackagesFilterFormer< 'a > + { + #[ inline( always ) ] + // pub fn find< 'a >( self ) -> impl Iterator< Item = WorkspacePackageRef< 'a > > + Clone + pub fn find( self ) -> Option< WorkspacePackageRef< 'a > > { - Ok( self.metadata.as_ref().ok_or_else( || WorkspaceError::MetadataError )?.workspace_metadata.get( "workspace_name" ).and_then( | b | b.as_str() ).map( | b | b.to_string() ) ) + let formed = self.form(); + + formed + .workspace + .packages() + .find( | &p | + { + if !formed.crate_dir.include( p ) { return false }; + if !formed.manifest_file.include( p ) { return false }; + return true; + }) + .clone() + // .unwrap() + + // let filter_crate_dir = if Some( crate_dir ) = self.crate_dir + // { + // | p | p.manifest_file().unwrap().as_ref() == manifest_file.as_ref() + // } + + // std::iter::empty() } + } - /// Find a package by its manifest file path - pub fn package_find_by_manifest< P >( &self, manifest_path : P ) -> Option< WorkspacePackage > - where - P : AsRef< Path >, + impl Entries for Workspace + { + fn entries( &self ) -> impl IterTrait< '_, SourceFile > { self .packages() - .ok() - .and_then - ( - | packages | - packages - .iter() - .find( | &p | p.manifest_path().as_std_path() == manifest_path.as_ref() ) - .cloned() - ) + .flat_map( | package | package.entries().collect::< Vec< _ > >() ) + .collect::< Vec< _ > >() + .into_iter() } + } - /// Returns a graph of packages. - pub( crate ) fn graph( &self ) -> Graph< String, String > + impl Sources for Workspace + { + fn sources( &self ) -> impl IterTrait< '_, SourceFile > { - let packages = self.packages().unwrap(); - let module_package_filter : Option< Box< dyn Fn( &WorkspacePackage ) -> bool > > = Some - ( - Box::new( move | p | p.publish().is_none() ) - ); - let module_dependency_filter : Option< Box< dyn Fn( &WorkspacePackage, &Dependency ) -> bool > > = Some - ( - Box::new - ( - move | _, d | d.path().is_some() && d.kind() != DependencyKind::Development - ) - ); - let module_packages_map = packages::filter - ( - packages.as_slice(), - packages::FilterMapOptions { package_filter : module_package_filter, dependency_filter : module_dependency_filter }, - ); - - graph::construct( &module_packages_map ).map( | _, x | x.to_string(), | _, x | x.to_string() ) + self + .packages() + .flat_map( | package | package.sources().collect::< Vec< _ > >() ) + .collect::< Vec< _ > >().into_iter() + // .into_iter() } } + } // crate::mod_interface! 
{ + exposed use WorkspaceInitError; exposed use Workspace; - orphan use WorkspaceError; - protected use WorkspacePackage; - protected use Dependency; - protected use DependencyKind; } diff --git a/module/move/willbe/src/entity/workspace_graph.rs b/module/move/willbe/src/entity/workspace_graph.rs new file mode 100644 index 0000000000..01e17d7846 --- /dev/null +++ b/module/move/willbe/src/entity/workspace_graph.rs @@ -0,0 +1,38 @@ +pub( crate ) mod private +{ + use crate::*; + + /// Returns a graph of packages. + pub fn graph( workspace : &Workspace ) -> petgraph::Graph< String, String > + { + let packages = workspace.packages(); + let module_package_filter : Option< Box< dyn Fn( WorkspacePackageRef< '_ > ) -> bool > > = Some + ( + Box::new( move | p | p.publish().is_none() ) + ); + let module_dependency_filter : Option< Box< dyn Fn( WorkspacePackageRef< '_ >, DependencyRef< '_ > ) -> bool > > = Some + ( + Box::new + ( + move | _, d | d.crate_dir().is_some() && d.kind() != DependencyKind::Development + ) + ); + let module_packages_map = packages::filter + ( + // packages.as_slice(), + packages, + packages::FilterMapOptions { package_filter : module_package_filter, dependency_filter : module_dependency_filter }, + ); + + graph::construct( &module_packages_map ).map( | _, x | x.to_string(), | _, x | x.to_string() ) + } +} + +// + +crate::mod_interface! +{ + protected use graph; +} + +// xxx : move \ No newline at end of file diff --git a/module/move/willbe/src/entity/workspace_md_extension.rs b/module/move/willbe/src/entity/workspace_md_extension.rs new file mode 100644 index 0000000000..bbc5f06da9 --- /dev/null +++ b/module/move/willbe/src/entity/workspace_md_extension.rs @@ -0,0 +1,70 @@ +/// Internal namespace. +pub( crate ) mod private +{ + use crate::*; + + /// Md's extension for workspace + pub trait WorkspaceMdExtension + { + /// Return discord url + fn discord_url( &self ) -> Option< String >; + + /// Return the master branch + fn master_branch( &self ) -> Option< String >; + + /// Return the repository url + fn repository_url( &self ) -> Option< String >; + + /// Return the workspace_name + fn workspace_name( &self ) -> Option< String >; + } + + impl WorkspaceMdExtension for Workspace + { + fn discord_url( &self ) -> Option< String > + { + self + .metadata + .workspace_metadata[ "discord_url" ] + .as_str() + .map( | url | url.to_string() ) + } + + fn master_branch( &self ) -> Option< String > + { + self + .metadata + .workspace_metadata + .get( "master_branch" ) + .and_then( | b | b.as_str() ) + .map( | b | b.to_string() ) + } + + fn repository_url( &self ) -> Option< String > + { + self + .metadata + .workspace_metadata + .get( "repo_url" ) + .and_then( | b | b.as_str() ) + .map( | b | b.to_string() ) + } + + fn workspace_name( &self ) -> Option< String > + { + self + .metadata + .workspace_metadata + .get( "workspace_name" ) + .and_then( | b | b.as_str() ) + .map( | b | b.to_string() ) + } + } + +} + + +crate::mod_interface! 
+{ + protected use WorkspaceMdExtension; +} diff --git a/module/move/willbe/src/entity/workspace_package.rs b/module/move/willbe/src/entity/workspace_package.rs new file mode 100644 index 0000000000..6ecada7108 --- /dev/null +++ b/module/move/willbe/src/entity/workspace_package.rs @@ -0,0 +1,212 @@ +mod private +{ + use crate::*; + use macros::kw; + use collection::BTreeMap; + use serde_json::Value; + + use std:: + { + borrow::Cow, + }; + + // xxx : qqq : Deref, DerefMut, AsRef, AsMut + + /// Facade for cargo_metadata::Package + #[ derive( Debug, Clone, Copy ) ] + #[ repr( transparent ) ] + pub struct WorkspacePackageRef< 'a > + { + // #[ serde( flatten ) ] + inner : &'a cargo_metadata::Package, + } + + impl< 'a > From< &'a cargo_metadata::Package > for WorkspacePackageRef< 'a > + { + fn from( inner : &'a cargo_metadata::Package ) -> Self + { + Self + { + inner + } + } + } + + impl< 'a > WorkspacePackageRef< 'a > + { + /// The name field as given in the Cargo.toml + pub fn name( &'a self ) -> &'a str + { + &self.inner.name + } + + /// List of dependencies of this particular package + pub fn dependencies( &'a self ) + -> core::iter::Map + < + core::slice::Iter< 'a, cargo_metadata::Dependency >, + fn( &'a cargo_metadata::Dependency ) -> DependencyRef< 'a >, + > + { + fn dependency_from( dependency : &cargo_metadata::Dependency ) -> DependencyRef< '_ > + { + dependency.into() + } + self.inner.dependencies.iter().map( dependency_from ) + } + + /// Path to the manifest Cargo.toml + pub fn manifest_file( &self ) -> Result< ManifestFile, PathError > + { + self.inner.manifest_path.as_path().try_into() + } + + /// Path to the directory with manifest Cargo.toml. + pub fn crate_dir( &self ) -> Result< CrateDir, PathError > + { + // SAFE because `manifest_path containing the Cargo.toml` + self.inner.manifest_path.as_path().parent().unwrap().try_into() + } + + /// The version field as specified in the Cargo.toml + pub fn version( &self ) -> semver::Version + { + self.inner.version.clone() + } + + /// List of registries to which this package may be published (derived from the publish field). + /// Publishing is unrestricted if None, and forbidden if the Vec is empty. + /// This is always None if running with a version of Cargo older than 1.39. + pub fn publish( &self ) -> Option< &Vec< String > > + { + self.inner.publish.as_ref() + } + + ///Contents of the free form package.metadata section. + /// This contents can be serialized to a struct using serde: + /// ``` rust + /// use serde::Deserialize; + /// use serde_json::json; + /// + /// #[ derive( Debug, Deserialize ) ] + /// struct SomePackageMetadata + /// { + /// some_value : i32, + /// } + /// + /// fn main() + /// { + /// let value = json! + /// ({ + /// "some_value" : 42, + /// }); + /// + /// let package_metadata : SomePackageMetadata = serde_json::from_value( value ).unwrap(); + /// assert_eq!( package_metadata.some_value, 42 ); + /// } + /// ``` + pub fn metadata( &self ) -> &Value + { + &self.inner.metadata + } + + /// The repository URL as specified in the Cargo.toml + pub fn repository( &self ) -> Option< &String > + { + self.inner.repository.as_ref() + } + + /// Features provided by the crate, mapped to the features required by that feature. 
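`publish()` keeps the `cargo_metadata` convention: `None` means publishing is unrestricted, while an empty list forbids it. A tiny illustrative helper (hypothetical, not part of the patch):

```rust
// `package.publish()` -> can this package be published anywhere at all?
fn is_publishable( publish : Option< &Vec< String > > ) -> bool
{
  match publish
  {
    None => true,                                 // no restriction
    Some( registries ) => !registries.is_empty(), // only to the listed registries
  }
}
```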
+ pub fn features( &self ) -> &BTreeMap< String, Vec< String > > + { + &self.inner.features + } + } + + impl< 'a > Entries for WorkspacePackageRef< 'a > + { + fn entries( &self ) -> impl IterTrait< '_, SourceFile > + { + self.inner.targets.iter().map( | target | + { + let src_path = &target.src_path; + let source : SourceFile = src_path.try_into().expect( &format!( "Illformed path to source file {src_path}" ) ); + // println!( " -- {:?} {:?}", source, target.kind ); + source + }) + } + } + + impl< 'a > Sources for WorkspacePackageRef< 'a > + { + fn sources( &self ) -> impl IterTrait< '_, SourceFile > + { + use walkdir::WalkDir; + let crate_dir = self.crate_dir().unwrap(); + WalkDir::new( crate_dir ) + .into_iter() + .filter_map( Result::ok ) + .filter( | e | e.path().extension().map_or( false, | ext | ext == "rs" ) ) + .map( | e | SourceFile::try_from( e.path() ).unwrap() ) + .collect::< Vec< _ > >() + .into_iter() + } + } + + impl< 'a > CodeItems for WorkspacePackageRef< 'a > + { + fn items( &self ) -> impl IterTrait< '_, syn::Item > + { + self + .sources() + .flat_map( | source | source.items().collect::< Vec< _ > >().into_iter() ) + .collect::< Vec< _ > >().into_iter() + } + } + + impl< 'a > AsCode for WorkspacePackageRef< 'a > + { + fn as_code< 'b >( &'b self ) -> std::io::Result< Cow< 'b, str > > + { + let mut results : Vec< String > = Vec::new(); + // zzz : introduce formatter + + for source in self.sources() + { + let code = source.as_code()?.into_owned(); + let mut filename = source + .as_ref() + .with_extension( "" ) + .file_name() + .expect( &format!( "Cant get file name of path {}", source.as_ref().display() ) ) + .to_string_lossy() + .replace( ".", "_" ); + + if kw::is( &filename ) + { + filename.push_str( "_rs" ); + } + + // qqq : xxx : use callbacks instead of expect + + results.push( format!( "// === Begin of File {}", source.as_ref().display() ) ); + results.push( format!( "mod {}\n{{\n", filename ) ); + results.push( code ); + results.push( "\n}".to_string() ); + results.push( format!( "// === End of File {}", source.as_ref().display() ) ); + + } + + let joined = results.join( "\n" ); + Ok( Cow::Owned( joined ) ) + } + } + +} + +// + +crate::mod_interface! +{ + exposed use WorkspacePackageRef; +} diff --git a/module/move/willbe/src/lib.rs b/module/move/willbe/src/lib.rs index 3f0adc2fe1..69d0e6f1f9 100644 --- a/module/move/willbe/src/lib.rs +++ b/module/move/willbe/src/lib.rs @@ -3,10 +3,7 @@ #![ doc( html_root_url = "https://docs.rs/willbe/" ) ] #![ doc = include_str!( concat!( env!( "CARGO_MANIFEST_DIR" ), "/", "Readme.md" ) ) ] -use mod_interface::mod_interface; -/// Micro wtools -pub mod wtools; -use wtools::*; +pub use mod_interface::mod_interface; /// Internal namespace. pub( crate ) mod private @@ -18,14 +15,14 @@ pub( crate ) mod private /// It then terminates the program with an exit code of 1 to indicate an error due to the lack of input. /// /// Do not support interactive mode. - pub fn run( args : Vec< String > ) -> Result< (), wtools::error::for_app::Error > + pub fn run( args : Vec< String > ) -> Result< (), error::untyped::Error > { #[ cfg( feature = "tracing" ) ] { tracing_subscriber::fmt().pretty().init(); } - let args = args.into_iter().skip( 1 ).collect::< Vec< String > >(); + let args : Vec< String > = args.into_iter().skip( 1 ).collect(); let ca = command::ca() .help_variants( [ wca::HelpVariants::General, wca::HelpVariants::SubjectCommand ] ) @@ -44,6 +41,7 @@ pub( crate ) mod private } } + } mod_interface! 
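The `Sources` implementation above walks the crate directory with `walkdir` and keeps only `*.rs` files. The same pattern in isolation (hypothetical function name):

```rust
use std::path::{ Path, PathBuf };

// Collect every Rust source file under a crate directory.
fn rust_sources( crate_dir : &Path ) -> Vec< PathBuf >
{
  walkdir::WalkDir::new( crate_dir )
  .into_iter()
  .filter_map( Result::ok ) // skip entries that could not be read
  .filter( | entry | entry.path().extension().map_or( false, | ext | ext == "rs" ) )
  .map( | entry | entry.path().to_path_buf() )
  .collect()
}
```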
diff --git a/module/move/willbe/src/tool/_path.rs b/module/move/willbe/src/tool/_path.rs deleted file mode 100644 index a8209366f7..0000000000 --- a/module/move/willbe/src/tool/_path.rs +++ /dev/null @@ -1,170 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use std::path::{ Path, PathBuf }; - use std::time::{ SystemTime, UNIX_EPOCH }; - use cargo_metadata::camino::{ Utf8Path, Utf8PathBuf }; - - /// Absolute path. - #[ derive( Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash ) ] - pub struct AbsolutePath( PathBuf ); - - impl TryFrom< &str > for AbsolutePath - { - type Error = std::io::Error; - - fn try_from( value : &str ) -> Result< Self, Self::Error > - { - let value = PathBuf::from( value ); - Ok( Self( canonicalize( value )? ) ) - } - } - - impl TryFrom< String > for AbsolutePath - { - type Error = std::io::Error; - - fn try_from( value : String ) -> Result< Self, Self::Error > - { - let value = PathBuf::from( value ); - Ok( Self( canonicalize( value )? ) ) - } - } - - impl TryFrom< PathBuf > for AbsolutePath - { - type Error = std::io::Error; - - fn try_from( value : PathBuf ) -> Result< Self, Self::Error > - { - Ok( Self( canonicalize( value )? ) ) - } - } - - impl TryFrom< &Path > for AbsolutePath - { - type Error = std::io::Error; - - fn try_from( value : &Path ) -> Result< Self, Self::Error > - { - Ok( Self( canonicalize( value )? ) ) - } - } - - impl TryFrom< Utf8PathBuf > for AbsolutePath - { - type Error = std::io::Error; - - fn try_from( value : Utf8PathBuf ) -> Result< Self, Self::Error > - { - AbsolutePath::try_from( value.as_std_path() ) - } - } - - impl TryFrom< &Utf8Path > for AbsolutePath - { - type Error = std::io::Error; - - fn try_from( value : &Utf8Path ) -> Result< Self, Self::Error > - { - AbsolutePath::try_from( value.as_std_path() ) - } - } - - impl AsRef< Path > for AbsolutePath - { - fn as_ref( &self ) -> &Path - { - self.0.as_ref() - } - } - - impl AbsolutePath - { - /// Returns the Path without its final component, if there is one. - /// Returns None if the path terminates in a root or prefix, or if it's the empty string. - pub fn parent( &self ) -> Option< AbsolutePath > - { - self.0.parent().map( PathBuf::from ).map( AbsolutePath ) - } - - /// Creates an owned `AbsolutePath` with path adjoined to self. - pub fn join< P >( &self, path : P ) -> AbsolutePath - where - P : AsRef< Path >, - { - Self::try_from( self.0.join( path ) ).unwrap() - } - } - - // qqq : for Petro : for Bohdan : bad. move out - - /// Check if path has a glob. - #[ allow( dead_code ) ] - fn glob_is( path : &str ) -> bool - { - let glob_chars = "*?[{"; - let mut last_char = ' '; - for char in path.chars() - { - if last_char != '\\' && glob_chars.contains( char ) - { - return true; - } - - last_char = char; - } - - false - } - - /// Returns the canonical, absolute form of the path with all intermediate components normalized and symbolic links resolved. - pub fn canonicalize( path : impl AsRef< Path > ) -> std::io::Result< PathBuf > - { - let path = path.as_ref().canonicalize()?; - - // In Windows the regular/legacy paths (C :\foo) are supported by all programs, but have lots of bizarre restrictions for backwards compatibility with MS-DOS. - // And there are Windows NT UNC paths (\\?\C :\foo), which are more robust and with fewer gotchas, but are rarely supported by Windows programs. Even Microsoft’s own! 
- // - // https://github.com/rust-lang/rust/issues/42869 - #[ cfg( target_os = "windows" ) ] - let path = - { - const VERBATIM_PREFIX : &str = r#"\\?\"#; - let p = path.display().to_string(); - if p.starts_with( VERBATIM_PREFIX ) - { - PathBuf::from( &p[ VERBATIM_PREFIX.len() .. ] ) - } - else - { - path.into() - } - }; - - Ok( path ) - } - - /// Generate name based on system time - pub fn unique_folder_name() -> crate::wtools::error::Result< String > - { - let timestamp = SystemTime::now() - .duration_since( UNIX_EPOCH )? - .as_nanos(); - - Ok( format!( "{}", timestamp ) ) - } - -} - -crate::mod_interface! -{ - // qqq : remove this? - // protected use glob_is; - protected use canonicalize; - protected use unique_folder_name; - - protected use AbsolutePath; -} - -// qqq : for Petro : for Bohdan : rid off this file. use proper_path_tools diff --git a/module/move/willbe/src/tool/cargo.rs b/module/move/willbe/src/tool/cargo.rs index 32ef54e375..3463201de5 100644 --- a/module/move/willbe/src/tool/cargo.rs +++ b/module/move/willbe/src/tool/cargo.rs @@ -1,29 +1,59 @@ +/// Internal namespace. mod private { - use std::ffi::OsString; - use crate::*; + #[ allow( unused_imports ) ] + use crate::tool::*; + use std::ffi::OsString; use std::path::PathBuf; - use error_tools::err; - use error_tools::for_app::format_err; + use error::err; + use error::untyped::format_err; use former::Former; - use process_tools::process::*; - use wtools::error::Result; - use channel::Channel; + use process_tools::process; + // use process_tools::process::*; + // qqq : for Bohdan : bad + // use error::Result; + // qqq : group dependencies + + // qqq : for Bohdan : bad : tools can't depend on entitties! + use crate::channel::Channel; + + // aaa : documentation /// aaa : documented - /// Represents pack options + /// Represents options for packaging a project. + /// + /// The `PackOptions` struct encapsulates various options that can be configured when packaging a project, + /// including the path to the project, the distribution channel, and various flags for controlling the behavior of the packaging process. #[ derive( Debug, Former, Clone ) ] pub struct PackOptions { - pub( crate ) path : PathBuf, + /// The path to the project to be packaged. + /// + /// This field specifies the file system path where the project is located. + pub( crate ) path : PathBuf, + /// The distribution channel for the packaging project. + /// + /// This field specifies the channel through which the packaged project will be distributed. + /// pub( crate ) channel : Channel, + /// Flag indicating whether to allow packaging even if the working directory is dirty. + /// + /// This field is set to `true` by default, meaning that packaging will proceed even if there are uncommitted changes. #[ former( default = true ) ] pub( crate ) allow_dirty : bool, // qqq : rename to checking_changes + /// Flag indicating whether to skip verification checks. #[ former( default = false ) ] - // qqq : don't abuse negative form, rename to checking_consistency - pub( crate ) no_verify : bool, + // aaa : don't abuse negative form, rename to checking_consistency + // renamed and changed logic + pub( crate ) checking_consistency : bool, + /// An optional temporary path to be used during packaging. + /// + /// This field may contain a path to a temporary directory that will be used during the packaging process. pub( crate ) temp_path : Option< PathBuf >, + /// Flag indicating whether to perform a dry run. 
+ /// + /// This field specifies whether the packaging process should be a dry run, meaning that no actual changes will be made. pub( crate ) dry : bool, } @@ -43,7 +73,7 @@ mod private [ "run".to_string(), self.channel.to_string(), "cargo".into(), "package".into() ] .into_iter() .chain( if self.allow_dirty { Some( "--allow-dirty".to_string() ) } else { None } ) - .chain( if self.no_verify { Some( "--no-verify".to_string() ) } else { None } ) + .chain( if !self.checking_consistency { Some( "--no-verify".to_string() ) } else { None } ) .chain( self.temp_path.clone().map( | p | vec![ "--target-dir".to_string(), p.to_string_lossy().into() ] ).into_iter().flatten() ) .collect() } @@ -62,7 +92,9 @@ mod private track_caller, tracing::instrument( fields( caller = ?{ let x = std::panic::Location::caller(); ( x.file(), x.line() ) } ) ) )] - pub fn pack( args : PackOptions ) -> Result< Report > + // qqq : should be typed error, apply err_with + // qqq : don't use 1-prameter Result + pub fn pack( args : PackOptions ) -> Result< process::Report > { let ( program, options ) = ( "rustup", args.to_pack_args() ); @@ -70,7 +102,7 @@ mod private { Ok ( - Report + process::Report { command : format!( "{program} {}", options.join( " " ) ), out : String::new(), @@ -82,7 +114,7 @@ mod private } else { - Run::former() + process::Run::former() .bin_path( program ) .args( options.into_iter().map( OsString::from ).collect::< Vec< _ > >() ) .current_path( args.path ) @@ -116,7 +148,7 @@ mod private fn as_publish_args( &self ) -> Vec< String > { let target_dir = self.temp_path.clone().map( | p | vec![ "--target-dir".to_string(), p.to_string_lossy().into() ] ); - [ "publish".to_string() ].into_iter().chain( target_dir.into_iter().flatten() ).collect::< Vec< String > >() + [ "publish".to_string() ].into_iter().chain( target_dir.into_iter().flatten() ).collect() } } @@ -127,7 +159,8 @@ mod private track_caller, tracing::instrument( fields( caller = ?{ let x = std::panic::Location::caller(); ( x.file(), x.line() ) } ) ) )] - pub fn publish( args : PublishOptions ) -> Result< Report > + pub fn publish( args : PublishOptions ) -> Result< process::Report > + // qqq : don't use 1-prameter Result { let ( program, arguments) = ( "cargo", args.as_publish_args() ); @@ -135,7 +168,7 @@ mod private { Ok ( - Report + process::Report { command : format!( "{program} {}", arguments.join( " " ) ), out : String::new(), @@ -148,10 +181,10 @@ mod private else { let mut results = Vec::with_capacity( args.retry_count + 1 ); - let run_args = arguments.into_iter().map( OsString::from ).collect::< Vec< _ > >(); + let run_args : Vec< _ > = arguments.into_iter().map( OsString::from ).collect(); for _ in 0 .. args.retry_count + 1 { - let result = Run::former() + let result = process::Run::former() .bin_path( program ) .args( run_args.clone() ) .current_path( &args.path ) @@ -168,7 +201,7 @@ mod private } else { - Err( results.remove( 0 ) ).map_err( | report | err!( report.to_string() ) ) + Err( results.remove( 0 ) ).map_err( | report | err!( report.to_string() ) ) } } } @@ -184,4 +217,4 @@ crate::mod_interface! protected use PublishOptions; protected use PackOptions; -} +} \ No newline at end of file diff --git a/module/move/willbe/src/tool/collection.rs b/module/move/willbe/src/tool/collection.rs new file mode 100644 index 0000000000..42254b9618 --- /dev/null +++ b/module/move/willbe/src/tool/collection.rs @@ -0,0 +1,12 @@ +/// Internal namespace. +pub( crate ) mod private +{ +} + +crate::mod_interface! 
+{ + + use ::collection_tools; + protected use ::collection_tools::protected::*; + +} diff --git a/module/move/willbe/src/tool/error.rs b/module/move/willbe/src/tool/error.rs new file mode 100644 index 0000000000..ff6f01b439 --- /dev/null +++ b/module/move/willbe/src/tool/error.rs @@ -0,0 +1,55 @@ +/// Internal namespace. +pub( crate ) mod private +{ + #[ allow( unused_imports ) ] + use crate::tool::*; + + use ::error_tools::protected::*; + + // qqq : for for Petro : for Bohdan : good one, apply it to all code + + /// This trait can be used to add extra information to an error, creating a tuple of the additional + /// context and the original error. This can be particularly useful for error handling where you + /// want to include more context or details in the error without losing the original error value. + pub trait ErrWith< V, R, E > + { + /// Takes a closure `f` that returns a value of type `V`, and uses it to wrap an error of type `(V, E1)` + /// in the context of a `Result` of type `R`. + fn err_with< F >( self, f : F ) -> std::result::Result< R, ( V, E ) > + where + F : FnOnce() -> V; + } + + impl< V, R, E1, E2 > ErrWith< V, R, E1 > for std::result::Result< R, E2 > + where + E2 : Into< E1 >, + { + fn err_with< F >( self, f : F ) -> std::result::Result< R, ( V, E1 ) > + where + F : FnOnce() -> V, + { + self.map_err( | e | ( f(), e.into() ) ) + } + } + + /// A type alias for a `Result` that contains an error which is a tuple of a report and an original error. + /// + /// This is useful when you want to report additional information along with an error. The `ResultWithReport` type + /// helps in defining such results more concisely. + pub type ResultWithReport< Report, Error > = Result< Report, ( Report, Error ) >; + + +} + +crate::mod_interface! +{ + // #![ debug ] + + use ::error_tools; + protected use ::error_tools::protected::*; + + exposed use ErrWith; + exposed use ResultWithReport; + exposed use ::error_tools::Result; + +} diff --git a/module/move/willbe/src/tool/files.rs b/module/move/willbe/src/tool/files.rs index 8dfbc4f78c..bb684e8bbe 100644 --- a/module/move/willbe/src/tool/files.rs +++ b/module/move/willbe/src/tool/files.rs @@ -1,15 +1,17 @@ - /// Internal namespace. pub( crate ) mod private { + #[ allow( unused_imports ) ] + use crate::tool::*; + use std::path::{ Path, PathBuf }; /// /// Find paths. /// - /* rrr : Dmytro : dubious prototype */ + /* xxx : check */ pub fn find< P, S >( base_dir : P, patterns : &[ S ] ) -> Vec< PathBuf > where P : AsRef< Path >, @@ -21,7 +23,7 @@ pub( crate ) mod private .into_iter() .filter_map( Result::ok ) .map( | s | s.path().to_path_buf() ) - .collect::< Vec< PathBuf > >() + .collect() } /// Check if path is valid. diff --git a/module/move/willbe/src/tool/git.rs b/module/move/willbe/src/tool/git.rs index c90dc08109..e85532b3b4 100644 --- a/module/move/willbe/src/tool/git.rs +++ b/module/move/willbe/src/tool/git.rs @@ -1,11 +1,14 @@ +/// Internal namespace. mod private { - use crate::*; + #[ allow( unused_imports ) ] + use crate::tool::*; + use std::ffi::OsString; use std::path::Path; use process_tools::process::*; - use wtools::error::Result; - use wtools::error::err; + use error::err; + // qqq : group dependencies /// Adds changes to the Git staging area. /// @@ -18,8 +21,10 @@ mod private /// /// # Returns : /// Returns a result containing a report indicating the result of the operation. 
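The `ErrWith` trait and `ResultWithReport` alias introduced above are what the scattered `qqq : apply err_with` notes ask for. A minimal usage sketch, assuming the trait is in scope; the function and its string report are invented for illustration:

```rust
fn manifest_size( dir : &std::path::Path ) -> ResultWithReport< String, std::io::Error >
{
  let mut report = String::from( "reading manifest\n" );
  // `err_with` turns `Result< T, E >` into `Result< T, ( Report, E ) >`,
  // so the report gathered so far travels together with the error.
  let bytes = match std::fs::read( dir.join( "Cargo.toml" ) ).err_with( || report.clone() )
  {
    Ok( bytes ) => bytes,
    Err( ( report, error ) ) => return Err( ( report, error ) ),
  };
  report.push_str( &format!( "Cargo.toml is {} bytes\n", bytes.len() ) );
  Ok( report )
}
```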
+ // qqq : should be typed error, apply err_with #[ cfg_attr( feature = "tracing", tracing::instrument( skip( path, objects ), fields( path = %path.as_ref().display() ) ) ) ] pub fn add< P, Os, O >( path : P, objects : Os, dry : bool ) -> Result< Report > + // qqq : don't use 1-prameter Result where P : AsRef< Path >, Os : AsRef< [ O ] >, @@ -27,7 +32,7 @@ mod private { let objects = objects.as_ref().iter().map( | x | x.as_ref() ); - let ( program, args ) = ( "git", Some( "add" ).into_iter().chain( objects ).collect::< Vec< _ > >() ); + let ( program, args ) : ( _, Vec< _ > ) = ( "git", Some( "add" ).into_iter().chain( objects ).collect() ); if dry { @@ -65,8 +70,10 @@ mod private /// /// # Returns : /// Returns a result containing a report indicating the result of the operation. + // qqq : should be typed error, apply err_with #[ cfg_attr( feature = "tracing", tracing::instrument( skip( path, message ), fields( path = %path.as_ref().display(), message = %message.as_ref() ) ) ) ] pub fn commit< P, M >( path : P, message : M, dry : bool ) -> Result< Report > + // qqq : don't use 1-prameter Result where P : AsRef< Path >, M : AsRef< str >, @@ -108,8 +115,12 @@ mod private /// /// # Returns : /// Returns a result containing a report indicating the result of the operation. + + // qqq : should be typed error, apply err_with + #[ cfg_attr( feature = "tracing", tracing::instrument( skip( path ), fields( path = %path.as_ref().display() ) ) ) ] pub fn push< P >( path : P, dry : bool ) -> Result< Report > + // qqq : don't use 1-prameter Result where P : AsRef< Path >, { @@ -138,7 +149,7 @@ mod private .run().map_err( | report | err!( report.to_string() ) ) } } - + /// This function is a wrapper around the `git reset` command. /// /// # Args : @@ -150,13 +161,17 @@ mod private /// /// # Returns : /// This function returns a `Result` containing a `Report` if the command is executed successfully. The `Report` contains the command executed, the output -// git reset command wrapper + /// git reset command wrapper + + // qqq : should be typed error, apply err_with + pub fn reset< P >( path : P, hard : bool, commits_count : usize, dry : bool ) -> Result< Report > + // qqq : don't use 1-prameter Result where P : AsRef< Path >, { if commits_count < 1 { return Err( err!( "Cannot reset, the count of commits must be greater than 0" ) ) } - let ( program, args ) = + let ( program, args ) : ( _, Vec< _ > ) = ( "git", Some( "reset" ) @@ -164,7 +179,7 @@ mod private .chain( if hard { Some( "--hard" ) } else { None } ) .map( String::from ) .chain( Some( format!( "HEAD~{}", commits_count ) ) ) - .collect::< Vec< _ > >() + .collect() ); if dry @@ -200,6 +215,10 @@ mod private /// # Returns /// /// A `Result` containing a `Report`, which represents the result of the command execution. + + // qqq : should be typed error, apply err_with + // qqq : don't use 1-prameter Result + pub fn ls_remote_url< P >( path : P ) -> Result< Report > where P : AsRef< Path >, diff --git a/module/move/willbe/src/tool/graph.rs b/module/move/willbe/src/tool/graph.rs index db91c15afb..8e9f51ac91 100644 --- a/module/move/willbe/src/tool/graph.rs +++ b/module/move/willbe/src/tool/graph.rs @@ -1,17 +1,20 @@ /// Internal namespace. 
pub( crate ) mod private { + #[ allow( unused_imports ) ] use crate::*; + // use crate::tool::*; + // qqq : bad : for Bohdan : asterist only crate::* and prelude::* + use std:: { ops::Index, fmt::Debug, hash::Hash, - collections::{ HashMap, HashSet } }; - use std::collections::VecDeque; - use std::path::PathBuf; + use collection::{ HashMap, HashSet, VecDeque }; + use path::PathBuf; use petgraph:: { graph::Graph, @@ -20,9 +23,13 @@ pub( crate ) mod private use petgraph::graph::NodeIndex; use petgraph::prelude::*; - use error_tools::for_lib::Error; - use error::Result; + use error:: + { + typed::Error, + }; + use package::{ Package, publish_need }; + // qqq : for Bohdan : bad : tools can't depend on entitties! #[ derive( Debug, Error ) ] pub enum GraphError< T : Debug > @@ -40,8 +47,11 @@ pub( crate ) mod private /// The graph with all accepted packages pub fn construct< PackageIdentifier > ( - packages : &HashMap< PackageIdentifier, - HashSet< PackageIdentifier > > + packages : &HashMap + < + PackageIdentifier, + HashSet< PackageIdentifier >, + > ) -> Graph< &PackageIdentifier, &PackageIdentifier > where @@ -86,7 +96,7 @@ pub( crate ) mod private ( graph : Graph< &'a PackageIdentifier, &'a PackageIdentifier > ) - -> Result< Vec< PackageIdentifier >, GraphError< PackageIdentifier > > + -> error::Result< Vec< PackageIdentifier >, GraphError< PackageIdentifier > > { match pg_toposort( &graph, None ) { @@ -96,7 +106,7 @@ pub( crate ) mod private .iter() .rev() .map( | dep_idx | ( *graph.node_weight( *dep_idx ).unwrap() ).clone() ) - .collect::< Vec< _ > >() + .collect() ), Err( index ) => Err( GraphError::Cycle( ( *graph.index( index.node_id() ) ).clone() ) ), // aaa : for Bohdan : bad, make proper error handling @@ -116,37 +126,37 @@ pub( crate ) mod private pub fn topological_sort_with_grouping< 'a, PackageIdentifier : Clone + std::fmt::Debug > ( graph : Graph< &'a PackageIdentifier, &'a PackageIdentifier > - ) - -> Vec< Vec< PackageIdentifier > > + ) + -> Vec< Vec< PackageIdentifier > > { let mut in_degree = HashMap::new(); - for node in graph.node_indices() + for node in graph.node_indices() { in_degree.insert( node, graph.neighbors_directed( node, Incoming ).count() ); } let mut roots = VecDeque::new(); - for ( node, °ree ) in in_degree.iter() + for ( node, °ree ) in in_degree.iter() { - if degree == 0 + if degree == 0 { roots.push_back( *node ); } } let mut result = Vec::new(); - while !roots.is_empty() + while !roots.is_empty() { let mut next_roots = Vec::new(); let mut group = Vec::new(); - while let Some( node ) = roots.pop_front() + while let Some( node ) = roots.pop_front() { group.push( node ); - for edge in graph.neighbors( node ) + for edge in graph.neighbors( node ) { let degree = in_degree.get_mut( &edge ).unwrap(); *degree -= 1; - if *degree == 0 + if *degree == 0 { next_roots.push( edge ); } @@ -158,12 +168,12 @@ pub( crate ) mod private result .into_iter() .map - ( - | vec | + ( + | vec | vec .iter() .map( | dep_idx | ( *graph.node_weight( *dep_idx ).unwrap() ).clone() ) - .collect() + .collect() ) .rev() .collect() @@ -236,14 +246,17 @@ pub( crate ) mod private /// # Returns /// /// A new `Graph` with the nodes that are not required to be published removed. 
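`toposort` above wraps `petgraph::algo::toposort` and reverses its output; a self-contained illustration with made-up package names (in this sketch an edge points from a dependent to its dependency):

```rust
fn publish_order() -> Result< Vec< &'static str >, String >
{
  use petgraph::graph::Graph;
  let mut graph = Graph::< &str, () >::new();
  let core = graph.add_node( "core" );
  let tools = graph.add_node( "tools" );
  let app = graph.add_node( "app" );
  graph.add_edge( tools, core, () );
  graph.add_edge( app, tools, () );
  match petgraph::algo::toposort( &graph, None )
  {
    // Reverse the order, as the wrapper above does, so dependencies come first.
    Ok( order ) => Ok( order.iter().rev().map( | i | graph[ *i ] ).collect() ),
    Err( cycle ) => Err( format!( "cycle through {}", graph[ cycle.node_id() ] ) ),
  }
}
// publish_order() == Ok( vec![ "core", "tools", "app" ] )
```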
- pub fn remove_not_required_to_publish - ( - package_map : &HashMap< String, Package >, - graph : &Graph< String, String >, - roots : &[ String ], + + // qqq : for Bohdan : typed error + pub fn remove_not_required_to_publish< 'a > + ( + package_map : &HashMap< String, Package< 'a > >, + graph : &Graph< String, String >, + roots : &[ String ], temp_path : Option< PathBuf >, - ) - -> Result< Graph< String, String > > + ) + -> error::Result< Graph< String, String > > + // qqq : don't use 1-prameter Result { let mut nodes = HashSet::new(); let mut cleared_graph = Graph::new(); @@ -264,9 +277,9 @@ pub( crate ) mod private } let package = package_map.get( &graph[ n ] ).unwrap(); _ = cargo::pack - ( + ( cargo::PackOptions::former() - .path( package.crate_dir().absolute_path().as_ref().to_path_buf() ) + .path( package.crate_dir().absolute_path() ) .option_temp_path( temp_path.clone() ) .dry( false ) .allow_dirty( true ) diff --git a/module/move/willbe/src/tool/http.rs b/module/move/willbe/src/tool/http.rs index 4cd557ef59..6527044e75 100644 --- a/module/move/willbe/src/tool/http.rs +++ b/module/move/willbe/src/tool/http.rs @@ -1,7 +1,8 @@ /// Internal namespace. pub( crate ) mod private { - use crate::*; + #[ allow( unused_imports ) ] + use crate::tool::*; use std:: { @@ -9,7 +10,7 @@ pub( crate ) mod private fmt::Write, time::Duration }; - use wtools::error::{ for_app::Context, Result }; + use error::{ untyped::Context, Result }; use ureq::Agent; /// diff --git a/module/move/willbe/src/tool/iter.rs b/module/move/willbe/src/tool/iter.rs new file mode 100644 index 0000000000..3c75db35f0 --- /dev/null +++ b/module/move/willbe/src/tool/iter.rs @@ -0,0 +1,16 @@ +/// Internal namespace. +pub( crate ) mod private +{ +} + +// + +crate::mod_interface! +{ + use ::iter_tools; + protected use ::iter_tools::protected::*; +} + +// use ::iter_tools::protected::iter as xxx; +// use ::iter_tools::protected::iter2 as xxx2; +// use iter as xxx3; diff --git a/module/move/willbe/src/tool/macros.rs b/module/move/willbe/src/tool/macros.rs new file mode 100644 index 0000000000..b69d25643d --- /dev/null +++ b/module/move/willbe/src/tool/macros.rs @@ -0,0 +1,15 @@ +/// Internal namespace. +pub( crate ) mod private +{ +} + +crate::mod_interface! +{ + + use ::macro_tools; + protected use ::macro_tools::protected::*; + +} + +// use protected::macro_tools as xxx; +// use protected::macro_tools2 as xxx2; diff --git a/module/move/willbe/src/tool/mod.rs b/module/move/willbe/src/tool/mod.rs index 8f8e77635e..b104c31de3 100644 --- a/module/move/willbe/src/tool/mod.rs +++ b/module/move/willbe/src/tool/mod.rs @@ -1,17 +1,37 @@ crate::mod_interface! { - /// Make sha-1 hash for data. - layer sha; - orphan use super::sha; + /// Interaction module with the `cargo` utilities. + layer cargo; + orphan use super::cargo; + + /// Function and structures to work with collections. + layer collection; + orphan use super::collection; + + /// Errors handling. + layer error; + orphan use super::error; /// Operate over files. layer files; orphan use super::files; - + + /// Http requests. + layer http; + orphan use super::http; + + /// Iterating things. + layer iter; + orphan use super::iter; + + /// Work with paths. + layer macros; + orphan use super::macros; + /// Work with paths. - layer _path; - orphan use super::_path; + layer path; + orphan use super::path; /// Tools for working with dependencies graph. layer graph; @@ -25,10 +45,6 @@ crate::mod_interface! layer git; orphan use super::git; - /// Interaction module with the `cargo` utilities. 
- layer cargo; - orphan use super::cargo; - /// The parse function parses an input string into a HashMap where the keys are String and the values are of type Value. layer query; orphan use super::query; @@ -36,4 +52,26 @@ crate::mod_interface! /// Tools for parsing and extracting information from url. layer url; orphan use super::url; + + /// Tools for printing a tree + layer tree; + orphan use super::tree; + + /// Repository tools. + layer repository; + orphan use super::repository; + + exposed use ::former:: + { + Former, + Assign, + }; + + // xxx : check + // exposed use + // { + // ::former::Former, + // ::former::Assign, + // }; + } diff --git a/module/move/willbe/src/tool/path.rs b/module/move/willbe/src/tool/path.rs new file mode 100644 index 0000000000..03d2e08276 --- /dev/null +++ b/module/move/willbe/src/tool/path.rs @@ -0,0 +1,12 @@ +/// Internal namespace. +pub( crate ) mod private +{ +} + +crate::mod_interface! +{ + + use ::proper_path_tools; + protected use ::proper_path_tools::protected::*; + +} diff --git a/module/move/willbe/src/tool/query.rs b/module/move/willbe/src/tool/query.rs index 6686858821..61b1f5013e 100644 --- a/module/move/willbe/src/tool/query.rs +++ b/module/move/willbe/src/tool/query.rs @@ -1,14 +1,19 @@ +/// Internal namespace. mod private { - use crate::*; + #[ allow( unused_imports ) ] + use crate::tool::*; use std:: { str::FromStr, - collections::HashMap }; - use error_tools::for_app::bail; - use wtools::error::{ for_app::{ Error }, Result }; + use error:: + { + untyped::{ Error, bail }, + Result, + }; + use collection::HashMap; #[ derive( Debug, PartialEq, Eq, Clone ) ] /// Parser value enum diff --git a/module/move/willbe/src/tool/repository.rs b/module/move/willbe/src/tool/repository.rs new file mode 100644 index 0000000000..5cb5b61623 --- /dev/null +++ b/module/move/willbe/src/tool/repository.rs @@ -0,0 +1,61 @@ +/// Internal namespace. +pub( crate ) mod private +{ + #[ allow( unused_imports ) ] + use crate::tool::*; + + /// Searches for a README file in specific subdirectories of the given directory path. + /// + /// This function attempts to find a README file in the following subdirectories: ".github", + /// the root directory, and "./docs". It returns the path to the first found README file, or + /// `None` if no README file is found in any of these locations. + pub fn readme_path( dir_path : &std::path::Path ) -> Result< std::path::PathBuf, std::io::Error > + { + if let Some( path ) = readme_in_dir_find( &dir_path.join( ".github" ) ) + { + Ok( path ) + } + else if let Some( path ) = readme_in_dir_find( dir_path ) + { + Ok( path ) + } + else if let Some( path ) = readme_in_dir_find( &dir_path.join( "docs" ) ) + { + Ok( path ) + } + else + { + Err( std::io::Error::new( std::io::ErrorKind::NotFound, format!( "Fail to find README.md at {}", &dir_path.display() ) ) ) + } + } + + /// Searches for a file named "readme.md" in the specified directory path. + /// + /// Given a directory path, this function searches for a file named "readme.md" in the specified + /// directory. + fn readme_in_dir_find( path : &std::path::Path ) -> Option< std::path::PathBuf > + { + std::fs::read_dir( path ) + .ok()? + .filter_map( Result::ok ) + .filter( | p | p.path().is_file() ) + .filter_map( | f | + { + let l_f = f.file_name().to_ascii_lowercase(); + if l_f == "readme.md" + { + return Some( f.file_name() ) + } + None + }) + .max() + .map( std::path::PathBuf::from ) + } + +} + + +crate::mod_interface! 
+{ + protected use readme_path; +} diff --git a/module/move/willbe/src/tool/sha.rs b/module/move/willbe/src/tool/sha.rs deleted file mode 100644 index 5bb60bed23..0000000000 --- a/module/move/willbe/src/tool/sha.rs +++ /dev/null @@ -1,26 +0,0 @@ -/// Internal namespace. -pub( crate ) mod private -{ - use sha1::{ Sha1, Digest }; - - // zzz : not used - - /// - /// Make sha-1 hash for data. - /// - - pub fn hash( data : &[ u8 ] ) -> Vec< u8 > - { - let mut hasher = Sha1::new(); - hasher.update( data ); - let result = hasher.finalize(); - result.to_vec() - } -} - -// - -crate::mod_interface! -{ - orphan use hash; -} diff --git a/module/move/willbe/src/tool/template.rs b/module/move/willbe/src/tool/template.rs index ae07683d4b..411777dad5 100644 --- a/module/move/willbe/src/tool/template.rs +++ b/module/move/willbe/src/tool/template.rs @@ -1,49 +1,135 @@ +/// Internal namespace. mod private { - use std::collections::BTreeMap; - use std::fs; - use error_tools::for_app::Context; - use error_tools::Result; - use former::Former; - use wca::Props; - use std::path::Path; - use std::path::PathBuf; - use wca::Value; - use std::collections::HashMap; - - // qqq : for Viktor : is that trait really necessary? + #[ allow( unused_imports ) ] + use crate::tool::*; + + use std:: + { + fs, + path:: + { + Path, + PathBuf + }, + }; + use error::untyped::Context; + + // qqq : for Nikita : is that trait really necessary? // Template - remove // DeployTemplate - move here // DeployTemplateFiles - remove - /// Trait for creating a template for a file structure. - pub trait Template< F > : Sized - where - F : TemplateFiles + Default + /// Template for creating deploy files. + /// + /// Includes terraform deploy options to GCP, and Hetzner, + /// a Makefile for useful commands, and a key directory. + #[ derive( Debug ) ] + pub struct TemplateHolder { - /// Creates all files in the template. + /// Files of the template. + pub files : Vec< TemplateFileDescriptor >, + /// Parameters definitions. + pub parameters : TemplateParameters, + /// The values associated with the template. + pub values : TemplateValues, + } + + impl TemplateFiles for Vec< TemplateFileDescriptor > {} + + // qqq : for Viktor : why DeployTemplate can't be part of template.rs? + + impl TemplateHolder + { + /// Creates all files in the specified path using the template values. + /// + /// # Parameters + /// + /// - `path`: A reference to the path where the files will be created. /// - /// Path is the base path for the template to be created in. - fn create_all( self, path : &Path ) -> Result< () >; + /// # Returns + /// + /// A `Result` which is `Ok` if the files are created successfully, or an `Err` otherwise. + pub fn create_all( self, path : &path::Path ) -> Result< () > + { + self.files.create_all( path, &self.values ) + } - /// Returns all parameters used by the template. - fn parameters( &self ) -> &TemplateParameters; + /// Returns a reference to the template parameters. + /// + /// # Returns + /// + /// A reference to `TemplateParameters`. + pub fn parameters( &self ) -> &TemplateParameters + { + &self.parameters + } - /// Sets values for provided parameters. - fn set_values( &mut self, values : TemplateValues ); + /// Sets the template values. + /// + /// # Parameters + /// + /// - `values`: The new `TemplateValues` to be set. + pub fn set_values( &mut self, values : TemplateValues ) + { + self.values = values + } - /// Relative path for parameter values storage. 
- fn parameter_storage( &self ) -> &Path; + /// Returns a reference to the template values. + /// + /// # Returns + /// + /// A reference to `TemplateValues`. + pub fn get_values( &self ) -> &TemplateValues + { + &self.values + } + + /// Returns a mutable reference to the template values. + /// + /// # Returns + /// + /// A mutable reference to `TemplateValues`. + pub fn get_values_mut( &mut self ) -> &mut TemplateValues + { + &mut self.values + } + + /// Returns the path to the parameter storage file. + /// + /// # Returns + /// + /// A reference to a `Path` representing the parameter storage file. + pub fn parameter_storage( &self ) -> &Path + { + "./.deploy_template.toml".as_ref() + // qqq : for Mykyta : hardcode? + } + /// Returns the name of the template. /// - fn template_name( &self ) -> &'static str; + /// # Returns + /// + /// A static string slice representing the template name. + pub fn template_name( &self ) -> &'static str + { + "deploy" + } - /// Loads provided parameters from previous run. - fn load_existing_params( &mut self, path : &Path ) -> Option< () > + /// Loads existing parameters from the specified path and updates the template values. + /// + /// # Parameters + /// + /// - `path`: A reference to the path where the parameter file is located. + /// + /// # Returns + /// + /// An `Option` which is `Some(())` if the parameters are loaded successfully, or `None` otherwise. + pub fn load_existing_params( &mut self, path : &Path ) -> Option< () > { let data = fs::read_to_string( path.join( self.parameter_storage() ) ).ok()?; let document = data.parse::< toml_edit::Document >().ok()?; - let parameters = self.parameters().descriptors.iter().map( | d | &d.parameter ).cloned().collect::< Vec< _ > >(); + let parameters : Vec< _ > = self.parameters().descriptors.iter().map( | d | &d.parameter ).cloned().collect(); let template_table = document.get( self.template_name() )?; for parameter in parameters { @@ -59,20 +145,14 @@ mod private ); if let Some( value ) = value { - self.get_values_mut().insert_if_empty( ¶meter, Value::String( value.into() ) ); + self.get_values_mut().insert_if_empty( ¶meter, wca::Value::String( value.into() ) ); } } Some( () ) } - /// Get all template values. - fn get_values( &self ) -> &TemplateValues; - - /// Get all template values as a mutable reference. - fn get_values_mut( &mut self ) -> &mut TemplateValues; - /// Fetches mandatory parameters that are not set yet. - fn get_missing_mandatory( &self ) -> Vec< &str > + pub fn get_missing_mandatory( &self ) -> Vec< &str > { let values = self.get_values(); self @@ -84,6 +164,94 @@ mod private } } + impl Default for TemplateHolder + { + fn default() -> Self + { + let parameters = TemplateParameters::former() + .parameter( "gcp_project_id" ).is_mandatory( true ).end() + .parameter( "gcp_region" ).end() + .parameter( "gcp_artifact_repo_name" ).end() + .parameter( "docker_image_name" ).end() + .form(); + + Self + { + files : Default::default(), + parameters, + values : Default::default(), + } + } + } + + // /// Trait for creating a template for a file structure. + // pub trait Template< F > : Sized + // where + // F : TemplateFiles + Default + // { + // /// Creates all files in the template. + // /// + // /// Path is the base path for the template to be created in. + // fn create_all( self, path : &Path ) -> Result< () >; + + // /// Returns all parameters used by the template. + // fn parameters( &self ) -> &TemplateParameters; + + // /// Sets values for provided parameters. 
+ // fn set_values( &mut self, values : TemplateValues ); + + // /// Relative path for parameter values storage. + // fn parameter_storage( &self ) -> &Path; + + // /// + // fn template_name( &self ) -> &'static str; + + // /// Loads provided parameters from previous run. + // fn load_existing_params( &mut self, path : &Path ) -> Option< () > + // { + // let data = fs::read_to_string( path.join( self.parameter_storage() ) ).ok()?; + // let document = data.parse::< toml_edit::Document >().ok()?; + // let parameters : Vec< _ > = self.parameters().descriptors.iter().map( | d | &d.parameter ).cloned().collect(); + // let template_table = document.get( self.template_name() )?; + // for parameter in parameters + // { + // let value = template_table.get( ¶meter ) + // .and_then + // ( + // | item | + // match item + // { + // toml_edit::Item::Value( toml_edit::Value::String( val ) ) => Some( val.value() ), + // _ => None + // } + // ); + // if let Some( value ) = value + // { + // self.get_values_mut().insert_if_empty( ¶meter, Value::String( value.into() ) ); + // } + // } + // Some( () ) + // } + + // /// Get all template values. + // fn get_values( &self ) -> &TemplateValues; + + // /// Get all template values as a mutable reference. + // fn get_values_mut( &mut self ) -> &mut TemplateValues; + + // /// Fetches mandatory parameters that are not set yet. + // fn get_missing_mandatory( &self ) -> Vec< &str > + // { + // let values = self.get_values(); + // self + // .parameters() + // .list_mandatory() + // .into_iter() + // .filter( | key | values.0.get( *key ).map( | val | val.as_ref() ).flatten().is_none() ) + // .collect() + // } + // } + /// Files stored in a template. /// /// Can be iterated over, consuming the owner of the files. @@ -104,7 +272,7 @@ mod private } /// Parameters required for the template. - #[ derive( Debug, Default, Former ) ] + #[ derive( Debug, Default, former::Former ) ] pub struct TemplateParameters { #[ subform_entry( setter = false ) ] @@ -114,12 +282,12 @@ mod private impl TemplateParameters { /// Extracts template values from props for parameters required for this template. - pub fn values_from_props( &self, props : &Props ) -> TemplateValues + pub fn values_from_props( &self, props : &wca::Props ) -> TemplateValues { let values = self.descriptors .iter() .map( | d | &d.parameter ) - .map( | param | ( param.clone(), props.get( param ).map( Value::clone ) ) ) + .map( | param | ( param.clone(), props.get( param ).map( wca::Value::clone ) ) ) .collect(); TemplateValues( values ) } @@ -132,7 +300,7 @@ mod private } /// Parameter description. - #[ derive( Debug, Default, Former ) ] + #[ derive( Debug, Default, former::Former ) ] pub struct TemplateParameterDescriptor { parameter : String, @@ -154,14 +322,14 @@ mod private /// Holds a map of parameters and their values. #[ derive( Debug, Default ) ] - pub struct TemplateValues( HashMap< String, Option< Value > > ); + pub struct TemplateValues( collection::HashMap< String, Option< wca::Value > > ); impl TemplateValues { /// Converts values to a serializable object. /// /// Currently only `String`, `Number`, and `Bool` are supported. 
- pub fn to_serializable( &self ) -> BTreeMap< String, String > + pub fn to_serializable( &self ) -> collection::BTreeMap< String, String > { self.0.iter().map ( @@ -173,11 +341,11 @@ mod private { match value { - Value::String( val ) => val.to_string(), - Value::Number( val ) => val.to_string(), - Value::Path( _ ) => "unsupported".to_string(), - Value::Bool( val ) => val.to_string(), - Value::List( _ ) => "unsupported".to_string(), + wca::Value::String( val ) => val.to_string(), + wca::Value::Number( val ) => val.to_string(), + wca::Value::Path( _ ) => "unsupported".to_string(), + wca::Value::Bool( val ) => val.to_string(), + wca::Value::List( _ ) => "unsupported".to_string(), } } ) @@ -189,7 +357,7 @@ mod private } /// Inserts new value if parameter wasn't initialized before. - pub fn insert_if_empty( &mut self, key : &str, value : Value ) + pub fn insert_if_empty( &mut self, key : &str, value : wca::Value ) { if let None = self.0.get( key ).and_then( | v | v.as_ref() ) { @@ -204,7 +372,7 @@ mod private { println! ("Parameter `{key}` is not set" ); let answer = wca::ask( "Enter value" ); - self.0.insert( key.into(), Some( Value::String( answer ) ) ); + self.0.insert( key.into(), Some( wca::Value::String( answer ) ) ); } } } @@ -213,7 +381,7 @@ mod private /// /// Holds raw template data, relative path for the file, and a flag that /// specifies whether the raw data should be treated as a template. - #[ derive( Debug, Former ) ] + #[ derive( Debug, former::Former ) ] pub struct TemplateFileDescriptor { path : PathBuf, @@ -298,7 +466,7 @@ mod private } /// Helper builder for full template file list. - #[ derive( Debug, Former ) ] + #[ derive( Debug, former::Former ) ] pub struct TemplateFilesBuilder { /// Stores all file descriptors for current template. @@ -343,7 +511,7 @@ mod private fn read( &self, instruction : &FileReadInstruction ) -> Result< Vec< u8 > >; } - // qqq : xxx : why not public? + // zzz : why not public? struct FileSystem; impl FileSystemPort for FileSystem { @@ -372,7 +540,8 @@ mod private crate::mod_interface! { - orphan use Template; + //orphan use Template; + orphan use TemplateHolder; orphan use TemplateFiles; orphan use TemplateFileDescriptor; orphan use TemplateParameters; diff --git a/module/move/willbe/src/tool/tree.rs b/module/move/willbe/src/tool/tree.rs new file mode 100644 index 0000000000..3c1e0c670b --- /dev/null +++ b/module/move/willbe/src/tool/tree.rs @@ -0,0 +1,167 @@ +mod private +{ + use std::fmt::Write; + use crate::CrateDir; + use std::fmt::Formatter; + + /// Struct for formatting and printing tree-like structures. + /// It contains symbols used for visualizing the tree and information about the tree nodes. + #[ derive( Debug, Clone, Eq, PartialEq ) ] + pub struct TreePrinter + { + /// Symbols used for visualizing the tree. + symbols : Symbols, + /// Information about the tree nodes. + pub info : ListNodeReport, + } + + impl TreePrinter + { + /// Creates a new instance of `TreePrinter` with the provided node information. + /// + /// # Parameters + /// + /// - `info`: A reference to a `ListNodeReport` object containing information about the tree nodes. + /// + /// # Returns + /// + /// A new instance of `TreePrinter`. + pub fn new(info : &ListNodeReport) -> Self + { + TreePrinter + { + symbols : Symbols::default(), + info : info.clone(), + } + } + + /// Displays the name, version, path, and dependencies of a package with appropriate indentation and spacing. + /// + /// # Arguments + /// + /// * `spacer` - A string used for indentation. 
+ /// + /// # Returns + /// + /// * A `Result` containing the formatted string or a `std::fmt::Error` if formatting fails. + pub fn display_with_spacer( &self, spacer : &str ) -> Result< String, std::fmt::Error > + { + let mut f = String::new(); + + write!( f, "{}", self.info.name )?; + if let Some( version ) = &self.info.version { write!( f, " {version}" )? } + if let Some( crate_dir ) = &self.info.crate_dir { write!( f, " {}", crate_dir )? } + if self.info.duplicate { write!( f, "(*)" )? } + write!( f, "\n" )?; + + let mut new_spacer = format!( "{spacer}{} ", if self.info.normal_dependencies.len() < 2 { " " } else { self.symbols.down } ); + + let mut normal_dependencies_iter = self.info.normal_dependencies.iter(); + let last = normal_dependencies_iter.next_back(); + + for dep in normal_dependencies_iter + { + write!( f, "{spacer}{}{} {}", self.symbols.tee, self.symbols.right, Self::display_with_spacer( &TreePrinter::new( dep ), &new_spacer )? )?; + } + if let Some( last ) = last + { + new_spacer = format!( "{spacer} " ); + write!( f, "{spacer}{}{} {}", self.symbols.ell, self.symbols.right, Self::display_with_spacer( &TreePrinter::new( last ), &new_spacer )? )?; + } + if !self.info.dev_dependencies.is_empty() + { + let mut dev_dependencies_iter = self.info.dev_dependencies.iter(); + let last = dev_dependencies_iter.next_back(); + write!( f, "{spacer}[dev-dependencies]\n" )?; + for dep in dev_dependencies_iter + { + write!( f, "{spacer}{}{} {}", self.symbols.tee, self.symbols.right, Self::display_with_spacer( &TreePrinter::new( dep ), &new_spacer )? )?; + } + // unwrap - safe because `is_empty` check + write!( f, "{spacer}{}{} {}", self.symbols.ell, self.symbols.right, Self::display_with_spacer( &TreePrinter::new( last.unwrap() ), &new_spacer )? )?; + } + if !self.info.build_dependencies.is_empty() + { + let mut build_dependencies_iter = self.info.build_dependencies.iter(); + let last = build_dependencies_iter.next_back(); + write!( f, "{spacer}[build-dependencies]\n" )?; + for dep in build_dependencies_iter + { + write!( f, "{spacer}{}{} {}", self.symbols.tee, self.symbols.right, Self::display_with_spacer( &TreePrinter::new( dep ), &new_spacer )? )?; + } + // unwrap - safe because `is_empty` check + write!( f, "{spacer}{}{} {}", self.symbols.ell, self.symbols.right, Self::display_with_spacer( &TreePrinter::new( last.unwrap() ), &new_spacer )? )?; + } + + Ok( f ) + } + } + + impl std::fmt::Display for TreePrinter + { + fn fmt( &self, f : &mut Formatter< '_ > ) -> std::fmt::Result + { + write!( f, "{}", self.display_with_spacer( "" )? )?; + + Ok( () ) + } + } + + #[ derive( Debug, Clone, Eq, PartialEq ) ] + struct Symbols + { + down : &'static str, + tee : &'static str, + ell : &'static str, + right : &'static str, + } + + impl Default for Symbols + { + fn default() -> Self { + Self + { + down : "│", + tee : "├", + ell : "└", + right : "─", + } + } + } + + /// Represents a node in a dependency graph. + /// It holds essential information about the project dependencies. It is also capable + /// of holding any nested dependencies in a recursive manner, allowing the modeling + /// of complex dependency structures. + #[ derive( Debug, Clone, Eq, PartialEq ) ] + pub struct ListNodeReport + { + /// This could be the name of the library or crate. + pub name : String, + /// Ihe version of the crate. + pub version : Option< String >, + /// The path to the node's source files in the local filesystem. 
This is + /// optional as not all nodes may have a local presence (e.g., nodes representing remote crates). + pub crate_dir : Option< CrateDir >, + /// This field is a flag indicating whether the Node is a duplicate or not. + pub duplicate : bool, + /// A list that stores normal dependencies. + /// Each element in the list is also of the same 'ListNodeReport' type to allow + /// storage of nested dependencies. + pub normal_dependencies : Vec< ListNodeReport >, + /// A list that stores dev dependencies(dependencies required for tests or examples). + /// Each element in the list is also of the same 'ListNodeReport' type to allow + /// storage of nested dependencies. + pub dev_dependencies : Vec< ListNodeReport >, + /// A list that stores build dependencies. + /// Each element in the list is also of the same 'ListNodeReport' type to allow + /// storage of nested dependencies. + pub build_dependencies : Vec< ListNodeReport >, + } +} + +crate::mod_interface! +{ + orphan use TreePrinter; + orphan use ListNodeReport; +} \ No newline at end of file diff --git a/module/move/willbe/src/tool/url.rs b/module/move/willbe/src/tool/url.rs index f841613d79..f9635d56db 100644 --- a/module/move/willbe/src/tool/url.rs +++ b/module/move/willbe/src/tool/url.rs @@ -1,13 +1,17 @@ +/// Internal namespace. mod private { - use error_tools::for_app:: + #[ allow( unused_imports ) ] + use crate::tool::*; + + use error::untyped:: { format_err, Result, }; /// Extracts the repository URL from a full URL. - pub fn extract_repo_url( full_url : &str ) -> Option< String > + pub fn repo_url_extract( full_url : &str ) -> Option< String > { let parts : Vec< &str > = full_url.split( '/' ).collect(); @@ -34,13 +38,13 @@ mod private } else { - Err( format_err!( "Fail to extract git username and repository name" ) ) + Err( format_err!( "Fail to extract git username and repository name" ) ) } } } crate::mod_interface! { - protected use extract_repo_url; + protected use repo_url_extract; protected use git_info_extract; } diff --git a/module/move/willbe/src/wtools.rs b/module/move/willbe/src/wtools.rs index 58a2d73144..1ff22e50ab 100644 --- a/module/move/willbe/src/wtools.rs +++ b/module/move/willbe/src/wtools.rs @@ -1,28 +1,28 @@ -pub use error_tools::err; - -// pub use error_tools::BasicError; - -pub use mod_interface::mod_interface; - -/// error tools -pub mod error -{ - pub use error_tools::*; - pub use error_tools::for_lib::*; - pub use::error_tools::dependency::*; -} - -/// This module provides utilities for working with iterators. -pub mod iter -{ - pub use iter_tools::prelude::*; -} - -/// Collection of function and structures to manipulate paths. -pub mod path_tools -{ - // pub use proper_path_tools::protected::*; - // pub use proper_path_tools::protected::path; - // xxx : make use proper_path_tools::protected::path working - pub use proper_path_tools::path; -} +// pub use error::err; +// +// // pub use error::BasicError; +// +// pub use mod_interface::mod_interface; +// +// /// error tools +// pub mod error +// { +// pub use error::*; +// pub use error::typed::*; +// pub use::error_tools::dependency::*; +// } +// +// /// This module provides utilities for working with iterators. +// pub mod iter +// { +// pub use iter_tools::prelude::*; +// } +// +// /// Collection of function and structures to manipulate paths. 
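// For reference, a minimal sketch of the `TreePrinter` / `ListNodeReport` pair
// introduced earlier in this patch (illustrative; the updated tests further below
// use the same pattern):
//
//   let node = ListNodeReport
//   {
//     name : "node".into(),
//     version : None,
//     crate_dir : None,
//     duplicate : false,
//     normal_dependencies : vec![],
//     dev_dependencies : vec![],
//     build_dependencies : vec![],
//   };
//   let printer = TreePrinter::new( &node );
//   assert_eq!( "node\n", printer.display_with_spacer( "" ).unwrap() );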
+// pub mod path_tools +// { +// // pub use proper_path_tools::protected::*; +// // pub use proper_path_tools::protected::path; +// // zzz : make use proper_path_tools::protected::path working +// pub use proper_path::protected as path; +// } diff --git a/module/move/willbe/template/deploy/Makefile.hbs b/module/move/willbe/template/deploy/Makefile.hbs index 05601a05cf..2f3461aea8 100644 --- a/module/move/willbe/template/deploy/Makefile.hbs +++ b/module/move/willbe/template/deploy/Makefile.hbs @@ -43,129 +43,129 @@ export AWS_ACCESS_KEY_ID ?= $(SECRET_AWS_ACCESS_KEY_ID) # AWS Secret Access key for deploying to an EC2 instance export AWS_SECRET_ACCESS_KEY ?= $(SECRET_AWS_ACCESS_KEY) -# Check Hetzner and deployment related keys +# Check Hetzner and deployment related keys check-hetzner-keys: - @[ -f key/SECRET_CSP_HETZNER ] \ + @[ -f key/SECRET_CSP_HETZNER ] \ || [ ! -z "${SECRET_CSP_HETZNER}" ] \ || { echo "ERROR: File key/SECRET_CSP_HETZNER does not exist"; exit 1; } -# Check AWS and deployment related keys +# Check AWS and deployment related keys check-aws-keys: - @[ -f key/SECRET_AWS_ACCESS_KEY_ID ] \ + @[ -f key/SECRET_AWS_ACCESS_KEY_ID ] \ || [ ! -z "${SECRET_AWS_ACCESS_KEY_ID}" ] \ || echo "ERROR: File key/SECRET_AWS_ACCESS_KEY_ID does not exist" - @[ -f key/SECRET_AWS_ACCESS_KEY ] \ + @[ -f key/SECRET_AWS_ACCESS_KEY ] \ || [ ! -z "${SECRET_AWS_ACCESS_KEY}" ] \ || echo "ERROR: File key/SECRET_AWS_ACCESS_KEY does not exist" - @[ -f key/SECRET_AWS_ACCESS_KEY_ID ] \ + @[ -f key/SECRET_AWS_ACCESS_KEY_ID ] \ || [ ! -z "${SECRET_AWS_ACCESS_KEY_ID}" ] \ || exit 1 - @[ -f key/SECRET_AWS_ACCESS_KEY ] \ + @[ -f key/SECRET_AWS_ACCESS_KEY ] \ || [ ! -z "${SECRET_AWS_ACCESS_KEY}" ] \ || exit 1 check-gce-keys: - @echo "All required GCE keys are the same as GCP keys" + @echo "All required GCE keys are the same as GCP keys" # Check if required GCP keys are present check-gcp-keys: - @[ -f key/service_account.json ] \ + @[ -f key/service_account.json ] \ || echo "ERROR: File key/service_account.json does not exist" - @[ -f key/SECRET_STATE_ARCHIVE_KEY ] \ + @[ -f key/SECRET_STATE_ARCHIVE_KEY ] \ || [ ! -z "${SECRET_STATE_ARCHIVE_KEY}" ] \ || echo "ERROR: File key/SECRET_STATE_ARCHIVE_KEY does not exist" - @[ -f key/service_account.json ] \ + @[ -f key/service_account.json ] \ || exit 1 - @[ -f key/SECRET_STATE_ARCHIVE_KEY ] \ + @[ -f key/SECRET_STATE_ARCHIVE_KEY ] \ || [ ! 
-z "${SECRET_STATE_ARCHIVE_KEY}" ] \ || exit 1 # Start local docker container start: - docker compose up -d + docker compose up -d # Stop local docker container stop: - docker compose down + docker compose down # Remove created docker image clean: stop - docker rmi $(TF_VAR_IMAGE_NAME) - docker buildx prune -af + docker rmi $(TF_VAR_IMAGE_NAME) + docker buildx prune -af # Install gcloud for Debian/Ubuntu install-gcloud: - # GCloud - sudo apt-get update - sudo apt-get install -y apt-transport-https ca-certificates gnupg curl sudo - curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo gpg --dearmor -o /usr/share/keyrings/cloud.google.gpg - echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee -a /etc/apt/sources.list.d/google-cloud-sdk.list - sudo apt-get update && sudo apt-get install -y google-cloud-cli + # GCloud + sudo apt-get update + sudo apt-get install -y apt-transport-https ca-certificates gnupg curl sudo + curl https://packages.cloud.google.com/apt/doc/apt-key.gpg | sudo gpg --dearmor -o /usr/share/keyrings/cloud.google.gpg + echo "deb [signed-by=/usr/share/keyrings/cloud.google.gpg] https://packages.cloud.google.com/apt cloud-sdk main" | sudo tee -a /etc/apt/sources.list.d/google-cloud-sdk.list + sudo apt-get update && sudo apt-get install -y google-cloud-cli # Install terraform for Debian/Ubuntu install-terraform: - sudo apt-get update && sudo apt-get install -y gnupg software-properties-common - wget -O- https://apt.releases.hashicorp.com/gpg | gpg --dearmor | sudo tee /usr/share/keyrings/hashicorp-archive-keyring.gpg - gpg --no-default-keyring --keyring /usr/share/keyrings/hashicorp-archive-keyring.gpg --fingerprint - echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list - sudo apt update && sudo apt-get install terraform + sudo apt-get update && sudo apt-get install -y gnupg software-properties-common + wget -O- https://apt.releases.hashicorp.com/gpg | gpg --dearmor | sudo tee /usr/share/keyrings/hashicorp-archive-keyring.gpg + gpg --no-default-keyring --keyring /usr/share/keyrings/hashicorp-archive-keyring.gpg --fingerprint + echo "deb [signed-by=/usr/share/keyrings/hashicorp-archive-keyring.gpg] https://apt.releases.hashicorp.com $(lsb_release -cs) main" | sudo tee /etc/apt/sources.list.d/hashicorp.list + sudo apt update && sudo apt-get install terraform # Install gcloud and terraform install: install-gcloud install-terraform - gcloud --version - terraform -version + gcloud --version + terraform -version # Login to GCP with user account gcp-auth: - gcloud auth application-default login + gcloud auth application-default login # Authorize to GCP with service account gcp-service: - gcloud auth activate-service-account --key-file=$(google_sa_creds) + gcloud auth activate-service-account --key-file=$(google_sa_creds) # Add docker repo auth helper gcp-docker: - gcloud auth configure-docker $(TF_VAR_REGION)-docker.pkg.dev --quiet + gcloud auth configure-docker $(TF_VAR_REGION)-docker.pkg.dev --quiet # Initializes all terraform projects # Downloads required modules and validates .tf files tf-init: - terraform -chdir=$(tf_dir)/gar init - terraform -chdir=$(tf_dir)/gce init - terraform -chdir=$(tf_dir)/hetzner init - terraform -chdir=$(tf_dir)/aws init + terraform -chdir=$(tf_dir)/gar init + terraform -chdir=$(tf_dir)/gce init + terraform -chdir=$(tf_dir)/hetzner init + 
terraform -chdir=$(tf_dir)/aws init # Creates Artifact Registry repository on GCP in specified location create-artifact-repo: tf-init - terraform -chdir=$(tf_dir)/gar apply -auto-approve + terraform -chdir=$(tf_dir)/gar apply -auto-approve # Builds uarust_conf_site image build-image: - docker build . -t name:$(TF_VAR_IMAGE_NAME) -t $(tag) + docker build . -t name:$(TF_VAR_IMAGE_NAME) -t $(tag) # Builds and pushes local docker image to the private repository push-image: gcp-docker create-artifact-repo - docker push $(tag) + docker push $(tag) # Creates GCE instance with the website configured on boot create-gce: check-gce-keys gcp-service state_storage_pull push-image - terraform -chdir=$(tf_dir)/gce apply -auto-approve + terraform -chdir=$(tf_dir)/gce apply -auto-approve # Creates AWS EC2 instance with the website configured on boot create-aws: check-aws-keys gcp-service state_storage_pull push-image - terraform -chdir=$(tf_dir)/aws apply -auto-approve + terraform -chdir=$(tf_dir)/aws apply -auto-approve # Creates Hetzner instance with the website configured on boot create-hetzner: check-hetzner-keys gcp-service state_storage_pull push-image - terraform -chdir=$(tf_dir)/hetzner apply -auto-approve + terraform -chdir=$(tf_dir)/hetzner apply -auto-approve # Deploys everything and updates terraform states deploy-in-container: create-$(CSP) state_storage_push # Deploys using tools from the container deploy: check-gcp-keys build-image - docker build . -t deploy-$(TF_VAR_IMAGE_NAME) -f ./$(tf_dir)/Dockerfile --build-arg google_sa_creds="$(google_sa_creds)" - @docker run -v //var/run/docker.sock:/var/run/docker.sock -v .:/app \ + docker build . -t deploy-$(TF_VAR_IMAGE_NAME) -f ./$(tf_dir)/Dockerfile --build-arg google_sa_creds="$(google_sa_creds)" + @docker run -v //var/run/docker.sock:/var/run/docker.sock -v .:/app \ -e SECRET_STATE_ARCHIVE_KEY=$(SECRET_STATE_ARCHIVE_KEY) \ -e SECRET_CSP_HETZNER=$(SECRET_CSP_HETZNER) \ -e SECRET_AWS_ACCESS_KEY_ID=$(SECRET_AWS_ACCESS_KEY_ID) \ @@ -175,35 +175,35 @@ deploy: check-gcp-keys build-image # Review changes that terraform will do on apply tf-plan: tf-init - terraform -chdir=$(tf_dir)/gar plan - terraform -chdir=$(tf_dir)/gce plan - terraform -chdir=$(tf_dir)/hetzner plan - terraform -chdir=$(tf_dir)/aws plan + terraform -chdir=$(tf_dir)/gar plan + terraform -chdir=$(tf_dir)/gce plan + terraform -chdir=$(tf_dir)/hetzner plan + terraform -chdir=$(tf_dir)/aws plan # Destroy created infrastracture on GCP tf-destroy: tf-init - terraform -chdir=$(tf_dir)/gar destroy - terraform -chdir=$(tf_dir)/gce destroy - terraform -chdir=$(tf_dir)/hetzner destroy - terraform -chdir=$(tf_dir)/aws destroy + terraform -chdir=$(tf_dir)/gar destroy + terraform -chdir=$(tf_dir)/gce destroy + terraform -chdir=$(tf_dir)/hetzner destroy + terraform -chdir=$(tf_dir)/aws destroy # Pushes encrypted terraform state files to the GCS Bucket state_storage_push: - @echo Pushing encrypted terraform state files to the GCS Bucket - @gcloud storage cp $(tf_dir)/gce/terraform.tfstate gs://$(TF_VAR_BUCKET_NAME)/gce.tfstate --encryption-key="$(SECRET_STATE_ARCHIVE_KEY)" - @gcloud storage cp $(tf_dir)/gar/terraform.tfstate gs://$(TF_VAR_BUCKET_NAME)/gar.tfstate --encryption-key="$(SECRET_STATE_ARCHIVE_KEY)" - @gcloud storage cp $(tf_dir)/hetzner/terraform.tfstate gs://$(TF_VAR_BUCKET_NAME)/hetzner.tfstate --encryption-key="$(SECRET_STATE_ARCHIVE_KEY)" - @gcloud storage cp $(tf_dir)/aws/terraform.tfstate gs://$(TF_VAR_BUCKET_NAME)/aws.tfstate --encryption-key="$(SECRET_STATE_ARCHIVE_KEY)" + @echo 
Pushing encrypted terraform state files to the GCS Bucket + @gcloud storage cp $(tf_dir)/gce/terraform.tfstate gs://$(TF_VAR_BUCKET_NAME)/gce.tfstate --encryption-key="$(SECRET_STATE_ARCHIVE_KEY)" + @gcloud storage cp $(tf_dir)/gar/terraform.tfstate gs://$(TF_VAR_BUCKET_NAME)/gar.tfstate --encryption-key="$(SECRET_STATE_ARCHIVE_KEY)" + @gcloud storage cp $(tf_dir)/hetzner/terraform.tfstate gs://$(TF_VAR_BUCKET_NAME)/hetzner.tfstate --encryption-key="$(SECRET_STATE_ARCHIVE_KEY)" + @gcloud storage cp $(tf_dir)/aws/terraform.tfstate gs://$(TF_VAR_BUCKET_NAME)/aws.tfstate --encryption-key="$(SECRET_STATE_ARCHIVE_KEY)" # Pulls and decrypts terraform state files to the GCS Bucket state_storage_pull: - @echo Pulling terraform state files to the GCS Bucket - -@gcloud storage cp gs://$(TF_VAR_BUCKET_NAME)/gce.tfstate $(tf_dir)/gce/terraform.tfstate --decryption-keys="$(SECRET_STATE_ARCHIVE_KEY)" - -@gcloud storage cp gs://$(TF_VAR_BUCKET_NAME)/gar.tfstate $(tf_dir)/gar/terraform.tfstate --decryption-keys="$(SECRET_STATE_ARCHIVE_KEY)" - -@gcloud storage cp gs://$(TF_VAR_BUCKET_NAME)/hetzner.tfstate $(tf_dir)/hetzner/terraform.tfstate --decryption-keys="$(SECRET_STATE_ARCHIVE_KEY)" - -@gcloud storage cp gs://$(TF_VAR_BUCKET_NAME)/aws.tfstate $(tf_dir)/aws/terraform.tfstate --decryption-keys="$(SECRET_STATE_ARCHIVE_KEY)" + @echo Pulling terraform state files to the GCS Bucket + -@gcloud storage cp gs://$(TF_VAR_BUCKET_NAME)/gce.tfstate $(tf_dir)/gce/terraform.tfstate --decryption-keys="$(SECRET_STATE_ARCHIVE_KEY)" + -@gcloud storage cp gs://$(TF_VAR_BUCKET_NAME)/gar.tfstate $(tf_dir)/gar/terraform.tfstate --decryption-keys="$(SECRET_STATE_ARCHIVE_KEY)" + -@gcloud storage cp gs://$(TF_VAR_BUCKET_NAME)/hetzner.tfstate $(tf_dir)/hetzner/terraform.tfstate --decryption-keys="$(SECRET_STATE_ARCHIVE_KEY)" + -@gcloud storage cp gs://$(TF_VAR_BUCKET_NAME)/aws.tfstate $(tf_dir)/aws/terraform.tfstate --decryption-keys="$(SECRET_STATE_ARCHIVE_KEY)" # Creates GCS Bucket for terraform states state_storage_init: - terraform -chdir=$(tf_dir)/gcs init - terraform -chdir=$(tf_dir)/gcs apply + terraform -chdir=$(tf_dir)/gcs init + terraform -chdir=$(tf_dir)/gcs apply diff --git a/module/move/willbe/template/workspace/Makefile b/module/move/willbe/template/workspace/Makefile index b1036cde01..994c511991 100644 --- a/module/move/willbe/template/workspace/Makefile +++ b/module/move/willbe/template/workspace/Makefile @@ -22,10 +22,10 @@ VERSION ?= $(strip $(shell grep -m1 'version = "' Cargo.toml | cut -d '"' -f2)) # Sync local repostiry. # # Usage : -# make git.sync [message='description of changes'] +# make git.sync [message='description of changes'] git.sync : - git add --all && git commit -am $(message) && git pull + git add --all && git commit -am $(message) && git pull sync : git.sync @@ -36,10 +36,10 @@ sync : git.sync # Check vulnerabilities with cargo-audit. # # Usage : -# make audit +# make audit audit : - cargo audit + cargo audit # # === General commands @@ -48,88 +48,88 @@ audit : # Generate crates documentation from Rust sources. 
# # Usage : -# make doc [private=(yes|no)] [open=(yes|no)] [clean=(no|yes)] [manifest_path=(|[path])] +# make doc [private=(yes|no)] [open=(yes|no)] [clean=(no|yes)] [manifest_path=(|[path])] doc : ifeq ($(clean),yes) - @rm -rf target/doc/ + @rm -rf target/doc/ endif - cargo doc --all-features \ - $(if $(call eq,$(private),no),,--document-private-items) \ - $(if $(call eq,$(manifest_path),),--manifest-path ./Cargo.toml,--manifest-path $(manifest_path)) \ - $(if $(call eq,$(open),no),,--open) + cargo doc --all-features \ + $(if $(call eq,$(private),no),,--document-private-items) \ + $(if $(call eq,$(manifest_path),),--manifest-path ./Cargo.toml,--manifest-path $(manifest_path)) \ + $(if $(call eq,$(open),no),,--open) # Lint Rust sources with Clippy. # # Usage : -# make lint [warnings=(no|yes)] [manifest_path=(|[path])] +# make lint [warnings=(no|yes)] [manifest_path=(|[path])] lint : - cargo clippy --all-features \ - $(if $(call eq,$(manifest_path),),--manifest-path ./Cargo.toml,--manifest-path $(manifest_path)) \ - $(if $(call eq,$(warnings),no),-- -D warnings,) + cargo clippy --all-features \ + $(if $(call eq,$(manifest_path),),--manifest-path ./Cargo.toml,--manifest-path $(manifest_path)) \ + $(if $(call eq,$(warnings),no),-- -D warnings,) # Check Rust sources `check`. # # Usage : -# make check [manifest_path=(|[path])] +# make check [manifest_path=(|[path])] check : - cargo check \ - $(if $(call eq,$(manifest_path),),--manifest-path ./Cargo.toml,--manifest-path $(manifest_path)) + cargo check \ + $(if $(call eq,$(manifest_path),),--manifest-path ./Cargo.toml,--manifest-path $(manifest_path)) # Format and lint Rust sources. # # Usage : -# make normalize +# make normalize normalize : fmt lint # Perform common checks on the module. # # Usage : -# make checkmate +# make checkmate checkmate : doc lint check # Format Rust sources with rustfmt. # # Usage : -# make fmt [check=(no|yes)] +# make fmt [check=(no|yes)] fmt : - { find -L module -name *.rs -print0 ; } | xargs -0 rustfmt +nightly $(if $(call eq,$(check),yes),-- --check,) + { find -L module -name *.rs -print0 ; } | xargs -0 rustfmt +nightly $(if $(call eq,$(check),yes),-- --check,) # cargo +nightly fmt --all $(if $(call eq,$(check),yes),-- --check,) # Run project Rust sources with Cargo. # # Usage : -# make up +# make up up : - cargo up + cargo up # Run project Rust sources with Cargo. # # Usage : -# make clean +# make clean clean : - cargo clean && rm -rf Cargo.lock && cargo cache -a && cargo update + cargo clean && rm -rf Cargo.lock && cargo cache -a && cargo update # Run Rust tests of project. # # Usage : -# make test +# make test test : - cargo test --all-features + cargo test --all-features # Run format link test and tests. 
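# For example (illustrative): `make doc private=no open=no` builds the docs without
# private items and without opening them in a browser, `make lint warnings=no` passes
# `-D warnings` to Clippy, and `make all` simply chains `fmt`, `lint` and `test`.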
# # Usage : -# make all +# make all all : fmt lint test @@ -138,14 +138,14 @@ all : fmt lint test # .PHONY : \ - all \ - audit \ - docs \ - lint \ - check \ - fmt \ - normalize \ - checkmate \ - test \ - up \ - doc + all \ + audit \ + docs \ + lint \ + check \ + fmt \ + normalize \ + checkmate \ + test \ + up \ + doc diff --git a/module/move/willbe/template/workspace/module/module1/Cargo.toml.x b/module/move/willbe/template/workspace/module/module1/Cargo.toml.x index 9cf134e518..eba1f8da41 100644 --- a/module/move/willbe/template/workspace/module/module1/Cargo.toml.x +++ b/module/move/willbe/template/workspace/module/module1/Cargo.toml.x @@ -13,4 +13,4 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + diff --git a/module/move/willbe/template/workspace/module/module1/examples/module1_example.rs b/module/move/willbe/template/workspace/module/module1/examples/module1_example.rs index 1ce8bc56f8..d7a0d23ef4 100644 --- a/module/move/willbe/template/workspace/module/module1/examples/module1_example.rs +++ b/module/move/willbe/template/workspace/module/module1/examples/module1_example.rs @@ -7,6 +7,6 @@ use example_module::hello; ///test fn main() { - let h = hello(); - println!( "{}", h ); + let h = hello(); + println!( "{}", h ); } diff --git a/module/move/willbe/template/workspace/module/module1/src/lib.rs b/module/move/willbe/template/workspace/module/module1/src/lib.rs index d7b38faf6c..19c783b9ef 100644 --- a/module/move/willbe/template/workspace/module/module1/src/lib.rs +++ b/module/move/willbe/template/workspace/module/module1/src/lib.rs @@ -3,5 +3,5 @@ /// Example pub fn hello() -> String { - "hello world!".into() + "hello world!".into() } diff --git a/module/move/willbe/template/workspace/module/module1/tests/hello_test.rs b/module/move/willbe/template/workspace/module/module1/tests/hello_test.rs index 7ea32f1cba..06400d06b3 100644 --- a/module/move/willbe/template/workspace/module/module1/tests/hello_test.rs +++ b/module/move/willbe/template/workspace/module/module1/tests/hello_test.rs @@ -5,5 +5,5 @@ use example_module::*; #[ test ] fn example_test() { - assert_eq!( "hello world!".to_string(), hello() ); + assert_eq!( "hello world!".to_string(), hello() ); } diff --git a/module/move/willbe/tests/inc/action/cicd_renew.rs b/module/move/willbe/tests/inc/action_tests/cicd_renew.rs similarity index 92% rename from module/move/willbe/tests/inc/action/cicd_renew.rs rename to module/move/willbe/tests/inc/action_tests/cicd_renew.rs index 4f9b0bc17b..ffbbe5b570 100644 --- a/module/move/willbe/tests/inc/action/cicd_renew.rs +++ b/module/move/willbe/tests/inc/action_tests/cicd_renew.rs @@ -1,16 +1,17 @@ use super::*; use assert_fs::prelude::*; -use the_module::action; +use the_module:: +{ + action, + collection::HashMap, +}; // -// aaa : for Petro : rid off redundant namespace. 
ask -// aaa : remove use std:: { fs::File, io::Read, - collections::HashMap }; use std::fs::create_dir_all; use serde::Deserialize; @@ -69,10 +70,10 @@ fn default_case() uses : "Username/test/.github/workflows/standard_rust_push.yml@alpha".into(), with }; - let expected = Workflow + let exp = Workflow { name : "test_module".into(), - on : + on : { let mut map = HashMap::new(); let mut push_map = HashMap::new(); @@ -90,13 +91,14 @@ fn default_case() // Act _ = action::cicd_renew( &temp ).unwrap(); + dbg!( &file_path ); // Assert let mut file = File::open( file_path ).unwrap(); let mut content = String::new(); _ = file.read_to_string( &mut content ).unwrap(); - let actual: Workflow = serde_yaml::from_str( &content ).unwrap(); - assert_eq!( expected, actual ); + let got : Workflow = serde_yaml::from_str( &content ).unwrap(); + assert_eq!( got, exp ); assert!( base_path.join( "appropriate_branch.yml" ).exists() ); assert!( base_path.join( "appropriate_branch_beta.yml" ).exists() ); diff --git a/module/move/willbe/tests/inc/action/features.rs b/module/move/willbe/tests/inc/action_tests/features.rs similarity index 86% rename from module/move/willbe/tests/inc/action/features.rs rename to module/move/willbe/tests/inc/action_tests/features.rs index c27e99c3b4..5e2607bad8 100644 --- a/module/move/willbe/tests/inc/action/features.rs +++ b/module/move/willbe/tests/inc/action_tests/features.rs @@ -18,8 +18,9 @@ fn package_no_features() { // Arrange let temp = arrange( "three_packages/b" ); + // let x : PathBuf = temp.path().to_owned(); let options = willbe::action::features::FeaturesOptions::former() - .manifest_dir( willbe::_path::AbsolutePath::try_from( temp.path().to_owned() ).unwrap() ) + .crate_dir( willbe::CrateDir::try_from( temp.path().to_owned() ).unwrap() ) .form(); // Act @@ -38,7 +39,7 @@ fn package_features() // Arrange let temp = arrange( "three_packages_with_features/b" ); let options = willbe::action::features::FeaturesOptions::former() - .manifest_dir( willbe::_path::AbsolutePath::try_from( temp.path().to_owned() ).unwrap() ) + .crate_dir( willbe::CrateDir::try_from( temp.path().to_owned() ).unwrap() ) .form(); // Act @@ -60,7 +61,7 @@ fn package_features_with_features_deps() { let temp = arrange( "three_packages_with_features/b" ); let options = willbe::action::features::FeaturesOptions::former() - .manifest_dir( willbe::_path::AbsolutePath::try_from( temp.path().to_owned() ).unwrap() ) + .crate_dir( willbe::CrateDir::try_from( temp.path().to_owned() ).unwrap() ) .with_features_deps( true ) .form(); @@ -84,7 +85,7 @@ fn workspace_no_features() // Arrange let temp = arrange( "three_packages" ); let options = willbe::action::features::FeaturesOptions::former() - .manifest_dir( willbe::_path::AbsolutePath::try_from( temp.path().to_owned() ).unwrap() ) + .crate_dir( willbe::CrateDir::try_from( temp.path().to_owned() ).unwrap() ) .form(); // Act @@ -113,7 +114,7 @@ fn workspace_features() // Arrange let temp = arrange( "three_packages_with_features" ); let options = willbe::action::features::FeaturesOptions::former() - .manifest_dir( willbe::_path::AbsolutePath::try_from( temp.path().to_owned() ).unwrap() ) + .crate_dir( willbe::CrateDir::try_from( temp.path().to_owned() ).unwrap() ) .form(); // Act @@ -150,7 +151,7 @@ fn workspace_features_with_features_deps() // Arrange let temp = arrange( "three_packages_with_features" ); let options = willbe::action::features::FeaturesOptions::former() - .manifest_dir( willbe::_path::AbsolutePath::try_from( temp.path().to_owned() ).unwrap() ) + 
.crate_dir( willbe::CrateDir::try_from( temp.path().to_owned() ).unwrap() ) .with_features_deps( true ) .form(); diff --git a/module/move/willbe/tests/inc/action/list.rs b/module/move/willbe/tests/inc/action_tests/list.rs similarity index 100% rename from module/move/willbe/tests/inc/action/list.rs rename to module/move/willbe/tests/inc/action_tests/list.rs diff --git a/module/move/willbe/tests/inc/action/list/data.rs b/module/move/willbe/tests/inc/action_tests/list/data.rs similarity index 87% rename from module/move/willbe/tests/inc/action/list/data.rs rename to module/move/willbe/tests/inc/action_tests/list/data.rs index 17fc6230b6..423baf654c 100644 --- a/module/move/willbe/tests/inc/action/list/data.rs +++ b/module/move/willbe/tests/inc/action_tests/list/data.rs @@ -3,7 +3,7 @@ use super::*; use assert_fs::prelude::*; use the_module::action::{ self, list::* }; use willbe::CrateDir; -use willbe::_path::AbsolutePath; +use willbe::path::AbsolutePath; // @@ -51,13 +51,13 @@ mod chain_of_three_packages assert_eq!( 1, trees.len() ); let tree = &trees[ 0 ]; - assert_eq!( "_chain_of_packages_a", tree.name.as_str() ); + assert_eq!( "_chain_of_packages_a", tree.info.name.as_str() ); - assert_eq!( 1, tree.normal_dependencies.len() ); - assert!( tree.dev_dependencies.is_empty() ); - assert!( tree.build_dependencies.is_empty() ); + assert_eq!( 1, tree.info.normal_dependencies.len() ); + assert!( tree.info.dev_dependencies.is_empty() ); + assert!( tree.info.build_dependencies.is_empty() ); - let sub_tree = &tree.normal_dependencies[ 0 ]; + let sub_tree = &tree.info.normal_dependencies[ 0 ]; assert_eq!( "_chain_of_packages_b", sub_tree.name.as_str() ); assert_eq!( 1, sub_tree.normal_dependencies.len() ); @@ -73,16 +73,16 @@ mod chain_of_three_packages } #[ test ] - fn list_format_for_single_package() + fn list_format_for_single_package_1() { // Arrange let temp = arrange(); let args = ListOptions::former() - .path_to_manifest( crate_dir( &temp.join( "a" ) ) ) - .format( ListFormat::Topological ) - .dependency_sources([ DependencySource::Local ]) - .dependency_categories([ DependencyCategory::Primary ]) - .form(); + .path_to_manifest( crate_dir( &temp.join( "a" ) ) ) + .format( ListFormat::Topological ) + .dependency_sources([ DependencySource::Local ]) + .dependency_categories([ DependencyCategory::Primary ]) + .form(); // Act let output = action::list( args ).unwrap(); @@ -152,13 +152,13 @@ mod package_with_remote_dependency assert_eq!( 1, trees.len() ); let tree = &trees[ 0 ]; - assert_eq!( "_package_with_remote_dep_a", tree.name.as_str() ); + assert_eq!( "_package_with_remote_dep_a", tree.info.name.as_str() ); - assert_eq!( 2, tree.normal_dependencies.len() ); - assert!( tree.dev_dependencies.is_empty() ); - assert!( tree.build_dependencies.is_empty() ); + assert_eq!( 2, tree.info.normal_dependencies.len() ); + assert!( tree.info.dev_dependencies.is_empty() ); + assert!( tree.info.build_dependencies.is_empty() ); - let [ sub_tree_1, sub_tree_2, .. ] = tree.normal_dependencies.as_slice() else { unreachable!() }; + let [ sub_tree_1, sub_tree_2, .. 
] = tree.info.normal_dependencies.as_slice() else { unreachable!() }; assert_eq!( "_package_with_remote_dep_b", sub_tree_1.name.as_str() ); assert!( sub_tree_1.normal_dependencies.is_empty() ); assert!( sub_tree_1.dev_dependencies.is_empty() ); @@ -171,7 +171,7 @@ mod package_with_remote_dependency } #[ test ] - fn list_format_for_single_package() + fn list_format_for_single_package_2() { // Arrange let temp = arrange(); @@ -250,14 +250,14 @@ mod workspace_with_cyclic_dependency assert_eq!( 1, trees.len() ); let tree = &trees[ 0 ]; - assert_eq!( "_workspace_with_cyclic_dep_a", tree.name.as_str() ); - assert_eq!( "0.1.0", tree.version.as_ref().unwrap().as_str() ); + assert_eq!( "_workspace_with_cyclic_dep_a", tree.info.name.as_str() ); + assert_eq!( "0.1.0", tree.info.version.as_ref().unwrap().as_str() ); - assert_eq!( 1, tree.normal_dependencies.len() ); - assert!( tree.dev_dependencies.is_empty() ); - assert!( tree.build_dependencies.is_empty() ); + assert_eq!( 1, tree.info.normal_dependencies.len() ); + assert!( tree.info.dev_dependencies.is_empty() ); + assert!( tree.info.build_dependencies.is_empty() ); - let sub_tree = &tree.normal_dependencies[ 0 ]; + let sub_tree = &tree.info.normal_dependencies[ 0 ]; assert_eq!( "_workspace_with_cyclic_dep_b", sub_tree.name.as_str() ); assert_eq!( "*", sub_tree.version.as_ref().unwrap().as_str() ); diff --git a/module/move/willbe/tests/inc/action/list/format.rs b/module/move/willbe/tests/inc/action_tests/list/format.rs similarity index 82% rename from module/move/willbe/tests/inc/action/list/format.rs rename to module/move/willbe/tests/inc/action_tests/list/format.rs index 42d492cf9d..84b7f32a96 100644 --- a/module/move/willbe/tests/inc/action/list/format.rs +++ b/module/move/willbe/tests/inc/action_tests/list/format.rs @@ -1,6 +1,7 @@ use super::*; -use the_module::action::list::ListNodeReport; +use the_module::tree::ListNodeReport; +use willbe::tree::TreePrinter; #[ test ] fn node_with_depth_two_leaves_stop_spacer() @@ -9,7 +10,7 @@ fn node_with_depth_two_leaves_stop_spacer() { name : "node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec! [ @@ -17,13 +18,13 @@ fn node_with_depth_two_leaves_stop_spacer() { name : "sub_node1".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![ ListNodeReport { name : "sub_sub_node1".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -36,13 +37,13 @@ fn node_with_depth_two_leaves_stop_spacer() { name : "sub_node2".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![ ListNodeReport { name : "sub_sub_node2".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -63,7 +64,8 @@ node └─ sub_sub_node2 "#.trim(); - let actual = node.display_with_spacer( "" ).unwrap(); + let printer = TreePrinter::new( &node ); + let actual = printer.display_with_spacer( "" ).unwrap(); let actual = actual.trim(); println!("{actual}"); @@ -77,7 +79,7 @@ fn node_with_depth_two_leaves() { name : "node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec! 
[ @@ -85,13 +87,13 @@ fn node_with_depth_two_leaves() { name : "sub_node1".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![ ListNodeReport { name : "sub_sub_node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -104,7 +106,7 @@ fn node_with_depth_two_leaves() { name : "sub_node2".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -121,7 +123,8 @@ node └─ sub_node2 "#.trim(); - let actual = node.display_with_spacer( "" ).unwrap(); + let printer = TreePrinter::new( &node ); + let actual = printer.display_with_spacer( "" ).unwrap(); let actual = actual.trim(); println!("{actual}"); @@ -135,19 +138,19 @@ fn node_with_depth_one_leaf() { name : "node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![ ListNodeReport { name : "sub_node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![ ListNodeReport { name : "sub_sub_node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -165,7 +168,8 @@ node └─ sub_sub_node "#.trim(); - let actual = node.display_with_spacer( "" ).unwrap(); + let printer = TreePrinter::new( &node ); + let actual = printer.display_with_spacer( "" ).unwrap(); let actual = actual.trim(); println!("{actual}"); @@ -179,7 +183,7 @@ fn node_with_build_dependencies_tree_with_two_leaves() { name : "node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -189,7 +193,7 @@ fn node_with_build_dependencies_tree_with_two_leaves() { name : "build_sub_node1".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -199,7 +203,7 @@ fn node_with_build_dependencies_tree_with_two_leaves() { name : "build_sub_node2".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -214,7 +218,8 @@ node └─ build_sub_node2 "#.trim(); - let actual = node.display_with_spacer( "" ).unwrap(); + let printer = TreePrinter::new( &node ); + let actual = printer.display_with_spacer( "" ).unwrap(); let actual = actual.trim(); println!("{actual}"); @@ -228,7 +233,7 @@ fn node_with_build_dependencies_tree_with_one_leaf() { name : "node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -237,7 +242,7 @@ fn node_with_build_dependencies_tree_with_one_leaf() { name : "build_sub_node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -251,7 +256,8 @@ node └─ build_sub_node "#.trim(); - let actual = node.display_with_spacer( "" ).unwrap(); + let printer = TreePrinter::new( &node ); + let actual = printer.display_with_spacer( "" ).unwrap(); let actual = actual.trim(); println!("{actual}"); @@ -265,7 +271,7 @@ fn node_with_dev_dependencies_tree_with_two_leaves() { name : "node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec! 
@@ -274,7 +280,7 @@ fn node_with_dev_dependencies_tree_with_two_leaves() { name : "dev_sub_node1".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -284,7 +290,7 @@ fn node_with_dev_dependencies_tree_with_two_leaves() { name : "dev_sub_node2".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -300,7 +306,8 @@ node └─ dev_sub_node2 "#.trim(); - let actual = node.display_with_spacer( "" ).unwrap(); + let printer = TreePrinter::new( &node ); + let actual = printer.display_with_spacer( "" ).unwrap(); let actual = actual.trim(); println!("{actual}"); @@ -314,7 +321,7 @@ fn node_with_dev_dependencies_tree_with_one_leaf() { name : "node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![ @@ -322,7 +329,7 @@ fn node_with_dev_dependencies_tree_with_one_leaf() { name : "dev_sub_node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -337,7 +344,8 @@ node └─ dev_sub_node "#.trim(); - let actual = node.display_with_spacer( "" ).unwrap(); + let printer = TreePrinter::new( &node ); + let actual = printer.display_with_spacer( "" ).unwrap(); let actual = actual.trim(); println!("{actual}"); @@ -351,7 +359,7 @@ fn node_with_dependencies_tree_with_two_leaves() { name : "node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec! [ @@ -359,7 +367,7 @@ fn node_with_dependencies_tree_with_two_leaves() { name : "sub_node1".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -369,7 +377,7 @@ fn node_with_dependencies_tree_with_two_leaves() { name : "sub_node2".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -385,7 +393,8 @@ node └─ sub_node2 "#.trim(); - let actual = node.display_with_spacer( "" ).unwrap(); + let printer = TreePrinter::new( &node ); + let actual = printer.display_with_spacer( "" ).unwrap(); let actual = actual.trim(); println!("{actual}"); @@ -399,13 +408,13 @@ fn node_with_dependency_tree_with_one_leaf() { name : "node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![ ListNodeReport { name : "sub_node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -419,7 +428,8 @@ node └─ sub_node "#.trim(); - let actual = node.display_with_spacer( "" ).unwrap(); + let printer = TreePrinter::new( &node ); + let actual = printer.display_with_spacer( "" ).unwrap(); let actual = actual.trim(); println!("{actual}"); @@ -433,7 +443,7 @@ fn one_node_one_line() { name : "node".into(), version : None, - path : None, + crate_dir : None, duplicate : false, normal_dependencies : vec![], dev_dependencies : vec![], @@ -441,7 +451,8 @@ fn one_node_one_line() }; let expected = "node\n"; - let actual = node.display_with_spacer( "" ).unwrap(); + let printer = TreePrinter::new( &node ); + let actual = printer.display_with_spacer( "" ).unwrap(); println!("{actual}"); assert_eq!( expected, actual ); diff --git a/module/move/willbe/tests/inc/action/main_header.rs b/module/move/willbe/tests/inc/action_tests/main_header.rs 
similarity index 99% rename from module/move/willbe/tests/inc/action/main_header.rs rename to module/move/willbe/tests/inc/action_tests/main_header.rs index abc3e492d4..82f1b89fba 100644 --- a/module/move/willbe/tests/inc/action/main_header.rs +++ b/module/move/willbe/tests/inc/action_tests/main_header.rs @@ -3,7 +3,7 @@ use assert_fs::prelude::*; use the_module::action; use std::io::Read; -use willbe::_path::AbsolutePath; +use willbe::path::AbsolutePath; fn arrange( source : &str ) -> assert_fs::TempDir diff --git a/module/move/willbe/tests/inc/action/mod.rs b/module/move/willbe/tests/inc/action_tests/mod.rs similarity index 100% rename from module/move/willbe/tests/inc/action/mod.rs rename to module/move/willbe/tests/inc/action_tests/mod.rs diff --git a/module/move/willbe/tests/inc/action/readme_health_table_renew.rs b/module/move/willbe/tests/inc/action_tests/readme_health_table_renew.rs similarity index 100% rename from module/move/willbe/tests/inc/action/readme_health_table_renew.rs rename to module/move/willbe/tests/inc/action_tests/readme_health_table_renew.rs diff --git a/module/move/willbe/tests/inc/action/readme_modules_headers_renew.rs b/module/move/willbe/tests/inc/action_tests/readme_modules_headers_renew.rs similarity index 83% rename from module/move/willbe/tests/inc/action/readme_modules_headers_renew.rs rename to module/move/willbe/tests/inc/action_tests/readme_modules_headers_renew.rs index 8fe71df94a..db82f365ba 100644 --- a/module/move/willbe/tests/inc/action/readme_modules_headers_renew.rs +++ b/module/move/willbe/tests/inc/action_tests/readme_modules_headers_renew.rs @@ -1,8 +1,12 @@ use super::*; use assert_fs::prelude::*; -use the_module::action; use std::io::Read; -use willbe::_path::AbsolutePath; +use the_module:: +{ + action, + // path::AbsolutePath, + CrateDir, +}; fn arrange( source : &str ) -> assert_fs::TempDir { @@ -28,7 +32,7 @@ fn tags_should_stay() let temp = arrange( "single_module" ); // Act - _ = action::readme_modules_headers_renew( AbsolutePath::try_from( temp.path() ).unwrap() ).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); let mut file = std::fs::File::open( temp.path().join( "test_module" ).join( "Readme.md" ) ).unwrap(); let mut actual = String::new(); @@ -47,7 +51,7 @@ fn default_stability() let temp = arrange( "single_module" ); // Act - _ = action::readme_modules_headers_renew( AbsolutePath::try_from( temp.path() ).unwrap() ).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); let mut file = std::fs::File::open( temp.path().join( "test_module" ).join( "Readme.md" ) ).unwrap(); let mut actual = String::new(); @@ -66,7 +70,7 @@ fn docs() let temp = arrange( "single_module" ); // Act - _ = action::readme_modules_headers_renew( AbsolutePath::try_from( temp.path() ).unwrap() ).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); let mut file = std::fs::File::open( temp.path().join( "test_module" ).join( "Readme.md" ) ).unwrap(); let mut actual = String::new(); @@ -84,7 +88,7 @@ fn no_gitpod() let temp = arrange("single_module"); // Act - _ = action::readme_modules_headers_renew(AbsolutePath::try_from(temp.path()).unwrap()).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); let mut file = std::fs::File::open(temp.path().join("test_module").join("Readme.md")).unwrap(); let mut actual = String::new(); @@ -101,7 +105,7 @@ 
fn with_gitpod() let temp = arrange( "single_module_with_example" ); // Act - _ = action::readme_modules_headers_renew( AbsolutePath::try_from( temp.path() ).unwrap() ).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); let mut file = std::fs::File::open( temp.path().join( "module" ).join( "test_module" ).join( "Readme.md" ) ).unwrap(); let mut actual = String::new(); @@ -119,7 +123,7 @@ fn discord() let temp = arrange( "single_module" ); // Act - _ = action::readme_modules_headers_renew( AbsolutePath::try_from( temp.path() ).unwrap() ).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); let mut file = std::fs::File::open( temp.path().join( "test_module" ).join( "Readme.md" ) ).unwrap(); let mut actual = String::new(); @@ -137,7 +141,7 @@ fn status() let temp = arrange( "single_module" ); // Act - _ = action::readme_modules_headers_renew( AbsolutePath::try_from( temp.path() ).unwrap() ).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); let mut file = std::fs::File::open( temp.path().join( "test_module" ).join( "Readme.md" ) ).unwrap(); let mut actual = String::new(); @@ -155,13 +159,13 @@ fn idempotency() let temp = arrange( "single_module" ); // Act - _ = action::readme_modules_headers_renew( AbsolutePath::try_from( temp.path() ).unwrap() ).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); let mut file = std::fs::File::open( temp.path().join( "test_module" ).join( "Readme.md" ) ).unwrap(); let mut actual1 = String::new(); _ = file.read_to_string( &mut actual1 ).unwrap(); drop( file ); - _ = action::readme_modules_headers_renew( AbsolutePath::try_from( temp.path() ).unwrap() ).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); let mut file = std::fs::File::open( temp.path().join( "test_module" ).join( "Readme.md" ) ).unwrap(); let mut actual2 = String::new(); _ = file.read_to_string( &mut actual2 ).unwrap(); @@ -176,7 +180,7 @@ fn with_many_members_and_varius_config() { let temp = arrange( "three_packages" ); - _ = action::readme_modules_headers_renew( AbsolutePath::try_from( temp.path() ).unwrap() ).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); let mut file_b = std::fs::File::open( temp.path().join( "b" ).join( "Readme.md" ) ).unwrap(); let mut file_c = std::fs::File::open( temp.path().join( "c" ).join( "Readme.md" ) ).unwrap(); @@ -203,5 +207,5 @@ fn without_needed_config() let temp = arrange( "variadic_tag_configurations" ); // Act - _ = action::readme_modules_headers_renew( AbsolutePath::try_from( temp.path() ).unwrap() ).unwrap(); + _ = action::readme_modules_headers_renew( CrateDir::try_from( temp.path() ).unwrap() ).unwrap(); } diff --git a/module/move/willbe/tests/inc/action/test.rs b/module/move/willbe/tests/inc/action_tests/test.rs similarity index 68% rename from module/move/willbe/tests/inc/action/test.rs rename to module/move/willbe/tests/inc/action_tests/test.rs index 1233017b05..6d7d7898d9 100644 --- a/module/move/willbe/tests/inc/action/test.rs +++ b/module/move/willbe/tests/inc/action_tests/test.rs @@ -1,14 +1,26 @@ -use std::collections::BTreeSet; -use std::fs::{self, File }; -use std::io::Write; -use std::path::{ Path, PathBuf }; +use super::*; +use the_module::*; + +use inc::helper:: +{ + ProjectBuilder, + 
WorkspaceBuilder, + // BINARY_NAME, +}; + +use collection::BTreeSet; +// use std:: +// { +// fs::{ self, File }, +// io::Write, +// }; +// use path::{ Path, PathBuf }; use assert_fs::TempDir; -use crate::the_module::*; -use action::test::{test, TestsCommandOptions}; -use _path::AbsolutePath; +use action::test::{ test, TestsCommandOptions }; use channel::*; use optimization::*; +// qqq : for Petro : no asterisks import use willbe::test::TestVariant; #[ test ] @@ -218,109 +230,3 @@ fn backtrace_should_be() assert!( !no_features.clone().unwrap_err().out.contains( "RUST_BACKTRACE" ) ); assert!( no_features.clone().unwrap_err().out.contains( "stack backtrace" ) ); } - -#[ derive( Debug ) ] -pub struct ProjectBuilder -{ - name : String, - lib_content : Option< String >, - test_content : Option< String >, - toml_content : Option< String >, -} - -impl ProjectBuilder -{ - pub fn new( name : &str ) -> Self - { - Self - { - name : String::from( name ), - lib_content : None, - test_content : None, - toml_content : None, - } - } - - pub fn lib_file< S : Into< String > >( mut self, content : S ) -> Self - { - self.lib_content = Some( content.into() ); - self - } - - pub fn test_file< S : Into< String > >( mut self, content : S ) -> Self - { - self.test_content = Some( content.into() ); - self - } - - pub fn toml_file( mut self, content : &str ) -> Self - { - self.toml_content = Some( format!( "[package]\nname = \"{}\"\nversion = \"0.1.0\"\nedition = \"2021\"\n{}", self.name, content ) ); - self - } - - pub fn build< P : AsRef< Path > >( &self, path : P ) -> std::io::Result< PathBuf > - { - let project_path = path.as_ref(); - - fs::create_dir_all( project_path.join( "src" ) )?; - fs::create_dir_all( project_path.join( "tests" ) )?; - - if let Some( content ) = &self.toml_content - { - let mut file = File::create( project_path.join( "Cargo.toml" ) )?; - write!( file, "{}", content )?; - } - - let mut file = File::create( project_path.join( "src/lib.rs" ) )?; - if let Some( content ) = &self.lib_content - { - write!( file, "{}", content )?; - } - - if let Some( content ) = &self.test_content - { - let mut file = File::create( project_path.join( "tests/tests.rs" ) )?; - write!( file, "{}", content )?; - } - - Ok( project_path.to_path_buf() ) - } -} - -struct WorkspaceBuilder -{ - members : Vec< ProjectBuilder >, - toml_content : String, -} - -impl WorkspaceBuilder -{ - fn new() -> Self - { - Self - { - members : vec![], - toml_content : "[workspace]\nresolver = \"2\"\nmembers = [\n \"modules/*\",\n]\n".to_string(), - } - } - - fn member( mut self, project : ProjectBuilder ) -> Self - { - self.members.push( project ); - self - } - - fn build< P : AsRef< Path > >( self, path : P ) -> PathBuf - { - let project_path = path.as_ref(); - fs::create_dir_all( project_path.join( "modules" ) ).unwrap(); - let mut file = File::create( project_path.join( "Cargo.toml" ) ).unwrap(); - write!( file, "{}", self.toml_content ).unwrap(); - for member in self.members - { - member.build( project_path.join( "modules" ).join( &member.name ) ).unwrap(); - } - project_path.into() - } -} diff --git a/module/move/willbe/tests/inc/action/workspace_renew.rs b/module/move/willbe/tests/inc/action_tests/workspace_renew.rs similarity index 100% rename from module/move/willbe/tests/inc/action/workspace_renew.rs rename to module/move/willbe/tests/inc/action_tests/workspace_renew.rs diff --git a/module/move/willbe/tests/inc/command/tests_run.rs b/module/move/willbe/tests/inc/command/tests_run.rs index c540c285d5..67f6b97c9c 100644 ---
a/module/move/willbe/tests/inc/command/tests_run.rs +++ b/module/move/willbe/tests/inc/command/tests_run.rs @@ -1,12 +1,11 @@ use super::*; + // use the_module::*; use assert_cmd::Command; -use inc:: +use inc::helper:: { - action::test::ProjectBuilder, - // aaa : for Petro : move to helper. don't reuse test-rs files in command and endpoints - // aaa : move to helper module - helpers::BINARY_NAME, + ProjectBuilder, + BINARY_NAME, }; use assert_fs::TempDir; diff --git a/module/move/willbe/tests/inc/entity/dependencies.rs b/module/move/willbe/tests/inc/entity/dependencies.rs index 28f393d4f5..bf6e0eca94 100644 --- a/module/move/willbe/tests/inc/entity/dependencies.rs +++ b/module/move/willbe/tests/inc/entity/dependencies.rs @@ -2,29 +2,32 @@ use super::*; use assert_fs::prelude::*; use assert_fs::TempDir; -use the_module::Workspace; -use the_module::package::{ dependencies, DependenciesOptions, DependenciesSort }; -use willbe::CrateDir; -use willbe::package::Package; -use willbe::_path::AbsolutePath; +use the_module:: +{ + Workspace, + dependency::{ self, DependenciesOptions, DependenciesSort }, + CrateDir, + package::Package, + path::AbsolutePath, +}; // fn arrange( asset_name : &str ) -> ( TempDir, Workspace ) { - let path = CrateDir::try_from( AbsolutePath::try_from( std::path::Path::new( env!( "CARGO_MANIFEST_DIR" ) ) ).unwrap() ).unwrap(); - let mut metadata = Workspace::with_crate_dir( path ).unwrap(); + let path = CrateDir::try_from( std::path::Path::new( env!( "CARGO_MANIFEST_DIR" ) ) ).unwrap(); + let workspace = Workspace::try_from( path ).unwrap(); - let root_path = metadata.load().unwrap().workspace_root().unwrap(); + let root_path = workspace.workspace_root(); let assets_relative_path = std::path::Path::new( ASSET_PATH ); let assets_path = root_path.join( "module" ).join( "move" ).join( "willbe" ).join( assets_relative_path ); let temp = TempDir::new().unwrap(); temp.copy_from( assets_path.join( asset_name ), &[ "**" ] ).unwrap(); let temp_crate_dir = CrateDir::try_from( AbsolutePath::try_from( temp.to_path_buf() ).unwrap() ).unwrap(); - let metadata = Workspace::with_crate_dir( temp_crate_dir ).unwrap(); + let workspace = Workspace::try_from( temp_crate_dir ).unwrap(); - ( temp, metadata ) + ( temp, workspace ) } // a -> b -> c @@ -32,26 +35,36 @@ fn arrange( asset_name : &str ) -> ( TempDir, Workspace ) fn chain_of_three_packages() { // Arrange - let ( temp, mut metadata ) = arrange( "chain_of_packages" ); + let ( temp, mut workspace ) = arrange( "chain_of_packages" ); - let a = Package::try_from( AbsolutePath::try_from( temp.join( "a" ) ).unwrap() ).unwrap(); - let b = Package::try_from( AbsolutePath::try_from( temp.join( "b" ) ).unwrap() ).unwrap(); - let c = Package::try_from( AbsolutePath::try_from( temp.join( "c" ) ).unwrap() ).unwrap(); + let a = Package::try_from( willbe::CrateDir::try_from( temp.join( "a" ) ).unwrap() ).unwrap(); + let b = Package::try_from( willbe::CrateDir::try_from( temp.join( "b" ) ).unwrap() ).unwrap(); + let c = Package::try_from( willbe::CrateDir::try_from( temp.join( "c" ) ).unwrap() ).unwrap(); // Act - let output = dependencies( &mut metadata, &a, DependenciesOptions::default() ).unwrap(); - let output : Vec< _ > = output.iter().filter_map( | o | o.path.as_ref() ).map( | x | x.as_ref() ).collect(); + let output = dependency::list( &mut workspace, &a, DependenciesOptions::default() ).unwrap(); + let output : Vec< CrateDir > = output + .into_iter() + .filter_map( | p | p.crate_dir ) + .collect(); // Assert assert_eq!( 2, output.len() ); - 
assert!( ( c.crate_dir().as_ref() == output[ 0 ] && b.crate_dir().as_ref() == output[ 1 ] ) || ( c.crate_dir().as_ref() == output[ 1 ] && b.crate_dir().as_ref() == output[ 0 ] ) ); - - let output = dependencies( &mut metadata, &b, DependenciesOptions::default() ).unwrap(); - let output : Vec< _ > = output.iter().filter_map( | o | o.path.as_ref() ).map( | x | x.as_ref() ).collect(); + assert! + ( + ( c.crate_dir() == output[ 0 ] && b.crate_dir() == output[ 1 ] ) || + ( c.crate_dir() == output[ 1 ] && b.crate_dir() == output[ 0 ] ), + ); + + let output = dependency::list( &mut workspace, &b, DependenciesOptions::default() ).unwrap(); + let output : Vec< CrateDir > = output + .into_iter() + .filter_map( | p | p.crate_dir ) + .collect(); assert_eq!( 1, output.len() ); - assert_eq!( c.crate_dir().as_ref(), output[ 0 ] ); + assert_eq!( c.crate_dir(), output[ 0 ] ); - let output = dependencies( &mut metadata, &c, DependenciesOptions::default() ).unwrap(); + let output = dependency::list( &mut workspace, &c, DependenciesOptions::default() ).unwrap(); assert!( output.is_empty() ); } @@ -60,24 +73,35 @@ fn chain_of_three_packages() fn chain_of_three_packages_topologically_sorted() { // Arrange - let ( temp, mut metadata ) = arrange( "chain_of_packages" ); + let ( temp, mut workspace ) = arrange( "chain_of_packages" ); - let a = Package::try_from( AbsolutePath::try_from( temp.join( "a" ) ).unwrap() ).unwrap(); - let b = Package::try_from( AbsolutePath::try_from( temp.join( "b" ) ).unwrap() ).unwrap(); - let c = Package::try_from( AbsolutePath::try_from( temp.join( "c" ) ).unwrap() ).unwrap(); + let a = Package::try_from( willbe::CrateDir::try_from( temp.join( "a" ) ).unwrap() ).unwrap(); + let b = Package::try_from( willbe::CrateDir::try_from( temp.join( "b" ) ).unwrap() ).unwrap(); + let c = Package::try_from( willbe::CrateDir::try_from( temp.join( "c" ) ).unwrap() ).unwrap(); // Act - let output = dependencies( &mut metadata, &a, DependenciesOptions { sort : DependenciesSort::Topological, ..Default::default() } ).unwrap(); - let output : Vec< _ > = output.iter().filter_map( | o | o.path.as_ref() ).map( | x | x.as_ref() ).collect(); + let output = dependency::list + ( + &mut workspace, + &a, + DependenciesOptions { sort : DependenciesSort::Topological, ..Default::default() }, + ).unwrap(); + let output : Vec< CrateDir > = output + .into_iter() + .filter_map( | p | p.crate_dir ) + .collect(); // Assert - assert_eq!( &[ c.crate_dir().as_ref(), b.crate_dir().as_ref() ], output.as_slice() ); + assert_eq!( &[ c.crate_dir(), b.crate_dir() ], output.as_slice() ); - let output = dependencies( &mut metadata, &b, DependenciesOptions { sort : DependenciesSort::Topological, ..Default::default() } ).unwrap(); - let output : Vec< _ > = output.iter().filter_map( | o | o.path.as_ref() ).map( | x | x.as_ref() ).collect(); - assert_eq!( &[ c.crate_dir().as_ref() ], output.as_slice() ); + let output = dependency::list( &mut workspace, &b, DependenciesOptions { sort : DependenciesSort::Topological, ..Default::default() } ).unwrap(); + let output : Vec< CrateDir > = output + .into_iter() + .filter_map( | p | p.crate_dir ) + .collect(); + assert_eq!( &[ c.crate_dir() ], output.as_slice() ); - let output = dependencies( &mut metadata, &c, DependenciesOptions { sort : DependenciesSort::Topological, ..Default::default() } ).unwrap(); + let output = dependency::list( &mut workspace, &c, DependenciesOptions { sort : DependenciesSort::Topological, ..Default::default() } ).unwrap(); assert!( output.is_empty() ); } @@ -86,18 
+110,21 @@ fn chain_of_three_packages_topologically_sorted() fn package_with_remote_dependency() { // Arrange - let ( temp, mut metadata ) = arrange( "package_with_remote_dependency" ); + let ( temp, mut workspace ) = arrange( "package_with_remote_dependency" ); - let a = Package::try_from( AbsolutePath::try_from( temp.join( "a" ) ).unwrap() ).unwrap(); - let b = Package::try_from( AbsolutePath::try_from( temp.join( "b" ) ).unwrap() ).unwrap(); + let a = Package::try_from( willbe::CrateDir::try_from( temp.join( "a" ) ).unwrap() ).unwrap(); + let b = Package::try_from( willbe::CrateDir::try_from( temp.join( "b" ) ).unwrap() ).unwrap(); // Act - let output = dependencies( &mut metadata, &a, DependenciesOptions::default() ).unwrap(); - let output : Vec< _ > = output.iter().filter_map( | o | o.path.as_ref() ).map( | x | x.as_ref() ).collect(); + let output = dependency::list( &mut workspace, &a, DependenciesOptions::default() ).unwrap(); + let output : Vec< CrateDir > = output + .into_iter() + .filter_map( | p | p.crate_dir ) + .collect(); // Assert assert_eq!( 1, output.len() ); - assert_eq!( b.crate_dir().as_ref(), output[ 0 ] ); + assert_eq!( b.crate_dir(), output[ 0 ] ); } // a -> b -> a @@ -105,24 +132,30 @@ fn package_with_remote_dependency() fn workspace_with_cyclic_dependency() { // Arrange - let ( temp, mut metadata ) = arrange( "workspace_with_cyclic_dependency" ); + let ( temp, mut workspace ) = arrange( "workspace_with_cyclic_dependency" ); - let a = Package::try_from( AbsolutePath::try_from( temp.join( "a" ) ).unwrap() ).unwrap(); - let b = Package::try_from( AbsolutePath::try_from( temp.join( "b" ) ).unwrap() ).unwrap(); + let a = Package::try_from( willbe::CrateDir::try_from( temp.join( "a" ) ).unwrap() ).unwrap(); + let b = Package::try_from( willbe::CrateDir::try_from( temp.join( "b" ) ).unwrap() ).unwrap(); // Act - let output = dependencies( &mut metadata, &a, DependenciesOptions::default() ).unwrap(); - let output : Vec< _ > = output.iter().filter_map( | o | o.path.as_ref() ).map( | x | x.as_ref() ).collect(); + let output = dependency::list( &mut workspace, &a, DependenciesOptions::default() ).unwrap(); + let output : Vec< CrateDir > = output + .into_iter() + .filter_map( | p | p.crate_dir ) + .collect(); // Assert assert_eq!( 1, output.len() ); - assert!( b.crate_dir().as_ref() == output[ 0 ] ); + assert!( b.crate_dir() == output[ 0 ] ); // Act - let output = dependencies( &mut metadata, &b, DependenciesOptions::default() ).unwrap(); - let output : Vec< _ > = output.iter().filter_map( | o | o.path.as_ref() ).map( | x | x.as_ref() ).collect(); + let output = dependency::list( &mut workspace, &b, DependenciesOptions::default() ).unwrap(); + let output : Vec< CrateDir > = output + .into_iter() + .filter_map( | p | p.crate_dir ) + .collect(); // Assert assert_eq!( 1, output.len() ); - assert!( a.crate_dir().as_ref() == output[ 0 ] ); + assert!( a.crate_dir() == output[ 0 ] ); } \ No newline at end of file diff --git a/module/move/willbe/tests/inc/entity/diff.rs b/module/move/willbe/tests/inc/entity/diff.rs index b3362ee64d..9c84aa6cc1 100644 --- a/module/move/willbe/tests/inc/entity/diff.rs +++ b/module/move/willbe/tests/inc/entity/diff.rs @@ -1,13 +1,12 @@ use crate::*; +use the_module::*; use std::path::{ Path, PathBuf }; use assert_fs::{ TempDir, prelude::* }; use crates_tools::CrateArchive; -use the_module::*; -use _path::AbsolutePath; use package::Package; use diff::crate_diff; -use the_module::version::{ Version, BumpOptions, version_bump }; +use the_module::version::{ 
Version, BumpOptions, bump }; const TEST_MODULE_PATH : &str = "../../test/"; @@ -16,17 +15,17 @@ fn no_changes() { let tmp = &TempDir::new().unwrap(); let package_path = package_path( "c" ); - + let left = prepare( tmp, "left", &package_path ); let left_crate = crate_file_path( &left ); let left_archive = CrateArchive::read( &left_crate ).unwrap(); - + let right = prepare( tmp, "right", &package_path ); let right_crate = crate_file_path( &right ); let right_archive = CrateArchive::read( &right_crate ).unwrap(); - + let has_changes = crate_diff( &left_archive, &right_archive ).exclude( diff::PUBLISH_IGNORE_LIST ).has_changes(); - + assert!( !has_changes ); } @@ -47,7 +46,8 @@ fn with_changes() { let right = prepare( tmp, "right", &package_path ); - let absolute = AbsolutePath::try_from( right.as_path() ).unwrap(); + // let absolute = AbsolutePath::try_from( right.as_path() ).unwrap(); + let absolute = CrateDir::try_from( right.as_path() ).unwrap(); let right_package = Package::try_from( absolute ).unwrap(); let right_version = Version::try_from( &right_package.version().unwrap() ).unwrap(); @@ -59,7 +59,7 @@ fn with_changes() dependencies : vec![], dry : false, }; - version_bump( bump_options ).unwrap(); + bump( bump_options ).unwrap(); let right_crate = crate_file_path( &right ); CrateArchive::read( &right_crate ).unwrap() @@ -89,10 +89,11 @@ fn crate_file_path( manifest_dir_path : &Path ) -> PathBuf { _ = cargo::pack( cargo::PackOptions::former().path( manifest_dir_path ).dry( false ).form() ).expect( "Failed to package a package" ); - let absolute = AbsolutePath::try_from( manifest_dir_path ).unwrap(); + let absolute = CrateDir::try_from( manifest_dir_path ).unwrap(); let package = Package::try_from( absolute ).unwrap(); manifest_dir_path - .join( "target" ) - .join( "package" ) - .join( format!( "{}-{}.crate", package.name().unwrap(), package.version().unwrap() ) ) + .join( "target" ) + .join( "package" ) + .join( format!( "{}-{}.crate", package.name().unwrap(), package.version().unwrap() ) ) + } diff --git a/module/move/willbe/tests/inc/entity/features.rs b/module/move/willbe/tests/inc/entity/features.rs index 0eaf8e7c75..14cd845879 100644 --- a/module/move/willbe/tests/inc/entity/features.rs +++ b/module/move/willbe/tests/inc/entity/features.rs @@ -1,15 +1,15 @@ use super::*; -// use the_module::*; -use the_module::features::features_powerset; - -use std::collections::HashMap; +use the_module:: +{ + features::{ features_powerset, estimate_with }, + collection::HashMap, +}; use serde::Deserialize; -use the_module::workspace::WorkspacePackage; -use willbe::features::estimate_with; /// Constructs a mock `Package` with specified features for testing. 
-fn mock_package( features : Vec< ( &str, Vec< &str > ) > ) -> WorkspacePackage +// fn mock_package( features : Vec< ( &str, Vec< &str > ) > ) -> WorkspacePackageRef< '_ > +fn mock_package( features : Vec< ( &str, Vec< &str > ) > ) -> cargo_metadata::Package { let mut features_map : HashMap< String, Vec< _ > > = HashMap::new(); for ( feature, deps ) in features @@ -34,7 +34,7 @@ fn mock_package( features : Vec< ( &str, Vec< &str > ) > ) -> WorkspacePackage } ); - WorkspacePackage::deserialize( json ).unwrap() + cargo_metadata::Package::deserialize( json ).unwrap() } #[ test ] @@ -57,7 +57,7 @@ fn case_1() let enabled_features = vec![ "f2".to_string() ]; let result = features_powerset ( - &package, + ( &package ).into(), power, &exclude_features, &include_features, @@ -93,7 +93,7 @@ fn case_2() let enabled_features = vec![ "f2".to_string() ]; let result = features_powerset ( - &package, + ( &package ).into(), power, &exclude_features, &include_features, @@ -130,7 +130,7 @@ fn case_3() let enabled_features = vec![ "f2".to_string() ]; let result = features_powerset ( - &package, + ( &package ).into(), power, &exclude_features, &include_features, @@ -167,7 +167,7 @@ fn case_4() let enabled_features = vec![ "f2".to_string() ]; let result = features_powerset ( - &package, + ( &package ).into(), power, &exclude_features, &include_features, @@ -205,7 +205,7 @@ fn case_5() let enabled_features = vec![ "f2".to_string() ]; let result = features_powerset ( - &package, + ( &package ).into(), power, &exclude_features, &include_features, @@ -240,7 +240,7 @@ fn case_6() let enabled_features = vec![ "f2".to_string() ]; let result = features_powerset ( - &package, + ( &package ).into(), power, &exclude_features, &include_features, diff --git a/module/move/willbe/tests/inc/entity/version.rs b/module/move/willbe/tests/inc/entity/version.rs index 6ae36602ce..3251fce27c 100644 --- a/module/move/willbe/tests/inc/entity/version.rs +++ b/module/move/willbe/tests/inc/entity/version.rs @@ -9,9 +9,9 @@ use the_module:: CrateDir, Manifest, version::Version, - _path::AbsolutePath, + path::AbsolutePath, package::Package, - version::{ BumpOptions, version_bump, version_revert }, + version::{ BumpOptions, bump, revert }, }; const TEST_MODULE_PATH : &str = "../../test/"; @@ -110,14 +110,15 @@ fn package_version_bump() std::fs::create_dir( &temp_module ).unwrap(); temp_module.child( "c" ).copy_from( &c, &[ "**" ] ).unwrap(); let c_temp_path = temp_module.join( "c" ); - let c_temp_absolute_path = AbsolutePath::try_from( c_temp_path ).unwrap(); + let c_temp_absolute_path = CrateDir::try_from( c_temp_path ).unwrap(); let c_temp_crate_dir = CrateDir::try_from( c_temp_absolute_path.clone() ).unwrap(); - let c_package = Package::try_from( c_temp_absolute_path.clone() ).unwrap(); + let c_package = Package::try_from( c_temp_crate_dir.clone() ).unwrap(); let version = c_package.version().unwrap(); let root_manifest_path = temp.join( "Cargo.toml" ); let mut cargo_toml = std::fs::File::create( &root_manifest_path ).unwrap(); - let root_manifest_absolute_path = AbsolutePath::try_from( root_manifest_path.as_path() ).unwrap(); + // let root_manifest_absolute_path = AbsolutePath::try_from( root_manifest_path.as_path() ).unwrap(); + let root_manifest_dir_absolute_path = CrateDir::try_from( root_manifest_path.as_path().parent().unwrap() ).unwrap(); write!( cargo_toml, r#" [workspace] resolver = "2" @@ -131,17 +132,17 @@ default-features = true "# ).unwrap(); let version = Version::try_from( &version ).unwrap(); let bumped_version = 
version.clone().bump(); - + // Act let options = BumpOptions { - crate_dir : c_temp_crate_dir, + crate_dir : c_temp_crate_dir.clone(), old_version : version.clone(), new_version : bumped_version.clone(), - dependencies : vec![ CrateDir::try_from( root_manifest_absolute_path.parent().unwrap() ).unwrap() ], + dependencies : vec![ root_manifest_dir_absolute_path.clone() ], dry : false, }; - let bump_report = version_bump( options ).unwrap(); + let bump_report = bump( options ).unwrap(); // Assert assert_eq!( Some( version.to_string() ), bump_report.old_version ); @@ -149,7 +150,8 @@ default-features = true assert_eq! ( { - let mut v = vec![ root_manifest_absolute_path.clone(), c_temp_absolute_path.join( "Cargo.toml" ) ]; + // let mut v = vec![ root_manifest_absolute_path.clone(), c_temp_absolute_path.join( "Cargo.toml" ) ]; + let mut v = vec![ root_manifest_dir_absolute_path.clone().manifest_file(), c_temp_absolute_path.manifest_file() ]; v.sort(); v }, @@ -159,11 +161,11 @@ default-features = true v } ); - let c_package = Package::try_from( c_temp_absolute_path.clone() ).unwrap(); + let c_package = Package::try_from( c_temp_crate_dir.clone() ).unwrap(); let name = c_package.name().unwrap(); assert_eq!( bumped_version.to_string(), c_package.version().unwrap() ); - let mut root_manifest = Manifest::try_from( root_manifest_absolute_path ).unwrap(); - root_manifest.load().unwrap(); + let mut root_manifest = Manifest::try_from( root_manifest_dir_absolute_path ).unwrap(); + // root_manifest.load().unwrap(); let data = root_manifest.data(); let current_version_item = data.get( "workspace" ).and_then( | w | w.get( "dependencies" ) ).and_then( | d | d.get( &name ) ).and_then( | p | p.get( "version" ) ).unwrap(); let current_version = current_version_item.as_str().unwrap(); @@ -182,12 +184,12 @@ fn package_version_bump_revert() let c_temp_path = temp_module.join( "c" ); let c_temp_absolute_path = AbsolutePath::try_from( c_temp_path ).unwrap(); let c_temp_crate_dir = CrateDir::try_from( c_temp_absolute_path.clone() ).unwrap(); - let c_package = Package::try_from( c_temp_absolute_path.clone() ).unwrap(); + let c_package = Package::try_from( c_temp_crate_dir.clone() ).unwrap(); let version = c_package.version().unwrap(); let root_manifest_path = temp.join( "Cargo.toml" ); let mut cargo_toml = std::fs::File::create( &root_manifest_path ).unwrap(); - let root_manifest_absolute_path = AbsolutePath::try_from( root_manifest_path.as_path() ).unwrap(); + let root_manifest_dir_absolute_path = CrateDir::try_from( root_manifest_path.as_path().parent().unwrap() ).unwrap(); write!( cargo_toml, r#" [workspace] resolver = "2" @@ -205,21 +207,21 @@ default-features = true // Act let options = BumpOptions { - crate_dir : c_temp_crate_dir, + crate_dir : c_temp_crate_dir.clone(), old_version : version.clone(), new_version : bumped_version.clone(), - dependencies : vec![ CrateDir::try_from( root_manifest_absolute_path.parent().unwrap() ).unwrap() ], + dependencies : vec![ root_manifest_dir_absolute_path.clone() ], dry : false, }; - let bump_report = version_bump( options ).unwrap(); - version_revert( &bump_report ).unwrap(); + let bump_report = bump( options ).unwrap(); + revert( &bump_report ).unwrap(); // Assert - let c_package = Package::try_from( c_temp_absolute_path.clone() ).unwrap(); + let c_package = Package::try_from( c_temp_crate_dir.clone() ).unwrap(); let name = c_package.name().unwrap(); assert_eq!( version.to_string(), c_package.version().unwrap() ); - let mut root_manifest = Manifest::try_from( 
root_manifest_absolute_path ).unwrap(); - root_manifest.load().unwrap(); + let mut root_manifest = Manifest::try_from( root_manifest_dir_absolute_path ).unwrap(); + // root_manifest.load().unwrap(); let data = root_manifest.data(); let current_version_item = data.get( "workspace" ).and_then( | w | w.get( "dependencies" ) ).and_then( | d | d.get( &name ) ).and_then( | p | p.get( "version" ) ).unwrap(); let current_version = current_version_item.as_str().unwrap(); diff --git a/module/move/willbe/tests/inc/helper.rs b/module/move/willbe/tests/inc/helper.rs new file mode 100644 index 0000000000..2efa341f9d --- /dev/null +++ b/module/move/willbe/tests/inc/helper.rs @@ -0,0 +1,116 @@ +use super::*; +use the_module::*; +use path::{ Path, PathBuf }; +use std:: +{ + fs::{ self, File }, + io::Write, +}; + +pub const BINARY_NAME : &'static str = "will"; + +#[ derive( Debug ) ] +pub struct ProjectBuilder +{ + name : String, + lib_content : Option< String >, + test_content : Option< String >, + toml_content : Option< String >, +} + +impl ProjectBuilder +{ + pub fn new( name : &str ) -> Self + { + Self + { + name : String::from( name ), + lib_content : None, + test_content : None, + toml_content : None, + } + } + + pub fn lib_file< S : Into< String > >( mut self, content : S ) -> Self + { + self.lib_content = Some( content.into() ); + self + } + + pub fn test_file< S : Into< String > >( mut self, content : S ) -> Self + { + self.test_content = Some( content.into() ); + self + } + + pub fn toml_file( mut self, content : &str ) -> Self + { + self.toml_content = Some( format!( "[package]\nname = \"{}\"\nversion = \"0.1.0\"\nedition = \"2021\"\n{}", self.name, content ) ); + self + } + + pub fn build< P : AsRef< Path > >( &self, path : P ) -> std::io::Result< PathBuf > + { + let project_path = path.as_ref(); + + fs::create_dir_all( project_path.join( "src" ) )?; + fs::create_dir_all( project_path.join( "tests" ) )?; + + if let Some( content ) = &self.toml_content + { + let mut file = File::create( project_path.join( "Cargo.toml" ) )?; + write!( file, "{}", content )?; + } + + let mut file = File::create( project_path.join( "src/lib.rs" ) )?; + if let Some( content ) = &self.lib_content + { + write!( file, "{}", content )?; + } + + if let Some( content ) = &self.test_content + { + let mut file = File::create( project_path.join( "tests/tests.rs" ) )?; + write!( file, "{}", content )?; + } + + Ok( project_path.to_path_buf() ) + } +} + +pub struct WorkspaceBuilder +{ + pub members : Vec< ProjectBuilder >, + pub toml_content : String, +} + +impl WorkspaceBuilder +{ + pub fn new() -> Self + { + Self + { + members : vec![], + toml_content : "[workspace]\nresolver = \"2\"\nmembers = [\n \"modules/*\",\n]\n".to_string(), + } + } + + pub fn member( mut self, project : ProjectBuilder ) -> Self + { + self.members.push( project ); + self + } + + pub fn build< P : AsRef< Path > >( self, path : P ) -> PathBuf + { + let project_path = path.as_ref(); + fs::create_dir_all( project_path.join( "modules" ) ).unwrap(); + let mut file = File::create( project_path.join( "Cargo.toml" ) ).unwrap(); + write!( file, "{}", self.toml_content ).unwrap(); + for member in self.members + { + member.build( project_path.join( "modules" ).join( &member.name ) ).unwrap(); + } + project_path.into() + } +} diff --git a/module/move/willbe/tests/inc/helpers.rs b/module/move/willbe/tests/inc/helpers.rs deleted file mode 100644 index c2c1ded1d5..0000000000 --- a/module/move/willbe/tests/inc/helpers.rs +++ /dev/null @@ -1 +0,0 @@ -pub const 
BINARY_NAME : &'static str = "will"; diff --git a/module/move/willbe/tests/inc/mod.rs b/module/move/willbe/tests/inc/mod.rs index b7a5680237..ca9dbda05d 100644 --- a/module/move/willbe/tests/inc/mod.rs +++ b/module/move/willbe/tests/inc/mod.rs @@ -1,10 +1,22 @@ use super::*; -mod action; -mod package; -mod command; +/// Entities of which a workspace consists. mod entity; + +/// General-purpose tools which might be moved out one day. mod tool; -mod helpers; -// qqq : for Petro : for Bohdan : sort out test files to be consistent with src files +/// Describes CLI commands. +mod command; + +/// Describes functions that can be called from an interface. +mod action_tests; + +mod helper; + +// aaa : for Petro : for Bohdan : for Nikita : sort out test files to be consistent with src files +// sorted + +// qqq : for Mykyta: to avoid name collisions add postfix _test for all dirs and files in dir `inc` +// query.rs -> query_test.rs + diff --git a/module/move/willbe/tests/inc/package.rs b/module/move/willbe/tests/inc/package.rs index 935069b5e6..8a5fb2a2f0 100644 --- a/module/move/willbe/tests/inc/package.rs +++ b/module/move/willbe/tests/inc/package.rs @@ -2,11 +2,11 @@ // use the_module:: // { // Workspace, -// _path::AbsolutePath, +// path::AbsolutePath, // package::PublishPlan, // }; // use willbe::package::perform_packages_publish; -// +// // #[ test ] // fn plan_publish_many_packages() // { @@ -27,7 +27,7 @@ // // ), // // dry: true, // // }, -// // version_bump: BumpOptions { +// // bump: BumpOptions { // // crate_dir: CrateDir( // // AbsolutePath( // // ".../wTools/module/move/wca", @@ -170,3 +170,5 @@ // // ) // panic!() // } + +// qqq : for Bohdan : fix the test diff --git a/module/move/willbe/tests/inc/tool/graph.rs b/module/move/willbe/tests/inc/tool/graph.rs deleted file mode 100644 index 28c08d7b75..0000000000 --- a/module/move/willbe/tests/inc/tool/graph.rs +++ /dev/null @@ -1,217 +0,0 @@ -mod toposort -{ - use crate::the_module::*; - use graph::toposort; - use std::collections::HashMap; - use petgraph::Graph; - use willbe::graph::topological_sort_with_grouping; - - struct IndexMap< T >( HashMap< T, usize > ); - - impl< T > IndexMap< T > - where - T : std::hash::Hash + Eq, - { - pub fn new( elements : Vec< T > ) -> Self - { - let index_map = elements.into_iter().enumerate().map( |( index, value )| ( value, index ) ).collect(); - Self( index_map ) - } - - pub fn position( &self, element : &T ) -> usize - { - self.0[ element ] - } - } - - #[ test ] - fn no_dependency() - { - let mut graph = Graph::new(); - - let _node1 = graph.add_node( &"A" ); - let _node2 = graph.add_node( &"B" ); - - let sorted = toposort( graph ).unwrap(); - - let index_map = IndexMap::new( sorted ); - let node1_position = index_map.position( &"A" ); - let node2_position = index_map.position( &"B" ); - - assert!( node1_position < node2_position ); - } - - #[ test ] - fn a_depends_on_b() - { - let mut graph = Graph::new(); - - let node1 = graph.add_node( &"A" ); - let node2 = graph.add_node( &"B" ); - - graph.add_edge( node1, node2, &"" ); - - let sorted = toposort( graph ).unwrap(); - - let index_map = IndexMap::new( sorted ); - let node1_position = index_map.position( &"A" ); - let node2_position = index_map.position( &"B" ); - - assert!( node1_position > node2_position ); - } - - #[ test ] - fn multiple_dependencies() - { - let mut graph = Graph::new(); - - let a = graph.add_node( &"A" ); - let b = graph.add_node( &"B" ); - let c = graph.add_node( &"C" ); - - graph.add_edge( a, b, &"" ); - graph.add_edge( a, c, &""
); - - let sorted = toposort( graph ).unwrap(); - - let index_map = IndexMap::new( sorted ); - let a_position = index_map.position( &"A" ); - let b_position = index_map.position( &"B" ); - let c_position = index_map.position( &"C" ); - - assert!( a_position > b_position ); - assert!( a_position > c_position ); - } - - #[ test ] - fn transitive_dependencies() - { - let mut graph = Graph::new(); - - let a = graph.add_node( &"A" ); - let b = graph.add_node( &"B" ); - let c = graph.add_node( &"C" ); - - graph.add_edge( a, b, &"" ); - graph.add_edge( b, c, &"" ); - - let sorted = toposort( graph ).unwrap(); - - let index_map = IndexMap::new( sorted ); - let a_position = index_map.position( &"A" ); - let b_position = index_map.position( &"B" ); - let c_position = index_map.position( &"C" ); - - assert!( a_position > b_position ); - assert!( b_position > c_position ); - } - - #[ test ] - #[ should_panic( expected = "Cycle" ) ] - fn cycle() - { - let mut graph = Graph::new(); - - let node1 = graph.add_node( &"A" ); - let node2 = graph.add_node( &"B" ); - - graph.add_edge( node1, node2, &"" ); - graph.add_edge( node2, node1, &"" ); - - let _sorted = toposort( graph ).unwrap(); - } - - // input - // B -> A - // C -> A - // output - // [A], [B,C] - #[ test ] - fn simple_case() - { - let mut graph = Graph::new(); - - let a_node = graph.add_node( &"A" ); - let b_node = graph.add_node( &"B" ); - let c_node = graph.add_node( &"C" ); - - graph.add_edge( b_node, a_node, &"B->A"); - graph.add_edge( c_node, a_node, &"C->A"); - - let groups = topological_sort_with_grouping( graph ); - - assert_eq!( groups[ 0 ], vec![ "A" ] ); - assert_eq!( groups[1].len(), 2 ); - assert!( groups[ 1 ].contains( &"C" ) ); - assert!( groups[ 1 ].contains( &"B" ) ); - } - - // input - // digraph { - // 0 [ label = "0" ] - // 1 [ label = "1" ] - // 2 [ label = "2" ] - // 3 [ label = "3" ] - // 4 [ label = "4" ] - // 5 [ label = "5" ] - // 6 [ label = "6" ] - // 7 [ label = "7" ] - // 4 -> 0 [ label = "" ] - // 5 -> 0 [ label = "" ] - // 6 -> 0 [ label = "" ] - // 1 -> 3 [ label = "" ] - // 2 -> 3 [ label = "" ] - // 7 -> 6 [ label = "" ] - // 3 -> 4 [ label = "" ] - // 3 -> 5 [ label = "" ] - // 3 -> 6 [ label = "" ] - // } - // visualization : https://viz-js.com/?dot=ZGlncmFwaCB7CiAgICAwIFsgbGFiZWwgPSAiMCIgXQogICAgMSBbIGxhYmVsID0gIjEiIF0KICAgIDIgWyBsYWJlbCA9ICIyIiBdCiAgICAzIFsgbGFiZWwgPSAiMyIgXQogICAgNCBbIGxhYmVsID0gIjQiIF0KICAgIDUgWyBsYWJlbCA9ICI1IiBdCiAgICA2IFsgbGFiZWwgPSAiNiIgXQogICAgNyBbIGxhYmVsID0gIjciIF0KICAgIDQgLT4gMCBbIGxhYmVsID0gIiIgXQogICAgNSAtPiAwIFsgbGFiZWwgPSAiIiBdCiAgICA2IC0-IDAgWyBsYWJlbCA9ICIiIF0KICAgIDEgLT4gMyBbIGxhYmVsID0gIiIgXQogICAgMiAtPiAzIFsgbGFiZWwgPSAiIiBdCiAgICA3IC0-IDYgWyBsYWJlbCA9ICIiIF0KICAgIDMgLT4gNCBbIGxhYmVsID0gIiIgXQogICAgMyAtPiA1IFsgbGFiZWwgPSAiIiBdCiAgICAzIC0-IDYgWyBsYWJlbCA9ICIiIF0KfQo~ - // output - // [0], [6,5,4], [3], [1,2,7] - #[ test ] - fn complicated_test() - { - let mut graph = Graph::new(); - - let n = graph.add_node( &"0" ); - let n_1 = graph.add_node( &"1" ); - let n_2 = graph.add_node( &"2" ); - let n_3 = graph.add_node( &"3" ); - let n_4 = graph.add_node( &"4" ); - let n_5 = graph.add_node( &"5" ); - let n_6 = graph.add_node( &"6" ); - let n_7 = graph.add_node( &"7" ); - - graph.add_edge( n_1, n_3, &"" ); - graph.add_edge( n_2, n_3, &"" ); - graph.add_edge( n_7, n_6, &"" ); - - graph.add_edge( n_3, n_4, &"" ); - graph.add_edge( n_3, n_5, &"" ); - graph.add_edge( n_3, n_6, &"" ); - - graph.add_edge( n_4, n, &"" ); - graph.add_edge( n_5, n, &"" ); - graph.add_edge( n_6, n, &"" ); - - 
let groups = topological_sort_with_grouping( graph ); - - dbg!(&groups); - - assert_eq!( groups[ 0 ], vec![ "0" ] ); - - assert_eq!( groups[1].len(), 3 ); - assert!( groups[ 1 ].contains( &"6" ) ); - assert!( groups[ 1 ].contains( &"5" ) ); - assert!( groups[ 1 ].contains( &"4" ) ); - - assert_eq!( groups[ 2 ], vec![ "3" ] ); - - assert_eq!( groups[3].len(), 3 ); - assert!( groups[ 3 ].contains( &"1" ) ); - assert!( groups[ 3 ].contains( &"2" ) ); - assert!( groups[ 3 ].contains( &"7" ) ); - } - -} \ No newline at end of file diff --git a/module/move/willbe/tests/inc/tool/graph_test.rs b/module/move/willbe/tests/inc/tool/graph_test.rs new file mode 100644 index 0000000000..d6f5c38986 --- /dev/null +++ b/module/move/willbe/tests/inc/tool/graph_test.rs @@ -0,0 +1,215 @@ +use super::*; + +use the_module::*; +use graph::toposort; +use collection::HashMap; +use petgraph::Graph; +use willbe::graph::topological_sort_with_grouping; + +struct IndexMap< T >( HashMap< T, usize > ); + +impl< T > IndexMap< T > +where + T : std::hash::Hash + Eq, +{ + pub fn new( elements : Vec< T > ) -> Self + { + let index_map = elements.into_iter().enumerate().map( |( index, value )| ( value, index ) ).collect(); + Self( index_map ) + } + + pub fn position( &self, element : &T ) -> usize + { + self.0[ element ] + } +} + +#[ test ] +fn no_dependency() +{ + let mut graph = Graph::new(); + + let _node1 = graph.add_node( &"A" ); + let _node2 = graph.add_node( &"B" ); + + let sorted = toposort( graph ).unwrap(); + + let index_map = IndexMap::new( sorted ); + let node1_position = index_map.position( &"A" ); + let node2_position = index_map.position( &"B" ); + + assert!( node1_position < node2_position ); +} + +#[ test ] +fn a_depends_on_b() +{ + let mut graph = Graph::new(); + + let node1 = graph.add_node( &"A" ); + let node2 = graph.add_node( &"B" ); + + graph.add_edge( node1, node2, &"" ); + + let sorted = toposort( graph ).unwrap(); + + let index_map = IndexMap::new( sorted ); + let node1_position = index_map.position( &"A" ); + let node2_position = index_map.position( &"B" ); + + assert!( node1_position > node2_position ); +} + +#[ test ] +fn multiple_dependencies() +{ + let mut graph = Graph::new(); + + let a = graph.add_node( &"A" ); + let b = graph.add_node( &"B" ); + let c = graph.add_node( &"C" ); + + graph.add_edge( a, b, &"" ); + graph.add_edge( a, c, &"" ); + + let sorted = toposort( graph ).unwrap(); + + let index_map = IndexMap::new( sorted ); + let a_position = index_map.position( &"A" ); + let b_position = index_map.position( &"B" ); + let c_position = index_map.position( &"C" ); + + assert!( a_position > b_position ); + assert!( a_position > c_position ); +} + +#[ test ] +fn transitive_dependencies() +{ + let mut graph = Graph::new(); + + let a = graph.add_node( &"A" ); + let b = graph.add_node( &"B" ); + let c = graph.add_node( &"C" ); + + graph.add_edge( a, b, &"" ); + graph.add_edge( b, c, &"" ); + + let sorted = toposort( graph ).unwrap(); + + let index_map = IndexMap::new( sorted ); + let a_position = index_map.position( &"A" ); + let b_position = index_map.position( &"B" ); + let c_position = index_map.position( &"C" ); + + assert!( a_position > b_position ); + assert!( b_position > c_position ); +} + +#[ test ] +#[ should_panic( expected = "Cycle" ) ] +fn cycle() +{ + let mut graph = Graph::new(); + + let node1 = graph.add_node( &"A" ); + let node2 = graph.add_node( &"B" ); + + graph.add_edge( node1, node2, &"" ); + graph.add_edge( node2, node1, &"" ); + + let _sorted = toposort( graph ).unwrap(); +} 
+ +// input +// B -> A +// C -> A +// output +// [A], [B,C] +#[ test ] +fn simple_case() +{ + let mut graph = Graph::new(); + + let a_node = graph.add_node( &"A" ); + let b_node = graph.add_node( &"B" ); + let c_node = graph.add_node( &"C" ); + + graph.add_edge( b_node, a_node, &"B->A"); + graph.add_edge( c_node, a_node, &"C->A"); + + let groups = topological_sort_with_grouping( graph ); + + assert_eq!( groups[ 0 ], vec![ "A" ] ); + assert_eq!( groups[1].len(), 2 ); + assert!( groups[ 1 ].contains( &"C" ) ); + assert!( groups[ 1 ].contains( &"B" ) ); +} + +// input +// digraph { +// 0 [ label = "0" ] +// 1 [ label = "1" ] +// 2 [ label = "2" ] +// 3 [ label = "3" ] +// 4 [ label = "4" ] +// 5 [ label = "5" ] +// 6 [ label = "6" ] +// 7 [ label = "7" ] +// 4 -> 0 [ label = "" ] +// 5 -> 0 [ label = "" ] +// 6 -> 0 [ label = "" ] +// 1 -> 3 [ label = "" ] +// 2 -> 3 [ label = "" ] +// 7 -> 6 [ label = "" ] +// 3 -> 4 [ label = "" ] +// 3 -> 5 [ label = "" ] +// 3 -> 6 [ label = "" ] +// } +// visualization : https://viz-js.com/?dot=ZGlncmFwaCB7CiAgICAwIFsgbGFiZWwgPSAiMCIgXQogICAgMSBbIGxhYmVsID0gIjEiIF0KICAgIDIgWyBsYWJlbCA9ICIyIiBdCiAgICAzIFsgbGFiZWwgPSAiMyIgXQogICAgNCBbIGxhYmVsID0gIjQiIF0KICAgIDUgWyBsYWJlbCA9ICI1IiBdCiAgICA2IFsgbGFiZWwgPSAiNiIgXQogICAgNyBbIGxhYmVsID0gIjciIF0KICAgIDQgLT4gMCBbIGxhYmVsID0gIiIgXQogICAgNSAtPiAwIFsgbGFiZWwgPSAiIiBdCiAgICA2IC0-IDAgWyBsYWJlbCA9ICIiIF0KICAgIDEgLT4gMyBbIGxhYmVsID0gIiIgXQogICAgMiAtPiAzIFsgbGFiZWwgPSAiIiBdCiAgICA3IC0-IDYgWyBsYWJlbCA9ICIiIF0KICAgIDMgLT4gNCBbIGxhYmVsID0gIiIgXQogICAgMyAtPiA1IFsgbGFiZWwgPSAiIiBdCiAgICAzIC0-IDYgWyBsYWJlbCA9ICIiIF0KfQo~ +// output +// [0], [6,5,4], [3], [1,2,7] +#[ test ] +fn complicated_test() +{ + let mut graph = Graph::new(); + + let n = graph.add_node( &"0" ); + let n_1 = graph.add_node( &"1" ); + let n_2 = graph.add_node( &"2" ); + let n_3 = graph.add_node( &"3" ); + let n_4 = graph.add_node( &"4" ); + let n_5 = graph.add_node( &"5" ); + let n_6 = graph.add_node( &"6" ); + let n_7 = graph.add_node( &"7" ); + + graph.add_edge( n_1, n_3, &"" ); + graph.add_edge( n_2, n_3, &"" ); + graph.add_edge( n_7, n_6, &"" ); + + graph.add_edge( n_3, n_4, &"" ); + graph.add_edge( n_3, n_5, &"" ); + graph.add_edge( n_3, n_6, &"" ); + + graph.add_edge( n_4, n, &"" ); + graph.add_edge( n_5, n, &"" ); + graph.add_edge( n_6, n, &"" ); + + let groups = topological_sort_with_grouping( graph ); + + dbg!(&groups); + + assert_eq!( groups[ 0 ], vec![ "0" ] ); + + assert_eq!( groups[1].len(), 3 ); + assert!( groups[ 1 ].contains( &"6" ) ); + assert!( groups[ 1 ].contains( &"5" ) ); + assert!( groups[ 1 ].contains( &"4" ) ); + + assert_eq!( groups[ 2 ], vec![ "3" ] ); + + assert_eq!( groups[3].len(), 3 ); + assert!( groups[ 3 ].contains( &"1" ) ); + assert!( groups[ 3 ].contains( &"2" ) ); + assert!( groups[ 3 ].contains( &"7" ) ); +} diff --git a/module/move/willbe/tests/inc/tool/mod.rs b/module/move/willbe/tests/inc/tool/mod.rs index 5766a1e126..08275f5556 100644 --- a/module/move/willbe/tests/inc/tool/mod.rs +++ b/module/move/willbe/tests/inc/tool/mod.rs @@ -1,7 +1,4 @@ -// use super::*; +use super::*; -// pub mod process; - -pub mod graph; - -pub mod query; \ No newline at end of file +pub mod graph_test; +pub mod query_test; diff --git a/module/move/willbe/tests/inc/tool/process.rs b/module/move/willbe/tests/inc/tool/process.rs deleted file mode 100644 index 8d718c38d9..0000000000 --- a/module/move/willbe/tests/inc/tool/process.rs +++ /dev/null @@ -1,65 +0,0 @@ -// use super::*; -// -// use process_tools::process::*; -// -// use 
std::env::consts::EXE_EXTENSION; -// use std::ffi::OsString; -// use std::path::{ Path, PathBuf }; -// use std::process::Command; -// -// pub fn path_to_exe( name : &Path, temp_path : &Path ) -> PathBuf -// { -// _ = Command::new("rustc") -// .current_dir( temp_path ) -// .arg( name ) -// .status() -// .unwrap(); -// -// PathBuf::from( temp_path ) -// .join( name.file_name().unwrap() ) -// .with_extension( EXE_EXTENSION ) -// } -// -// #[ test ] -// fn err_out_err() -// { -// let temp = assert_fs::TempDir::new().unwrap(); -// let root_path = Path::new( env!( "CARGO_MANIFEST_DIR" ) ); -// let assets_relative_path = Path::new( ASSET_PATH ); -// let assets_path = root_path.join( assets_relative_path ); -// -// let args : [ OsString ; 0 ] = []; -// -// let options = process::Run::former() -// .bin_path( path_to_exe( &assets_path.join( "err_out_test" ).join( "err_out_err.rs" ), temp.path() ) ) -// .args( args.to_vec() ) -// .current_path( temp.to_path_buf() ) -// .joining_streams( true ) -// .form(); -// -// let report = process::run( options ).unwrap().out; -// -// assert_eq!( "This is stderr text\nThis is stdout text\nThis is stderr text\n", report ); -// } -// -// #[ test ] -// fn out_err_out() -// { -// let temp = assert_fs::TempDir::new().unwrap(); -// let root_path = Path::new( env!( "CARGO_MANIFEST_DIR" ) ); -// let assets_relative_path = Path::new( ASSET_PATH ); -// let assets_path = root_path.join( assets_relative_path ); -// -// let args : [ OsString ; 0 ] = []; -// -// let options = process::Run::former() -// .bin_path( path_to_exe( &assets_path.join( "err_out_test" ).join( "out_err_out.rs" ), temp.path() ) ) -// .args( args.to_vec() ) -// .current_path( temp.to_path_buf() ) -// .joining_streams( true ) -// .form(); -// let report = process::run( options ).unwrap().out; -// -// assert_eq!( "This is stdout text\nThis is stderr text\nThis is stdout text\n", report ); -// } -// diff --git a/module/move/willbe/tests/inc/tool/query.rs b/module/move/willbe/tests/inc/tool/query_test.rs similarity index 97% rename from module/move/willbe/tests/inc/tool/query.rs rename to module/move/willbe/tests/inc/tool/query_test.rs index 49c23b0946..fa98f5fab1 100644 --- a/module/move/willbe/tests/inc/tool/query.rs +++ b/module/move/willbe/tests/inc/tool/query_test.rs @@ -1,10 +1,11 @@ -use crate::the_module::query:: +use super::*; +use the_module::query:: { - parse, - ParseResult, - Value, + parse, + ParseResult, + Value, }; -use std::collections::HashMap; +use the_module::collection::HashMap; use std::str::FromStr; #[ test ] diff --git a/module/move/wplot/Cargo.toml b/module/move/wplot/Cargo.toml index fb02fc12fa..a128e4223a 100644 --- a/module/move/wplot/Cargo.toml +++ b/module/move/wplot/Cargo.toml @@ -23,7 +23,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + include = [ "/rust/impl/plot", "/Cargo.toml", diff --git a/module/postponed/_video_experiment/Cargo.toml b/module/postponed/_video_experiment/Cargo.toml index b167ebec39..b5b8409690 100644 --- a/module/postponed/_video_experiment/Cargo.toml +++ b/module/postponed/_video_experiment/Cargo.toml @@ -23,7 +23,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + include = [ "/rust/impl/_blank", "/Cargo.toml", diff --git a/module/postponed/non_std/Cargo.toml b/module/postponed/non_std/Cargo.toml index 4b9d85d843..516d197d99 100644 --- a/module/postponed/non_std/Cargo.toml +++ 
b/module/postponed/non_std/Cargo.toml @@ -170,19 +170,19 @@ string_split = [ "string", "wtools/string_split" ] error = [ "wtools/error" ] error_default = [ "error", - "error_for_lib", - "error_for_app", + "error_typed", + "error_untyped", ] error_full = [ "error", - "error_for_lib", - "error_for_app", + "error_typed", + "error_untyped", ] error_no_std = [ "error", "wtools/error_no_std" ] error_use_alloc = [ "error", "wtools/error_use_alloc" ] -error_for_lib = [ "error", "wtools/error_for_lib" ] -error_for_app = [ "error", "wtools/error_for_app" ] +error_typed = [ "error", "wtools/error_typed" ] +error_untyped = [ "error", "wtools/error_untyped" ] # derive diff --git a/module/postponed/std_tools/Cargo.toml b/module/postponed/std_tools/Cargo.toml index e785849ad9..44d29afa00 100644 --- a/module/postponed/std_tools/Cargo.toml +++ b/module/postponed/std_tools/Cargo.toml @@ -171,19 +171,19 @@ string_split = [ "string", "wtools/string_split" ] error = [ "wtools/error" ] error_default = [ "error", - "error_for_lib", - "error_for_app", + "error_typed", + "error_untyped", ] error_full = [ "error", - "error_for_lib", - "error_for_app", + "error_typed", + "error_untyped", ] error_no_std = [ "error", "wtools/error_no_std" ] error_use_alloc = [ "error", "wtools/error_use_alloc" ] -error_for_lib = [ "error", "wtools/error_for_lib" ] -error_for_app = [ "error", "wtools/error_for_app" ] +error_typed = [ "error", "wtools/error_typed" ] +error_untyped = [ "error", "wtools/error_untyped" ] # derive diff --git a/module/postponed/std_x/Cargo.toml b/module/postponed/std_x/Cargo.toml index 4795beee5e..5693aa40a1 100644 --- a/module/postponed/std_x/Cargo.toml +++ b/module/postponed/std_x/Cargo.toml @@ -173,19 +173,19 @@ string_split = [ "string", "wtools/string_split" ] error = [ "wtools/error" ] error_default = [ "error", - "error_for_lib", - "error_for_app", + "error_typed", + "error_untyped", ] error_full = [ "error", - "error_for_lib", - "error_for_app", + "error_typed", + "error_untyped", ] error_no_std = [ "error", "wtools/error_no_std" ] error_use_alloc = [ "error", "wtools/error_use_alloc" ] -error_for_lib = [ "error", "wtools/error_for_lib" ] -error_for_app = [ "error", "wtools/error_for_app" ] +error_typed = [ "error", "wtools/error_typed" ] +error_untyped = [ "error", "wtools/error_untyped" ] # derive diff --git a/module/postponed/type_constructor/Cargo.toml b/module/postponed/type_constructor/Cargo.toml index 0c12790c65..52dbcd6b95 100644 --- a/module/postponed/type_constructor/Cargo.toml +++ b/module/postponed/type_constructor/Cargo.toml @@ -24,7 +24,7 @@ workspace = true features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + include = [ "/rust/impl/dt/type_constructor", "/Cargo.toml", diff --git a/module/postponed/type_constructor/src/lib.rs b/module/postponed/type_constructor/src/lib.rs index 600458bfd0..29058c02b9 100644 --- a/module/postponed/type_constructor/src/lib.rs +++ b/module/postponed/type_constructor/src/lib.rs @@ -78,8 +78,10 @@ pub mod orphan /// Exposed namespace of the module. 
#[ cfg( feature = "enabled" ) ] +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/type_constructor/src/type_constuctor/enumerable.rs b/module/postponed/type_constructor/src/type_constuctor/enumerable.rs index 31b7e3b352..f66a735bd7 100644 --- a/module/postponed/type_constructor/src/type_constuctor/enumerable.rs +++ b/module/postponed/type_constructor/src/type_constuctor/enumerable.rs @@ -250,8 +250,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/type_constructor/src/type_constuctor/helper.rs b/module/postponed/type_constructor/src/type_constuctor/helper.rs index e671e69bbc..0b42aa91f1 100644 --- a/module/postponed/type_constructor/src/type_constuctor/helper.rs +++ b/module/postponed/type_constructor/src/type_constuctor/helper.rs @@ -58,8 +58,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/type_constructor/src/type_constuctor/many.rs b/module/postponed/type_constructor/src/type_constuctor/many.rs index 0544dac8c0..234b8824c0 100644 --- a/module/postponed/type_constructor/src/type_constuctor/many.rs +++ b/module/postponed/type_constructor/src/type_constuctor/many.rs @@ -563,8 +563,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/type_constructor/src/type_constuctor/mod.rs b/module/postponed/type_constructor/src/type_constuctor/mod.rs index df047d149f..fa5a78eb70 100644 --- a/module/postponed/type_constructor/src/type_constuctor/mod.rs +++ b/module/postponed/type_constructor/src/type_constuctor/mod.rs @@ -95,8 +95,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/type_constructor/src/type_constuctor/no_many.rs b/module/postponed/type_constructor/src/type_constuctor/no_many.rs index 8e615be58b..51a1e6948d 100644 --- a/module/postponed/type_constructor/src/type_constuctor/no_many.rs +++ b/module/postponed/type_constructor/src/type_constuctor/no_many.rs @@ -49,8 +49,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/type_constructor/src/type_constuctor/pair.rs b/module/postponed/type_constructor/src/type_constuctor/pair.rs index 18dfb74a30..91db5238e1 100644 --- a/module/postponed/type_constructor/src/type_constuctor/pair.rs +++ b/module/postponed/type_constructor/src/type_constuctor/pair.rs @@ -216,8 +216,10 @@ pub mod orphan } /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/type_constructor/src/type_constuctor/single.rs b/module/postponed/type_constructor/src/type_constuctor/single.rs index 23aecddbc7..1d49657539 100644 --- a/module/postponed/type_constructor/src/type_constuctor/single.rs +++ b/module/postponed/type_constructor/src/type_constuctor/single.rs @@ -551,8 +551,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/type_constructor/src/type_constuctor/traits.rs b/module/postponed/type_constructor/src/type_constuctor/traits.rs index 97c74c822d..6c7ab35d1d 100644 --- a/module/postponed/type_constructor/src/type_constuctor/traits.rs +++ b/module/postponed/type_constructor/src/type_constuctor/traits.rs @@ -75,8 +75,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/type_constructor/src/type_constuctor/types.rs b/module/postponed/type_constructor/src/type_constuctor/types.rs index c6c3aa2224..d6d4501d69 100644 --- a/module/postponed/type_constructor/src/type_constuctor/types.rs +++ b/module/postponed/type_constructor/src/type_constuctor/types.rs @@ -831,8 +831,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/type_constructor/src/type_constuctor/vectorized_from.rs b/module/postponed/type_constructor/src/type_constuctor/vectorized_from.rs index 3621b81c7e..6d7cc73991 100644 --- a/module/postponed/type_constructor/src/type_constuctor/vectorized_from.rs +++ b/module/postponed/type_constructor/src/type_constuctor/vectorized_from.rs @@ -159,8 +159,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/module/postponed/wpublisher/Cargo.toml b/module/postponed/wpublisher/Cargo.toml index 720a12fc59..194b0b7719 100644 --- a/module/postponed/wpublisher/Cargo.toml +++ b/module/postponed/wpublisher/Cargo.toml @@ -23,7 +23,7 @@ workspace = true [package.metadata.docs.rs] features = [ "full" ] all-features = false -# exclude = [ "/tests", "/examples", "-*" ] + include = [ "/rust/impl/publisher", "/Cargo.toml", diff --git a/module/step/meta/src/meta/_template_procedural_macro/front/lib.rs b/module/step/meta/src/meta/_template_procedural_macro/front/lib.rs index 68e7399466..65b8231fde 100644 --- a/module/step/meta/src/meta/_template_procedural_macro/front/lib.rs +++ b/module/step/meta/src/meta/_template_procedural_macro/front/lib.rs @@ -34,8 +34,10 @@ pub mod protected pub use protected::*; /// Exposed namespace of the module. 
+#[ allow( unused_imports ) ] pub mod exposed { + use super::*; pub use super::prelude::*; pub use meta::*; } diff --git a/module/step/meta/src/meta/_template_procedural_macro/runtime/lib.rs b/module/step/meta/src/meta/_template_procedural_macro/runtime/lib.rs index 3afe6ce196..6f42675ad6 100644 --- a/module/step/meta/src/meta/_template_procedural_macro/runtime/lib.rs +++ b/module/step/meta/src/meta/_template_procedural_macro/runtime/lib.rs @@ -27,8 +27,10 @@ pub mod protected pub use protected::*; /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; pub use super::prelude::*; } diff --git a/module/template/layer/layer.rs b/module/template/layer/layer.rs index e3d184e4a0..9000608222 100644 --- a/module/template/layer/layer.rs +++ b/module/template/layer/layer.rs @@ -39,8 +39,10 @@ pub mod orphan } /// Exposed namespace of the module. +#[ allow( unused_imports ) ] pub mod exposed { + use super::*; #[ doc( inline ) ] #[ allow( unused_imports ) ] pub use super::prelude::*; diff --git a/step/Cargo.toml b/step/Cargo.toml new file mode 100644 index 0000000000..70f00c8c96 --- /dev/null +++ b/step/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "step" +version = "0.0.0" +edition = "2021" +license = "MIT" +publish = false +readme = "Readme.md" +description = """ +Build and deploy steps. +""" + +[lints] +workspace = true + +[package.metadata.docs.rs] +features = [ "full" ] +all-features = false + +[dependencies] +# former = { workspace = true, features = [ "full" ] } +willbe = { workspace = true, features = [ "full" ] } + +[dev-dependencies] +test_tools = { workspace = true } diff --git a/step/src/bin/sources.rs b/step/src/bin/sources.rs new file mode 100644 index 0000000000..676fc25d02 --- /dev/null +++ b/step/src/bin/sources.rs @@ -0,0 +1,66 @@ +//! List all sources + +use willbe::exposed::*; +use willbe::{ Entries, Sources, CodeItems}; +use std:: +{ + fs, + fs::File, + io::Write, +}; + +fn main() -> Result< () > +{ + + let workspace = Workspace::try_from( CurrentPath )?; + + let package = workspace + .packages_which() + .crate_dir( CrateDir::transitive_try_from::< AbsolutePath >( CurrentPath )? ) + .find() + .expect( "No workspace at current path" ) + ; + + println!( " = package - {}", package.crate_dir().unwrap() ); + +// let ins = r#" +// pub mod exposed +// { +// "#; +// +// let sub = r#" +// pub mod exposed +// { +// #[ allow( unused_imports ) ] +// use super::*; +// "#; + + package.sources().for_each( | source | + { + println!( " - {source}" ); + + // let code = source.as_code().unwrap(); + // let code2 = code.replace( ins, sub ); + + // source + // .items() + // .for_each( | item | + // { + // println!( " - {}", std::any::type_name_of_val( &item ) ); + // // println!( " - item : {item:?}" ); + // }); + + }); + + // println!( "{}", package.as_code().unwrap() ); + + // let dst_path = format!( "{}.rs", package.name() ); + // let _ = fs::remove_file( &dst_path ); + // let code = package.as_code().unwrap(); + // let mut file = File::create( dst_path )?; + // file.write_all( code.as_bytes() )?; + + dbg!( &workspace.crate_dir ); + + return Ok( () ); +}