From b294aa568a23db6688463c84c27cf1a72b81fccc Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Mon, 8 Mar 2021 17:54:33 +0100 Subject: [PATCH 01/24] improved startup time --- rasa/__init__.py | 142 +++++++++++++++++++++++++++++++++++++++++- rasa/cli/data.py | 45 ++++++++----- rasa/cli/scaffold.py | 2 +- rasa/cli/test.py | 2 +- rasa/run.py | 46 +------------- rasa/telemetry.py | 2 - rasa/test.py | 16 +---- rasa/train.py | 59 +----------------- rasa/utils/common.py | 18 ++---- tests/cli/test_cli.py | 1 - 10 files changed, 181 insertions(+), 152 deletions(-) diff --git a/rasa/__init__.py b/rasa/__init__.py index db331550b2c1..4f489ba3a474 100644 --- a/rasa/__init__.py +++ b/rasa/__init__.py @@ -1,12 +1,148 @@ import logging +import typing from rasa import version +import rasa.shared.constants as __shared_constants + +if typing.TYPE_CHECKING: + from typing import Any, Text, Dict, Union, List, Optional + from rasa.train import TrainingResult + import asyncio # define the version before the other imports since these need it __version__ = version.__version__ -from rasa.run import run -from rasa.train import train -from rasa.test import test logging.getLogger(__name__).addHandler(logging.NullHandler()) + + +def run( + model: "Text", + endpoints: "Text", + connector: "Text" = None, + credentials: "Text" = None, + **kwargs: "Dict[Text, Any]", +): + """Runs a Rasa model. + + Args: + model: Path to model archive. + endpoints: Path to endpoints file. + connector: Connector which should be use (overwrites `credentials` + field). + credentials: Path to channel credentials file. + **kwargs: Additional arguments which are passed to + `rasa.core.run.serve_application`. + + """ + import rasa.core.run + import rasa.nlu.run + from rasa.core.utils import AvailableEndpoints + from rasa.shared.utils.cli import print_warning + import rasa.shared.utils.common + from rasa.shared.constants import DOCS_BASE_URL + + _endpoints = AvailableEndpoints.read_endpoints(endpoints) + + if not connector and not credentials: + connector = "rest" + + print_warning( + f"No chat connector configured, falling back to the " + f"REST input channel. To connect your bot to another channel, " + f"read the docs here: {DOCS_BASE_URL}/messaging-and-voice-channels" + ) + + kwargs = rasa.shared.utils.common.minimal_kwargs( + kwargs, rasa.core.run.serve_application + ) + rasa.core.run.serve_application( + model, + channel=connector, + credentials=credentials, + endpoints=_endpoints, + **kwargs, + ) + + +def train( + domain: "Text", + config: "Text", + training_files: "Union[Text, List[Text]]", + output: "Text" = __shared_constants.DEFAULT_MODELS_PATH, + dry_run: bool = False, + force_training: bool = False, + fixed_model_name: "Optional[Text]" = None, + persist_nlu_training_data: bool = False, + core_additional_arguments: "Optional[Dict]" = None, + nlu_additional_arguments: "Optional[Dict]" = None, + loop: "Optional[asyncio.AbstractEventLoop]" = None, + model_to_finetune: "Optional[Text]" = None, + finetuning_epoch_fraction: float = 1.0, +) -> "TrainingResult": + """Runs Rasa Core and NLU training in `async` loop. + + Args: + domain: Path to the domain file. + config: Path to the config for Core and NLU. + training_files: Paths to the training data for Core and NLU. + output: Output path. + dry_run: If `True` then no training will be done, and the information about + whether the training needs to be done will be printed. + force_training: If `True` retrain model even if data has not changed. + fixed_model_name: Name of model to be stored. 
+ persist_nlu_training_data: `True` if the NLU training data should be persisted + with the model. + core_additional_arguments: Additional training parameters for core training. + nlu_additional_arguments: Additional training parameters forwarded to training + method of each NLU component. + loop: Optional EventLoop for running coroutines. + model_to_finetune: Optional path to a model which should be finetuned or + a directory in case the latest trained model should be used. + finetuning_epoch_fraction: The fraction currently specified training epochs + in the model configuration which should be used for finetuning. + + Returns: + An instance of `TrainingResult`. + """ + from rasa.train import train_async + import rasa.utils.common + + return rasa.utils.common.run_in_loop( + train_async( + domain=domain, + config=config, + training_files=training_files, + output=output, + dry_run=dry_run, + force_training=force_training, + fixed_model_name=fixed_model_name, + persist_nlu_training_data=persist_nlu_training_data, + core_additional_arguments=core_additional_arguments, + nlu_additional_arguments=nlu_additional_arguments, + model_to_finetune=model_to_finetune, + finetuning_epoch_fraction=finetuning_epoch_fraction, + ), + loop, + ) + + +def test( + model: "Text", + stories: "Text", + nlu_data: "Text", + output: "Text" = __shared_constants.DEFAULT_RESULTS_PATH, + additional_arguments: "Optional[Dict]" = None, +) -> None: + from rasa.test import test_core + import rasa.utils.common + from rasa.test import test_nlu + + if additional_arguments is None: + additional_arguments = {} + + test_core(model, stories, output, additional_arguments) + + rasa.utils.common.run_in_loop( + test_nlu(model, nlu_data, output, additional_arguments) + ) diff --git a/rasa/cli/data.py b/rasa/cli/data.py index 00f7752e2050..659baeecbcc0 100644 --- a/rasa/cli/data.py +++ b/rasa/cli/data.py @@ -11,10 +11,6 @@ from rasa.cli.arguments import data as arguments from rasa.cli.arguments import default_arguments import rasa.cli.utils -from rasa.core.training.converters.responses_prefix_converter import ( - DomainResponsePrefixConverter, - StoryResponsePrefixConverter, -) import rasa.nlu.convert from rasa.shared.constants import ( DEFAULT_DATA_PATH, @@ -38,18 +34,13 @@ import rasa.shared.nlu.training_data.util import rasa.shared.utils.cli import rasa.utils.common -from rasa.utils.converter import TrainingDataConverter -from rasa.validator import Validator from rasa.shared.core.domain import Domain, InvalidDomain import rasa.shared.utils.io -import rasa.core.config -from rasa.core.policies.form_policy import FormPolicy -from rasa.core.policies.fallback import FallbackPolicy -from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy -from rasa.core.policies.mapping_policy import MappingPolicy if TYPE_CHECKING: from rasa.shared.core.training_data.structures import StoryStep + from rasa.validator import Validator + from rasa.utils.converter import TrainingDataConverter logger = logging.getLogger(__name__) @@ -242,6 +233,7 @@ def validate_files(args: argparse.Namespace, stories_only: bool = False) -> None args: Commandline arguments stories_only: If `True`, only the story structure is validated. 
""" + from rasa.validator import Validator config = rasa.cli.utils.get_validated_path( args.config, "config", DEFAULT_CONFIG_PATH, none_is_valid=True @@ -278,15 +270,15 @@ def validate_stories(args: argparse.Namespace) -> None: validate_files(args, stories_only=True) -def _validate_domain(validator: Validator) -> bool: +def _validate_domain(validator: "Validator") -> bool: return validator.verify_domain_validity() -def _validate_nlu(validator: Validator, args: argparse.Namespace) -> bool: +def _validate_nlu(validator: "Validator", args: argparse.Namespace) -> bool: return validator.verify_nlu(not args.fail_on_warnings) -def _validate_story_structure(validator: Validator, args: argparse.Namespace) -> bool: +def _validate_story_structure(validator: "Validator", args: argparse.Namespace) -> bool: # Check if a valid setting for `max_history` was given if isinstance(args.max_history, int) and args.max_history < 1: raise argparse.ArgumentTypeError( @@ -358,6 +350,11 @@ def _migrate_responses(args: argparse.Namespace) -> None: """Migrate retrieval intent responses to the new 2.0 format. It does so modifying the stories and domain files. """ + from rasa.core.training.converters.responses_prefix_converter import ( + DomainResponsePrefixConverter, + StoryResponsePrefixConverter, + ) + if args.format == "yaml": rasa.utils.common.run_in_loop( _convert_to_yaml(args.out, args.domain, DomainResponsePrefixConverter()) @@ -374,7 +371,7 @@ def _migrate_responses(args: argparse.Namespace) -> None: async def _convert_to_yaml( - out_path: Text, data_path: Text, converter: TrainingDataConverter + out_path: Text, data_path: Text, converter: "TrainingDataConverter" ) -> None: output = Path(out_path) @@ -415,7 +412,7 @@ async def _convert_to_yaml( async def _convert_file_to_yaml( - source_file: Path, target_dir: Path, converter: TrainingDataConverter + source_file: Path, target_dir: Path, converter: "TrainingDataConverter" ) -> bool: """Converts a single training data file to `YAML` format. @@ -483,6 +480,8 @@ def _migrate_model_config(args: argparse.Namespace) -> None: def _get_configuration(path: Path) -> Dict: + from rasa.core.policies.form_policy import FormPolicy + config = {} try: config = rasa.shared.utils.io.read_model_configuration(path) @@ -506,6 +505,10 @@ def _get_configuration(path: Path) -> Dict: def _assert_config_needs_migration(policies: List[Text]) -> None: + from rasa.core.policies.mapping_policy import MappingPolicy + from rasa.core.policies.fallback import FallbackPolicy + from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy + migratable_policies = { MappingPolicy.__name__, FallbackPolicy.__name__, @@ -521,6 +524,8 @@ def _assert_config_needs_migration(policies: List[Text]) -> None: def _warn_about_manual_forms_migration() -> None: + from rasa.core.policies.form_policy import FormPolicy + rasa.shared.utils.cli.print_warning( f"Your model configuration contains the '{FormPolicy.__name__}'. 
" f"Note that this command does not migrate the '{FormPolicy.__name__}' and " @@ -531,6 +536,9 @@ def _warn_about_manual_forms_migration() -> None: def _assert_nlu_pipeline_given(config: Dict, policy_names: List[Text]) -> None: + from rasa.core.policies.fallback import FallbackPolicy + from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy + if not config.get("pipeline") and any( policy in policy_names for policy in [FallbackPolicy.__name__, TwoStageFallbackPolicy.__name__] @@ -542,6 +550,8 @@ def _assert_nlu_pipeline_given(config: Dict, policy_names: List[Text]) -> None: def _assert_two_stage_fallback_policy_is_migratable(config: Dict) -> None: + from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy + two_stage_fallback_config = next( ( policy_config @@ -583,6 +593,9 @@ def _assert_two_stage_fallback_policy_is_migratable(config: Dict) -> None: def _assert_only_one_fallback_policy_present(policies: List[Text]) -> None: + from rasa.core.policies.fallback import FallbackPolicy + from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy + if ( FallbackPolicy.__name__ in policies and TwoStageFallbackPolicy.__name__ in policies diff --git a/rasa/cli/scaffold.py b/rasa/cli/scaffold.py index 5357b8702463..05dee1f3b3f2 100644 --- a/rasa/cli/scaffold.py +++ b/rasa/cli/scaffold.py @@ -5,7 +5,6 @@ from rasa import telemetry from rasa.cli import SubParsersAction -import rasa.train from rasa.cli.shell import shell from rasa.cli.utils import create_output_path from rasa.shared.utils.cli import print_success, print_error_and_exit @@ -49,6 +48,7 @@ def add_subparser( def print_train_or_instructions(args: argparse.Namespace, path: Text) -> None: import questionary + import rasa.train print_success("Finished creating project structure.") diff --git a/rasa/cli/test.py b/rasa/cli/test.py index 4218ff3f81fd..b170a41b6ee3 100644 --- a/rasa/cli/test.py +++ b/rasa/cli/test.py @@ -17,7 +17,6 @@ DEFAULT_DATA_PATH, DEFAULT_RESULTS_PATH, ) -from rasa.core.test import FAILED_STORIES_FILE import rasa.shared.utils.validation as validation_utils import rasa.cli.utils import rasa.utils.common @@ -70,6 +69,7 @@ def add_subparser( def run_core_test(args: argparse.Namespace) -> None: """Run core tests.""" from rasa.test import test_core_models_in_directory, test_core, test_core_models + from rasa.core.test import FAILED_STORIES_FILE stories = rasa.cli.utils.get_validated_path( args.stories, "stories", DEFAULT_DATA_PATH diff --git a/rasa/run.py b/rasa/run.py index 2a6c3c8ac55a..32e0e0ceb55b 100644 --- a/rasa/run.py +++ b/rasa/run.py @@ -12,50 +12,8 @@ if typing.TYPE_CHECKING: from rasa.core.agent import Agent - -def run( - model: Text, - endpoints: Text, - connector: Text = None, - credentials: Text = None, - **kwargs: Dict, -): - """Runs a Rasa model. - - Args: - model: Path to model archive. - endpoints: Path to endpoints file. - connector: Connector which should be use (overwrites `credentials` - field). - credentials: Path to channel credentials file. - **kwargs: Additional arguments which are passed to - `rasa.core.run.serve_application`. - - """ - import rasa.core.run - import rasa.nlu.run - from rasa.core.utils import AvailableEndpoints - - _endpoints = AvailableEndpoints.read_endpoints(endpoints) - - if not connector and not credentials: - connector = "rest" - print_warning( - f"No chat connector configured, falling back to the " - f"REST input channel. 
To connect your bot to another channel, " - f"read the docs here: {DOCS_BASE_URL}/messaging-and-voice-channels" - ) - - kwargs = rasa.shared.utils.common.minimal_kwargs( - kwargs, rasa.core.run.serve_application - ) - rasa.core.run.serve_application( - model, - channel=connector, - credentials=credentials, - endpoints=_endpoints, - **kwargs, - ) +# backwards compatibility +run = rasa.run def create_agent(model: Text, endpoints: Text = None) -> "Agent": diff --git a/rasa/telemetry.py b/rasa/telemetry.py index 65b75635ebe7..4d3a964ff485 100644 --- a/rasa/telemetry.py +++ b/rasa/telemetry.py @@ -470,7 +470,6 @@ def _default_context_fields() -> Dict[Text, Any]: Return: A new context containing information about the runtime environment. """ - import tensorflow as tf global TELEMETRY_CONTEXT @@ -482,7 +481,6 @@ def _default_context_fields() -> Dict[Text, Any]: "directory": _hash_directory_path(os.getcwd()), "python": sys.version.split(" ")[0], "rasa_open_source": rasa.__version__, - "gpu": len(tf.config.list_physical_devices("GPU")), "cpu": multiprocessing.cpu_count(), "docker": _is_docker(), } diff --git a/rasa/test.py b/rasa/test.py index 9c511283c5d8..f619fbca0cfe 100644 --- a/rasa/test.py +++ b/rasa/test.py @@ -90,20 +90,8 @@ def test_core_models(models: List[Text], stories: Text, output: Text): rasa.utils.common.run_in_loop(compare_models(models, stories, output)) -def test( - model: Text, - stories: Text, - nlu_data: Text, - output: Text = DEFAULT_RESULTS_PATH, - additional_arguments: Optional[Dict] = None, -): - if additional_arguments is None: - additional_arguments = {} - - test_core(model, stories, output, additional_arguments) - rasa.utils.common.run_in_loop( - test_nlu(model, nlu_data, output, additional_arguments) - ) +# backwards compatibility +test = rasa.test def test_core( diff --git a/rasa/train.py b/rasa/train.py index 4309261e2c42..5d4e5ff0504c 100644 --- a/rasa/train.py +++ b/rasa/train.py @@ -51,63 +51,8 @@ class TrainingResult(NamedTuple): code: int = 0 -def train( - domain: Text, - config: Text, - training_files: Union[Text, List[Text]], - output: Text = DEFAULT_MODELS_PATH, - dry_run: bool = False, - force_training: bool = False, - fixed_model_name: Optional[Text] = None, - persist_nlu_training_data: bool = False, - core_additional_arguments: Optional[Dict] = None, - nlu_additional_arguments: Optional[Dict] = None, - loop: Optional[asyncio.AbstractEventLoop] = None, - model_to_finetune: Optional[Text] = None, - finetuning_epoch_fraction: float = 1.0, -) -> TrainingResult: - """Runs Rasa Core and NLU training in `async` loop. - - Args: - domain: Path to the domain file. - config: Path to the config for Core and NLU. - training_files: Paths to the training data for Core and NLU. - output: Output path. - dry_run: If `True` then no training will be done, and the information about - whether the training needs to be done will be printed. - force_training: If `True` retrain model even if data has not changed. - fixed_model_name: Name of model to be stored. - persist_nlu_training_data: `True` if the NLU training data should be persisted - with the model. - core_additional_arguments: Additional training parameters for core training. - nlu_additional_arguments: Additional training parameters forwarded to training - method of each NLU component. - loop: Optional EventLoop for running coroutines. - model_to_finetune: Optional path to a model which should be finetuned or - a directory in case the latest trained model should be used. 
- finetuning_epoch_fraction: The fraction currently specified training epochs - in the model configuration which should be used for finetuning. - - Returns: - An instance of `TrainingResult`. - """ - return rasa.utils.common.run_in_loop( - train_async( - domain=domain, - config=config, - training_files=training_files, - output=output, - dry_run=dry_run, - force_training=force_training, - fixed_model_name=fixed_model_name, - persist_nlu_training_data=persist_nlu_training_data, - core_additional_arguments=core_additional_arguments, - nlu_additional_arguments=nlu_additional_arguments, - model_to_finetune=model_to_finetune, - finetuning_epoch_fraction=finetuning_epoch_fraction, - ), - loop, - ) +# backwards compatibility +train = rasa.train async def train_async( diff --git a/rasa/utils/common.py b/rasa/utils/common.py index cea7e034fc8f..1b8258ea1556 100644 --- a/rasa/utils/common.py +++ b/rasa/utils/common.py @@ -6,7 +6,6 @@ from types import TracebackType from typing import Any, Coroutine, Dict, List, Optional, Text, Type, TypeVar, Union -import rasa.core.utils import rasa.utils.io from rasa.constants import DEFAULT_LOG_LEVEL_LIBRARIES, ENV_LOG_LEVEL_LIBRARIES from rasa.shared.constants import DEFAULT_LOG_LEVEL, ENV_LOG_LEVEL @@ -101,24 +100,17 @@ def update_tensorflow_log_level() -> None: """Set the log level of Tensorflow to the log level specified in the environment variable 'LOG_LEVEL_LIBRARIES'.""" - # Disables libvinfer, tensorRT, cuda, AVX2 and FMA warnings (CPU support). This variable needs to be set before the + # Disables libvinfer, tensorRT, cuda, AVX2 and FMA warnings (CPU support). + # This variable needs to be set before the # first import since some warnings are raised on the first import. os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2" - import tensorflow as tf - log_level = os.environ.get(ENV_LOG_LEVEL_LIBRARIES, DEFAULT_LOG_LEVEL_LIBRARIES) - if log_level == "DEBUG": - tf_log_level = tf.compat.v1.logging.DEBUG - elif log_level == "INFO": - tf_log_level = tf.compat.v1.logging.INFO - elif log_level == "WARNING": - tf_log_level = tf.compat.v1.logging.WARN - else: - tf_log_level = tf.compat.v1.logging.ERROR + if not log_level: + log_level = "ERROR" - tf.compat.v1.logging.set_verbosity(tf_log_level) + logging.getLogger("tensorflow").setLevel(log_level) logging.getLogger("tensorflow").propagate = False diff --git a/tests/cli/test_cli.py b/tests/cli/test_cli.py index de6c663051d6..9097bf11cbfd 100644 --- a/tests/cli/test_cli.py +++ b/tests/cli/test_cli.py @@ -9,7 +9,6 @@ def test_cli_start(run: Callable[..., RunResult]): Checks that a call to ``rasa --help`` does not take longer than 7 seconds (10 seconds on Windows). 
""" - import time start = time.time() run("--help") From 7b9def952b3d5a86d65558b59527a2b1d360b0b8 Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Wed, 10 Mar 2021 20:31:47 +0100 Subject: [PATCH 02/24] use policy name constants instead of importing classes --- rasa/cli/data.py | 47 ++++++++++------------------- rasa/core/config.py | 29 +++++++++--------- rasa/shared/core/constants.py | 8 +++++ tests/shared/core/test_constants.py | 28 +++++++++++++++++ 4 files changed, 67 insertions(+), 45 deletions(-) create mode 100644 tests/shared/core/test_constants.py diff --git a/rasa/cli/data.py b/rasa/cli/data.py index 659baeecbcc0..6df13fce89c6 100644 --- a/rasa/cli/data.py +++ b/rasa/cli/data.py @@ -20,6 +20,10 @@ ) import rasa.shared.data from rasa.shared.core.constants import ( + POLICY_NAME_FALLBACK, + POLICY_NAME_FORM, + POLICY_NAME_MAPPING, + POLICY_NAME_TWO_STAGE_FALLBACK, USER_INTENT_OUT_OF_SCOPE, ACTION_DEFAULT_FALLBACK_NAME, ) @@ -480,8 +484,6 @@ def _migrate_model_config(args: argparse.Namespace) -> None: def _get_configuration(path: Path) -> Dict: - from rasa.core.policies.form_policy import FormPolicy - config = {} try: config = rasa.shared.utils.io.read_model_configuration(path) @@ -498,50 +500,41 @@ def _get_configuration(path: Path) -> Dict: _assert_two_stage_fallback_policy_is_migratable(config) _assert_only_one_fallback_policy_present(policy_names) - if FormPolicy.__name__ in policy_names: + if POLICY_NAME_FORM in policy_names: _warn_about_manual_forms_migration() return config def _assert_config_needs_migration(policies: List[Text]) -> None: - from rasa.core.policies.mapping_policy import MappingPolicy - from rasa.core.policies.fallback import FallbackPolicy - from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy - migratable_policies = { - MappingPolicy.__name__, - FallbackPolicy.__name__, - TwoStageFallbackPolicy.__name__, + POLICY_NAME_MAPPING, + POLICY_NAME_FALLBACK, + POLICY_NAME_TWO_STAGE_FALLBACK, } if not migratable_policies.intersection((set(policies))): rasa.shared.utils.cli.print_error_and_exit( f"No policies were found which need migration. This command can migrate " - f"'{MappingPolicy.__name__}', '{FallbackPolicy.__name__}' and " - f"'{TwoStageFallbackPolicy.__name__}'." + f"'{POLICY_NAME_MAPPING}', '{POLICY_NAME_FALLBACK}' and " + f"'{POLICY_NAME_TWO_STAGE_FALLBACK}'." ) def _warn_about_manual_forms_migration() -> None: - from rasa.core.policies.form_policy import FormPolicy - rasa.shared.utils.cli.print_warning( - f"Your model configuration contains the '{FormPolicy.__name__}'. " - f"Note that this command does not migrate the '{FormPolicy.__name__}' and " - f"you have to migrate the '{FormPolicy.__name__}' manually. " + f"Your model configuration contains the '{POLICY_NAME_FORM}'. " + f"Note that this command does not migrate the '{POLICY_NAME_FORM}' and " + f"you have to migrate the '{POLICY_NAME_FORM}' manually. " f"Please see the migration guide for further details: " f"{DOCS_URL_MIGRATION_GUIDE}" ) def _assert_nlu_pipeline_given(config: Dict, policy_names: List[Text]) -> None: - from rasa.core.policies.fallback import FallbackPolicy - from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy - if not config.get("pipeline") and any( policy in policy_names - for policy in [FallbackPolicy.__name__, TwoStageFallbackPolicy.__name__] + for policy in [POLICY_NAME_FALLBACK, POLICY_NAME_TWO_STAGE_FALLBACK] ): rasa.shared.utils.cli.print_error_and_exit( "The model configuration has to include an NLU pipeline. 
This is required " @@ -550,13 +543,11 @@ def _assert_nlu_pipeline_given(config: Dict, policy_names: List[Text]) -> None: def _assert_two_stage_fallback_policy_is_migratable(config: Dict) -> None: - from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy - two_stage_fallback_config = next( ( policy_config for policy_config in config.get("policies", []) - if policy_config.get("name") == TwoStageFallbackPolicy.__name__ + if policy_config.get("name") == POLICY_NAME_TWO_STAGE_FALLBACK ), None, ) @@ -593,13 +584,7 @@ def _assert_two_stage_fallback_policy_is_migratable(config: Dict) -> None: def _assert_only_one_fallback_policy_present(policies: List[Text]) -> None: - from rasa.core.policies.fallback import FallbackPolicy - from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy - - if ( - FallbackPolicy.__name__ in policies - and TwoStageFallbackPolicy.__name__ in policies - ): + if POLICY_NAME_FALLBACK in policies and POLICY_NAME_TWO_STAGE_FALLBACK in policies: rasa.shared.utils.cli.print_error_and_exit( "Your policy configuration contains two configured policies for handling " "fallbacks. Please decide on one." diff --git a/rasa/core/config.py b/rasa/core/config.py index 2080a5dde667..84ccb5d01b1e 100644 --- a/rasa/core/config.py +++ b/rasa/core/config.py @@ -13,6 +13,11 @@ from rasa.shared.core.constants import ( ACTION_DEFAULT_FALLBACK_NAME, ACTION_TWO_STAGE_FALLBACK_NAME, + POLICY_NAME_RULE, + POLICY_NAME_FALLBACK, + POLICY_NAME_MAPPING, + POLICY_NAME_TWO_STAGE_FALLBACK, + POLICY_NAME_FORM, ) import rasa.utils.io from rasa.shared.constants import ( @@ -25,10 +30,6 @@ import rasa.shared.utils.io import rasa.utils.io -from rasa.core.policies.mapping_policy import MappingPolicy -from rasa.core.policies.rule_policy import RulePolicy -from rasa.core.policies.fallback import FallbackPolicy -from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy from rasa.nlu.classifiers.fallback_classifier import FallbackClassifier if TYPE_CHECKING: @@ -69,8 +70,8 @@ def migrate_fallback_policies(config: Dict) -> Tuple[Dict, Optional["StoryStep"] policies = new_config.get("policies", []) fallback_config = _get_config_for_name( - FallbackPolicy.__name__, policies - ) or _get_config_for_name(TwoStageFallbackPolicy.__name__, policies) + POLICY_NAME_FALLBACK, policies + ) or _get_config_for_name(POLICY_NAME_TWO_STAGE_FALLBACK, policies) if not fallback_config: return config, None @@ -83,7 +84,7 @@ def migrate_fallback_policies(config: Dict) -> Tuple[Dict, Optional["StoryStep"] # The triggered action is hardcoded for the Two-Stage Fallback` fallback_action_name = ACTION_TWO_STAGE_FALLBACK_NAME - if fallback_config.get("name") == FallbackPolicy.__name__: + if fallback_config.get("name") == POLICY_NAME_FALLBACK: fallback_action_name = fallback_config.get( "fallback_action_name", ACTION_DEFAULT_FALLBACK_NAME ) @@ -113,10 +114,10 @@ def _update_rule_policy_config_for_fallback( policies: The current list of configured policies. fallback_config: The configuration of the deprecated fallback configuration. 
""" - rule_policy_config = _get_config_for_name(RulePolicy.__name__, policies) + rule_policy_config = _get_config_for_name(POLICY_NAME_RULE, policies) if not rule_policy_config: - rule_policy_config = {"name": RulePolicy.__name__} + rule_policy_config = {"name": POLICY_NAME_RULE} policies.append(rule_policy_config) core_threshold = fallback_config.get( @@ -188,15 +189,15 @@ def migrate_mapping_policy_to_rules( has_rule_policy = False for policy in policies: - if policy.get("name") == MappingPolicy.__name__: + if policy.get("name") == POLICY_NAME_MAPPING: has_mapping_policy = True - if policy.get("name") == RulePolicy.__name__: + if policy.get("name") == POLICY_NAME_RULE: has_rule_policy = True if not has_mapping_policy: return config, domain, [] - rasa.shared.utils.cli.print_info(f"Migrating the '{MappingPolicy.__name__}'.") + rasa.shared.utils.cli.print_info(f"Migrating the '{POLICY_NAME_MAPPING}'.") new_config = copy.deepcopy(config) new_domain = copy.deepcopy(domain) @@ -214,10 +215,10 @@ def migrate_mapping_policy_to_rules( new_rules.append(trigger_rule) # finally update the policies - policies = _drop_policy(MappingPolicy.__name__, policies) + policies = _drop_policy(POLICY_NAME_MAPPING, policies) if not has_rule_policy: - policies.append({"name": RulePolicy.__name__}) + policies.append({"name": POLICY_NAME_RULE}) new_config["policies"] = policies return new_config, new_domain, new_rules diff --git a/rasa/shared/core/constants.py b/rasa/shared/core/constants.py index 2d49d4ed63aa..470c8ca3dc30 100644 --- a/rasa/shared/core/constants.py +++ b/rasa/shared/core/constants.py @@ -80,3 +80,11 @@ USE_TEXT_FOR_FEATURIZATION = "use_text_for_featurization" ENTITY_LABEL_SEPARATOR = "#" + +# if you add more policy names, make sure to add a test as well to ensure +# that the name and the policy stay in sync +POLICY_NAME_TWO_STAGE_FALLBACK = "TwoStageFallbackPolicy" +POLICY_NAME_MAPPING = "MappingPolicy" +POLICY_NAME_FALLBACK = "FallbackPolicy" +POLICY_NAME_FORM = "FormPolicy" +POLICY_NAME_RULE = "RulePolicy" diff --git a/tests/shared/core/test_constants.py b/tests/shared/core/test_constants.py new file mode 100644 index 000000000000..960070a5ac08 --- /dev/null +++ b/tests/shared/core/test_constants.py @@ -0,0 +1,28 @@ +import pytest + +from rasa.core.policies.fallback import FallbackPolicy +from rasa.core.policies.form_policy import FormPolicy +from rasa.core.policies.mapping_policy import MappingPolicy +from rasa.core.policies.rule_policy import RulePolicy +from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy +from rasa.shared.core.constants import ( + POLICY_NAME_FALLBACK, + POLICY_NAME_MAPPING, + POLICY_NAME_RULE, + POLICY_NAME_TWO_STAGE_FALLBACK, + POLICY_NAME_FORM, +) + + +@pytest.mark.parametrize( + "name_in_constant, policy_class", + [ + (POLICY_NAME_TWO_STAGE_FALLBACK, TwoStageFallbackPolicy), + (POLICY_NAME_FALLBACK, FallbackPolicy), + (POLICY_NAME_MAPPING, MappingPolicy), + (POLICY_NAME_FORM, FormPolicy), + (POLICY_NAME_RULE, RulePolicy), + ], +) +def test_policy_names(name_in_constant, policy_class): + assert name_in_constant == policy_class.__name__ From 1121de9e3dcf9454419c38f2ce3f070b94f0a61b Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Wed, 10 Mar 2021 20:38:38 +0100 Subject: [PATCH 03/24] updated telemetry example --- docs/docs/telemetry/telemetry.mdx | 1 - rasa/telemetry.py | 4 ++++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/docs/docs/telemetry/telemetry.mdx b/docs/docs/telemetry/telemetry.mdx index 0b36a52dcb8e..f4d532dabd87 100644 --- 
a/docs/docs/telemetry/telemetry.mdx +++ b/docs/docs/telemetry/telemetry.mdx @@ -113,7 +113,6 @@ Here is an example report that shows the data reported to Rasa after running "project": "a0a7178e6e5f9e6484c5cfa3ea4497ffc0c96d0ad3f3ad8e9399a1edd88e3cf4", "python": "3.7.5", "rasa_open_source": "2.0.0", - "gpu": 0, "cpu": 16 } } diff --git a/rasa/telemetry.py b/rasa/telemetry.py index 64d8d1a36218..0883d4b017e7 100644 --- a/rasa/telemetry.py +++ b/rasa/telemetry.py @@ -474,6 +474,8 @@ def _default_context_fields() -> Dict[Text, Any]: global TELEMETRY_CONTEXT if not TELEMETRY_CONTEXT: + # Make sure to update the example in docs/docs/telemetry/telemetry.mdx + # if you change / add context TELEMETRY_CONTEXT = { "os": {"name": platform.system(), "version": platform.release()}, "ci": in_continuous_integration(), @@ -700,6 +702,8 @@ async def track_model_training( training_id = uuid.uuid4().hex + # Make sure to update the example in docs/docs/telemetry/telemetry.mdx + # if you change / add any properties _track( TRAINING_STARTED_EVENT, { From 0a9c14c600162f572539820fe1543d76cbbb9f60 Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Wed, 10 Mar 2021 21:33:21 +0100 Subject: [PATCH 04/24] added cli import test --- rasa/__init__.py | 140 +----------------------------------- rasa/api.py | 146 ++++++++++++++++++++++++++++++++++++++ rasa/cli/data.py | 3 +- rasa/run.py | 4 +- rasa/utils/train_utils.py | 6 +- tests/cli/test_cli.py | 23 +++--- 6 files changed, 167 insertions(+), 155 deletions(-) create mode 100644 rasa/api.py diff --git a/rasa/__init__.py b/rasa/__init__.py index 4f489ba3a474..e10d5781c15d 100644 --- a/rasa/__init__.py +++ b/rasa/__init__.py @@ -1,148 +1,10 @@ import logging -import typing from rasa import version -import rasa.shared.constants as __shared_constants - -if typing.TYPE_CHECKING: - from typing import Any, Text, Dict, Union, List, Optional - from rasa.train import TrainingResult - import asyncio +from rasa.api import run, train, test # define the version before the other imports since these need it __version__ = version.__version__ logging.getLogger(__name__).addHandler(logging.NullHandler()) - - -def run( - model: "Text", - endpoints: "Text", - connector: "Text" = None, - credentials: "Text" = None, - **kwargs: "Dict[Text, Any]", -): - """Runs a Rasa model. - - Args: - model: Path to model archive. - endpoints: Path to endpoints file. - connector: Connector which should be use (overwrites `credentials` - field). - credentials: Path to channel credentials file. - **kwargs: Additional arguments which are passed to - `rasa.core.run.serve_application`. - - """ - import rasa.core.run - import rasa.nlu.run - from rasa.core.utils import AvailableEndpoints - from rasa.shared.utils.cli import print_warning - import rasa.shared.utils.common - from rasa.shared.constants import DOCS_BASE_URL - - _endpoints = AvailableEndpoints.read_endpoints(endpoints) - - if not connector and not credentials: - connector = "rest" - - print_warning( - f"No chat connector configured, falling back to the " - f"REST input channel. 
To connect your bot to another channel, " - f"read the docs here: {DOCS_BASE_URL}/messaging-and-voice-channels" - ) - - kwargs = rasa.shared.utils.common.minimal_kwargs( - kwargs, rasa.core.run.serve_application - ) - rasa.core.run.serve_application( - model, - channel=connector, - credentials=credentials, - endpoints=_endpoints, - **kwargs, - ) - - -def train( - domain: "Text", - config: "Text", - training_files: "Union[Text, List[Text]]", - output: "Text" = __shared_constants.DEFAULT_MODELS_PATH, - dry_run: bool = False, - force_training: bool = False, - fixed_model_name: "Optional[Text]" = None, - persist_nlu_training_data: bool = False, - core_additional_arguments: "Optional[Dict]" = None, - nlu_additional_arguments: "Optional[Dict]" = None, - loop: "Optional[asyncio.AbstractEventLoop]" = None, - model_to_finetune: "Optional[Text]" = None, - finetuning_epoch_fraction: float = 1.0, -) -> "TrainingResult": - """Runs Rasa Core and NLU training in `async` loop. - - Args: - domain: Path to the domain file. - config: Path to the config for Core and NLU. - training_files: Paths to the training data for Core and NLU. - output: Output path. - dry_run: If `True` then no training will be done, and the information about - whether the training needs to be done will be printed. - force_training: If `True` retrain model even if data has not changed. - fixed_model_name: Name of model to be stored. - persist_nlu_training_data: `True` if the NLU training data should be persisted - with the model. - core_additional_arguments: Additional training parameters for core training. - nlu_additional_arguments: Additional training parameters forwarded to training - method of each NLU component. - loop: Optional EventLoop for running coroutines. - model_to_finetune: Optional path to a model which should be finetuned or - a directory in case the latest trained model should be used. - finetuning_epoch_fraction: The fraction currently specified training epochs - in the model configuration which should be used for finetuning. - - Returns: - An instance of `TrainingResult`. - """ - from rasa.train import train_async - import rasa.utils.common - - return rasa.utils.common.run_in_loop( - train_async( - domain=domain, - config=config, - training_files=training_files, - output=output, - dry_run=dry_run, - force_training=force_training, - fixed_model_name=fixed_model_name, - persist_nlu_training_data=persist_nlu_training_data, - core_additional_arguments=core_additional_arguments, - nlu_additional_arguments=nlu_additional_arguments, - model_to_finetune=model_to_finetune, - finetuning_epoch_fraction=finetuning_epoch_fraction, - ), - loop, - ) - - -def test( - model: "Text", - stories: "Text", - nlu_data: "Text", - output: "Text" = __shared_constants.DEFAULT_RESULTS_PATH, - additional_arguments: "Optional[Dict]" = None, -) -> None: - from rasa.test import test_core - import rasa.utils.common - from rasa.test import test_nlu - - if additional_arguments is None: - additional_arguments = {} - - test_core(model, stories, output, additional_arguments) - - rasa.utils.common.run_in_loop( - test_nlu(model, nlu_data, output, additional_arguments) - ) diff --git a/rasa/api.py b/rasa/api.py new file mode 100644 index 000000000000..43fe4aeea359 --- /dev/null +++ b/rasa/api.py @@ -0,0 +1,146 @@ +import rasa.shared.constants +import typing + +# WARNING: Be careful about adding any top level imports at this place! 
+# These functions are imported in `rasa.__init__` and any top level import +# added here will get executed as soon as someone runs `import rasa`. +# Some imports are very slow (e.g. `tensorflow`) and we want them to get +# imported when running `import rasa`. If you add more imports here, +# please check that in the chain you are importing, no slow packages +# are getting imported. + +if typing.TYPE_CHECKING: + from typing import Any, Text, Dict, Union, List, Optional + from rasa.train import TrainingResult + import asyncio + + +def run( + model: "Text", + endpoints: "Text", + connector: "Text" = None, + credentials: "Text" = None, + **kwargs: "Dict[Text, Any]", +): + """Runs a Rasa model. + + Args: + model: Path to model archive. + endpoints: Path to endpoints file. + connector: Connector which should be use (overwrites `credentials` + field). + credentials: Path to channel credentials file. + **kwargs: Additional arguments which are passed to + `rasa.core.run.serve_application`. + + """ + import rasa.core.run + from rasa.core.utils import AvailableEndpoints + from rasa.shared.utils.cli import print_warning + import rasa.shared.utils.common + from rasa.shared.constants import DOCS_BASE_URL + + _endpoints = AvailableEndpoints.read_endpoints(endpoints) + + if not connector and not credentials: + connector = "rest" + + print_warning( + f"No chat connector configured, falling back to the " + f"REST input channel. To connect your bot to another channel, " + f"read the docs here: {DOCS_BASE_URL}/messaging-and-voice-channels" + ) + + kwargs = rasa.shared.utils.common.minimal_kwargs( + kwargs, rasa.core.run.serve_application + ) + rasa.core.run.serve_application( + model, + channel=connector, + credentials=credentials, + endpoints=_endpoints, + **kwargs, + ) + + +def train( + domain: "Text", + config: "Text", + training_files: "Union[Text, List[Text]]", + output: "Text" = rasa.shared.constants.DEFAULT_MODELS_PATH, + dry_run: bool = False, + force_training: bool = False, + fixed_model_name: "Optional[Text]" = None, + persist_nlu_training_data: bool = False, + core_additional_arguments: "Optional[Dict]" = None, + nlu_additional_arguments: "Optional[Dict]" = None, + loop: "Optional[asyncio.AbstractEventLoop]" = None, + model_to_finetune: "Optional[Text]" = None, + finetuning_epoch_fraction: float = 1.0, +) -> "TrainingResult": + """Runs Rasa Core and NLU training in `async` loop. + + Args: + domain: Path to the domain file. + config: Path to the config for Core and NLU. + training_files: Paths to the training data for Core and NLU. + output: Output path. + dry_run: If `True` then no training will be done, and the information about + whether the training needs to be done will be printed. + force_training: If `True` retrain model even if data has not changed. + fixed_model_name: Name of model to be stored. + persist_nlu_training_data: `True` if the NLU training data should be persisted + with the model. + core_additional_arguments: Additional training parameters for core training. + nlu_additional_arguments: Additional training parameters forwarded to training + method of each NLU component. + loop: Optional EventLoop for running coroutines. + model_to_finetune: Optional path to a model which should be finetuned or + a directory in case the latest trained model should be used. + finetuning_epoch_fraction: The fraction currently specified training epochs + in the model configuration which should be used for finetuning. + + Returns: + An instance of `TrainingResult`. 
+ """ + from rasa.train import train_async + import rasa.utils.common + + return rasa.utils.common.run_in_loop( + train_async( + domain=domain, + config=config, + training_files=training_files, + output=output, + dry_run=dry_run, + force_training=force_training, + fixed_model_name=fixed_model_name, + persist_nlu_training_data=persist_nlu_training_data, + core_additional_arguments=core_additional_arguments, + nlu_additional_arguments=nlu_additional_arguments, + model_to_finetune=model_to_finetune, + finetuning_epoch_fraction=finetuning_epoch_fraction, + ), + loop, + ) + + +def test( + model: "Text", + stories: "Text", + nlu_data: "Text", + output: "Text" = rasa.shared.constants.DEFAULT_RESULTS_PATH, + additional_arguments: "Optional[Dict]" = None, +) -> None: + from rasa.test import test_core + import rasa.utils.common + from rasa.test import test_nlu + + if additional_arguments is None: + additional_arguments = {} + + test_core(model, stories, output, additional_arguments) + + rasa.utils.common.run_in_loop( + test_nlu(model, nlu_data, output, additional_arguments) + ) diff --git a/rasa/cli/data.py b/rasa/cli/data.py index 6df13fce89c6..30ccfdcbdc74 100644 --- a/rasa/cli/data.py +++ b/rasa/cli/data.py @@ -11,7 +11,6 @@ from rasa.cli.arguments import data as arguments from rasa.cli.arguments import default_arguments import rasa.cli.utils -import rasa.nlu.convert from rasa.shared.constants import ( DEFAULT_DATA_PATH, DEFAULT_CONFIG_PATH, @@ -295,6 +294,8 @@ def _validate_story_structure(validator: "Validator", args: argparse.Namespace) def _convert_nlu_data(args: argparse.Namespace) -> None: + import rasa.nlu.convert + from rasa.nlu.training_data.converters.nlu_markdown_to_yaml_converter import ( NLUMarkdownToYamlConverter, ) diff --git a/rasa/run.py b/rasa/run.py index 32e0e0ceb55b..5695300cfebf 100644 --- a/rasa/run.py +++ b/rasa/run.py @@ -1,10 +1,8 @@ import logging import typing -from typing import Dict, Text +from typing import Text import rasa.shared.utils.common -from rasa.shared.utils.cli import print_warning -from rasa.shared.constants import DOCS_BASE_URL from rasa.core.lock_store import LockStore logger = logging.getLogger(__name__) diff --git a/rasa/utils/train_utils.py b/rasa/utils/train_utils.py index 4b8f3f2c2890..aef4f02b2af7 100644 --- a/rasa/utils/train_utils.py +++ b/rasa/utils/train_utils.py @@ -1,5 +1,4 @@ from pathlib import Path -import tensorflow as tf import copy import numpy as np from typing import Optional, Text, Dict, Any, Union, List, Tuple, TYPE_CHECKING @@ -46,6 +45,7 @@ if TYPE_CHECKING: from rasa.nlu.extractors.extractor import EntityTagSpec from rasa.nlu.tokenizers.tokenizer import Token + from tensorflow.keras.callbacks import Callback def normalize(values: np.ndarray, ranking_length: Optional[int] = 0) -> np.ndarray: @@ -425,7 +425,7 @@ def create_common_callbacks( tensorboard_log_dir: Optional[Text] = None, tensorboard_log_level: Optional[Text] = None, checkpoint_dir: Optional[Path] = None, -) -> List[tf.keras.callbacks.Callback]: +) -> List["Callback"]: """Create common callbacks. The following callbacks are created: @@ -443,6 +443,8 @@ def create_common_callbacks( Returns: A list of callbacks. 
""" + import tensorflow as tf + callbacks = [RasaTrainingLogger(epochs, silent=False)] if tensorboard_log_dir: diff --git a/tests/cli/test_cli.py b/tests/cli/test_cli.py index 9097bf11cbfd..01dd754c45cd 100644 --- a/tests/cli/test_cli.py +++ b/tests/cli/test_cli.py @@ -1,23 +1,26 @@ +from pathlib import Path +import shutil from typing import Callable -from _pytest.pytester import RunResult +from _pytest.pytester import RunResult, Testdir import pytest import sys -def test_cli_start(run: Callable[..., RunResult]): +def test_cli_start_is_fast(testdir: Testdir): """ - Checks that a call to ``rasa --help`` does not take longer than 7 seconds - (10 seconds on Windows). + Checks that a call to ``rasa --help`` does not import any slow imports. """ - start = time.time() - run("--help") - end = time.time() + rasa_path = str( + (Path(__file__).parent / ".." / ".." / "rasa" / "__main__.py").absolute() + ) + args = [shutil.which("python3"), "-X", "importtime", rasa_path, "--help"] + result = testdir.run(*args) - duration = end - start + assert result.ret == 0 - # it sometimes takes a bit more time to start it on Windows - assert duration <= 20 if sys.platform == "win32" else 10 + # tensorflow is slow -> can't get imported when running basic CLI commands + result.stderr.no_fnmatch_line("*tensorflow.python.eager") def test_data_convert_help(run: Callable[..., RunResult]): From 61cbcd7265b1c78a031165266a61a665542b7d26 Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Wed, 10 Mar 2021 21:36:43 +0100 Subject: [PATCH 05/24] added changelog --- changelog/4280.improvement.md | 1 + 1 file changed, 1 insertion(+) create mode 100644 changelog/4280.improvement.md diff --git a/changelog/4280.improvement.md b/changelog/4280.improvement.md new file mode 100644 index 000000000000..542e9250bf69 --- /dev/null +++ b/changelog/4280.improvement.md @@ -0,0 +1 @@ +Improved CLI startup time. From ce29b4dbfc802288091ae60b06bad74cd1046923 Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Wed, 10 Mar 2021 21:53:28 +0100 Subject: [PATCH 06/24] improved docstrings --- rasa/api.py | 9 +++++++++ rasa/cli/data.py | 1 + rasa/cli/scaffold.py | 1 + rasa/run.py | 6 ++++++ 4 files changed, 17 insertions(+) diff --git a/rasa/api.py b/rasa/api.py index 43fe4aeea359..069702a50cd2 100644 --- a/rasa/api.py +++ b/rasa/api.py @@ -132,6 +132,15 @@ def test( output: "Text" = rasa.shared.constants.DEFAULT_RESULTS_PATH, additional_arguments: "Optional[Dict]" = None, ) -> None: + """Test a Rasa model against a set of test data. + + Args: + model: model to test + stories: path to the dialogue test data + nlu_data: path to the NLU test data + output: path to folder where all output will be stored + additional_arguments: additional arguments for the test call + """ from rasa.test import test_core import rasa.utils.common from rasa.test import test_nlu diff --git a/rasa/cli/data.py b/rasa/cli/data.py index 30ccfdcbdc74..79da531e64ba 100644 --- a/rasa/cli/data.py +++ b/rasa/cli/data.py @@ -353,6 +353,7 @@ def _convert_nlg_data(args: argparse.Namespace) -> None: def _migrate_responses(args: argparse.Namespace) -> None: """Migrate retrieval intent responses to the new 2.0 format. + It does so modifying the stories and domain files. 
""" from rasa.core.training.converters.responses_prefix_converter import ( diff --git a/rasa/cli/scaffold.py b/rasa/cli/scaffold.py index 05dee1f3b3f2..b9c913de296a 100644 --- a/rasa/cli/scaffold.py +++ b/rasa/cli/scaffold.py @@ -47,6 +47,7 @@ def add_subparser( def print_train_or_instructions(args: argparse.Namespace, path: Text) -> None: + """Train a model if the user wants to.""" import questionary import rasa.train diff --git a/rasa/run.py b/rasa/run.py index 5695300cfebf..ed3d9ef9b92f 100644 --- a/rasa/run.py +++ b/rasa/run.py @@ -15,6 +15,12 @@ def create_agent(model: Text, endpoints: Text = None) -> "Agent": + """Create an agent instance based on a stored model. + + Args: + model: file path to the stored model + endpoints: file path to the used endpoint configuration + """ from rasa.core.tracker_store import TrackerStore from rasa.core.utils import AvailableEndpoints from rasa.core.agent import Agent From 1f8a27f46f6b504286c009a756063d047bf455cc Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Wed, 10 Mar 2021 21:54:56 +0100 Subject: [PATCH 07/24] fixed data test --- rasa/cli/data.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/rasa/cli/data.py b/rasa/cli/data.py index 79da531e64ba..0fd3abb8f6c3 100644 --- a/rasa/cli/data.py +++ b/rasa/cli/data.py @@ -450,6 +450,8 @@ def _migrate_model_config(args: argparse.Namespace) -> None: Args: args: The commandline args with the required paths. """ + import rasa.core.config + configuration_file = Path(args.config) model_configuration = _get_configuration(configuration_file) From 6752dc1e6bf046a2b75d6d0ed0dd28edb2ca3bdf Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Thu, 11 Mar 2021 10:56:37 +0100 Subject: [PATCH 08/24] Update tests/shared/core/test_constants.py Co-authored-by: Tobias Wochinger --- tests/shared/core/test_constants.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/shared/core/test_constants.py b/tests/shared/core/test_constants.py index 960070a5ac08..5246aef89ded 100644 --- a/tests/shared/core/test_constants.py +++ b/tests/shared/core/test_constants.py @@ -24,5 +24,5 @@ (POLICY_NAME_RULE, RulePolicy), ], ) -def test_policy_names(name_in_constant, policy_class): +def test_policy_names(name_in_constant: Text, policy_class: Type): assert name_in_constant == policy_class.__name__ From d7446f4f37f9245827ca983799c9a39c5b542f4d Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Thu, 11 Mar 2021 11:01:43 +0100 Subject: [PATCH 09/24] Update rasa/api.py Co-authored-by: Tobias Wochinger --- rasa/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rasa/api.py b/rasa/api.py index 069702a50cd2..d6feaac9012c 100644 --- a/rasa/api.py +++ b/rasa/api.py @@ -21,7 +21,7 @@ def run( connector: "Text" = None, credentials: "Text" = None, **kwargs: "Dict[Text, Any]", -): +) -> NoReturn: """Runs a Rasa model. Args: From ea8dc62ac0f2e5e9373130db8ba3511c99e21769 Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Thu, 11 Mar 2021 11:08:34 +0100 Subject: [PATCH 10/24] added fallback classifier name constant --- rasa/api.py | 2 +- rasa/core/config.py | 7 +++---- rasa/shared/core/constants.py | 6 ++++-- tests/cli/test_cli.py | 14 ++++++++++++++ tests/shared/core/test_constants.py | 13 +++++++++++++ 5 files changed, 35 insertions(+), 7 deletions(-) diff --git a/rasa/api.py b/rasa/api.py index d6feaac9012c..08f11f12aa48 100644 --- a/rasa/api.py +++ b/rasa/api.py @@ -10,7 +10,7 @@ # are getting imported. 
if typing.TYPE_CHECKING: - from typing import Any, Text, Dict, Union, List, Optional + from typing import Any, Text, Dict, Union, List, Optional, NoReturn from rasa.train import TrainingResult import asyncio diff --git a/rasa/core/config.py b/rasa/core/config.py index 84ccb5d01b1e..a0936541a1c7 100644 --- a/rasa/core/config.py +++ b/rasa/core/config.py @@ -17,7 +17,7 @@ POLICY_NAME_FALLBACK, POLICY_NAME_MAPPING, POLICY_NAME_TWO_STAGE_FALLBACK, - POLICY_NAME_FORM, + CLASSIFIER_NAME_FALLBACK, ) import rasa.utils.io from rasa.shared.constants import ( @@ -30,7 +30,6 @@ import rasa.shared.utils.io import rasa.utils.io -from rasa.nlu.classifiers.fallback_classifier import FallbackClassifier if TYPE_CHECKING: from rasa.core.policies.policy import Policy @@ -133,11 +132,11 @@ def _update_rule_policy_config_for_fallback( def _update_fallback_config(config: Dict, fallback_config: Dict) -> None: fallback_classifier_config = _get_config_for_name( - FallbackClassifier.__name__, config.get("pipeline", []) + CLASSIFIER_NAME_FALLBACK, config.get("pipeline", []) ) if not fallback_classifier_config: - fallback_classifier_config = {"name": FallbackClassifier.__name__} + fallback_classifier_config = {"name": CLASSIFIER_NAME_FALLBACK} config["pipeline"].append(fallback_classifier_config) nlu_threshold = fallback_config.get("nlu_threshold", DEFAULT_NLU_FALLBACK_THRESHOLD) diff --git a/rasa/shared/core/constants.py b/rasa/shared/core/constants.py index 470c8ca3dc30..f643923f4616 100644 --- a/rasa/shared/core/constants.py +++ b/rasa/shared/core/constants.py @@ -81,10 +81,12 @@ USE_TEXT_FOR_FEATURIZATION = "use_text_for_featurization" ENTITY_LABEL_SEPARATOR = "#" -# if you add more policy names, make sure to add a test as well to ensure -# that the name and the policy stay in sync +# if you add more policy/classifier names, make sure to add a test as well to ensure +# that the name and the class stay in sync POLICY_NAME_TWO_STAGE_FALLBACK = "TwoStageFallbackPolicy" POLICY_NAME_MAPPING = "MappingPolicy" POLICY_NAME_FALLBACK = "FallbackPolicy" POLICY_NAME_FORM = "FormPolicy" POLICY_NAME_RULE = "RulePolicy" + +CLASSIFIER_NAME_FALLBACK = "FallbackClassifier" diff --git a/tests/cli/test_cli.py b/tests/cli/test_cli.py index 01dd754c45cd..5fdcb25c39eb 100644 --- a/tests/cli/test_cli.py +++ b/tests/cli/test_cli.py @@ -9,6 +9,20 @@ def test_cli_start_is_fast(testdir: Testdir): """ Checks that a call to ``rasa --help`` does not import any slow imports. + + If this is failing this means, that a simple "rasa --help" commands imports + `tensorflow` which makes our CLI extremely slow. In case this test is failing + you've very likely added a global import of "tensorflow" which should be + avoided. Consider making this import (or the import of its parent module) + a local import. + + If you are clueless where that import happens, you can run + ``` + python -X importtime -m rasa.__main__ --help 2> import.log + tuna import.log + ``` + to get the import chain. 
+ (make sure to run with python >= 3.7, and install tune (pip install tuna)) """ rasa_path = str( diff --git a/tests/shared/core/test_constants.py b/tests/shared/core/test_constants.py index 5246aef89ded..1906d7a7c908 100644 --- a/tests/shared/core/test_constants.py +++ b/tests/shared/core/test_constants.py @@ -1,3 +1,5 @@ +from typing import Text, Type + import pytest from rasa.core.policies.fallback import FallbackPolicy @@ -5,7 +7,9 @@ from rasa.core.policies.mapping_policy import MappingPolicy from rasa.core.policies.rule_policy import RulePolicy from rasa.core.policies.two_stage_fallback import TwoStageFallbackPolicy +from rasa.nlu.classifiers.fallback_classifier import FallbackClassifier from rasa.shared.core.constants import ( + CLASSIFIER_NAME_FALLBACK, POLICY_NAME_FALLBACK, POLICY_NAME_MAPPING, POLICY_NAME_RULE, @@ -22,7 +26,16 @@ (POLICY_NAME_MAPPING, MappingPolicy), (POLICY_NAME_FORM, FormPolicy), (POLICY_NAME_RULE, RulePolicy), + (CLASSIFIER_NAME_FALLBACK, FallbackClassifier), ], ) def test_policy_names(name_in_constant: Text, policy_class: Type): assert name_in_constant == policy_class.__name__ + + +@pytest.mark.parametrize( + "name_in_constant, classifier_class", + [(CLASSIFIER_NAME_FALLBACK, FallbackClassifier),], +) +def test_classifier_names(name_in_constant: Text, classifier_class: Type): + assert name_in_constant == classifier_class.__name__ From 22727682dc0458b2aeb48ed3b03bdc97af7af0c9 Mon Sep 17 00:00:00 2001 From: Tom Bocklisch Date: Thu, 11 Mar 2021 11:13:39 +0100 Subject: [PATCH 11/24] fixed not defined name --- rasa/api.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/rasa/api.py b/rasa/api.py index 08f11f12aa48..7395713a27eb 100644 --- a/rasa/api.py +++ b/rasa/api.py @@ -21,7 +21,7 @@ def run( connector: "Text" = None, credentials: "Text" = None, **kwargs: "Dict[Text, Any]", -) -> NoReturn: +) -> "NoReturn": """Runs a Rasa model. Args: From 8afca53c5ed70085ddf6371dfb0654f5b0efebd9 Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Thu, 11 Mar 2021 13:51:46 +0100 Subject: [PATCH 12/24] use Python from sys.executable to get poetry's python version --- tests/cli/test_cli.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tests/cli/test_cli.py b/tests/cli/test_cli.py index 5fdcb25c39eb..57b3527489cf 100644 --- a/tests/cli/test_cli.py +++ b/tests/cli/test_cli.py @@ -1,5 +1,4 @@ from pathlib import Path -import shutil from typing import Callable from _pytest.pytester import RunResult, Testdir import pytest @@ -28,7 +27,7 @@ def test_cli_start_is_fast(testdir: Testdir): rasa_path = str( (Path(__file__).parent / ".." / ".." 
/ "rasa" / "__main__.py").absolute() ) - args = [shutil.which("python3"), "-X", "importtime", rasa_path, "--help"] + args = [sys.executable, "-X", "importtime", rasa_path, "--help"] result = testdir.run(*args) assert result.ret == 0 From f1e4f4bf964e1f970cb5e0d0f719ee1a43e796a6 Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 17:41:21 +0100 Subject: [PATCH 13/24] rename `rasa.train` [production code] --- rasa/api.py | 4 ++-- rasa/cli/scaffold.py | 2 +- rasa/cli/train.py | 4 ++-- rasa/{train.py => model_training.py} | 0 rasa/nlu/test.py | 2 +- rasa/server.py | 2 +- tests/cli/test_rasa_interactive.py | 2 +- tests/cli/test_rasa_train.py | 2 +- tests/conftest.py | 2 +- tests/core/policies/test_ted_policy.py | 2 +- tests/test_model.py | 2 +- tests/test_server.py | 2 +- tests/test_train.py | 6 +++--- 13 files changed, 16 insertions(+), 16 deletions(-) rename rasa/{train.py => model_training.py} (100%) diff --git a/rasa/api.py b/rasa/api.py index 7395713a27eb..0236c3f26074 100644 --- a/rasa/api.py +++ b/rasa/api.py @@ -11,7 +11,7 @@ if typing.TYPE_CHECKING: from typing import Any, Text, Dict, Union, List, Optional, NoReturn - from rasa.train import TrainingResult + from rasa.model_training import TrainingResult import asyncio @@ -103,7 +103,7 @@ def train( Returns: An instance of `TrainingResult`. """ - from rasa.train import train_async + from rasa.model_training import train_async import rasa.utils.common return rasa.utils.common.run_in_loop( diff --git a/rasa/cli/scaffold.py b/rasa/cli/scaffold.py index b9c913de296a..fd5f30e9f673 100644 --- a/rasa/cli/scaffold.py +++ b/rasa/cli/scaffold.py @@ -49,7 +49,7 @@ def add_subparser( def print_train_or_instructions(args: argparse.Namespace, path: Text) -> None: """Train a model if the user wants to.""" import questionary - import rasa.train + import rasa print_success("Finished creating project structure.") diff --git a/rasa/cli/train.py b/rasa/cli/train.py index 28d400124dd4..d64ed7b7a7b5 100644 --- a/rasa/cli/train.py +++ b/rasa/cli/train.py @@ -129,7 +129,7 @@ def train_core( Returns: Path to a trained model or `None` if training was not successful. """ - from rasa.train import train_core + from rasa.model_training import train_core output = train_path or args.out @@ -180,7 +180,7 @@ def train_nlu( Returns: Path to a trained model or `None` if training was not successful. 
""" - from rasa.train import train_nlu + from rasa.model_training import train_nlu output = train_path or args.out diff --git a/rasa/train.py b/rasa/model_training.py similarity index 100% rename from rasa/train.py rename to rasa/model_training.py diff --git a/rasa/nlu/test.py b/rasa/nlu/test.py index 9da46081d065..ab0b714cb718 100644 --- a/rasa/nlu/test.py +++ b/rasa/nlu/test.py @@ -1843,7 +1843,7 @@ async def compare_nlu( Returns: training examples per run """ - from rasa.train import train_nlu_async + from rasa.model_training import train_nlu_async training_examples_per_run = [] diff --git a/rasa/server.py b/rasa/server.py index 1f0b96cd2820..7091ade0f1a7 100644 --- a/rasa/server.py +++ b/rasa/server.py @@ -1037,7 +1037,7 @@ async def train(request: Request, temporary_directory: Path) -> HTTPResponse: with app.active_training_processes.get_lock(): app.active_training_processes.value += 1 - from rasa.train import train_async + from rasa.model_training import train_async # pass `None` to run in default executor training_result = await train_async(**training_payload) diff --git a/tests/cli/test_rasa_interactive.py b/tests/cli/test_rasa_interactive.py index 92ee20ae9405..554f197ca76a 100644 --- a/tests/cli/test_rasa_interactive.py +++ b/tests/cli/test_rasa_interactive.py @@ -8,7 +8,7 @@ import rasa from rasa.cli import interactive, train -from rasa.train import TrainingResult +from rasa.model_training import TrainingResult from tests.conftest import DEFAULT_NLU_DATA diff --git a/tests/cli/test_rasa_train.py b/tests/cli/test_rasa_train.py index 114439370fac..4fac719fef47 100644 --- a/tests/cli/test_rasa_train.py +++ b/tests/cli/test_rasa_train.py @@ -11,7 +11,7 @@ from rasa import model from rasa.nlu.model import Metadata from rasa.shared.nlu.training_data import training_data -from rasa.train import ( +from rasa.model_training import ( CODE_CORE_NEEDS_TO_BE_RETRAINED, CODE_NLU_NEEDS_TO_BE_RETRAINED, CODE_NLG_NEEDS_TO_BE_RETRAINED, diff --git a/tests/conftest.py b/tests/conftest.py index 79663126ef24..9572797d45b9 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -35,7 +35,7 @@ import rasa.core.run from rasa.core.tracker_store import InMemoryTrackerStore, TrackerStore from rasa.model import get_model -from rasa.train import train_async, train_nlu_async +from rasa.model_training import train_async, train_nlu_async from rasa.utils.common import TempDirectoryPath from tests.core.conftest import ( DEFAULT_DOMAIN_PATH_WITH_SLOTS, diff --git a/tests/core/policies/test_ted_policy.py b/tests/core/policies/test_ted_policy.py index 62d5b72f75ed..e0e42c023c21 100644 --- a/tests/core/policies/test_ted_policy.py +++ b/tests/core/policies/test_ted_policy.py @@ -22,7 +22,7 @@ from rasa.utils.tensorflow.data_generator import RasaBatchDataGenerator from rasa.shared.core.trackers import DialogueStateTracker from rasa.shared.nlu.interpreter import RegexInterpreter -from rasa.train import train_core +from rasa.model_training import train_core from rasa.utils import train_utils from rasa.utils.tensorflow.constants import ( EVAL_NUM_EXAMPLES, diff --git a/tests/test_model.py b/tests/test_model.py index 52e97902cf95..25ba91aad9ee 100644 --- a/tests/test_model.py +++ b/tests/test_model.py @@ -51,7 +51,7 @@ FingerprintComparisonResult, ) from rasa.exceptions import ModelNotFound -from rasa.train import train_core, train_core_async +from rasa.model_training import train_core, train_core_async from tests.core.conftest import DEFAULT_DOMAIN_PATH_WITH_MAPPING, DEFAULT_STACK_CONFIG diff --git 
a/tests/test_server.py b/tests/test_server.py index c6270b7c372a..e57952aab178 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -62,7 +62,7 @@ ) from rasa.shared.core.trackers import DialogueStateTracker from rasa.shared.nlu.constants import INTENT_NAME_KEY -from rasa.train import TrainingResult +from rasa.model_training import TrainingResult from rasa.utils.endpoints import EndpointConfig from tests.core.conftest import DEFAULT_STACK_CONFIG from tests.nlu.utilities import ResponseTest diff --git a/tests/test_train.py b/tests/test_train.py index 2d5145d9d8e6..3985d1a1f562 100644 --- a/tests/test_train.py +++ b/tests/test_train.py @@ -23,7 +23,7 @@ from rasa.core.interpreter import RasaNLUInterpreter from rasa.nlu.model import Interpreter -from rasa.train import train_core, train_nlu, train, dry_run_result +from rasa.model_training import train_core, train_nlu, train, dry_run_result from rasa.utils.tensorflow.constants import EPOCHS from tests.conftest import DEFAULT_CONFIG_PATH, DEFAULT_NLU_DATA, AsyncMock from tests.core.conftest import DEFAULT_DOMAIN_PATH_WITH_SLOTS, DEFAULT_STORIES_FILE @@ -286,13 +286,13 @@ def test_interpreter_of_old_model_passed_to_core_training( def test_load_interpreter_returns_none_for_none(): - from rasa.train import _load_interpreter + from rasa.model_training import _load_interpreter assert _load_interpreter(None) is None def test_interpreter_from_previous_model_returns_none_for_none(): - from rasa.train import _interpreter_from_previous_model + from rasa.model_training import _interpreter_from_previous_model assert _interpreter_from_previous_model(None) is None From 272eba9d4daaf38b876d9cb2083a0756653d64eb Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 17:42:20 +0100 Subject: [PATCH 14/24] rename `rasa.train` [test code] --- tests/conftest.py | 2 +- tests/{test_train.py => test_model_training.py} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename tests/{test_train.py => test_model_training.py} (100%) diff --git a/tests/conftest.py b/tests/conftest.py index 9572797d45b9..406744bbdb7d 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -79,7 +79,7 @@ Path("tests", "nlu", "selectors").absolute(), ], "category_full_model_training": [ - Path("tests", "test_train.py").absolute(), + Path("tests", "test_model_training.py").absolute(), Path("tests", "nlu", "test_train.py").absolute(), Path("tests", "core", "test_training.py").absolute(), Path("tests", "core", "test_examples.py").absolute(), diff --git a/tests/test_train.py b/tests/test_model_training.py similarity index 100% rename from tests/test_train.py rename to tests/test_model_training.py From 67968df917b79d60ddaa2126c54c8091494fb406 Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 17:44:29 +0100 Subject: [PATCH 15/24] rename `rasa.test` [production code] --- rasa/api.py | 4 ++-- rasa/cli/test.py | 12 ++++++++++-- rasa/core/test.py | 2 +- rasa/{test.py => model_testing.py} | 0 rasa/nlu/test.py | 6 +++--- tests/nlu/test_evaluation.py | 2 +- tests/test_test.py | 12 ++++++------ 7 files changed, 23 insertions(+), 15 deletions(-) rename rasa/{test.py => model_testing.py} (100%) diff --git a/rasa/api.py b/rasa/api.py index 0236c3f26074..a9d9ae10e49b 100644 --- a/rasa/api.py +++ b/rasa/api.py @@ -141,9 +141,9 @@ def test( output: path to folder where all output will be stored additional_arguments: additional arguments for the test call """ - from rasa.test import test_core + from rasa.model_testing import test_core import rasa.utils.common - from 
rasa.test import test_nlu + from rasa.model_testing import test_nlu if additional_arguments is None: additional_arguments = {} diff --git a/rasa/cli/test.py b/rasa/cli/test.py index b170a41b6ee3..a98847d9cfc1 100644 --- a/rasa/cli/test.py +++ b/rasa/cli/test.py @@ -68,7 +68,11 @@ def add_subparser( def run_core_test(args: argparse.Namespace) -> None: """Run core tests.""" - from rasa.test import test_core_models_in_directory, test_core, test_core_models + from rasa.model_testing import ( + test_core_models_in_directory, + test_core, + test_core_models, + ) from rasa.core.test import FAILED_STORIES_FILE stories = rasa.cli.utils.get_validated_path( @@ -144,7 +148,11 @@ async def run_nlu_test_async( no_errors: indicates if incorrect predictions should be written to a file or not. """ - from rasa.test import compare_nlu_models, perform_nlu_cross_validation, test_nlu + from rasa.model_testing import ( + compare_nlu_models, + perform_nlu_cross_validation, + test_nlu, + ) nlu_data = rasa.cli.utils.get_validated_path(data_path, "nlu", DEFAULT_DATA_PATH) nlu_data = rasa.shared.data.get_nlu_directory(nlu_data) diff --git a/rasa/core/test.py b/rasa/core/test.py index 2541f15cf599..b06adcf10ccd 100644 --- a/rasa/core/test.py +++ b/rasa/core/test.py @@ -769,7 +769,7 @@ async def test( Returns: Evaluation summary. """ - from rasa.test import get_evaluation_metrics + from rasa.model_testing import get_evaluation_metrics generator = await _create_data_generator(stories, agent, max_stories, e2e) completed_trackers = generator.generate_story_trackers() diff --git a/rasa/test.py b/rasa/model_testing.py similarity index 100% rename from rasa/test.py rename to rasa/model_testing.py diff --git a/rasa/nlu/test.py b/rasa/nlu/test.py index ab0b714cb718..5bf39964f858 100644 --- a/rasa/nlu/test.py +++ b/rasa/nlu/test.py @@ -645,7 +645,7 @@ def _calculate_report( report_as_dict: Optional[bool] = None, exclude_label: Optional[Text] = None, ) -> Tuple[Union[Text, Dict], float, float, float, np.ndarray, List[Text]]: - from rasa.test import get_evaluation_metrics + from rasa.model_testing import get_evaluation_metrics import sklearn.metrics import sklearn.utils.multiclass @@ -1930,7 +1930,7 @@ def _compute_metrics( Returns: metrics """ - from rasa.test import get_evaluation_metrics + from rasa.model_testing import get_evaluation_metrics # compute fold metrics targets, predictions = _targets_predictions_from( @@ -1952,7 +1952,7 @@ def _compute_entity_metrics( Returns: entity metrics """ - from rasa.test import get_evaluation_metrics + from rasa.model_testing import get_evaluation_metrics entity_metric_results: EntityMetrics = defaultdict(lambda: defaultdict(list)) extractors = get_entity_extractors(interpreter) diff --git a/tests/nlu/test_evaluation.py b/tests/nlu/test_evaluation.py index 1b7b42b323bf..36d29bb888bb 100644 --- a/tests/nlu/test_evaluation.py +++ b/tests/nlu/test_evaluation.py @@ -65,7 +65,7 @@ ) from rasa.shared.nlu.training_data.message import Message from rasa.shared.nlu.training_data.training_data import TrainingData -from rasa.test import compare_nlu_models +from rasa.model_testing import compare_nlu_models from rasa.utils.tensorflow.constants import EPOCHS, ENTITY_RECOGNITION from tests.nlu.conftest import DEFAULT_DATA_PATH diff --git a/tests/test_test.py b/tests/test_test.py index aea5aed8b157..a753218186ed 100644 --- a/tests/test_test.py +++ b/tests/test_test.py @@ -35,7 +35,7 @@ def monkeypatch_get_latest_model(tmp_path: Path, monkeypatch: MonkeyPatch) -> No def 
test_get_sanitized_model_directory_when_not_passing_model( capsys: CaptureFixture, tmp_path: Path, monkeypatch: MonkeyPatch ): - from rasa.test import _get_sanitized_model_directory + from rasa.model_testing import _get_sanitized_model_directory monkeypatch_get_latest_model(tmp_path, monkeypatch) @@ -54,7 +54,7 @@ def test_get_sanitized_model_directory_when_not_passing_model( def test_get_sanitized_model_directory_when_passing_model_file_explicitly( capsys: CaptureFixture, tmp_path: Path, monkeypatch: MonkeyPatch ): - from rasa.test import _get_sanitized_model_directory + from rasa.model_testing import _get_sanitized_model_directory monkeypatch_get_latest_model(tmp_path, monkeypatch) @@ -73,7 +73,7 @@ def test_get_sanitized_model_directory_when_passing_model_file_explicitly( def test_get_sanitized_model_directory_when_passing_other_input( capsys: CaptureFixture, tmp_path: Path, monkeypatch: MonkeyPatch ): - from rasa.test import _get_sanitized_model_directory + from rasa.model_testing import _get_sanitized_model_directory monkeypatch_get_latest_model(tmp_path, monkeypatch) @@ -109,7 +109,7 @@ def test_get_sanitized_model_directory_when_passing_other_input( def test_get_evaluation_metrics( targets, predictions, expected_precision, expected_fscore, expected_accuracy ): - from rasa.test import get_evaluation_metrics + from rasa.model_testing import get_evaluation_metrics report, precision, f1, accuracy = get_evaluation_metrics( targets, predictions, True, exclude_label=NO_ENTITY @@ -140,7 +140,7 @@ def test_get_evaluation_metrics( ], ) def test_get_label_set(targets, exclude_label, expected): - from rasa.test import get_unique_labels + from rasa.model_testing import get_unique_labels actual = get_unique_labels(targets, exclude_label) assert set(expected) == set(actual) @@ -158,7 +158,7 @@ async def test_interpreter_passed_to_agent( def test_e2e_warning_if_no_nlu_model( monkeypatch: MonkeyPatch, trained_core_model: Text, capsys: CaptureFixture ): - from rasa.test import test_core + from rasa.model_testing import test_core # Patching is bit more complicated as we have a module `train` and function # with the same name 😬 From dae1fff0b07625dc86d1802f9e0012b45f80da9a Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 17:45:05 +0100 Subject: [PATCH 16/24] rename `rasa.test` [test code] --- tests/{test_test.py => test_model_testing.py} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename tests/{test_test.py => test_model_testing.py} (100%) diff --git a/tests/test_test.py b/tests/test_model_testing.py similarity index 100% rename from tests/test_test.py rename to tests/test_model_testing.py From 6ee886527a9cd53de7fbc42bfd7cfa9eef4d5656 Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 17:48:25 +0100 Subject: [PATCH 17/24] move `rasa.run.create_agent` to `rasa.core.agent` --- rasa/core/agent.py | 28 ++++++++++++++++++++++++++++ rasa/jupyter.py | 3 +-- rasa/run.py | 32 +------------------------------- 3 files changed, 30 insertions(+), 33 deletions(-) diff --git a/rasa/core/agent.py b/rasa/core/agent.py index dbe12ac3893e..b3ae8bdddd48 100644 --- a/rasa/core/agent.py +++ b/rasa/core/agent.py @@ -11,6 +11,7 @@ from aiohttp import ClientError import rasa +import rasa.utils from rasa.core import jobs, training from rasa.core.channels.channel import OutputChannel, UserMessage from rasa.core.constants import DEFAULT_REQUEST_TIMEOUT @@ -269,6 +270,33 @@ async def schedule_model_pulling( ) +def create_agent(model: Text, endpoints: Text = None) -> "Agent": + 
"""Create an agent instance based on a stored model. + + Args: + model: file path to the stored model + endpoints: file path to the used endpoint configuration + """ + from rasa.core.tracker_store import TrackerStore + from rasa.core.utils import AvailableEndpoints + from rasa.core.brokers.broker import EventBroker + import rasa.utils.common + + _endpoints = AvailableEndpoints.read_endpoints(endpoints) + + _broker = rasa.utils.common.run_in_loop(EventBroker.create(_endpoints.event_broker)) + _tracker_store = TrackerStore.create(_endpoints.tracker_store, event_broker=_broker) + _lock_store = LockStore.create(_endpoints.lock_store) + + return Agent.load( + model, + generator=_endpoints.nlg, + tracker_store=_tracker_store, + lock_store=_lock_store, + action_endpoint=_endpoints.action, + ) + + async def load_agent( model_path: Optional[Text] = None, model_server: Optional[EndpointConfig] = None, diff --git a/rasa/jupyter.py b/rasa/jupyter.py index e57bda6d0cee..ad4f3380734f 100644 --- a/rasa/jupyter.py +++ b/rasa/jupyter.py @@ -8,7 +8,7 @@ import rasa.utils.common if typing.TYPE_CHECKING: - from rasa.core.agent import Agent + from rasa.core.agent import Agent, create_agent def pprint(obj: Any): @@ -32,7 +32,6 @@ def chat( """ if model_path: - from rasa.run import create_agent agent = create_agent(model_path, endpoints) diff --git a/rasa/run.py b/rasa/run.py index ed3d9ef9b92f..475aa3c5864a 100644 --- a/rasa/run.py +++ b/rasa/run.py @@ -1,42 +1,12 @@ import logging import typing -from typing import Text import rasa.shared.utils.common -from rasa.core.lock_store import LockStore logger = logging.getLogger(__name__) if typing.TYPE_CHECKING: - from rasa.core.agent import Agent + pass # backwards compatibility run = rasa.run - - -def create_agent(model: Text, endpoints: Text = None) -> "Agent": - """Create an agent instance based on a stored model. 
- - Args: - model: file path to the stored model - endpoints: file path to the used endpoint configuration - """ - from rasa.core.tracker_store import TrackerStore - from rasa.core.utils import AvailableEndpoints - from rasa.core.agent import Agent - from rasa.core.brokers.broker import EventBroker - import rasa.utils.common - - _endpoints = AvailableEndpoints.read_endpoints(endpoints) - - _broker = rasa.utils.common.run_in_loop(EventBroker.create(_endpoints.event_broker)) - _tracker_store = TrackerStore.create(_endpoints.tracker_store, event_broker=_broker) - _lock_store = LockStore.create(_endpoints.lock_store) - - return Agent.load( - model, - generator=_endpoints.nlg, - tracker_store=_tracker_store, - lock_store=_lock_store, - action_endpoint=_endpoints.action, - ) From dc88cb0d436279a607b8fa41194aff9f7a4e5ca8 Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 17:49:47 +0100 Subject: [PATCH 18/24] rename `rasa.run` [production code] --- rasa/cli/run.py | 2 +- rasa/run.py | 12 ------------ 2 files changed, 1 insertion(+), 13 deletions(-) delete mode 100644 rasa/run.py diff --git a/rasa/cli/run.py b/rasa/cli/run.py index 99c785b1d607..c5d1e9cc5112 100644 --- a/rasa/cli/run.py +++ b/rasa/cli/run.py @@ -75,7 +75,7 @@ def _validate_model_path(model_path: Text, parameter: Text, default: Text): def run(args: argparse.Namespace): - import rasa.run + import rasa args.endpoints = rasa.cli.utils.get_validated_path( args.endpoints, "endpoints", DEFAULT_ENDPOINTS_PATH, True diff --git a/rasa/run.py b/rasa/run.py deleted file mode 100644 index 475aa3c5864a..000000000000 --- a/rasa/run.py +++ /dev/null @@ -1,12 +0,0 @@ -import logging -import typing - -import rasa.shared.utils.common - -logger = logging.getLogger(__name__) - -if typing.TYPE_CHECKING: - pass - -# backwards compatibility -run = rasa.run From a218e862715b900cd3f0579c302a12a5bfd74eb5 Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 17:59:26 +0100 Subject: [PATCH 19/24] adapt mocking in test --- tests/test_model_training.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/test_model_training.py b/tests/test_model_training.py index 3985d1a1f562..ad4fa6c1b14c 100644 --- a/tests/test_model_training.py +++ b/tests/test_model_training.py @@ -233,7 +233,7 @@ def test_trained_interpreter_passed_to_core_training( # Patching is bit more complicated as we have a module `train` and function # with the same name 😬 monkeypatch.setattr( - sys.modules["rasa.train"], + rasa.model_training, "_train_nlu_with_validated_data", AsyncMock(return_value=unpacked_trained_rasa_model), ) From a54532671b024eac07a004735b2e54e5cddd09a7 Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 19:03:49 +0100 Subject: [PATCH 20/24] remove shorthand import for `rasa.core.train` and `rasa.core.visualize` --- rasa/cli/visualize.py | 2 +- rasa/core/__init__.py | 3 --- rasa/core/training/interactive.py | 5 ++-- rasa/model_training.py | 2 +- tests/test_model_training.py | 41 +++++++++++-------------------- 5 files changed, 19 insertions(+), 34 deletions(-) diff --git a/rasa/cli/visualize.py b/rasa/cli/visualize.py index 17b2ebff7770..0a69fc8929eb 100644 --- a/rasa/cli/visualize.py +++ b/rasa/cli/visualize.py @@ -38,7 +38,7 @@ def visualize_stories(args: argparse.Namespace): args.nlu = rasa.shared.data.get_nlu_directory(DEFAULT_DATA_PATH) rasa.utils.common.run_in_loop( - rasa.core.visualize( + rasa.core.visualize.visualize( args.config, args.domain, args.stories, args.nlu, args.out, args.max_history 
) ) diff --git a/rasa/core/__init__.py b/rasa/core/__init__.py index 0f76ff881aef..4defa9baa4c2 100644 --- a/rasa/core/__init__.py +++ b/rasa/core/__init__.py @@ -2,9 +2,6 @@ import rasa -from rasa.core.train import train -from rasa.core.visualize import visualize - logging.getLogger(__name__).addHandler(logging.NullHandler()) __version__ = rasa.__version__ diff --git a/rasa/core/training/interactive.py b/rasa/core/training/interactive.py index 3b12e725715c..a57f6f2eaf46 100644 --- a/rasa/core/training/interactive.py +++ b/rasa/core/training/interactive.py @@ -34,7 +34,8 @@ REQUESTED_SLOT, LOOP_INTERRUPTED, ) -from rasa.core import run, train, utils +from rasa.core import run, utils +import rasa.core.train from rasa.core.constants import DEFAULT_SERVER_FORMAT, DEFAULT_SERVER_PORT from rasa.shared.core.domain import Domain import rasa.shared.core.events @@ -1628,7 +1629,7 @@ async def train_agent_on_start( model_directory = args.get("out", tempfile.mkdtemp(suffix="_core_model")) - _agent = await train( + _agent = await rasa.core.train.train( args.get("domain"), args.get("stories"), model_directory, diff --git a/rasa/model_training.py b/rasa/model_training.py index 96d889e6dc2b..d6b1fe0f1f00 100644 --- a/rasa/model_training.py +++ b/rasa/model_training.py @@ -545,7 +545,7 @@ async def _train_core_with_validated_data( model_type="core", is_finetuning=model_to_finetune is not None, ): - await rasa.core.train( + await rasa.core.train.train( domain_file=domain, training_resource=file_importer, output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME), diff --git a/tests/test_model_training.py b/tests/test_model_training.py index ad4fa6c1b14c..b30028dab1ba 100644 --- a/tests/test_model_training.py +++ b/tests/test_model_training.py @@ -15,6 +15,7 @@ from rasa.core.policies.ted_policy import TEDPolicy import rasa.model import rasa.core +import rasa.core.train import rasa.nlu from rasa.nlu.classifiers.diet_classifier import DIETClassifier import rasa.shared.importers.autoconfig as autoconfig @@ -239,8 +240,7 @@ def test_trained_interpreter_passed_to_core_training( ) # Mock the actual Core training - _train_core = AsyncMock() - monkeypatch.setattr(rasa.core, "train", _train_core) + _train_core = mock_core_training(monkeypatch) train( DEFAULT_DOMAIN_PATH_WITH_SLOTS, @@ -270,8 +270,7 @@ def test_interpreter_of_old_model_passed_to_core_training( ) # Mock the actual Core training - _train_core = AsyncMock() - monkeypatch.setattr(rasa.core, "train", _train_core) + _train_core = mock_core_training(monkeypatch) train( DEFAULT_DOMAIN_PATH_WITH_SLOTS, @@ -312,7 +311,7 @@ def test_train_core_autoconfig( # skip actual core training monkeypatch.setattr( - sys.modules["rasa.train"], "_train_core_with_validated_data", AsyncMock() + rasa.model_training, "_train_core_with_validated_data", AsyncMock() ) # do training @@ -341,7 +340,7 @@ def test_train_nlu_autoconfig( monkeypatch.setattr(autoconfig, "get_configuration", mocked_get_configuration) monkeypatch.setattr( - sys.modules["rasa.train"], "_train_nlu_with_validated_data", AsyncMock() + rasa.model_training, "_train_nlu_with_validated_data", AsyncMock() ) # do training @@ -367,7 +366,7 @@ async def mock_async_func(*args: Any, **kwargs: Any) -> None: def mock_core_training(monkeypatch: MonkeyPatch) -> Mock: - return mock_async(monkeypatch, rasa.core, rasa.core.train.__name__) + return mock_async(monkeypatch, rasa.core.train, rasa.core.train.train.__name__) def mock_nlu_training(monkeypatch: MonkeyPatch) -> Mock: @@ -623,8 +622,7 @@ def 
test_model_finetuning( mocked_nlu_training = Mock(wraps=rasa.nlu.train) monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) - mocked_core_training = Mock(wraps=rasa.core.train) - monkeypatch.setattr(rasa.core, rasa.core.train.__name__, mocked_core_training) + mocked_core_training = mock_core_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") @@ -659,9 +657,7 @@ def test_model_finetuning_core( trained_moodbot_path: Text, use_latest_model: bool, ): - mocked_core_training = AsyncMock() - monkeypatch.setattr(rasa.core, rasa.core.train.__name__, mocked_core_training) - + mocked_core_training = mock_core_training(monkeypatch) mock_agent_load = Mock(wraps=Agent.load) monkeypatch.setattr(Agent, "load", mock_agent_load) @@ -710,9 +706,7 @@ def test_model_finetuning_core( def test_model_finetuning_core_with_default_epochs( tmp_path: Path, monkeypatch: MonkeyPatch, trained_moodbot_path: Text, ): - mocked_core_training = AsyncMock() - monkeypatch.setattr(rasa.core, rasa.core.train.__name__, mocked_core_training) - + mocked_core_training = mock_core_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") @@ -743,8 +737,7 @@ def test_model_finetuning_core_with_default_epochs( def test_model_finetuning_core_new_domain_label( tmp_path: Path, monkeypatch: MonkeyPatch, trained_moodbot_path: Text, ): - mocked_core_training = AsyncMock() - monkeypatch.setattr(rasa.core, rasa.core.train.__name__, mocked_core_training) + mocked_core_training = mock_core_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") @@ -770,10 +763,8 @@ def test_model_finetuning_core_new_domain_label( def test_model_finetuning_new_domain_label_stops_all_training( tmp_path: Path, monkeypatch: MonkeyPatch, trained_moodbot_path: Text, ): - mocked_core_training = AsyncMock() - mocked_nlu_training = AsyncMock() - monkeypatch.setattr(rasa.core, rasa.core.train.__name__, mocked_core_training) - monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) + mocked_core_training = mock_core_training(monkeypatch) + mocked_nlu_training = mock_nlu_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") @@ -1015,9 +1006,7 @@ def test_model_finetuning_with_invalid_model( mocked_nlu_training = AsyncMock(return_value="") monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) - mocked_core_training = AsyncMock() - monkeypatch.setattr(rasa.core, rasa.core.train.__name__, mocked_core_training) - + mocked_core_training = mock_core_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") @@ -1048,9 +1037,7 @@ def test_model_finetuning_with_invalid_model_core( model_to_fine_tune: Text, capsys: CaptureFixture, ): - mocked_core_training = AsyncMock() - monkeypatch.setattr(rasa.core, rasa.core.train.__name__, mocked_core_training) - + mocked_core_training = mock_core_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") From 3c9a2655e139657be97986197824ed8717069b43 Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 19:18:28 +0100 Subject: [PATCH 21/24] remove shorthand syntax for `rasa.nlu.train`, `rasa.nlu.test`, `rasa.nlu.cross_validate` --- rasa/model_training.py | 2 +- rasa/nlu/__init__.py | 3 -- rasa/server.py | 8 ++--- tests/nlu/classifiers/test_diet_classifier.py | 25 ++++++++-------- .../extractors/test_crf_entity_extractor.py | 6 ++-- tests/nlu/selectors/test_selectors.py | 12 ++++---- 
tests/nlu/test_components.py | 9 +++--- tests/nlu/test_evaluation.py | 4 +-- tests/nlu/test_persistor.py | 5 ++-- tests/nlu/test_train.py | 21 +++++++------ tests/test_model_training.py | 30 +++++++------------ tests/test_server.py | 9 ++++-- 12 files changed, 65 insertions(+), 69 deletions(-) diff --git a/rasa/model_training.py b/rasa/model_training.py index d6b1fe0f1f00..b8106fe365df 100644 --- a/rasa/model_training.py +++ b/rasa/model_training.py @@ -754,7 +754,7 @@ async def _train_nlu_with_validated_data( model_type="nlu", is_finetuning=model_to_finetune is not None, ): - await rasa.nlu.train( + await rasa.nlu.train.train( config, file_importer, _train_path, diff --git a/rasa/nlu/__init__.py b/rasa/nlu/__init__.py index c78df260380d..4defa9baa4c2 100644 --- a/rasa/nlu/__init__.py +++ b/rasa/nlu/__init__.py @@ -1,9 +1,6 @@ import logging import rasa -from rasa.nlu.train import train -from rasa.nlu.test import run_evaluation as test -from rasa.nlu.test import cross_validate logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/rasa/server.py b/rasa/server.py index 7091ade0f1a7..a7f1837d01ed 100644 --- a/rasa/server.py +++ b/rasa/server.py @@ -7,7 +7,6 @@ import traceback from collections import defaultdict from functools import reduce, wraps -from http import HTTPStatus from inspect import isawaitable from pathlib import Path from http import HTTPStatus @@ -68,7 +67,8 @@ from rasa.shared.core.trackers import DialogueStateTracker, EventVerbosity from rasa.core.utils import AvailableEndpoints from rasa.nlu.emulators.no_emulator import NoEmulator -from rasa.nlu.test import run_evaluation, CVEvaluationResult +import rasa.nlu.test +from rasa.nlu.test import CVEvaluationResult from rasa.utils.endpoints import EndpointConfig if TYPE_CHECKING: @@ -1190,7 +1190,7 @@ async def _evaluate_model_using_test_set( model_directory = eval_agent.model_directory _, nlu_model = model.get_model_subdirectories(model_directory) - return await run_evaluation( + return await rasa.nlu.test.run_evaluation( data_path, nlu_model, disable_plotting=True, report_as_dict=True ) @@ -1202,7 +1202,7 @@ async def _cross_validate(data_file: Text, config_file: Text, folds: int) -> Dic config = await importer.get_config() nlu_data = await importer.get_nlu_data() - evaluations = rasa.nlu.cross_validate( + evaluations = rasa.nlu.test.cross_validate( data=nlu_data, n_folds=folds, nlu_config=config, diff --git a/tests/nlu/classifiers/test_diet_classifier.py b/tests/nlu/classifiers/test_diet_classifier.py index 47367c748aab..f0bbff90090c 100644 --- a/tests/nlu/classifiers/test_diet_classifier.py +++ b/tests/nlu/classifiers/test_diet_classifier.py @@ -1,5 +1,4 @@ from pathlib import Path -from typing import Text import numpy as np import pytest @@ -9,7 +8,7 @@ import rasa.model from rasa.shared.nlu.training_data.features import Features -from rasa.nlu import train +import rasa.nlu.train from rasa.nlu.classifiers import LABEL_RANKING_LENGTH from rasa.nlu.config import RasaNLUModelConfig from rasa.shared.nlu.constants import ( @@ -163,7 +162,7 @@ def test_model_data_signature_with_entities( # create tokens for entity parsing inside DIET tokenizer = WhitespaceTokenizer() - tokenizer.train(training_data) + tokenizer.rasa.nlu.train.train(training_data) model_data = classifier.preprocess_train_data(training_data) entity_exists = "entities" in model_data.get_signature().keys() @@ -178,7 +177,7 @@ async def _train_persist_load_with_different_settings( ): _config = RasaNLUModelConfig({"pipeline": pipeline, "language": 
"en"}) - (trainer, trained, persisted_path) = await train( + (trainer, trained, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmp_path), data="data/examples/rasa/demo-rasa-multi-intent.yml", @@ -287,7 +286,7 @@ async def test_raise_error_on_incorrect_pipeline(component_builder, tmp_path: Pa ) with pytest.raises(Exception) as e: - await train( + await rasa.nlu.train.train( _config, path=str(tmp_path), data=DEFAULT_DATA_PATH, @@ -351,7 +350,7 @@ async def test_softmax_normalization( pipeline[2].update(classifier_params) _config = RasaNLUModelConfig({"pipeline": pipeline}) - (trained_model, _, persisted_path) = await train( + (trained_model, _, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmp_path), data=data_path, component_builder=component_builder ) loaded = Interpreter.load(persisted_path, component_builder) @@ -399,7 +398,7 @@ async def test_inner_linear_normalization( pipeline[2].update(classifier_params) _config = RasaNLUModelConfig({"pipeline": pipeline}) - (trained_model, _, persisted_path) = await train( + (trained_model, _, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmp_path), data=data_path, component_builder=component_builder ) loaded = Interpreter.load(persisted_path, component_builder) @@ -440,7 +439,7 @@ async def test_margin_loss_is_not_normalized( monkeypatch.setattr(train_utils, "normalize", mock.normalize) _config = RasaNLUModelConfig({"pipeline": pipeline}) - (trained_model, _, persisted_path) = await train( + (trained_model, _, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmpdir), data="data/test/many_intents.yml", @@ -477,14 +476,14 @@ async def test_set_random_seed(component_builder, tmpdir): ) # first run - (trained_a, _, persisted_path_a) = await train( + (trained_a, _, persisted_path_a) = await rasa.nlu.train.train( _config, path=tmpdir.strpath + "_a", data=DEFAULT_DATA_PATH, component_builder=component_builder, ) # second run - (trained_b, _, persisted_path_b) = await train( + (trained_b, _, persisted_path_b) = await rasa.nlu.train.train( _config, path=tmpdir.strpath + "_b", data=DEFAULT_DATA_PATH, @@ -525,7 +524,7 @@ async def test_train_tensorboard_logging( } ) - await train( + await rasa.nlu.train.train( _config, path=str(tmpdir), data="data/examples/rasa/demo-rasa-multi-intent.yml", @@ -562,7 +561,7 @@ async def test_train_model_checkpointing( } ) - await train( + await rasa.nlu.train.train( _config, path=str(tmpdir), data="data/examples/rasa/demo-rasa.yml", @@ -602,7 +601,7 @@ async def test_train_persist_load_with_composite_entities( _config = RasaNLUModelConfig({"pipeline": pipeline, "language": "en"}) - (trainer, trained, persisted_path) = await train( + (trainer, trained, persisted_path) = await rasa.nlu.train.train( _config, path=tmpdir.strpath, data="data/test/demo-rasa-composite-entities.yml", diff --git a/tests/nlu/extractors/test_crf_entity_extractor.py b/tests/nlu/extractors/test_crf_entity_extractor.py index 84849f1046c8..358296c9aab7 100644 --- a/tests/nlu/extractors/test_crf_entity_extractor.py +++ b/tests/nlu/extractors/test_crf_entity_extractor.py @@ -5,7 +5,7 @@ import pytest from rasa.nlu.components import ComponentBuilder -from rasa.nlu import train +import rasa.nlu.train from rasa.nlu.config import RasaNLUModelConfig from rasa.nlu.model import Interpreter from rasa.nlu.featurizers.dense_featurizer.spacy_featurizer import SpacyFeaturizer @@ -27,7 +27,7 @@ async def test_train_persist_load_with_composite_entities( _config = RasaNLUModelConfig({"pipeline": pipeline, 
"language": "en"}) - (trainer, trained, persisted_path) = await train( + (trainer, trained, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmp_path), data="data/test/demo-rasa-composite-entities.yml", @@ -98,7 +98,7 @@ async def test_train_persist_with_different_configurations( _config = RasaNLUModelConfig({"pipeline": pipeline, "language": "en"}) - (trainer, trained, persisted_path) = await train( + (trainer, trained, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmp_path), data="data/examples/rasa", diff --git a/tests/nlu/selectors/test_selectors.py b/tests/nlu/selectors/test_selectors.py index 8dbcf1dd0a9b..35c753095334 100644 --- a/tests/nlu/selectors/test_selectors.py +++ b/tests/nlu/selectors/test_selectors.py @@ -7,7 +7,7 @@ from _pytest.monkeypatch import MonkeyPatch import rasa.model -from rasa.nlu import train +import rasa.nlu.train from rasa.nlu.components import ComponentBuilder from rasa.shared.nlu.training_data import util from rasa.nlu.config import RasaNLUModelConfig @@ -228,7 +228,7 @@ async def test_train_model_checkpointing( } ) - await train( + await rasa.nlu.train.train( _config, path=str(tmpdir), data="data/test_selectors", @@ -258,7 +258,7 @@ async def _train_persist_load_with_different_settings( ): _config = RasaNLUModelConfig({"pipeline": pipeline, "language": "en"}) - (trainer, trained, persisted_path) = await train( + (trainer, trained, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmp_path), data="data/examples/rasa/demo-rasa.yml", @@ -342,7 +342,7 @@ async def test_cross_entropy_with_linear_norm( pipeline[2].update(classifier_params) _config = RasaNLUModelConfig({"pipeline": pipeline}) - (trained_model, _, persisted_path) = await train( + (trained_model, _, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmp_path), data="data/test_selectors", @@ -388,7 +388,7 @@ async def test_margin_loss_is_not_normalized( monkeypatch.setattr(train_utils, "normalize", mock.normalize) _config = RasaNLUModelConfig({"pipeline": pipeline}) - (trained_model, _, persisted_path) = await train( + (trained_model, _, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmp_path), data="data/test_selectors", @@ -428,7 +428,7 @@ async def test_softmax_ranking( pipeline[2].update(classifier_params) _config = RasaNLUModelConfig({"pipeline": pipeline}) - (trained_model, _, persisted_path) = await train( + (trained_model, _, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmp_path), data=data_path, component_builder=component_builder ) loaded = Interpreter.load(persisted_path, component_builder) diff --git a/tests/nlu/test_components.py b/tests/nlu/test_components.py index c3098e5df3a3..5a130b9b4537 100644 --- a/tests/nlu/test_components.py +++ b/tests/nlu/test_components.py @@ -3,7 +3,8 @@ import pytest -from rasa.nlu import registry, train +from rasa.nlu import registry +import rasa.nlu.train from rasa.nlu.components import Component, ComponentBuilder, find_unavailable_packages from rasa.nlu.config import RasaNLUModelConfig from rasa.shared.exceptions import InvalidConfigException @@ -98,7 +99,7 @@ async def test_example_component(component_builder: ComponentBuilder, tmp_path: {"pipeline": [{"name": "tests.nlu.example_component.MyComponent"}]} ) - (trainer, trained, persisted_path) = await train( + (trainer, trained, persisted_path) = await rasa.nlu.train.train( _config, data=DEFAULT_DATA_PATH, path=str(tmp_path), @@ -199,7 +200,7 @@ async def 
test_validate_requirements_raises_exception_on_component_without_name( ) with pytest.raises(InvalidConfigException): - await train( + await rasa.nlu.train.train( _config, data=DEFAULT_DATA_PATH, path=str(tmp_path), ) @@ -211,7 +212,7 @@ async def test_validate_component_keys_raises_warning_on_invalid_key(tmp_path: P ) with pytest.warns(UserWarning) as record: - await train( + await rasa.nlu.train.train( _config, data=DEFAULT_DATA_PATH, path=str(tmp_path), ) diff --git a/tests/nlu/test_evaluation.py b/tests/nlu/test_evaluation.py index 36d29bb888bb..e8d665d890d9 100644 --- a/tests/nlu/test_evaluation.py +++ b/tests/nlu/test_evaluation.py @@ -12,7 +12,7 @@ import rasa.shared.nlu.training_data.loading import rasa.shared.utils.io import rasa.utils.io -from rasa.nlu import train +import rasa.nlu.train from rasa.nlu.classifiers.diet_classifier import DIETClassifier from rasa.nlu.classifiers.fallback_classifier import FallbackClassifier from rasa.nlu.components import ComponentBuilder, Component @@ -394,7 +394,7 @@ async def test_eval_data( ], ) - (_, _, persisted_path) = await train( + (_, _, persisted_path) = await rasa.nlu.train.train( _config, path=str(tmp_path), data=data_importer, diff --git a/tests/nlu/test_persistor.py b/tests/nlu/test_persistor.py index baa0a2ea18d0..d852bfef38ff 100644 --- a/tests/nlu/test_persistor.py +++ b/tests/nlu/test_persistor.py @@ -4,7 +4,8 @@ from moto import mock_s3 -from rasa.nlu import persistor, train +from rasa.nlu import persistor +import rasa.nlu.train from rasa.nlu.config import RasaNLUModelConfig @@ -24,7 +25,7 @@ async def test_list_method_method_in_AWS_persistor( # noinspection PyPep8Naming os.environ["BUCKET_NAME"] = "rasa-test" os.environ["AWS_DEFAULT_REGION"] = "us-west-1" - (trained, _, persisted_path) = await train( + (trained, _, persisted_path) = await rasa.nlu.train( _config, data="data/test/demo-rasa-small.json", path=str(tmp_path), diff --git a/tests/nlu/test_train.py b/tests/nlu/test_train.py index 93924ede2f9d..bc31899cbe10 100644 --- a/tests/nlu/test_train.py +++ b/tests/nlu/test_train.py @@ -1,7 +1,8 @@ import os import pytest -from rasa.nlu import registry, train +from rasa.nlu import registry +import rasa.nlu.train from rasa.nlu.config import RasaNLUModelConfig from rasa.nlu.model import Interpreter, Trainer from rasa.shared.nlu.training_data.training_data import TrainingData @@ -138,7 +139,7 @@ def test_all_components_are_in_at_least_one_test_pipeline(): async def test_train_persist_load_parse(language, pipeline, component_builder, tmpdir): _config = RasaNLUModelConfig({"pipeline": pipeline, "language": language}) - (trained, _, persisted_path) = await train( + (trained, _, persisted_path) = await rasa.nlu.train.train( _config, path=tmpdir.strpath, data=DEFAULT_DATA_PATH, @@ -167,7 +168,7 @@ def test_train_model_without_data(language, pipeline, component_builder, tmpdir) _config = RasaNLUModelConfig({"pipeline": pipeline, "language": language}) trainer = Trainer(_config, component_builder) - trainer.train(TrainingData()) + trainer.rasa.nlu.train.train(TrainingData()) persisted_path = trainer.persist(tmpdir.strpath) loaded = Interpreter.load(persisted_path, component_builder) @@ -205,14 +206,16 @@ def test_load_and_persist_without_train(language, pipeline, component_builder, t def test_load_and_persist_without_train_non_windows( language, pipeline, component_builder, tmpdir ): - test_load_and_persist_without_train(language, pipeline, component_builder, tmpdir) + test_load_and_persist_without_rasa.nlu.train.train( + language, pipeline, 
component_builder, tmpdir + ) async def test_train_model_empty_pipeline(component_builder): _config = RasaNLUModelConfig({"pipeline": None, "language": "en"}) with pytest.raises(ValueError): - await train( + await rasa.nlu.train.train( _config, data=DEFAULT_DATA_PATH, component_builder=component_builder ) @@ -222,7 +225,7 @@ async def test_train_named_model(component_builder, tmpdir): {"pipeline": [{"name": "KeywordIntentClassifier"}], "language": "en"} ) - (trained, _, persisted_path) = await train( + (trained, _, persisted_path) = await rasa.nlu.train.train( _config, path=tmpdir.strpath, data=DEFAULT_DATA_PATH, @@ -242,7 +245,7 @@ async def test_handles_pipeline_with_non_existing_component( pretrained_embeddings_spacy_config.pipeline.append({"name": "my_made_up_component"}) with pytest.raises(Exception) as execinfo: - await train( + await rasa.nlu.train.train( pretrained_embeddings_spacy_config, data=DEFAULT_DATA_PATH, component_builder=component_builder, @@ -255,7 +258,7 @@ async def test_train_model_training_data_persisted(component_builder, tmpdir): {"pipeline": [{"name": "KeywordIntentClassifier"}], "language": "en"} ) - (trained, _, persisted_path) = await train( + (trained, _, persisted_path) = await rasa.nlu.train.train( _config, path=tmpdir.strpath, data=DEFAULT_DATA_PATH, @@ -276,7 +279,7 @@ async def test_train_model_no_training_data_persisted(component_builder, tmpdir) {"pipeline": [{"name": "KeywordIntentClassifier"}], "language": "en"} ) - (trained, _, persisted_path) = await train( + (trained, _, persisted_path) = await rasa.nlu.train.train( _config, path=tmpdir.strpath, data=DEFAULT_DATA_PATH, diff --git a/tests/test_model_training.py b/tests/test_model_training.py index b30028dab1ba..0e0354e838d1 100644 --- a/tests/test_model_training.py +++ b/tests/test_model_training.py @@ -370,7 +370,7 @@ def mock_core_training(monkeypatch: MonkeyPatch) -> Mock: def mock_nlu_training(monkeypatch: MonkeyPatch) -> Mock: - return mock_async(monkeypatch, rasa.nlu, rasa.nlu.train.__name__) + return mock_async(monkeypatch, rasa.nlu.train, rasa.nlu.train.train.__name__) def new_model_path_in_same_dir(old_model_path: Text) -> Text: @@ -619,9 +619,7 @@ def test_model_finetuning( trained_rasa_model: Text, use_latest_model: bool, ): - mocked_nlu_training = Mock(wraps=rasa.nlu.train) - monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) - + mocked_nlu_training = mock_nlu_training(monkeypatch) mocked_core_training = mock_core_training(monkeypatch) (tmp_path / "models").mkdir() @@ -798,8 +796,7 @@ def test_model_finetuning_nlu( trained_nlu_moodbot_path: Text, use_latest_model: bool, ): - mocked_nlu_training = AsyncMock(return_value="") - monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) + mocked_nlu_training = mock_nlu_training(monkeypatch) mock_interpreter_create = Mock(wraps=Interpreter.create) monkeypatch.setattr(Interpreter, "create", mock_interpreter_create) @@ -853,8 +850,7 @@ def test_model_finetuning_nlu( def test_model_finetuning_nlu_new_label( tmp_path: Path, monkeypatch: MonkeyPatch, trained_nlu_moodbot_path: Text, ): - mocked_nlu_training = AsyncMock(return_value="") - monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) + mocked_nlu_training = mock_nlu_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") @@ -879,8 +875,7 @@ def test_model_finetuning_nlu_new_label( def test_model_finetuning_nlu_new_entity( tmp_path: Path, monkeypatch: MonkeyPatch, trained_nlu_moodbot_path: Text, ): - 
mocked_nlu_training = AsyncMock(return_value="") - monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) + mocked_nlu_training = mock_nlu_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") @@ -910,8 +905,7 @@ def test_model_finetuning_nlu_new_label_already_in_domain( default_config_path: Text, default_domain_path: Text, ): - mocked_nlu_training = AsyncMock(return_value="") - monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) + mocked_nlu_training = mock_nlu_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") @@ -937,8 +931,7 @@ def test_model_finetuning_nlu_new_label_already_in_domain( def test_model_finetuning_nlu_new_label_to_domain_only( tmp_path: Path, monkeypatch: MonkeyPatch, trained_nlu_moodbot_path: Text, ): - mocked_nlu_training = AsyncMock(return_value="") - monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) + mocked_nlu_training = mock_nlu_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") @@ -963,8 +956,7 @@ def test_model_finetuning_nlu_new_label_to_domain_only( def test_model_finetuning_nlu_with_default_epochs( tmp_path: Path, monkeypatch: MonkeyPatch, trained_nlu_moodbot_path: Text, ): - mocked_nlu_training = AsyncMock(return_value="") - monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) + mocked_nlu_training = mock_nlu_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") @@ -1003,8 +995,7 @@ def test_model_finetuning_with_invalid_model( model_to_fine_tune: Text, capsys: CaptureFixture, ): - mocked_nlu_training = AsyncMock(return_value="") - monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) + mocked_nlu_training = mock_nlu_training(monkeypatch) mocked_core_training = mock_core_training(monkeypatch) (tmp_path / "models").mkdir() @@ -1066,8 +1057,7 @@ def test_model_finetuning_with_invalid_model_nlu( model_to_fine_tune: Text, capsys: CaptureFixture, ): - mocked_nlu_training = AsyncMock(return_value="") - monkeypatch.setattr(rasa.nlu, rasa.nlu.train.__name__, mocked_nlu_training) + mocked_nlu_training = mock_nlu_training(monkeypatch) (tmp_path / "models").mkdir() output = str(tmp_path / "models") diff --git a/tests/test_server.py b/tests/test_server.py index e57952aab178..899a42ed77a6 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -43,6 +43,7 @@ from rasa.core.channels.slack import SlackBot from rasa.core.tracker_store import InMemoryTrackerStore from rasa.model import unpack_model +import rasa.nlu.test from rasa.nlu.test import CVEvaluationResult from rasa.shared.core import events from rasa.shared.core.constants import ( @@ -990,7 +991,9 @@ async def test_cross_validation_with_callback_success( ) ) monkeypatch.setattr( - rasa.nlu, rasa.nlu.cross_validate.__name__, mocked_cross_validation + rasa.nlu.test, + rasa.nlu.test.cross_validate.__name__, + mocked_cross_validation, ) _, response = await rasa_app_nlu.post( @@ -1033,7 +1036,9 @@ async def test_cross_validation_with_callback_error( payload = f"{nlu_data}\n{config}" monkeypatch.setattr( - rasa.nlu, rasa.nlu.cross_validate.__name__, Mock(side_effect=ValueError()) + rasa.nlu.test, + rasa.nlu.test.cross_validate.__name__, + Mock(side_effect=ValueError()), ) callback_url = "https://example.com/webhooks/actions" From 180291bee7b7fbe093317b498d659a73a69299ab Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 19:22:24 +0100 Subject: [PATCH 
22/24] add changelogs --- changelog/8141.misc.md | 3 +++ changelog/8141.removal.md | 6 ++++++ 2 files changed, 9 insertions(+) create mode 100644 changelog/8141.misc.md create mode 100644 changelog/8141.removal.md diff --git a/changelog/8141.misc.md b/changelog/8141.misc.md new file mode 100644 index 000000000000..c51cbfe1efe5 --- /dev/null +++ b/changelog/8141.misc.md @@ -0,0 +1,3 @@ +The following modules were renamed: +* `rasa.train` -> `rasa.model_training` +* `rasa.test` -> `rasa.model_testing` diff --git a/changelog/8141.removal.md b/changelog/8141.removal.md new file mode 100644 index 000000000000..5c4a2c581134 --- /dev/null +++ b/changelog/8141.removal.md @@ -0,0 +1,6 @@ +The following import abbreviations were removed: +* `rasa.core.train`: Please use `rasa.core.train.train` instead. +* `rasa.core.visualize`: Please use `rasa.core.visualize.visualize` instead. +* `rasa.nlu.train`: Please use `rasa.nlu.train.train` instead. +* `rasa.nlu.test`: Please use `rasa.nlu.test.run_evaluation` instead. +* `rasa.nlu.cross_validate`: Please use `rasa.nlu.test.cross_validate` instead. From 153ec8db42cd020b50411aedacbd745dbd3bc395 Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Mon, 15 Mar 2021 19:29:42 +0100 Subject: [PATCH 23/24] add missing docstrings --- rasa/cli/run.py | 9 +++++++-- rasa/core/agent.py | 18 +++++++++++++++++- rasa/jupyter.py | 3 ++- 3 files changed, 26 insertions(+), 4 deletions(-) diff --git a/rasa/cli/run.py b/rasa/cli/run.py index c5d1e9cc5112..aa964da88b0c 100644 --- a/rasa/cli/run.py +++ b/rasa/cli/run.py @@ -1,7 +1,7 @@ import argparse import logging import os -from typing import List, Text +from typing import List, Text, NoReturn from rasa.cli import SubParsersAction from rasa.cli.arguments import run as arguments @@ -74,7 +74,12 @@ def _validate_model_path(model_path: Text, parameter: Text, default: Text): return model_path -def run(args: argparse.Namespace): +def run(args: argparse.Namespace) -> NoReturn: + """Entrypoint for `rasa run`. + + Args: + args: The CLI arguments. + """ import rasa args.endpoints = rasa.cli.utils.get_validated_path( diff --git a/rasa/core/agent.py b/rasa/core/agent.py index b3ae8bdddd48..20785f3c9bd2 100644 --- a/rasa/core/agent.py +++ b/rasa/core/agent.py @@ -306,7 +306,23 @@ async def load_agent( tracker_store: Optional[TrackerStore] = None, lock_store: Optional[LockStore] = None, action_endpoint: Optional[EndpointConfig] = None, -): +) -> Optional["Agent"]: + """Loads agent from server, remote storage or disk. + + Args: + model_path: Path to the model if it's on disk. + model_server: Configuration for a potential server which serves the model. + remote_storage: URL of remote storage for model. + interpreter: NLU interpreter to parse incoming messages. + generator: Optional response generator. + tracker_store: TrackerStore for persisting the conversation history. + lock_store: LockStore to avoid that a conversation is modified by concurrent + actors. + action_endpoint: Action server configuration for executing custom actions. + + Returns: + The instantiated `Agent` or `None`. 
+ """ try: if model_server is not None: return await load_from_server( diff --git a/rasa/jupyter.py b/rasa/jupyter.py index ad4f3380734f..a0d516bc4640 100644 --- a/rasa/jupyter.py +++ b/rasa/jupyter.py @@ -11,7 +11,8 @@ from rasa.core.agent import Agent, create_agent -def pprint(obj: Any): +def pprint(obj: Any) -> None: + """Prints JSONs with indent.""" pretty_print.pprint(obj, indent=2) From c9db2545ce989321a9b00ff8775b8db52a98ce1c Mon Sep 17 00:00:00 2001 From: Tobias Wochinger Date: Tue, 16 Mar 2021 09:25:14 +0100 Subject: [PATCH 24/24] fixes for prior module renames / removing of abbreviated imports --- tests/nlu/classifiers/test_diet_classifier.py | 2 +- tests/nlu/test_persistor.py | 2 +- tests/nlu/test_train.py | 6 ++---- tests/test_server.py | 2 +- 4 files changed, 5 insertions(+), 7 deletions(-) diff --git a/tests/nlu/classifiers/test_diet_classifier.py b/tests/nlu/classifiers/test_diet_classifier.py index f0bbff90090c..96141340a05c 100644 --- a/tests/nlu/classifiers/test_diet_classifier.py +++ b/tests/nlu/classifiers/test_diet_classifier.py @@ -162,7 +162,7 @@ def test_model_data_signature_with_entities( # create tokens for entity parsing inside DIET tokenizer = WhitespaceTokenizer() - tokenizer.rasa.nlu.train.train(training_data) + tokenizer.train(training_data) model_data = classifier.preprocess_train_data(training_data) entity_exists = "entities" in model_data.get_signature().keys() diff --git a/tests/nlu/test_persistor.py b/tests/nlu/test_persistor.py index d852bfef38ff..d7d086168abf 100644 --- a/tests/nlu/test_persistor.py +++ b/tests/nlu/test_persistor.py @@ -25,7 +25,7 @@ async def test_list_method_method_in_AWS_persistor( # noinspection PyPep8Naming os.environ["BUCKET_NAME"] = "rasa-test" os.environ["AWS_DEFAULT_REGION"] = "us-west-1" - (trained, _, persisted_path) = await rasa.nlu.train( + (trained, _, persisted_path) = await rasa.nlu.train.train( _config, data="data/test/demo-rasa-small.json", path=str(tmp_path), diff --git a/tests/nlu/test_train.py b/tests/nlu/test_train.py index bc31899cbe10..2fdefc196991 100644 --- a/tests/nlu/test_train.py +++ b/tests/nlu/test_train.py @@ -168,7 +168,7 @@ def test_train_model_without_data(language, pipeline, component_builder, tmpdir) _config = RasaNLUModelConfig({"pipeline": pipeline, "language": language}) trainer = Trainer(_config, component_builder) - trainer.rasa.nlu.train.train(TrainingData()) + trainer.train(TrainingData()) persisted_path = trainer.persist(tmpdir.strpath) loaded = Interpreter.load(persisted_path, component_builder) @@ -206,9 +206,7 @@ def test_load_and_persist_without_train(language, pipeline, component_builder, t def test_load_and_persist_without_train_non_windows( language, pipeline, component_builder, tmpdir ): - test_load_and_persist_without_rasa.nlu.train.train( - language, pipeline, component_builder, tmpdir - ) + test_load_and_persist_without_train(language, pipeline, component_builder, tmpdir) async def test_train_model_empty_pipeline(component_builder): diff --git a/tests/test_server.py b/tests/test_server.py index 899a42ed77a6..5df96ca2218d 100644 --- a/tests/test_server.py +++ b/tests/test_server.py @@ -218,7 +218,7 @@ def run_server(monkeypatch: MonkeyPatch) -> NoReturn: import sys monkeypatch.setattr( - sys.modules["rasa.train"], "train_async", mocked_training_function, + sys.modules["rasa.model_training"], "train_async", mocked_training_function, ) from rasa import __main__