diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py
deleted file mode 100644
index c1b9135..0000000
--- a/tests/test_tokenizer.py
+++ /dev/null
@@ -1,18 +0,0 @@
-import pytest
-import spacy
-from nlpretext.token.tokenizer import LanguageNotInstalledError, SpacyModel
-
-
-@pytest.mark.parametrize(
-    "fake_input, expected_model_in_message", [("en", "en_core_web_sm"), ("fr", "fr_core_news_sm")]
-)
-def test_get_spacy_tokenizer_when_model_not_downloaded(
-    monkeypatch, fake_input, expected_model_in_message
-):
-    def mock_spacy_load(lang):
-        raise OSError("[E050] Can't find model 'en_core_web_sm'. It doesn't seem to be ...")
-
-    monkeypatch.setattr(spacy, "load", mock_spacy_load)
-    with pytest.raises(LanguageNotInstalledError) as e:
-        SpacyModel.SingletonSpacyModel(fake_input)
-    assert expected_model_in_message in str(e.value)