diff --git a/tests/model_hub_tests/torch_tests/test_hf_transformers.py b/tests/model_hub_tests/torch_tests/test_hf_transformers.py
index caeb2e0ff2a01d..8b595e5425668a 100644
--- a/tests/model_hub_tests/torch_tests/test_hf_transformers.py
+++ b/tests/model_hub_tests/torch_tests/test_hf_transformers.py
@@ -250,8 +250,8 @@ def forward(self, x):
         if model is None:
             from transformers import AutoModel
             model = AutoModel.from_pretrained(name, torchscript=True)
-            if hasattr(model, "set_default_language"):
-                model.set_default_language("en_XX")
+        if hasattr(model, "set_default_language"):
+            model.set_default_language("en_XX")
         if example is None:
             if "encodec" in mi.tags:
                 example = (torch.randn(1, 1, 100),)
@@ -294,14 +294,11 @@ def teardown_method(self):
     @pytest.mark.parametrize("name,type",
                              [("allenai/led-base-16384", "led"),
                               ("bert-base-uncased", "bert"),
-                              ("facebook/bart-large-mnli", "bart"),
                               ("google/flan-t5-base", "t5"),
                               ("google/tapas-large-finetuned-wtq", "tapas"),
                               ("gpt2", "gpt2"),
                               ("openai/clip-vit-large-patch14", "clip"),
-                              ("RWKV/rwkv-4-169m-pile", "rwkv"),
-                              ("microsoft/layoutlmv3-base", "layoutlmv3"),
-                              ("microsoft/xprophetnet-large-wiki100-cased", "xlm-prophetnet"),
+                              ("facebook/xmod-base", "xmod")
                               ])
     @pytest.mark.precommit
     def test_convert_model_precommit(self, name, type, ie_device):