diff --git a/python/nano/src/bigdl/nano/deps/openvino/core/model.py b/python/nano/src/bigdl/nano/deps/openvino/core/model.py
index c70c9e0c82a..b583cdb9fba 100644
--- a/python/nano/src/bigdl/nano/deps/openvino/core/model.py
+++ b/python/nano/src/bigdl/nano/deps/openvino/core/model.py
@@ -30,6 +30,9 @@ def __init__(self, ie_network: str, device='CPU'):
     def forward_step(self, *inputs):
         return self._infer_request.infer(list(inputs))
 
+    def __call__(self, *inputs):
+        return self.forward_step(*inputs)
+
     @property
     def forward_args(self):
         return self._forward_args
@@ -50,17 +53,16 @@ def ie_network(self, model):
         input_names = [t.any_name for t in self._ie_network.inputs]
         self._forward_args = input_names
 
-    def _save_model(self, path):
+    def _save(self, path):
         """
-        Save PytorchOpenVINOModel to local as xml and bin file
+        Save OpenVINOModel to local as xml and bin file
 
         :param path: Directory to save the model.
         """
+        self._model_exists_or_err()
         path = Path(path)
         path.mkdir(exist_ok=True)
-        invalidInputError(self.ie_network,
-                          "self.ie_network shouldn't be None.")
-        xml_path = path / self.status['xml_path']
+        xml_path = path / 'ov_saved_model.xml'
         save(self.ie_network, xml_path)
 
     def pot(self,
@@ -148,3 +150,6 @@ def pot(self,
             model = Core().read_model(model_path)
             model.reshape(orig_shape)
         return model
+
+    def _model_exists_or_err(self):
+        invalidInputError(self.ie_network is not None, "self.ie_network shouldn't be None.")
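Note (reviewer sketch, not part of the patch): with `__call__` now delegating to `forward_step`, an `OpenVINOModel` can be invoked directly, which is what the updated test at the bottom of this diff relies on. A minimal usage sketch, assuming the import path implied by the file above; the IR path and input shape are illustrative:

```python
# Sketch only: the import path mirrors the file changed above; the IR path is illustrative.
import numpy as np
from bigdl.nano.deps.openvino.core.model import OpenVINOModel

ov_model = OpenVINOModel("path/to/model.xml")   # any OpenVINO IR xml
x = np.random.randn(1, 3, 224, 224)             # shape must match the model's input
y = ov_model(x)                                 # now equivalent to ov_model.forward_step(x)
ov_model._save("saved_dir")                     # writes ov_saved_model.xml / .bin into saved_dir
```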
diff --git a/python/nano/src/bigdl/nano/deps/openvino/pytorch/model.py b/python/nano/src/bigdl/nano/deps/openvino/pytorch/model.py
index 8e122723cff..e8a5783e037 100644
--- a/python/nano/src/bigdl/nano/deps/openvino/pytorch/model.py
+++ b/python/nano/src/bigdl/nano/deps/openvino/pytorch/model.py
@@ -22,9 +22,10 @@
 from .utils import export
 import torch
 from bigdl.nano.utils.log4Error import invalidInputError
+from ..core.utils import save
 
 
-class PytorchOpenVINOModel(OpenVINOModel, AcceleratedLightningModule):
+class PytorchOpenVINOModel(AcceleratedLightningModule):
     def __init__(self, model, input_sample=None, logging=True, **export_kwargs):
         """
         Create a OpenVINO model from pytorch.
@@ -43,14 +44,11 @@ def __init__(self, model, input_sample=None, logging=True, **export_kwargs):
             if isinstance(model, torch.nn.Module):
                 export(model, input_sample, str(dir / 'tmp.xml'), logging, **export_kwargs)
                 ov_model_path = dir / 'tmp.xml'
-            OpenVINOModel.__init__(self, ov_model_path)
-            AcceleratedLightningModule.__init__(self, None)
+            self.ov_model = OpenVINOModel(ov_model_path)
+            super().__init__(self.ov_model)
 
     def on_forward_start(self, inputs):
-        if self.ie_network is None:
-            invalidInputError(False,
-                              "Please create an instance by PytorchOpenVINOModel()"
-                              " or PytorchOpenVINOModel.load()")
+        self.ov_model._model_exists_or_err()
         inputs = self.tensors_to_numpy(inputs)
         return inputs
 
@@ -64,6 +62,10 @@ def status(self):
         status.update({"xml_path": 'ov_saved_model.xml',
                        "weight_path": 'ov_saved_model.bin'})
         return status
 
+    @property
+    def forward_args(self):
+        return self.ov_model.forward_args
+
     @staticmethod
     def _load(path):
         """
@@ -95,7 +97,19 @@ def pot(self,
         if metric:
             metric = PytorchOpenVINOMetric(metric=metric, higher_better=higher_better)
         dataloader = PytorchOpenVINODataLoader(dataloader, collate_fn=self.tensors_to_numpy)
-        model = super().pot(dataloader, metric=metric, drop_type=drop_type,
-                            maximal_drop=maximal_drop, max_iter_num=max_iter_num,
-                            n_requests=n_requests, sample_size=sample_size)
+        model = self.ov_model.pot(dataloader, metric=metric, drop_type=drop_type,
+                                  maximal_drop=maximal_drop, max_iter_num=max_iter_num,
+                                  n_requests=n_requests, sample_size=sample_size)
         return PytorchOpenVINOModel(model)
+
+    def _save_model(self, path):
+        """
+        Save PytorchOpenVINOModel to local as xml and bin file
+
+        :param path: Directory to save the model.
+        """
+        self.ov_model._model_exists_or_err()
+        path = Path(path)
+        path.mkdir(exist_ok=True)
+        xml_path = path / self.status['xml_path']
+        save(self.ov_model.ie_network, xml_path)
diff --git a/python/nano/src/bigdl/nano/deps/openvino/tf/model.py b/python/nano/src/bigdl/nano/deps/openvino/tf/model.py
index 012707d446d..79983c1939c 100644
--- a/python/nano/src/bigdl/nano/deps/openvino/tf/model.py
+++ b/python/nano/src/bigdl/nano/deps/openvino/tf/model.py
@@ -20,9 +20,10 @@
 from .utils import export
 import tensorflow as tf
 from bigdl.nano.utils.log4Error import invalidInputError
+from ..core.utils import save
 
 
-class KerasOpenVINOModel(OpenVINOModel, AcceleratedKerasModel):
+class KerasOpenVINOModel(AcceleratedKerasModel):
     def __init__(self, model):
         """
         Create a OpenVINO model from Keras.
@@ -39,14 +40,14 @@ def __init__(self, model):
         if isinstance(model, tf.keras.Model):
             export(model, str(dir / 'tmp.xml'))
             ov_model_path = dir / 'tmp.xml'
-        OpenVINOModel.__init__(self, ov_model_path)
-        AcceleratedKerasModel.__init__(self, None)
+        self.ov_model = OpenVINOModel(ov_model_path)
+        super().__init__(self.ov_model)
+
+    def forward_step(self, *inputs):
+        return self.ov_model.forward_step(*inputs)
 
     def on_forward_start(self, inputs):
-        if self.ie_network is None:
-            invalidInputError(False,
-                              "Please create an instance by KerasOpenVINOModel()"
-                              " or KerasOpenVINOModel.load()")
+        self.ov_model._model_exists_or_err()
         inputs = self.tensors_to_numpy(inputs)
         return inputs
 
@@ -79,3 +80,15 @@ def _load(path):
             invalidInputError(False, "nano_model_meta.yml must specify 'xml_path' for loading.")
         xml_path = Path(path) / status['xml_path']
         return KerasOpenVINOModel(xml_path)
+
+    def _save_model(self, path):
+        """
+        Save KerasOpenVINOModel to local as xml and bin file
+
+        :param path: Directory to save the model.
+        """
+        self.ov_model._model_exists_or_err()
+        path = Path(path)
+        path.mkdir(exist_ok=True)
+        xml_path = path / self.status['xml_path']
+        save(self.ov_model.ie_network, xml_path)
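Note (not part of the patch): both framework wrappers now hold the OpenVINO engine as `self.ov_model` and delegate to it instead of inheriting from `OpenVINOModel`. A hedged usage sketch for the PyTorch side; the ResNet model, sample shape, and save directory are illustrative, and the Keras wrapper follows the same pattern:

```python
# Sketch under assumptions: resnet18 and the save directory are placeholders.
import torch
from torchvision.models import resnet18
from bigdl.nano.deps.openvino.pytorch.model import PytorchOpenVINOModel

pt_model = resnet18(pretrained=False).eval()
sample = torch.rand(1, 3, 224, 224)
ov_pt = PytorchOpenVINOModel(pt_model, input_sample=sample)
with torch.no_grad():
    out = ov_pt(sample)          # forward is routed through self.ov_model
ov_pt._save_model("saved_ov")    # xml/bin file names come from self.status
```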
+ """ + self.ov_model._model_exists_or_err() + path = Path(path) + path.mkdir(exist_ok=True) + xml_path = path / self.status['xml_path'] + save(self.ov_model.ie_network, xml_path) diff --git a/python/nano/test/openvino/basic/test_openvino.py b/python/nano/test/openvino/basic/test_openvino.py index 1ef66e2ec1d..20691707284 100644 --- a/python/nano/test/openvino/basic/test_openvino.py +++ b/python/nano/test/openvino/basic/test_openvino.py @@ -24,5 +24,5 @@ class TestOpenVINO(TestCase): def test_openvino_model(self): openvino_model = OpenVINOModel("./intel/resnet18-xnor-binary-onnx-0001/FP16-INT1/resnet18-xnor-binary-onnx-0001.xml") x = np.random.randn(1, 3, 224, 224) - y_hat = openvino_model.forward_step(x) + y_hat = openvino_model(x) assert tuple(next(iter(y_hat)).shape) == (1, 1000)