diff --git a/docs/api/source/conf.py b/docs/api/source/conf.py index c34e22e5a65..55a40d04220 100644 --- a/docs/api/source/conf.py +++ b/docs/api/source/conf.py @@ -143,6 +143,8 @@ def collect_api_entities() -> APIInfo: "nncf.tensor.functions.numpy_linalg", "nncf.tensor.functions.torch_numeric", "nncf.tensor.functions.torch_linalg", + "nncf.tensor.functions.torch_io", + "nncf.tensor.functions.numpy_io", ] with mock(mock_modules): diff --git a/licensing/third-party-programs.txt b/licensing/third-party-programs.txt index 02da4a04c01..df8a60e4829 100644 --- a/licensing/third-party-programs.txt +++ b/licensing/third-party-programs.txt @@ -1617,3 +1617,212 @@ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +------------------------------------------------------------- + +huggingface/safetensors + +Copyright 2018- The Hugging Face team. All rights reserved. + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. 
+ + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +------------------------------------------------------------- diff --git a/nncf/__init__.py b/nncf/__init__.py index 88ad2cfb09e..4c39aef55ce 100644 --- a/nncf/__init__.py +++ b/nncf/__init__.py @@ -26,6 +26,7 @@ from nncf.errors import InvalidQuantizerGroupError as InvalidQuantizerGroupError from nncf.errors import ModuleNotFoundError as ModuleNotFoundError from nncf.errors import ParameterNotSupportedError as ParameterNotSupportedError +from nncf.errors import StatisticsCacheError as StatisticsCacheError from nncf.errors import UnknownDatasetError as UnknownDatasetError from nncf.errors import UnsupportedBackendError as UnsupportedBackendError from nncf.errors import UnsupportedDatasetError as UnsupportedDatasetError diff --git a/nncf/common/tensor_statistics/aggregator.py b/nncf/common/tensor_statistics/aggregator.py index 2b1b21aa7f3..59c6ac1bd65 100644 --- a/nncf/common/tensor_statistics/aggregator.py +++ b/nncf/common/tensor_statistics/aggregator.py @@ -12,11 +12,10 @@ from abc import ABC from abc import abstractmethod from itertools import islice +from pathlib import Path from typing import Any, Dict, Optional, TypeVar import nncf -import nncf.common.tensor_statistics.statistics_serializer as statistics_serializer -import nncf.common.tensor_statistics.statistics_validator as statistics_validator from nncf.common import factory from nncf.common.graph.graph import NNCFGraph from 
nncf.common.graph.transformations.commands import TargetPoint @@ -25,6 +24,8 @@ from nncf.common.logging.track_progress import track from nncf.common.tensor import NNCFTensor from nncf.common.tensor_statistics.statistic_point import StatisticPointsContainer +from nncf.common.tensor_statistics.statistics_serializer import dump_statistics +from nncf.common.tensor_statistics.statistics_serializer import load_statistics from nncf.common.utils.backend import BackendType from nncf.data.dataset import DataItem from nncf.data.dataset import Dataset @@ -96,16 +97,15 @@ def collect_statistics(self, model: TModel, graph: NNCFGraph) -> None: f"smaller than the requested subset size {self.stat_subset_size}." ) - def load_statistics_from_dir(self, dir_path: str) -> None: + def load_statistics_from_dir(self, dir_path: Path) -> None: """ Loads statistics from a directory and populates the statistic points with the loaded data. :param dir_path: The name of the directory from which to load the statistics. 
""" - loaded_data, metadata = statistics_serializer.load_from_dir(dir_path) - statistics_validator.validate_backend(metadata, self.BACKEND) + loaded_data = load_statistics(dir_path, self.BACKEND) self._load_statistics(loaded_data) - nncf_logger.info(f"Statistics were successfully loaded from a directory {dir_path}.") + nncf_logger.info(f"Statistics were successfully loaded from a directory {dir_path.absolute()}") def _load_statistics(self, data: Dict[str, Any]) -> None: """ @@ -118,19 +118,19 @@ def _load_statistics(self, data: Dict[str, Any]) -> None: statistics_key = self._get_statistics_key(statistics, statistic_point.target_point) if statistics_key not in data: raise nncf.ValidationError(f"Not found statistics for {statistics_key}") - statistics_container = tensor_collector.create_statistics_container(data[statistics_key]) - tensor_collector.set_cache(statistics_container) + statistics.load_data(data[statistics_key]) + tensor_collector.set_cache(statistics) - def dump_statistics(self, dir_path: str) -> None: + def dump_statistics(self, dir_path: Path) -> None: """ Dumps the current statistics to a directory in a compressed format. :param dir_path: The path of the directory where the statistics will be saved. 
""" data_to_dump = self._prepare_statistics() - metadata = {"backend": self.BACKEND.value, "subset_size": self.stat_subset_size} - statistics_serializer.dump_to_dir(data_to_dump, dir_path, metadata) - nncf_logger.info(f"Statistics were successfully saved to a directory {dir_path}.") + additional_metadata = {"subset_size": self.stat_subset_size} + dump_statistics(data_to_dump, dir_path, self.BACKEND, additional_metadata) + nncf_logger.info(f"Statistics were successfully saved to a directory {dir_path.absolute()}") def _prepare_statistics(self) -> Dict[str, Any]: """ @@ -142,7 +142,7 @@ def _prepare_statistics(self) -> Dict[str, Any]: for _, statistic_point, tensor_collector in self.statistic_points.get_tensor_collectors(): statistics = tensor_collector.get_statistics() statistics_key = self._get_statistics_key(statistics, statistic_point.target_point) - data = statistics.get_data() + data = statistics.get_data(is_serialized=True) data_to_dump[statistics_key] = data return data_to_dump diff --git a/nncf/common/tensor_statistics/statistics_serializer.py b/nncf/common/tensor_statistics/statistics_serializer.py index 5f33c1e9b8c..6f4a5bcf271 100644 --- a/nncf/common/tensor_statistics/statistics_serializer.py +++ b/nncf/common/tensor_statistics/statistics_serializer.py @@ -8,112 +8,144 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
-import gzip import json -import pickle import re +from collections import defaultdict from pathlib import Path -from typing import Any, Dict, Optional, Tuple, cast +from typing import Any, Dict, List, Optional, TextIO, cast import nncf -from nncf.common.utils.os import fail_if_symlink +from nncf.common.tensor_statistics.statistics_validator import validate_cache +from nncf.common.utils.backend import BackendType from nncf.common.utils.os import safe_open +from nncf.tensor import functions as fns +from nncf.tensor.tensor import Tensor +from nncf.tensor.tensor import get_tensor_backend METADATA_FILE = "statistics_metadata.json" +STATISTICS_FILE_EXTENSION = ".safetensors" def sanitize_filename(filename: str) -> str: """ Replaces any forbidden characters with an underscore. + + :param filename: Original filename. + :return: Sanitized filename with no forbidden characters. + """ + return re.sub(r"[^\w]", "_", filename) + + +def add_unique_name(name: str, unique_map: Dict[str, List[str]]) -> str: """ - return re.sub(r'[\/:*?"<>|]', "_", filename) + Creates an unique name, adds it to a `unique_map` and returns this unique name. + + :param name: The original name. + :param unique_map: A dictionary mapping names to lists of unique sanitized names. + :return: A unique name generated by appending a count to the original name. + """ + # Next number of the same sanitized name + count = len(unique_map[name]) + 1 + unique_sanitized_name = f"{name}_{count}" + unique_map[name].append(unique_sanitized_name) + return unique_sanitized_name def load_metadata(dir_path: Path) -> Dict[str, Any]: """ Loads the metadata, including the mapping and any other metadata information from the metadata file. + :param dir_path: The directory where the metadata file is stored. - :return: A dictionary containing the mapping and metadata. + :return: A dictionary containing the metadata. 
""" metadata_file = dir_path / METADATA_FILE if metadata_file.exists(): with safe_open(metadata_file, "r") as f: return cast(Dict[str, Any], json.load(f)) - return {"mapping": {}, "metadata": {}} + raise nncf.StatisticsCacheError(f"Metadata file does not exist in the following path: {dir_path}") def save_metadata(metadata: Dict[str, Any], dir_path: Path) -> None: """ - Saves the mapping and metadata to the metadata file. - :param metadata: The dictionary containing both the mapping and other metadata. - :param dir_path: The directory where the metadata file will be stored. + Saves metadata to a file in the specified directory. + + :param metadata: Dictionary containing metadata and mapping. + :param dir_path: Path to the directory where the metadata file will be saved. """ metadata_file = dir_path / METADATA_FILE with safe_open(metadata_file, "w") as f: - json.dump(metadata, f, indent=4) + json.dump(metadata, cast(TextIO, f), indent=4) -def load_from_dir(dir_path: str) -> Tuple[Dict[str, Any], Dict[str, str]]: +def load_statistics(dir_path: Path, backend: BackendType) -> Dict[str, Dict[str, Tensor]]: """ - Loads statistics from gzip-compressed files in the given directory. + Loads statistics from a directory. + :param dir_path: The path to the directory from which to load the statistics. - :return: 1) A dictionary with the original statistic names as keys and the loaded statistics as values. - 2) Metadata dictionary. + :param backend: Backend type to determine the tensor backend. + :return: Statistics. 
""" - statistics = {} - path = Path(dir_path) - if not path.exists(): - raise nncf.ValidationError("The provided directory path does not exist.") - metadata = load_metadata(path) - mapping = metadata.get("mapping", {}) - - for statistics_file in path.iterdir(): - if statistics_file.name == METADATA_FILE: - continue # Skip the metadata file - - try: - fail_if_symlink(statistics_file) - with gzip.open(statistics_file, "rb") as f: - sanitized_name = statistics_file.name - original_name = mapping.get(sanitized_name, sanitized_name) - statistics[original_name] = pickle.load(f) - except (pickle.UnpicklingError, IOError) as e: - raise nncf.InternalError(f"Error loading statistics from {statistics_file.name}: {e}") - return statistics, metadata.get("metadata", {}) - - -def dump_to_dir( - statistics: Dict[str, Any], dir_path: str, additional_metadata: Optional[Dict[str, Any]] = None + metadata = load_metadata(dir_path) + try: + validate_cache(metadata, dir_path, backend) + statistics: Dict[str, Dict[str, Tensor]] = {} + mapping = metadata.get("mapping", {}) + tensor_backend = get_tensor_backend(backend) + for file_name, original_name in mapping.items(): + statistics_file = dir_path / file_name + statistics[original_name] = fns.io.load_file(statistics_file, backend=tensor_backend) # no device support + return statistics + except Exception as e: + raise nncf.StatisticsCacheError(str(e)) + + +def dump_statistics( + statistics: Dict[str, Dict[str, Tensor]], + dir_path: Path, + backend: BackendType, + additional_metadata: Optional[Dict[str, Any]] = None, ) -> None: """ - Dumps statistics to gzip-compressed files in the specified directory, while maintaining a mapping file. + Saves statistics and metadata to a directory. + + Metadata is stored in a JSON file named "statistics_metadata.json". + Statistics are stored in individual files with sanitized and unique filenames to prevent collisions. 
+ + Metadata Format: + The metadata file must have a mapping of saved filenames to the original names and backend type. + { + "mapping": { + "saved_file_name_1": "original_name_1", + "saved_file_name_2": "original_name_2", + ... + }, + "backend": "backend_type", + ... (additional metadata fields) + } + :param statistics: A dictionary with statistic names as keys and the statistic data as values. :param dir_path: The path to the directory where the statistics will be dumped. + :param backend: Backend type to save in metadata. :param additional_metadata: A dictionary containing any additional metadata to be saved with the mapping. """ - path = Path(dir_path) - path.mkdir(parents=True, exist_ok=True) - - metadata, mapping = {}, {} - + dir_path.mkdir(parents=True, exist_ok=True) + metadata: Dict[str, Any] = {"mapping": {}, "backend": backend.value} + unique_map: Dict[str, List[str]] = defaultdict(list) for original_name, statistics_value in statistics.items(): sanitized_name = sanitize_filename(original_name) - file_path = path / sanitized_name + unique_sanitized_name = add_unique_name(sanitized_name, unique_map) + STATISTICS_FILE_EXTENSION + + file_path = dir_path / unique_sanitized_name # Update the mapping - mapping[sanitized_name] = original_name + metadata["mapping"][unique_sanitized_name] = original_name try: - fail_if_symlink(file_path) - with gzip.open(file_path, "wb") as f: - pickle.dump(statistics_value, f) - except (IOError, pickle.PicklingError) as e: + fns.io.save_file(statistics_value, file_path) + except Exception as e: raise nncf.InternalError(f"Failed to write data to file {file_path}: {e}") - # Add additional metadata if provided if additional_metadata: - metadata["metadata"] = additional_metadata + metadata |= additional_metadata - # Update the mapping in the metadata file - metadata["mapping"] = mapping - save_metadata(metadata, path) + save_metadata(metadata, dir_path) diff --git a/nncf/common/tensor_statistics/statistics_validator.py 
b/nncf/common/tensor_statistics/statistics_validator.py index fe926aaa3f2..80ec8dd458c 100644 --- a/nncf/common/tensor_statistics/statistics_validator.py +++ b/nncf/common/tensor_statistics/statistics_validator.py @@ -8,23 +8,48 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from pathlib import Path from typing import Any, Dict -import nncf from nncf.common.utils.backend import BackendType -def validate_backend(data: Dict[str, Any], backend: BackendType) -> None: +def validate_backend(metadata: Dict[str, Any], backend: BackendType) -> None: """ - Checks whether backend in loaded data is equal to a provided backend. + Checks whether backend in metadata is equal to a provided backend. :param data: Loaded statistics. :param backend: Provided backend. """ - if "backend" not in data: - raise nncf.ValidationError("The provided metadata has no information about backend.") - data_backend = data["backend"] + if "backend" not in metadata: + raise ValueError("The provided metadata has no information about backend.") + data_backend = metadata["backend"] if data_backend != backend.value: - raise nncf.ValidationError( - f"Backend in loaded statistics {data_backend} does not match to an expected backend {backend.value}." + raise ValueError( + f"Backend in loaded statistics {data_backend} does not match the expected backend {backend.value}." ) + + +def validate_statistics_files_exist(metadata: Dict[str, Any], dir_path: Path) -> None: + """ + Checks whether all statistics files exist. + + :param metadata: Loaded metadata. + :param dir_path: Path to the cache directory. 
+ """ + for file_name in metadata["mapping"]: + file_path = dir_path / file_name + if not file_path.exists(): + raise FileNotFoundError(f"One of the statistics file: {file_path} does not exist.") + + +def validate_cache(metadata: Dict[str, Any], dir_path: Path, backend: BackendType) -> None: + """ + Validates cache directory. + + :param metadata: Metadata. + :param dir_path: Path to the cache directory. + :param backend: Backend type. + """ + validate_backend(metadata, backend) + validate_statistics_files_exist(metadata, dir_path) diff --git a/nncf/errors.py b/nncf/errors.py index d507c3b6d79..efb82ab60ab 100644 --- a/nncf/errors.py +++ b/nncf/errors.py @@ -129,3 +129,16 @@ class UnknownDatasetError(Exception): """ pass + + +class StatisticsCacheError(Exception): + """ + Raised when any error occurs during statistics caching. + """ + + def __str__(self): + return ( + "The error occurred during statistics caching:\n" + f"{super().__str__()}\n" + "Please, remove the cache directory and collect cache again." 
+ ) diff --git a/nncf/experimental/common/tensor_statistics/collectors.py b/nncf/experimental/common/tensor_statistics/collectors.py index c79a1bd4d84..ce138709d71 100644 --- a/nncf/experimental/common/tensor_statistics/collectors.py +++ b/nncf/experimental/common/tensor_statistics/collectors.py @@ -799,8 +799,8 @@ def _register_reduced_input_impl(self, x: TensorType) -> None: # TODO: revise this formula as possibly it is with an error; adopted from previous HAWQ implementation self._container = (self._container + trace) / x.size - def _aggregate_impl(self) -> List[TensorType]: - return [self._container * 2 / self._collected_samples] + def _aggregate_impl(self) -> Tensor: + return self._container * 2 / self._collected_samples def _move_axes_flatten_cat( diff --git a/nncf/experimental/common/tensor_statistics/statistics.py b/nncf/experimental/common/tensor_statistics/statistics.py index 9396674edb3..6479643c1a1 100644 --- a/nncf/experimental/common/tensor_statistics/statistics.py +++ b/nncf/experimental/common/tensor_statistics/statistics.py @@ -13,33 +13,64 @@ from collections import Counter from dataclasses import dataclass +from dataclasses import fields from typing import Any, ClassVar, Dict, List, Tuple +import nncf from nncf.tensor import Tensor from nncf.tensor import functions as fns +@dataclass class TensorStatistic: """Base class that stores statistic data""" - TENSOR_STATISTIC_OUTPUT_KEY = "tensor_statistic_output" - - def get_data(self) -> Dict[str, Any]: - return {key: getattr(self, key) for key in self.keys()} - - def load_data(self, data: Dict[str, Any]): - for key in self.keys(): - setattr(self, key, data.get(key)) + TENSOR_STATISTIC_OUTPUT_KEY: ClassVar[str] = "tensor_statistic_output" + + def get_data(self, is_serialized: bool = False) -> Dict[str, Any]: + """ + Retrieves the data of the tensor statistics. If `is_serialized` is True, + the data is prepared for serialization by including only Tensor instances. 
+ + :param is_serialized: If True, the data is prepared for serialization by + including only Tensor instances. + :return: Dictionary with keys and their associated data. If `is_serialized` + is True, the dictionary will contain only Tensor instances. + """ + if is_serialized: + return self._get_serialized_data() # Dict[str, Tensor] + return {field.name: getattr(self, field.name) for field in fields(self)} + + def _get_serialized_data(self) -> Dict[str, Tensor]: + """ + Prepares the data for serialization by including only Tensor instances. + + :return: Dictionary with data for serialization. + """ + serialized_data = {} + for field in fields(self): + key = field.name + value = getattr(self, key) + if isinstance(value, Tensor): + serialized_data[key] = value + else: + raise nncf.InternalError(f"Unsupported type of value: {type(value)}") + return serialized_data + + def load_data(self, data: Dict[str, Tensor]) -> None: + """ + Loads the data from the serialized data. + + :param data: Data to load. 
+ """ + for key in (field.name for field in fields(self)): + setattr(self, key, data[key]) @classmethod def from_config(cls, config: Dict[str, Any]) -> TensorStatistic: - args = {key: config[key] for key in cls.keys()} # noqa: SIM118 + args = {key: config[key] for key in (field.name for field in fields(cls))} return cls(**args) - @classmethod - def keys(cls) -> Tuple[str]: - return () - @dataclass class MinMaxTensorStatistic(TensorStatistic): @@ -49,10 +80,6 @@ class MinMaxTensorStatistic(TensorStatistic): min_values: Tensor max_values: Tensor - @classmethod - def keys(cls): - return (cls.MIN_STAT, cls.MAX_STAT) - def __eq__(self, other: TensorStatistic): if isinstance(other, MinMaxTensorStatistic): return fns.allclose(self.min_values, other.min_values) and fns.allclose(self.max_values, other.max_values) @@ -65,10 +92,6 @@ class AbsMaxTensorStatistic(TensorStatistic): abs_max: Tensor - @classmethod - def keys(cls): - return (cls.ABS_MAX_STAT,) - def __eq__(self, other: TensorStatistic): if isinstance(other, AbsMaxTensorStatistic): return fns.allclose(self.abs_max, other.abs_max) @@ -83,15 +106,24 @@ class MeanTensorStatistic(TensorStatistic): mean_values: Tensor shape: Tuple[int, ...] 
- @classmethod - def keys(cls): - return (cls.MEAN_STAT, cls.SHAPE_STAT) - def __eq__(self, other: TensorStatistic): if isinstance(other, MeanTensorStatistic): return self.shape == other.shape and fns.allclose(self.mean_values, other.mean_values) return False + def _get_serialized_data(self) -> Dict[str, Tensor]: + backend = self.mean_values.backend + dtype = self.mean_values.dtype + device = self.mean_values.device + return { + self.MEAN_STAT: self.mean_values, + self.SHAPE_STAT: fns.tensor(self.shape, backend=backend, dtype=dtype, device=device), + } + + def load_data(self, loaded_data: Dict[str, Tensor]) -> None: + self.mean_values = loaded_data[self.MEAN_STAT] + self.shape_values = tuple(loaded_data[self.SHAPE_STAT].tolist()) + @dataclass class MedianMADTensorStatistic(TensorStatistic): @@ -101,10 +133,6 @@ class MedianMADTensorStatistic(TensorStatistic): median_values: Tensor mad_values: Tensor - @classmethod - def keys(cls): - return (cls.MEDIAN_VALUES_STAT, cls.MAD_VALUES_STAT) - def __eq__(self, other: TensorStatistic): if isinstance(other, MedianMADTensorStatistic): return fns.allclose(self.median_values, other.median_values) and fns.allclose( @@ -126,10 +154,6 @@ class PercentileTensorStatistic(TensorStatistic): percentile_vs_values_dict: Dict[str, Tensor] - @classmethod - def keys(cls): - return (cls.PERCENTILE_VS_VALUE_DICT,) - def __eq__(self, other: TensorStatistic): if isinstance(other, PercentileTensorStatistic): if Counter(self.percentile_vs_values_dict.keys()) != Counter(other.percentile_vs_values_dict.keys()): @@ -150,6 +174,12 @@ def from_config(cls, config: Dict[str, Any]) -> TensorStatistic: percentile_vs_values_dict[percentile] = value return cls(percentile_vs_values_dict=percentile_vs_values_dict) + def _get_serialized_data(self) -> Dict[str, Tensor]: + return self.PERCENTILE_VS_VALUE_DICT + + def load_data(self, loaded_data: Dict[str, Tensor]) -> None: + self.percentile_vs_values_dict = loaded_data + @dataclass class 
RawTensorStatistic(TensorStatistic): @@ -157,10 +187,6 @@ class RawTensorStatistic(TensorStatistic): values: Tensor - @classmethod - def keys(cls): - return (cls.VALUES_STATS,) - def __eq__(self, other: RawTensorStatistic) -> bool: if isinstance(other, RawTensorStatistic): return fns.allclose(self.values, other.values) @@ -173,10 +199,6 @@ class HessianTensorStatistic(TensorStatistic): hessian: Tensor - @classmethod - def keys(cls): - return (cls.HESSIAN_INPUT_ACTIVATION_STATS,) - def __eq__(self, other: TensorStatistic): if isinstance(other, HessianTensorStatistic): return fns.allclose(self.hessian, other.hessian) @@ -189,10 +211,6 @@ class MeanVarianceTensorStatistic(TensorStatistic): mean_variance: Tensor - @classmethod - def keys(cls): - return (cls.MEAN_VARIANCE_STAT,) - def __eq__(self, other: TensorStatistic): if isinstance(other, MeanVarianceTensorStatistic): return fns.allclose(self.mean_variance, other.mean_variance) @@ -205,10 +223,6 @@ class MaxVarianceTensorStatistic(TensorStatistic): max_variance: Tensor - @classmethod - def keys(cls): - return (cls.MAX_VARIANCE_STAT,) - def __eq__(self, other: TensorStatistic): if isinstance(other, MaxVarianceTensorStatistic): return fns.allclose(self.max_variance, other.max_variance) @@ -221,10 +235,6 @@ class MeanMagnitudeTensorStatistic(TensorStatistic): mean_magnitude: Tensor - @classmethod - def keys(cls): - return (cls.MEAN_MAGNITUDE_STAT,) - def __eq__(self, other: TensorStatistic): if isinstance(other, MeanMagnitudeTensorStatistic): return fns.allclose(self.mean_magnitude, other.mean_magnitude) @@ -237,11 +247,7 @@ class WCTensorStatistic(TensorStatistic): SHAPE_STAT = "shape_values" mean_values: List[Tensor] - shape_values: List[Tuple[int, ...]] - - @classmethod - def keys(cls): - return (cls.MEAN_STAT, cls.SHAPE_STAT) + shape_values: List[Tuple[Tensor]] def __eq__(self, other: Any) -> bool: shapes_equal = all(self.shapes[i] == other.shapes[i] for i in range(len(self.mean_values))) @@ -252,6 +258,24 @@ def 
__eq__(self, other: Any) -> bool: ) return mean_values_equal + def _get_serialized_data(self) -> Dict[str, Tensor]: + backend = self.mean_values[0].backend + dtype = self.mean_values[0].dtype + device = self.mean_values[0].device + return { + self.MEAN_STAT: fns.stack(self.mean_values), + self.SHAPE_STAT: fns.tensor( + [[dim.data for dim in shape] for shape in self.shape_values], + backend=backend, + dtype=dtype, + device=device, + ), + } + + def load_data(self, loaded_data: Dict[str, Tensor]) -> None: + self.shape_values = [tuple(shape) for shape in loaded_data[self.SHAPE_STAT]] + self.mean_values = [it for it in loaded_data[self.MEAN_STAT]] + @classmethod def from_config(cls, config: Dict[str, Any]) -> TensorStatistic: mean_values, shape_values = None, None diff --git a/nncf/openvino/quantization/quantize_model.py b/nncf/openvino/quantization/quantize_model.py index 46db1c50cca..423b9d6d42e 100644 --- a/nncf/openvino/quantization/quantize_model.py +++ b/nncf/openvino/quantization/quantize_model.py @@ -403,8 +403,9 @@ def compress_weights_impl( statistics_points = None if advanced_parameters and advanced_parameters.statistics_path: # If there is no such directory, then caches statistics - if not Path(advanced_parameters.statistics_path).exists(): - cache_weight_compression_statistics(model, graph, dataset, subset_size, advanced_parameters.statistics_path) + statistics_path = Path(advanced_parameters.statistics_path) + if not statistics_path.exists(): + cache_weight_compression_statistics(model, graph, dataset, subset_size, statistics_path) statistics_aggregator = StatisticsAggregatorFactory.create(model, dataset) compression_algorithm.set_backend_entity(model) _, matmul_input_to_output_nodes_map = compression_algorithm.get_compression_nodes_info(graph) @@ -415,7 +416,7 @@ def compress_weights_impl( compression_algorithm, matmul_input_to_output_nodes_map, ) - statistics_aggregator.load_statistics_from_dir(advanced_parameters.statistics_path) + 
statistics_aggregator.load_statistics_from_dir(statistics_path) statistics_points = statistics_aggregator.statistic_points return compression_algorithm.apply(model, graph, statistics_points, dataset) diff --git a/nncf/quantization/statistics_caching.py b/nncf/quantization/statistics_caching.py index d6253f2fdda..7d45ef866a5 100644 --- a/nncf/quantization/statistics_caching.py +++ b/nncf/quantization/statistics_caching.py @@ -8,6 +8,7 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +from pathlib import Path from typing import Dict, List, Tuple from nncf.api.compression import TModel @@ -100,7 +101,7 @@ def register_all_statistics( def cache_weight_compression_statistics( - model: TModel, graph: NNCFGraph, dataset: Dataset, subset_size: int, statistics_path: str + model: TModel, graph: NNCFGraph, dataset: Dataset, subset_size: int, statistics_path: Path ) -> None: """ Caches compression statistics for a given model and dataset. diff --git a/nncf/tensor/functions/__init__.py b/nncf/tensor/functions/__init__.py index 5a286a6fc13..9b6b66df746 100644 --- a/nncf/tensor/functions/__init__.py +++ b/nncf/tensor/functions/__init__.py @@ -9,6 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
+from nncf.tensor.functions import io as io from nncf.tensor.functions import linalg as linalg from nncf.tensor.functions.numeric import abs as abs from nncf.tensor.functions.numeric import all as all @@ -56,6 +57,7 @@ from nncf.tensor.functions.numeric import squeeze as squeeze from nncf.tensor.functions.numeric import stack as stack from nncf.tensor.functions.numeric import sum as sum +from nncf.tensor.functions.numeric import tensor as tensor from nncf.tensor.functions.numeric import transpose as transpose from nncf.tensor.functions.numeric import unsqueeze as unsqueeze from nncf.tensor.functions.numeric import unstack as unstack @@ -68,12 +70,14 @@ def _initialize_backends(): import contextlib + import nncf.tensor.functions.numpy_io import nncf.tensor.functions.numpy_linalg import nncf.tensor.functions.numpy_numeric with contextlib.suppress(ImportError): + import nncf.tensor.functions.torch_io import nncf.tensor.functions.torch_linalg - import nncf.tensor.functions.torch_numeric # noqa: F401 + import nncf.tensor.functions.torch_numeric # noqa: F401 _initialize_backends() diff --git a/nncf/tensor/functions/dispatcher.py b/nncf/tensor/functions/dispatcher.py index 11787edebee..193d5a2b15a 100644 --- a/nncf/tensor/functions/dispatcher.py +++ b/nncf/tensor/functions/dispatcher.py @@ -9,10 +9,11 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import functools -from typing import Callable, List +from typing import Callable, Dict, List import numpy as np +import nncf from nncf.tensor import Tensor from nncf.tensor.definitions import TensorBackend @@ -43,6 +44,27 @@ def dispatch_list(fn: "functools._SingleDispatchCallable", tensor_list: List[Ten return fn.dispatch(type(unwrapped_list[0]))(unwrapped_list, *args, **kwargs) +def dispatch_dict(fn: "functools._SingleDispatchCallable", tensor_dict: Dict[str, Tensor], *args, **kwargs): + """ + Dispatches the function to the type of the wrapped data of the any element in tensor_dict. + + :param fn: A function wrapped by `functools.singledispatch`. + :param tensor_dict: Dict of Tensors. + :return: The result value of the function call. + """ + unwrapped_dict = {} + tensor_backend = None + for key, tensor in tensor_dict.items(): + if tensor_backend is None: + tensor_backend = type(tensor.data) + else: + if tensor_backend is not type(tensor.data): + raise nncf.InternalError("All tensors in the dictionary should have the same backend") + unwrapped_dict[key] = tensor.data + + return fn.dispatch(tensor_backend)(unwrapped_dict, *args, **kwargs) + + def register_numpy_types(singledispatch_fn): """ Decorator to register function to singledispatch for numpy classes. @@ -75,3 +97,21 @@ def get_numeric_backend_fn(fn_name: str, backend: TensorBackend) -> Callable: from nncf.tensor.functions import torch_numeric return getattr(torch_numeric, fn_name) + + +def get_io_backend_fn(fn_name: str, backend: TensorBackend) -> Callable: + """ + Returns a io function based on the provided function name and backend type. + + :param fn_name: The name of the numeric function. + :param backend: The backend type for which the function is required. + :return: The backend-specific io function. 
+ """ + if backend == TensorBackend.numpy: + from nncf.tensor.functions import numpy_io + + return getattr(numpy_io, fn_name) + if backend == TensorBackend.torch: + from nncf.tensor.functions import torch_io + + return getattr(torch_io, fn_name) diff --git a/nncf/tensor/functions/io.py b/nncf/tensor/functions/io.py new file mode 100644 index 00000000000..768bc7a6c90 --- /dev/null +++ b/nncf/tensor/functions/io.py @@ -0,0 +1,55 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import functools +from pathlib import Path +from typing import Dict, Optional + +from nncf.tensor import Tensor +from nncf.tensor.definitions import TensorBackend +from nncf.tensor.definitions import TensorDeviceType +from nncf.tensor.functions.dispatcher import dispatch_dict +from nncf.tensor.functions.dispatcher import get_io_backend_fn + + +def load_file( + file_path: Path, + *, + backend: TensorBackend, + device: Optional[TensorDeviceType] = None, +) -> Dict[str, Tensor]: + """ + Loads a file containing tensor data and returns a dictionary of tensors. + + :param file_path: The path to the file to be loaded. + :param backend: The backend type to determine the loading function. + :param device: The device on which the tensor will be allocated, If device is not given, + then the default device is determined by backend. + :return: A dictionary where the keys are tensor names and the values are Tensor objects. 
+ """ + loaded_dict = get_io_backend_fn("load_file", backend)(file_path, device=device) + return {key: Tensor(val) for key, val in loaded_dict.items()} + + +@functools.singledispatch +def save_file( + data: Dict[str, Tensor], + file_path: Path, +) -> None: + """ + Saves a dictionary of tensors to a file. + + :param data: A dictionary where the keys are tensor names and the values are Tensor objects. + :param file_path: The path to the file where the tensor data will be saved. + """ + if isinstance(data, dict): + return dispatch_dict(save_file, data, file_path) + raise NotImplementedError(f"Function `save_file` is not implemented for {type(data)}") diff --git a/nncf/tensor/functions/numeric.py b/nncf/tensor/functions/numeric.py index 061d1ee6e66..391e4b21c7c 100644 --- a/nncf/tensor/functions/numeric.py +++ b/nncf/tensor/functions/numeric.py @@ -11,7 +11,7 @@ import functools from collections import deque -from typing import Any, Callable, List, Optional, Tuple, Union +from typing import Any, Callable, List, Optional, Sequence, Tuple, Union import numpy as np @@ -24,6 +24,7 @@ from nncf.tensor.functions.dispatcher import dispatch_list from nncf.tensor.functions.dispatcher import get_numeric_backend_fn from nncf.tensor.functions.dispatcher import tensor_guard +from nncf.tensor.tensor import TTensor @functools.singledispatch @@ -905,3 +906,24 @@ def ceil(a: Tensor) -> Tensor: :return: An array of the same type as a, containing the ceiling values. """ return Tensor(ceil(a.data)) + + +def tensor( + data: Union[TTensor, Sequence[float]], + *, + backend: TensorBackend, + dtype: Optional[TensorDataType] = None, + device: Optional[TensorDeviceType] = None, +) -> Tensor: + """ + Creates a tensor from the given data. + + :param data: The data for the tensor. + :param backend: The backend type for which the tensor is required. + :param dtype: The data type of the returned tensor, If dtype is not given, + then the default data type is determined by backend. 
+ :param device: The device on which the tensor will be allocated, If device is not given, + then the default device is determined by backend. + :return: A tensor created from the given data. + """ + return Tensor(get_numeric_backend_fn("tensor", backend)(data, dtype=dtype, device=device)) diff --git a/nncf/tensor/functions/numpy_io.py b/nncf/tensor/functions/numpy_io.py new file mode 100644 index 00000000000..9f1879a728b --- /dev/null +++ b/nncf/tensor/functions/numpy_io.py @@ -0,0 +1,31 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Dict, Optional + +import numpy as np +from safetensors.numpy import load_file as np_load_file +from safetensors.numpy import save_file as np_save_file + +from nncf.tensor.definitions import TensorDeviceType +from nncf.tensor.functions import io as io +from nncf.tensor.functions.dispatcher import register_numpy_types +from nncf.tensor.functions.numpy_numeric import validate_device + + +def load_file(file_path: str, *, device: Optional[TensorDeviceType] = None) -> Dict[str, np.ndarray]: + validate_device(device) + return np_load_file(file_path) + + +@register_numpy_types(io.save_file) +def _(data: Dict[str, np.ndarray], file_path: str) -> None: + return np_save_file(data, file_path) diff --git a/nncf/tensor/functions/numpy_numeric.py b/nncf/tensor/functions/numpy_numeric.py index 2496882ddba..aac6cb801bd 100644 --- a/nncf/tensor/functions/numpy_numeric.py +++ b/nncf/tensor/functions/numpy_numeric.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Callable, List, Optional, Tuple, Union +from typing import Any, Callable, List, Optional, Sequence, Tuple, Union import numpy as np @@ -19,6 +19,7 @@ from nncf.tensor.definitions import TypeInfo from nncf.tensor.functions import numeric as numeric from nncf.tensor.functions.dispatcher import register_numpy_types +from nncf.tensor.tensor import TTensor DTYPE_MAP = { TensorDataType.float16: np.dtype(np.float16), @@ -33,6 +34,15 @@ DTYPE_MAP_REV = {v: k for k, v in DTYPE_MAP.items()} +def validate_device(device: TensorDeviceType) -> None: + if device is not None and device != TensorDeviceType.CPU: + raise ValueError("numpy_numeric only supports CPU device.") + + +def convert_to_numpy_dtype(dtype: TensorDataType) -> np.dtype: + return DTYPE_MAP[dtype] if dtype is not None else None + + @register_numpy_types(numeric.device) def _(a: Union[np.ndarray, np.generic]) -> TensorDeviceType: return TensorDeviceType.CPU @@ -187,7 +197,7 @@ def _( keepdims: bool = False, dtype: Optional[TensorDataType] = None, ) -> np.ndarray: - dtype = DTYPE_MAP[dtype] if dtype else None + dtype = convert_to_numpy_dtype(dtype) return np.array(np.mean(a, axis=axis, keepdims=keepdims, dtype=dtype)) @@ -387,10 +397,8 @@ def zeros( dtype: Optional[TensorDataType] = None, device: Optional[TensorDeviceType] = None, ) -> np.ndarray: - if device is not None and device != TensorDeviceType.CPU: - raise ValueError("numpy_numeric.zeros only supports CPU device.") - if dtype is not None: - dtype = DTYPE_MAP[dtype] + validate_device(device) + dtype = convert_to_numpy_dtype(dtype) return np.zeros(shape, dtype=dtype) @@ -401,10 +409,8 @@ def eye( dtype: Optional[TensorDataType] = None, device: Optional[TensorDeviceType] = None, ) -> np.ndarray: - if device is not None and device != TensorDeviceType.CPU: - raise ValueError("numpy_numeric.eye only supports CPU device.") - if dtype is not None: - dtype = DTYPE_MAP[dtype] + validate_device(device) + dtype = convert_to_numpy_dtype(dtype) 
return np.eye(n, m, dtype=dtype) @@ -416,10 +422,8 @@ def arange( dtype: Optional[TensorDataType] = None, device: Optional[TensorDeviceType] = None, ) -> np.ndarray: - if device is not None and device != TensorDeviceType.CPU: - raise ValueError("numpy_numeric.arange only supports CPU device.") - if dtype is not None: - dtype = DTYPE_MAP[dtype] + validate_device(device) + dtype = convert_to_numpy_dtype(dtype) return np.arange(start, end, step, dtype=dtype) @@ -431,3 +435,14 @@ def _(a: Union[np.ndarray, np.generic]) -> Union[np.ndarray, np.generic]: @register_numpy_types(numeric.ceil) def _(a: Union[np.ndarray, np.generic]) -> np.ndarray: return np.ceil(a) + + +def tensor( + data: Union[TTensor, Sequence[float]], + *, + dtype: Optional[TensorDataType] = None, + device: Optional[TensorDeviceType] = None, +) -> np.ndarray: + validate_device(device) + dtype = convert_to_numpy_dtype(dtype) + return np.array(data, dtype=dtype) diff --git a/nncf/tensor/functions/torch_io.py b/nncf/tensor/functions/torch_io.py new file mode 100644 index 00000000000..48c6e897b8d --- /dev/null +++ b/nncf/tensor/functions/torch_io.py @@ -0,0 +1,30 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from typing import Dict, Optional + +import torch +from safetensors.torch import load_file as pt_load_file +from safetensors.torch import save_file as pt_save_file + +from nncf.tensor import TensorDeviceType +from nncf.tensor.functions import io as io +from nncf.tensor.functions.torch_numeric import convert_to_torch_device + + +def load_file(file_path: str, *, device: Optional[TensorDeviceType] = None) -> Dict[str, torch.Tensor]: + device = convert_to_torch_device(device) + return pt_load_file(file_path, device=device) + + +@io.save_file.register(torch.Tensor) +def _(data: Dict[str, torch.Tensor], file_path: str) -> None: + return pt_save_file(data, file_path) diff --git a/nncf/tensor/functions/torch_numeric.py b/nncf/tensor/functions/torch_numeric.py index e3163d28aab..84f82971f07 100644 --- a/nncf/tensor/functions/torch_numeric.py +++ b/nncf/tensor/functions/torch_numeric.py @@ -9,7 +9,7 @@ # See the License for the specific language governing permissions and # limitations under the License. 
-from typing import Any, Callable, List, Optional, Tuple, Union +from typing import Any, Callable, List, Optional, Sequence, Tuple, Union import numpy as np import torch @@ -19,6 +19,7 @@ from nncf.tensor.definitions import TensorBackend from nncf.tensor.definitions import TypeInfo from nncf.tensor.functions import numeric as numeric +from nncf.tensor.tensor import TTensor DTYPE_MAP = { TensorDataType.float16: torch.float16, @@ -37,6 +38,14 @@ DEVICE_MAP_REV = {v: k for k, v in DEVICE_MAP.items()} +def convert_to_torch_device(device: TensorDeviceType) -> str: + return DEVICE_MAP[device] if device is not None else None + + +def convert_to_torch_dtype(dtype: TensorDataType) -> torch.dtype: + return DTYPE_MAP[dtype] if dtype is not None else None + + @numeric.device.register(torch.Tensor) def _(a: torch.Tensor) -> TensorDeviceType: return DEVICE_MAP_REV[a.device.type] @@ -200,7 +209,7 @@ def _( keepdims: bool = False, dtype: Optional[TensorDataType] = None, ) -> torch.Tensor: - dtype = DTYPE_MAP[dtype] if dtype else None + dtype = convert_to_torch_dtype(dtype) return torch.mean(a, dim=axis, keepdim=keepdims, dtype=dtype) @@ -416,10 +425,8 @@ def zeros( dtype: Optional[TensorDataType] = None, device: Optional[TensorDeviceType] = None, ) -> torch.Tensor: - if dtype is not None: - dtype = DTYPE_MAP[dtype] - if device is not None: - device = DEVICE_MAP[device] + device = convert_to_torch_device(device) + dtype = convert_to_torch_dtype(dtype) return torch.zeros(*shape, dtype=dtype, device=device) @@ -430,10 +437,8 @@ def eye( dtype: Optional[TensorDataType] = None, device: Optional[TensorDeviceType] = None, ) -> torch.Tensor: - if dtype is not None: - dtype = DTYPE_MAP[dtype] - if device is not None: - device = DEVICE_MAP[device] + device = convert_to_torch_device(device) + dtype = convert_to_torch_dtype(dtype) p_args = (n,) if m is None else (n, m) return torch.eye(*p_args, dtype=dtype, device=device) @@ -446,10 +451,8 @@ def arange( dtype: Optional[TensorDataType] = 
None, device: Optional[TensorDeviceType] = None, ) -> torch.Tensor: - if dtype is not None: - dtype = DTYPE_MAP[dtype] - if device is not None: - device = DEVICE_MAP[device] + device = convert_to_torch_device(device) + dtype = convert_to_torch_dtype(dtype) return torch.arange(start, end, step, dtype=dtype, device=device) @@ -465,3 +468,14 @@ def _(a: torch.Tensor) -> torch.Tensor: @numeric.ceil.register(torch.Tensor) def _(a: torch.Tensor) -> torch.Tensor: return torch.ceil(a) + + +def tensor( + data: Union[TTensor, Sequence[float]], + *, + dtype: Optional[TensorDataType] = None, + device: Optional[TensorDeviceType] = None, +) -> torch.Tensor: + device = convert_to_torch_device(device) + dtype = convert_to_torch_dtype(dtype) + return torch.tensor(data, dtype=dtype, device=device) diff --git a/nncf/tensor/tensor.py b/nncf/tensor/tensor.py index 9edbd4acb50..20cc73ea1f6 100644 --- a/nncf/tensor/tensor.py +++ b/nncf/tensor/tensor.py @@ -11,8 +11,10 @@ from __future__ import annotations import operator -from typing import Any, Optional, Tuple, TypeVar, Union +from typing import Any, Dict, Optional, Tuple, TypeVar, Union +import nncf +from nncf.common.utils.backend import BackendType from nncf.tensor.definitions import TensorBackend from nncf.tensor.definitions import TensorDataType from nncf.tensor.definitions import TensorDeviceType @@ -162,13 +164,6 @@ def __gt__(self, other: Union[Tensor, float]) -> Tensor: def __ge__(self, other: Union[Tensor, float]) -> Tensor: return Tensor(self.data >= unwrap_tensor_data(other)) - # Methods to support pickling and unpickling - def __getstate__(self): - return self._data - - def __setstate__(self, state): - self._data = state - # Tensor functions def squeeze(self, axis: Optional[Union[int, Tuple[int, ...]]] = None) -> Tensor: @@ -251,3 +246,22 @@ def unwrap_tensor_data(obj: Any) -> TTensor: :return: The data of the Tensor object, or the object itself. 
""" return obj.data if isinstance(obj, Tensor) else obj + + +def get_tensor_backend(backend: BackendType) -> TensorBackend: + """ + Returns a tensor backend based on the provided backend. + + :param backend: Backend type. + :return: Corresponding tensor backend type. + """ + BACKEND_TO_TENSOR_BACKEND: Dict[BackendType, TensorBackend] = { + BackendType.OPENVINO: TensorBackend.numpy, + BackendType.ONNX: TensorBackend.numpy, + BackendType.TORCH_FX: TensorBackend.torch, + BackendType.TORCH: TensorBackend.torch, + } + if backend not in BACKEND_TO_TENSOR_BACKEND: + raise nncf.ValidationError(f"Unsupported backend type: {backend}") + + return BACKEND_TO_TENSOR_BACKEND[backend] diff --git a/pyproject.toml b/pyproject.toml index 883c5093384..3f475f9786e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,6 +50,7 @@ dependencies = [ "scipy>=1.3.2", "tabulate>=0.9.0", "tqdm>=4.54.1", + "safetensors>=0.4.1" ] [project.optional-dependencies] diff --git a/tests/common/experimental/test_reducers_and_aggregators.py b/tests/common/experimental/test_reducers_and_aggregators.py index 9b490e5a0d2..a693efed0ac 100644 --- a/tests/common/experimental/test_reducers_and_aggregators.py +++ b/tests/common/experimental/test_reducers_and_aggregators.py @@ -567,5 +567,5 @@ def test_hawq_aggregator(self, inputs, reference_output): for x in inputs: aggregator.register_reduced_input(self.get_nncf_tensor(x, Dtype.FLOAT)) - ret_val = aggregator.aggregate()[0] + ret_val = aggregator.aggregate() assert fns.allclose(ret_val, reference_output) diff --git a/tests/common/test_statistics_caching.py b/tests/common/test_statistics_caching.py deleted file mode 100644 index 0093c39545f..00000000000 --- a/tests/common/test_statistics_caching.py +++ /dev/null @@ -1,89 +0,0 @@ -# Copyright (c) 2024 Intel Corporation -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# http://www.apache.org/licenses/LICENSE-2.0 -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -from collections import deque - -import numpy as np -import pytest - -import nncf -import nncf.common.tensor_statistics.statistics_serializer as statistics_serializer -from nncf.tensor import Tensor -from nncf.tensor.functions import allclose - - -def _compare_dicts(dict1, dict2): - """ - Recursively compares two dictionaries. - Supports comparing numpy arrays and Tensor objects. - """ - if not isinstance(dict1, dict) or not isinstance(dict2, dict): - raise ValueError("Both inputs must be dictionaries") - - if dict1.keys() != dict2.keys(): - return False - - for key in dict1: - val1 = dict1[key] - val2 = dict2[key] - - if isinstance(val1, np.ndarray) and isinstance(val2, np.ndarray): - if not np.array_equal(val1, val2): - return False - elif isinstance(val1, Tensor) and isinstance(val2, Tensor): - if not allclose(val1, val2): - return False - # Recursively compare nested dictionaries - elif isinstance(val1, dict) and isinstance(val2, dict): - if not _compare_dicts(val1, val2): - return False - # Direct comparison for other types - else: - if val1 != val2: - return False - - return True - - -@pytest.fixture -def dummy_statistics(): - """ - Returns a dummy statistics dictionary for testing purposes. - """ - return { - "point_A": {"min": 1, "max": 2}, - "point_B": { - "min_tuple": (1, 2), - "max_dict": {"tensor_1": [10, 10], "tensor_2": deque([1, 2])}, - "tensor_numpy": Tensor(np.ones(shape=(10, 5, 3))), - }, - } - - -def test_dump_and_load_statistics(tmp_path, dummy_statistics): - """ - Tests that dumped statistics can be loaded and match the original. 
- """ - test_dir = "test_dir" - statistics_serializer.dump_to_dir(dummy_statistics, tmp_path / test_dir) - assert (tmp_path / test_dir).exists(), "Dumped file was not created" - - loaded_statistics, _ = statistics_serializer.load_from_dir(tmp_path / test_dir) - assert _compare_dicts(dummy_statistics, loaded_statistics), "Loaded statistics do not match the original" - - -def test_load_statistics_from_non_existent_dir(): - """ - Tests that attempting to load statistics from a non-existent directory raises an error. - """ - file_path = "non_existent_dir" - with pytest.raises(nncf.ValidationError) as exc_info: - statistics_serializer.load_from_dir(file_path) - assert "The provided directory path does not exist." in str(exc_info) diff --git a/tests/common/test_statistics_serializer.py b/tests/common/test_statistics_serializer.py index 2ecfc7c0a57..ea2536fff1c 100644 --- a/tests/common/test_statistics_serializer.py +++ b/tests/common/test_statistics_serializer.py @@ -9,16 +9,42 @@ # See the License for the specific language governing permissions and # limitations under the License. 
import json -from pathlib import Path +from collections import defaultdict +from typing import Dict +import numpy as np import pytest import nncf -from nncf.common.tensor_statistics.statistics_serializer import dump_to_dir -from nncf.common.tensor_statistics.statistics_serializer import load_from_dir +from nncf.common.tensor_statistics.statistics_serializer import add_unique_name from nncf.common.tensor_statistics.statistics_serializer import load_metadata from nncf.common.tensor_statistics.statistics_serializer import sanitize_filename from nncf.common.tensor_statistics.statistics_serializer import save_metadata +from nncf.common.utils.backend import BackendType +from nncf.common.utils.os import safe_open +from nncf.tensor import Tensor +from nncf.tensor.definitions import TensorBackend +from tests.cross_fw.test_templates.test_statistics_serializer import TemplateTestStatisticsSerializer + + +class TestNPStatisticsSerializer(TemplateTestStatisticsSerializer): + def _get_backend_statistics(self) -> Dict[str, Dict[str, np.ndarray]]: + return { + "layer/1/activation": {"mean": Tensor(np.array([0.1, 0.2, 0.3]))}, + "layer/2/activation": {"variance": Tensor(np.array([0.05, 0.06, 0.07]))}, + } + + def _get_backend(self) -> TensorBackend: + # any backend for numpy tensor, e.g. 
OpenVINO + return BackendType.OPENVINO + + def is_equal(self, a1: Dict[str, Tensor], a2: Dict[str, Tensor]) -> bool: + for key in a1: + if key not in a2: + return False + if not np.array_equal(a1[key].data, a2[key].data): + return False + return True def test_sanitize_filename(): @@ -27,17 +53,32 @@ def test_sanitize_filename(): assert sanitized == "layer_1_mean_activation", "Filename was not sanitized correctly" +def test_sanitize_filenames_with_collisions(): + filename_1 = "layer/1_mean:activation" + filename_2 = "layer.1_mean/activation" + unique_map = defaultdict(list) + for filename in (filename_1, filename_2): + sanitized = sanitize_filename(filename) + add_unique_name(sanitized, unique_map) + assert unique_map[sanitized] == ["layer_1_mean_activation_1", "layer_1_mean_activation_2"] + + def test_load_metadata(tmp_path): # Create a metadata file in the temp directory metadata = {"mapping": {"key1": "value1"}, "metadata": {"model": "test"}} metadata_file = tmp_path / "statistics_metadata.json" - with open(metadata_file, "w") as f: + with safe_open(metadata_file, "w") as f: json.dump(metadata, f) loaded_metadata = load_metadata(tmp_path) assert loaded_metadata == metadata, "Metadata was not loaded correctly" +def test_load_no_existing_metadata(tmp_path): + with pytest.raises(nncf.StatisticsCacheError, match="Metadata file does not exist in the following path"): + load_metadata(tmp_path) + + def test_save_metadata(tmp_path): metadata = {"mapping": {"key1": "value1"}, "metadata": {"model": "test"}} save_metadata(metadata, tmp_path) @@ -45,40 +86,6 @@ def test_save_metadata(tmp_path): metadata_file = tmp_path / "statistics_metadata.json" assert metadata_file.exists(), "Metadata file was not created" - with open(metadata_file, "r") as f: + with safe_open(metadata_file, "r") as f: loaded_metadata = json.load(f) assert loaded_metadata == metadata, "Metadata was not saved correctly" - - -def test_dump_and_load_statistics(tmp_path): - statistics = 
{"layer/1_mean/activation": [0.1, 0.2, 0.3], "layer/2_variance": [0.05, 0.06, 0.07]} - additional_metadata = {"model": "facebook/opt-125m", "compression": "8-bit"} - - dump_to_dir(statistics, tmp_path, additional_metadata) - - assert len(list(Path(tmp_path).iterdir())) > 0, "No files created during dumping" - - metadata_file = tmp_path / "statistics_metadata.json" - assert metadata_file.exists(), "Metadata file was not created" - - with open(metadata_file, "r") as f: - metadata = json.load(f) - assert "mapping" in metadata, "Mapping is missing in metadata" - assert metadata["metadata"]["model"] == "facebook/opt-125m" - - # Load the statistics and ensure it was loaded correctly - loaded_statistics, loaded_metadata = load_from_dir(tmp_path) - assert "layer/1_mean/activation" in loaded_statistics, "Statistics not loaded correctly" - assert loaded_statistics["layer/1_mean/activation"] == [0.1, 0.2, 0.3] - assert loaded_metadata["model"] == "facebook/opt-125m", "Metadata not loaded correctly" - - -def test_invalid_gzip_file(tmp_path): - # Create a corrupt gzip file in the directory - invalid_file = tmp_path / "invalid_file.gz" - with open(invalid_file, "w") as f: - f.write("This is not a valid gzip file") - - # Expect the load_from_dir to raise an error when trying to load the invalid file - with pytest.raises(nncf.InternalError, match="Error loading statistics"): - load_from_dir(tmp_path) diff --git a/tests/common/test_statistics_validator.py b/tests/common/test_statistics_validator.py index 8f234f011e6..8c4c1caecf5 100644 --- a/tests/common/test_statistics_validator.py +++ b/tests/common/test_statistics_validator.py @@ -10,7 +10,6 @@ # limitations under the License. 
import pytest -import nncf from nncf.common.tensor_statistics.statistics_validator import validate_backend from nncf.common.utils.backend import BackendType @@ -23,12 +22,12 @@ def test_validate_backend(backend_value): validate_backend(data, backend) - with pytest.raises(nncf.ValidationError) as exc_info: + with pytest.raises(ValueError) as exc_info: # Test case where backend does not match validate_backend({"backend": BackendType.ONNX.value}, BackendType.TORCH) - assert "Backend in loaded statistics ONNX does not match to an expected backend Torch." in str(exc_info) + assert "Backend in loaded statistics" in str(exc_info) - with pytest.raises(nncf.ValidationError) as exc_info: + with pytest.raises(ValueError) as exc_info: # Test case where backend key is missing validate_backend({}, BackendType.TORCH) assert "The provided metadata has no information about backend." in str(exc_info) diff --git a/tests/cross_fw/test_templates/template_test_nncf_tensor.py b/tests/cross_fw/test_templates/template_test_nncf_tensor.py index 13f2d6bc976..1df4393fd86 100644 --- a/tests/cross_fw/test_templates/template_test_nncf_tensor.py +++ b/tests/cross_fw/test_templates/template_test_nncf_tensor.py @@ -1695,3 +1695,26 @@ def test_svd(self, a, full_matrices, abs_res_ref): for act, abs_ref in zip(res, abs_res_ref): assert isinstance(act, Tensor) assert fns.allclose(fns.abs(act), abs_ref, atol=1e-7) + + @pytest.mark.parametrize("data", [[[3.0, 2.0, 2.0], [2.0, 3.0, -2.0]]]) + def test_save_load_file(self, tmp_path, data): + tensor_key, tensor_filename = "tensor_key", "test_tensor" + tensor = Tensor(self.to_tensor(data)) + stat = {tensor_key: tensor} + fns.io.save_file(stat, tmp_path / tensor_filename) + loaded_stat = fns.io.load_file(tmp_path / tensor_filename, backend=tensor.backend, device=tensor.device) + assert fns.allclose(stat[tensor_key], loaded_stat[tensor_key]) + assert isinstance(loaded_stat[tensor_key], Tensor) + assert loaded_stat[tensor_key].backend == tensor.backend + assert 
loaded_stat[tensor_key].device == tensor.device + assert loaded_stat[tensor_key].dtype == tensor.dtype + + @pytest.mark.parametrize("data", [[3.0, 2.0, 2.0], [1, 2, 3]]) + @pytest.mark.parametrize("dtype", [TensorDataType.float32, TensorDataType.int32, TensorDataType.uint8, None]) + def test_fn_tensor(self, data, dtype): + nncf_tensor = fns.tensor(data, backend=self.backend(), dtype=dtype, device=self.device()) + backend_tensor = Tensor(self.to_tensor(data)) + if dtype is not None: + backend_tensor = backend_tensor.astype(dtype) + assert fns.allclose(nncf_tensor, backend_tensor) + assert nncf_tensor.dtype == backend_tensor.dtype diff --git a/tests/cross_fw/test_templates/test_statistics_caching.py b/tests/cross_fw/test_templates/test_statistics_caching.py index 1035dc8fcb4..f5e902178be 100644 --- a/tests/cross_fw/test_templates/test_statistics_caching.py +++ b/tests/cross_fw/test_templates/test_statistics_caching.py @@ -11,7 +11,6 @@ from abc import abstractmethod from pathlib import Path -import numpy as np import pytest import nncf @@ -48,6 +47,14 @@ def get_statistics_aggregator(self): """ pass + @abstractmethod + def _create_dummy_min_max_tensor(self) -> Tensor: + """ + Creates a dummy tensor for testing purposes. + + :return: A Tensor object with dummy data. + """ + def _create_dummy_statistic_point(self) -> StatisticPoint: """ Creates a dummy statistic point for testing purposes. 
@@ -56,7 +63,7 @@ def _create_dummy_statistic_point(self) -> StatisticPoint: """ dummy_t_p = self.create_target_point(TargetType.PRE_LAYER_OPERATION, "dummy_name", 0) dummy_tensor_collector = TensorCollector() - dummy_tensor_collector._cached_statistics = MinMaxTensorStatistic(Tensor(np.zeros((3))), Tensor(np.ones((3)))) + dummy_tensor_collector._cached_statistics = MinMaxTensorStatistic(*self._create_dummy_min_max_tensor()) return StatisticPoint( target_point=dummy_t_p, tensor_collector=dummy_tensor_collector, algorithm="dummy_algorithm" ) @@ -98,5 +105,5 @@ def test_incorrect_backend_statistics_load(self, tmp_path: Path): assert (tmp_path / test_file).exists(), "Statistics file was not created" # spoil backend aggregator.BACKEND = BackendType.TENSORFLOW - with pytest.raises(nncf.ValidationError): + with pytest.raises(nncf.StatisticsCacheError): aggregator.load_statistics_from_dir(tmp_path / test_file) diff --git a/tests/cross_fw/test_templates/test_statistics_serializer.py b/tests/cross_fw/test_templates/test_statistics_serializer.py new file mode 100644 index 00000000000..bf750c04dd7 --- /dev/null +++ b/tests/cross_fw/test_templates/test_statistics_serializer.py @@ -0,0 +1,72 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import json +from abc import abstractmethod +from pathlib import Path +from typing import Dict + +import pytest + +import nncf +from nncf.common.tensor_statistics.statistics_serializer import dump_statistics +from nncf.common.tensor_statistics.statistics_serializer import load_statistics +from nncf.common.utils.backend import BackendType +from nncf.common.utils.os import safe_open +from nncf.tensor.tensor import Tensor + + +class TemplateTestStatisticsSerializer: + @abstractmethod + def _get_backend_statistics(self) -> Dict[str, Dict[str, Tensor]]: + """Returns a dictionary of statistics for testing purposes.""" + + @abstractmethod + def _get_backend(self) -> BackendType: + """Returns the backend used for testing.""" + + @abstractmethod + def is_equal(self, a1: Dict[str, Tensor], a2: Dict[str, Tensor]) -> bool: + """Determine if two statistics are equal.""" + + def test_load_no_statistics_file(self, tmp_path): + # Create a metadata file in the temp directory + backend = self._get_backend() + metadata = {"mapping": {"key1": "value1"}, "model": "test", "backend": backend.value} + metadata_file = tmp_path / "statistics_metadata.json" + with safe_open(metadata_file, "w") as f: + json.dump(metadata, f) + + # Expect the load_statistics_from_dir to raise an error when trying to load non existed statistics + with pytest.raises(nncf.StatisticsCacheError, match="One of the statistics file:"): + load_statistics(tmp_path, backend) + + def test_dump_and_load_statistics(self, tmp_path): + backend = self._get_backend() + statistics = self._get_backend_statistics() + additional_metadata = {"model": "facebook/opt-125m", "compression": "8-bit"} + + dump_statistics(statistics, tmp_path, backend, additional_metadata) + + assert len(list(Path(tmp_path).iterdir())) > 0, "No files created during dumping" + + metadata_file = tmp_path / "statistics_metadata.json" + assert metadata_file.exists(), "Metadata file was not created" + + with safe_open(metadata_file, "r") as f: + metadata = 
json.load(f) + assert "mapping" in metadata, "Mapping is missing in metadata" + assert metadata["model"] == "facebook/opt-125m" + + # Load the statistics and ensure it was loaded correctly + loaded_statistics = load_statistics(tmp_path, backend) + for layer_name, stat in statistics.items(): + assert layer_name in loaded_statistics, "Statistics not loaded correctly" + assert self.is_equal(loaded_statistics[layer_name], stat) diff --git a/tests/onnx/test_statistics_caching.py b/tests/onnx/test_statistics_caching.py index e1224e7b5f3..c02548aa8ca 100644 --- a/tests/onnx/test_statistics_caching.py +++ b/tests/onnx/test_statistics_caching.py @@ -8,9 +8,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. +import numpy as np + from nncf.common.graph.transformations.commands import TargetType from nncf.onnx.graph.transformations.commands import ONNXTargetPoint from nncf.onnx.statistics.aggregator import ONNXStatisticsAggregator +from nncf.tensor import Tensor from tests.cross_fw.test_templates.test_statistics_caching import TemplateTestStatisticsCaching @@ -20,3 +23,6 @@ def create_target_point(self, target_point_type: TargetType, name: str, port_id: def get_statistics_aggregator(self): return ONNXStatisticsAggregator(None) + + def _create_dummy_min_max_tensor(self) -> Tensor: + return Tensor(np.zeros((3))), Tensor(np.ones((3))) diff --git a/tests/openvino/native/test_statistics_caching.py b/tests/openvino/native/test_statistics_caching.py index 15000d0bba7..874e9daf4a4 100644 --- a/tests/openvino/native/test_statistics_caching.py +++ b/tests/openvino/native/test_statistics_caching.py @@ -8,9 +8,12 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import numpy as np + from nncf.common.graph.transformations.commands import TargetType from nncf.openvino.graph.transformations.commands import OVTargetPoint from nncf.openvino.statistics.aggregator import OVStatisticsAggregator +from nncf.tensor import Tensor from tests.cross_fw.test_templates.test_statistics_caching import TemplateTestStatisticsCaching @@ -20,3 +23,6 @@ def create_target_point(self, target_point_type: TargetType, name: str, port_id: def get_statistics_aggregator(self): return OVStatisticsAggregator(None) + + def _create_dummy_min_max_tensor(self) -> Tensor: + return Tensor(np.zeros((3))), Tensor(np.ones((3))) diff --git a/tests/torch/fx/test_statistics_caching.py b/tests/torch/fx/test_statistics_caching.py index 1533769c69a..7635e82cd3d 100644 --- a/tests/torch/fx/test_statistics_caching.py +++ b/tests/torch/fx/test_statistics_caching.py @@ -8,8 +8,11 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import torch + from nncf.common.graph.transformations.commands import TargetType from nncf.experimental.torch.fx.statistics.aggregator import FXStatisticsAggregator +from nncf.tensor import Tensor from nncf.torch.graph.transformations.commands import PTTargetPoint from tests.cross_fw.test_templates.test_statistics_caching import TemplateTestStatisticsCaching @@ -20,3 +23,6 @@ def create_target_point(self, target_point_type: TargetType, name: str, port_id: def get_statistics_aggregator(self): return FXStatisticsAggregator(None) + + def _create_dummy_min_max_tensor(self) -> Tensor: + return Tensor(torch.zeros((3))), Tensor(torch.ones((3))) diff --git a/tests/torch/ptq/test_statistics_caching.py b/tests/torch/ptq/test_statistics_caching.py index 6fd527ce15c..39e707a6521 100644 --- a/tests/torch/ptq/test_statistics_caching.py +++ b/tests/torch/ptq/test_statistics_caching.py @@ -8,7 +8,10 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import torch + from nncf.common.graph.transformations.commands import TargetType +from nncf.tensor import Tensor from nncf.torch.graph.transformations.commands import PTTargetPoint from nncf.torch.statistics.aggregator import PTStatisticsAggregator from tests.cross_fw.test_templates.test_statistics_caching import TemplateTestStatisticsCaching @@ -20,3 +23,6 @@ def create_target_point(self, target_point_type: TargetType, name: str, port_id: def get_statistics_aggregator(self): return PTStatisticsAggregator(None) + + def _create_dummy_min_max_tensor(self) -> Tensor: + return Tensor(torch.zeros((3))), Tensor(torch.ones((3))) diff --git a/tests/torch/test_statistics_serializer.py b/tests/torch/test_statistics_serializer.py new file mode 100644 index 00000000000..a09c3561d86 --- /dev/null +++ b/tests/torch/test_statistics_serializer.py @@ -0,0 +1,37 @@ +# Copyright (c) 2024 Intel Corporation +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# http://www.apache.org/licenses/LICENSE-2.0 +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+from typing import Dict
+
+import torch
+
+from nncf.common.utils.backend import BackendType
+from nncf.tensor import Tensor
+from nncf.tensor.definitions import TensorBackend
+from tests.cross_fw.test_templates.test_statistics_serializer import TemplateTestStatisticsSerializer
+
+
+class TestTorchStatisticsSerializer(TemplateTestStatisticsSerializer):
+    def _get_backend_statistics(self) -> Dict[str, Dict[str, Tensor]]:
+        return {
+            "layer/1/activation": {"mean": Tensor(torch.tensor([0.1, 0.2, 0.3]))},
+            "layer/2/activation": {"variance": Tensor(torch.tensor([0.05, 0.06, 0.07]))},
+        }
+
+    def _get_backend(self) -> BackendType:
+        return BackendType.TORCH
+
+    def is_equal(self, a1: Dict[str, Tensor], a2: Dict[str, Tensor]) -> bool:
+        for key in a1:
+            if key not in a2:
+                return False
+            if not torch.allclose(a1[key].data, a2[key].data):
+                return False
+        return True