Enabled nf4 tests after switching to openvino==2023.2 (#2281)
### Changes

As stated in the title.

### Reason for changes

The new version of OpenVINO contains the nf4 type, so the NF4 tests no longer need to be xfailed.
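
For illustration, a minimal sketch of the guard pattern the diff below removes (the helper name and version string come from the diff; the surrounding test is hypothetical):

```python
import pytest


# Hypothetical stand-in for tests.openvino.native.common.get_openvino_version,
# used here only to illustrate the removed guard.
def get_openvino_version() -> str:
    return "2023.2"


@pytest.mark.parametrize("mode", ["int8_sym", "nf4"])
def test_nf4_guard_sketch(mode):
    # Before openvino==2023.2, NF4 cases were marked as expected failures.
    if mode == "nf4" and get_openvino_version() != "2023.2":
        pytest.xfail("NF4 is not supported until 2023.2")
    assert mode in ("int8_sym", "nf4")
```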

### Tests

nf4 tests
ljaljushkin authored Nov 22, 2023
1 parent 638878d commit 8a45acb
Showing 1 changed file with 1 addition and 7 deletions.
```diff
@@ -24,7 +24,6 @@
 from nncf.quantization.algorithms.weight_compression.openvino_backend import _get_integer_quantization_error
 from nncf.quantization.algorithms.weight_compression.openvino_backend import _reshape_weights_for_grouped_quantization
 from nncf.scopes import IgnoredScope
-from tests.openvino.native.common import get_openvino_version
 from tests.openvino.native.models import GatherWithTwoReductionAxes
 from tests.openvino.native.models import IntegerModel
 from tests.openvino.native.models import SequentialMatmulModel
@@ -162,9 +161,6 @@ def get_mixed_mapping(primary_fn: Callable, list_layers: List[str]):
     ),
 )
 def test_compare_compressed_weights(mode, group_size, check_fn_per_node_map):
-    ov_version = get_openvino_version()
-    if mode == CompressWeightsMode.NF4 and ov_version != "2023.2":
-        pytest.xfail("NF4 is not supported until 2023.2")
     model = IntegerModel().ov_model
     compressed_model = compress_weights(model, mode=mode, group_size=group_size)
     actual_stats = {}
@@ -189,13 +185,11 @@ def test_compare_compressed_weights(mode, group_size, check_fn_per_node_map):
     (
         (1, ["weights_1", "weights_2", "weights_3"]),
         (0.8, ["weights_2", "weights_3"]),
-        (0.4, ["weights_3"]),
+        (0.4, ["weights_2"]),
         (0.3, []),
     ),
 )
 def test_mixed_precision(ratio, group_size, ref_nf4_nodes):
-    if ratio > 0.3:
-        pytest.xfail("Waiting for the merge NF4 support in OV - PR 19900")
     model = SequentialMatmulModel().ov_model
     compressed_model = compress_weights(model, mode=CompressWeightsMode.NF4, ratio=ratio, group_size=group_size)
     for op in compressed_model.get_ordered_ops():
```
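
With the guard gone, the NF4 path runs unconditionally. A minimal usage sketch of what the updated tests exercise (the model class and keyword arguments come from the diff above; the import paths and the `group_size` value are assumptions for illustration):

```python
from nncf import CompressWeightsMode, compress_weights
from tests.openvino.native.models import IntegerModel

# Compress the toy model's weights to NF4. This requires openvino>=2023.2,
# which is the first release that provides the nf4 element type.
model = IntegerModel().ov_model
compressed_model = compress_weights(model, mode=CompressWeightsMode.NF4, group_size=3)
```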
