Commit 7898e8c

Use tabulate instead of texttable (#2574)
### Changes

Use the tabulate package instead of texttable to create tables.
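
A rough sketch of the difference at a call site (illustrative values, not code from this PR):

```python
from tabulate import tabulate

header = ["", "FP32", "INT8"]
rows = [["Accuracy@1", 0.556, 0.552], ["Model Size, Mb", 44.7, 11.2]]

# texttable required building a Texttable object and calling header()/add_rows()/draw();
# tabulate renders the same kind of table with a single function call.
print(tabulate(rows, headers=header, tablefmt="mixed_grid", floatfmt=".3f"))
```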

### Reason for changes

Avoids installing two packages for the same task: tabulate is already an optional dependency of the pandas package, which is used in NNCF.
#2542

### Related tickets

134603

AlexanderDokuchaev authored Mar 19, 2024
1 parent d1beec0 commit 7898e8c

Showing 6 changed files with 68 additions and 41 deletions.
37 changes: 16 additions & 21 deletions examples/quantization_aware_training/torch/resnet18/main.py
@@ -27,11 +27,11 @@
 import torchvision.models as models
 import torchvision.transforms as transforms
 from fastdownload import FastDownload
-from texttable import Texttable
 from torch.jit import TracerWarning

 import nncf
 from nncf.common.logging.track_progress import track
+from nncf.common.utils.helpers import create_table

 warnings.filterwarnings("ignore", category=TracerWarning)
 warnings.filterwarnings("ignore", category=UserWarning)
@@ -303,28 +303,23 @@ def transform_fn(data_item):
     ###############################################################################
     # Step 6: Summary
     print(os.linesep + "[Step 6] Summary")

-    table = Texttable()
-    table.header(["", "FP32", "INT8", "Summary"])
-    table.add_rows(
-        [
-            [
-                "Accuracy@1",
-                f"{acc1_fp32:.3f}",
-                f"{acc1_int8:.3f}",
-                f"{acc1_int8_init:.3f} (init) + {acc1_int8 - acc1_int8_init:.3f} (tuned)",
-            ],
-            [
-                "Model Size, Mb",
-                f"{fp32_model_size:.3f}",
-                f"{int8_model_size:.3f}",
-                f"Compression rate is {fp32_model_size / int8_model_size:.3f}",
-            ],
-            ["Performance, FPS", f"{fp32_fps:.3f}", f"{int8_fps:.3f}", f"Speedup x{int8_fps / fp32_fps:.3f}"],
-        ],
-        header=False,
-    )
-    print(table.draw())
+    tabular_data = [
+        [
+            "Accuracy@1",
+            acc1_fp32,
+            acc1_int8,
+            f"{acc1_int8_init:.3f} (init) + {acc1_int8 - acc1_int8_init:.3f} (tuned)",
+        ],
+        [
+            "Model Size, Mb",
+            fp32_model_size,
+            int8_model_size,
+            f"Compression rate is {fp32_model_size / int8_model_size:.3f}",
+        ],
+        ["Performance, FPS", fp32_fps, int8_fps, f"Speedup x{int8_fps / fp32_fps:.3f}"],
+    ]
+    print(create_table(["", "FP32", "INT8", "Summary"], tabular_data))

     return acc1_fp32, acc1_int8_init, acc1_int8, fp32_fps, int8_fps, fp32_model_size, int8_model_size

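Side note on the hunk above: numeric cells are now passed as raw floats because the shared create_table helper (changed in nncf/common/utils/helpers.py below) applies floatfmt=".3f", so the per-cell f-string formatting used with texttable is no longer needed. A minimal sketch with made-up values:

```python
from nncf.common.utils.helpers import create_table

# Illustrative numbers only, not results produced by the example.
fp32_fps, int8_fps = 123.456789, 246.9
row = ["Performance, FPS", fp32_fps, int8_fps, f"Speedup x{int8_fps / fp32_fps:.3f}"]

# The raw floats render as 123.457 and 246.900; only the derived
# "Speedup" string still needs manual formatting.
print(create_table(["", "FP32", "INT8", "Summary"], [row]))
```
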
25 changes: 24 additions & 1 deletion licensing/third-party-programs.txt
@@ -867,7 +867,7 @@ Apache License

 END OF TERMS AND CONDITIONS
 -------------------------------------------------------------
-* Other names and brands may be claimed as the property of others.
+* Other names and brands may be claimed as the property of others.

 Ma-Dan keras-yolo4

@@ -1594,3 +1594,26 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 SOFTWARE.

 -------------------------------------------------------------
+
+astanin/python-tabulate
+
+Copyright (c) 2011-2020 Sergey Astanin and contributors
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

18 changes: 14 additions & 4 deletions nncf/common/utils/helpers.py
@@ -12,20 +12,30 @@
 import itertools
 import os
 import os.path as osp
-from typing import Any, Dict, Hashable, List
+from typing import Any, Dict, Hashable, Iterable, List, Optional, Union

-from texttable import Texttable
+from tabulate import tabulate


-def create_table(header: List[str], rows: List[List[Any]]) -> str:
+def create_table(
+    header: List[str],
+    rows: List[List[Any]],
+    table_fmt: str = "mixed_grid",
+    max_col_widths: Optional[Union[int, Iterable[int]]] = None,
+) -> str:
     """
     Returns a string which represents a table with a header and rows.

     :param header: Table's header.
     :param rows: Table's rows.
+    :param table_fmt: Type of formatting of the table.
+    :param max_col_widths: Max widths of columns.
     :return: A string which represents a table with a header and rows.
     """
-    return Texttable().header(header).add_rows(rows, header=False).draw()
+    if not rows:
+        # For empty rows max_col_widths raises IndexError
+        max_col_widths = None
+    return tabulate(tabular_data=rows, headers=header, tablefmt=table_fmt, maxcolwidths=max_col_widths, floatfmt=".3f")


 def configure_accuracy_aware_paths(log_dir: str) -> str:
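For reference, a minimal usage sketch of the updated helper (the header, rows, and widths below are made up):

```python
from nncf.common.utils.helpers import create_table

header = ["Layer", "Shape", "Sparsity"]
rows = [["backbone.conv1", [64, 3, 7, 7], 0.5]]

# max_col_widths wraps long cells (the old texttable code used set_cols_width for this);
# with empty rows it is reset to None because tabulate raises IndexError otherwise.
print(create_table(header, rows, max_col_widths=[20, 16, 10]))
print(create_table(header, [], max_col_widths=[20, 16, 10]))  # header-only table still renders
```
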
25 changes: 12 additions & 13 deletions nncf/torch/pruning/base_algo.py
@@ -11,7 +11,6 @@
 from typing import Dict, List

 import torch
-from texttable import Texttable
 from torch import nn

 from nncf import NNCFConfig
@@ -21,6 +20,7 @@
 from nncf.common.pruning.clusterization import Clusterization
 from nncf.common.pruning.mask_propagation import MaskPropagationAlgorithm
 from nncf.common.pruning.utils import is_prunable_depthwise_conv
+from nncf.common.utils.helpers import create_table
 from nncf.config.extractors import extract_algo_specific_config
 from nncf.config.schemata.defaults import PRUNE_BATCH_NORMS
 from nncf.config.schemata.defaults import PRUNE_DOWNSAMPLE_CONVS
@@ -264,17 +264,16 @@ def get_stats_for_pruned_modules(self):
         """
         Creates a table with layer pruning level statistics
         """
-        table = Texttable()
-        table.set_cols_width([33, 20, 6, 8])
         header = ["Name", "Weight's shape", "Bias shape", "Layer PR"]
-        data = [header]
+        rows_data = []
         for minfo in self.pruned_module_groups_info.get_all_nodes():
-            drow = {h: 0 for h in header}
-            drow["Name"] = str(minfo.module_scope)
-            drow["Weight's shape"] = list(minfo.module.weight.size())
-            drow["Bias shape"] = list(minfo.module.bias.size()) if minfo.module.bias is not None else []
-            drow["Layer PR"] = self.pruning_level_for_mask(minfo)
-            row = [drow[h] for h in header]
-            data.append(row)
-        table.add_rows(data)
-        return table
+            rows_data.append(
+                [
+                    str(minfo.module_scope),
+                    list(minfo.module.weight.size()),
+                    list(minfo.module.bias.size()) if minfo.module.bias is not None else [],
+                    self.pruning_level_for_mask(minfo),
+                ]
+            )
+
+        return create_table(header, rows_data, max_col_widths=[33, 20, 6, 8])

2 changes: 1 addition & 1 deletion nncf/torch/pruning/filter_pruning/algo.py
@@ -622,7 +622,7 @@ def prepare_for_export(self):
         self._propagate_masks()

         pruned_layers_stats = self.get_stats_for_pruned_modules()
-        nncf_logger.info(f"Pruned layers statistics: \n{pruned_layers_stats.draw()}")
+        nncf_logger.info(f"Pruned layers statistics: \n{pruned_layers_stats}")

     def compression_stage(self) -> CompressionStage:
         target_pruning_level = self.scheduler.target_level

2 changes: 1 addition & 1 deletion setup.py
@@ -116,7 +116,7 @@ def find_version(*file_paths):
     "rich>=13.5.2",
     "scikit-learn>=0.24.0",
     "scipy>=1.3.2",
-    "texttable>=1.6.3",
+    "tabulate>=0.9.0",
     "tqdm>=4.54.1",
 ]
