Support adding a hook for the root module in intermediate layers KD (#1029)
Signed-off-by: Ye, Xinyu <[email protected]>
XinyuYe-Intel authored Jun 27, 2023
1 parent da99d28 commit 13aec25
Showing 2 changed files with 16 additions and 12 deletions.
13 changes: 7 additions & 6 deletions neural_compressor/compression/distillation/criterions.py
@@ -881,13 +881,14 @@ def register_hooks_for_models(self):
         """
         from neural_compressor.experimental.common import torch_utils
         def register_model_forward_hook(model, path, output_process='', student=False):
-            nodes = path.split('.')
             module = model
-            for node in nodes:
-                try:
-                    module = module.__getattr__(node)
-                except:
-                    raise AttributeError('There is no path {} in the model.'.format(path))
+            if path != '':
+                nodes = path.split('.')
+                for node in nodes:
+                    try:
+                        module = module.__getattr__(node)
+                    except:
+                        raise AttributeError('There is no path {} in the model.'.format(path))
             return module.register_forward_hook(
                 torch_utils.get_activation(path, output_process, student)
             )
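The behavioral change above: previously register_model_forward_hook always split path on '.' and walked the attribute chain, so an empty path (meaning the root module) raised AttributeError, because ''.split('.') yields [''] and no submodule is named ''. With the new if path != '' guard, an empty path resolves to the model itself, so an intermediate layers KD mapping can hook the whole model's output. Below is a minimal, self-contained sketch of that resolution logic in plain PyTorch, not the library code itself; resolve_module, TinyNet, and make_hook are illustrative names only.

import torch
import torch.nn as nn

# Sketch of the updated lookup: an empty path resolves to the root module,
# a dotted path walks getattr() through submodules (including Sequential indices).
def resolve_module(model, path=''):
    module = model
    if path != '':
        for node in path.split('.'):
            module = getattr(module, node)
    return module

class TinyNet(nn.Module):
    def __init__(self):
        super().__init__()
        self.layer1 = nn.Sequential(nn.Conv2d(3, 4, 3, padding=1), nn.ReLU())

    def forward(self, x):
        return self.layer1(x)

model = TinyNet()
captured = {}

def make_hook(name):
    # Forward hook that records each hooked module's output under its path name.
    def hook(module, inputs, output):
        captured[name] = output
    return hook

# '' now registers the hook on the root module; dotted paths reach submodules.
for path in ['', 'layer1.0']:
    resolve_module(model, path).register_forward_hook(make_hook(path))

model(torch.randn(1, 3, 8, 8))
print(sorted(captured))  # ['', 'layer1.0']

Running this captures both the final model output (path '') and the first conv's feature map (path 'layer1.0'), which is the kind of student/teacher pairing the intermediate layers KD loss consumes.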
15 changes: 9 additions & 6 deletions test/distillation/test_distillation_2.x.py
@@ -72,12 +72,15 @@ def test_distillation(self):
     def test_distillation_intermediate_layers(self):
         criterion = nn.CrossEntropyLoss()
         distillation_criterion_conf = IntermediateLayersKnowledgeDistillationLossConfig(
-            layer_mappings=[['layer1.0', ],
-                            [['layer1.1.conv1', ''], ['layer1.1.conv1', '0']],],
-            loss_types=['KL', 'MSE'],
-            loss_weights=[0.5, 0.5])
-
-        distillation_criterion_conf.config.IntermediateLayersKnowledgeDistillationLoss.layer_mappings[1][1][-1] = \
+            layer_mappings=[
+                ['', ],
+                ['layer1.0', ],
+                [['layer1.1.conv1', ''], ['layer1.1.conv1', '0']],
+            ],
+            loss_types=['L1', 'KL', 'MSE'],
+            loss_weights=[0.5, 0.2, 0.3])
+
+        distillation_criterion_conf.config.IntermediateLayersKnowledgeDistillationLoss.layer_mappings[2][1][-1] = \
             lambda x: x[:, :2,...]
         optimizer = torch.optim.SGD(self.student_model.parameters(), lr=0.0001)
         conf = DistillationConfig(self.teacher_model, distillation_criterion_conf)
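The test update mirrors the library change: a mapping with the empty path '' is prepended so the student's and teacher's root-module outputs are also compared, a matching 'L1' entry is added to loss_types (which pairs positionally with layer_mappings), and loss_weights is rebalanced to 0.5/0.2/0.3. The hard-coded index into layer_mappings moves from [1] to [2] because of the prepended entry. The lambda assigned there is an output_process applied to the hooked output before the distillation loss consumes it; it keeps only the first two channels of the feature map. A tiny illustration of that slicing, with a tensor shape invented purely for demonstration:

import torch

# The output_process used in the test: keep the first two channels of a feature map.
output_process = lambda x: x[:, :2, ...]

feature_map = torch.randn(8, 64, 28, 28)   # (batch, channels, height, width)
print(output_process(feature_map).shape)   # torch.Size([8, 2, 28, 28])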
