Fix handling of QKeras activations passed as an argument
AdrianAlan committed May 23, 2022
1 parent 03fd5d4 commit 9d7df61
Showing 2 changed files with 37 additions and 14 deletions.
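
The case being fixed is a QKeras quantizer passed directly as a layer's activation argument, rather than added to the model as a separate QActivation layer. A minimal sketch of such a model, assuming the standard QKeras and Keras APIs (the layer width and quantizer settings are arbitrary illustration values, not taken from this commit):

# Sketch of a QKeras layer whose activation is passed as an argument.
# Width and quantizer parameters are arbitrary illustration values.
from qkeras import QDense, quantized_bits, quantized_relu
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model

inputs = Input(shape=(16,))
outputs = QDense(
    8,
    kernel_quantizer=quantized_bits(8, 0),
    bias_quantizer=quantized_bits(8, 0),
    activation=quantized_relu(8),  # activation given as an argument, not as a QActivation layer
)(inputs)
model = Model(inputs, outputs)
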
34 changes: 25 additions & 9 deletions hls4ml/converters/keras_to_hls.py
@@ -316,16 +316,32 @@ def keras_to_hls(config):
         layer_list.append( layer )
         if 'activation' in layer and layer['class_name'] not in activation_layers + recurrent_layers:# + qkeras_layers:
             act_layer = {}
-            act_layer['name'] = layer['name'] + '_' + layer['activation']
-            act_layer['activation'] = layer['activation']
-            if 'activ_param' in layer:
-                act_layer['activ_param'] = layer['activ_param']
-                act_layer['class_name'] = layer['activation']
-            elif layer['activation'] == 'softmax':
-                act_layer['class_name'] = 'Softmax'
-                act_layer['axis'] = -1
+            # Workaround for QKeras activations passed as an argument
+            if isinstance(layer['activation'], dict):
+                act_details = layer['activation']
+                act_layer['class_name'] = 'QActivation'
+                act_layer['config'] = {
+                    'name': layer['name'] + '_' + act_details['class_name'],
+                    'activation': act_details['class_name']
+                }
+                act_layer, output_shape = layer_handlers['QActivation'](
+                    act_layer,
+                    None,
+                    [output_shape],
+                    reader,
+                    config
+                )
             else:
-                act_layer['class_name'] = 'Activation'
+                act_layer['name'] = layer['name'] + '_' + layer['activation']
+                act_layer['activation'] = layer['activation']
+                if 'activ_param' in layer:
+                    act_layer['activ_param'] = layer['activ_param']
+                    act_layer['class_name'] = layer['activation']
+                elif layer['activation'] == 'softmax':
+                    act_layer['class_name'] = 'Softmax'
+                    act_layer['axis'] = -1
+                else:
+                    act_layer['class_name'] = 'Activation'
             inputs_map[layer['name']] = act_layer['name']
             if output_layers is not None and layer['name'] in output_layers:
                 output_layers = [act_layer['name'] if name == layer['name'] else name for name in output_layers]
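For context, the new isinstance check above distinguishes a plain string activation from a serialized QKeras quantizer, which arrives as a dict carrying the quantizer's class_name. A rough standalone illustration of that dispatch (the field names under 'config' are assumed for illustration; only 'class_name' is read by the converter):

# Illustrative layer dicts; only 'class_name' inside the activation dict is
# what the converter code above actually reads.
plain_layer = {'name': 'dense', 'activation': 'relu'}
qkeras_layer = {
    'name': 'q_dense',
    'activation': {'class_name': 'quantized_relu', 'config': {'bits': 8, 'integer': 0}},
}

for layer in (plain_layer, qkeras_layer):
    if isinstance(layer['activation'], dict):
        # QKeras activation passed as an argument -> handled as QActivation
        print(layer['name'], '-> QActivation:', layer['activation']['class_name'])
    else:
        # Plain Keras activation string -> handled as a regular Activation layer
        print(layer['name'], '-> Activation:', layer['activation'])
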
17 changes: 12 additions & 5 deletions hls4ml/utils/config.py
@@ -177,14 +177,21 @@ def config_from_keras_model(model, granularity='model', default_precision='ap_fi

         print('Layer name: {}, layer type: {}'.format(layer['name'], layer['class_name']))
         layer_list.append( layer )
-        if 'activation' in layer['config'] and layer['class_name'] not in activation_layers + qkeras_layers:
+        if 'activation' in layer['config'] and layer['class_name'] not in activation_layers:
             act_layer = {}
-            act_layer['name'] = layer['name'] + '_' + layer['config']['activation']
-            act_layer['class_name'] = 'Activation'
-            print(' -> Activation ({}), layer name: {}'.format(layer['config']['activation'], layer['name']))
+            act_details = layer['config']['activation']
+            if isinstance(act_details, dict):
+                precision = _get_precision_from_quantizer(act_details)
+                act_details = act_details['class_name']
+                act_layer['precision'] = {}
+                act_layer['precision']['result'] = precision
+                act_layer['class_name'] = 'QActivation'
+            else:
+                act_layer['class_name'] = 'Activation'
+            act_layer['name'] = layer['name'] + '_' + act_details
+            print(' -> Activation ({}), layer name: {}'.format(act_details, layer['name']))
             layer_list.append(act_layer)
 
 
     def make_layer_config(layer):
         layer_config = {}
         if layer['class_name'] in dense_layers + conv_layers + rnn_layers:
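With both files updated, generating an hls4ml configuration for such a model should pick up the quantized activation and derive its precision from the quantizer. A sketch of the expected usage, reusing the model from the first example above (the exact entry names in the resulting config are an expectation, not output captured from this commit):

# Sketch: generate a per-layer config for the QKeras model built earlier.
import hls4ml

config = hls4ml.utils.config_from_keras_model(model, granularity='name')
# The config is expected to now contain a QActivation entry for the
# quantized_relu activation, with its precision taken from the quantizer.
print(config)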