Skip to content
This repository has been archived by the owner on Jan 24, 2024. It is now read-only.

Update of Parser #458

Merged
merged 18 commits into from
Sep 29, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions docs/Manual/Converter_ch.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,8 +13,8 @@ Anakin 模型转换器输入支持 Caffe 和 Fluid 两种格式的预测模型

## 系统要求

- Python 2.7+
- Protobuf 3.1+(务必注意 Python 与系统环境 Protobuf 版本一致)
- Python 2.7
- Protobuf 3.1+(务必注意 Pip Protobuf 与系统环境 Protobuf 版本一致)
- PaddlePaddle 0.12.0+ (Fluid 模式下)
- flask, bson, matplotlib, scikit-image
- tkinter
Expand Down
2 changes: 1 addition & 1 deletion docs/Manual/INSTALL_ch.md
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@

#### Anakin - CPU ###

在编译 CPU 版本前,我们建议您升级 GCC-G++ 至 5.4.0 以上,链接的 libm.so 库版本高于 2.17,以发挥更佳性能。
在编译 CPU 版本前,我们建议您升级 GCC-G++ 至 5.4.0,链接的 libm.so 库版本为 2.17 ~ 2.23,以发挥更佳性能。

#### Anakin - AMDGPU ###

Expand Down
16 changes: 10 additions & 6 deletions framework/operators/normalize.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -32,12 +32,16 @@ Status NormalizeHelper<Ttype, Dtype, Ptype>::InitParam() {
auto eps = GET_PARAMETER(float, eps);
auto p = GET_PARAMETER(int, p);

using pblock_type = PBlock<typename DataTypeWarpper<Dtype>::type, Ttype>;
auto input_scale = GET_PARAMETER(pblock_type, weight_1);

saber::NormalizeParam<Tensor4d<Ttype, Dtype>> normalize_param(is_across_spatial, is_shared_channel, \
&(input_scale.d_tensor()), eps, p);
_param_normalize = normalize_param;
if (FIND_PARAMETER(weight_1)) {
using pblock_type = PBlock<typename DataTypeWarpper<Dtype>::type, Ttype>;
auto input_scale = GET_PARAMETER(pblock_type, weight_1);
saber::NormalizeParam<Tensor4d<Ttype, Dtype>> normalize_param(is_across_spatial, is_shared_channel, \
&(input_scale.d_tensor()), eps, p);
_param_normalize = normalize_param;
} else {
saber::NormalizeParam<Tensor4d<Ttype, Dtype>> normalize_param(is_across_spatial, is_shared_channel, eps, p);
_param_normalize = normalize_param;
}
return Status::OK();
}

Expand Down
10 changes: 10 additions & 0 deletions saber/saber_funcs_param.h
Original file line number Diff line number Diff line change
Expand Up @@ -1515,7 +1515,17 @@ struct NormalizeParam {
eps = eps_in;
CHECK_EQ(p == 2 || p == 1, true) << "only support L1 and L2 norm";
}
// Construct a NormalizeParam that has no learned scale tensor.
// is_across_spatial : normalize over the whole spatial extent instead of per position
// is_shared_channel : whether the (absent) scale would be shared across channels
// eps_in            : epsilon added for numerical stability (default 1e-6)
// pin               : norm order; only L1 (1) and L2 (2) are supported
NormalizeParam(bool is_across_spatial, bool is_shared_channel,
               float eps_in = 1e-6f, int pin = 2) {
    across_spatial = is_across_spatial;
    channel_shared = is_shared_channel;
    eps = eps_in;
    p = pin;
    // No weight blob supplied: mark the scale as absent.
    has_scale = false;
    scale = nullptr;
    CHECK_EQ(p == 1 || p == 2, true) << "only support L1 and L2 norm";
}
NormalizeParam(const NormalizeParam<opTensor>& right) {
channel_shared = right.channel_shared;
across_spatial = right.across_spatial;
Expand Down
24 changes: 24 additions & 0 deletions tools/external_converter_v2/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
# -*- coding: utf-8 -*-

import os
import sys
import subprocess
from yaml import load, dump
try:
Expand Down Expand Up @@ -38,6 +39,8 @@ def __init__(self, config_file_path=ConfigFilePath):
# parse TARGET info from config file.
if self.framework == "CAFFE":
proto_list = data['TARGET'][self.framework]['ProtoPaths']
assert type(proto_list) == list, \
"The ProtoPaths format maybe incorrect, please check if there is any HORIZONTAL LINE."
self.__generate_pbs(proto_list)
self.framework_config_dict = data['TARGET'][self.framework]
elif self.framework == "PADDLE":
Expand All @@ -58,6 +61,26 @@ def __init__(self, config_file_path=ConfigFilePath):
except NameError:
raise

def check_protobuf_version(self):
    """Verify the pip protobuf package matches the system protoc binary.

    Reads the pip package version by executing google/protobuf/__init__.py
    found on sys.path in an isolated namespace (avoids importing and caching
    the package), then compares it against `protoc --version`.  Both major
    versions must be >= 3 and the pip minor version must be at least the
    protoc minor version.

    Raises:
        OSError: if the `protoc` binary is not on PATH.
        AssertionError: if the pip package is missing or versions mismatch.
    """
    pip_version = None
    for path in sys.path:
        module_path = os.path.join(path, 'google', 'protobuf', '__init__.py')
        if os.path.exists(module_path):
            # exec into an explicit dict: writing to locals() inside a
            # function is unreliable on Python 3.
            namespace = {'__name__': '__main__'}
            with open(module_path) as f:
                exec(f.read(), namespace)
            pip_version = namespace.get('__version__')
            break
    assert pip_version is not None, \
        "Can not find pip Protobuf package in sys.path."
    try:
        output = subprocess.check_output(["protoc", "--version"])
    except OSError:
        raise OSError('Can not find Protobuf in system environment.')
    if isinstance(output, bytes):
        # Python 3: check_output returns bytes.
        output = output.decode()
    protoc_out = output.split()[1]
    # list(...) so the versions are indexable on Python 3 as well.
    sys_versions = list(map(int, protoc_out.split('.')))
    pip_versions = list(map(int, pip_version.split('.')))
    assert sys_versions[0] >= 3 and pip_versions[0] >= 3, \
        "Protobuf version must be greater than 3.0. Please refer to the Anakin Docs."
    assert pip_versions[1] >= sys_versions[1], \
        "ERROR: Protobuf must be the same.\nSystem Protobuf %s\nPython Protobuf %s\n" \
        % (protoc_out, pip_version) + "Try to execute pip install protobuf==%s" % (protoc_out)

def generate_pbs_of_anakin(self):
protoFilesStr = subprocess.check_output(["ls", "parser/proto/"])
filesList = protoFilesStr.split('\n')
Expand All @@ -77,6 +100,7 @@ def __generate_pbs(self, proto_list, default_save_path="parser/pbs/"):
proto_list: ['/path/to/proto_0','/path/to/proto_1', ... ]
default_save_path: default saved to 'parser/pbs/'
"""
self.check_protobuf_version()
for pFile in proto_list:
subprocess.check_call(['protoc', '-I',
os.path.dirname(pFile) + "/",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1098,13 +1098,14 @@ def Parser_axpy(args):
def Parser_priorbox(args):
layer = args[1]
prior_box_param = layer.prior_box_param
OpsRegister()["PriorBox"].min_size = list(prior_box_param.min_size)
OpsRegister()["PriorBox"].max_size = list(prior_box_param.max_size)
OpsRegister()["PriorBox"].aspect_ratio = list(prior_box_param.aspect_ratio)
OpsRegister()["PriorBox"].fixed_size = list(prior_box_param.fixed_size)
OpsRegister()["PriorBox"].fixed_ratio = list(prior_box_param.fixed_ratio)
OpsRegister()["PriorBox"].density = list(prior_box_param.density)
OpsRegister()["PriorBox"].aspect_ratio = list(prior_box_param.aspect_ratio)
if len(prior_box_param.aspect_ratio) > 0:
OpsRegister()["PriorBox"].min_size = list(prior_box_param.min_size)
OpsRegister()["PriorBox"].max_size = list(prior_box_param.max_size)
OpsRegister()["PriorBox"].aspect_ratio = list(prior_box_param.aspect_ratio)
if len(prior_box_param.density) > 0:
OpsRegister()["PriorBox"].fixed_size = list(prior_box_param.fixed_size)
OpsRegister()["PriorBox"].fixed_ratio = list(prior_box_param.fixed_ratio)
OpsRegister()["PriorBox"].density = list(prior_box_param.density)
OpsRegister()["PriorBox"].is_flip = prior_box_param.flip
OpsRegister()["PriorBox"].is_clip = prior_box_param.clip
OpsRegister()["PriorBox"].variance = list(prior_box_param.variance)
Expand Down Expand Up @@ -1173,6 +1174,16 @@ def Parser_relu6(args):
OpsRegister()["Activation"].type = "ClippedRelu"
OpsRegister()["Activation"].clip_relu_num = 6

@ParserFeedDecorator("Interp")
def Parser_interp(args):
layer = args[1]
interp_param = layer.interp_param
OpsRegister()["Interp"].height = interp_param.height
OpsRegister()["Interp"].width = interp_param.width
OpsRegister()["Interp"].zoom_factor = interp_param.zoom_factor
OpsRegister()["Interp"].shrink_factor = interp_param.shrink_factor
OpsRegister()["Interp"].pad_beg = interp_param.pad_beg
OpsRegister()["Interp"].pad_end = interp_param.pad_end

# caffe layer parameter parser map
CAFFE_LAYER_PARSER = {
Expand Down Expand Up @@ -1243,5 +1254,6 @@ def Parser_relu6(args):
"ReLU6": OpsParam().set_parser(Parser_relu6),
"Normalization": OpsParam().set_parser(Parser_normalize),
"ShuffleChannel": OpsParam().set_parser(Parser_ShuffleChannel),
"RoisAnchorFeature": OpsParam().set_parser(Parser_rois_anchor_feature)
"RoisAnchorFeature": OpsParam().set_parser(Parser_rois_anchor_feature),
"Interp": OpsParam().set_parser(Parser_interp)
}
32 changes: 32 additions & 0 deletions tools/external_converter_v2/parser/caffe/parser_caffe.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@ def _DetectionArch(self):
self._FilterNet()
self._SplitInception(False)
self._InsSplitBtwSliceConcat()
self._InsSplitBtwSliceEltwise()
self._InsertSplits()
self._ScatterInputLayer()
# create input node
Expand Down Expand Up @@ -267,6 +268,37 @@ def _InsSplitBtwSliceConcat(self):
del self.net_parameter.layers[:]
self.net_parameter.layers.extend(new_param.layer)

def _InsSplitBtwSliceEltwise(self):
    """Insert a Split layer between each Slice output that feeds an Eltwise.

    Currently, the connection between Slice and Eltwise must be implemented
    via Split: every blob produced by a Slice layer and consumed by an
    Eltwise layer gets a pass-through Split layer inserted after it.
    (The original docstring said "Concat" — copy-paste from
    _InsSplitBtwSliceConcat; this method handles Eltwise.)
    """
    # Older prototxt versions use `layers`, newer ones use `layer`.
    layers = self.net_parameter.layer or self.net_parameter.layers
    slice_top_blobs = list()
    eltwise_bottom_blobs = list()
    for layer in layers:
        if layer.type == 'Slice':
            slice_top_blobs.extend(layer.top)
        elif layer.type == 'Eltwise':
            eltwise_bottom_blobs.extend(layer.bottom)
    # Blobs that flow directly from a Slice into an Eltwise.
    intersection_blobs = list(set(slice_top_blobs).intersection(set(eltwise_bottom_blobs)))
    new_param = NetParameter()
    for layer in layers:
        new_layer = new_param.layer.add()
        new_layer.CopyFrom(layer)
        if layer.type == 'Slice':
            for top_blob in layer.top:
                if top_blob in intersection_blobs:
                    # Pass-through Split: same blob name on bottom and top.
                    split_param = new_param.layer.add()
                    split_param.bottom.append(top_blob)
                    split_param.top.append(top_blob)
                    split_param.name = 'Split_' + top_blob
                    split_param.type = 'Split'
    # Write the rebuilt layer list back into whichever field this
    # prototxt version actually uses.
    if self.net_parameter.layer:
        del self.net_parameter.layer[:]
        self.net_parameter.layer.extend(new_param.layer)
    else:
        del self.net_parameter.layers[:]
        self.net_parameter.layers.extend(new_param.layer)

def _InsertSplits(self):
"""
Expand Down
1 change: 0 additions & 1 deletion tools/external_converter_v2/parser/fluid/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
#! /usr/bin/env python
# Copyright (c) 2017, Cuichaowen. All rights reserved.
# -*- coding: utf-8 -*-

from parser_fluid import *
7 changes: 6 additions & 1 deletion tools/external_converter_v2/parser/fluid/fluid_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -224,7 +224,12 @@ def shape_by_var_name(self, var_name, layout = 'NCHW'):
def np_data_by_var_name(self, var_name):
    """Fetch the named variable from self.scope as a numpy array.

    Supports both newer Fluid releases (private `_fetch_var`) and older
    ones (public `fetch_var`); raises if neither API is present.
    """
    executor_mod = fluid.executor
    if hasattr(executor_mod, '_fetch_var'):
        return executor_mod._fetch_var(str(var_name), self.scope, True)
    if hasattr(executor_mod, 'fetch_var'):
        return executor_mod.fetch_var(var_name, self.scope, True)
    raise NameError('ERROR: Unknown Fluid version.')

def dtype_by_var_name(self, var_name):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -447,6 +447,17 @@ def Parser_elementwise_mul(args):
else:
OpsRegister()["Scale"].bias_term = False

@ParserFeedDecorator("Flatten")
def Parser_flatten(args):
pass

@ParserFeedDecorator("assign_value")
def Parser_assign_value(args):
pass

@ParserFeedDecorator("shape")
def Parser_shape(args):
pass

FLUID_NODE_FILLER = {
"feed":OpsParam().set_parser(Parser_feed),
Expand Down Expand Up @@ -488,4 +499,7 @@ def Parser_elementwise_mul(args):
"layer_norm":OpsParam().set_parser(Parser_layer_norm),
"dropout":OpsParam().set_parser(Parser_dropout),
"scale":OpsParam().set_parser(Parser_scale),
"flatten":OpsParam().set_parser(Parser_flatten),
"assign_value":OpsParam().set_parser(Parser_assign_value),
"shape":OpsParam().set_parser(Parser_shape),
}
Loading