# 🐌 SNPE env and example fix (microsoft#860)
## Describe your changes

There are a few issues with the current SNPE examples:
1. The libxx.so files under the customized `python3.6` environment need to be visible to the
system for the SNPE commands to run.
    - Putting `str(Path(f"{snpe_root}/python36-env/lib"))` on `LD_LIBRARY_PATH` fixes this
      (a minimal sketch follows this list).
2. SNPE evaluation extends the logits incorrectly.
    - Fixed in this PR.
3. We do not support TensorFlow model evaluation.
    - Updated the examples to skip input model evaluation.
4. Wrong metrics config for the inception example.
5. Import issue for vgg; the README.md now tells users how to update it manually as a workaround.
6. Python 3.6 seems a bit outdated for the latest SNPE updates, and the vgg example failed to run.
Updating the python env to 3.8 fixes it.
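
For reference, here is a minimal sketch of the environment fix from item 1, mirroring the change
made in `olive/snpe/utils/local.py`; the helper name and the default target arch are illustrative,
not part of the Olive API.

```python
import os
from pathlib import Path


def build_snpe_dev_env(snpe_root: str, target_arch_name: str = "x86_64-linux-clang") -> dict:
    """Sketch: build env vars so the SNPE CLI tools can find the SDK-local python env."""
    bin_path = str(Path(f"{snpe_root}/bin/{target_arch_name}"))
    lib_path = str(Path(f"{snpe_root}/lib/{target_arch_name}"))
    # The fix: the shared libraries of the python env bundled under SNPE_ROOT must also be
    # on LD_LIBRARY_PATH, otherwise the SNPE commands fail to load them at runtime.
    python_env_bin_path = str(Path(f"{snpe_root}/olive-pyenv/bin"))
    python_env_lib_path = str(Path(f"{snpe_root}/olive-pyenv/lib"))
    bin_path += os.pathsep + python_env_bin_path
    lib_path += os.pathsep + python_env_lib_path
    return {
        "LD_LIBRARY_PATH": lib_path,
        "PYTHONPATH": str(Path(f"{snpe_root}/lib/python")),
        "PATH": bin_path,
    }
```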


## Checklist before requesting a review
- [ ] Add unit tests for this change.
- [ ] Make sure all tests can pass.
- [ ] Update documents if necessary.
- [ ] Lint and apply fixes to your code by running `lintrunner -a`
- [ ] Is this a user-facing change? If yes, give a description of this
change to be included in the release notes.

## (Optional) Issue link
trajepl authored Jan 9, 2024
1 parent 7b8a267 commit 06e5c11
Showing 11 changed files with 163 additions and 63 deletions.
9 changes: 7 additions & 2 deletions examples/snpe/inception_snpe_qualcomm_npu/README.md
@@ -10,11 +10,16 @@ Outputs a summary of the accuracy and latency metrics for each SNPE model.
### Download and unzip SNPE SDK
Download the SNPE SDK zip following [instructions from Qualcomm](https://developer.qualcomm.com/software/qualcomm-neural-processing-sdk)

We test it with SNPE v2.18.0.240101.

Unzip the file and set the unzipped directory path as environment variable `SNPE_ROOT`

### Configure SNPE
```
python -m olive.snpe.configure
```sh
# in general, python 3.8 is recommended
python -m olive.snpe.configure --py_version 3.8
# only when the tensorflow 1.15.0 is needed, use python 3.6
python -m olive.snpe.configure --py_version 3.6
```

### Pip requirements
59 changes: 48 additions & 11 deletions examples/snpe/inception_snpe_qualcomm_npu/inception_config.json
@@ -1,5 +1,5 @@
{
"input_model":{
"input_model": {
"type": "TensorFlowModel",
"config": {
"model_path": "models/inception_v3.pb"
@@ -17,8 +17,17 @@
},
"params_config": {
"data_dir": "data",
"input_names": ["input"],
"input_shapes": [[1, 299, 299, 3]],
"input_names": [
"input"
],
"input_shapes": [
[
1,
299,
299,
3
]
],
"input_order_file": "input_order.txt",
"annotations_file": "labels.npy",
"batch_size": 7
@@ -27,14 +36,22 @@
},
"evaluators": {
"common_evaluator": {
"metrics":[
"metrics": [
{
"name": "accuracy",
"type": "accuracy",
"sub_types": [
{"name": "accuracy_score", "priority": 1}
{
"name": "accuracy_score",
"priority": 1,
"metric_config": {
"task": "multiclass",
"num_classes": "100",
"top_k": 1
}
}
],
"user_config":{
"user_config": {
"inference_settings": {
"snpe": {
"return_numpy_results": true
@@ -47,7 +64,15 @@
"name": "latency",
"type": "latency",
"sub_types": [
{"name": "avg", "priority": 2, "metric_config": {"warmup_num": 0, "repeat_test_num": 5, "sleep_num": 2}}
{
"name": "avg",
"priority": 2,
"metric_config": {
"warmup_num": 0,
"repeat_test_num": 5,
"sleep_num": 2
}
}
],
"user_config": {
"inference_settings": {
@@ -67,9 +92,20 @@
"snpe_conversion": {
"type": "SNPEConversion",
"config": {
"input_names": ["input"],
"input_shapes": [[1, 299, 299, 3]],
"output_names": ["InceptionV3/Predictions/Reshape_1"]
"input_names": [
"input"
],
"input_shapes": [
[
1,
299,
299,
3
]
],
"output_names": [
"InceptionV3/Predictions/Reshape_1"
]
}
},
"snpe_quantization": {
@@ -82,9 +118,10 @@
},
"engine": {
"search_strategy": false,
"evaluate_input_model": false,
"evaluator": "common_evaluator",
"cache_dir": "cache",
"output_dir" : "outputs",
"output_dir": "outputs",
"output_name": "snpe_quantized"
}
}
18 changes: 16 additions & 2 deletions examples/snpe/vgg_snpe_qualcomm_npu/README.md
@@ -8,11 +8,16 @@ Performs optimization pipeline:
### Download and unzip SNPE SDK
Download the SNPE SDK zip following [instructions from Qualcomm](https://developer.qualcomm.com/software/qualcomm-neural-processing-sdk)

We test it with SNPE v2.18.0.240101.

Unzip the file and set the unzipped directory path as environment variable `SNPE_ROOT`.

### Configure SNPE
```
python -m olive.snpe.configure
```sh
# in general, python 3.8 is recommended
python -m olive.snpe.configure --py_version 3.8
# only when the tensorflow 1.15.0 is needed, use python 3.6
python -m olive.snpe.configure --py_version 3.6
```

### Pip requirements
@@ -32,3 +37,12 @@ Run the conversion and quantization locally. Only supports `x64-Linux`.
```
python -m olive.workflows.run --config vgg_config.json
```

## Issues

1. "Module 'qti.aisw.converters' has no attribute 'onnx'":
Refer to this: https://developer.qualcomm.com/comment/21810#comment-21810,
change the import statement in `{SNPE_ROOT}/lib/python/qti/aisw/converters/onnx/onnx_to_ir.py:L30` to:
```python
from qti.aisw.converters.onnx import composable_custom_op_utils as ComposableCustomOp
```
2 changes: 2 additions & 0 deletions examples/snpe/vgg_snpe_qualcomm_npu/vgg_config.json
@@ -39,7 +39,9 @@
}
},
"engine": {
"log_severity_level": 0,
"search_strategy": false,
"clean_cache": true,
"cache_dir": "cache",
"output_dir" : "outputs"
}
3 changes: 2 additions & 1 deletion examples/stable_diffusion/openvino/stable_diffusion.py
@@ -92,7 +92,8 @@ def __init__(
[CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically
the clip-vit-large-patch14(https://huggingface.co/openai/clip-vit-large-patch14) variant.
tokenizer (CLIPTokenizer):
Tokenizer of class CLIPTokenizer(https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).
Tokenizer of class CLIPTokenizer
(https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).
unet (Model): Conditional U-Net architecture to denoise the encoded image latents.
scheduler (SchedulerMixin):
A scheduler to be used in combination with unet to denoise the encoded image latents. Can be one of
4 changes: 2 additions & 2 deletions olive/evaluator/olive_evaluator.py
@@ -936,8 +936,8 @@ def _inference(
raise ValueError("Post processing function is required for SNPE model")
preds.extend(outputs.tolist())
targets.extend(labels.tolist())
# TODO(trajep): verify if we need to return logits
logits.extend(result.tolist())
lg = result["results"].get("logits")
logits.extend(lg.to_list() if lg else [])
return OliveModelOutput(preds=preds, logits=logits), targets

def _evaluate_accuracy(
21 changes: 16 additions & 5 deletions olive/snpe/configure.py
@@ -2,6 +2,7 @@
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# --------------------------------------------------------------------------
import argparse
import logging
import shutil
from importlib import resources
@@ -13,16 +14,16 @@
logger = logging.getLogger("olive.snpe.configure")


def dev():
def dev(args):
snpe_arch = get_snpe_target_arch(False)
if snpe_arch != "x64-Linux":
return

get_snpe_root()

logger.info(f"Configuring SNPE for {snpe_arch}...")
with resources.path("olive.snpe", "create_python36_env.sh") as create_python36_env_path:
cmd = f"bash {create_python36_env_path}"
logger.info(f"Configuring SNPE for {snpe_arch} with python{args.py_version}...")
with resources.path("olive.snpe", "create_python_env.sh") as create_python_env_path:
cmd = f"bash {create_python_env_path} -v {args.py_version}"
return_code, stdout, stderr = run_subprocess(cmd)
if return_code != 0:
raise RuntimeError(f"Failed to create python36 environment. stdout: {stdout}, stderr: {stderr}")
@@ -66,5 +67,15 @@ def eval(): # noqa: A001 #pylint: disable=redefined-builtin


if __name__ == "__main__":
dev()
# create args for py_version
parser = argparse.ArgumentParser("Olive SNPE: Configure")
parser.add_argument(
"--py_version",
type=str,
help="Python version, use 3.6 for tensorflow 1.15. Otherwise 3.8",
required=True,
choices=["3.6", "3.8"],
)
args = parser.parse_args()
dev(args)
eval()
33 changes: 0 additions & 33 deletions olive/snpe/create_python36_env.sh

This file was deleted.

61 changes: 61 additions & 0 deletions olive/snpe/create_python_env.sh
@@ -0,0 +1,61 @@
#!/usr/bin/env bash
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License.
# --------------------------------------------------------------------------
set -eux

# This script creates a python 3.6 environment in $SNPE_ROOT/olive-pyenv
# and installs the required packages for SNPE-v2.18.0.240101

# Usage: ./create_python_env.sh -v/--version <python_version>
while [[ "$#" -gt 0 ]]; do
key="$1"
case $key in
-v|--version)
PY_VERSION="$2"
shift
shift
;;
*)
echo "Unknown option: $key"
exit 1
;;
esac
done

PY_ENV_NAME=olive-pyenv
FILES_DIR=$SNPE_ROOT/python-env-setup
rm -rf "$FILES_DIR"
mkdir "$FILES_DIR"

# Install conda if not already installed
if ! command -v conda; then
# Install conda
curl -fsSL -o "$FILES_DIR"/install_conda.sh https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh
sh "$FILES_DIR"/install_conda.sh -b -p "$FILES_DIR"/miniconda
CONDA=$FILES_DIR/miniconda/bin/conda
else
CONDA=conda
fi

# Create python environment
$CONDA create -y -p "$FILES_DIR"/$PY_ENV_NAME python="$PY_VERSION"

# Install snpe requirements
"$FILES_DIR"/$PY_ENV_NAME/bin/python -m pip install --upgrade pip
if [ "$PY_VERSION" == "3.6" ]; then
"$FILES_DIR"/$PY_ENV_NAME/bin/python -m pip install onnx==1.11.0 onnx-simplifier packaging tensorflow==1.15.0 pyyaml
elif [ "$PY_VERSION" == "3.8" ]; then
"$FILES_DIR"/$PY_ENV_NAME/bin/python -m pip install onnx onnx-simplifier packaging tensorflow pyyaml
else
echo "Unsupported python version: $PY_VERSION, only 3.6 and 3.8 are supported"
exit 1
fi


rm -rf "${SNPE_ROOT:?}"/$PY_ENV_NAME
mv "$FILES_DIR"/$PY_ENV_NAME "$SNPE_ROOT"/$PY_ENV_NAME

# Remove all unnecessary files
rm -rf "$FILES_DIR"
14 changes: 8 additions & 6 deletions olive/snpe/utils/local.py
@@ -90,20 +90,22 @@ def get_snpe_env(dev: bool = False) -> dict:

bin_path = str(Path(f"{snpe_root}/bin/{target_arch_name}"))
lib_path = str(Path(f"{snpe_root}/lib/{target_arch_name}"))
python_env_bin_path = str(Path(f"{snpe_root}/olive-pyenv/bin"))
python_env_lib_path = str(Path(f"{snpe_root}/olive-pyenv/lib"))

env = {}
delimiter = os.path.pathsep
if platform.system() == "Linux":
env["LD_LIBRARY_PATH"] = lib_path
if dev:
python36_env_path = str(Path(f"{snpe_root}/python36-env/bin"))
if not Path(python36_env_path).exists():
if not Path(python_env_bin_path).exists():
raise FileNotFoundError(
f"Path {python36_env_path} does not exist. Please run 'python -m olive.snpe.configure' to add the"
" missing file"
f"Path {python_env_bin_path} does not exist."
" Please run 'python -m olive.snpe.configure' to add the missing file"
)
bin_path += delimiter + python36_env_path
bin_path += delimiter + python_env_bin_path
lib_path += delimiter + python_env_lib_path
env["PYTHONPATH"] = str(Path(f"{snpe_root}/lib/python"))
env["LD_LIBRARY_PATH"] = lib_path
bin_path += delimiter + "/usr/bin"
elif platform.system() == "Windows":
if target_arch == "ARM64-Windows":
2 changes: 1 addition & 1 deletion setup.py
@@ -86,7 +86,7 @@ def get_extra_deps(rel_path):
"olive": ["extra_dependencies.json"],
"olive.auto_optimizer": ["config_template/*.yaml"],
"olive.engine.packaging": ["sample_code/*/*/*"],
"olive.snpe": ["create_python36_env.sh", "copy_libcdsprpc.ps1"],
"olive.snpe": ["create_python_env.sh", "copy_libcdsprpc.ps1"],
"olive.systems.docker": ["Dockerfile*"],
},
data_files=[],
