Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update to Alibi 0.5.5 #2571

Merged
merged 1 commit into from
Oct 23, 2020
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion components/alibi-explain-server/.gitignore
Original file line number Diff line number Diff line change
@@ -1 +1,2 @@
models
models
test_models
92 changes: 57 additions & 35 deletions components/alibi-explain-server/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@ IMAGE=alibiexplainer

.PHONY: get_apis
get_apis:
# Protobuf
cp ${SELDON_CORE_DIR}/proto/prediction.proto alibiexplainer/proto/
$(MAKE) -C ${SELDON_CORE_DIR}/proto/tensorflow/ create_protos
cp -r $(SELDON_CORE_DIR)/proto/tensorflow/tensorflow \
Expand All @@ -18,7 +17,6 @@ install_requirements:

.PHONY: build_apis
build_apis: get_apis install_requirements
# Protobuf
cd alibiexplainer && python \
-m grpc.tools.protoc \
-I./ \
Expand All @@ -27,18 +25,6 @@ build_apis: get_apis install_requirements
--grpc_python_out=./ \
--mypy_out=./ \
./proto/prediction.proto
## We build TF's protobufs as well in case the tensorflow package
## is not found
cd alibiexplainer/proto && python \
-m grpc.tools.protoc \
-I./ \
-I../ \
--python_out=./ \
./tensorflow/core/framework/*.proto
sed -i "s/from tensorflow/from alibiexplainer.proto.tensorflow/" alibiexplainer/proto/*.py
sed -i "s/from tensorflow.core.framework/from ./" \
$(addprefix alibiexplainer/proto/tensorflow/core/framework/, \
resource_handle_pb2.py tensor_pb2.py tensor_shape_pb2.py types_pb2.py)
sed -i "s/from proto/from alibiexplainer.proto/g" alibiexplainer/proto/prediction_pb2_grpc.py

dev_install:
Expand Down Expand Up @@ -69,14 +55,18 @@ redhat-image-scan:

clean:
rm -rf kfserving

rm -rf test_models

#
# Test Tabular Explanations
#

run_predictor_adult:
docker run --rm -d --name "sklearnserver" -p 5000:5000 -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"gs://seldon-models/sklearn/income/model-0.23.2"}]' seldonio/sklearnserver_rest:${VERSION}
# Download the income (adult) sklearn model into the local test_models tree.
# File target: skipped on re-runs once the model directory exists.
test_models/sklearn/income/model-0.23.2:
	mkdir -p test_models/sklearn/income
	gsutil cp -r gs://seldon-models/sklearn/income/model-0.23.2 test_models/sklearn/income

# Serve the adult model via the sklearnserver REST image, bind-mounting the
# locally downloaded model (avoids gs:// access from inside the container).
run_predictor_adult: test_models/sklearn/income/model-0.23.2
	docker run -d --rm --name "sklearnserver" -p 5000:5000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/income/model-0.23.2"}]' seldonio/sklearnserver_rest:${VERSION}

curl_predict_adult:
curl -d '{"data": {"ndarray":[[39, 7, 1, 1, 1, 1, 4, 1, 2174, 0, 40, 9]]}}' -X POST http://localhost:5000/api/v1.0/predictions -H "Content-Type: application/json"
Expand All @@ -98,8 +88,13 @@ cleanup_adult:
# Test Text Explanations
#

run_predictor_movie:
docker run --rm -d --name "sklearnserver" -p 5000:5000 -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"gs://seldon-models/sklearn/moviesentiment"}]' seldonio/sklearnserver_rest:${VERSION}

test_models/sklearn/moviesentiment:
mkdir -p test_models/sklearn
gsutil cp -r gs://seldon-models/sklearn/moviesentiment test_models/sklearn

run_predictor_movie: test_models/sklearn/moviesentiment
docker run --rm -d --name "sklearnserver" -p 5000:5000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/moviesentiment"}]' seldonio/sklearnserver_rest:${VERSION}

curl_predict_movie:
curl -d '{"data": {"ndarray":["a visually exquisite but narratively opaque and emotionally vapid experience of style and mystification"]}}' -X POST http://localhost:5000/api/v1.0/predictions -H "Content-Type: application/json"
Expand All @@ -121,23 +116,24 @@ cleanup_movie:
# Test Image Explanation
#

models/resnet32:
mkdir -p models && gsutil cp -r gs://seldon-models/tfserving/cifar10/resnet32 models
test_models/tfserving/cifar10/resnet32:
mkdir -p test_models/tfserving/cifar10
gsutil cp -r gs://seldon-models/tfserving/cifar10/resnet32 test_models/tfserving/cifar10


run_predictor_image:
docker run --name tfserver --rm -d -p 8501:8501 -p 8500:8500 -v "${PWD}/models:/models" -e MODEL_NAME=resnet32 tensorflow/serving
run_predictor_image: test_models/tfserving/cifar10/resnet32
docker run --name tfserver --rm -d -p 8501:8501 -p 8500:8500 -v "${PWD}/test_models/tfserving/cifar10:/models" -e MODEL_NAME=resnet32 tensorflow/serving


curl_predict_image:
curl -d @./input.json -X POST http://localhost:8501/v1/models/resnet32:predict -H "Content-Type: application/json"


run_explainer_image:
python -m alibiexplainer --model_name resnet32 --protocol tensorflow.http --storage_uri gs://seldon-models/tfserving/imagenet/alibi/0.4.0 --predictor_host localhost:8501 AnchorImages
python -m alibiexplainer --model_name resnet32 --protocol tensorflow.http --storage_uri gs://seldon-models/tfserving/imagenet/explainer-py36-0.5.2 --predictor_host localhost:8501 AnchorImages

run_explainer_image_docker:
docker run --rm -d --name "explainer" --network=host -p 8080:8080 seldonio/${IMAGE}:${VERSION} --model_name resnet32 --protocol tensorflow.http --storage_uri gs://seldon-models/tfserving/imagenet/alibi/0.4.0 --predictor_host localhost:8501 AnchorImages
docker run --rm -d --name "explainer" --network=host -p 8080:8080 seldonio/${IMAGE}:${VERSION} --model_name resnet32 --protocol tensorflow.http --storage_uri gs://seldon-models/tfserving/imagenet/explainer-py36-0.5.2 --predictor_host localhost:8501 AnchorImages

curl_explain_image:
curl -d @./input.json -X POST http://localhost:8080/v1/models/resnet32:explain -H "Content-Type: application/json"
Expand All @@ -152,16 +148,29 @@ cleanup_image:
#


run_explainer_kernelshap:
python -m alibiexplainer --model_name adult --protocol seldon.http --storage_uri gs://seldon-models/sklearn/adult_shap/kernelshap/py36 --predictor_host localhost:5000 KernelShap
test_models/sklearn/wine/model-py36-0.23.2:
mkdir -p test_models/sklearn/wine
gsutil cp -r gs://seldon-models/sklearn/wine/model-py36-0.23.2 test_models/sklearn/wine

#
# Docker image is py36 - need to dill.save in py36 the explainer otherwise
#
# Serve the wine model via the sklearnserver REST image.
# Fix: the prerequisite must be the wine model this recipe actually mounts
# (test_models/sklearn/wine/model-py36-0.23.2, fetched by the rule above),
# not the income model — otherwise a clean checkout downloads the wrong
# model and the container cannot resolve its model_uri.
run_predictor_wine: test_models/sklearn/wine/model-py36-0.23.2
	docker run -d --rm --name "sklearnserver" -p 5000:5000 -v ${PWD}/test_models:/models -e PREDICTIVE_UNIT_PARAMETERS='[{"type":"STRING","name":"model_uri","value":"/models/sklearn/wine/model-py36-0.23.2"},{"type":"STRING","name":"method","value":"decision_function"}]' seldonio/sklearnserver_rest:${VERSION}

curl_predict_wine:
curl -d '{"data": {"ndarray":[[-0.24226334, 0.26757916, 0.42085937, 0.7127641 , 0.84067236, -1.27747161, -0.60582812, -0.9706341 , -0.5873972 , 2.42611713, -2.06608025, -1.55017035, -0.86659858]]}}' -X POST http://localhost:5000/api/v1.0/predictions -H "Content-Type: application/json"


run_explainer_kernelshap:
python -m alibiexplainer --model_name wine --protocol seldon.http --storage_uri gs://seldon-models/sklearn/wine/kernel_shap_py36_alibi_0.5.5 --predictor_host localhost:5000 KernelShap


run_explainer_kernelshap_docker:
docker run --rm -d --name "explainer" --network=host -p 8080:8080 seldonio/${IMAGE}:${VERSION} --model_name adult --protocol seldon.http --storage_uri gs://seldon-models/sklearn/adult_shap/kernelshap/py36 --predictor_host localhost:5000 KernelShap
docker run --rm -d --name "explainer" --network=host -p 8080:8080 seldonio/${IMAGE}:${VERSION} --model_name wine --protocol seldon.http --storage_uri gs://seldon-models/sklearn/wine/kernel_shap_py36_alibi_0.5.5 --predictor_host localhost:5000 KernelShap


curl_explain_wine:
curl -d '{"data": {"ndarray":[[-0.24226334, 0.26757916, 0.42085937, 0.7127641 , 0.84067236, -1.27747161, -0.60582812, -0.9706341 , -0.5873972 , 2.42611713, -2.06608025, -1.55017035, -0.86659858]]}}' -X POST http://localhost:8080/api/v1.0/explain -H "Content-Type: application/json"



cleanup_kernelshap:
docker rm -f sklearnserver
Expand All @@ -173,22 +182,35 @@ cleanup_kernelshap:
#

run_explainer_integratedgradients:
python -m alibiexplainer --model_name adult --protocol seldon.http --storage_uri gs://seldon-models/keras/imdb IntegratedGradients IntegratedGradients
python -m alibiexplainer --model_name adult --protocol seldon.http --storage_uri gs://seldon-models/keras/imdb IntegratedGradients IntegratedGradients --layer 1


run_explainer_integratedgradients_docker:
docker run --rm -d --name "explainer" --network=host -p 8080:8080 seldonio/${IMAGE}:${VERSION} --model_name adult --protocol seldon.http --storage_uri gs://seldon-models/keras/imdb IntegratedGradients
docker run --rm -d --name "explainer" --network=host -p 8080:8080 seldonio/${IMAGE}:${VERSION} --model_name adult --protocol seldon.http --storage_uri gs://seldon-models/keras/imdb IntegratedGradients --layer 1

curl_explain_imdb:
curl -d '{"data": {"ndarray":[[ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 591, 202, 14, 31, 6, 717, 10, 10, 2, 2, 5, 4, 360, 7, 4, 177, 5760, 394, 354, 4, 123, 9, 1035, 1035, 1035, 10, 10, 13, 92, 124, 89, 488, 7944, 100, 28, 1668, 14, 31, 23, 27, 7479, 29, 220, 468, 8, 124, 14, 286, 170, 8, 157, 46, 5, 27, 239, 16, 179, 2, 38, 32, 25, 7944, 451, 202, 14, 6, 717]]}}' -X POST http://localhost:8080/api/v1.0/explain -H "Content-Type: application/json"

# Tear down the Integrated Gradients explainer container.
# NOTE(review): target name has a typo — "integratedfradients" should read
# "integratedgradients". Kept as-is here because external scripts/CI may
# invoke the existing name; rename in a coordinated change.
cleanup_integratedfradients:
	docker rm -f explainer



#
# Test Tree Shap
# White box so does not need separate model
#

run_explainer_treeshap:
python -m alibiexplainer --model_name adult --protocol seldon.http --storage_uri gs://seldon-models/xgboost/adult/tree_shap_py36_0.5.2 TreeShap
python -m alibiexplainer --model_name adult --protocol seldon.http --storage_uri gs://seldon-models/xgboost/adult/tree_shap_py368_alibi_0.5.5 TreeShap

run_explainer_treeshap_docker:
docker run --rm -d --name "explainer" --network=host -p 8080:8080 seldonio/${IMAGE}:${VERSION} --model_name adult --protocol seldon.http --storage_uri gs://seldon-models/xgboost/adult/tree_shap_py36_0.5.2 TreeShap
docker run --rm -d --name "explainer" --network=host -p 8080:8080 seldonio/${IMAGE}:${VERSION} --model_name adult --protocol seldon.http --storage_uri gs://seldon-models/xgboost/adult/tree_shap_py368_alibi_0.5.5 TreeShap

curl_explain_adult_treeshap:
curl -d '{"data": {"ndarray":[[39, 7, 1, 1, 1, 1, 4, 1, 2174, 0, 40, 9]]}}' -X POST http://localhost:8080/api/v1.0/explain -H "Content-Type: application/json"



cleanup_treeshap:
docker rm -f explainer
2 changes: 0 additions & 2 deletions components/alibi-explain-server/alibiexplainer/kernel_shap.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from alibi.api.interfaces import Explanation
from alibiexplainer.explainer_wrapper import ExplainerWrapper
from alibiexplainer.constants import SELDON_LOGLEVEL
from shap.common import Model
from typing import Callable, List, Optional

logging.basicConfig(level=SELDON_LOGLEVEL)
Expand All @@ -26,7 +25,6 @@ def __init__(
def explain(self, inputs: List) -> Explanation:
arr = np.array(inputs)
self.kernel_shap.predictor = self.predict_fn
self.kernel_shap._explainer.model = Model(self.predict_fn,None)
logging.info("kernel Shap call with %s", self.kwargs)
logging.info("kernel shap data shape %s",arr.shape)
shap_exp = self.kernel_shap.explain(arr, l1_reg=False, **self.kwargs)
Expand Down
4 changes: 2 additions & 2 deletions components/alibi-explain-server/setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,15 +32,15 @@
packages=find_packages("alibiexplainer"),
install_requires=[
"kfserving>=0.3.0",
"alibi==0.5.2",
"alibi==0.5.5",
"scikit-learn>= 0.23.0",
"argparse>=1.4.0",
"requests>=2.22.0",
"joblib>=0.13.2",
"dill>=0.3.0",
"grpcio>=1.22.0",
"xgboost==1.0.2",
"shap==0.35.0"
"shap==0.36.0"
],
tests_require=tests_require,
extras_require={'test': tests_require}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,7 @@
import kfserving
import dill

IMAGENET_EXPLAINER_URI = "gs://seldon-models/tfserving/imagenet/alibi/0.4.0"
IMAGENET_EXPLAINER_URI = "gs://seldon-models/tfserving/imagenet/explainer-py36-0.5.2"
ADULT_MODEL_URI = "gs://seldon-models/sklearn/income/model"
EXPLAINER_FILENAME = "explainer.dill"

Expand Down
14 changes: 7 additions & 7 deletions components/alibi-explain-server/tests/test_kernel_shap.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,27 +2,27 @@
import kfserving
import os
import dill
from alibi.datasets import fetch_adult
from sklearn.datasets import load_wine
import numpy as np
import json
from .utils import SKLearnServer
ADULT_EXPLAINER_URI = "gs://seldon-models/sklearn/adult_shap/kernelshap/py36"
ADULT_MODEL_URI = "gs://seldon-models/sklearn/adult_shap/model"
WINE_EXPLAINER_URI = "gs://seldon-models/sklearn/wine/kernel_shap_py36_alibi_0.5.5"
WINE_MODEL_URI = "gs://seldon-models/sklearn/wine/model-py36-0.23.2"
EXPLAINER_FILENAME = "explainer.dill"


def test_kernel_shap():
os.environ.clear()
alibi_model = os.path.join(
kfserving.Storage.download(ADULT_EXPLAINER_URI), EXPLAINER_FILENAME
kfserving.Storage.download(WINE_EXPLAINER_URI), EXPLAINER_FILENAME
)
with open(alibi_model, "rb") as f:
skmodel = SKLearnServer(ADULT_MODEL_URI)
skmodel = SKLearnServer(WINE_MODEL_URI)
skmodel.load()
alibi_model = dill.load(f)
kernel_shap = KernelShap(skmodel.predict, alibi_model)
adult = fetch_adult()
X_test = adult.data[30001:, :]
wine = load_wine()
X_test = wine.data
np.random.seed(0)
explanation = kernel_shap.explain(X_test[0:1].tolist())
exp_json = json.loads(explanation.to_json())
Expand Down
4 changes: 2 additions & 2 deletions components/alibi-explain-server/tests/test_tree_shap.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@
from alibi.datasets import fetch_adult
import numpy as np
import json
ADULT_EXPLAINER_URI = "gs://seldon-models/xgboost/adult/tree_shap_py36_0.5.2"
ADULT_EXPLAINER_URI = "gs://seldon-models/xgboost/adult/tree_shap_py368_alibi_0.5.5"
EXPLAINER_FILENAME = "explainer.dill"


def test_kernel_shap():
def test_tree_shap():
os.environ.clear()
alibi_model = os.path.join(
kfserving.Storage.download(ADULT_EXPLAINER_URI), EXPLAINER_FILENAME
Expand Down
2 changes: 1 addition & 1 deletion doc/source/analytics/explainers.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ We provide [an example notebook](../examples/explainer_examples.html) showing ho

For Alibi explainers that need to be trained you should

1. Use python 3.6 as the Seldon python wrapper also runs in python 3.6 when it loads your explainer.
1. Use python 3.6.8 as the Seldon python Alibi explainer wrapper also runs in python 3.6.8 when it loads your explainer.
1. Follow the [Alibi docs](https://docs.seldon.io/projects/alibi/en/latest/index.html) for your particular desired explainer. The Seldon Wrapper presently supports: Anchors (Tabular, Text and Image), KernelShap and Integrated Gradients.
1. Save your explainer to a file called `explainer.dill` using the [dill](https://pypi.org/project/dill/) python package and store on a bucket store or PVC in your cluster. We support gcs, s3 (including Minio) or Azure blob.

Expand Down
Loading