forked from intel-analytics/ipex-llm
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
add python notebook support (intel-analytics#3)
* add python notebook support * put post process calculation into executors
- Loading branch information
1 parent
5ad8c44
commit d26dc50
Showing
11 changed files
with
709 additions
and
55 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,50 @@ | ||
# Demo Setup Guide | ||
|
||
## Install Dependency Packages | ||
|
||
Reference https://github.com/intel-analytics/BigDL/wiki/Python-Support
|
||
## Download BigDL jars | ||
|
||
Download BigDL Nightly Build jars from https://github.com/intel-analytics/BigDL/wiki/Downloads
|
||
The default spark version is Spark 1.5.1 | ||
|
||
|
||
## Start Jupyter Server
|
||
* Create start_notebook.sh, copy and paste the contents below, and edit SPARK_HOME, BigDL_HOME accordingly. Change other parameter settings as you need. | ||
```bash | ||
#!/bin/bash | ||
|
||
#set up paths
SPARK_HOME=/Users/bigdl/spark-1.6.0-bin-hadoop2.6/ | ||
Analytics_HOME=/Users/bigdl/analytics-zoo | ||
BigDL_HOME=/Users/bigdl/dist-spark-1.5.1-scala-2.10.5-linux64-0.2.0-20170510.012057-18-dist | ||
#use local mode or cluster mode | ||
#MASTER=spark://xxxx:7077 | ||
MASTER="local[4]" | ||
|
||
PYTHON_API_ZIP_PATH=${BigDL_HOME}/lib/bigdl-0.2.0-SNAPSHOT-python-api.zip | ||
BigDL_JAR_PATH=${Analytics_HOME}/pipeline/target/pipeline-0.1-SNAPSHOT-jar-with-dependencies.jar | ||
|
||
export PYTHONPATH=${PYTHON_API_ZIP_PATH}:$PYTHONPATH | ||
export IPYTHON_OPTS="notebook --notebook-dir=./ --ip=* --no-browser --NotebookApp.token=''" | ||
|
||
${SPARK_HOME}/bin/pyspark \ | ||
--master ${MASTER} \ | ||
--properties-file ${BigDL_HOME}/conf/spark-bigdl.conf \ | ||
--driver-cores 1 \ | ||
--driver-memory 10g \ | ||
--total-executor-cores 3 \ | ||
--executor-cores 1 \ | ||
--executor-memory 20g \ | ||
--conf spark.akka.frameSize=64 \ | ||
--py-files ${PYTHON_API_ZIP_PATH} \ | ||
--jars ${BigDL_JAR_PATH} \ | ||
--conf spark.driver.extraClassPath=${BigDL_JAR_PATH} \ | ||
--conf spark.executor.extraClassPath=pipeline-0.1-SNAPSHOT-jar-with-dependencies.jar | ||
``` | ||
* Put start_notebook.sh and start_tensorboard.sh in home directory and execute them in bash. | ||
|
||
|
Large diffs are not rendered by default.
Oops, something went wrong.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,18 @@ | ||
<assembly xmlns="http://maven.apache.org/ASSEMBLY/2.0.0"
          xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/ASSEMBLY/2.0.0 http://maven.apache.org/xsd/assembly-2.0.0.xsd">
  <!-- Maven assembly descriptor: bundles every Python source under
       src/main/python/ into a "python-api" zip with no base directory,
       so the resulting zip can be placed directly on PYTHONPATH
       (see start_notebook.sh, which exports such a zip). -->
  <id>python-api</id>
  <formats>
    <format>zip</format>
  </formats>
  <includeBaseDirectory>false</includeBaseDirectory>
  <fileSets>
    <fileSet>
      <includes>
        <include>**/*.py</include>
      </includes>
      <!-- NOTE(review): "/.." writes entries one level above the assembly
           root — presumably to flatten the package layout inside the zip;
           confirm the produced zip structure. -->
      <outputDirectory>/..</outputDirectory>
      <directory>src/main/python/</directory>
    </fileSet>
  </fileSets>
</assembly>
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,4 @@ | ||
# Module initialization: point BigDL's Py4J gateway at the SSD-specific
# creator class so subsequent bridged calls are served by SSDPythonBigDL
# rather than the default PythonBigDL implementation.
from util.common import JavaCreator

JavaCreator.set_creator_class(
    "com.intel.analytics.bigdl.python.api.SSDPythonBigDL")  # noqa
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,41 @@ | ||
from nn.layer import * | ||
|
||
class Test(Model):
    """Trivial layer used only to verify the Python <-> JVM binding.

    The doctest below is executed by ``_test()``; the ``creating: ...``
    lines are emitted by the JVM side when the layer is constructed, so
    their exact text must not change.

    >>> test = Test("myworld")
    creating: createTest
    >>> print(test.value)
    hello myworld
    >>> linear = Linear(1, 2)
    creating: createLinear
    """
    def __init__(self, message, bigdl_type="float"):
        # Forward `message` to the JVM factory. The leading None is the
        # jvalue argument — presumably "no existing JVM object, create a
        # new one"; confirm against Model.__init__.
        super(Test, self).__init__(None, bigdl_type, message)
|
||
|
||
def _test():
    """Run this module's doctests against a local Spark context.

    Starts a local[4] SparkContext, wires the Py4J creator class to
    SSDPythonBigDL, initializes the BigDL engine, and executes the
    doctests defined in this module (e.g. on the Test layer above).
    Exits with a non-zero status if any doctest fails.
    """
    import sys
    # Fixed: the original used the Python 2 `print sys.path` statement,
    # which is a SyntaxError on Python 3; the call form works on both.
    print(sys.path)
    import doctest
    from pyspark import SparkContext
    from util.common import init_engine
    from util.common import create_spark_conf
    from util.common import JavaCreator
    import ssd
    globs = ssd.__dict__.copy()
    sc = SparkContext(master="local[4]", appName="test layer",
                      conf=create_spark_conf())
    globs['sc'] = sc
    JavaCreator.set_creator_class("com.intel.analytics.bigdl.python.api.SSDPythonBigDL")  # noqa
    init_engine()
    (failure_count, test_count) = doctest.testmod(globs=globs,
                                                  optionflags=doctest.ELLIPSIS)
    if failure_count:
        # sys.exit instead of the bare `exit` builtin: `exit` is injected
        # by the site module and is not guaranteed to exist (e.g. with -S).
        sys.exit(-1)
|
||
def predict(resolution, batch_size, n_partition, folder, _sc, _model, n_classes):
    """Run SSD prediction on the images under `folder` via the JVM backend.

    Delegates to the Scala-side "ssdPredict" entry point through the
    Py4J bridge and returns whatever callBigDlFunc converts back.
    """
    ssd_args = (resolution, batch_size, n_partition,
                folder, _sc, _model, n_classes)
    return callBigDlFunc("float", "ssdPredict", *ssd_args)
|
||
# Running this module directly executes the doctest suite.
if __name__ == "__main__":
    _test()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
64 changes: 64 additions & 0 deletions
64
pipeline/src/main/scala/com/intel/analytics/zoo/pipeline/python/api/SSDPythonBigDL.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,64 @@ | ||
/* | ||
* Copyright 2016 The BigDL Authors. | ||
* | ||
* Licensed under the Apache License, Version 2.0 (the "License"); | ||
* you may not use this file except in compliance with the License. | ||
* You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package com.intel.analytics.bigdl.python.api | ||
|
||
import java.lang.{Boolean => JBoolean} | ||
import java.util.{ArrayList => JArrayList, HashMap => JHashMap, List => JList, Map => JMap} | ||
|
||
import com.intel.analytics.bigdl.dataset.{Identity => DIdentity, Sample => JSample} | ||
import com.intel.analytics.bigdl.nn.abstractnn.{AbstractModule, Activity} | ||
import com.intel.analytics.bigdl.numeric._ | ||
import com.intel.analytics.bigdl.tensor.TensorNumericMath.TensorNumeric | ||
import com.intel.analytics.zoo.pipeline.ssd._ | ||
import org.apache.spark.api.java.JavaSparkContext | ||
|
||
import scala.collection.JavaConverters._ | ||
import scala.language.existentials | ||
import scala.reflect.ClassTag | ||
|
||
object SSDPythonBigDL {

  /** Factory for the Float-typed Python API bridge instance. */
  def ofFloat(): PythonBigDL[Float] = new SSDPythonBigDL[Float]()

  /** Factory for the Double-typed Python API bridge instance. */
  def ofDouble(): PythonBigDL[Double] = new SSDPythonBigDL[Double]()

}
|
||
|
||
/**
 * SSD-specific Python API entry point, exposed to PySpark through the
 * BigDL Py4J bridge (the Python `ssd` module invokes "ssdPredict" via
 * callBigDlFunc).
 */
class SSDPythonBigDL[T: ClassTag](implicit ev: TensorNumeric[T]) extends PythonBigDL[T] {

  /**
   * Run SSD prediction over all images in a local folder.
   *
   * @param resolution input resolution given to the pre-processor
   * @param batchSize  batch size used during prediction
   * @param nPartition number of partitions used to load the folder
   * @param folder     local directory containing the input images
   * @param sc         Java Spark context used to load the data
   * @param model      trained SSD model (Float)
   * @param nClasses   number of output classes (default 21)
   * @return one [scores, bboxes] pair per image as nested Java lists;
   *         an inner entry is null where the corresponding result was null
   */
  def ssdPredict(resolution: Int, batchSize: Int, nPartition: Int,
    folder: String, sc: JavaSparkContext,
    model: AbstractModule[Activity, Activity, Float], nClasses: Int = 21)
  : JList[JList[JList[JList[Float]]]] = {
    // (123f, 117f, 104f) are per-channel pixel means — presumably the
    // standard ImageNet means; confirm against PreProcessParam. The
    // trailing `false` disables a boolean option whose meaning is not
    // visible from here.
    val predictor = new Predictor(model,
      PreProcessParam(batchSize, resolution, (123f, 117f, 104f), false), nClasses)
    val data = IOUtils.loadLocalFolder(nPartition, folder, sc)
    // Both collects pull everything to the driver — acceptable for
    // demo-sized folders only.
    val results = predictor.predict(data).collect()
    val pathArr = data.map(x => x.path).collect()
    // NOTE(review): zips two independently collected results; relies on
    // both collect() calls preserving the same element order.
    results.zip(pathArr).map(res => {
      val bboxes = res._1.map(r => if (r != null) r.bboxes.storage().array().toList.asJava
      else null).toList.asJava
      val scores = res._1.map(r => if (r != null) r.classes.storage().array().toList.asJava
      else null).toList.asJava
      // Order matters to the Python caller: scores first, then bboxes.
      List(scores, bboxes).asJava
    }).toList.asJava
  }
}
|
||
|
||
|
Oops, something went wrong.