
Commit

Merge pull request #5 from intel-analytics/main
Update
ForJadeForest authored Jul 26, 2022
2 parents 18dfb83 + 5f72d8a commit 13e4625
Showing 52 changed files with 6,334 additions and 1,100 deletions.
1 change: 1 addition & 0 deletions .github/CODEOWNERS
@@ -0,0 +1 @@
/.github/workflows/* @glorysdj @intel-analytics/CICD
63 changes: 61 additions & 2 deletions .github/workflows/maven-publish-scala.yml
@@ -39,7 +39,7 @@ jobs:
uses: stCarolas/setup-maven@v4.4
with:
maven-version: 3.8.2

- name: Set up Maven Settings
uses: s4u/maven-settings-action@v2.6.0
with:
@@ -69,7 +69,18 @@ jobs:
- name: Build with Maven
run: |
ls
cat ~/.m2/settings.xml
#spark3.1.2
cp scala/pom.xml scala/pom.xml.origin
cp scala/common/spark-version/pom.xml scala/common/spark-version/pom.xml.origin
cp scala/common/spark-version/3.0/pom.xml scala/common/spark-version/3.0/pom.xml.origin
cp scala/dllib/pom.xml scala/dllib/pom.xml.origin
cp scala/orca/pom.xml scala/orca/pom.xml.origin
cp scala/friesian/pom.xml scala/friesian/pom.xml.origin
cp scala/grpc/pom.xml scala/grpc/pom.xml.origin
cp scala/serving/pom.xml scala/serving/pom.xml.origin
cp scala/ppml/pom.xml scala/ppml/pom.xml.origin
cp scala/assembly/pom.xml scala/assembly/pom.xml.origin
sed -i 's/<artifactId>${spark-version.project}<\/artifactId>/<artifactId>${spark-version.project}-${SPARK_PLATFORM}<\/artifactId>/' scala/dllib/pom.xml
sed -i 's/<artifactId>3.0<\/artifactId>/<artifactId>3.0-${SPARK_PLATFORM}<\/artifactId>/' scala/common/spark-version/3.0/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.2<\/artifactId>/' scala/pom.xml
@@ -83,6 +94,54 @@ jobs:
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.2<\/artifactId>/' scala/ppml/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_3.1.2<\/artifactId>/' scala/assembly/pom.xml
mvn -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} clean deploy -DskipTests -Dspark.version=3.1.2 -DSPARK_PLATFORM=SPARK_3.1 -P spark_3.x --file scala/pom.xml
mv scala/pom.xml.origin scala/pom.xml
mv scala/common/spark-version/pom.xml.origin scala/common/spark-version/pom.xml
mv scala/common/spark-version/3.0/pom.xml.origin scala/common/spark-version/3.0/pom.xml
mv scala/dllib/pom.xml.origin scala/dllib/pom.xml
mv scala/orca/pom.xml.origin scala/orca/pom.xml
mv scala/friesian/pom.xml.origin scala/friesian/pom.xml
mv scala/grpc/pom.xml.origin scala/grpc/pom.xml
mv scala/serving/pom.xml.origin scala/serving/pom.xml
mv scala/ppml/pom.xml.origin scala/ppml/pom.xml
mv scala/assembly/pom.xml.origin scala/assembly/pom.xml
#spark2.4.6
cp scala/pom.xml scala/pom.xml.origin
cp scala/common/spark-version/pom.xml scala/common/spark-version/pom.xml.origin
cp scala/common/spark-version/2.0/pom.xml scala/common/spark-version/2.0/pom.xml.origin
cp scala/dllib/pom.xml scala/dllib/pom.xml.origin
cp scala/orca/pom.xml scala/orca/pom.xml.origin
cp scala/friesian/pom.xml scala/friesian/pom.xml.origin
cp scala/grpc/pom.xml scala/grpc/pom.xml.origin
cp scala/serving/pom.xml scala/serving/pom.xml.origin
cp scala/ppml/pom.xml scala/ppml/pom.xml.origin
cp scala/assembly/pom.xml scala/assembly/pom.xml.origin
sed -i 's/<artifactId>${spark-version.project}<\/artifactId>/<artifactId>${spark-version.project}-${SPARK_PLATFORM}<\/artifactId>/' scala/dllib/pom.xml
sed -i 's/<artifactId>2.0<\/artifactId>/<artifactId>2.0-${SPARK_PLATFORM}<\/artifactId>/' scala/common/spark-version/2.0/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_2.4.6<\/artifactId>/' scala/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_2.4.6<\/artifactId>/' scala/common/spark-version/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_2.4.6<\/artifactId>/' scala/common/spark-version/2.0/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_2.4.6<\/artifactId>/' scala/dllib/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_2.4.6<\/artifactId>/' scala/orca/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_2.4.6<\/artifactId>/' scala/friesian/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_2.4.6<\/artifactId>/' scala/grpc/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_2.4.6<\/artifactId>/' scala/serving/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_2.4.6<\/artifactId>/' scala/ppml/pom.xml
sed -i 's/<artifactId>bigdl-parent-spark_${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_2.4.6<\/artifactId>/' scala/assembly/pom.xml
mvn -Dhttp.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttp.proxyPort=${{ secrets.HTTP_PROXY_PORT_2 }} -Dhttps.proxyHost=${{ secrets.HTTP_PROXY_HOST_2 }} -Dhttps.proxyPort=${{ secrets.HTTP_PROXY_PORT_3 }} clean deploy -DskipTests -Dspark.version=2.4.6 -DSPARK_PLATFORM=SPARK_2.4 -P spark_2.x --file scala/pom.xml
mv scala/pom.xml.origin scala/pom.xml
mv scala/common/spark-version/pom.xml.origin scala/common/spark-version/pom.xml
mv scala/common/spark-version/2.0/pom.xml.origin scala/common/spark-version/2.0/pom.xml
mv scala/dllib/pom.xml.origin scala/dllib/pom.xml
mv scala/orca/pom.xml.origin scala/orca/pom.xml
mv scala/friesian/pom.xml.origin scala/friesian/pom.xml
mv scala/grpc/pom.xml.origin scala/grpc/pom.xml
mv scala/serving/pom.xml.origin scala/serving/pom.xml
mv scala/ppml/pom.xml.origin scala/ppml/pom.xml
mv scala/assembly/pom.xml.origin scala/assembly/pom.xml


# - name: Publish to GitHub Packages Apache Maven
# run: mvn deploy -s $GITHUB_WORKSPACE/settings.xml
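The build step above runs an identical cp/sed/mv backup-patch-restore cycle over ten module POMs, once per Spark version. The same effect can be expressed as a loop; below is a minimal sketch for the Spark 3 layout, where the `SPARK_VER` and `MODULES` variable names are invented for the example, and the proxy flags, Maven profile, and the two module-specific `sed` lines (for dllib and the spark-version POM) are omitted for brevity:

```bash
#!/usr/bin/env bash
# Sketch: back up, patch, build, and restore the module POMs for one Spark version.
set -euo pipefail

SPARK_VER="3.1.2"    # or 2.4.6 for the second build
MODULES=". common/spark-version common/spark-version/3.0 dllib orca friesian grpc serving ppml assembly"

for m in $MODULES; do                  # back up every POM first
  cp "scala/$m/pom.xml" "scala/$m/pom.xml.origin"
done

for m in $MODULES; do                  # pin the parent artifactId to the concrete Spark version
  sed -i "s/<artifactId>bigdl-parent-spark_\${spark.version}<\/artifactId>/<artifactId>bigdl-parent-spark_${SPARK_VER}<\/artifactId>/" \
    "scala/$m/pom.xml"
done

mvn clean deploy -DskipTests -Dspark.version="$SPARK_VER" --file scala/pom.xml

for m in $MODULES; do                  # restore the pristine POMs afterwards
  mv "scala/$m/pom.xml.origin" "scala/$m/pom.xml"
done
```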
83 changes: 83 additions & 0 deletions .github/workflows/nano_unit_tests_basic.yml
@@ -0,0 +1,83 @@
name: Nano Unit Tests Basic

# Controls when the action will run.
on:
# Triggers the workflow on push or pull request events but only for the main branch
push:
branches: [ main ]
pull_request:
branches: [ main ]
paths:
- 'python/nano/**'
- '.github/workflows/nano_unit_tests_basic.yml'

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
nano-unit-test-basic:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: ["ubuntu-20.04"]
python-version: ["3.7"]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install --upgrade setuptools==58.0.4
python -m pip install --upgrade wheel
- name: Run Nano-init test
shell: bash
run: |
$CONDA/bin/conda create -n bigdl-init -y python==3.7.10 setuptools==58.0.4
source $CONDA/bin/activate bigdl-init
$CONDA/bin/conda info
bash python/nano/dev/release_default_linux.sh default false
whl_name=`ls python/nano/dist`
pip install python/nano/dist/${whl_name}
source bigdl-nano-init
if [ 0"$LD_PRELOAD" = "0" ]; then
exit 1
else
echo "Set environment variable successfully."
fi
source $CONDA/bin/deactivate
if [ ! 0"$LD_PRELOAD" = "0" ]; then
exit 1
else
echo "Unset environment variable successfully while deactivating conda environment."
fi
source $CONDA/bin/activate bigdl-init
if [ 0"$LD_PRELOAD" = "0" ]; then
exit 1
else
echo "Setup environment variable successfully while activating conda environment."
fi
pip uninstall -y bigdl-nano
source $CONDA/bin/deactivate
$CONDA/bin/conda remove -n bigdl-init --all
env:
ANALYTICS_ZOO_ROOT: ${{ github.workspace }}

- name: Run Basic unit tests (OpenVINO)
shell: bash
run: |
$CONDA/bin/conda create -n openvino-basic -y python==3.7.10 setuptools=58.0.4
source $CONDA/bin/activate openvino-basic
$CONDA/bin/conda info
bash python/nano/dev/release_default_linux.sh default false
whl_name=`ls python/nano/dist`
pip install python/nano/dist/${whl_name}
pip install pytest openvino-dev
source bigdl-nano-init
bash python/nano/test/run-nano-basic-openvino-tests.sh
source $CONDA/bin/deactivate
$CONDA/bin/conda remove -n openvino-basic --all
env:
ANALYTICS_ZOO_ROOT: ${{ github.workspace }}
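The `[ 0"$LD_PRELOAD" = "0" ]` tests in the init step above are a portable way to ask "is this variable empty or unset": prefixing both sides with `0` keeps the `test` expression well-formed even when the variable expands to nothing. A standalone sketch of the idiom, in which the `probe` helper and the library path are invented for illustration:

```bash
#!/usr/bin/env bash
# Demonstrates the 0"$VAR" = "0" emptiness test used in the workflow above.
probe() {
  if [ 0"$LD_PRELOAD" = "0" ]; then
    echo "LD_PRELOAD is empty or unset"
  else
    echo "LD_PRELOAD is set to: $LD_PRELOAD"
  fi
}

unset LD_PRELOAD
probe    # -> LD_PRELOAD is empty or unset

export LD_PRELOAD="/usr/lib/x86_64-linux-gnu/libjemalloc.so"   # illustrative path only
probe    # -> LD_PRELOAD is set to: /usr/lib/x86_64-linux-gnu/libjemalloc.so
```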
39 changes: 39 additions & 0 deletions .github/workflows/python-publish.yml
@@ -0,0 +1,39 @@
# This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries

# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

name: Upload Python Package

on:
release:
types: [published]

permissions:
contents: read

jobs:
deploy:

runs-on: ubuntu-latest

steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
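The job above builds the distribution with `python -m build` and uploads whatever lands in `dist/` through the pinned `pypa/gh-action-pypi-publish` action, authenticating as `__token__` with a PyPI API token. A rough local equivalent for comparison (the token placeholder is something you must supply yourself; `twine` is the tool the publish action wraps):

```bash
# Approximate the publish workflow above from a local checkout.
python -m pip install --upgrade pip
pip install build twine

python -m build    # writes sdist and wheel into dist/

# __token__ plus an API token mirrors the action's user/password inputs.
TWINE_USERNAME=__token__ \
TWINE_PASSWORD="<your-pypi-api-token>" \
  twine upload dist/*
```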
2 changes: 1 addition & 1 deletion docs/docs/README.md
@@ -5,7 +5,7 @@ Before we create web pages from the documents, the environment needs to be set up.
```
conda create --name py36 python=3.6
pip install mkdocs==0.16.3
-cd analytics-zoo
+cd BigDL
docs/gen_site.py -p -s -m 8080
```
Then choose the correct proxy and open the link using your IP address and port number.
5 changes: 3 additions & 2 deletions docs/readthedocs/requirements-doc.txt
@@ -5,7 +5,6 @@ sphinx-jsonschema==1.19.1
sphinxemoji==0.2.0
click==8.1.3
markdown<3.4
-tensorflow==1.15.2
bigdl==0.12.0
cloudpickle==2.1.0
ray[tune]==1.9.2
@@ -16,7 +15,7 @@ setuptools==41.0.1
docutils==0.17
mock==1.0.1
pillow==5.4.1
-sphinx==5.0.2
+sphinx==4.5.0
alabaster>=0.7,<0.8,!=0.7.5
commonmark==0.8.1
recommonmark==0.5.0
@@ -35,3 +34,5 @@ protobuf~=3.19.0
sphinx-tabs==3.4
optuna==2.10.0
ConfigSpace==0.5.0
+sphinx-design==0.2.0
+sphinx-external-toc==0.3.0
102 changes: 102 additions & 0 deletions docs/readthedocs/source/_toc.yml
@@ -0,0 +1,102 @@
root: index
subtrees:
- caption: Quick Start
entries:
- file: doc/Orca/QuickStart/orca-tf-quickstart
- file: doc/Orca/QuickStart/orca-keras-quickstart
- file: doc/Orca/QuickStart/orca-tf2keras-quickstart
- file: doc/Orca/QuickStart/orca-pytorch-quickstart
- file: doc/Ray/QuickStart/ray-quickstart

- caption: User Guide
entries:
- file: doc/UserGuide/python
- file: doc/UserGuide/scala
- file: doc/UserGuide/colab
- file: doc/UserGuide/docker
- file: doc/UserGuide/hadoop
- file: doc/UserGuide/k8s
- file: doc/UserGuide/databricks
- file: doc/UserGuide/develop

- caption: Nano
entries:
- file: doc/Nano/Overview/nano
- file: doc/Nano/Overview/windows_guide
- file: doc/Nano/QuickStart/pytorch_train
- file: doc/Nano/QuickStart/pytorch_inference
- file: doc/Nano/QuickStart/tensorflow_train
- file: doc/Nano/QuickStart/tensorflow_inference
- file: doc/Nano/QuickStart/hpo
- file: doc/Nano/Overview/known_issues
- file: doc/Nano/QuickStart/index

- caption: DLlib
entries:
- file: doc/DLlib/Overview/dllib
- file: doc/DLlib/Overview/keras-api
- file: doc/DLlib/Overview/nnframes

- caption: Orca
entries:
- file: doc/Orca/Overview/orca
title: "Orca User Guide"
- file: doc/Orca/Overview/orca-context
- file: doc/Orca/Overview/data-parallel-processing
- file: doc/Orca/Overview/distributed-training-inference
- file: doc/Orca/Overview/distributed-tuning
- file: doc/Ray/Overview/ray
- file: doc/Orca/Overview/known_issues

- caption: Chronos
entries:
- file: doc/Chronos/Overview/chronos
- file: doc/Chronos/Overview/deep_dive
- file: doc/Chronos/QuickStart/index
- file: doc/Chronos/Overview/chronos_known_issue


- caption: PPML
entries:
- file: doc/PPML/Overview/ppml
- file: doc/PPML/Overview/trusted_big_data_analytics_and_ml
- file: doc/PPML/Overview/trusted_fl
- file: doc/PPML/QuickStart/secure_your_services
- file: doc/PPML/QuickStart/build_kernel_with_sgx
- file: doc/PPML/QuickStart/deploy_intel_sgx_device_plugin_for_kubernetes
- file: doc/PPML/QuickStart/trusted-serving-on-k8s-guide
- file: doc/PPML/QuickStart/tpc-h_with_sparksql_on_k8s
- file: doc/PPML/QuickStart/tpc-ds_with_sparksql_on_k8s
- file: doc/PPML/Overview/azure_ppml

- caption: Serving
entries:
- file: doc/Serving/Overview/serving.md
- file: doc/Serving/QuickStart/serving-quickstart
- file: doc/Serving/ProgrammingGuide/serving-installation
- file: doc/Serving/ProgrammingGuide/serving-start
- file: doc/Serving/ProgrammingGuide/serving-inference
- file: doc/Serving/Example/example
- file: doc/Serving/FAQ/faq
- file: doc/Serving/FAQ/contribute-guide

- caption: Common Use Case
entries:
- file: doc/Orca/QuickStart/orca-pytorch-distributed-quickstart
- file: doc/UseCase/spark-dataframe
- file: doc/UseCase/xshards-pandas
- file: doc/Orca/QuickStart/orca-autoestimator-pytorch-quickstart
- file: doc/Orca/QuickStart/orca-autoxgboost-quickstart

- caption: Python API
entries:
- file: doc/PythonAPI/Orca/orca
- file: doc/PythonAPI/Friesian/feature
- file: doc/PythonAPI/Chronos/index
- file: doc/PythonAPI/Nano/index

- caption: Real-World Application
entries:
- file: doc/Application/presentations
- file: doc/Application/blogs
- file: doc/Application/powered-by
12 changes: 11 additions & 1 deletion docs/readthedocs/source/conf.py
@@ -18,7 +18,7 @@
import shutil
import urllib

-autodoc_mock_imports = ["openvino", "pytorch_lightning", "keras"]
+autodoc_mock_imports = ["openvino", "pytorch_lightning", "keras", "tensorflow"]

# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, '.')
@@ -90,6 +90,8 @@
'recommonmark',
'sphinx_markdown_tables',
'sphinx_tabs.tabs',
+'sphinx_design',
+'sphinx_external_toc',
]


@@ -152,6 +154,14 @@
htmlhelp_basename = 'BigDL Documentation'



# -- Options for external TOC tree ---
external_toc_exclude_missing = False
external_toc_path = "_toc.yml"

# this is to suppress warnings about explicit "toctree" directives
suppress_warnings = ["etoc.toctree"]

# -- Options for LaTeX output ------------------------------------------------

latex_elements = {
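Taken together, the new `_toc.yml` and these conf.py changes move the site navigation from in-page `toctree` directives to `sphinx-external-toc`, while the extended `autodoc_mock_imports` lets autodoc document modules that import `tensorflow` without it being installed. One way to smoke-test the new configuration locally might be (the build output directory and port are arbitrary choices for the example):

```bash
# Sketch: build and preview the docs with the updated requirements.
pip install -r docs/readthedocs/requirements-doc.txt
sphinx-build -b html docs/readthedocs/source docs/readthedocs/_build/html
python -m http.server 8080 --directory docs/readthedocs/_build/html
```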
(Diff for the remaining 44 changed files not shown.)
