Sync for 4.0b4 release (#950)
* sync for 4.0b4 release

* fix an extra space character in build.sh and add a simple prediction test for smoke testing on older macOS versions
aseemw authored Oct 1, 2020
1 parent a629958 commit cf5f317
Showing 87 changed files with 4,056 additions and 1,571 deletions.
77 changes: 75 additions & 2 deletions .gitlab-ci.yml
@@ -22,7 +22,7 @@ check_python_flake8:

########################################################################
#
# linux - Build & Test
# linux - Build
#
########################################################################

@@ -44,9 +44,33 @@ build_wheel_linux_py27:
  variables:
    PYTHON: "2.7"

build_wheel_linux_py35:
  <<: *build_linux
  image: registry.gitlab.com/zach_nation/coremltools/build-image-ubuntu-14.04:1.0.3
  variables:
    PYTHON: "3.5"

build_wheel_linux_py36:
  <<: *build_linux
  image: registry.gitlab.com/zach_nation/coremltools/build-image-ubuntu-14.04:1.0.3
  variables:
    PYTHON: "3.6"

build_wheel_linux_py37:
  <<: *build_linux
  image: registry.gitlab.com/zach_nation/coremltools/build-image-ubuntu-14.04:1.0.3
  variables:
    PYTHON: "3.7"

build_wheel_linux_py38:
  <<: *build_linux
  image: registry.gitlab.com/zach_nation/coremltools/build-image-ubuntu-14.04:1.0.3
  variables:
    PYTHON: "3.8"

#########################################################################
##
## macOS - Build & Test
## macOS - Build
##
#########################################################################

@@ -91,6 +115,12 @@ build_wheel_macos_py38:
  variables:
    PYTHON: "3.8"

#########################################################################
##
## macOS - Test
##
#########################################################################

.test_macos_pkg: &test_macos_pkg
  stage: test
  script:
@@ -272,6 +302,45 @@ test_macos11_py37_mil:
- coremltools/converters/mil/mil/**/*.{py}
- coremltools/converters/mil/backend/**/*.{py}

#########################################################################
##
## macOS - Smoke Test on older versions
##
#########################################################################

test_macos11_py38_coremltools_smoke_test:
  <<: *test_macos_pkg
  tags:
    - macos11
  dependencies:
    - build_wheel_macos_py38
  variables:
    WHEEL_PATH: build/dist/*cp38*10_16*
    TEST_PACKAGE: coremltools.test.neural_network.test_simple_nn_inference
    PYTHON: "3.8"

test_macos15_py38_coremltools_smoke_test:
  <<: *test_macos_pkg
  tags:
    - macos10.15
  dependencies:
    - build_wheel_macos_py38
  variables:
    WHEEL_PATH: build/dist/*cp38*10_15*
    TEST_PACKAGE: coremltools.test.neural_network.test_simple_nn_inference
    PYTHON: "3.8"

test_macos14_py38_coremltools_smoke_test:
  <<: *test_macos_pkg
  tags:
    - macos10.14
  dependencies:
    - build_wheel_macos_py38
  variables:
    WHEEL_PATH: build/dist/*cp38*10_14*
    TEST_PACKAGE: coremltools.test.neural_network.test_simple_nn_inference
    PYTHON: "3.8"

#########################################################################
##
## Make docs
@@ -314,6 +383,10 @@ collect_artifacts:
echo "Collect artifacts (wheels and documentation)"
dependencies:
- build_wheel_linux_py27
- build_wheel_linux_py35
- build_wheel_linux_py36
- build_wheel_linux_py37
- build_wheel_linux_py38
- build_wheel_macos_py27
- build_wheel_macos_py35
- build_wheel_macos_py36
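The three new smoke-test jobs above all point `TEST_PACKAGE` at `coremltools.test.neural_network.test_simple_nn_inference`, so each older-macOS runner only has to build a trivial model and run one prediction on it. Below is a minimal sketch of that kind of check using the public `NeuralNetworkBuilder` and `MLModel` APIs; it is illustrative only, not the contents of the actual test file.

```python
# Illustrative prediction smoke test in the spirit of
# coremltools.test.neural_network.test_simple_nn_inference (not the actual
# test file): build a tiny pass-through network and verify that predict()
# runs and returns the expected values. Requires macOS for the prediction.
import numpy as np

import coremltools
from coremltools.models import datatypes
from coremltools.models.neural_network import NeuralNetworkBuilder

input_features = [("data", datatypes.Array(3))]
output_features = [("out", datatypes.Array(3))]

# A single LINEAR activation with default alpha/beta acts as an identity layer.
builder = NeuralNetworkBuilder(input_features, output_features)
builder.add_activation(
    name="linear", non_linearity="LINEAR", input_name="data", output_name="out"
)

model = coremltools.models.MLModel(builder.spec)
prediction = model.predict({"data": np.array([1.0, 2.0, 3.0])}, useCPUOnly=True)
np.testing.assert_allclose(prediction["out"], [1.0, 2.0, 3.0])
```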
2 changes: 1 addition & 1 deletion README.md
@@ -29,7 +29,7 @@ With coremltools, you can do the following:
To get the latest version of coremltools:

```shell
pip install coremltools==4.0b3
pip install coremltools==4.0b4
```

For the latest changes please see the [release notes](https://github.com/apple/coremltools/releases/).
10 changes: 7 additions & 3 deletions coremlpython/CoreMLPython.h
@@ -51,10 +51,14 @@ namespace CoreML {
std::unique_ptr<NNBuffer::NeuralNetworkBuffer> nnBuffer;

public:
NeuralNetworkBufferInformation(const std::string& bufferFilePath, NNBuffer::bufferMode mode);
NeuralNetworkBufferInformation(const std::string& bufferFilePath, NNBuffer::BufferMode mode);
~NeuralNetworkBufferInformation();
std::vector<float> getBuffer(const u_int64_t offset);
u_int64_t addBuffer(const std::vector<float>& buffer);

template <typename T>
u_int64_t addBuffer(const std::vector<T>& buffer);

template <typename T>
std::vector<T> getBuffer(const u_int64_t offset);
};
}
}
33 changes: 20 additions & 13 deletions coremlpython/CoreMLPython.mm
@@ -134,7 +134,7 @@
/*
* NeuralNetworkBuffer - NeuralNetworkBuffer
*/
NeuralNetworkBufferInformation::NeuralNetworkBufferInformation(const std::string &bufferFilePath, NNBuffer::bufferMode mode)
NeuralNetworkBufferInformation::NeuralNetworkBufferInformation(const std::string &bufferFilePath, NNBuffer::BufferMode mode)
: nnBuffer(std::make_unique<NNBuffer::NeuralNetworkBuffer>(bufferFilePath, mode))
{
}
@@ -149,18 +149,20 @@
* Writes given buffer into file
* Returns offset from the beginning of buffer
*/
inline u_int64_t NeuralNetworkBufferInformation::addBuffer(const std::vector<float> &buffer) {
return nnBuffer->addBuffer(buffer);
template <typename T>
inline u_int64_t NeuralNetworkBufferInformation::addBuffer(const std::vector<T>& buffer) {
return nnBuffer->AddBuffer(buffer);
}

/*
* NeuralNetworkBufferInformation - getBuffer
* Reads buffer from given offset and of given size and writes to data
*/
inline std::vector<float> NeuralNetworkBufferInformation::getBuffer(const u_int64_t offset) {
template <typename T>
inline std::vector<T> NeuralNetworkBufferInformation::getBuffer(const u_int64_t offset) {
// TODO: Explore Pybind11 Opaque to pass vector by reference
std::vector<float> buffer;
nnBuffer->getBuffer(offset, buffer);
std::vector<T> buffer;
nnBuffer->GetBuffer(offset, buffer);
return buffer;
}

@@ -180,13 +182,18 @@
.def("print", &NeuralNetworkShapeInformation::print);

py::class_<NeuralNetworkBufferInformation> netBuffer(m, "_NeuralNetworkBuffer");
netBuffer.def(py::init<const std::string&, NNBuffer::bufferMode>())
.def("add_buffer", &NeuralNetworkBufferInformation::addBuffer)
.def("get_buffer", &NeuralNetworkBufferInformation::getBuffer);
py::enum_<NNBuffer::bufferMode>(netBuffer, "mode")
.value("write", NNBuffer::bufferMode::write)
.value("append", NNBuffer::bufferMode::append)
.value("read", NNBuffer::bufferMode::read)
netBuffer.def(py::init<const std::string&, NNBuffer::BufferMode>())
.def("add_buffer_float", &NeuralNetworkBufferInformation::addBuffer<float>)
.def("add_buffer_int", &NeuralNetworkBufferInformation::addBuffer<int32_t>)
.def("add_buffer_bool", &NeuralNetworkBufferInformation::addBuffer<uint8_t>)
.def("get_buffer_float", &NeuralNetworkBufferInformation::getBuffer<float>)
.def("get_buffer_int", &NeuralNetworkBufferInformation::getBuffer<int32_t>)
.def("get_buffer_bool", &NeuralNetworkBufferInformation::getBuffer<uint8_t>);

py::enum_<NNBuffer::BufferMode>(netBuffer, "mode")
.value("write", NNBuffer::BufferMode::Write)
.value("append", NNBuffer::BufferMode::Append)
.value("read", NNBuffer::BufferMode::Read)
.export_values();

return m.ptr();
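The header and binding changes above replace the float-only `addBuffer`/`getBuffer` pair with typed template instantiations exposed as `add_buffer_float` / `add_buffer_int` / `add_buffer_bool` and the matching `get_buffer_*` readers. A rough usage sketch from the Python side follows; the import path (`coremltools.libcoremlpython`) and the file location are assumptions rather than something this commit shows — only the class, method, and enum names come from the pybind11 registration above.

```python
# Rough sketch of driving the typed buffer bindings from Python. The module
# path (coremltools.libcoremlpython) and the temporary file path are
# assumptions; the method and enum names are taken from the binding code above.
from coremltools import libcoremlpython as _lib

# Open a weight-buffer file for writing and append one float blob;
# add_buffer_* returns the blob's offset within the file.
writer = _lib._NeuralNetworkBuffer(
    "/tmp/weights.bin", _lib._NeuralNetworkBuffer.mode.write
)
offset = writer.add_buffer_float([0.25, 0.5, 0.75])
del writer  # close the file before reading it back

# Reopen in read mode and fetch the blob by offset; get_buffer_* returns a
# plain Python list of the requested element type.
reader = _lib._NeuralNetworkBuffer(
    "/tmp/weights.bin", _lib._NeuralNetworkBuffer.mode.read
)
values = reader.get_buffer_float(offset)  # -> [0.25, 0.5, 0.75]
```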
4 changes: 2 additions & 2 deletions coremltools/_deps/__init__.py
@@ -86,10 +86,10 @@ def __get_sklearn_version(version):
_HAS_TF = True
_HAS_TF_1 = False
_HAS_TF_2 = False
_TF_1_MIN_VERSION = "1.0.0"
_TF_1_MIN_VERSION = "1.12.0"
_TF_1_MAX_VERSION = "1.15.0"
_TF_2_MIN_VERSION = "2.1.0"
_TF_2_MAX_VERSION = "2.2.0"
_TF_2_MAX_VERSION = "2.3.0"

try:
    import tensorflow
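The bumped constants above raise `_TF_1_MIN_VERSION` to 1.12.0 and `_TF_2_MAX_VERSION` to 2.3.0, narrowing and extending the TensorFlow range that coremltools treats as tested. A small sketch of the kind of gate such bounds typically feed is shown below; it is illustrative only, not the actual `_deps` logic.

```python
# Illustrative sketch (not the actual coremltools._deps implementation):
# compare an installed TensorFlow version against the tested range and
# complain when it falls outside. Version constants are copied from the diff.
from distutils.version import StrictVersion

_TF_2_MIN_VERSION = "2.1.0"
_TF_2_MAX_VERSION = "2.3.0"


def check_tf2_version(installed_version):
    installed = StrictVersion(installed_version)
    if installed < StrictVersion(_TF_2_MIN_VERSION):
        raise ImportError(
            "TensorFlow %s is older than the minimum supported version %s"
            % (installed_version, _TF_2_MIN_VERSION)
        )
    if installed > StrictVersion(_TF_2_MAX_VERSION):
        print(
            "TensorFlow %s has not been tested with coremltools; "
            "the most recent tested version is %s."
            % (installed_version, _TF_2_MAX_VERSION)
        )


check_tf2_version("2.3.0")  # within the tested range: no warning raised
```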
3 changes: 2 additions & 1 deletion coremltools/converters/_converters_entry.py
@@ -327,7 +327,8 @@ def _flatten_list(_inputs):
    if convert_to == 'mil':
        return proto_spec  # Returns the MIL program

    model = coremltools.models.MLModel(proto_spec, useCPUOnly=True)
    useCPUOnly = kwargs.get("useCPUOnly", True)
    model = coremltools.models.MLModel(proto_spec, useCPUOnly=useCPUOnly)

    if minimum_deployment_target is not None:
        check_deployment_compatibility(
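With this change, `convert()` stops hard-coding `useCPUOnly=True` when constructing the returned `MLModel` and instead reads the flag from `**kwargs`, keeping `True` as the default. A hedged usage sketch follows; the toy Keras model is a made-up placeholder, and only the `useCPUOnly` keyword and its default come from the diff above.

```python
# Sketch of opting out of CPU-only prediction after this change. The toy Keras
# model is a placeholder; only the useCPUOnly keyword (default True) is taken
# from the diff above.
import coremltools as ct
import tensorflow as tf

# Hypothetical single-layer model standing in for a real network.
keras_model = tf.keras.Sequential([tf.keras.layers.Dense(1, input_shape=(4,))])

# Previously the converted MLModel was always built with useCPUOnly=True;
# passing useCPUOnly=False lets predictions use the GPU / Neural Engine.
mlmodel = ct.convert(keras_model, useCPUOnly=False)
```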