chore: Remove outer directories for building CPR images. (#1572)
* chore: Remove outer directories for building CPR images.

* chore: Fixed comments.
abcdefgs0324 authored Aug 23, 2022
1 parent 4e7175f commit 0b48b50
Showing 7 changed files with 176 additions and 107 deletions.
51 changes: 21 additions & 30 deletions google/cloud/aiplatform/docker_utils/build.py
@@ -117,31 +117,25 @@ def _prepare_dependency_entries(
)

if requirements_path is not None:
ret += _generate_copy_command(
requirements_path,
"./requirements.txt",
comment="requirements.txt file specified, thus copy it to the docker container.",
) + textwrap.dedent(
ret += textwrap.dedent(
"""
RUN {} install --no-cache-dir {} -r ./requirements.txt
RUN {} install --no-cache-dir {} -r {}
""".format(
pip_command,
"--force-reinstall" if force_reinstall else "",
requirements_path,
)
)

if extra_packages is not None:
for extra in extra_packages:
package_name = os.path.basename(extra)
for package in extra_packages:
ret += textwrap.dedent(
"""
{}
RUN {} install --no-cache-dir {} {}
""".format(
_generate_copy_command(extra, package_name),
pip_command,
"--force-reinstall" if force_reinstall else "",
quote(package_name),
quote(package),
)
)
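For illustration, a minimal runnable sketch (not the library's code; names mirror the snippet above) of the Dockerfile text the revised helper now emits, with no per-file COPY step for requirements or extra packages:

    import textwrap
    from shlex import quote

    def sketch_dependency_entries(requirements_path=None, extra_packages=None,
                                  force_reinstall=False, pip_command="pip"):
        # With the source tree already copied into the image workdir, the RUN
        # lines reference paths inside the copied tree directly.
        ret = ""
        flag = "--force-reinstall" if force_reinstall else ""
        if requirements_path is not None:
            ret += textwrap.dedent(
                """
                RUN {} install --no-cache-dir {} -r {}
                """.format(pip_command, flag, requirements_path)
            )
        for package in extra_packages or []:
            ret += textwrap.dedent(
                """
                RUN {} install --no-cache-dir {} {}
                """.format(pip_command, flag, quote(package))
            )
        return ret

    print(sketch_dependency_entries(
        "requirements.txt", ["user_code/custom_package.tar.gz"]))
    # RUN pip install --no-cache-dir  -r requirements.txt
    # RUN pip install --no-cache-dir  user_code/custom_package.tar.gz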

@@ -190,24 +184,18 @@ def _prepare_entrypoint(package: Package, python_command: str = "python") -> str
return "\nENTRYPOINT {}\n".format(exec_str)


def _prepare_package_entry(package: Package) -> str:
"""Returns the Dockerfile entries required to append at the end before entrypoint.
Including:
- copy the parent directory of the main executable into a docker container.
- inject an entrypoint that executes a script or python module inside that
directory.
def _copy_source_directory() -> str:
"""Returns the Dockerfile entry required to copy the package to the image.
Args:
package (Package):
Required. The main application copied to and run in the container.
The Docker build context has been changed to host_workdir. We copy all
the files to the working directory of images.
Returns:
The generated package related command used in Dockerfile.
The generated package related copy command used in Dockerfile.
"""
copy_code = _generate_copy_command(
".", # Dockefile context location has been changed to host_workdir
Path(package.package_path).name,
".", # Copy all the files to the working directory of images.
comment="Copy the source directory into the docker container.",
)
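A hedged sketch of what this entry renders to, assuming _generate_copy_command(src, dst, comment=...) emits a comment line followed by a COPY instruction (the helper itself is not shown in this diff):

    def sketch_copy_source_directory() -> str:
        # Assumed rendering of _generate_copy_command(".", ".", comment=...):
        # a Dockerfile comment followed by a COPY of the whole build context
        # (the host_workdir) into the image working directory.
        return (
            "\n# Copy the source directory into the docker container.\n"
            "COPY . .\n"
        )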

@@ -275,14 +263,18 @@ def _get_relative_path_to_workdir(
The relative path to the workdir or None if path is None.
Raises:
ValueError: If the path is not relative to the workdir.
ValueError: If the path does not exist or is not relative to the workdir.
"""
if path is None:
return None

if not Path(path).is_file():
raise ValueError(f'The {value_name} "{path}" must exist.')
if not path_utils._is_relative_to(path, workdir):
raise ValueError(f'The {value_name} "{path}" must be in "{workdir}".')
return Path(path).relative_to(workdir).as_posix()
abs_path = Path(path).expanduser().resolve()
abs_workdir = Path(workdir).expanduser().resolve()
return Path(abs_path).relative_to(abs_workdir).as_posix()
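A usage sketch of the new path handling, substituting Path.relative_to (which raises ValueError for paths outside the workdir) for the internal path_utils._is_relative_to helper:

    from pathlib import Path

    def relative_path_to_workdir(path: str, workdir: str) -> str:
        # The file must exist; "~" and symlinks are resolved before computing
        # the path relative to the workdir, mirroring the snippet above.
        if not Path(path).is_file():
            raise ValueError(f'The path "{path}" must exist.')
        abs_path = Path(path).expanduser().resolve()
        abs_workdir = Path(workdir).expanduser().resolve()
        return abs_path.relative_to(abs_workdir).as_posix()

    # relative_path_to_workdir("./src/requirements.txt", "./src") -> "requirements.txt"
    # A path outside the workdir makes Path.relative_to raise ValueError.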


def make_dockerfile(
@@ -382,8 +374,10 @@ def make_dockerfile(
environment_variables=environment_variables
)

# Installs packages from requirements_path which copies requirements_path
# to the image before installing.
# Copies user code to the image.
dockerfile += _copy_source_directory()

# Installs packages from requirements_path.
dockerfile += _prepare_dependency_entries(
requirements_path=requirements_path,
setup_path=None,
@@ -394,9 +388,6 @@
pip_command=pip_command,
)

# Copies user code to the image.
dockerfile += _prepare_package_entry(main_package)

# Installs additional packages from user code.
dockerfile += _prepare_dependency_entries(
requirements_path=None,
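Taken together, make_dockerfile now copies the source tree before any pip install runs, so requirements_path and extra_packages can point at files inside the copied tree. A hypothetical assembled Dockerfile (base image, workdir, and handler values are assumptions, not taken from this diff):

    hypothetical_dockerfile = """\
    FROM python:3.9
    WORKDIR /usr/app
    ENV HANDLER_MODULE=predictor
    ENV HANDLER_CLASS=MyHandler
    # Copy the source directory into the docker container.
    COPY . .
    RUN pip install --no-cache-dir -r requirements.txt
    RUN pip install --no-cache-dir user_code/custom_package.tar.gz
    ENTRYPOINT ["python", "-m", "google.cloud.aiplatform.prediction.model_server"]
    """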
47 changes: 25 additions & 22 deletions google/cloud/aiplatform/prediction/local_model.py
@@ -16,7 +16,6 @@
#

from copy import copy
from pathlib import Path
from typing import Dict, List, Optional, Sequence, Type

from google.cloud import aiplatform
@@ -161,37 +160,42 @@ def build_cpr_model(
This method builds a docker image that includes the user-provided predictor and handler.
An example src_dir (e.g. "./user_src_dir") provided looks like:
.
|-- user_src_dir/
|-- predictor.py
|-- requirements.txt
|-- user_code/
| |-- utils.py
| |-- custom_package.tar.gz
| |-- ...
|-- ...
user_src_dir/
|-- predictor.py
|-- requirements.txt
|-- user_code/
| |-- utils.py
| |-- custom_package.tar.gz
| |-- ...
|-- ...
To build a custom container:
local_model = LocalModel.build_cpr_model(
"./user_src_dir",
"us-docker.pkg.dev/[PROJECT]/[REPOSITORY]/[IMAGE_NAME]",
predictor=[CUSTOM_PREDICTOR_CLASS],
"us-docker.pkg.dev/$PROJECT/$REPOSITORY/$IMAGE_NAME$",
predictor=$CUSTOM_PREDICTOR_CLASS,
requirements_path="./user_src_dir/requirements.txt",
extra_packages=["./user_src_dir/user_code/custom_package.tar.gz"],
)
In the built image, it will look like:
container_workdir/
|-- predictor.py
|-- requirements.txt
|-- custom_package.tar.gz
|-- user_src_dir/
|-- predictor.py
|-- requirements.txt
|-- user_code/
| |-- utils.py
| |-- custom_package.tar.gz
| |-- ...
|-- ...
|-- user_code/
| |-- utils.py
| |-- custom_package.tar.gz
| |-- ...
|-- ...
If you have any files or directories in the src_dir you would like to exclude from the built
image, add a .dockerignore file to the root of the src_dir and list them in it.
See https://docs.docker.com/engine/reference/builder/#dockerignore-file for
more details about the .dockerignore file.
In order to save and restore class instances transparently with Pickle, the class definition
must be importable and live in the same module as when the object was stored. If you want to
use Pickle, you must save your objects right under the src_dir you provide.
The created CPR images default the number of model server workers to the number of cores.
Depending on the characteristics of your model, you may need to adjust the number of workers.
@@ -252,7 +256,6 @@ def build_cpr_model(
handler, src_dir
)
environment_variables = {
"CPR_USER_DIR_NAME": Path(src_dir).name,
"HANDLER_MODULE": handler_module,
"HANDLER_CLASS": handler_class,
}
5 changes: 0 additions & 5 deletions google/cloud/aiplatform/prediction/model_server.py
@@ -19,7 +19,6 @@
import logging
import multiprocessing
import os
import sys
import traceback

try:
@@ -187,10 +186,6 @@ def set_number_of_workers_from_env() -> None:


if __name__ == "__main__":
cpr_user_dir_name = os.getenv("CPR_USER_DIR_NAME")
if cpr_user_dir_name:
sys.path.insert(0, os.path.join(os.getcwd(), cpr_user_dir_name))

set_number_of_workers_from_env()
uvicorn.run(
"google.cloud.aiplatform.prediction.model_server:CprModelServer",
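The removed sys.path insertion is obsolete because the handler module now lives directly in the server's working directory, which Python typically searches at startup. A hedged sketch of the resolution, with hypothetical module and class names:

    import importlib
    import os

    # With the source copied straight into the image workdir, HANDLER_MODULE is
    # a plain module name (e.g. "predictor") rather than "user_src_dir.predictor",
    # so no sys.path prefix is required. Defaults here are hypothetical.
    handler_module = os.environ.get("HANDLER_MODULE", "predictor")
    handler_class_name = os.environ.get("HANDLER_CLASS", "MyHandler")
    handler_class = getattr(importlib.import_module(handler_module), handler_class_name)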
1 change: 0 additions & 1 deletion google/cloud/aiplatform/utils/prediction_utils.py
@@ -70,7 +70,6 @@ def inspect_source_from_class(
custom_class_import_path.stem
)
custom_class_import = custom_class_import_path.as_posix().replace(os.sep, ".")
custom_class_import = f"{src_dir_abs_path.name}.{custom_class_import}"

return custom_class_import, custom_class_name

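Net effect on inspect_source_from_class, sketched with a hypothetical predictor: a class defined in ./user_src_dir/predictor.py now resolves to the import path "predictor" instead of "user_src_dir.predictor".

    from pathlib import Path

    def sketch_import_path(src_dir: str, class_file: str) -> str:
        # Module import path of class_file relative to src_dir, without the
        # outer-directory prefix that this commit removes.
        rel = Path(class_file).resolve().relative_to(Path(src_dir).resolve())
        return rel.with_suffix("").as_posix().replace("/", ".")

    # sketch_import_path("./user_src_dir", "./user_src_dir/predictor.py") -> "predictor"
    # Before this commit, the result was prefixed: "user_src_dir.predictor".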