From 0b48b50d0321faf05492b93d509538a3c356dfe2 Mon Sep 17 00:00:00 2001 From: Chun-Hsiang Wang Date: Mon, 22 Aug 2022 17:45:22 -0700 Subject: [PATCH] chore: Remove outer directories for building CPR images. (#1572) * chore: Remove outer directories for building CPR images. * chore: Fixed comments. --- google/cloud/aiplatform/docker_utils/build.py | 51 +++--- .../aiplatform/prediction/local_model.py | 47 ++--- .../aiplatform/prediction/model_server.py | 5 - .../aiplatform/utils/prediction_utils.py | 1 - tests/unit/aiplatform/test_docker_utils.py | 170 +++++++++++++----- tests/unit/aiplatform/test_prediction.py | 7 - tests/unit/aiplatform/test_utils.py | 2 +- 7 files changed, 176 insertions(+), 107 deletions(-) diff --git a/google/cloud/aiplatform/docker_utils/build.py b/google/cloud/aiplatform/docker_utils/build.py index 5469bf2a66..f8cbf500a2 100644 --- a/google/cloud/aiplatform/docker_utils/build.py +++ b/google/cloud/aiplatform/docker_utils/build.py @@ -117,31 +117,25 @@ def _prepare_dependency_entries( ) if requirements_path is not None: - ret += _generate_copy_command( - requirements_path, - "./requirements.txt", - comment="requirements.txt file specified, thus copy it to the docker container.", - ) + ret += textwrap.dedent( """ - RUN {} install --no-cache-dir {} -r ./requirements.txt + RUN {} install --no-cache-dir {} -r {} """.format( pip_command, "--force-reinstall" if force_reinstall else "", + requirements_path, ) ) if extra_packages is not None: - for extra in extra_packages: - package_name = os.path.basename(extra) + for package in extra_packages: ret += textwrap.dedent( """ - {} RUN {} install --no-cache-dir {} {} """.format( - _generate_copy_command(extra, package_name), pip_command, "--force-reinstall" if force_reinstall else "", - quote(package_name), + quote(package), ) ) @@ -190,24 +184,18 @@ def _prepare_entrypoint(package: Package, python_command: str = "python") -> str return "\nENTRYPOINT {}\n".format(exec_str) -def _prepare_package_entry(package: Package) -> str: - """Returns the Dockerfile entries required to append at the end before entrypoint. - - Including: - - copy the parent directory of the main executable into a docker container. - - inject an entrypoint that executes a script or python module inside that - directory. +def _copy_source_directory() -> str: + """Returns the Dockerfile entry required to copy the package to the image. - Args: - package (Package): - Required. The main application copied to and run in the container. + The Docker build context has been changed to host_workdir. We copy all + the files to the working directory of images. Returns: - The generated package related command used in Dockerfile. + The generated package related copy command used in Dockerfile. """ copy_code = _generate_copy_command( ".", # Dockerfile context location has been changed to host_workdir - Path(package.package_path).name, + ".", # Copy all the files to the working directory of images. comment="Copy the source directory into the docker container.", ) @@ -275,14 +263,18 @@ def _get_relative_path_to_workdir( The relative path to the workdir or None if path is None. Raises: - ValueError: If the path is not relative to the workdir. + ValueError: If the path does not exist or is not relative to the workdir.
""" if path is None: return None + if not Path(path).is_file(): + raise ValueError(f'The {value_name} "{path}" must exist.') if not path_utils._is_relative_to(path, workdir): raise ValueError(f'The {value_name} "{path}" must be in "{workdir}".') - return Path(path).relative_to(workdir).as_posix() + abs_path = Path(path).expanduser().resolve() + abs_workdir = Path(workdir).expanduser().resolve() + return Path(abs_path).relative_to(abs_workdir).as_posix() def make_dockerfile( @@ -382,8 +374,10 @@ def make_dockerfile( environment_variables=environment_variables ) - # Installs packages from requirements_path which copies requirements_path - # to the image before installing. + # Copies user code to the image. + dockerfile += _copy_source_directory() + + # Installs packages from requirements_path. dockerfile += _prepare_dependency_entries( requirements_path=requirements_path, setup_path=None, @@ -394,9 +388,6 @@ def make_dockerfile( pip_command=pip_command, ) - # Copies user code to the image. - dockerfile += _prepare_package_entry(main_package) - # Installs additional packages from user code. dockerfile += _prepare_dependency_entries( requirements_path=None, diff --git a/google/cloud/aiplatform/prediction/local_model.py b/google/cloud/aiplatform/prediction/local_model.py index c3ca86431f..f05dfad40d 100644 --- a/google/cloud/aiplatform/prediction/local_model.py +++ b/google/cloud/aiplatform/prediction/local_model.py @@ -16,7 +16,6 @@ # from copy import copy -from pathlib import Path from typing import Dict, List, Optional, Sequence, Type from google.cloud import aiplatform @@ -161,37 +160,42 @@ def build_cpr_model( This method builds a docker image to include user-provided predictor, and handler. An example src_dir (e.g. "./user_src_dir") provided looks like: - . - |-- user_src_dir/ - |-- predictor.py - |-- requirements.txt - |-- user_code/ - | |-- utils.py - | |-- custom_package.tar.gz - | |-- ... - |-- ... + user_src_dir/ + |-- predictor.py + |-- requirements.txt + |-- user_code/ + | |-- utils.py + | |-- custom_package.tar.gz + | |-- ... + |-- ... To build a custom container: local_model = LocalModel.build_cpr_model( "./user_src_dir", - "us-docker.pkg.dev/[PROJECT]/[REPOSITORY]/[IMAGE_NAME]", - predictor=[CUSTOM_PREDICTOR_CLASS], + "us-docker.pkg.dev/$PROJECT/$REPOSITORY/$IMAGE_NAME$", + predictor=$CUSTOM_PREDICTOR_CLASS, requirements_path="./user_src_dir/requirements.txt", extra_packages=["./user_src_dir/user_code/custom_package.tar.gz"], ) In the built image, it will look like: container_workdir/ + |-- predictor.py |-- requirements.txt - |-- custom_package.tar.gz - |-- user_src_dir/ - |-- predictor.py - |-- requirements.txt - |-- user_code/ - | |-- utils.py - | |-- custom_package.tar.gz - | |-- ... - |-- ... + |-- user_code/ + | |-- utils.py + | |-- custom_package.tar.gz + | |-- ... + |-- ... + + If you have any files or directories in the src_dir you would like to exclude in built + images, you could add a file, .dockerignore, to the root of the src_dir and list all of + them in it. See https://docs.docker.com/engine/reference/builder/#dockerignore-file for + more details about the .dockerignore file. + + In order to save and restore class instances transparently with Pickle, the class definition + must be importable and live in the same module as when the object was stored. If you want to + use Pickle, you must save your objects right under the src_dir you provide. The created CPR images default the number of model server workers to the number of cores. 
Depending on the characteristics of your model, you may need to adjust the number of workers. @@ -252,7 +256,6 @@ def build_cpr_model( handler, src_dir ) environment_variables = { - "CPR_USER_DIR_NAME": Path(src_dir).name, "HANDLER_MODULE": handler_module, "HANDLER_CLASS": handler_class, } diff --git a/google/cloud/aiplatform/prediction/model_server.py b/google/cloud/aiplatform/prediction/model_server.py index 97cfeb7c0b..a0ffa894e6 100644 --- a/google/cloud/aiplatform/prediction/model_server.py +++ b/google/cloud/aiplatform/prediction/model_server.py @@ -19,7 +19,6 @@ import logging import multiprocessing import os -import sys import traceback try: @@ -187,10 +186,6 @@ def set_number_of_workers_from_env() -> None: if __name__ == "__main__": - cpr_user_dir_name = os.getenv("CPR_USER_DIR_NAME") - if cpr_user_dir_name: - sys.path.insert(0, os.path.join(os.getcwd(), cpr_user_dir_name)) - set_number_of_workers_from_env() uvicorn.run( "google.cloud.aiplatform.prediction.model_server:CprModelServer", diff --git a/google/cloud/aiplatform/utils/prediction_utils.py b/google/cloud/aiplatform/utils/prediction_utils.py index af0c8592a1..9cdfa14d17 100644 --- a/google/cloud/aiplatform/utils/prediction_utils.py +++ b/google/cloud/aiplatform/utils/prediction_utils.py @@ -70,7 +70,6 @@ def inspect_source_from_class( custom_class_import_path.stem ) custom_class_import = custom_class_import_path.as_posix().replace(os.sep, ".") - custom_class_import = f"{src_dir_abs_path.name}.{custom_class_import}" return custom_class_import, custom_class_name diff --git a/tests/unit/aiplatform/test_docker_utils.py b/tests/unit/aiplatform/test_docker_utils.py index 62645b08eb..ac8cfd124b 100644 --- a/tests/unit/aiplatform/test_docker_utils.py +++ b/tests/unit/aiplatform/test_docker_utils.py @@ -513,8 +513,9 @@ def test_print_container_logs_with_message(self, docker_container_mock): class TestBuild: BASE_IMAGE = "python:3.7" + SOURCE_DIR = "src" HOST_WORKDIR_BASENAME = "user_code" - HOST_WORKDIR = f"./src/{HOST_WORKDIR_BASENAME}" + HOST_WORKDIR = f"./{SOURCE_DIR}/{HOST_WORKDIR_BASENAME}" HOME = utils.DEFAULT_HOME WORKDIR = utils.DEFAULT_WORKDIR SCRIPT = "./user_code/entrypoint.py" @@ -544,7 +545,7 @@ def test_make_dockerfile(self): assert f"FROM {self.BASE_IMAGE}\n" in result assert f"WORKDIR {self.WORKDIR}\n" in result assert f"ENV HOME={self.HOME}\n" in result - assert f'COPY [".", "{self.HOST_WORKDIR_BASENAME}"]\n' in result + assert 'COPY [".", "."]\n' in result assert f'ENTRYPOINT ["python", "{self.SCRIPT}"]' in result def test_make_dockerfile_with_python_module(self): @@ -555,7 +556,7 @@ def test_make_dockerfile_with_python_module(self): assert f"FROM {self.BASE_IMAGE}\n" in result assert f"WORKDIR {self.WORKDIR}\n" in result assert f"ENV HOME={self.HOME}\n" in result - assert f'COPY [".", "{self.HOST_WORKDIR_BASENAME}"]\n' in result + assert 'COPY [".", "."]\n' in result assert f'ENTRYPOINT ["python", "-m", "{self.PYTHON_MODULE}"]' in result def test_make_dockerfile_no_script_and_module(self): @@ -566,7 +567,7 @@ def test_make_dockerfile_no_script_and_module(self): assert f"FROM {self.BASE_IMAGE}\n" in result assert f"WORKDIR {self.WORKDIR}\n" in result assert f"ENV HOME={self.HOME}\n" in result - assert f'COPY [".", "{self.HOST_WORKDIR_BASENAME}"]\n' in result + assert 'COPY [".", "."]\n' in result assert "ENTRYPOINT" not in result def test_make_dockerfile_with_requirements_path(self): @@ -583,11 +584,10 @@ def test_make_dockerfile_with_requirements_path(self): assert f"FROM {self.BASE_IMAGE}\n" in result assert 
f"WORKDIR {self.WORKDIR}\n" in result assert f"ENV HOME={self.HOME}\n" in result - assert f'COPY [".", "{self.HOST_WORKDIR_BASENAME}"]\n' in result + assert 'COPY [".", "."]\n' in result assert f'ENTRYPOINT ["python", "{self.SCRIPT}"]' in result - assert f'COPY ["{requirements_path}", "./requirements.txt"]\n' in result assert ( - "RUN pip install --no-cache-dir --force-reinstall -r ./requirements.txt\n" + f"RUN pip install --no-cache-dir --force-reinstall -r {requirements_path}\n" in result ) @@ -605,7 +605,7 @@ def test_make_dockerfile_with_setup_path(self): assert f"FROM {self.BASE_IMAGE}\n" in result assert f"WORKDIR {self.WORKDIR}\n" in result assert f"ENV HOME={self.HOME}\n" in result - assert f'COPY [".", "{self.HOST_WORKDIR_BASENAME}"]\n' in result + assert 'COPY [".", "."]\n' in result assert f'ENTRYPOINT ["python", "{self.SCRIPT}"]' in result assert f'COPY ["{setup_path}", "./setup.py"]\n' in result assert "RUN pip install --no-cache-dir --force-reinstall .\n" in result @@ -624,7 +624,7 @@ def test_make_dockerfile_with_extra_requirements(self): assert f"FROM {self.BASE_IMAGE}\n" in result assert f"WORKDIR {self.WORKDIR}\n" in result assert f"ENV HOME={self.HOME}\n" in result - assert f'COPY [".", "{self.HOST_WORKDIR_BASENAME}"]\n' in result + assert 'COPY [".", "."]\n' in result assert f'ENTRYPOINT ["python", "{self.SCRIPT}"]' in result assert ( f"RUN pip install --no-cache-dir --force-reinstall {extra_requirement}\n" @@ -646,11 +646,10 @@ def test_make_dockerfile_with_extra_packages(self): assert f"FROM {self.BASE_IMAGE}\n" in result assert f"WORKDIR {self.WORKDIR}\n" in result assert f"ENV HOME={self.HOME}\n" in result - assert f'COPY [".", "{self.HOST_WORKDIR_BASENAME}"]\n' in result + assert 'COPY [".", "."]\n' in result assert f'ENTRYPOINT ["python", "{self.SCRIPT}"]' in result - assert f'COPY ["{extra_package}", "{extra_package_basename}"]\n' assert ( - f"RUN pip install --no-cache-dir --force-reinstall {extra_package_basename}\n" + f"RUN pip install --no-cache-dir --force-reinstall {extra_package}\n" in result ) @@ -668,7 +667,7 @@ def test_make_dockerfile_with_extra_dirs(self): assert f"FROM {self.BASE_IMAGE}\n" in result assert f"WORKDIR {self.WORKDIR}\n" in result assert f"ENV HOME={self.HOME}\n" in result - assert f'COPY [".", "{self.HOST_WORKDIR_BASENAME}"]\n' in result + assert 'COPY [".", "."]\n' in result assert f'ENTRYPOINT ["python", "{self.SCRIPT}"]' in result assert f'COPY ["{extra_dir}", "{extra_dir}"]\n' in result @@ -686,7 +685,7 @@ def test_make_dockerfile_with_exposed_ports(self): assert f"FROM {self.BASE_IMAGE}\n" in result assert f"WORKDIR {self.WORKDIR}\n" in result assert f"ENV HOME={self.HOME}\n" in result - assert f'COPY [".", "{self.HOST_WORKDIR_BASENAME}"]\n' in result + assert 'COPY [".", "."]\n' in result assert f'ENTRYPOINT ["python", "{self.SCRIPT}"]' in result assert f"EXPOSE {exposed_port}\n" in result @@ -707,7 +706,7 @@ def test_make_dockerfile_with_environment_variables(self): assert f"FROM {self.BASE_IMAGE}\n" in result assert f"WORKDIR {self.WORKDIR}\n" in result assert f"ENV HOME={self.HOME}\n" in result - assert f'COPY [".", "{self.HOST_WORKDIR_BASENAME}"]\n' in result + assert 'COPY [".", "."]\n' in result assert f'ENTRYPOINT ["python", "{self.SCRIPT}"]' in result assert "ENV FAKE_ENV1=FAKE_VALUE1\n" in result assert "ENV FAKE_ENV2=FAKE_VALUE2\n" in result @@ -844,20 +843,27 @@ def test_build_image_with_extra_requirements( assert image.default_workdir == self.WORKDIR def test_build_image_with_requirements_path( - self, 
make_dockerfile_mock, execute_command_mock + self, tmp_path, make_dockerfile_mock, execute_command_mock ): + source_dir = tmp_path / self.SOURCE_DIR + source_dir.mkdir() + host_workdir = tmp_path / self.HOST_WORKDIR + host_workdir.mkdir() + requirements_file = host_workdir / self.REQUIREMENTS_FILE + requirements_file.write_text("") + image = build.build_image( self.BASE_IMAGE, - self.HOST_WORKDIR, + host_workdir.as_posix(), self.OUTPUT_IMAGE_NAME, - requirements_path=f"{self.HOST_WORKDIR}/{self.REQUIREMENTS_FILE}", + requirements_path=requirements_file.as_posix(), ) make_dockerfile_mock.assert_called_once_with( self.BASE_IMAGE, utils.Package( script=None, - package_path=self.HOST_WORKDIR, + package_path=host_workdir.as_posix(), python_module=None, ), utils.DEFAULT_WORKDIR, @@ -880,7 +886,7 @@ def test_build_image_with_requirements_path( self.OUTPUT_IMAGE_NAME, "--rm", "-f-", - self.HOST_WORKDIR, + host_workdir.as_posix(), ], input_str=make_dockerfile_mock.return_value, ) @@ -888,9 +894,9 @@ def test_build_image_with_requirements_path( assert image.default_home == self.HOME assert image.default_workdir == self.WORKDIR - def test_build_image_invalid_requirements_path(self, make_dockerfile_mock): + def test_build_image_not_found_requirements_path(self, make_dockerfile_mock): requirements_path = f"./another_src/{self.REQUIREMENTS_FILE}" - expected_message = f'The requirements_path "{requirements_path}" must be in "{self.HOST_WORKDIR}".' + expected_message = f'The requirements_path "{requirements_path}" must exist.' with pytest.raises(ValueError) as exception: _ = build.build_image( @@ -903,21 +909,54 @@ def test_build_image_invalid_requirements_path(self, make_dockerfile_mock): assert not make_dockerfile_mock.called assert str(exception.value) == expected_message + def test_build_image_invalid_requirements_path( + self, tmp_path, make_dockerfile_mock + ): + source_dir = tmp_path / self.SOURCE_DIR + source_dir.mkdir() + host_workdir = tmp_path / self.HOST_WORKDIR + host_workdir.mkdir() + another_dir = tmp_path / "another_dir" + another_dir.mkdir() + requirements_file = another_dir / self.REQUIREMENTS_FILE + requirements_file.write_text("") + expected_message = ( + f'The requirements_path "{requirements_file}" must be in "{host_workdir}".' 
+ ) + + with pytest.raises(ValueError) as exception: + _ = build.build_image( + self.BASE_IMAGE, + host_workdir.as_posix(), + self.OUTPUT_IMAGE_NAME, + requirements_path=requirements_file.as_posix(), + ) + + assert not make_dockerfile_mock.called + assert str(exception.value) == expected_message + def test_build_image_with_setup_path( - self, make_dockerfile_mock, execute_command_mock + self, tmp_path, make_dockerfile_mock, execute_command_mock ): + source_dir = tmp_path / self.SOURCE_DIR + source_dir.mkdir() + host_workdir = tmp_path / self.HOST_WORKDIR + host_workdir.mkdir() + setup_file = host_workdir / self.SETUP_FILE + setup_file.write_text("") + image = build.build_image( self.BASE_IMAGE, - self.HOST_WORKDIR, + host_workdir.as_posix(), self.OUTPUT_IMAGE_NAME, - setup_path=f"{self.HOST_WORKDIR}/{self.SETUP_FILE}", + setup_path=setup_file.as_posix(), ) make_dockerfile_mock.assert_called_once_with( self.BASE_IMAGE, utils.Package( script=None, - package_path=self.HOST_WORKDIR, + package_path=host_workdir.as_posix(), python_module=None, ), utils.DEFAULT_WORKDIR, @@ -940,7 +979,7 @@ def test_build_image_with_setup_path( self.OUTPUT_IMAGE_NAME, "--rm", "-f-", - self.HOST_WORKDIR, + host_workdir.as_posix(), ], input_str=make_dockerfile_mock.return_value, ) @@ -948,11 +987,9 @@ def test_build_image_with_setup_path( assert image.default_home == self.HOME assert image.default_workdir == self.WORKDIR - def test_build_image_invalid_setup_path(self, make_dockerfile_mock): + def test_build_image_not_found_setup_path(self, make_dockerfile_mock): setup_path = f"./another_src/{self.SETUP_FILE}" - expected_message = ( - f'The setup_path "{setup_path}" must be in "{self.HOST_WORKDIR}".' - ) + expected_message = f'The setup_path "{setup_path}" must exist.' with pytest.raises(ValueError) as exception: _ = build.build_image( @@ -965,21 +1002,50 @@ def test_build_image_invalid_setup_path(self, make_dockerfile_mock): assert not make_dockerfile_mock.called assert str(exception.value) == expected_message + def test_build_image_invalid_setup_path(self, tmp_path, make_dockerfile_mock): + source_dir = tmp_path / self.SOURCE_DIR + source_dir.mkdir() + host_workdir = tmp_path / self.HOST_WORKDIR + host_workdir.mkdir() + another_dir = tmp_path / "another_dir" + another_dir.mkdir() + setup_file = another_dir / self.SETUP_FILE + setup_file.write_text("") + expected_message = f'The setup_path "{setup_file}" must be in "{host_workdir}".' 
+ + with pytest.raises(ValueError) as exception: + _ = build.build_image( + self.BASE_IMAGE, + host_workdir.as_posix(), + self.OUTPUT_IMAGE_NAME, + setup_path=setup_file.as_posix(), + ) + + assert not make_dockerfile_mock.called + assert str(exception.value) == expected_message + def test_build_image_with_extra_packages( - self, make_dockerfile_mock, execute_command_mock + self, tmp_path, make_dockerfile_mock, execute_command_mock ): + source_dir = tmp_path / self.SOURCE_DIR + source_dir.mkdir() + host_workdir = tmp_path / self.HOST_WORKDIR + host_workdir.mkdir() + extra_package = host_workdir / self.EXTRA_PACKAGE + extra_package.write_text("") + image = build.build_image( self.BASE_IMAGE, - self.HOST_WORKDIR, + host_workdir.as_posix(), self.OUTPUT_IMAGE_NAME, - extra_packages=[f"{self.HOST_WORKDIR}/{self.EXTRA_PACKAGE}"], + extra_packages=[extra_package.as_posix()], ) make_dockerfile_mock.assert_called_once_with( self.BASE_IMAGE, utils.Package( script=None, - package_path=self.HOST_WORKDIR, + package_path=host_workdir.as_posix(), python_module=None, ), utils.DEFAULT_WORKDIR, @@ -1002,7 +1068,7 @@ def test_build_image_with_extra_packages( self.OUTPUT_IMAGE_NAME, "--rm", "-f-", - self.HOST_WORKDIR, + host_workdir.as_posix(), ], input_str=make_dockerfile_mock.return_value, ) @@ -1010,20 +1076,42 @@ def test_build_image_with_extra_packages( assert image.default_home == self.HOME assert image.default_workdir == self.WORKDIR + def test_build_image_not_found_extra_packages(self, make_dockerfile_mock): + extra_package = f"./another_src/{self.EXTRA_PACKAGE}" + expected_message = f'The extra_packages "{extra_package}" must exist.' + + with pytest.raises(ValueError) as exception: + _ = build.build_image( + self.BASE_IMAGE, + self.HOST_WORKDIR, + self.OUTPUT_IMAGE_NAME, + extra_packages=[extra_package], + ) + + assert not make_dockerfile_mock.called + assert str(exception.value) == expected_message + def test_build_image_invalid_extra_packages( - self, make_dockerfile_mock, execute_command_mock + self, tmp_path, make_dockerfile_mock, execute_command_mock ): - extra_package = f"./another_src/{self.EXTRA_PACKAGE}" + source_dir = tmp_path / self.SOURCE_DIR + source_dir.mkdir() + host_workdir = tmp_path / self.HOST_WORKDIR + host_workdir.mkdir() + another_dir = tmp_path / "another_dir" + another_dir.mkdir() + extra_package = another_dir / self.EXTRA_PACKAGE + extra_package.write_text("") expected_message = ( - f'The extra_packages "{extra_package}" must be in "{self.HOST_WORKDIR}".' + f'The extra_packages "{extra_package}" must be in "{host_workdir}".' 
) with pytest.raises(ValueError) as exception: _ = build.build_image( self.BASE_IMAGE, - self.HOST_WORKDIR, + host_workdir.as_posix(), self.OUTPUT_IMAGE_NAME, - extra_packages=[extra_package], + extra_packages=[extra_package.as_posix()], ) assert not make_dockerfile_mock.called diff --git a/tests/unit/aiplatform/test_prediction.py b/tests/unit/aiplatform/test_prediction.py index b73396b6f0..7f01f573ad 100644 --- a/tests/unit/aiplatform/test_prediction.py +++ b/tests/unit/aiplatform/test_prediction.py @@ -1220,7 +1220,6 @@ class {predictor_class}: extra_packages=None, exposed_ports=[DEFAULT_HTTP_PORT], environment_variables={ - "CPR_USER_DIR_NAME": _TEST_SRC_DIR, "HANDLER_MODULE": _DEFAULT_HANDLER_MODULE, "HANDLER_CLASS": _DEFAULT_HANDLER_CLASS, "PREDICTOR_MODULE": f"{_TEST_SRC_DIR}.{_TEST_PREDICTOR_FILE_STEM}", @@ -1335,7 +1334,6 @@ class {handler_class}: extra_packages=None, exposed_ports=[DEFAULT_HTTP_PORT], environment_variables={ - "CPR_USER_DIR_NAME": _TEST_SRC_DIR, "HANDLER_MODULE": f"{_TEST_SRC_DIR}.{_TEST_HANDLER_FILE_STEM}", "HANDLER_CLASS": _TEST_HANDLER_CLASS, "PREDICTOR_MODULE": f"{_TEST_SRC_DIR}.{_TEST_PREDICTOR_FILE_STEM}", @@ -1392,7 +1390,6 @@ class {handler_class}: extra_packages=None, exposed_ports=[DEFAULT_HTTP_PORT], environment_variables={ - "CPR_USER_DIR_NAME": _TEST_SRC_DIR, "HANDLER_MODULE": f"{_TEST_SRC_DIR}.{_TEST_HANDLER_FILE_STEM}", "HANDLER_CLASS": _TEST_HANDLER_CLASS, }, @@ -1446,7 +1443,6 @@ class {predictor_class}: extra_packages=None, exposed_ports=[DEFAULT_HTTP_PORT], environment_variables={ - "CPR_USER_DIR_NAME": _TEST_SRC_DIR, "HANDLER_MODULE": _DEFAULT_HANDLER_MODULE, "HANDLER_CLASS": _DEFAULT_HANDLER_CLASS, "PREDICTOR_MODULE": f"{_TEST_SRC_DIR}.{_TEST_PREDICTOR_FILE_STEM}", @@ -1504,7 +1500,6 @@ class {predictor_class}: extra_packages=None, exposed_ports=[DEFAULT_HTTP_PORT], environment_variables={ - "CPR_USER_DIR_NAME": _TEST_SRC_DIR, "HANDLER_MODULE": _DEFAULT_HANDLER_MODULE, "HANDLER_CLASS": _DEFAULT_HANDLER_CLASS, "PREDICTOR_MODULE": f"{_TEST_SRC_DIR}.{_TEST_PREDICTOR_FILE_STEM}", @@ -1562,7 +1557,6 @@ class {predictor_class}: extra_packages=extra_packages, exposed_ports=[DEFAULT_HTTP_PORT], environment_variables={ - "CPR_USER_DIR_NAME": _TEST_SRC_DIR, "HANDLER_MODULE": _DEFAULT_HANDLER_MODULE, "HANDLER_CLASS": _DEFAULT_HANDLER_CLASS, "PREDICTOR_MODULE": f"{_TEST_SRC_DIR}.{_TEST_PREDICTOR_FILE_STEM}", @@ -1617,7 +1611,6 @@ class {predictor_class}: extra_packages=None, exposed_ports=[DEFAULT_HTTP_PORT], environment_variables={ - "CPR_USER_DIR_NAME": _TEST_SRC_DIR, "HANDLER_MODULE": _DEFAULT_HANDLER_MODULE, "HANDLER_CLASS": _DEFAULT_HANDLER_CLASS, "PREDICTOR_MODULE": f"{_TEST_SRC_DIR}.{_TEST_PREDICTOR_FILE_STEM}", diff --git a/tests/unit/aiplatform/test_utils.py b/tests/unit/aiplatform/test_utils.py index 4d47cae929..2f460ce9be 100644 --- a/tests/unit/aiplatform/test_utils.py +++ b/tests/unit/aiplatform/test_utils.py @@ -646,7 +646,7 @@ class {custom_class}: my_custom_class, str(src_dir) ) - assert class_import == f"{self.SRC_DIR}.{self.CUSTOM_CLASS_FILE_STEM}" + assert class_import == f"{self.CUSTOM_CLASS_FILE_STEM}" assert class_name == self.CUSTOM_CLASS def test_inspect_source_from_class_fails_class_not_in_source(self, tmp_path):
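For reference, the sketch below is a hedged approximation in plain Python of the Dockerfile fragments that docker_utils.build emits after this patch. The helper names (emit_copy_source_directory, emit_dependency_entries) and the FROM/WORKDIR/HOME values are illustrative only, not the library's actual API or defaults. It shows the two effects the updated tests assert on: the build context is now host_workdir itself and is copied with a single COPY [".", "."], and requirements and extra packages are installed from their in-context paths rather than being copied into the image first.

from shlex import quote


def emit_copy_source_directory() -> str:
    # The build context is now host_workdir itself, so the whole source tree is
    # copied straight into the image's working directory (no outer directory).
    return '\nCOPY [".", "."]\n'


def emit_dependency_entries(
    requirements_path=None,
    extra_packages=None,
    pip_command="pip",
    force_reinstall=False,
) -> str:
    args = [pip_command, "install", "--no-cache-dir"]
    if force_reinstall:
        args.append("--force-reinstall")
    ret = ""
    if requirements_path is not None:
        # requirements.txt is no longer COPY'd into the image separately; it is
        # already there because the whole source directory was copied above.
        ret += "\nRUN {} -r {}\n".format(" ".join(args), requirements_path)
    for package in extra_packages or []:
        # Extra packages are likewise installed from their in-context path
        # instead of being copied to the image root first.
        ret += "\nRUN {} {}\n".format(" ".join(args), quote(package))
    return ret


if __name__ == "__main__":
    # FROM/WORKDIR/HOME values below are placeholders, not the library defaults.
    dockerfile = "FROM python:3.7\nWORKDIR /usr/app\nENV HOME=/home\n"
    dockerfile += emit_copy_source_directory()
    dockerfile += emit_dependency_entries(
        requirements_path="requirements.txt",
        extra_packages=["user_code/custom_package.tar.gz"],
        force_reinstall=True,
    )
    print(dockerfile)

Because everything under host_workdir is copied in one step, a .dockerignore file at the root of the source directory becomes the way to keep files out of the image, which is why the local_model.py docstring now points to it. In addition, requirements_path, setup_path, and extra_packages must both exist and live inside host_workdir; otherwise _get_relative_path_to_workdir raises the "must exist." or "must be in" ValueError covered by the new tests.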