Skip to content

Commit

Permalink
configure: Remove --with_cuda option.
Browse files Browse the repository at this point in the history
This removes the --with_cuda option from ./configure, forcing the base
tensorflow package to always be used, rather than re-writing it to
tensorflow-gpu. Because of this, I no longer need to generate a
requirements.txt file from tools/requirements, so that can be removed,
simplifying the build process.

For now, I retain the 'with_cuda' field in the config.pbtxt proto, but
hardcode its value to False. Future patches will replace checks on
this field with calls to runtime libraries, such as using
GPUtil.getGPUs() for Python, defined in //third_party/py/gputil.

github.com//issues/73

Signed-off-by: format 2020.01.16 <github.com/ChrisCummins/format>
  • Loading branch information
ChrisCummins committed Jan 24, 2020
1 parent abcc295 commit 7e8f19f
Show file tree
Hide file tree
Showing 6 changed files with 8 additions and 59 deletions.
2 changes: 0 additions & 2 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,6 @@
/.env
/bootstrap.sh
/config.pbtxt
/requirements.txt
/third_party/py/tensorflow/BUILD
/learn/docker/clgen/*.tar.bz2
/experimental/deeplearning/clgen/docker_worker/*.tar.bz2
/experimental/deeplearning/clgen/docker_worker/cache
Expand Down
46 changes: 1 addition & 45 deletions configure
Original file line number Diff line number Diff line change
Expand Up @@ -167,18 +167,6 @@ def GetConfigurationOptions(argv):
help='Disable caching of configure result.')
parser.set_defaults(cache_result=True)

# --[no]with_cuda.
parser.add_argument(
'--with_cuda', dest='with_cuda', action='store_true',
help=('Enable CUDA support. This requires that CUDA is installed on the '
'host machine.'))
parser.add_argument(
'--nowith_cuda', dest='with_cuda', action='store_false',
help='Disable CUDA support.')
parser.set_defaults(with_cuda=GuessIfCudaIsAvailable())
yes_no_prompts.append(
('with_cuda', 'Enable CUDA support? (Stricly optional, not required)'))

args = parser.parse_args(argv)

# Prompt the user for options.
Expand Down Expand Up @@ -230,37 +218,6 @@ def main(argv):
sys.exit(1)
assert os.path.isfile(python_path)

base_requirements_txt_path = os.path.join(
PHD_ROOT, 'tools', 'requirements.txt')
assert os.path.isfile(base_requirements_txt_path)
with open(base_requirements_txt_path) as f:
base_requirements = f.read()

tensorflow_build_in_path = os.path.join(
PHD_ROOT, 'third_party/py/tensorflow/BUILD.in')
tensorflow_build_path = os.path.join(
PHD_ROOT, 'third_party/py/tensorflow/BUILD')
with open(tensorflow_build_in_path) as f:
tensorflow_build = f.read()

if args.with_cuda:
requirements_txt = base_requirements.replace(
'tensorflow=', 'tensorflow-gpu=')
tensorflow_build = tensorflow_build.replace(
'requirement("tensorflow")',
'requirement("tensorflow-gpu")')
else:
requirements_txt = base_requirements

requirements_txt_path = os.path.join(PHD_ROOT, 'requirements.txt')
with open(requirements_txt_path, 'w') as f:
f.write('# GENERATED BY ./configure. DO NOT EDIT!\n')
f.write(requirements_txt)

with open(tensorflow_build_path, 'w') as f:
f.write('# GENERATED BY ./configure. DO NOT EDIT!\n')
f.write(tensorflow_build)

# Create //config.pbtxt.
options_to_exclude = {'print_id', 'noninteractive', 'cache_result'}
options_to_export = {k: args.__dict__[k] for k in args.__dict__
Expand All @@ -276,7 +233,7 @@ def main(argv):
# Proto: phd.GlobalConfig
uname: "{uname}"
configure_id: "{config_id}"
with_cuda: {with_cuda}
with_cuda: false
options {{
{options}
}}
Expand All @@ -285,7 +242,6 @@ paths {{
python: "{python_path}"
}}
""".format(uname=uname, config_id=config_id, phd_root=PHD_ROOT,
with_cuda='true' if args.with_cuda else 'false',
options=options, python_path=python_path))

# Create the environment file.
Expand Down
7 changes: 1 addition & 6 deletions tools/requirements.txt → requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -192,12 +192,7 @@ sqlparse==0.3.0
statistics==1.0.3.5
subprocess32==3.5.0
tabulate==0.8.5
# If ./configure detects CUDA, tensorflow-gpu is used.
# Maintenance note: If changing the version of tensorflow, you must check the
# TensorFlow release notes for the required version of CUDA, and if necessary
# update the hardcoded $CUDA_HOME default in GetCudaHome() of the configure
# script.
tensorflow==1.14.0
tensorflow==1.14.0 # NOTE: Must be installed manually with `pip install`.
terminado==0.8.1
testpath==0.3.1
TogglPy==0.1.1
Expand Down
4 changes: 2 additions & 2 deletions third_party/py/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,9 @@ group multiple packages under a single bazel target (see

## To add a package

1. Add the new pip packages to `//tools/requirements.txt`.
1. Add the new pip packages to `//:requirements.txt`.
1. Create a package in this directory which contains a single `py_library` rule
and pulls in the new pip package as a `dep` (copy any of the existing
and pulls in the new pip package as a `dep` (copy any of the existing
packages as a starting point).
1. Add the `//third_party/py/<package>` dep to any python targets which require
this new module.
File renamed without changes.
8 changes: 4 additions & 4 deletions tools/source_tree/phd_workspace.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
"BUILD", # Top-level BUILD file is always needed.
"WORKSPACE", # Implicit dependency of everything.
"README.md",
"requirements.txt", # Needed by WORKSPACE.
"tools/Brewfile.travis", # Needed by Travis CI.
"tools/bzl/*", # Implicit dependency of WORKSPACE file.
"tools/BUILD", # Needed by //tools/bzl:maven_jar.bzl.
Expand All @@ -36,7 +37,6 @@
"tools/flaky_bazel.sh", # Needed by Travis CI.
"third_party/py/tensorflow/BUILD.in", # Needed by ./configure
"tools/workspace_status.sh", # Needed by .bazelrc
# tools/requirements.txt is always needed, but is handled separately.
]

# A list of relative paths to files which are excluded from export. Glob
Expand Down Expand Up @@ -150,14 +150,14 @@ def GetPythonRequirementsForTarget(
if output:
dependencies = dependencies.union(set(output.split("\n")))

with open(self.workspace_root / "tools/requirements.txt") as f:
with open(self.workspace_root / "requirements.txt") as f:
all_requirements = set(f.readlines())

needed = []
all_dependencies = set()
# This is a pretty hacky approach that tries to match the package component
# of the generated @pypi__<package>_<vesion> package to the name as it
# appears in tools/requirements.txt.
# appears in requirements.txt.
for dependency in dependencies:
if not dependency.startswith("@pypi__"):
continue
Expand Down Expand Up @@ -193,7 +193,7 @@ def CreatePythonRequirementsFileForTargets(
self, workspace: bazelutil.Workspace, targets: typing.List[str]
) -> None:
# Export the subset of python requirements that are needed.
print("tools/requirements.txt")
print("requirements.txt")
requirements = self.GetPythonRequirementsForTarget(targets)
requirements_path = workspace.workspace_root / "tools" / "requirements.txt"
with open(requirements_path, "w") as f:
Expand Down

0 comments on commit 7e8f19f

Please sign in to comment.