From 27fed1f7e3e7e6562746168b157a80c74109cb8f Mon Sep 17 00:00:00 2001
From: Jiangtao Hu
Date: Thu, 7 Nov 2024 16:14:01 -0800
Subject: [PATCH] [Misc] report relevant env vars in collect_env.py tool
 (#9293)

Signed-off-by: Isotr0py <2037008807@qq.com>
---
 collect_env.py | 28 +++++++++++++++++++++++++---
 1 file changed, 25 insertions(+), 3 deletions(-)

diff --git a/collect_env.py b/collect_env.py
index 80403d576d78f..254c19b19a5ac 100644
--- a/collect_env.py
+++ b/collect_env.py
@@ -1,17 +1,19 @@
 # ruff: noqa
 # code borrowed from https://github.com/pytorch/pytorch/blob/main/torch/utils/collect_env.py
-# Unlike the rest of the PyTorch this file must be python2 compliant.
-# This script outputs relevant system environment info
-# Run it with `python collect_env.py` or `python -m torch.utils.collect_env`
 import datetime
 import locale
 import os
 import re
 import subprocess
 import sys
 
+# Unlike the rest of the PyTorch this file must be python2 compliant.
+# This script outputs relevant system environment info
+# Run it with `python collect_env.py` or `python -m torch.utils.collect_env`
 from collections import namedtuple
 
+from vllm.envs import environment_variables
+
 try:
     import torch
     TORCH_AVAILABLE = True
@@ -52,6 +54,7 @@
     'vllm_version',  # vllm specific field
     'vllm_build_flags',  # vllm specific field
     'gpu_topo',  # vllm specific field
+    'env_vars',
 ])
 
 DEFAULT_CONDA_PATTERNS = {
@@ -512,6 +515,22 @@ def is_xnnpack_available():
     else:
         return "N/A"
 
+def get_env_vars():
+    env_vars = ''
+    secret_terms=('secret', 'token', 'api', 'access', 'password')
+    report_prefix = ("TORCH", "NCCL", "PYTORCH",
+                     "CUDA", "CUBLAS", "CUDNN",
+                     "OMP_", "MKL_",
+                     "NVIDIA")
+    for k, v in os.environ.items():
+        if any(term in k.lower() for term in secret_terms):
+            continue
+        if k in environment_variables:
+            env_vars = env_vars + "{}={}".format(k, v) + "\n"
+        if k.startswith(report_prefix):
+            env_vars = env_vars + "{}={}".format(k, v) + "\n"
+
+    return env_vars
 
 def get_env_info():
     run_lambda = run
@@ -583,6 +602,7 @@ def get_version_or_na(cfg, prefix):
         vllm_version=vllm_version,
         vllm_build_flags=vllm_build_flags,
         gpu_topo=gpu_topo,
+        env_vars=get_env_vars(),
     )
 
 
@@ -631,6 +651,8 @@ def get_version_or_na(cfg, prefix):
 {vllm_build_flags}
 
 GPU Topology:
 {gpu_topo}
+
+{env_vars}
 
 """.strip()
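
As a quick sanity check of the behavior this patch adds, the filtering logic can be exercised outside a vLLM checkout. The sketch below is a minimal standalone approximation, not the patched module itself: the environment_variables set is a hypothetical stand-in for vllm.envs.environment_variables, and the two separate name/prefix checks from the patch are merged into one condition, so a variable matching both is reported once rather than twice.

# Standalone sketch of the env-var reporting added above.
# Assumption: a placeholder environment_variables set stands in for
# vllm.envs.environment_variables, which requires a vLLM installation.
import os

environment_variables = {"VLLM_HOST_IP", "VLLM_PORT", "VLLM_LOGGING_LEVEL"}  # hypothetical subset

def get_env_vars_sketch():
    env_vars = ''
    # Skip anything whose name suggests a credential.
    secret_terms = ('secret', 'token', 'api', 'access', 'password')
    # Runtime/library prefixes that are useful in bug reports.
    report_prefix = ("TORCH", "NCCL", "PYTORCH",
                     "CUDA", "CUBLAS", "CUDNN",
                     "OMP_", "MKL_",
                     "NVIDIA")
    for k, v in os.environ.items():
        if any(term in k.lower() for term in secret_terms):
            continue
        # The patch checks vLLM-defined names and prefixes in two separate ifs;
        # they are merged here for the sketch.
        if k in environment_variables or k.startswith(report_prefix):
            env_vars += "{}={}\n".format(k, v)
    return env_vars

if __name__ == "__main__":
    print(get_env_vars_sketch(), end="")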