From 612dd633a2021ea1a30be16b1802066b1fea2f28 Mon Sep 17 00:00:00 2001
From: Felipe Mello
Date: Tue, 29 Oct 2024 18:15:18 -0400
Subject: [PATCH] change nproc to 8

---
 recipes/configs/llama3_2_vision/90B_full.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/recipes/configs/llama3_2_vision/90B_full.yaml b/recipes/configs/llama3_2_vision/90B_full.yaml
index 0755efb42..a76ff8940 100644
--- a/recipes/configs/llama3_2_vision/90B_full.yaml
+++ b/recipes/configs/llama3_2_vision/90B_full.yaml
@@ -5,11 +5,11 @@
 # tune download meta-llama/Llama-3.2-90B-Vision-Instruct --output-dir /tmp/Llama-3.2-90B-Vision-Instruct --ignore-patterns "original/consolidated*"
 #
 # To launch on a single device, run the following command from root:
-# tune run --nproc_per_node 4 full_finetune_distributed --config llama3_2_vision/90B_full
+# tune run --nproc_per_node 8 full_finetune_distributed --config llama3_2_vision/90B_full
 #
 # You can add specific overrides through the command line. For example
 # to override the checkpointer directory while launching training:
-# tune run --nproc_per_node 4 full_finetune_distributed --config llama3_2_vision/90B_full checkpointer.checkpoint_dir=
+# tune run --nproc_per_node 8 full_finetune_distributed --config llama3_2_vision/90B_full checkpointer.checkpoint_dir=
 #
 # This config works best when the model is being fine-tuned on 2+ GPUs.