From 9e0a5869eff9929d42586663a3f7ffe10c2e8d75 Mon Sep 17 00:00:00 2001
From: Xinhao Zhuang
Date: Fri, 23 Aug 2024 17:46:42 +0800
Subject: [PATCH] Remove dup line in `llama3_1_8b.yaml`

This line duplicates the previous line. If it is not removed, loading the
config fails with an omegaconf error: 'found duplicate key sequence_parallel'.

Separately, could you explain why an independent 'llama3_1_xb' configuration
is necessary when the 'llama3_xb' configuration files already exist?
---
 launcher_scripts/conf/training/llama/llama3_1_8b.yaml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/launcher_scripts/conf/training/llama/llama3_1_8b.yaml b/launcher_scripts/conf/training/llama/llama3_1_8b.yaml
index 2f7838644..5a82f5abb 100644
--- a/launcher_scripts/conf/training/llama/llama3_1_8b.yaml
+++ b/launcher_scripts/conf/training/llama/llama3_1_8b.yaml
@@ -124,7 +124,6 @@ model:
   num_micro_batches_with_partial_activation_checkpoints: null
   activations_checkpoint_layers_per_pipeline: null
   sequence_parallel: false
-  sequence_parallel: false
   deterministic_mode: false
 
   ## Transformer Engine
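
Minimal repro of the error described above, as a standalone sketch appended for
reference (it is not part of the patch). It assumes only that omegaconf and
PyYAML are installed; the trimmed YAML snippet and variable names are
illustrative, not taken from the launcher code.

    from omegaconf import OmegaConf

    # Same shape as the pre-patch llama3_1_8b.yaml, trimmed to the duplicated key.
    duplicated_yaml = """
    model:
      sequence_parallel: false
      sequence_parallel: false
    """

    try:
        OmegaConf.create(duplicated_yaml)
    except Exception as err:
        # OmegaConf's YAML loader rejects duplicate mapping keys; the error
        # message is expected to mention: found duplicate key "sequence_parallel"
        print(err)

With the duplicate line removed, the same snippet loads cleanly, which is the
behavior this patch restores when the launcher composes the training config.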