
adding comment

Hamid Shojanazeri committed 2 years ago
parent
commit
db1fd97f35
1 changed file with 1 addition and 1 deletion
  1. src/llama_recipes/finetuning.py (+1, -1)

src/llama_recipes/finetuning.py (+1, -1)

@@ -75,7 +75,7 @@ def main(**kwargs):
             from torch.distributed.optim import _apply_optimizer_in_backward
             optimizer_in_backward_available = True
         except ImportError:
-            print("The required module for optimizer overlap in 'torch.distributed.optim' is not available.")
+            print("The required module for optimizer overlap in 'torch.distributed.optim' is not available, skipping applying optimizer overlap.")
             
     # Load the pre-trained model and setup its configuration
     use_cache = False if train_config.enable_fsdp else None
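The hunk above only rewords the warning printed when optimizer overlap is unavailable. For context, below is a minimal sketch (not the repository's actual code) of how such an availability flag is typically used to gate the overlap: it assumes the prototype signature of torch.distributed.optim._apply_optimizer_in_backward (optimizer class, parameters, optimizer kwargs) and uses a placeholder model and learning rate purely for illustration.

    # Minimal sketch, not from this commit: gating optimizer overlap on the
    # availability flag. Model and hyperparameters are placeholders.
    import torch
    import torch.nn as nn

    optimizer_in_backward_available = False
    try:
        from torch.distributed.optim import _apply_optimizer_in_backward
        optimizer_in_backward_available = True
    except ImportError:
        print("The required module for optimizer overlap in 'torch.distributed.optim' "
              "is not available, skipping applying optimizer overlap.")

    model = nn.Linear(16, 4)  # placeholder model for illustration

    if optimizer_in_backward_available:
        # Register per-parameter optimizers that step during backward,
        # overlapping the optimizer update with gradient computation.
        _apply_optimizer_in_backward(
            torch.optim.AdamW,
            model.parameters(),
            {"lr": 1e-4},
        )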