Browse code

Update src/llama_recipes/finetuning.py

Matthias Reso, 9 months ago
Parent
Current commit
3cf3966801
1 file changed, 1 insertion(+), 1 deletion(-)
  1. 1 1
      src/llama_recipes/finetuning.py

+ 1 - 1
src/llama_recipes/finetuning.py

@@ -149,7 +149,7 @@ def main(**kwargs):
             wandb_run.config.update(peft_config)
         model.print_trainable_parameters()


-    hsdp_device_mesh = None
+    hsdp_device_mesh_plan = None
     if fsdp_config.hsdp and fsdp_config.sharding_strategy == ShardingStrategy.HYBRID_SHARD:
         hsdp_device_mesh_plan = hsdp_device_mesh(replica_group_size=fsdp_config.replica_group_size, sharding_group_size=fsdp_config.sharding_group_size)
         print("HSDP device mesh is ready")