瀏覽代碼

Use RuntimeError instead of assert

Matthias Reso 11 月之前
父節點
當前提交
4b93dc6080
共有 1 個文件被更改,包括 8 次插入、11 次刪除
  1. 8 11
      src/llama_recipes/utils/config_utils.py

+ 8 - 11
src/llama_recipes/utils/config_utils.py

@@ -45,17 +45,14 @@ def generate_peft_config(train_config, kwargs):
     peft_configs = (LoraConfig, AdaptionPromptConfig, PrefixTuningConfig)
     names = tuple(c.__name__.rstrip("_config") for c in configs)
 
 
-    assert (
-        train_config.peft_method in names
-    ), f"Peft config not found: {train_config.peft_method}"
-
-    assert (
-        train_config.peft_method != "prefix"
-    ), "PrefixTuning is currently not supported (see https://github.com/meta-llama/llama-recipes/issues/359#issuecomment-2089350811)"
-    if train_config.enable_fsdp:
-        assert (
-            train_config.peft_method != "llama_adapter"
-        ), "Llama_adapter is currently not supported in combination with FSDP (see https://github.com/meta-llama/llama-recipes/issues/359#issuecomment-2089274425)"
+    if train_config.peft_method not in names:
+        raise RuntimeError(f"Peft config not found: {train_config.peft_method}")
+
+    if train_config.peft_method == "prefix":
+        raise RuntimeError("PrefixTuning is currently not supported (see https://github.com/meta-llama/llama-recipes/issues/359#issuecomment-2089350811)")
+
+    if train_config.enable_fsdp and train_config.peft_method == "llama_adapter":
+        raise RuntimeError("Llama_adapter is currently not supported in combination with FSDP (see https://github.com/meta-llama/llama-recipes/issues/359#issuecomment-2089274425)")
 
 
     config = configs[names.index(train_config.peft_method)]()