peft.py 775 B

12345678910111213141516171819202122232425262728
  1. # Copyright (c) Meta Platforms, Inc. and affiliates.
  2. # This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
  3. from dataclasses import dataclass, field
  4. from typing import List
  5. @dataclass
  6. class lora_config:
  7. r: int=8
  8. lora_alpha: int=32
  9. target_modules: List[str] = field(default_factory=lambda: ["q_proj", "v_proj"])
  10. bias= "none"
  11. task_type: str= "CAUSAL_LM"
  12. lora_dropout: float=0.05
  13. inference_mode: bool = False
  14. @dataclass
  15. class llama_adapter_config:
  16. adapter_len: int= 10
  17. adapter_layers: int= 30
  18. task_type: str= "CAUSAL_LM"
  19. # CAUTION: prefix tuning is currently not supported.
  20. @dataclass
  21. class prefix_config:
  22. num_virtual_tokens: int=30
  23. task_type: str= "CAUSAL_LM"