pyproject.toml

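# Build with hatchling; the hatch-requirements-txt plugin supplies the dynamic
# runtime dependencies from requirements.txt (see the hook registered below).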
[build-system]
requires = ["hatchling", "hatch-requirements-txt"]
build-backend = "hatchling.build"

[project]
name = "llama-recipes"
version = "0.0.4"
authors = [
  { name="Hamid Shojanazeri", email="hamidnazeri@meta.com" },
  { name="Matthias Reso", email="mreso@meta.com" },
  { name="Geeta Chauhan", email="gchauhan@meta.com" },
]
description = "Llama-recipes is a companion project to the Llama models. Its goal is to provide examples to quickly get started with fine-tuning for domain adaptation and with running inference for the fine-tuned models."
readme = "README.md"
requires-python = ">=3.8"
classifiers = [
  "Programming Language :: Python :: 3",
  "License :: Other/Proprietary License",
  "Operating System :: OS Independent",
]
dynamic = ["dependencies"]

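# Optional extras; install with e.g. `pip install llama-recipes[vllm]` or `pip install llama-recipes[tests]`.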
[project.optional-dependencies]
vllm = ["vllm"]
tests = ["pytest-mock"]
auditnlg = ["auditnlg"]
langchain = ["langchain_openai", "langchain", "langchain_community"]

[project.urls]
"Homepage" = "https://github.com/facebookresearch/llama-recipes/"
"Bug Tracker" = "https://github.com/facebookresearch/llama-recipes/issues"

[tool.hatch.build]
exclude = [
  "dist/*",
]

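# src layout: only the llama_recipes package under src/ is shipped in the wheel.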
[tool.hatch.build.targets.wheel]
packages = ["src/llama_recipes"]

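# Resolve the dynamic "dependencies" field declared under [project] from requirements.txt at build time.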
[tool.hatch.metadata.hooks.requirements_txt]
files = ["requirements.txt"]

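# Register the custom marker so pytest does not emit unknown-marker warnings.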
[tool.pytest.ini_options]
markers = [
  "skip_missing_tokenizer: skip tests when we cannot access meta-llama/Llama-2-7b-hf on the Hugging Face Hub (log in with `huggingface-cli login` to unskip).",
]