conftest.py

# Copyright (c) Meta Platforms, Inc. and affiliates.
# This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.

import pytest

from transformers import LlamaTokenizer

ACCESS_ERROR_MSG = "Could not access tokenizer at 'meta-llama/Llama-2-7b-hf'. Did you log into the Hugging Face Hub and provide the correct token?"

unskip_missing_tokenizer = False


@pytest.fixture(scope="module")
def llama_tokenizer():
    # Load the gated Llama 2 tokenizer once per test module. If it cannot be
    # downloaded (e.g. no Hugging Face token), return None so dependent tests
    # can be skipped instead of erroring out.
    try:
        return LlamaTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")
    except OSError as e:
        if unskip_missing_tokenizer:
            raise e
        return None


@pytest.fixture
def setup_tokenizer(llama_tokenizer):
    def _helper(tokenizer_mock):
        # Align the mocked tokenizer with the real Llama 2 tokenizer
        tokenizer_mock.from_pretrained.return_value = llama_tokenizer
    return _helper


@pytest.fixture(autouse=True)
def skip_if_tokenizer_is_missing(request, llama_tokenizer):
    # Tests marked with @pytest.mark.skip_missing_tokenizer are skipped when the
    # tokenizer could not be loaded, unless --unskip-missing-tokenizer was passed.
    if request.node.get_closest_marker("skip_missing_tokenizer") and not unskip_missing_tokenizer:
        if llama_tokenizer is None:
            pytest.skip(ACCESS_ERROR_MSG)


def pytest_addoption(parser):
    parser.addoption(
        "--unskip-missing-tokenizer",
        action="store_true",
        default=False,
        help="disable skip missing tokenizer")


@pytest.hookimpl(tryfirst=True)
def pytest_cmdline_preparse(config, args):
    # Read the flag before collection so the fixtures above see the final value.
    if "--unskip-missing-tokenizer" not in args:
        return

    global unskip_missing_tokenizer
    unskip_missing_tokenizer = True
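

# --- Usage sketch (not part of the original file) ---
# A minimal sketch of how a test module living next to this conftest.py might
# combine the `skip_missing_tokenizer` marker with the `setup_tokenizer`
# fixture. The patch target `llama_recipes.finetuning.LlamaTokenizer` and the
# test name are assumptions made for illustration only; pytest does not
# collect test functions from conftest.py, so this block is inert here.
from unittest.mock import patch


@pytest.mark.skip_missing_tokenizer
@patch("llama_recipes.finetuning.LlamaTokenizer")
def test_mocked_tokenizer_matches_llama2(tokenizer_mock, setup_tokenizer):
    # Point the mocked class at the real Llama 2 tokenizer loaded above.
    setup_tokenizer(tokenizer_mock)
    tokenizer = tokenizer_mock.from_pretrained("meta-llama/Llama-2-7b-hf")
    # If the tokenizer could not be downloaded, the autouse fixture has
    # already skipped this test, so `tokenizer` is a real tokenizer here.
    assert tokenizer is not None
    assert len(tokenizer.encode("Hello world")) > 0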