
Skip test_batching when tokenizer is missing

Matthias Reso, 9 months ago
Parent
Current commit
cb3658f284
1 file changed, 2 insertions, 0 deletions

src/tests/test_batching.py  +2 −0

@@ -25,6 +25,7 @@ EXPECTED_SAMPLE_NUMBER ={
     }
 }
 
+@pytest.mark.skip_missing_tokenizer
 @patch('llama_recipes.finetuning.train')
 @patch('llama_recipes.finetuning.AutoTokenizer')
 @patch("llama_recipes.finetuning.AutoConfig.from_pretrained")
@@ -90,6 +91,7 @@ def test_packing(
         assert batch["attention_mask"][0].size(0) == 4096
 
 
+@pytest.mark.skip_missing_tokenizer
 @patch("llama_recipes.finetuning.torch.cuda.is_available")
 @patch('llama_recipes.finetuning.train')
 @patch('llama_recipes.finetuning.AutoTokenizer')
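
The `skip_missing_tokenizer` marker itself is not defined in this diff; custom markers like this are normally registered and enforced in the test suite's conftest.py. The sketch below is an assumption of how such a marker could be wired up with a `pytest_collection_modifyitems` hook that skips marked tests when the Llama tokenizer cannot be loaded. The helper name `tokenizer_is_available` and the model id are illustrative, not taken from the repository.

# Hypothetical conftest.py sketch (not part of this commit): skip tests marked
# with @pytest.mark.skip_missing_tokenizer when the tokenizer cannot be loaded.
import pytest


def tokenizer_is_available() -> bool:
    """Best-effort check that the (gated) Llama tokenizer can be loaded."""
    try:
        from transformers import AutoTokenizer
        # Model id is illustrative; the real suite may use a different checkpoint.
        AutoTokenizer.from_pretrained("meta-llama/Llama-2-7b-hf")
        return True
    except Exception:
        return False


def pytest_configure(config):
    # Register the custom marker so pytest does not emit an "unknown marker" warning.
    config.addinivalue_line(
        "markers",
        "skip_missing_tokenizer: skip test if the Llama tokenizer is unavailable",
    )


def pytest_collection_modifyitems(config, items):
    # If the tokenizer loads, run everything; otherwise skip the marked tests.
    if tokenizer_is_available():
        return
    skip_marker = pytest.mark.skip(reason="Llama tokenizer not available")
    for item in items:
        if "skip_missing_tokenizer" in item.keywords:
            item.add_marker(skip_marker)

With a hook like this in place, the two decorated tests in test_batching.py would be reported as skipped (rather than failing) on machines without access to the tokenizer, while running normally everywhere else.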