Adding test cases for the bugs found: local_rank None and output dir not created yet

Beto committed 1 year ago
parent commit bdc7f44be2
2 changed files with 112 additions and 2 deletions
  1. src/llama_recipes/utils/train_utils.py (+5 -1)
  2. tests/test_train_utils.py (+107 -1)

+ 5 - 1
src/llama_recipes/utils/train_utils.py

@@ -69,7 +69,7 @@ def train(model, train_dataloader,eval_dataloader, tokenizer, optimizer, lr_sche
     val_loss =[]
 
     if train_config.save_metrics:
-        metrics_filename = f"{train_config.output_dir}/metrics_data_{local_rank}-{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.json"
+        metrics_filename = f"{train_config.output_dir}/metrics_data_{local_rank or '0'}-{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.json"
         train_step_perplexity = []
         train_step_loss = []
         val_step_loss = []
@@ -465,5 +465,9 @@ def save_to_json(output_filename, train_step_loss, train_epoch_loss, train_step_
         "val_step_perplexity": val_step_ppl,
         "val_epoch_perplexity": val_epoch_ppl
     }
+    dir_path = os.path.dirname(output_filename)
+    if not os.path.exists(dir_path):
+        os.makedirs(dir_path, exist_ok=True)
+
     with open(output_filename, "w") as f:
         json.dump(metrics_data, f)
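
Taken together, the two hunks above cover both bugs from the commit message: a None local_rank no longer leaks into the metrics filename, and the output directory is created before the JSON file is written. A minimal standalone sketch of the patched behavior (illustrative only, not part of the commit; the directory names are made up):

import os
from datetime import datetime

def metrics_path(output_dir, local_rank):
    # `local_rank or '0'` maps None (non-distributed runs) to '0';
    # it also maps rank 0 to '0', which matches the intended filename.
    timestamp = datetime.now().strftime('%Y-%m-%d_%H-%M-%S')
    return f"{output_dir}/metrics_data_{local_rank or '0'}-{timestamp}.json"

print(metrics_path("out", None))  # out/metrics_data_0-<timestamp>.json
print(metrics_path("out", 1))     # out/metrics_data_1-<timestamp>.json

# The save_to_json side of the fix: create the parent directory before
# writing; exist_ok=True makes the call a no-op when it already exists.
filename = metrics_path("some/random/path", None)
os.makedirs(os.path.dirname(filename), exist_ok=True)
with open(filename, "w") as f:
    f.write("{}")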

+ 107 - 1
tests/test_train_utils.py

@@ -22,6 +22,13 @@ def temp_output_dir():
     # Delete the directory during the session-level teardown
     shutil.rmtree(temp_output_dir)
 
+@pytest.fixture(scope="session")
+def path_to_generate():
+    path_to_generate = "some/random/path"
+    yield path_to_generate
+
+    shutil.rmtree(path_to_generate, ignore_errors=True)
+
 
 @patch("llama_recipes.utils.train_utils.MemoryTrace")
 @patch("llama_recipes.utils.train_utils.nullcontext")
@@ -81,7 +88,7 @@ def test_gradient_accumulation(autocast, scaler, nullcontext, mem_trace, mocker)
     assert nullcontext.call_count == 0
     assert autocast.call_count == 5
 
-def test_save_to_json(temp_output_dir, mocker):
+def test_save_to_json_file_exists(temp_output_dir, mocker):
     model = mocker.MagicMock(name="model")
     model().loss.__truediv__().detach.return_value = torch.tensor(1)
     mock_tensor = mocker.MagicMock(name="tensor")
@@ -115,4 +122,103 @@ def test_save_to_json(temp_output_dir, mocker):
     assert results["metrics_filename"] not in ["", None]
     assert os.path.isfile(results["metrics_filename"])
 
+def test_save_to_json_file_name_local_rank_0(temp_output_dir, mocker):
+    model = mocker.MagicMock(name="model")
+    model().loss.__truediv__().detach.return_value = torch.tensor(1)
+    mock_tensor = mocker.MagicMock(name="tensor")
+    batch = {"input": mock_tensor}
+    train_dataloader = [batch, batch, batch, batch, batch]
+    eval_dataloader = None
+    tokenizer = mocker.MagicMock()
+    optimizer = mocker.MagicMock()
+    lr_scheduler = mocker.MagicMock()
+    gradient_accumulation_steps = 1
+    train_config = mocker.MagicMock()
+    train_config.enable_fsdp = False
+    train_config.use_fp16 = False
+    train_config.run_validation = False
+    train_config.gradient_clipping = False
+    train_config.save_metrics = True
+    train_config.output_dir = temp_output_dir
+
+    results = train(
+        model,
+        train_dataloader,
+        eval_dataloader,
+        tokenizer,
+        optimizer,
+        lr_scheduler,
+        gradient_accumulation_steps,
+        train_config
+    )
+
+    assert "None" not in results["metrics_filename"]
+    assert "metrics_data_0" in results["metrics_filename"]
+
+def test_save_to_json_file_name_local_rank_1(temp_output_dir, mocker):
+    model = mocker.MagicMock(name="model")
+    model().loss.__truediv__().detach.return_value = torch.tensor(1)
+    mock_tensor = mocker.MagicMock(name="tensor")
+    batch = {"input": mock_tensor}
+    train_dataloader = [batch, batch, batch, batch, batch]
+    eval_dataloader = None
+    tokenizer = mocker.MagicMock()
+    optimizer = mocker.MagicMock()
+    lr_scheduler = mocker.MagicMock()
+    gradient_accumulation_steps = 1
+    train_config = mocker.MagicMock()
+    train_config.enable_fsdp = False
+    train_config.use_fp16 = False
+    train_config.run_validation = False
+    train_config.gradient_clipping = False
+    train_config.save_metrics = True
+    train_config.output_dir = temp_output_dir
+
+    results = train(
+        model,
+        train_dataloader,
+        eval_dataloader,
+        tokenizer,
+        optimizer,
+        lr_scheduler,
+        gradient_accumulation_steps,
+        train_config,
+        local_rank=1
+    )
+
+    assert "None" not in results["metrics_filename"]
+    assert "metrics_data_1" in results["metrics_filename"]
+
+def test_save_to_json_creates_missing_folder(path_to_generate, mocker):
+    model = mocker.MagicMock(name="model")
+    model().loss.__truediv__().detach.return_value = torch.tensor(1)
+    mock_tensor = mocker.MagicMock(name="tensor")
+    batch = {"input": mock_tensor}
+    train_dataloader = [batch, batch, batch, batch, batch]
+    eval_dataloader = None
+    tokenizer = mocker.MagicMock()
+    optimizer = mocker.MagicMock()
+    lr_scheduler = mocker.MagicMock()
+    gradient_accumulation_steps = 1
+    train_config = mocker.MagicMock()
+    train_config.enable_fsdp = False
+    train_config.use_fp16 = False
+    train_config.run_validation = False
+    train_config.gradient_clipping = False
+    train_config.save_metrics = True
+    train_config.output_dir = path_to_generate
+
+    results = train(
+        model,
+        train_dataloader,
+        eval_dataloader,
+        tokenizer,
+        optimizer,
+        lr_scheduler,
+        gradient_accumulation_steps,
+        train_config
+    )
+    
+    assert os.path.isfile(results["metrics_filename"])
+
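
To run just the new cases (assuming a standard pytest setup; the tests rely on the pytest-mock `mocker` fixture):

python -m pytest tests/test_train_utils.py -k "save_to_json" -v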