# concatenator.py
# Copyright (c) Meta Platforms, Inc. and affiliates.
# This software may be used and distributed according to the terms of the Llama 2 Community License Agreement.
from tqdm import tqdm
from itertools import chain
from torch.utils.data import Dataset
  6. class ConcatDataset(Dataset):
  7. def __init__(self, dataset, chunk_size=4096):
  8. self.dataset = dataset
  9. self.chunk_size = chunk_size
  10. self.samples = []
  11. buffer = {
  12. "input_ids": [],
  13. "attention_mask": [],
  14. "labels": [],
  15. }
  16. for sample in tqdm(self.dataset, desc="Preprocessing dataset", dynamic_ncols=True):
  17. buffer = {k: v + sample[k] for k,v in buffer.items()}
  18. while len(next(iter(buffer.values()))) > self.chunk_size:
  19. self.samples.append({k: v[:self.chunk_size] for k,v in buffer.items()})
  20. buffer = {k: v[self.chunk_size:] for k,v in buffer.items()}
  21. def __getitem__(self, idx):
  22. return self.samples[idx]
  23. def __len__(self):
  24. return len(self.samples)