TPU support fix

This commit is contained in:
Zchen
2025-10-12 20:15:02 +08:00
parent 11af77f382
commit 68538746da

View File

@@ -193,7 +193,6 @@ class BrainToTextDecoder_Trainer:
if use_tpu:
# For TPU, create a custom DataLoader that properly handles our batch-returning Dataset
# TPU requires specific DataLoader configuration to avoid batch_sampler issues
from torch.utils.data import DataLoader
self.train_loader = DataLoader(
self.train_dataset,
batch_size = None, # None because our Dataset returns batches