# Data loader setup.
batch_size = 32   # Number of samples per batch
num_workers = 0   # Subprocesses used for loading the data (0 = load in main process)

# Shuffle every epoch in the training set to avoid training biases
# introduced by a fixed sample ordering.
train_loader = torch.utils.data.DataLoader(
    dataset=train_dataset,
    batch_size=batch_size,
    num_workers=num_workers,
    shuffle=True,
)

print(train_loader)
print(train_loader.__dict__)  # Information held by the dataloader (debug inspection)