@@ -332,14 +332,14 @@ def main():
     # train_ds_w = monai.data.Dataset(data=train_files_w, transform=train_transforms)
     # val_ds = monai.data.Dataset(data=val_files, transform=val_transforms)
 
-    train_loader_a = ThreadDataLoader(train_ds_a, num_workers=0, batch_size=num_images_per_batch, shuffle=True)
-    train_loader_w = ThreadDataLoader(train_ds_w, num_workers=0, batch_size=num_images_per_batch, shuffle=True)
-    val_loader = ThreadDataLoader(val_ds, num_workers=0, batch_size=1, shuffle=False)
+    train_loader_a = ThreadDataLoader(train_ds_a, num_workers=4, batch_size=num_images_per_batch, shuffle=True)
+    train_loader_w = ThreadDataLoader(train_ds_w, num_workers=4, batch_size=num_images_per_batch, shuffle=True)
+    val_loader = ThreadDataLoader(val_ds, num_workers=4, batch_size=1, shuffle=False)
 
     # DataLoader can be used as an alternative when ThreadDataLoader is less efficient.
-    # train_loader_a = DataLoader(train_ds_a, batch_size=num_images_per_batch, shuffle=True, num_workers=2, pin_memory=torch.cuda.is_available())
-    # train_loader_w = DataLoader(train_ds_w, batch_size=num_images_per_batch, shuffle=True, num_workers=2, pin_memory=torch.cuda.is_available())
-    # val_loader = DataLoader(val_ds, batch_size=1, shuffle=False, num_workers=2, pin_memory=torch.cuda.is_available())
+    # train_loader_a = DataLoader(train_ds_a, batch_size=num_images_per_batch, shuffle=True, num_workers=4, pin_memory=torch.cuda.is_available())
+    # train_loader_w = DataLoader(train_ds_w, batch_size=num_images_per_batch, shuffle=True, num_workers=4, pin_memory=torch.cuda.is_available())
+    # val_loader = DataLoader(val_ds, batch_size=1, shuffle=False, num_workers=4, pin_memory=torch.cuda.is_available())
 
     dints_space = monai.networks.nets.TopologySearch(
         channel_mul=0.5,
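Not part of the diff itself: a minimal, self-contained sketch of the trade-off this hunk touches, pairing ThreadDataLoader with a cached dataset versus falling back to a multi-process DataLoader. The synthetic volumes, the ScaleIntensityd transform, and the batch size below are placeholders for illustration, not values taken from this PR.

```python
# Hedged sketch only; dataset contents and transform chain are placeholders.
import torch
from monai.data import CacheDataset, DataLoader, ThreadDataLoader
from monai.transforms import Compose, ScaleIntensityd

# Tiny synthetic, channel-first volumes standing in for the real training files.
data = [{"image": torch.rand(1, 32, 32, 32)} for _ in range(8)]
transforms = Compose([ScaleIntensityd(keys="image")])

# ThreadDataLoader pairs well with a cached dataset: transforms run once when the
# cache is built, so per-iteration work is light and thread-based buffering avoids
# most multiprocessing overhead.
train_ds = CacheDataset(data=data, transform=transforms, cache_rate=1.0)
train_loader = ThreadDataLoader(train_ds, num_workers=4, batch_size=2, shuffle=True)

# Process-based DataLoader with pinned memory is the fallback when per-iteration
# transforms stay expensive, as noted in the commented-out lines in the hunk.
alt_loader = DataLoader(train_ds, batch_size=2, shuffle=True, num_workers=4,
                        pin_memory=torch.cuda.is_available())

for batch in train_loader:
    print(batch["image"].shape)  # torch.Size([2, 1, 32, 32, 32])
    break
```

Raising num_workers from 0 to 4, as this hunk does, mainly helps when loading and transforming still dominate each iteration; with a fully cached dataset the gain is usually modest.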