// Microtask-ordering demo: synchronous executor logs 1, then the
// microtask checkpoint drains handlers in queue order → 2, 4, 3.
const outer_1 = new Promise((resolve) => {
  // Promise executors run synchronously during construction.
  const inner_1 = new Promise((resolveInner) => {
    console.log(1);
    resolveInner();
  });
  const inner_2 = inner_1.then(() => {
    console.log(2);
  });
  const inner_3 = inner_2.then(() => {
    console.log(3);
  });
  resolve();
});
const outer_2 = outer_1.then(() => {
  console.log(4);
});
/*
And the execution order:
# sync
inner_1 ->
log(1)
queue microtask (inner2)
outer_1 ->
queue microtask (outer2)
# microtask-checkpoint
inner_2 ->
log(2)
queue microtask (inner3)
outer_2 ->
log(4)
inner_3 ->
log(3)
*/
这是我的自定义数据集类,我试图递归地加载目录(及其所有子目录)中的每个 .h5 文件。
我认为 `__getitem__` 方法出了点问题,但是我不确定具体是什么问题。
当我尝试用下面的代码加载数据时:
class HDF5Dataset(torch.utils.data.Dataset):
    """Dataset that recursively loads every ``.h5`` file under ``dir_path``.

    Each file is expected to hold a ``data`` dataset (an H x W x 3 image
    array convertible to uint8 RGB) and a ``label`` dataset.
    NOTE(review): label shape/dtype is assumed collatable by DataLoader —
    confirm against the files actually on disk.
    """

    def __setup_files(self):
        # BUG FIX: '**' only recurses into subdirectories when
        # recursive=True; without it the original call silently matched
        # only one directory level. sorted() makes indexing deterministic.
        pattern = os.path.join(self.dir_path, '**', '*.h5')
        return sorted(glob.glob(pattern, recursive=True))

    def __init__(self, dir_path, IMG_SIZE):
        """
        Args:
            dir_path: root directory searched recursively for .h5 files.
            IMG_SIZE: side length images are resized to (square).
        """
        self.dir_path = dir_path
        self.IMG_SIZE = IMG_SIZE
        self.files = self.__setup_files()
        self.length = len(self.files)
        self.transform = transforms.Compose([
            transforms.Resize((IMG_SIZE, IMG_SIZE)),
            transforms.RandomHorizontalFlip(p=0.5),
            transforms.ToTensor(),
            # ImageNet normalization statistics.
            transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                 std=[0.229, 0.224, 0.225])
        ])

    def __getitem__(self, idx):
        """Return (transformed image tensor, label) for file ``idx``."""
        # 'with' guarantees the file is closed even if reading raises.
        with h5py.File(self.files[idx], 'r') as h5:
            # BUG FIX: Dataset.value was removed in h5py 3.0 (the likely
            # source of the reported error); [()] reads the full array.
            image = h5['data'][()]
            label = h5['label'][()]
        image = Image.fromarray(np.asarray(image).astype('uint8'), 'RGB')
        return self.transform(image), label

    def __len__(self):
        return self.length
和此代码
# Wrap the training dataset in a DataLoader with 12 worker processes.
# NOTE(review): with HDF5 data, num_workers > 0 is only safe when each
# worker opens the file itself; here files are opened per __getitem__
# call, which avoids sharing a handle across forked workers — confirm
# h5py/HDF5 build is fork-safe in this environment.
dataloaders['train'] = torch.utils.data.DataLoader(datasets['train'],
batch_size=batch_size, shuffle=True, pin_memory=True,
num_workers=12)
运行到下面这一行时抛出 TypeError(类型错误):
inputs, classes = next(iter(dataloaders['train']))
我想确切地了解如何在pytorch中为h5文件构建自定义数据集以及如何加载它们。
谢谢!