Skip to content

Commit c1ac59a

Browse files
committed
ENH update stimuli hdf5 file
1 parent 3491dd1 commit c1ac59a

2 files changed

Lines changed: 19 additions & 14 deletions

File tree

tutorials/movies_3T/06_extract_motion_energy.py

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -64,29 +64,30 @@
6464
from voxelwise_tutorials.io import load_hdf5_array
6565

6666

67-
def compute_luminance(run_name, size=(96, 96)):
67+
def compute_luminance(run_name, size=(96, 96), batch_size=100):
6868

6969
stimuli_file = os.path.join(directory, 'stimuli', run_name)
7070

71-
# get the list of batches in the stimuli file
71+
# get the number of images in the stimuli file
7272
with h5py.File(stimuli_file, 'r') as f:
73-
keys = list(f.keys())
74-
keys.sort() # sort the batches
73+
n_images = f['stimuli'].shape[0]
7574

7675
# compute the luminance on each batch
77-
luminance = []
78-
for key in bar(keys, title=f'compute_luminance({run_name})'):
76+
luminance = np.zeros((n_images, *size))
77+
for start in bar(range(0, n_images, batch_size),
78+
title=f'compute_luminance({run_name})'):
7979
# load the batch of images
80-
images = load_hdf5_array(stimuli_file, key=key)
80+
batch = slice(start, start + batch_size)
81+
images = load_hdf5_array(stimuli_file, key='stimuli', slice=batch)
8182

8283
# ``imagearray2luminance`` uses uint8 arrays
8384
if images.dtype != 'uint8':
8485
images = np.int_(np.clip(images, 0, 1) * 255).astype(np.uint8)
8586

8687
# convert RGB images to a single luminance channel
87-
luminance.append(imagearray2luminance(images, size=size))
88+
luminance[batch] = imagearray2luminance(images, size=size)
8889

89-
return np.concatenate(luminance)
90+
return luminance
9091

9192

9293
luminance_train = np.concatenate(
@@ -161,7 +162,8 @@ def compute_motion_energy(luminance,
161162

162163
save_hdf5_dataset(
163164
os.path.join(features_directory, "motion_energy_recomputed.hdf"),
164-
dataset=dict(X_train=motion_energy_train, X_test=motion_energy_test))
165+
dataset=dict(X_train=motion_energy_train, X_test=motion_energy_test,
166+
run_onsets=np.arange(0, 3600, 300)))
165167

166168
###############################################################################
167169
# References

voxelwise_tutorials/io.py

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -91,15 +91,18 @@ def unpack_archive(archive_name):
9191
shutil.unpack_archive(archive_name, extract_dir=extract_dir)
9292

9393

94-
def load_hdf5_array(file_name, key=None):
94+
def load_hdf5_array(file_name, key=None, slice=slice(0, None)):
9595
"""Function to load data from an hdf file.
9696
9797
Parameters
9898
----------
9999
file_name: string
100-
hdf5 file name
100+
hdf5 file name.
101101
key: string
102102
Key name to load. If not provided, all keys will be loaded.
103+
slice: slice, or tuple of slices
104+
Load only a slice of the hdf5 array. It will load `array[slice]`.
105+
Use a tuple of slices to get a slice in multiple dimensions.
103106
104107
Returns
105108
-------
@@ -110,10 +113,10 @@ def load_hdf5_array(file_name, key=None):
110113
if key is None:
111114
data = dict()
112115
for k in hf.keys():
113-
data[k] = hf[k][()]
116+
data[k] = hf[k][slice]
114117
return data
115118
else:
116-
return hf[key][()]
119+
return hf[key][slice]
117120

118121

119122
def load_hdf5_sparse_array(file_name, key):

0 commit comments

Comments (0)