|
# Move the model scores off the fitting backend (possibly a GPU array) into a
# plain NumPy array so they can be inspected and plotted; one score per voxel.
scores = backend.to_numpy(scores)
print("(n_voxels,) =", scores.shape)
143 | 143 |
|
| 144 | +############################################################################### |
| 145 | +# Intermission: understanding delays |
| 146 | +# ---------------------------------- |
| 147 | +# |
| 148 | +# To have an intuitive understanding of what we accomplish by delaying the |
| 149 | +# features before model fitting, we will simulate one voxel and a single |
| 150 | +# feature. We will then create a ``Delayer`` object (which was used in the |
| 151 | +# previous pipeline) and visualize its effect on our single feature. Let's |
| 152 | +# start by simulating the data. |
| 153 | + |
| 154 | +# number of total trs |
| 155 | +n_trs = 50 |
| 156 | +# repetition time for the simulated data |
| 157 | +TR = 2.0 |
| 158 | +rng = np.random.RandomState(42) |
| 159 | +y = rng.randn(n_trs) |
| 160 | +x = np.zeros(n_trs) |
| 161 | +# add some arbitrary value to our feature |
| 162 | +x[15:20] = .5 |
| 163 | +x += rng.randn(n_trs) * 0.1 # add some noise |
| 164 | + |
# Build the transformer that creates lagged copies of each feature
# (lags of 0 to 4 samples), then apply it to our single-column feature matrix.
lagger = Delayer(delays=[0, 1, 2, 3, 4])
x_delayed = lagger.fit_transform(x[:, None])
| 168 | + |
| 169 | +############################################################################### |
| 170 | +# In the next cell we are plotting six lines. The subplot at the top shows the |
| 171 | +# simulated BOLD response, while the other subplots show the simulated feature |
| 172 | +# at different delays. The effect of the delayer is clear: it creates multiple |
# at different delays. The effect of the delayer is clear: it creates multiple
# copies of the original feature, shifted forward in time by the number of
# samples requested (in this case 0 to 4 samples, which correspond to 0, 2, 4, 6,
| 175 | +# and 8 s in time with a 2 s TR). |
| 176 | +# |
| 177 | +# When these delayed features are used to fit a voxelwise encoding model, the |
| 178 | +# brain response :math:`y` at time :math:`t` is simultaneously modeled by the |
# feature :math:`x` at times :math:`t-0, t-2, t-4, t-6, t-8`. In the remainder
# of this example, we will see that this method improves model prediction
# accuracy and allows us to account for the underlying shape of the
# hemodynamic response function.
| 183 | + |
import matplotlib.pyplot as plt

# One row for the simulated BOLD signal, plus one row per delayed copy.
fig, axs = plt.subplots(6, 1, figsize=(8, 6.5), constrained_layout=True,
                        sharex=True)
times = TR * np.arange(n_trs)

# Top subplot: the simulated BOLD response.
axs[0].plot(times, y, color="r")
axs[0].set_title("BOLD response")

# Remaining subplots: one delayed copy of the feature each.
for delay, (ax, delayed_feature) in enumerate(zip(axs.flat[1:], x_delayed.T)):
    plural = "" if delay == 1 else "s"
    ax.plot(times, delayed_feature, color='k')
    ax.set_title("$x(t - {0:.0f})$ (feature delayed by {1} sample{2})".format(
        delay * TR, delay, plural))

# Shared cosmetics: a vertical reference line at t = 40 s, no y ticks.
for ax in axs.flat:
    ax.axvline(40, color='gray')
    ax.set_yticks([])
_ = axs[-1].set_xlabel("Time [s]")
plt.show()
| 200 | + |
| 201 | + |
144 | 202 | ############################################################################### |
145 | 203 | # Compare with a model without delays |
146 | 204 | # ----------------------------------- |
|
171 | 229 | # diagonal corresponds to identical prediction accuracy for both models. A |
# distribution deviating from the diagonal means that one model has better
173 | 231 | # prediction accuracy than the other. |
174 | | -import matplotlib.pyplot as plt |
from voxelwise_tutorials.viz import plot_hist2d

# 2D histogram comparing per-voxel prediction accuracy of the two models:
# scores without delays vs. scores with delays (presumably first argument on
# the x-axis — confirm against plot_hist2d's signature).
ax = plot_hist2d(scores_no_delay, scores)
|
0 commit comments