#!/usr/bin/env python
# coding: utf-8

# ## Loading of Miller ECoG data of motor imagery
#
# includes some visualizations

# In[ ]:


# @title Data retrieval
import os, requests

fname = 'motor_imagery.npz'
url = "https://osf.io/ksqv8/download"

if not os.path.isfile(fname):
  try:
    r = requests.get(url)
  except requests.ConnectionError:
    print("!!! Failed to download data !!!")
  else:
    if r.status_code != requests.codes.ok:
      print("!!! Failed to download data !!!")
    else:
      with open(fname, "wb") as fid:
        fid.write(r.content)


# In[ ]:


# @title Install packages (`nilearn`, `nimare`, `duecredit`), import `matplotlib` and set defaults
# install packages to visualize brains and electrode locations
get_ipython().system('pip install nilearn --quiet')
get_ipython().system('pip install nimare --quiet')
get_ipython().system('pip install duecredit --quiet')

from matplotlib import rcParams
from matplotlib import pyplot as plt

rcParams['figure.figsize'] = [20, 4]
rcParams['font.size'] = 15
rcParams['axes.spines.top'] = False
rcParams['axes.spines.right'] = False
rcParams['figure.autolayout'] = True


# In[ ]:


# @title Data loading
import numpy as np

alldat = np.load(fname, allow_pickle=True)['dat']

# select just one of the subjects here; each subject has two blocks
# (real movements first, then motor imagery)
dat1 = alldat[0][0]
dat2 = alldat[0][1]

print(dat1.keys())
print(dat2.keys())


# # Dataset info
#
# This is one of multiple ECoG datasets from Miller 2019, recorded in a clinical setting with a variety of tasks. Raw data and dataset paper are here:
#
# https://exhibits.stanford.edu/data/catalog/zk881ps0522
# https://www.nature.com/articles/s41562-019-0678-3
#
# This particular dataset was originally described in this paper:
#
# - Miller, Kai J., Gerwin Schalk, Eberhard E. Fetz, Marcel Den Nijs, Jeffrey G. Ojemann, and Rajesh PN Rao. "Cortical activity during motor execution, motor imagery, and imagery-based online feedback." Proceedings of the National Academy of Sciences (2010): 200913697. doi: [10.1073/pnas.0913697107](https://doi.org/10.1073/pnas.0913697107)
#
# `dat1` and `dat2` are data from the two blocks performed in each subject. The first one was the actual movements, the second one was motor imagery. For the movement task, from the original dataset instructions:
#
# *Patients performed simple, repetitive, motor tasks of hand (synchronous flexion and extension of all fingers, i.e., clenching and releasing a fist at a self-paced rate of ~1-2 Hz) or tongue (opening of mouth with protrusion and retraction of the tongue, i.e., sticking the tongue in and out, also at ~1-2 Hz). These movements were performed in an interval-based manner, alternating between movement and rest, and the side of movement was always contralateral to the side of cortical grid placement.*
#
# For the imagery task, from the original dataset instructions:
#
# *Following the overt movement experiment, each subject performed an imagery task, imagining making identical movement rather than executing the movement. The imagery was kinesthetic rather than visual (“imagine yourself performing the actions like you just did”; i.e., “don’t imagine what it looked like, but imagine making the motions”).*
#
# Sample rate is always 1000 Hz, and the ECoG data has been notch-filtered at 60, 120, 180, 240 and 250 Hz, followed by z-scoring across time and conversion to float16 to minimize size. Please convert back to float32 after loading the data in the notebook, to avoid unexpected behavior (a short sketch at the end of the notebook shows this conversion together with rescaling to microvolts).
#
# Both experiments:
# * `dat['V']`: continuous voltage data (time by channels)
# * `dat['srate']`: acquisition rate (1000 Hz). All stimulus times are in units of this.
# * `dat['t_on']`: time of stimulus onset in data samples
# * `dat['t_off']`: time of stimulus offset, always 400 samples after `t_on`
# * `dat['stim_id']`: identity of stimulus (11 = tongue, 12 = hand), real or imaginary stimulus
# * `dat['scale_uv']`: scale factor to multiply the data values to get to microvolts (uV)
# * `dat['locs']`: 3D electrode positions on the brain surface

# In[ ]:


from nilearn import plotting
from nimare import utils

plt.figure(figsize=(8, 8))
locs = dat1['locs']
view = plotting.view_markers(utils.tal2mni(locs),
                             marker_labels=['%d'%k for k in np.arange(locs.shape[0])],
                             marker_color='purple',
                             marker_size=5)
view


# In[ ]:


# quick way to get broadband power in time-varying windows
from scipy import signal

# pick subject 0 and experiment 0 (real movements)
dat1 = alldat[0][0]

# V is the voltage data
V = dat1['V'].astype('float32')

# high-pass filter above 50 Hz
b, a = signal.butter(3, [50], btype='high', fs=1000)
V = signal.filtfilt(b, a, V, 0)

# compute smooth envelope of this signal = approx power
V = np.abs(V)**2
b, a = signal.butter(3, [10], btype='low', fs=1000)
V = signal.filtfilt(b, a, V, 0)

# normalize each channel so its mean power is 1
V = V/V.mean(0)


# In[ ]:


# average the broadband power across all tongue and hand trials
nt, nchan = V.shape
nstim = len(dat1['t_on'])

trange = np.arange(0, 2000)
ts = dat1['t_on'][:, np.newaxis] + trange
V_epochs = np.reshape(V[ts, :], (nstim, 2000, nchan))

V_tongue = (V_epochs[dat1['stim_id'] == 11]).mean(0)
V_hand = (V_epochs[dat1['stim_id'] == 12]).mean(0)


# In[ ]:


# let's find the electrodes that distinguish tongue from hand movements
# note the behaviors happen some time after the visual cue
plt.figure(figsize=(20, 10))
for j in range(46):
  ax = plt.subplot(5, 10, j+1)
  plt.plot(trange, V_tongue[:, j])
  plt.plot(trange, V_hand[:, j])
  plt.title('ch%d'%j)
  plt.xticks([0, 1000, 2000])
  plt.ylim([0, 4])
plt.show()


# In[ ]:


# let's look at all the trials for electrode 20, which has a good response to hand movements
# we will sort trials by stimulus id
plt.subplot(1, 3, 1)
isort = np.argsort(dat1['stim_id'])
plt.imshow(V_epochs[isort, :, 20].astype('float32'),
           aspect='auto', vmax=7, vmin=0, cmap='magma')
plt.colorbar()
plt.show()


# In[ ]:


# electrode 42 seems to respond to tongue movements
isort = np.argsort(dat1['stim_id'])
plt.subplot(1, 3, 1)
plt.imshow(V_epochs[isort, :, 42].astype('float32'),
           aspect='auto', vmax=7, vmin=0, cmap='magma')
plt.colorbar()
plt.show()
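# As noted in the dataset info above, `dat['V']` is stored as z-scored float16 and `dat['scale_uv']` is described as a scale factor to recover microvolts. The cell below is a minimal sketch of that conversion; it assumes `scale_uv` holds one factor per channel, which is worth checking against the shapes of the loaded arrays.

# In[ ]:


# sketch: convert back to float32 and rescale to microvolts
# (assumes `scale_uv` is one scale factor per channel; verify this on the loaded data)
dat1 = alldat[0][0]
V_raw = dat1['V'].astype('float32')              # float16 -> float32, as recommended above
scale_uv = np.asarray(dat1['scale_uv']).ravel()  # per-channel scale factors (assumed layout)
V_uv = V_raw * scale_uv                          # broadcasts over the channel (last) axis
print(V_uv.shape, V_uv.dtype)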
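# One rough way to summarize the plots above is to score each electrode by the difference in its average broadband power between hand and tongue trials in a post-cue window. This is a sketch only, using the `V_hand` and `V_tongue` arrays computed earlier; the 500-1500 sample window and the simple difference score are arbitrary choices, not part of the original analysis.

# In[ ]:


# sketch: rank electrodes by hand-vs-tongue difference in mean broadband power
twin = np.arange(500, 1500)                            # post-cue window in samples (arbitrary choice)
diff = V_hand[twin].mean(0) - V_tongue[twin].mean(0)   # positive = more power on hand trials
order = np.argsort(diff)
print('most tongue-preferring electrodes:', order[:3])
print('most hand-preferring electrodes:', order[-3:])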