diff --git a/doc/api/visualization.rst b/doc/api/visualization.rst
index 03a2d22a529..359f1373129 100644
--- a/doc/api/visualization.rst
+++ b/doc/api/visualization.rst
@@ -88,6 +88,22 @@ Visualization
    get_browser_backend
    use_browser_backend
 
+Eyetracking
+-----------
+
+.. currentmodule:: mne.viz.eyetracking
+
+:py:mod:`mne.viz.eyetracking`:
+
+.. automodule:: mne.viz.eyetracking
+   :no-members:
+   :no-inherited-members:
+
+.. autosummary::
+   :toctree: generated/
+
+   plot_gaze
+
 UI Events
 ---------
 
diff --git a/doc/documentation/datasets.rst b/doc/documentation/datasets.rst
index 3946ef64be5..348da772e90 100644
--- a/doc/documentation/datasets.rst
+++ b/doc/documentation/datasets.rst
@@ -481,16 +481,34 @@ EYELINK
 =======
 :func:`mne.datasets.eyelink.data_path`
 
-A small example dataset from a pupillary light reflex experiment. Both EEG (EGI) and
-eye-tracking (SR Research EyeLink; ASCII format) data were recorded and stored in
-separate files. 1 participant fixated on the screen while short light flashes appeared.
-Event onsets were recorded by a photodiode attached to the screen and were
-sent to both the EEG and eye-tracking systems.
+Two small example datasets of eye-tracking data recorded with an SR Research EyeLink.
+
+EEG-Eyetracking
+^^^^^^^^^^^^^^^
+:func:`mne.datasets.eyelink.data_path`. Data are in the ``/eeg-et/`` subfolder.
+
+Contains both EEG (EGI) and eye-tracking (ASCII format) data recorded during a
+pupillary light reflex experiment, stored in separate files. One participant fixated
+on the screen while short light flashes appeared. Event onsets were recorded by a
+photodiode attached to the screen and were sent to both the EEG and eye-tracking
+systems.
 
 .. topic:: Examples
 
     * :ref:`tut-eyetrack`
 
+Freeviewing
+^^^^^^^^^^^
+:func:`mne.datasets.eyelink.data_path`. Data are in the ``/freeviewing/`` subfolder.
+
+Contains eye-tracking data (ASCII format) from one participant who was free-viewing
+videos of natural scenes. In some videos, the scene was pixelated such that the
+people in it were unrecognizable.
+
+.. topic:: Examples
+
+    * :ref:`tut-eyetrack-heatmap`
+
 References
 ==========
 
diff --git a/examples/visualization/eyetracking_plot_heatmap.py b/examples/visualization/eyetracking_plot_heatmap.py
new file mode 100644
index 00000000000..00c9fee6611
--- /dev/null
+++ b/examples/visualization/eyetracking_plot_heatmap.py
@@ -0,0 +1,89 @@
+# -*- coding: utf-8 -*-
+"""
+.. _tut-eyetrack-heatmap:
+
+=============================================
+Plotting eye-tracking heatmaps in MNE-Python
+=============================================
+
+This tutorial covers plotting eye-tracking position data as a heatmap.
+
+.. seealso::
+
+    :ref:`tut-importing-eyetracking-data`
+    :ref:`tut-eyetrack`
+
+"""
+
+# %%
+# Data loading
+# ------------
+#
+# As usual we start by importing the modules we need and loading some example
+# data: in this case, eye-tracking data stored in SR Research's ``'.asc'``
+# file format.
+
+
+import matplotlib.pyplot as plt
+
+import mne
+from mne.viz.eyetracking import plot_gaze
+
+task_fpath = mne.datasets.eyelink.data_path() / "freeviewing"
+et_fpath = task_fpath / "sub-01_task-freeview_eyetrack.asc"
+stim_fpath = task_fpath / "stim" / "naturalistic.png"
+
+raw = mne.io.read_raw_eyelink(et_fpath)
+
+# %%
+# Process and epoch the data
+# --------------------------
+#
+# First we will interpolate missing data during blinks and epoch the data.
+
+mne.preprocessing.eyetracking.interpolate_blinks(raw, interpolate_gaze=True)
+raw.annotations.rename({"dvns": "natural"})  # rename to something more intuitive
+event_ids = {"natural": 1}
+events, event_dict = mne.events_from_annotations(raw, event_id=event_ids)
+
+epochs = mne.Epochs(
+    raw, events=events, event_id=event_dict, tmin=0, tmax=20, baseline=None
+)
+
+
+# %%
+# Plot a heatmap of the eye-tracking data
+# ---------------------------------------
+#
+# To make a heatmap of the eye-tracking data, we can use the function
+# :func:`~mne.viz.eyetracking.plot_gaze`. We will need to define the dimensions of
+# our canvas; for this file, the eye position data are reported in pixels, so we'll
+# use the resolution of the participant's screen (1920x1080) as the width and
+# height. We can also use the ``sigma`` parameter to smooth the plot.
+
+px_width, px_height = 1920, 1080
+cmap = plt.get_cmap("viridis")
+plot_gaze(epochs["natural"], width=px_width, height=px_height, cmap=cmap, sigma=50)
+
+# %%
+# Overlaying plots with images
+# ----------------------------
+#
+# We can use matplotlib to plot gaze heatmaps on top of stimuli images. We'll
+# customize a :class:`~matplotlib.colors.Colormap` to make some values of the
+# heatmap completely transparent. We'll then use the ``vlim`` parameter to force the
+# heatmap to start at a value greater than the darkest value in our previous
+# heatmap, which will make the darkest colors of the heatmap transparent.
+
+cmap.set_under("k", alpha=0)  # make the lowest values transparent
+ax = plt.subplot()
+ax.imshow(plt.imread(stim_fpath))
+plot_gaze(
+    epochs["natural"],
+    width=px_width,
+    height=px_height,
+    vlim=(0.0003, None),
+    sigma=50,
+    cmap=cmap,
+    axes=ax,
+)
diff --git a/mne/datasets/config.py b/mne/datasets/config.py
index 76ea0934e39..2cf4d50a0ff 100644
--- a/mne/datasets/config.py
+++ b/mne/datasets/config.py
@@ -345,9 +345,9 @@
 # eyelink dataset
 MNE_DATASETS["eyelink"] = dict(
-    archive_name="eeg-eyetrack_data.zip",
-    hash="md5:c4fc788fe01737e08e9086c90cab642d",
-    url=("https://osf.io/63fjm/download?version=1"),
-    folder_name="eyelink-example-data",
+    archive_name="MNE-eyelink-data.zip",
+    hash="md5:68a6323ef17d655f1a659c3290ee1c3f",
+    url=("https://osf.io/xsu4g/download?version=1"),
+    folder_name="MNE-eyelink-data",
     config_key="MNE_DATASETS_EYELINK_PATH",
 )
 
diff --git a/mne/utils/docs.py b/mne/utils/docs.py
index 9e72685e738..40308a9074d 100644
--- a/mne/utils/docs.py
+++ b/mne/utils/docs.py
@@ -743,6 +743,14 @@ def _reflow_param_docstring(docstring, has_first_line=True, width=75):
 ``pos_lims``, as the surface plot must show the magnitude.
 """
 
+docdict[
+    "cmap"
+] = """
+cmap : matplotlib colormap | str | None
+    The :class:`~matplotlib.colors.Colormap` to use. Defaults to ``None``, which
+    will use the matplotlib default colormap.
+""" + docdict[ "cmap_topomap" ] = """ diff --git a/mne/viz/eyetracking/__init__.py b/mne/viz/eyetracking/__init__.py new file mode 100644 index 00000000000..7de13fd8900 --- /dev/null +++ b/mne/viz/eyetracking/__init__.py @@ -0,0 +1,5 @@ +"""Eye-tracking visualization routines.""" +# +# License: BSD-3-Clause + +from .heatmap import plot_gaze diff --git a/mne/viz/eyetracking/heatmap.py b/mne/viz/eyetracking/heatmap.py new file mode 100644 index 00000000000..d3ff4756d8d --- /dev/null +++ b/mne/viz/eyetracking/heatmap.py @@ -0,0 +1,155 @@ +# Authors: Scott Huberty +# +# License: BSD-3-Clause + +import numpy as np +from scipy.ndimage import gaussian_filter + +from ...utils import _ensure_int, _validate_type, fill_doc, logger +from ..utils import plt_show + + +@fill_doc +def plot_gaze( + epochs, + width, + height, + *, + sigma=25, + cmap=None, + alpha=1.0, + vlim=(None, None), + axes=None, + show=True, +): + """Plot a heatmap of eyetracking gaze data. + + Parameters + ---------- + epochs : instance of Epochs + The :class:`~mne.Epochs` object containing eyegaze channels. + width : int + The width dimension of the plot canvas. For example, if the eyegaze data units + are pixels, and the participant screen resolution was 1920x1080, then the width + should be 1920. + height : int + The height dimension of the plot canvas. For example, if the eyegaze data units + are pixels, and the participant screen resolution was 1920x1080, then the height + should be 1080. + sigma : float | None + The amount of Gaussian smoothing applied to the heatmap data (standard + deviation in pixels). If ``None``, no smoothing is applied. Default is 25. + %(cmap)s + alpha : float + The opacity of the heatmap (default is 1). + %(vlim_plot_topomap)s + %(axes_plot_topomap)s + %(show)s + + Returns + ------- + fig : instance of Figure + The resulting figure object for the heatmap plot. + + Notes + ----- + .. versionadded:: 1.6 + """ + from mne import BaseEpochs + from mne._fiff.pick import _picks_to_idx + + _validate_type(epochs, BaseEpochs, "epochs") + _validate_type(alpha, "numeric", "alpha") + _validate_type(sigma, ("numeric", None), "sigma") + width = _ensure_int(width, "width") + height = _ensure_int(height, "height") + + pos_picks = _picks_to_idx(epochs.info, "eyegaze") + gaze_data = epochs.get_data(picks=pos_picks) + gaze_ch_loc = np.array([epochs.info["chs"][idx]["loc"] for idx in pos_picks]) + x_data = gaze_data[:, np.where(gaze_ch_loc[:, 4] == -1)[0], :] + y_data = gaze_data[:, np.where(gaze_ch_loc[:, 4] == 1)[0], :] + + if x_data.shape[1] > 1: # binocular recording. Average across eyes + logger.info("Detected binocular recording. 
+        x_data = np.nanmean(x_data, axis=1)  # shape (n_epochs, n_samples)
+        y_data = np.nanmean(y_data, axis=1)
+    canvas = np.vstack((x_data.flatten(), y_data.flatten()))  # shape (2, n_samples)
+
+    # Create 2D histogram
+    # Bin into image-like format
+    hist, _, _ = np.histogram2d(
+        canvas[1, :],
+        canvas[0, :],
+        bins=(height, width),
+        range=[[0, height], [0, width]],
+    )
+    # Convert density from samples to seconds
+    hist /= epochs.info["sfreq"]
+    # Smooth the heatmap
+    if sigma:
+        hist = gaussian_filter(hist, sigma=sigma)
+
+    return _plot_heatmap_array(
+        hist,
+        width=width,
+        height=height,
+        cmap=cmap,
+        alpha=alpha,
+        vmin=vlim[0],
+        vmax=vlim[1],
+        axes=axes,
+        show=show,
+    )
+
+
+def _plot_heatmap_array(
+    data,
+    width,
+    height,
+    cmap=None,
+    alpha=None,
+    vmin=None,
+    vmax=None,
+    axes=None,
+    show=True,
+):
+    """Plot a heatmap of eyetracking gaze data from a numpy array."""
+    import matplotlib.pyplot as plt
+
+    # Prepare axes
+    if axes is not None:
+        from matplotlib.axes import Axes
+
+        _validate_type(axes, Axes, "axes")
+        ax = axes
+        fig = ax.get_figure()
+    else:
+        fig, ax = plt.subplots(constrained_layout=True)
+
+    ax.set_title("Gaze heatmap")
+    ax.set_xlabel("X position")
+    ax.set_ylabel("Y position")
+
+    # Prepare the heatmap
+    alphas = 1 if alpha is None else alpha
+    vmin = np.nanmin(data) if vmin is None else vmin
+    vmax = np.nanmax(data) if vmax is None else vmax
+    extent = [0, width, height, 0]  # origin is the top left of the screen
+
+    # Plot heatmap
+    im = ax.imshow(
+        data,
+        aspect="equal",
+        cmap=cmap,
+        alpha=alphas,
+        extent=extent,
+        origin="upper",
+        vmin=vmin,
+        vmax=vmax,
+    )
+
+    # Prepare the colorbar
+    fig.colorbar(im, ax=ax, shrink=0.6, label="Dwell time (seconds)")
+    plt_show(show)
+    return fig
diff --git a/mne/viz/eyetracking/tests/__init__.py b/mne/viz/eyetracking/tests/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/mne/viz/eyetracking/tests/test_heatmap.py b/mne/viz/eyetracking/tests/test_heatmap.py
new file mode 100644
index 00000000000..99103c552b1
--- /dev/null
+++ b/mne/viz/eyetracking/tests/test_heatmap.py
@@ -0,0 +1,35 @@
+# Authors: Scott Huberty
+#
+# License: Simplified BSD
+
+import matplotlib.pyplot as plt
+import numpy as np
+import pytest
+
+import mne
+
+
+@pytest.mark.parametrize("axes", [None, True])
+def test_plot_heatmap(axes):
+    """Test plot_gaze."""
+    # Create a toy epochs instance
+    info = mne.create_info(
+        ch_names=["xpos", "ypos"], sfreq=100, ch_types="eyegaze"
+    )
+    # Simulate a steady fixation at the center of the screen
+    width, height = (1920, 1080)
+    shape = (1, 100)  # x or y, time
+    data = np.vstack([np.full(shape, width / 2), np.full(shape, height / 2)])
+    epochs = mne.EpochsArray(data[None, ...], info)
+    epochs.info["chs"][0]["loc"][4] = -1
+    epochs.info["chs"][1]["loc"][4] = 1
+
+    if axes:
+        axes = plt.subplot()
+    fig = mne.viz.eyetracking.plot_gaze(
+        epochs, width=width, height=height, axes=axes, cmap="Greys", sigma=None
+    )
+    img = fig.axes[0].images[0].get_array()
+    # We simulated a steady central fixation, so only pixel (960, 540) is active
+    assert img.T[width // 2, height // 2] == 1  # central pixel is active
+    assert np.sum(img) == 1  # only the central pixel should be active
diff --git a/tutorials/preprocessing/90_eyetracking_data.py b/tutorials/preprocessing/90_eyetracking_data.py
index 2e71f3d00b4..a788988bae0 100644
--- a/tutorials/preprocessing/90_eyetracking_data.py
+++ b/tutorials/preprocessing/90_eyetracking_data.py
@@ -31,9 +31,10 @@
 import mne
 from mne.datasets.eyelink import data_path
 from mne.preprocessing.eyetracking import read_eyelink_calibration
+from mne.viz.eyetracking import plot_gaze
 
-et_fpath = data_path() / "sub-01_task-plr_eyetrack.asc"
-eeg_fpath = data_path() / "sub-01_task-plr_eeg.mff"
+et_fpath = data_path() / "eeg-et" / "sub-01_task-plr_eyetrack.asc"
+eeg_fpath = data_path() / "eeg-et" / "sub-01_task-plr_eeg.mff"
 
 raw_et = mne.io.read_raw_eyelink(et_fpath, create_annotations=["blinks"])
 raw_eeg = mne.io.read_raw_egi(eeg_fpath, preload=True, verbose="warning")
@@ -123,7 +124,9 @@
 # window 50 ms before and 200 ms after the blink, so that the noisy data surrounding
 # the blink is also interpolated.
 
-mne.preprocessing.eyetracking.interpolate_blinks(raw_et, buffer=(0.05, 0.2))
+mne.preprocessing.eyetracking.interpolate_blinks(
+    raw_et, buffer=(0.05, 0.2), interpolate_gaze=True
+)
 
 # %%
 # .. important:: By default, :func:`~mne.preprocessing.eyetracking.interpolate_blinks`,
@@ -176,6 +179,7 @@
 )
 # Add EEG channels to the eye-tracking raw object
 raw_et.add_channels([raw_eeg], force_update_info=True)
+del raw_eeg  # free up some memory
 
 # Define a few channel groups of interest and plot the data
 frontal = ["E19", "E11", "E4", "E12", "E5"]
@@ -194,11 +198,25 @@
 # Now let's extract epochs around our flash events. We should see a clear pupil
 # constriction response to the flashes.
 
-epochs = mne.Epochs(raw_et, events=et_events, event_id=event_dict, tmin=-0.3, tmax=3)
+# Skip baseline correction for now; we will apply it later, before averaging.
+epochs = mne.Epochs(
+    raw_et, events=et_events, event_id=event_dict, tmin=-0.3, tmax=3, baseline=None
+)
+del raw_et  # free up some memory
 epochs[:8].plot(events=et_events, event_id=event_dict, order=picks_idx)
 
+# %%
+# For this experiment, the participant was instructed to fixate on a crosshair in the
+# center of the screen. Let's plot the gaze position data to confirm that their gaze
+# indeed stayed near the center of the screen.
+
+plot_gaze(epochs, width=1920, height=1080)
+
+# %%
+# .. seealso:: :ref:`tut-eyetrack-heatmap`
+
+# %%
 # Finally, let's plot the evoked responses to the light flashes to get a sense of the
 # average pupillary light response, and the associated ERP in the EEG data.
 
-epochs.average().plot(picks=occipital + pupil)
+epochs.apply_baseline().average().plot(picks=occipital + pupil)
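
Reviewer sketch (not part of the diff): the new mne.viz.eyetracking.plot_gaze entry point can be exercised without downloading the re-packaged EyeLink dataset by adapting the synthetic-fixation setup from mne/viz/eyetracking/tests/test_heatmap.py above. The ``eyegaze`` channel type and the loc[4] convention (-1 for the x-position channel, 1 for the y-position channel) are taken directly from that test; everything else below is a minimal assumption.

import numpy as np

import mne
from mne.viz.eyetracking import plot_gaze

# Build a toy Epochs object with two eyegaze channels (x and y position)
info = mne.create_info(ch_names=["xpos", "ypos"], sfreq=100, ch_types="eyegaze")
width, height = 1920, 1080

# One epoch of a steady central fixation: x = 960 px, y = 540 px, for 1 second
data = np.stack([np.full(100, width / 2), np.full(100, height / 2)])[None, ...]
epochs = mne.EpochsArray(data, info)

# Flag channel 0 as x position (loc[4] == -1) and channel 1 as y position
# (loc[4] == 1); this is how plot_gaze tells the two gaze axes apart
epochs.info["chs"][0]["loc"][4] = -1
epochs.info["chs"][1]["loc"][4] = 1

# All dwell time lands on the central pixel; sigma=None skips Gaussian smoothing
fig = plot_gaze(epochs, width=width, height=height, sigma=None)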