Add 'tutorial' (#23)
* add tutorial

* apply precommit
Remi-Gau committed Aug 18, 2024
1 parent eb9a7ac commit 8d9ab5a
Showing 4 changed files with 90 additions and 5 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/run_examples.yml
@@ -22,7 +22,7 @@ jobs:
    strategy:
      fail-fast: false
      matrix:
-       example: [compare_designs.py, JSS_example.py, optimisation.py]
+       example: [compare_designs.py, JSS_example.py, optimisation.py, tutorial.py]
    steps:
      - name: Checkout
        uses: actions/checkout@v4
81 changes: 81 additions & 0 deletions examples/tutorial.py
@@ -0,0 +1,81 @@
# Adapted from https://lukas-snoek.com/NI-edu/fMRI-introduction/week_3/neurodesign.html

import os

import matplotlib.pyplot as plt
import numpy as np
from rich import print

import neurodesign

# Neurodesign internally parallelizes some computations using multithreading,
# which can put a heavy load on the CPU, so let's limit the number of threads.
os.environ["OMP_NUM_THREADS"] = "1"
os.environ["OPENBLAS_NUM_THREADS"] = "1"
os.environ["MKL_NUM_THREADS"] = "1"


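# Basic design parameters: the scanner TR and the assumed temporal
# autocorrelation (rho), two stimulus types (faces / houses) presented with
# equal probability P for 1 second each in a 5 minute run, plus short
# pre- and post-stimulus intervals within each trial.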
TR = 1.6
rho = 0.6

n_stimuli = 2
stim_duration = 1
duration = 5 * 60
P = [0.5, 0.5]
t_pre = 0.1
t_post = 0.1

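# inter-trial intervals are drawn from a uniform distribution
# between ITImin and ITImax (in seconds)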
ITImodel = "uniform"
ITImin = 2
ITImax = 4

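# contrast matrix (one contrast per row): compare the two conditions
# in both directions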
C = np.array([[1, -1], [-1, 1]])

# %%
exp = neurodesign.Experiment(
    TR=TR,
    rho=rho,
    n_stimuli=n_stimuli,
    stim_duration=stim_duration,
    P=P,
    duration=duration,
    t_pre=t_pre,
    t_post=t_post,
    ITImodel=ITImodel,
    ITImin=ITImin,
    ITImax=ITImax,
    C=C,
)

# %%
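# Relative weights of the four optimisation criteria:
# Fe (estimation efficiency), Fd (detection power), Ff (trial frequencies)
# and Fc (trial-order counterbalancing). Here we optimise purely for
# detection power.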
weights = [0, 1, 0, 0] # order: Fe, Fd, Ff, Fc
outdes = 10  # number of designs to output

opt = neurodesign.Optimisation(
    # we have to pass our previously created `exp` object to this class as well
    experiment=exp,
    weights=weights,
    preruncycles=10,  # cycles used to estimate the maximum attainable efficiencies
    cycles=1,  # optimisation cycles (kept low so this example runs quickly)
    seed=2,
    outdes=outdes,
)

opt.optimise()
opt.evaluate()

print(f"Onsets: {opt.bestdesign.onsets}")
print(f"Order: {opt.bestdesign.order}")

Xconv = opt.bestdesign.Xconv

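# plot the convolved design (one regressor per condition) and mark each
# stimulus onset with a short tick, coloured by condition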
plt.figure(figsize=(15, 5))
plt.plot(Xconv)
for ons, cond in zip(opt.bestdesign.onsets, opt.bestdesign.order):
    c = "tab:blue" if cond == 0 else "tab:orange"
    plt.plot([ons, ons], [0.35, 0.37], c=c, lw=2)

plt.legend(["Faces", "Houses"])
plt.grid()
plt.xlim(0, Xconv.shape[0])
plt.show()
4 changes: 2 additions & 2 deletions neurodesign/classes.py
@@ -482,7 +482,7 @@ def __init__(
        if not np.isclose(self.TR % self.resolution, 0):
            self.resolution = _find_new_resolution(self.TR, self.resolution)
            warnings.warn(
-               "the resolution is adjusted to be a multiple of the TR."
+               "the resolution is adjusted to be a multiple of the TR. "
                f"New resolution: {self.resolution}"
            )

@@ -773,7 +773,7 @@ def add_new_designs(self, weights=None, R=None):
        :type experiment: experiment
        :param weights: weights for efficiency calculation.
        :type weights: list of floats, summing to 1
-       :param seed: The seed for ramdom processes.
+       :param seed: The seed for random processes.
        :type seed: integer or None
        """
        # weights
8 changes: 6 additions & 2 deletions neurodesign/report.py
@@ -3,6 +3,7 @@
import time
from pathlib import Path

+ import matplotlib
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import gridspec
@@ -22,11 +23,12 @@
    Table,
)

- plt.switch_backend("agg")


def make_report(population, outfile: str | Path = "NeuroDesign.pdf"):
    """Create a report of a finished design optimisation."""
+     backend = matplotlib.get_backend()
+     plt.switch_backend("agg")

    if not isinstance(population.cov, np.ndarray):
        population.evaluate()

@@ -172,6 +174,8 @@ def make_report(population, outfile: str | Path = "NeuroDesign.pdf"):

    doc.build(story)

+     plt.switch_backend(backend)


def _form_xo_reader(imgdata):
    (page,) = PdfReader(imgdata).pages
