Skip to content
Permalink
master
Switch branches/tags

Name already in use

A tag already exists with the provided branch name. Many Git commands accept both tag and branch names, so creating this branch may cause unexpected behavior. Are you sure you want to create this branch?
Go to file
 
 
Cannot retrieve contributors at this time
executable file 221 lines (178 sloc) 9.64 KB
#!/usr/bin/env python
# A minimal nipype script to demonstrate first level task fMRI analysis on volume data
import sys
import os
import numpy as np
import pandas as pd
import nipype
import nipype.pipeline as pe
import nipype.interfaces.fsl as fsl
import nipype.algorithms.modelgen as modelgen
import nipype.interfaces.io as io
import nipype.interfaces.utility as utility
from nipype.interfaces.base import Bunch
from pathlib import Path
# from nipype import config
# config.enable_debug_mode()
# config.stop_on_first_crash = True
# Try to use the available number of CPUs; default to 4 if OMP_NUM_THREADS is not set.
# (Original comment said the default was 1 while the code used 4 — fixed to match the code.)
# Env vars are strings, so the fallback is given as a string for type consistency.
n_cpus = int(os.getenv('OMP_NUM_THREADS', '4'))
# Set up variables used throughout the script
subj_id = '01'  # BIDS subject label (sub-<subj_id>)
run = '01'      # BIDS run label
session = '01'  # BIDS session label
task = 'Stroop' # BIDS task label
space = 'MNI152NLin2009cAsym'  # fmriprep output space of the preprocessed BOLD
TR = 1.2        # repetition time in seconds
DROP_TRS = 0    # number of pre-steady-state volumes to discard
SMOOTHING_FWHM = 6 # mm FWHM of the spatial smoothing kernel
HP_FILTER = 90.0   # high-pass filter cutoff in seconds (converted to sigma-in-volumes below)
# path to processed fmriprep data
fmriprep_path = '/fmriprep'
# path to BIDS data
bids_path = '/bids'
# path to save results
out_path = '/results'
# per-subject/session output directory; created up front so the DataSink can write into it
base_dir = f'{out_path}/sub-{subj_id}/ses-{session}/'
Path(base_dir).mkdir(parents=True, exist_ok=True)
# Generate FEAT design files (design.mat / design.con) with FSL's Level1Design
design = pe.Node(interface = fsl.Level1Design(), name='design', iterfield = [''])  # NOTE(review): iterfield is a MapNode argument and looks unused on a plain Node — confirm
design.inputs.interscan_interval = TR  # TR in seconds
design.inputs.bases = {'dgamma':{'derivs': False}}  # double-gamma HRF, no temporal derivatives
design.inputs.model_serial_correlations = True  # prewhitening of serial correlations (FILM)
# add your conditions and contrasts here
condition_names = ['Con', 'InCon']
# NOTE(review): with weights [-1, 1] over ['Con', 'InCon'] this computes InCon - Con,
# yet the contrast is labelled 'Con-InCon' — confirm which direction is intended
contrast_congruency = ('Con-InCon', 'T', condition_names, [-1, 1])
design.inputs.contrasts = [contrast_congruency]
# Load fmriprep's confound regressors for this run and trim any discarded
# pre-steady-state volumes so the rows stay aligned with the BOLD series.
confounds = pd.read_csv(f'{fmriprep_path}/sub-{subj_id}/ses-{session}/func/sub-{subj_id}_ses-{session}_task-{task}_run-{run}_desc-confounds_regressors.tsv',
                        sep='\t')
confounds = confounds.iloc[DROP_TRS:]
# the six rigid-body head-motion parameters
confound_names = ['trans_x', 'trans_y', 'trans_z', 'rot_x', 'rot_y', 'rot_z']
# one list of values per confound, in the order given above
confound_regressors = [list(confounds[name]) for name in confound_names]
# Make FEAT model: build condition/onset/duration lists from the BIDS events file
trials = pd.read_csv(f'{bids_path}/sub-{subj_id}/ses-{session}/func/sub-{subj_id}_ses-{session}_task-{task}_run-{run}_events.tsv', sep='\t')
# shift onsets so they are relative to the first retained volume
trials['onset'] = trials['onset'] - DROP_TRS*TR
conditions = []
onsets = []
durations = []
# groupby yields (trial_type, sub-DataFrame) pairs directly — no need for the
# redundant per-iteration get_group() lookup the original loop performed
for trial_type, trial_events in trials.groupby('trial_type'):
    if trial_type not in condition_names:
        continue  # skip event types that are not modeled
    conditions.append(trial_type)
    onsets.append(list(trial_events['onset']))
    durations.append(list(trial_events['duration']))
# Bundle the task regressors and confounds into nipype "session info" for the design
model_spec = pe.Node(interface = modelgen.SpecifyModel(), name='model_spec', iterfield = [''])
model_spec.inputs.subject_info = [Bunch(conditions=conditions, onsets=onsets, durations=durations,
regressor_names=confound_names, regressors=confound_regressors)]
model_spec.inputs.input_units = 'secs'  # onsets/durations are given in seconds
model_spec.inputs.high_pass_filter_cutoff = HP_FILTER  # seconds; same cutoff as the temporal filter below
model_spec.inputs.time_repetition = TR
# Convert the session info + contrasts into concrete FSL design/EV/contrast files
fsl_FEATModel = pe.Node(interface = fsl.FEATModel(), name='fsl_FEATModel', iterfield = [''])
# Spatial smoothing: Gaussian kernel mean filter via fslmaths (-kernel gauss <sigma> -fmean)
fsl_smooth = pe.Node(interface = fsl.SpatialFilter(), name='fsl_smooth', iterfield = [''])
fsl_smooth.inputs.operation = 'mean'
fsl_smooth.inputs.kernel_shape = 'gauss'
# FSL specifies Gaussian kernels by sigma (sd); convert FWHM (mm) -> sigma (mm):
# sigma = FWHM / (2 * sqrt(2 * ln 2)) ~= FWHM / 2.355
# BUG FIX: the size must be set on .inputs — the original set a plain attribute on
# the Node object (fsl_smooth.kernel_size), so it never reached the interface.
fsl_smooth.inputs.kernel_size = SMOOTHING_FWHM / (2.0 * (np.sqrt(2.0 * np.log(2.0))))
# Drop pre-steady-state volumes from the start of the BOLD series
drop_volumes = pe.Node(interface = fsl.ExtractROI(), name='drop_volumes', iterfield = [''])
drop_volumes.inputs.t_min = DROP_TRS  # index of the first volume to keep
drop_volumes.inputs.t_size = -1  # -1 = keep everything from t_min to the end
# Grand-mean intensity normalization: scale the run so its mean intensity becomes 10000
# (fslstats -M = mean over non-zero voxels)
fsl_tmean = pe.Node(interface = fsl.ImageStats(op_string='-M'), name='fsl_tmean' )
# Turn the computed mean into an fslmaths op string of the form '-mul <10000/mean>'
fn_Scaling = pe.Node(interface = utility.wrappers.Function(input_names=['mean'], output_names=['scaling_op']), name='fn_Scaling')
fn_Scaling.inputs.function_str = "def f(mean): return('-mul ' + str(10000.0/float(mean)))"
# Applies the scaling op string produced above (connected in the workflow section)
fsl_IntNorm = pe.Node(interface = fsl.ImageMaths(), name='fsl_IntNorm')
# Save the mean of the prefiltered series; it is added back after high-pass filtering
fsl_MeanImage = pe.Node(interface = fsl.MeanImage(), name='fsl_MeanImage', iterfield = [''])
# High-pass temporal filtering. fslmaths -bptf expects sigma in volumes, not seconds:
# sigma_vols = (cutoff_seconds / TR) / 2, which is what the expression below computes.
fsl_TemporalFilter = pe.Node(interface = fsl.TemporalFilter(), name='fsl_TemporalFilter', iterfield = [''])
fsl_TemporalFilter.inputs.highpass_sigma = 0.5 * HP_FILTER / TR
# The filtered data has its mean removed; add the saved mean image back
fsl_MultiImageMaths = pe.Node(interface = fsl.MultiImageMaths(), name='fsl_MultiImageMaths', iterfield = [''])
fsl_MultiImageMaths.inputs.op_string = '-add %s'  # %s is filled with the mean image file
# GLS model fitting with FSL film_gls (prewhitened least squares)
filmgls_volume = pe.Node(interface = fsl.FILMGLS(), name='filmgls_volume', iterfield = [''])
filmgls_volume.inputs.mode = 'volumetric'
filmgls_volume.inputs.smooth_autocorr = True  # spatially smooth the autocorrelation estimates
filmgls_volume.inputs.mask_size = 5  # susan mask size for autocorr smoothing — TODO confirm units
filmgls_volume.inputs.threshold = 1  # brain/background threshold for the fit
filmgls_volume.inputs.output_type = 'NIFTI'
# Flexibly collect data from disk to feed into workflows:
# locate the fmriprep-preprocessed BOLD image for this subject/session/task/run
select_func = pe.Node(
io.SelectFiles(
templates = {'out_bold': f'sub-{subj_id}_ses-{session}_task-{task}_run-{run}_space-{space}_desc-preproc_bold.nii.gz'}
),
name = 'select_func'
)
select_func.inputs.base_directory = f'{fmriprep_path}/sub-{subj_id}/ses-{session}/func'
#select_func.inputs.out_bold = f'Results/ses-{session}_task-{task}_run-{run}_bold/ses-{session}_task-{task}_run-{run}_bold.nii.gz'
# Select single files out of FILMGLS's list outputs (copes/varcopes/zstats are lists)
select_pe_volume = pe.Node(interface = utility.Select(), name='select_pe_volume', iterfield = [''])  # NOTE(review): never connected in the workflow below — dead node?
select_cope_volume = pe.Node(interface = utility.Select(), name='select_cope_volume', iterfield = [''])
select_varcope_volume = pe.Node(interface = utility.Select(), name='select_varcope_volume', iterfield = [''])
select_zstat_volume = pe.Node(interface = utility.Select(), name='select_zstat_volume', iterfield = [''])
# Identity nodes whose iterables fan out one workflow iteration per output index
pe_list = pe.Node(utility.IdentityInterface(fields=['pe_index']), name='pe_list')
pe_list.iterables = ('pe_index', np.arange(len(condition_names)))  # NOTE(review): also unconnected — confirm whether PE maps were meant to be sunk
# number of T contrasts defined for the design (F contrasts excluded)
n_tstats = len([x[0] for x in design.inputs.contrasts if x[1]=='T'])
cope_list = pe.Node(utility.IdentityInterface(fields=['cope_index']), name='cope_list')
cope_list.iterables = ('cope_index', np.arange(n_tstats))
varcope_list = pe.Node(utility.IdentityInterface(fields=['varcope_index']), name='varcope_list')
varcope_list.iterables = ('varcope_index', np.arange(n_tstats))
zstat_list = pe.Node(utility.IdentityInterface(fields=['zstat_index']), name='zstat_list')
zstat_list.iterables = ('zstat_index', np.arange(n_tstats))
# Generic DataSink: writes selected outputs into a FEAT-like directory under base_dir
io_DataSink = pe.Node(interface = io.DataSink(), name='io_DataSink', iterfield = [''])
io_DataSink.inputs.base_directory = base_dir
io_DataSink.inputs.container = f'sub-{subj_id}_ses-{session}_task-{task}_run-{run}_bold/level1.feat'
io_DataSink.inputs.parameterization = False  # don't create per-iterable subdirectories
# Create a workflow to connect all those nodes.
# Data path: select_func -> drop_volumes -> intensity normalization -> high-pass
# filter (+ re-add mean) -> smoothing -> FILMGLS.
# Model path: drop_volumes -> model_spec -> design -> FEATModel -> FILMGLS.
# NOTE(review): smoothing here runs AFTER intensity normalization and temporal
# filtering; FEAT's default preprocessing smooths before filtering — confirm intended.
analysisflow = nipype.Workflow(f'sub-{subj_id}_ses-{session}_task-{task}_run-{run}_level1_wf')
# model specification uses the trimmed (but otherwise unprocessed) data
analysisflow.connect(drop_volumes, "roi_file", model_spec, "functional_runs")
analysisflow.connect(model_spec, "session_info", design, "session_info")
analysisflow.connect(design, "fsf_files", fsl_FEATModel, "fsf_file")
analysisflow.connect(design, "ev_files", fsl_FEATModel, "ev_files")
analysisflow.connect(select_func, "out_bold", drop_volumes, "in_file")
# mean of the intensity-normalized series, saved so it can be re-added after filtering
analysisflow.connect(fsl_IntNorm, "out_file", fsl_MeanImage, "in_file")
# grand-mean scaling: mean statistic -> '-mul 10000/mean' op string -> fslmaths
analysisflow.connect(drop_volumes, "roi_file", fsl_tmean, "in_file")
analysisflow.connect(fsl_tmean, "out_stat", fn_Scaling, "mean")
analysisflow.connect(fn_Scaling, "scaling_op", fsl_IntNorm, "op_string")
analysisflow.connect(drop_volumes, "roi_file", fsl_IntNorm, "in_file")
# high-pass filter, re-add the mean, smooth, then fit the GLM
analysisflow.connect(fsl_IntNorm, "out_file", fsl_TemporalFilter, "in_file")
analysisflow.connect(fsl_TemporalFilter, "out_file", fsl_MultiImageMaths, "in_file")
analysisflow.connect(fsl_MeanImage, "out_file", fsl_MultiImageMaths, "operand_files")
analysisflow.connect(fsl_MultiImageMaths, "out_file", fsl_smooth, "in_file")
analysisflow.connect(fsl_smooth, "out_file", filmgls_volume, "in_file")
analysisflow.connect(fsl_FEATModel, "design_file", filmgls_volume, "design_file")
analysisflow.connect(fsl_FEATModel, "con_file", filmgls_volume, "tcon_file")
analysisflow.connect(fsl_FEATModel, "fcon_file", filmgls_volume, "fcon_file")
# copes: pick each contrast's cope image by index and sink it
analysisflow.connect(cope_list, "cope_index", select_cope_volume, "index")
analysisflow.connect(filmgls_volume, "copes", select_cope_volume, "inlist")
analysisflow.connect(select_cope_volume, "out", io_DataSink, "cope")
# varcopes
analysisflow.connect(varcope_list, "varcope_index", select_varcope_volume, "index")
analysisflow.connect(filmgls_volume, "varcopes", select_varcope_volume, "inlist")
analysisflow.connect(select_varcope_volume, "out", io_DataSink, "varcope")
# zstats (was mislabelled as a CIFTI merge step — this pipeline is volume-only)
analysisflow.connect(zstat_list, "zstat_index", select_zstat_volume, "index")
analysisflow.connect(filmgls_volume, "zstats", select_zstat_volume, "inlist")
analysisflow.connect(select_zstat_volume, "out", io_DataSink, "zstat")
# degrees of freedom
analysisflow.connect(filmgls_volume, "dof_file", io_DataSink, "dof")
# design debugging outputs (design matrix, image, covariance, contrast files)
analysisflow.connect(fsl_FEATModel, "design_file", io_DataSink, "design")
analysisflow.connect(fsl_FEATModel, "design_image", io_DataSink, "design.@design_image")
analysisflow.connect(fsl_FEATModel, "design_cov", io_DataSink, "design.@design_cov")
analysisflow.connect(fsl_FEATModel, "con_file", io_DataSink, "design.@con_file")
analysisflow.connect(fsl_FEATModel, "fcon_file", io_DataSink, "design.@fcon_file")
# Run the workflow
plugin = 'MultiProc' #adjust your desired plugin here
plugin_args = {'n_procs': n_cpus} #adjust to your number of cores
#analysisflow.write_graph(graph2use='flat', format='png', simple_form=False)
analysisflow.run(plugin=plugin, plugin_args=plugin_args)