#!/usr/bin/env python
import sys
import os
import argparse
import numpy as np
import pandas as pd
import json
import nipype
import nipype.pipeline as pe
import nipype.interfaces.fsl as fsl
import nipype.algorithms.modelgen as modelgen
import nipype.interfaces.io as io
import nipype.interfaces.utility as utility
import nipype.interfaces.workbench as workbench
from nipype.interfaces.base import Bunch
# from nipype import config
# config.enable_debug_mode()
# config.stop_on_first_crash = True
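# Pipeline overview (summary of the workflow built below): smooth the dense
# timeseries on the surface, convert to NIFTI for FSL-style grand-mean scaling
# and highpass filtering, convert back to CIFTI, separate into left/right/volume
# components, fit film_gls per component, then merge the resulting copes,
# varcopes and zstats back into CIFTI files and sink them under the run's
# Results folder.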
parser = argparse.ArgumentParser(description='First level models for CIFTI data')
parser.add_argument('bids_dir', help='Full path to the directory with the input dataset '
                    'formatted according to the BIDS standard.')
parser.add_argument('output_dir', help='Full path to the base directory containing preprocessed data. '
                    'This directory will be modified. '
                    'For ciftify data, this is the directory containing the directories named ciftify and fmriprep. '
                    'For HCP data, this directory contains the subject-level output directories.')
parser.add_argument('tcontrast_file', help='Full path to T contrast specification')
parser.add_argument('participant', help='Participant label to process', type=str)
parser.add_argument('task', help='Task label to process', type=str)
parser.add_argument('run', help='Run label to process', type=str)
parser.add_argument('--session', help='Session label to process',
                    default=None, type=str)
group_mode = parser.add_mutually_exclusive_group(required=True)
group_mode.add_argument('--mode_ciftify', '--mode-ciftify', action='store_true',
                        default=False,
                        help='Use data processed by ciftify')
group_mode.add_argument('--mode_hcp', '--mode-hcp', action='store_true',
                        default=False,
                        help='Use data processed by the HCP BIDS app')
parser.add_argument('--fmriprep_confounds', help='Additional fmriprep confound columns to include.',
                    nargs='*', type=str)
parser.add_argument('--n_cpus', '--n-cpus', help='Number of CPUs/cores available to use.',
                    default=1, type=int)
parser.add_argument('--fwhm', help='Smoothing FWHM in mm',
                    default=2.0, type=float)
parser.add_argument('--hpfilter', help='Highpass filter length in seconds',
                    default=120.0, type=float)
parser.add_argument('--drop_trs', help='Number of initial TRs to drop',
                    default=0, type=int)
parser.add_argument('--mesh', help='Mesh resolution',
                    default=32, type=int)
parser.add_argument('--resolution', help='Volume resolution',
                    default=2, type=int)
parser.add_argument('--motion6', help='Include 6 motion regressors as confounds',
                    action='store_true', default=False)
parser.add_argument('--motion6dt', help='Include the first derivative of the 6 motion regressors as confounds',
                    action='store_true', default=False)
parser.add_argument('--ciftify_desc', help='ciftify desc value', default=None, type=str)
args = parser.parse_args()
assert args.drop_trs == 0, 'Dropping initial TRs is not implemented'
# set up variables used throughout the script
subj_id = args.participant
task = args.task
run = args.run
mesh = args.mesh
resolution = args.resolution
output_dir = args.output_dir # path to folder containing ciftify and fmriprep folders, or hcpout folder
bids_dir = args.bids_dir # path to bids folder
if args.session is not None:
    session_path = f'ses-{args.session}'
    session_key = f'ses-{args.session}_'
else:
    session_path = ''
    session_key = ''
if args.mode_ciftify:
    base_dir = f'{output_dir}/ciftify/sub-{subj_id}/MNINonLinear'
    s0_str = '_s0'  # ciftify always includes smoothing in filenames
    if args.ciftify_desc is not None:
        bold_suffix = f'_desc-{args.ciftify_desc}'
    else:
        bold_suffix = ''
if args.mode_hcp:
    base_dir = f'{output_dir}/sub-{subj_id}/MNINonLinear'
    s0_str = ''  # HCP does not include non-zero smoothing in the name
    bold_suffix = '_bold'
# get TR
with open(f'{bids_dir}/sub-{subj_id}/{session_path}/func/sub-{subj_id}_{session_key}task-{task}_run-{run}_bold.json', 'r') as fp:
    bold_meta = json.load(fp)
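# bold_meta now holds the BOLD sidecar fields; everything below relies only on
# bold_meta['RepetitionTime'], the TR in seconds per the BIDS specification.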
# Generate FEAT design files
design = pe.Node(interface = fsl.Level1Design(), name='design', iterfield = [''])
design.inputs.interscan_interval = bold_meta['RepetitionTime']
design.inputs.bases = {'dgamma':{'derivs': False}}
design.inputs.model_serial_correlations = True
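# 'dgamma' selects FSL's double-gamma canonical HRF (temporal derivative disabled),
# and model_serial_correlations=True requests FILM prewhitening in the generated design.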
# create t contrast list
tcontrasts = pd.read_csv(args.tcontrast_file, sep='\t')
condition_names = list(tcontrasts.columns)[1::]
contrasts = []
for idx, row in tcontrasts.iterrows():
    contrasts.append((row['contrast'], 'T', condition_names, list(row[condition_names])))
design.inputs.contrasts = contrasts
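# Each contrast is a Nipype tuple (name, 'T', conditions, weights). For a
# hypothetical tcontrast_file with columns [contrast, faces, places] and a row
# 'faces_gt_places  1  -1', this yields:
#   ('faces_gt_places', 'T', ['faces', 'places'], [1, -1])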
# load confounds
# all columns are demeaned by FSL
confound_names = []
if args.mode_ciftify:
    confounds = pd.read_csv(f'{output_dir}/fmriprep/sub-{subj_id}/{session_path}/func/sub-{subj_id}_{session_key}task-{task}_run-{run}_desc-confounds_regressors.tsv',
                            sep='\t')
    if args.fmriprep_confounds is not None:
        for concol in args.fmriprep_confounds:
            if concol not in list(confounds.columns):
                raise Exception(f'Confound {concol} not found')
        confound_names += args.fmriprep_confounds
if args.mode_hcp:
    confounds = pd.read_table(f'{base_dir}/Results/{session_key}task-{task}_run-{run}_bold/Movement_Regressors.txt',
                              sep=' +', engine='python',
                              names=['trans_x', 'trans_y', 'trans_z', 'rot_x', 'rot_y', 'rot_z',
                                     'trans_x_derivative1', 'trans_y_derivative1', 'trans_z_derivative1',
                                     'rot_x_derivative1', 'rot_y_derivative1', 'rot_z_derivative1'])
confounds = confounds.iloc[args.drop_trs::]
if args.motion6:
    confound_names += ['trans_x', 'trans_y', 'trans_z', 'rot_x', 'rot_y', 'rot_z']
if args.motion6dt:
    confound_names += ['trans_x_derivative1', 'trans_y_derivative1', 'trans_z_derivative1',
                       'rot_x_derivative1', 'rot_y_derivative1', 'rot_z_derivative1']
confound_regressors = [list(confounds[x]) for x in confound_names]
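# confound_regressors holds one time course per entry in confound_names, in the
# same order; as noted above, FSL demeans each column when building the design.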
# Make FEAT model
event_file = f'{bids_dir}/sub-{subj_id}/{session_path}/func/sub-{subj_id}_{session_key}task-{task}_run-{run}_events.tsv'
if not os.path.exists(event_file):
    print(f'Could not find the run-specific event file {event_file}. Checking for a top-level file.')
    event_file = f'{bids_dir}/task-{task}_events.tsv'
    if not os.path.exists(event_file):
        raise Exception('Cannot find an _events.tsv file')
trials = pd.read_csv(event_file, sep='\t')
# type checking
assert np.issubdtype(trials['onset'].dtype, np.number), 'Event onsets must be numeric'
assert np.issubdtype(trials['duration'].dtype, np.number), 'Event durations must be numeric'
assert trials.shape[0] == trials[['onset', 'duration', 'trial_type']].dropna().shape[0], 'Event files cannot contain missing values'
trials['onset'] = trials['onset'] - args.drop_trs * bold_meta['RepetitionTime']
trials['trial_type'] = trials['trial_type'].astype(str) # needed in case of numeric trial_type
groups = trials.groupby('trial_type')
onsets = []
durations = []
# check the design
if not set(condition_names).issubset(set(groups.groups.keys())):
    raise Exception(f'Missing conditions in {event_file}')
# keep the correct condition order
for g in condition_names:
    condition_onsets = groups.get_group(g)
    onsets += [list(condition_onsets['onset'])]
    durations += [list(condition_onsets['duration'])]
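# onsets[i] and durations[i] now line up with condition_names[i], the ordering
# SpecifyModel expects in the Bunch below.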
model_spec = pe.Node(interface = modelgen.SpecifyModel(), name='model_spec', iterfield = [''])
model_spec.inputs.subject_info = [Bunch(conditions=condition_names, onsets=onsets, durations=durations,
                                        regressor_names=confound_names, regressors=confound_regressors)]
model_spec.inputs.input_units = 'secs'
model_spec.inputs.high_pass_filter_cutoff = args.hpfilter
model_spec.inputs.time_repetition = bold_meta['RepetitionTime']
fsl_FEATModel = pe.Node(interface = fsl.FEATModel(), name='fsl_FEATModel', iterfield = [''])
# surface smoothing using workbench
wb_smooth = pe.Node(interface = workbench.CiftiSmooth(), name='wb_smooth', iterfield = [''])
wb_smooth.inputs.surface_sigma = args.fwhm / (2.0 * (np.sqrt(2.0 * np.log(2.0))))
wb_smooth.inputs.volume_sigma = args.fwhm / (2.0 * (np.sqrt(2.0 * np.log(2.0))))
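# wb_command takes smoothing kernels as sigmas, so convert from FWHM:
# sigma = FWHM / (2 * sqrt(2 * ln 2)) ≈ FWHM / 2.355, e.g. FWHM 2.0 mm -> sigma ≈ 0.85 mm.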
wb_smooth.inputs.direction = 'COLUMN'
# drop pre steady state volumes from temporary NIFTI
drop_volumes = pe.Node(interface = fsl.ExtractROI(), name='drop_volumes', iterfield = [''])
drop_volumes.inputs.t_min = args.drop_trs
drop_volumes.inputs.t_size = -1
# Convert CIFTI to NIFTI for filtering
wb_tonifti = pe.Node(interface = workbench.CiftiToNifti(), name='wb_tonifti', iterfield = [''])
# intensity normalization
fsl_tmean = pe.Node(interface = fsl.ImageStats(op_string='-M'), name='fsl_tmean' )
fn_Scaling = pe.Node(interface = utility.wrappers.Function(input_names=['mean'], output_names=['scaling_op']), name='fn_Scaling')
fn_Scaling.inputs.function_str = "def f(mean): return('-mul ' + str(10000.0/float(mean)))"
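# Grand-mean scaling: multiplying by 10000 / mean rescales the run so its mean
# intensity is 10000, matching FEAT's default intensity normalisation.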
fsl_IntNorm = pe.Node(interface = fsl.ImageMaths(), name='fsl_IntNorm')
# Save the prefiltered mean
fsl_MeanImage = pe.Node(interface = fsl.MeanImage(), name='fsl_MeanImage', iterfield = [''])
# temporal filtering of the NIFTI
fsl_TemporalFilter = pe.Node(interface = fsl.TemporalFilter(), name='fsl_TemporalFilter', iterfield = [''])
fsl_TemporalFilter.inputs.highpass_sigma = 0.5 * args.hpfilter / bold_meta['RepetitionTime']
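# fslmaths -bptf expects the highpass sigma in volumes, conventionally half the
# cutoff period: e.g. a 120 s cutoff at TR = 2 s gives 0.5 * 120 / 2 = 30 volumes.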
# add the mean back to the filtered data
fsl_MultiImageMaths = pe.Node(interface = fsl.MultiImageMaths(), name='fsl_MultiImageMaths', iterfield = [''])
fsl_MultiImageMaths.inputs.op_string = '-add %s'
# convert filtered NIFTI back to CIFTI
# TODO use wb_command to drop volumes
wb_tocifti = pe.Node(interface = workbench.NiftiToCifti(), name='wb_tocifti', iterfield = [''])
wb_tocifti.inputs.cifti_template = f'{base_dir}/Results/{session_key}task-{task}_run-{run}{bold_suffix}/{session_key}task-{task}_run-{run}{bold_suffix}_Atlas{s0_str}.dtseries.nii'
# separate the CIFTI into left, right and volumetric components
wb_separate = pe.Node(interface = workbench.CiftiSeparate(), name='wb_separate', iterfield = [''])
wb_separate.inputs.direction = 'COLUMN'
# fit the GLM with film_gls separately for each hemisphere and the volume
filmgls_left = pe.Node(interface = fsl.FILMGLS(), name='filmgls_left', iterfield = [''])
filmgls_left.inputs.mode = 'surface'
filmgls_left.inputs.smooth_autocorr = True
filmgls_left.inputs.mask_size = 15
filmgls_left.inputs.brightness_threshold = 5
filmgls_left.inputs.output_type = 'GIFTI'
#Wraps command **film_gls**
filmgls_right = pe.Node(interface = fsl.FILMGLS(), name='filmgls_right', iterfield = [''])
filmgls_right.inputs.mode = 'surface'
filmgls_right.inputs.smooth_autocorr = True
filmgls_right.inputs.mask_size = 15
filmgls_right.inputs.brightness_threshold = 5
filmgls_right.inputs.output_type = 'GIFTI'
#Wraps command **film_gls**
filmgls_volume = pe.Node(interface = fsl.FILMGLS(), name='filmgls_volume', iterfield = [''])
filmgls_volume.inputs.mode = 'volumetric'
filmgls_volume.inputs.smooth_autocorr = True
filmgls_volume.inputs.mask_size = 5
filmgls_volume.inputs.threshold = 1
filmgls_volume.inputs.output_type = 'NIFTI'
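# All three fits smooth their autocorrelation estimates; the surface runs pass
# SUSAN-style parameters (mask_size, brightness_threshold) plus a midthickness
# surface, while the volumetric run uses a simple intensity threshold. Results
# stay as GIFTI/NIFTI until merged back into CIFTI below.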
#Flexibly collect data from disk to feed into workflows.
select_surfaces = pe.Node(
    io.SelectFiles(
        templates = {'gii_left': f'fsaverage_LR{mesh}k/sub-{subj_id}.L.midthickness.{mesh}k_fs_LR.surf.gii',
                     'gii_right': f'fsaverage_LR{mesh}k/sub-{subj_id}.R.midthickness.{mesh}k_fs_LR.surf.gii'}
    ),
    name = 'select_surfaces'
)
select_surfaces.inputs.base_directory = base_dir
select_surfaces.inputs.gii_left = f'fsaverage_LR{mesh}k/sub-{subj_id}.L.midthickness.{mesh}k_fs_LR.surf.gii'
select_surfaces.inputs.gii_right = f'fsaverage_LR{mesh}k/sub-{subj_id}.R.midthickness.{mesh}k_fs_LR.surf.gii'
#Flexibly collect data from disk to feed into workflows.
select_rois = pe.Node(
    io.SelectFiles(
        templates = {'rois': f'ROIs/ROIs.{resolution}.nii.gz'}
    ),
    name = 'select_rois'
)
select_rois.inputs.base_directory = base_dir
select_rois.inputs.rois = f'ROIs/ROIs.{resolution}.nii.gz'
#Flexibly collect data from disk to feed into workflows.
select_func = pe.Node(
    io.SelectFiles(
        templates = {'out_bold': f'Results/{session_key}task-{task}_run-{run}{bold_suffix}/{session_key}task-{task}_run-{run}{bold_suffix}.nii.gz',
                     'out_dtseries': f'Results/{session_key}task-{task}_run-{run}{bold_suffix}/{session_key}task-{task}_run-{run}{bold_suffix}_Atlas{s0_str}.dtseries.nii'}
    ),
    name = 'select_func'
)
select_func.inputs.base_directory = base_dir
select_func.inputs.out_dtseries = f'Results/{session_key}task-{task}_run-{run}{bold_suffix}/{session_key}task-{task}_run-{run}{bold_suffix}_Atlas{s0_str}.dtseries.nii'
select_func.inputs.out_bold = f'Results/{session_key}task-{task}_run-{run}{bold_suffix}/{session_key}task-{task}_run-{run}{bold_suffix}.nii.gz'
# dilate the separated surface timeseries before model fitting
wb_dilate_left = pe.Node(interface = workbench.MetricDilate(), name='wb_dilate_left', iterfield = [''])
wb_dilate_left.inputs.distance = 50
wb_dilate_left.inputs.nearest = True
wb_dilate_right = pe.Node(interface = workbench.MetricDilate(), name='wb_dilate_right', iterfield = [''])
wb_dilate_right.inputs.distance = 50
wb_dilate_right.inputs.nearest = True
select_pe_left = pe.Node(interface = utility.Select(), name='select_pe_left', iterfield = [''])
select_pe_right = pe.Node(interface = utility.Select(), name='select_pe_right', iterfield = [''])
select_pe_volume = pe.Node(interface = utility.Select(), name='select_pe_volume', iterfield = [''])
select_cope_left = pe.Node(interface = utility.Select(), name='select_cope_left', iterfield = [''])
select_cope_right = pe.Node(interface = utility.Select(), name='select_cope_right', iterfield = [''])
select_cope_volume = pe.Node(interface = utility.Select(), name='select_cope_volume', iterfield = [''])
select_varcope_left = pe.Node(interface = utility.Select(), name='select_varcope_left', iterfield = [''])
select_varcope_right = pe.Node(interface = utility.Select(), name='select_varcope_right', iterfield = [''])
select_varcope_volume = pe.Node(interface = utility.Select(), name='select_varcope_volume', iterfield = [''])
select_zstat_left = pe.Node(interface = utility.Select(), name='select_zstat_left', iterfield = [''])
select_zstat_right = pe.Node(interface = utility.Select(), name='select_zstat_right', iterfield = [''])
select_zstat_volume = pe.Node(interface = utility.Select(), name='select_zstat_volume', iterfield = [''])
select_zfstat_left = pe.Node(interface = utility.Select(), name='select_zfstat_left', iterfield = [''])
select_zfstat_right = pe.Node(interface = utility.Select(), name='select_zfstat_right', iterfield = [''])
select_zfstat_volume = pe.Node(interface = utility.Select(), name='select_zfstat_volume', iterfield = [''])
# identity nodes that iterate over the PE and contrast indices
pe_list = pe.Node(utility.IdentityInterface(fields=['pe_index']), name='pe_list')
pe_list.iterables = ('pe_index', np.arange(len(condition_names)))
n_tstats = len([x[0] for x in design.inputs.contrasts if x[1]=='T'])
n_fstats = len([x[0] for x in design.inputs.contrasts if x[1]=='F'])
cope_list = pe.Node(utility.IdentityInterface(fields=['cope_index']), name='cope_list')
cope_list.iterables = ('cope_index', np.arange(n_tstats))
varcope_list = pe.Node(utility.IdentityInterface(fields=['varcope_index']), name='varcope_list')
varcope_list.iterables = ('varcope_index', np.arange(n_tstats))
zstat_list = pe.Node(utility.IdentityInterface(fields=['zstat_index']), name='zstat_list')
zstat_list.iterables = ('zstat_index', np.arange(n_tstats))
zfstat_list = pe.Node(utility.IdentityInterface(fields=['zfstat_index']), name='zfstat_list')
zfstat_list.iterables = ('zfstat_index', np.arange(n_fstats))
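# Setting iterables expands the graph at run time: each index value spawns its
# own copy of the downstream select_* and wb_merge_* nodes, so e.g. three T
# contrasts yield three parallel cope/varcope/zstat branches.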
# merge the left, right and volume results back into single CIFTI dense files
wb_merge_pe = pe.Node(interface = workbench.CiftiCreateDenseTimeseries(), name='wb_merge_pe', iterfield = [''])
wb_merge_cope = pe.Node(interface=workbench.CiftiCreateDenseTimeseries(), name='wb_merge_cope', iterfield=[''])
wb_merge_zstat = pe.Node(interface = workbench.CiftiCreateDenseTimeseries(), name='wb_merge_zstat', iterfield = [''])
wb_merge_zfstat = pe.Node(interface = workbench.CiftiCreateDenseTimeseries(), name='wb_merge_zfstat', iterfield = [''])
wb_merge_varcope = pe.Node(interface = workbench.CiftiCreateDenseTimeseries(), name='wb_merge_varcope', iterfield = [''])
if args.mode_ciftify:
    # use the template folder to avoid broken symlinks
    roi_path_l = f'{output_dir}/ciftify/zz_templates/L.atlasroi.32k_fs_LR.shape.gii'
    roi_path_r = f'{output_dir}/ciftify/zz_templates/R.atlasroi.32k_fs_LR.shape.gii'
    roi_path_v = f'{output_dir}/ciftify/zz_templates/Atlas_ROIs.{resolution}.nii.gz'
if args.mode_hcp:
    roi_path_l = f'{base_dir}/fsaverage_LR32k/sub-{subj_id}.L.atlasroi.32k_fs_LR.shape.gii'
    roi_path_r = f'{base_dir}/fsaverage_LR32k/sub-{subj_id}.R.atlasroi.32k_fs_LR.shape.gii'
    roi_path_v = f'{base_dir}/ROIs/Atlas_ROIs.{resolution}.nii.gz'
wb_merge_pe.inputs.roi_left = roi_path_l
wb_merge_pe.inputs.roi_right = roi_path_r
wb_merge_pe.inputs.volume_label = roi_path_v
wb_merge_cope.inputs.roi_left = roi_path_l
wb_merge_cope.inputs.roi_right = roi_path_r
wb_merge_cope.inputs.volume_label = roi_path_v
wb_merge_zstat.inputs.roi_left = roi_path_l
wb_merge_zstat.inputs.roi_right = roi_path_r
wb_merge_zstat.inputs.volume_label = roi_path_v
wb_merge_zfstat.inputs.roi_left = roi_path_l
wb_merge_zfstat.inputs.roi_right = roi_path_r
wb_merge_zfstat.inputs.volume_label = roi_path_v
wb_merge_varcope.inputs.roi_left = roi_path_l
wb_merge_varcope.inputs.roi_right = roi_path_r
wb_merge_varcope.inputs.volume_label = roi_path_v
#Generic datasink module to store structured outputs
io_DataSink = pe.Node(interface = io.DataSink(), name='io_DataSink', iterfield = [''])
io_DataSink.inputs.base_directory = base_dir
io_DataSink.inputs.container = f'Results/{session_key}task-{task}_run-{run}{bold_suffix}/level1.feat'
io_DataSink.inputs.parameterization = False
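# With parameterization=False the merged files are written flat under
# {base_dir}/Results/.../level1.feat/stats and .../design rather than into
# per-iterable subdirectories.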
#Create a workflow to connect all those nodes
analysisflow = nipype.Workflow(f'sub-{subj_id}_{session_key}task-{task}_run-{run}_level1_wf')
analysisflow.connect(drop_volumes, "roi_file", model_spec, "functional_runs")
analysisflow.connect(model_spec, "session_info", design, "session_info")
analysisflow.connect(design, "fsf_files", fsl_FEATModel, "fsf_file")
analysisflow.connect(design, "ev_files", fsl_FEATModel, "ev_files")
analysisflow.connect(select_surfaces, "gii_left", wb_smooth, "left_surface")
analysisflow.connect(select_surfaces, "gii_right", wb_smooth, "right_surface")
analysisflow.connect(wb_smooth, "out_file", wb_tonifti, "in_file")
analysisflow.connect(wb_tonifti, "out_file", drop_volumes, "in_file")
analysisflow.connect(drop_volumes, "roi_file", fsl_MeanImage, "in_file")
analysisflow.connect(drop_volumes, "roi_file", fsl_tmean, "in_file")
analysisflow.connect(fsl_tmean, "out_stat", fn_Scaling, "mean")
analysisflow.connect(fn_Scaling, "scaling_op", fsl_IntNorm, "op_string")
analysisflow.connect(drop_volumes, "roi_file", fsl_IntNorm, "in_file")
analysisflow.connect(fsl_IntNorm, "out_file", fsl_TemporalFilter, "in_file")
analysisflow.connect(fsl_TemporalFilter, "out_file", fsl_MultiImageMaths, "in_file")
analysisflow.connect(fsl_MeanImage, "out_file", fsl_MultiImageMaths, "operand_files")
analysisflow.connect(fsl_MultiImageMaths, "out_file", wb_tocifti, "in_file")
analysisflow.connect(wb_tocifti, "out_file", wb_separate, "in_file")
# set up three film_gls nodes (one per hemisphere, plus the volume)
# include surface dilation
analysisflow.connect(fsl_FEATModel, "design_file", filmgls_left, "design_file")
analysisflow.connect(fsl_FEATModel, "con_file", filmgls_left, "tcon_file")
analysisflow.connect(fsl_FEATModel, "fcon_file", filmgls_left, "fcon_file")
analysisflow.connect(select_surfaces, "gii_left", filmgls_left, "surface")
analysisflow.connect(wb_dilate_left, "out_file", filmgls_left, "in_file")
analysisflow.connect(fsl_FEATModel, "design_file", filmgls_right, "design_file")
analysisflow.connect(fsl_FEATModel, "con_file", filmgls_right, "tcon_file")
analysisflow.connect(fsl_FEATModel, "fcon_file", filmgls_right, "fcon_file")
analysisflow.connect(select_surfaces, "gii_right", filmgls_right, "surface")
analysisflow.connect(wb_dilate_right, "out_file", filmgls_right, "in_file")
analysisflow.connect(fsl_FEATModel, "design_file", filmgls_volume, "design_file")
analysisflow.connect(fsl_FEATModel, "con_file", filmgls_volume, "tcon_file")
analysisflow.connect(fsl_FEATModel, "fcon_file", filmgls_volume, "fcon_file")
analysisflow.connect(wb_separate, "out_volume", filmgls_volume, "in_file")
analysisflow.connect(select_func, "out_dtseries", wb_smooth, "in_file")
#analysisflow.connect(select_func, "out_dtseries", wb_tocifti, "cifti_template")
analysisflow.connect(wb_separate, "out_left", wb_dilate_left, "in_file")
analysisflow.connect(wb_separate, "out_right", wb_dilate_right, "in_file")
analysisflow.connect(select_surfaces, "gii_left", wb_dilate_left, "surface_file")
analysisflow.connect(select_surfaces, "gii_right", wb_dilate_right, "surface_file")
# degrees of freedom (from the volumetric fit)
analysisflow.connect(filmgls_volume, "dof_file", io_DataSink, "stats")
# merge outputs into a single CIFTI dense timeseries
#pe
#analysisflow.connect(pe_list, "pe_index", select_pe_left, "index")
#analysisflow.connect(pe_list, "pe_index", select_pe_right, "index")
# analysisflow.connect(pe_list, "pe_index", select_pe_volume, "index")
# analysisflow.connect(filmgls_left, "param_estimates", select_pe_left, "inlist")
# analysisflow.connect(filmgls_right, "param_estimates", select_pe_right, "inlist")
# analysisflow.connect(filmgls_volume, "param_estimates", select_pe_volume, "inlist")
# analysisflow.connect(select_pe_left, "out", wb_merge_pe, "left_metric")
# analysisflow.connect(select_pe_right, "out", wb_merge_pe, "right_metric")
# analysisflow.connect(select_pe_volume, "out", wb_merge_pe, "volume_file")
#analysisflow.connect(wb_merge_pe, "out_file", io_DataSink, "stats.@pe")
#cope
analysisflow.connect(cope_list, "cope_index", select_cope_left, "index")
analysisflow.connect(cope_list, "cope_index", select_cope_right, "index")
analysisflow.connect(cope_list, "cope_index", select_cope_volume, "index")
analysisflow.connect(filmgls_left, "copes", select_cope_left, "inlist")
analysisflow.connect(filmgls_right, "copes", select_cope_right, "inlist")
analysisflow.connect(filmgls_volume, "copes", select_cope_volume, "inlist")
analysisflow.connect(select_cope_left, "out", wb_merge_cope, "left_metric")
analysisflow.connect(select_cope_right, "out", wb_merge_cope, "right_metric")
analysisflow.connect(select_cope_volume, "out", wb_merge_cope, "volume_file")
analysisflow.connect(wb_merge_cope, "out_file", io_DataSink, "stats.@cope")
#varcopes
analysisflow.connect(varcope_list, "varcope_index", select_varcope_left, "index")
analysisflow.connect(varcope_list, "varcope_index", select_varcope_right, "index")
analysisflow.connect(varcope_list, "varcope_index", select_varcope_volume, "index")
analysisflow.connect(filmgls_left, "varcopes", select_varcope_left, "inlist")
analysisflow.connect(filmgls_right, "varcopes", select_varcope_right, "inlist")
analysisflow.connect(filmgls_volume, "varcopes", select_varcope_volume, "inlist")
analysisflow.connect(select_varcope_left, "out", wb_merge_varcope, "left_metric")
analysisflow.connect(select_varcope_right, "out", wb_merge_varcope, "right_metric")
analysisflow.connect(select_varcope_volume, "out", wb_merge_varcope, "volume_file")
analysisflow.connect(wb_merge_varcope, "out_file", io_DataSink, "stats.@varcope")
# merge outputs into a single CIFTI dense timeseries
# zstats
analysisflow.connect(zstat_list, "zstat_index", select_zstat_left, "index")
analysisflow.connect(zstat_list, "zstat_index", select_zstat_right, "index")
analysisflow.connect(zstat_list, "zstat_index", select_zstat_volume, "index")
analysisflow.connect(filmgls_left, "zstats", select_zstat_left, "inlist")
analysisflow.connect(filmgls_right, "zstats", select_zstat_right, "inlist")
analysisflow.connect(filmgls_volume, "zstats", select_zstat_volume, "inlist")
analysisflow.connect(select_zstat_left, "out", wb_merge_zstat, "left_metric")
analysisflow.connect(select_zstat_right, "out", wb_merge_zstat, "right_metric")
analysisflow.connect(select_zstat_volume, "out", wb_merge_zstat, "volume_file")
analysisflow.connect(wb_merge_zstat, "out_file", io_DataSink, "stats.@zstat")
# # f stats
# analysisflow.connect(zfstat_list, "zfstat_index", select_zfstat_left, "index")
# analysisflow.connect(zfstat_list, "zfstat_index", select_zfstat_right, "index")
# analysisflow.connect(zfstat_list, "zfstat_index", select_zfstat_volume, "index")
# analysisflow.connect(filmgls_left, "zfstats", select_zfstat_left, "inlist")
# analysisflow.connect(filmgls_right, "zfstats", select_zfstat_right, "inlist")
# analysisflow.connect(filmgls_volume, "zfstats", select_zfstat_volume, "inlist")
# analysisflow.connect(select_zfstat_left, "out", wb_merge_zfstat, "left_metric")
# analysisflow.connect(select_zfstat_right, "out", wb_merge_zfstat, "right_metric")
# analysisflow.connect(select_zfstat_volume, "out", wb_merge_zfstat, "volume_file")
# analysisflow.connect(wb_merge_zfstat, "out_file", io_DataSink, "stats.@zfstat")
# design debugging
analysisflow.connect(fsl_FEATModel, "design_file", io_DataSink, "design")
analysisflow.connect(fsl_FEATModel, "design_image", io_DataSink, "design.@design_image")
analysisflow.connect(fsl_FEATModel, "design_cov", io_DataSink, "design.@design_cov")
analysisflow.connect(fsl_FEATModel, "con_file", io_DataSink, "design.@con_file")
analysisflow.connect(fsl_FEATModel, "fcon_file", io_DataSink, "design.@fcon_file")
#Run the workflow
plugin = 'MultiProc'  # change the execution plugin here if desired
plugin_args = {'n_procs': args.n_cpus}  # n_procs follows the --n_cpus argument
#analysisflow.write_graph(graph2use='exec', format='png', simple_form=False)
analysisflow.run(plugin=plugin, plugin_args=plugin_args)
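# Example invocation (hypothetical script name, paths and labels):
#   python first_level_cifti.py /data/bids /data/derivatives \
#       /data/contrasts/task-nback_tcontrasts.tsv 01 nback 1 \
#       --mode_ciftify --motion6 --motion6dt --fwhm 2.0 --n_cpus 4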