#!/usr/bin/env tcsh

# Run afni_proc.py on the given subject ID.
# Do the analysis under subject_results/SID.
# This script can be re-started to continue where it left off.

# ----------------------------------------------------------------------
# check usage
if ( $#argv != 1 ) then
   echo "usage: `basename $0` SUBJ_ID"
   exit 1
endif

# set data directories
set top_dir  = `pwd`
set data_dir = $top_dir/input_data/Cambridge_Buckner

# ----------------------------------------------------------------------
# set subject identifier and execution directory
set subj     = $argv[1]
set exec_dir = subject_results/$subj

# ----------------------------------------------------------------------
# note data inputs and check for existence
set anat = $data_dir/$subj/anat/mprage_skullstripped.nii.gz
set epi  = $data_dir/$subj/func/rest.nii.gz

if ( ! -f $anat || ! -f $epi ) then
   echo "** missing input data for subject $subj"
   exit 1
endif

# ----------------------------------------------------------------------
# if afni_proc.py has already been run, do not reprocess (user must delete)
# (if it looks like a subject was partially processed, delete and continue)
if ( -d $exec_dir/$subj.results ) then
   echo -n "** subject $subj already has output dir"

   # if the final errts exists, subject is done
   if ( -f $exec_dir/$subj.results/errts.$subj+tlrc.HEAD ) then
      echo " and errts, skipping"
      exit 0
   endif

   # have results dir but no errts, start process over
   echo " but no final errts dataset"
   echo "   ==> will re-process ..."
   \rm -fr $exec_dir
endif

# ----------------------------------------------------------------------
# create subject execution directory and enter
mkdir -p $exec_dir
cd $exec_dir

# ----------------------------------------------------------------------
# create afni_proc.py script, just to have in subject dir
cat << EOF > cmd.afni_proc.txt
#!/bin/tcsh

# run afni_proc.py to create a single subject processing script

# ----------------------------------------------------------------------
# from uber_subject.py defaults, but:
#    plus despike, no tshift (do not have it), no scale (no effect)
#    blur to 6 mm
#    tlrc base MNI_avg152T1+tlrc
#    no skull-stripping, already done
#    bandpass
#    censor motion > 0.2 mm, censor outliers > 0.1 (10% brain)
#    regress demean deriv
#    -align_opts_aea -cost lpc+ZZ -giant_move
#    3dD -jobs 5
#    -volreg_warp_dxyz 3.0 (make sure everyone comes out the same)
# ----------------------------------------------------------------------

# run afni_proc.py to create a single subject processing script
afni_proc.py -subj_id $subj                                   \
    -script proc.$subj -scr_overwrite                         \
    -blocks despike align tlrc volreg blur mask regress       \
    -copy_anat $anat                                          \
    -tcat_remove_first_trs 0                                  \
    -dsets $epi                                               \
    -align_opts_aea -cost lpc+ZZ -giant_move                  \
    -tlrc_base MNI_avg152T1+tlrc                              \
    -tlrc_no_ss                                               \
    -volreg_align_to third                                    \
    -volreg_align_e2a                                         \
    -volreg_tlrc_warp                                         \
    -volreg_warp_dxyz 3.0                                     \
    -blur_size 6.0                                            \
    -regress_censor_motion 0.2                                \
    -regress_censor_outliers 0.1                              \
    -regress_bandpass 0.01 0.1                                \
    -regress_apply_mot_types demean deriv                     \
    -regress_run_clustsim no                                  \
    -regress_opts_3dD                                         \
        -jobs 5                                               \
    -regress_est_blur_errts                                   \
    -execute
EOF
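
# ----------------------------------------------------------------------
# note: since the EOF delimiter above is unquoted, $subj, $anat and $epi
# were expanded as the here-document was written, so cmd.afni_proc.txt
# records the exact afni_proc.py command used for this subject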

# ----------------------------------------------------------------------
# execute the afni_proc.py command
# (this creates and then runs the single subject processing script)
echo "++ processing subject $subj"
echo "   to monitor, try: tail -f $exec_dir/output.proc.$subj"
echo "   this should take 3-20 minutes..."
echo -n "   date: "
date

# output will already be saved, so hide it here to give the user the
# option to see it elsewhere (so user can monitor subjects)
tcsh cmd.afni_proc.txt >& /dev/null

echo ""

# return to previous directory (though this isn't really needed)
cd -
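
# ----------------------------------------------------------------------
# example usage (a sketch: the script name run_subject.tcsh and subject
# ID sub00001 are placeholders for however this file is named and for
# whatever IDs exist under input_data/Cambridge_Buckner)
#
#   tcsh run_subject.tcsh sub00001
#
# or, to process every subject found in the input directory:
#
#   foreach sid ( `\ls input_data/Cambridge_Buckner` )
#      tcsh run_subject.tcsh $sid
#   end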