#!/usr/bin/env tcsh

# Run afni_proc.py on the given subject ID.
# Do the analysis under subject_results/SID.
# This script can be re-started to continue where it left off.
#
# Usage:   run_afni_proc SUBJ_ID
# Inputs:  input_data/anat_$subj.nii.gz and input_data/rest_$subj.nii.gz
#          (relative to the directory the script is run from)
# Outputs: subject_results/$subj/ containing cmd.afni_proc.txt, the
#          generated proc.$subj script, and the $subj.results directory
# Exits:   0 on success or when the subject is already fully processed,
#          1 on usage error or missing input data

# ----------------------------------------------------------------------
# check usage: exactly one argument, the subject ID
if ( $#argv != 1 ) then
    echo "usage `basename $0` SUBJ_ID"
    # exit non-zero so batch drivers can detect the failure
    exit 1
endif

# ----------------------------------------------------------------------
# set data directories
set top_dir = `pwd`
set data_dir = $top_dir/input_data

# ----------------------------------------------------------------------
# set subject identifier and execution directory
set subj = $argv[1]
set exec_dir = subject_results/$subj

# ----------------------------------------------------------------------
# note data inputs and check for existence
set anat = $data_dir/anat_$subj.nii.gz
set epi = $data_dir/rest_$subj.nii.gz

if ( ! -f $anat || ! -f $epi ) then
    echo "** missing input data for subject $subj"
    # exit non-zero: missing inputs is an error, not a skip
    exit 1
endif

# ----------------------------------------------------------------------
# if afni_proc.py has already been run, do not reprocess (user must delete)
# (if it looks like a subject was partially processed, delete and continue)
if ( -d $exec_dir/$subj.results ) then
    echo -n "** subject $subj already has output dir"

    # if the final errts exists, subject is done
    if ( -f $exec_dir/$subj.results/errts.$subj+tlrc.HEAD ) then
        echo " and errts, skipping"
        exit
    endif

    # have results dir but no errts, start process over
    # (\rm bypasses any user alias of rm, e.g. 'rm -i')
    echo " but no final errts dataset, will re-process"
    \rm -fr $exec_dir
endif

# ----------------------------------------------------------------------
# create subject execution directory and enter
# (-p instead of GNU-only --parents: portable to BSD/macOS mkdir)
mkdir -p $exec_dir
cd $exec_dir

# ----------------------------------------------------------------------
# create afni_proc.py script, just to have in subject dir
# (unquoted EOF delimiter: $subj, $anat and $epi are expanded into the file)
cat << EOF > cmd.afni_proc.txt
#!/bin/tcsh

# run afni_proc.py to create a single subject processing script
# ===================================================================
# - no timing correction (info not in input files)
# - allow for large alignment
# - align to MNI avg 152 T1 template
# - anats are already skull-stripped
# - convert resolution to 5 mm
#   ** this is only for demo purposes: low RAM for GroupInstaCorr
#      (a real analysis would use 3 mm, the approx orig resolution)
# - blur to 6 mm FWHM
# - censor motion > .2 mm
# - bandpass and censor during regression
# - use 7 CPUs (the appropriateness of this will vary per machine!)
# - do not compute fitts
# - do not run 3dClustSim
# - have afni_proc.py execute the created processing script

afni_proc.py -subj_id $subj                                 \
    -script proc.$subj -scr_overwrite                       \
    -blocks despike align tlrc volreg blur mask regress     \
    -copy_anat $anat                                        \
    -tcat_remove_first_trs 0                                \
    -dsets $epi                                             \
    -align_opts_aea -cost lpc+ZZ -giant_move                \
    -tlrc_base MNI_avg152T1+tlrc                            \
    -tlrc_no_ss                                             \
    -volreg_align_to third                                  \
    -volreg_align_e2a                                       \
    -volreg_tlrc_warp                                       \
    -volreg_warp_dxyz 5.0                                   \
    -blur_size 6.0                                          \
    -regress_censor_motion 0.2                              \
    -regress_bandpass 0.01 0.1                              \
    -regress_apply_mot_types demean deriv                   \
    -regress_opts_3dD                                       \
        -jobs 7                                             \
    -regress_no_fitts                                       \
    -regress_est_blur_errts                                 \
    -regress_run_clustsim no                                \
    -execute
EOF

# ----------------------------------------------------------------------
# execute the afni_proc.py command
# (this creates and then runs the single subject processing script)
echo "++ processing subject $subj"
echo " to monitor output, try: tail -f $exec_dir/output.proc.$subj"
echo " this should take 3-20 minutes..."
echo -n " date: "
date

# output will already be saved, so hide it here to give the user the
# option to see it elsewhere (so user can monitor subjects)
tcsh cmd.afni_proc.txt >& /dev/null
echo ""

# return to previous directory (though this isn't really needed)
cd -