#!/bin/tcsh

if ( $#argv > 0 ) then
    set subjects = ( $argv )
else
    set subjects = ED
endif

#===========================================================================
# The lines above let us run this script for all of our subjects - ED, EE,
# EF - one after the other by typing: ./@analyze_ht05 ED EE EF.
# If we type ./@analyze_ht05 or tcsh @analyze_ht05 with no arguments, the
# script runs only for subject ED; the user would then have to go back and
# edit 'set subjects' to EE and then EF, and rerun the script for each
# subject.
#===========================================================================

foreach subj ( $subjects )

    cd $subj

    # be sure that we have not yet processed this subject
    if ( -f dfile.r1.1D ) then
        echo ERROR: subject $subj has already been processed, skipping...
        cd ..
        continue
    endif

    #======================================================================
    # volume register and time shift our datasets, and remove the first
    # two time points
    #======================================================================
    set runs = ()
    foreach run ( `count -digits 1 1 11` )

        set dset = ${subj}_r${run}+orig.HEAD

        # can we use this run?
        if ( ! -f $dset ) then
            if ( $run <= 9 ) then
                echo ERROR: subject $subj missing run $run
            endif
            continue
        endif

        # create runs array
        set runs = ( $runs $run )

        3dvolreg -verbose                               \
                 -base ${subj}_r1+orig'[2]'             \
                 -tshift 0                              \
                 -prefix ${subj}_r${run}_vr             \
                 -1Dfile dfile.r$run.1D                 \
                 ${subj}_r${run}+orig'[2..137]'

        # (the original run data will be moved into the runs_orig directory later)

        #==================================================================
        # smooth data with 3dmerge
        #==================================================================
        3dmerge -1blur_rms 4                            \
                -doall                                  \
                -prefix {$subj}_r{$run}_vr_bl           \
                {$subj}_r{$run}_vr+orig
    end

    #======================================================================
    # create masks for each run using 3dAutomask
    #======================================================================
    foreach run ( $runs )
        3dAutomask -prefix mask_r{$run} {$subj}_r{$run}_vr_bl+orig
    end

    #======================================================================
    # create a mask enveloping the masks of the individual runs
    #======================================================================

    # create the 3dcalc command:
    set letters = ( a b c d e f g h i j k l )
    set dsets = ( -a mask_r1+orig )
    set expr_dsets = a
    foreach run ( $runs[2-] )
        set dsets = ( $dsets -$letters[$run] mask_r$run+orig )
        set expr_dsets = $expr_dsets+$letters[$run]
    end

    3dcalc $dsets -expr "step($expr_dsets)" -prefix full_mask
    rm -f mask_r*+orig.*
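
    #======================================================================
    # For illustration only: if runs 1-3 were the only runs found, the loop
    # above would build a command equivalent to
    #
    #   3dcalc -a mask_r1+orig -b mask_r2+orig -c mask_r3+orig \
    #          -expr "step(a+b+c)" -prefix full_mask
    #
    # i.e. a voxel survives in full_mask if it lies inside the automask of
    # at least one run.
    #======================================================================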
    #======================================================================
    # re-scale each run's baseline to 100.
    # E.g., if the baseline is 100 and the 3dcalc result at a voxel is 106,
    # then that voxel shows a 6% increase in signal activity relative to
    # baseline.
    # Use full_mask to remove non-brain voxels.
    #======================================================================
    foreach run ( $runs )
        3dTstat -prefix mean_r{$run} {$subj}_r{$run}_vr_bl+orig

        3dcalc -a {$subj}_r{$run}_vr_bl+orig            \
               -b mean_r{$run}+orig                     \
               -c full_mask+orig                        \
               -expr "(a/b * 100) * c"                  \
               -prefix scaled_r{$run}

        \rm mean_r{$run}+orig*
    end

    #======================================================================
    # Now we can concatenate our 10 normalized runs with 3dTcat.
    #======================================================================
    set dsets  = ()
    set dfiles = ()
    foreach run ( $runs )
        set dsets  = ( $dsets scaled_r$run+orig )
        set dfiles = ( $dfiles dfile.r$run.1D )
    end

    3dTcat -prefix {$subj}_all_runs $dsets
    cat $dfiles > dfile.all.1D

    #======================================================================
    # move unneeded run data into separate directories
    #======================================================================
    mkdir runs_orig runs_temp
    mv {$subj}_r*_vr* scaled* runs_temp
    mv {$subj}_r* runs_orig

    #======================================================================
    # update the concat file (run-start indices for 3dDeconvolve), in case
    # this subject has fewer runs than expected
    #======================================================================
    set concat_file = ../misc_files/runs.1D
    set length = `wc -l $concat_file | awk '{print $1}'`
    if ( $#runs < $length ) then
        # make a new concat_file
        head -$#runs $concat_file > runs.1D
        set concat_file = runs.1D
    endif

    #======================================================================
    # run the deconvolution analysis
    #======================================================================
    3dDeconvolve -input {$subj}_all_runs+orig -num_stimts 10                   \
        -stim_file 1 ../misc_files/all_stims.1D'[0]' -stim_label 1 ToolMovie   \
        -stim_minlag 1 0 -stim_maxlag 1 14 -stim_nptr 1 2                      \
        -stim_file 2 ../misc_files/all_stims.1D'[1]' -stim_label 2 HumanMovie  \
        -stim_minlag 2 0 -stim_maxlag 2 14 -stim_nptr 2 2                      \
        -stim_file 3 ../misc_files/all_stims.1D'[2]' -stim_label 3 ToolPoint   \
        -stim_minlag 3 0 -stim_maxlag 3 14 -stim_nptr 3 2                      \
        -stim_file 4 ../misc_files/all_stims.1D'[3]' -stim_label 4 HumanPoint  \
        -stim_minlag 4 0 -stim_maxlag 4 14 -stim_nptr 4 2                      \
        -stim_file 5 dfile.all.1D'[0]' -stim_base 5                            \
        -stim_file 6 dfile.all.1D'[1]' -stim_base 6                            \
        -stim_file 7 dfile.all.1D'[2]' -stim_base 7                            \
        -stim_file 8 dfile.all.1D'[3]' -stim_base 8                            \
        -stim_file 9 dfile.all.1D'[4]' -stim_base 9                            \
        -stim_file 10 dfile.all.1D'[5]' -stim_base 10                          \
        -gltsym ../misc_files/contrast1.1D -glt_label 1 FullF                  \
        -gltsym ../misc_files/contrast2.1D -glt_label 2 HvsT                   \
        -gltsym ../misc_files/contrast3.1D -glt_label 3 MvsP                   \
        -gltsym ../misc_files/contrast4.1D -glt_label 4 HMvsHP                 \
        -gltsym ../misc_files/contrast5.1D -glt_label 5 TMvsTP                 \
        -gltsym ../misc_files/contrast6.1D -glt_label 6 HPvsTP                 \
        -gltsym ../misc_files/contrast7.1D -glt_label 7 HMvsTM                 \
        -iresp 1 TMirf -iresp 2 HMirf -iresp 3 TPirf -iresp 4 HPirf            \
        -full_first -fout -tout -nobout -polort 2                              \
        -concat $concat_file                                                   \
        -progress 10000                                                        \
        -bucket {$subj}_func

    #======================================================================
    # make a slim dataset: our bucket dataset has too many sub-bricks, so
    # use 3dbucket to keep only the sub-bricks of interest.
    #======================================================================
    3dbucket -prefix {$subj}_func_slim -fbuc {$subj}_func+orig'[125..151]'

    #======================================================================
    # Remember the IRF datasets created by 3dDeconvolve?  Each holds 15
    # time lags per voxel (sub-bricks 0-14).  Drop the early and late lags,
    # which are not of interest, and compute the mean percent signal change
    # over the remaining lags '[3..9]' in each voxel with '3dTstat'.
    # Then transform to Talairach coordinates with 'adwarp'.
    #======================================================================
    foreach cond ( TM HM TP HP )
        3dTstat -prefix {$subj}_{$cond}_irf_mean {$cond}irf+orig'[3..9]'
        adwarp -apar {$subj}spgr+tlrc -dpar {$subj}_{$cond}_irf_mean+orig
    end

    cd ..
end
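
#======================================================================
# Optional sanity check (an extra step, not required by the analysis):
# before moving on to the group analysis, confirm that every subject has
# all four Talairach-space IRF means produced by adwarp above.
#======================================================================
foreach subj ( $subjects )
    foreach cond ( TM HM TP HP )
        if ( ! -f $subj/${subj}_${cond}_irf_mean+tlrc.HEAD ) then
            echo "WARNING: missing ${subj}_${cond}_irf_mean+tlrc for subject $subj"
        endif
    end
end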
#======================================================================
# End of script!
# Take the {$subj}_{$cond}_irf_mean+tlrc datasets created above and input
# them into 3dANOVA2 for the group analysis.
#======================================================================
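
#======================================================================
# A minimal sketch of that group step (illustrative only): it assumes the
# three subjects ED, EE and EF, treats condition (TM, HM, TP, HP) as the
# fixed factor A and subject as the random factor B, and mirrors the HvsT
# contrast from the deconvolution above.  Adjust the dataset names, factor
# levels, and contrasts to match your own design before using it.
#
#   3dANOVA2 -type 3 -alevels 4 -blevels 3      \
#            -dset 1 1 ED/ED_TM_irf_mean+tlrc   \
#            -dset 2 1 ED/ED_HM_irf_mean+tlrc   \
#            -dset 3 1 ED/ED_TP_irf_mean+tlrc   \
#            -dset 4 1 ED/ED_HP_irf_mean+tlrc   \
#            -dset 1 2 EE/EE_TM_irf_mean+tlrc   \
#            -dset 2 2 EE/EE_HM_irf_mean+tlrc   \
#            -dset 3 2 EE/EE_TP_irf_mean+tlrc   \
#            -dset 4 2 EE/EE_HP_irf_mean+tlrc   \
#            -dset 1 3 EF/EF_TM_irf_mean+tlrc   \
#            -dset 2 3 EF/EF_HM_irf_mean+tlrc   \
#            -dset 3 3 EF/EF_TP_irf_mean+tlrc   \
#            -dset 4 3 EF/EF_HP_irf_mean+tlrc   \
#            -fa CondF                          \
#            -acontr -1 1 -1 1 HvsT             \
#            -bucket group_anova
#======================================================================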