#!/bin/tcsh

#======================================================================
# start with basic variable initializations
#======================================================================

# save a couple of directory names for future use
set topdir      = `pwd`
set outlier_dir = $topdir/afni/outliers

#======================================================================
# now perform various tests to make sure the world is okay
#======================================================================

# verify that the script is being run from the proper directory
if ( ! -d DDmb_data ) then
    echo ""
    echo "error: this script must be run from the 'ht02_DDmb' directory"
    echo "       under the top level HOWTO directory"
    echo "exiting..."
    echo ""
    exit
endif

# if the afni directory is already here, complain and exit
if ( -e afni ) then
    echo "failure: the 'afni' directory already exists"
    exit
endif

# make a directory to store the outlier files from the to3d commands
mkdir -p $outlier_dir

# verify that the previous command succeeded
if ( ! -d afni ) then
    echo "failure: cannot create 'afni' directory (no write permission?)"
    exit
endif

# verify that an AFNI program is in the PATH
which to3d >& /dev/null
if ( $status != 0 ) then
    echo "failure: cannot find 'to3d' - is AFNI installed?"
    exit
endif

#======================================================================
# begin pre-processing the data
#======================================================================

# create the SPGR dataset, putting it in the afni directory
cd DDmb_data/SPGR_data
to3d -prefix DDSPGR -orient ASL -zorigin 70 -session $topdir/afni -anat I.*

# ----------------------------------------------------------------------
# put run 1 EPI data into a 3D+time AFNI dataset
#   - run 1 will be the '-geomparent' for the other run data
cd $topdir/DDmb_data/EPI_data
to3d -session $topdir/afni -prefix DDr1                         \
     -save_outliers $outlier_dir/DDr1.outliers                  \
     -orient SPR -zorigin 69 -epan                              \
     -xSLAB 118.125S-I -ySLAB 118.125P-A -zSLAB 69R-61L         \
     -time:tz 110 27 2500 alt+z 3Ds:0:0:64:64:1:"DDr1*"

#-------------------------------------------------------
# put the other run data into 3D+time AFNI datasets
foreach run ( 2 3 4 )
    to3d -geomparent $topdir/afni/DDr1+orig -session $topdir/afni            \
         -prefix DDr${run} -save_outliers $outlier_dir/DDr${run}.outliers    \
         -time:tz 110 27 2500 alt+z                                          \
         3Ds:0:0:64:64:1:"DDr${run}*"
end

# ----------------------------------------------------------------------
# concatenate all 4 runs (time points 2 through 109) into a single dataset
#   - this makes a dataset with 432 = 108*4 time points (0 through 431)
cd $topdir/afni
3dTcat -prefix DDrall                                   \
       DDr1+orig'[2..109]' DDr2+orig'[2..109]'          \
       DDr3+orig'[2..109]' DDr4+orig'[2..109]'

# ----------------------------------------------------------------------
# register each of the 432 sub-bricks (time points) to the last one
3dvolreg -dfile DDrallvrout -base 431 -prefix DDrallvr DDrall+orig

#======================================================================
# done pre-processing data, now analyze it (produce functional bricks)
#======================================================================

# make a directory to store the various files
mkdir $topdir/afni/regressors

# ----------------------------------------------------------------------
# create stimulus files for each of the 4 stimulus types
cd $topdir/stim_files
foreach stim_type ( a t h l )

    # For this stimulus type, concatenate the stimulus timings
    # from all 4 runs into a single file.
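    # (assumption: each scanN<type>.txt file holds one stimulus value per
    #  TR, e.g. 0s and 1s, so the concatenated .1D file can be passed
    #  directly to 'waver -input' below)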
    cat scan1$stim_type.txt scan2$stim_type.txt        \
        scan3$stim_type.txt scan4$stim_type.txt        \
        > $topdir/afni/regressors/scan1to4$stim_type.1D

    # also, can do:
    # cat scan[1-4]$stim_type.txt \
    #     > $topdir/afni/regressors/scan1to4$stim_type.1D
end

# ----------------------------------------------------------------------
# create ideal reference function, one for each stimulus type
cd $topdir/afni/regressors
waver -dt 2.5 -GAM -input scan1to4a.1D > scan1to4a_hrf.1D
waver -dt 2.5 -GAM -input scan1to4t.1D > scan1to4t_hrf.1D
waver -dt 2.5 -GAM -input scan1to4h.1D > scan1to4h_hrf.1D
waver -dt 2.5 -GAM -input scan1to4l.1D > scan1to4l_hrf.1D

# ----------------------------------------------------------------------
# create a functional dataset, according to the stimulus timings
# and the contrasts
cd $topdir/afni
3dDeconvolve -xout -input DDrallvr+orig                                  \
    -num_stimts 4                                                        \
    -stim_file 1 regressors/scan1to4a_hrf.1D -stim_label 1 Actions       \
    -stim_file 2 regressors/scan1to4t_hrf.1D -stim_label 2 Tool          \
    -stim_file 3 regressors/scan1to4h_hrf.1D -stim_label 3 HCMS          \
    -stim_file 4 regressors/scan1to4l_hrf.1D -stim_label 4 LCMS          \
    -concat $topdir/contrasts/runs.1D                                    \
    -glt 1 $topdir/contrasts/DDcontrv1.txt -glt_label 1 AvsT             \
    -glt 1 $topdir/contrasts/DDcontrv2.txt -glt_label 2 HvsL             \
    -glt 1 $topdir/contrasts/DDcontrv3.txt -glt_label 3 ATvsHL           \
    -full_first -fout -tout                                              \
    -bucket DDrallvrMRv1
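
# ----------------------------------------------------------------------
# (optional) quick checks of the results, illustrative only and not part
# of the processing stream above; run these by hand from the 'afni'
# results directory (assumes the AFNI programs 3dinfo and 1dplot are in
# the PATH):
#
#   3dinfo DDrallvrMRv1+orig              # summarize the output bucket dataset
#   1dplot regressors/scan1to4a_hrf.1D    # plot one ideal response function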