#!/bin/tcsh -xef

echo "auto-generated by afni_proc.py, Wed Sep 27 11:58:40 2023"
echo "(version 7.60, August 21, 2023)"
echo "execution started: `date`"

# to execute via tcsh:
#   tcsh -xef proc.FT.surf |& tee output.proc.FT.surf
# to execute via bash:
#   tcsh -xef proc.FT.surf 2>&1 | tee output.proc.FT.surf

# =========================== auto block: setup ============================
# script setup

# take note of the AFNI version
afni -ver

# check that the current AFNI version is recent enough
afni_history -check_date 14 Nov 2022
if ( $status ) then
    echo "** this script requires newer AFNI binaries (than 14 Nov 2022)"
    echo "   (consider: @update.afni.binaries -defaults)"
    exit
endif

# the user may specify a single subject to run with
if ( $#argv > 0 ) then
    set subj = $argv[1]
else
    set subj = FT.surf
endif

# assign output directory name
set output_dir = $subj.results

# verify that the results directory does not yet exist
if ( -d $output_dir ) then
    echo output dir "$subj.results" already exists
    exit
endif

# set list of runs
set runs = (`count -digits 2 1 3`)

# create results and stimuli directories
mkdir -p $output_dir
mkdir $output_dir/stimuli

# copy stim files into stimulus directory
cp FT/AV1_vis.txt FT/AV2_aud.txt $output_dir/stimuli

# copy anatomy to results dir
3dcopy FT/FT_anat+orig $output_dir/FT_anat

# ============================ auto block: tcat ============================
# apply 3dTcat to copy input dsets to results dir,
# while removing the first 2 TRs
3dTcat -prefix $output_dir/pb00.$subj.r01.tcat FT/FT_epi_r1+orig'[2..$]'
3dTcat -prefix $output_dir/pb00.$subj.r02.tcat FT/FT_epi_r2+orig'[2..$]'
3dTcat -prefix $output_dir/pb00.$subj.r03.tcat FT/FT_epi_r3+orig'[2..$]'

# and make note of repetitions (TRs) per run
set tr_counts = ( 150 150 150 )

# -------------------------------------------------------
# enter the results directory (can begin processing data)
cd $output_dir

# ---------------------------------------------------------
# QC: look for columns of high variance
find_variance_lines.tcsh -polort 3 -nerode 2     \
       -rdir vlines.pb00.tcat                    \
       pb00.$subj.r*.tcat+orig.HEAD |& tee out.vlines.pb00.tcat.txt

# ========================== auto block: outcount ==========================
# QC: compute outlier fraction for each volume
touch out.pre_ss_warn.txt
foreach run ( $runs )
    3dToutcount -automask -fraction -polort 3 -legendre                     \
                pb00.$subj.r$run.tcat+orig > outcount.r$run.1D

    # outliers at TR 0 might suggest pre-steady state TRs
    if ( `1deval -a outcount.r$run.1D"{0}" -expr "step(a-0.4)"` ) then
        echo "** TR #0 outliers: possible pre-steady state TRs in run $run" \
            >> out.pre_ss_warn.txt
    endif
end

# catenate outlier counts into a single time series
cat outcount.r*.1D > outcount_rall.1D

# get run number and TR index for minimum outlier volume
set minindex = `3dTstat -argmin -prefix - outcount_rall.1D\'`
set ovals = ( `1d_tool.py -set_run_lengths $tr_counts                       \
                          -index_to_run_tr $minindex` )

# save run and TR indices for extraction of vr_base_min_outlier
set minoutrun = $ovals[1]
set minouttr  = $ovals[2]
echo "min outlier: run $minoutrun, TR $minouttr" | tee out.min_outlier.txt

# ================================= tshift =================================
# time shift data so all slice timing is the same
foreach run ( $runs )
    3dTshift -tzero 0 -quintic -prefix pb01.$subj.r$run.tshift \
             pb00.$subj.r$run.tcat+orig
end

# --------------------------------
# extract volreg registration base
3dbucket -prefix vr_base_min_outlier                           \
    pb01.$subj.r$minoutrun.tshift+orig"[$minouttr]"
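# ---------------------------------------------------------
# (optional QC, an addition beyond the afni_proc.py output: render the
#  concatenated outlier fractions to an image, to verify that the chosen
#  MIN_OUTLIER base at run $minoutrun, TR $minouttr sits at the minimum of
#  the curve; 1dplot's -jpg option writes the plot to a file instead of
#  opening a window, and the output file name here is just an example)
1dplot -jpg outcount_rall_plot.jpg outcount_rall.1D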
# ================================= align ==================================
# for e2a: compute anat alignment transformation to EPI registration base
# (new anat will be intermediate, stripped, FT_anat_ns+orig)
align_epi_anat.py -anat2epi -anat FT_anat+orig     \
       -save_skullstrip -suffix _al_junk           \
       -epi vr_base_min_outlier+orig -epi_base 0   \
       -epi_strip 3dAutomask                       \
       -cost lpc+ZZ -giant_move -check_flip        \
       -volreg off -tshift off

# ================================= volreg =================================
# align each dset to base volume, to anat

# register and warp
foreach run ( $runs )
    # register each volume to the base image
    3dvolreg -verbose -zpad 1 -base vr_base_min_outlier+orig    \
             -1Dfile dfile.r$run.1D -prefix rm.epi.volreg.r$run \
             -cubic                                             \
             -1Dmatrix_save mat.r$run.vr.aff12.1D               \
             pb01.$subj.r$run.tshift+orig

    # create an all-1 dataset to mask the extents of the warp
    3dcalc -overwrite -a pb01.$subj.r$run.tshift+orig -expr 1   \
           -prefix rm.epi.all1

    # catenate volreg/epi2anat xforms
    cat_matvec -ONELINE                                         \
               FT_anat_al_junk_mat.aff12.1D -I                  \
               mat.r$run.vr.aff12.1D > mat.r$run.warp.aff12.1D

    # apply catenated xform: volreg/epi2anat
    3dAllineate -base FT_anat_ns+orig                           \
                -input pb01.$subj.r$run.tshift+orig             \
                -1Dmatrix_apply mat.r$run.warp.aff12.1D         \
                -mast_dxyz 2.5                                  \
                -prefix rm.epi.nomask.r$run

    # warp the all-1 dataset for extents masking
    3dAllineate -base FT_anat_ns+orig                           \
                -input rm.epi.all1+orig                         \
                -1Dmatrix_apply mat.r$run.warp.aff12.1D         \
                -mast_dxyz 2.5 -final NN -quiet                 \
                -prefix rm.epi.1.r$run

    # make an extents intersection mask of this run
    3dTstat -min -prefix rm.epi.min.r$run rm.epi.1.r$run+orig
end

# make a single file of registration params
cat dfile.r*.1D > dfile_rall.1D

# ----------------------------------------
# create the extents mask: mask_epi_extents+orig
# (this is a mask of voxels that have valid data at every TR)
3dMean -datum short -prefix rm.epi.mean rm.epi.min.r*.HEAD
3dcalc -a rm.epi.mean+orig -expr 'step(a-0.999)' -prefix mask_epi_extents

# and apply the extents mask to the EPI data
# (delete any time series with missing data)
foreach run ( $runs )
    3dcalc -a rm.epi.nomask.r$run+orig -b mask_epi_extents+orig \
           -expr 'a*b' -prefix pb02.$subj.r$run.volreg
end

# warp the volreg base EPI dataset to make a final version
cat_matvec -ONELINE FT_anat_al_junk_mat.aff12.1D -I > mat.basewarp.aff12.1D

3dAllineate -base FT_anat_ns+orig                               \
            -input vr_base_min_outlier+orig                     \
            -1Dmatrix_apply mat.basewarp.aff12.1D               \
            -mast_dxyz 2.5                                      \
            -prefix final_epi_vr_base_min_outlier

# create an anat_final dataset, aligned with stats
3dcopy FT_anat_ns+orig anat_final.$subj

# record final registration costs
3dAllineate -base final_epi_vr_base_min_outlier+orig -allcostX  \
            -input anat_final.$subj+orig |& tee out.allcostX.txt

# -----------------------------------------
# warp anat follower datasets (identity: resample)

# ======================= surf (map data to surface) =======================
# map EPI data to the surface domain

# set directory variables
set surface_dir = /home/rickr/data/new/AFNI_data6/FT_analysis/FT/SUMA

# align the surface anatomy with the current experiment anatomy
@SUMA_AlignToExperiment -exp_anat anat_final.$subj+orig         \
                        -surf_anat $surface_dir/FT_SurfVol.nii  \
                        -wd -strip_skull surf_anat              \
                        -atlas_followers -overwrite_resp S      \
                        -prefix ${subj}_SurfVol_Alnd_Exp

# map volume data to the surface of each hemisphere
foreach hemi ( lh rh )
    foreach run ( $runs )
        3dVol2Surf -spec $surface_dir/std.60.FT_${hemi}.spec         \
                   -sv ${subj}_SurfVol_Alnd_Exp+orig                 \
                   -surf_A smoothwm                                  \
                   -surf_B pial                                      \
                   -f_index nodes                                    \
                   -f_steps 10                                       \
                   -map_func ave                                     \
                   -oob_value 0                                      \
                   -grid_parent pb02.$subj.r$run.volreg+orig         \
                   -out_niml pb03.$subj.$hemi.r$run.surf.niml.dset
    end
end
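# (optional check, an addition beyond the afni_proc.py output: list the
#  number of time points in each surface time series, as a quick sanity
#  check that every run was mapped to both hemispheres; this assumes 3dinfo
#  reads NIML surface datasets, as the other 3d* programs in this script do,
#  and the output file name is just an example)
foreach hemi ( lh rh )
    3dinfo -nt pb03.$subj.$hemi.r*.surf.niml.dset | tee -a out.surf_nt.txt
end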
# make local script for running suma, and make it executable
echo suma -spec $surface_dir/std.60.FT_lh.spec \
          -sv ${subj}_SurfVol_Alnd_Exp+orig > run_suma
chmod 755 run_suma

# =========================== blur (on surface) ============================
foreach hemi ( lh rh )
    foreach run ( $runs )
        # to save time, estimate blur parameters only once
        if ( ! -f surf.smooth.params.1D ) then
            SurfSmooth -spec $surface_dir/std.60.FT_${hemi}.spec          \
                       -surf_A smoothwm                                   \
                       -input pb03.$subj.$hemi.r$run.surf.niml.dset       \
                       -met HEAT_07                                       \
                       -target_fwhm 6.0                                   \
                       -blurmaster pb03.$subj.$hemi.r$run.surf.niml.dset  \
                       -detrend_master                                    \
                       -output pb04.$subj.$hemi.r$run.blur.niml.dset      \
                       | tee surf.smooth.params.1D
            set bfile = pb04.$subj.$hemi.r$run.blur.niml.dset.1D.smrec
            set params = ( `1dcat $bfile | tail -n 1` )
        else
            # get blur params from smrec file
            SurfSmooth -spec $surface_dir/std.60.FT_${hemi}.spec          \
                       -surf_A smoothwm                                   \
                       -input pb03.$subj.$hemi.r$run.surf.niml.dset       \
                       -met HEAT_07                                       \
                       -Niter $params[1]                                  \
                       -sigma $params[3]                                  \
                       -output pb04.$subj.$hemi.r$run.blur.niml.dset
        endif
    end
end

# ================================= scale ==================================
# scale each voxel time series to have a mean of 100
# (be sure no negatives creep in)
# (subject to a range of [0,200])
foreach hemi ( lh rh )
    foreach run ( $runs )
        3dTstat -prefix rm.$hemi.mean_r$run.niml.dset                     \
                pb04.$subj.$hemi.r$run.blur.niml.dset
        3dcalc -a pb04.$subj.$hemi.r$run.blur.niml.dset                   \
               -b rm.$hemi.mean_r$run.niml.dset                           \
               -expr 'min(200, a/b*100)*step(a)*step(b)'                  \
               -prefix pb05.$subj.$hemi.r$run.scale.niml.dset
    end
end

# ================================ regress =================================

# compute de-meaned motion parameters (for use in regression)
1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
           -demean -write motion_demean.1D

# compute motion parameter derivatives (just to have)
1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
           -derivative -demean -write motion_deriv.1D

# convert motion parameters for per-run regression
1d_tool.py -infile motion_demean.1D -set_nruns 3 \
           -split_into_pad_runs mot_demean

# create censor file motion_${subj}_censor.1D, for censoring motion
1d_tool.py -infile dfile_rall.1D -set_nruns 3 \
           -show_censor_count -censor_prev_TR \
           -censor_motion 0.3 motion_${subj}

# note TRs that were not censored
# (apply from a text file, in case of a lot of censoring)
1d_tool.py -infile motion_${subj}_censor.1D   \
           -show_trs_uncensored space         \
           > out.keep_trs_rall.txt
set ktrs = `1dcat out.keep_trs_rall.txt`

# ------------------------------
# run the regression analysis
foreach hemi ( lh rh )
    3dDeconvolve -input pb05.$subj.$hemi.r*.scale.niml.dset    \
        -censor motion_${subj}_censor.1D                       \
        -ortvec mot_demean.r01.1D mot_demean_r01               \
        -ortvec mot_demean.r02.1D mot_demean_r02               \
        -ortvec mot_demean.r03.1D mot_demean_r03               \
        -polort 3                                              \
        -num_stimts 2                                          \
        -stim_times 1 stimuli/AV1_vis.txt 'BLOCK(20,1)'        \
        -stim_label 1 vis                                      \
        -stim_times 2 stimuli/AV2_aud.txt 'BLOCK(20,1)'        \
        -stim_label 2 aud                                      \
        -jobs 2                                                \
        -gltsym 'SYM: vis -aud'                                \
        -glt_label 1 V-A                                       \
        -fout -tout -x1D X.xmat.1D -xjpeg X.jpg                \
        -x1D_uncensored X.nocensor.xmat.1D                     \
        -fitts fitts.$subj.$hemi.niml.dset                     \
        -errts errts.${subj}.$hemi.niml.dset                   \
        -bucket stats.$subj.$hemi.niml.dset
end

# display any large pairwise correlations from the X-matrix
1d_tool.py -show_cormat_warnings -infile X.xmat.1D |& tee out.cormat_warn.txt

# display degrees of freedom info from X-matrix
1d_tool.py -show_df_info -infile X.xmat.1D |& tee out.df_info.txt
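# (optional review, an addition beyond the afni_proc.py output: render the
#  full-length (uncensored) X-matrix regressors together with the censored
#  time points to an image, using the motion censor file created above;
#  the output file name is just an example)
1dplot -jpg X.regressors_censored.jpg -sepscl        \
       -censor motion_${subj}_censor.1D X.nocensor.xmat.1D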
# create an all_runs dataset to match the fitts, errts, etc.
foreach hemi ( lh rh )
    3dTcat -prefix all_runs.$subj.$hemi.niml.dset \
           pb05.$subj.$hemi.r*.scale.niml.dset
end

# --------------------------------------------------
# create a temporal signal to noise ratio dataset
#    signal: if 'scale' block, mean should be 100
#    noise : compute standard deviation of errts
foreach hemi ( lh rh )
    3dTstat -mean -prefix rm.signal.all.$hemi.niml.dset \
            all_runs.$subj.$hemi.niml.dset"[$ktrs]"
    3dTstat -stdev -prefix rm.noise.all.$hemi.niml.dset \
            errts.${subj}.$hemi.niml.dset"[$ktrs]"
    3dcalc -a rm.signal.all.$hemi.niml.dset             \
           -b rm.noise.all.$hemi.niml.dset              \
           -expr 'a/b' -prefix TSNR.$subj.$hemi.niml.dset
end

# create ideal files for fixed response stim types
1dcat X.nocensor.xmat.1D'[12]' > ideal_vis.1D
1dcat X.nocensor.xmat.1D'[13]' > ideal_aud.1D

# --------------------------------------------------
# extract non-baseline regressors from the X-matrix,
# then compute their sum
1d_tool.py -infile X.nocensor.xmat.1D -write_xstim X.stim.xmat.1D
3dTstat -sum -prefix sum_ideal.1D X.stim.xmat.1D

# ---------------------------------------------------------
# QC: compute correlations with spherical ~averages
@radial_correlate -nfirst 0 -polort 3 -do_clean yes        \
                  -rdir radcor.pb06.regress                \
                  all_runs.$subj.$hemi.niml.dset+orig.HEAD \
                  errts.${subj}.$hemi.niml.dset+orig.HEAD

# ========================= auto block: QC_review ==========================
# generate quality control review scripts and HTML report

# generate a review script for the unprocessed EPI data
gen_epi_review.py -script @epi_review.$subj \
    -dsets pb00.$subj.r*.tcat+orig.HEAD

# -------------------------------------------------
# generate scripts to review single subject results
# (try with defaults, but do not allow bad exit status)

# write AP uvars into a simple txt file
cat << EOF > out.ap_uvars.txt
  mot_limit        : 0.3
  copy_anat        : FT_anat+orig.HEAD
  dir_suma_spec    : /home/rickr/data/new/AFNI_data6/FT_analysis/FT/SUMA
  suma_specs       : std.60.FT_lh.spec std.60.FT_rh.spec
  ss_review_dset   : out.ss_review.$subj.txt
  vlines_tcat_dir  : vlines.pb00.tcat
EOF

# and convert the txt format to JSON
cat out.ap_uvars.txt | afni_python_wrapper.py -eval "data_file_to_json()" \
    > out.ap_uvars.json

# initialize gen_ss_review_scripts.py with out.ap_uvars.json
gen_ss_review_scripts.py -exit0        \
    -init_uvars_json out.ap_uvars.json \
    -write_uvars_json out.ss_review_uvars.json

# ========================== auto block: finalize ==========================

# remove temporary files
\rm -f rm.*

# if the basic subject review script is here, run it
# (want this to be the last text output)
if ( -e @ss_review_basic ) then
    ./@ss_review_basic |& tee out.ss_review.$subj.txt

    # generate html ss review pages
    # (akin to static images from running @ss_review_driver)
    apqc_make_tcsh.py -review_style pythonic -subj_dir . \
        -uvar_json out.ss_review_uvars.json
    apqc_make_html.py -qc_dir QC_$subj

    echo "\nconsider running: \n"
    echo "   afni_open -b $subj.results/QC_$subj/index.html"
    echo ""
endif
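# (optional note, an addition beyond the afni_proc.py output: the interactive
#  counterpart, ./@ss_review_driver, is typically written alongside
#  @ss_review_basic by gen_ss_review_scripts.py and can be run by hand from
#  this results directory to step through the same results)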
echo "execution finished: `date`" # ========================================================================== # script generated by the command: # # afni_proc.py -subj_id FT.surf -blocks tshift align volreg surf blur scale \ # regress -copy_anat FT/FT_anat+orig -dsets FT/FT_epi_r1+orig.HEAD \ # FT/FT_epi_r2+orig.HEAD FT/FT_epi_r3+orig.HEAD -surf_anat \ # FT/SUMA/FT_SurfVol.nii -surf_spec FT/SUMA/std.60.FT_lh.spec \ # FT/SUMA/std.60.FT_rh.spec -tcat_remove_first_trs 2 -align_opts_aea \ # -cost lpc+ZZ -giant_move -check_flip -volreg_align_to MIN_OUTLIER \ # -volreg_align_e2a -blur_size 6 -regress_stim_times FT/AV1_vis.txt \ # FT/AV2_aud.txt -regress_stim_labels vis aud -regress_basis \ # 'BLOCK(20,1)' -regress_motion_per_run -regress_censor_motion 0.3 \ # -regress_opts_3dD -jobs 2 -gltsym 'SYM: vis -aud' -glt_label 1 V-A