Skip to content

Commit

Permalink
Cleaning up
Browse files — browse the repository at this point in the history
  • Loading branch information
SNeuroble committed Feb 11, 2019
1 parent 41e68c8 commit f8ed8e0
Show file tree
Hide file tree
Showing 14 changed files with 199 additions and 14 deletions.
Empty file added hcpTask/Icon
Empty file.
8 changes: 4 additions & 4 deletions hcpTask/combine_results.sh
100755 → 100644
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
#!/bin/bash
################################
# Combine results across multiple random group analysis jobs
# arg: config file (e.g., cfg.sh)
################################
###########################################################
# Combine results across multiple jobs
# Must provide config file (e.g., cfg.sh)
###########################################################

# Require an existing config file as the first argument; abort otherwise.
# NOTE(review): $1 is unquoted inside [[ ]] (safe there, no word-splitting),
# but `source $setupfile` below is unquoted -- breaks on paths containing
# spaces; confirm callers never pass such paths, or quote it.
[[ ! -z $1 && -f $1 ]] && setupfile=$1 || { echo "Error: Config file needed." ; exit 1; }
source $setupfile
Expand Down
Empty file added hcpTask/config/Icon
Empty file.
105 changes: 105 additions & 0 deletions hcpTask/config/cfg.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,105 @@
# Paths and parameters for HCP Cluster Power Failure
#
# Sourced by the run scripts (e.g., run_hcp_cluster_power_failure.sh).
# Defines user parameters, then derives directories, filenames, and
# randomise options from them.  The only command with external effect is
# the `wc -l` read of $subNamesWithInput.

### PARAMETERS (SPECIFY)

testing=false

# Number of tests
nPermutations=500

# Data
task="WM"
hcpReleaseNo="1200"
nSubs_subset=20 #20 for final sim

# Software + thresholds
Software="FSL" # TODO: right now FSL is the only choice
doRandomise=true
doTFCE=true
CDT="3.1" #z-val
CDTp="0.001" #p-val
FWEthreshold="0.95"
nPerms_forRandomise=1000 #5000 is recommended to resolve within p+/-0.01

# Parallelization parameters
njobs=8 # divvy permutations across njobs
first_job_to_launch=1 # for doing a subset of jobs
last_job_to_launch=8 # for doing a subset of jobs

# Base directories
scriptsDir="/home/ec2-user/scripts/hcpTask"
dataDir="/home/ec2-user/data/hcpTask"


### DIRECTORIES AND OTHER SETUP

# Task/cope pairs: SOCIAL_cope6; WM_cope20; GAMBLING_cope6; RELATIONAL_cope4; EMOTION_cope3
case "$task" in
  'SOCIAL')
    copeNum="6" ;;
  'WM')
    copeNum="20" ;;
  'GAMBLING')
    copeNum="6" ;;
  'RELATIONAL')
    copeNum="4" ;;
  'EMOTION')
    copeNum="3" ;;
  *)
    echo "Error: must specify task." >&2  # diagnostics belong on stderr
    exit 1                                # nonzero so a sourcing `set -e` caller stops
esac


# More setup
maskThresh=$CDT
one_minus_CDTp=$(echo "1 - $CDTp" | bc)
# Ceiling division so njobs jobs together cover all nPermutations.
# (Was `$nPermutations / ($njobs-1)` via bc: over-allocates -- e.g. 500 perms
# across 8 jobs gave 71*8=568 -- and yields 0 perms/job when
# nPermutations < njobs-1.)
nperms_per_job=$(( (nPermutations + njobs - 1) / njobs ))
njobs_in_subset=$(( last_job_to_launch - first_job_to_launch + 1 ))

# Directories and key files
dataMasterDir="${dataDir}/${task}_cope${copeNum}"
subNamesWithInput="$dataMasterDir/hcp_file_names_S${hcpReleaseNo}_with_cope${copeNum}.txt"
nSubs_total=$(wc -l < "$subNamesWithInput")

# Full dataset repository
dataDir_localRepository="$dataMasterDir/GroupSize$nSubs_total"
dataDir_localRepository_lowerLevel="$dataDir_localRepository/lower_level"

# Processing files, settings, &c
# TFCE vs. cluster-extent thresholding select different randomise flags and
# different output-image basenames.
if [ "$doTFCE" = true ]; then
  RandomiseOptions_WithThresholds="-T -1"
  RandomiseOptions_NoThresholds="${RandomiseOptions_WithThresholds} -R"
  UncorrectedTstat="tfce_tstat1"
  ClusterTstat="tfce_corrp_tstat1"
else
  RandomiseOptions_WithThresholds="-c ${maskThresh} -1"
  RandomiseOptions_NoThresholds="${RandomiseOptions_WithThresholds} -x"
  UncorrectedTstat="tstat1"
  ClusterTstat="clustere_corrp_tstat1"
fi
processedSuffix="processed"
designTemplate="$scriptsDir/design_templates/design_template.fsf" #FLAME

# Ground truth data folders and mask
temp="-temp" # TODO: remove when stuff migrated back to orig bucket
cloudDataDir="s3://hcp-openaccess$temp/HCP_${hcpReleaseNo}"
cloudDataDir_contd="MNINonLinear/Results/tfMRI_$task/tfMRI_${task}_hp200_s4_level2vol.feat"
hcpConfigFile="$scriptsDir/config_files/hcp_access_S$hcpReleaseNo"
inputFileSuffix="cope${copeNum}.feat"
subNames="$scriptsDir/hcp_file_names_S${hcpReleaseNo}.txt"
groundTruthFolder="$dataDir_localRepository"
maskDir="${groundTruthFolder}/mask"
groundTruthTstat="${groundTruthFolder}/${processedSuffix}_tstat1.nii.gz"
groundTruthMask="${groundTruthFolder}/${processedSuffix}_clustere_corrp_tstat1.nii.gz" # TODO: come back
groundTruthDcoeff="${groundTruthFolder}/dcoeff.nii.gz"

# Output directories
# Suffix encodes the estimation method (+TFCE, +TESTING) so runs with
# different settings land in distinct folders.
outputDirSuffix=$( [ "$doRandomise" = "true" ] && echo "randomise" || echo "FLAME" )
outputDirSuffix=$( [ "$doTFCE" = "true" ] && echo "${outputDirSuffix}TFCE" || echo "$outputDirSuffix" )
outputDirSuffix=$( [ "$testing" = "true" ] && echo "${outputDirSuffix}TESTING" || echo "$outputDirSuffix" )
outputDir="$dataMasterDir/GroupSize${nSubs_subset}__${outputDirSuffix}"
subjectRandomizations="$outputDir/subIDs"
outputDirRecord="$outputDir/existing_dirs.txt"
resultImgSuffix=".gfeat/cope1.feat/cluster_mask_zstat1.nii.gz"
combinedSummaryDir="$outputDir/Summary"
82 changes: 82 additions & 0 deletions hcpTask/config/cfg_groundtruth.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
# Paths and parameters for HCP cluster power failure (ground-truth run)
#
# Sourced by the run scripts.  Like cfg.sh, but task/cope are set directly
# (no case lookup) and TFCE is off by default.  The only command with
# external effect is the `wc -l` read of $subNamesWithInput.

################### PARAMETERS (SPECIFY) ###################

# Number of tests
nPermutations=100 #100 #*****************
testing=false

# Data - task/cope pairs: SOCIAL_cope6; WM_cope20; GAMBLING_cope6; RELATIONAL_cope4; EMOTION_cope3
task="SOCIAL"
copeNum="6"
hcpReleaseNo="1200"
nSubs_subset=20 #20 for final sim

# Software + thresholds
Software="FSL" # TODO: right now FSL is the only choice
doRandomise=true
doTFCE=false
CDT="3.1" #z-val
CDTp="0.001" #p-val
FWEthreshold="0.95"
nPerms_forRandomise=5000 #5000 # *******************

# Parallelization parameters
njobs=8 # divvy permutations across njobs
first_job_to_launch=1 # for doing a subset of jobs
last_job_to_launch=8 # for doing a subset of jobs

################# DIRECTORIES AND OTHER SETUP #################

# More setup
maskThresh=$CDT
one_minus_CDTp=$(echo "1 - $CDTp" | bc)
# Ceiling division so njobs jobs together cover all nPermutations.
# (Was `$nPermutations / ($njobs-1)` via bc: over-allocates and yields 0
# perms/job when nPermutations < njobs-1.)
nperms_per_job=$(( (nPermutations + njobs - 1) / njobs ))
njobs_in_subset=$(( last_job_to_launch - first_job_to_launch + 1 ))

# Directories and key files
scriptsDir="/home/ec2-user/scripts/hcpTask"
dataMasterDir="/home/ec2-user/data/hcpTask/${task}_cope${copeNum}"
subNamesWithInput="$dataMasterDir/hcp_file_names_S${hcpReleaseNo}_with_cope${copeNum}.txt"
nSubs_total=$(wc -l < "$subNamesWithInput")

# Full dataset repository
dataDir_localRepository="$dataMasterDir/GroupSize$nSubs_total"
dataDir_localRepository_lowerLevel="$dataDir_localRepository/lower_level"

# Processing files, settings, &c
# TFCE vs. cluster-extent thresholding select different randomise flags and
# different output-image basenames.
if [ "$doTFCE" = true ]; then
  RandomiseOptions_WithThresholds="-T -1"
  RandomiseOptions_NoThresholds="${RandomiseOptions_WithThresholds} -R"
  UncorrectedTstat="tfce_tstat1"
  ClusterTstat="tfce_corrp_tstat1"
else
  RandomiseOptions_WithThresholds="-c ${maskThresh} -1"
  RandomiseOptions_NoThresholds="${RandomiseOptions_WithThresholds} -x"
  UncorrectedTstat="tstat1"
  ClusterTstat="clustere_corrp_tstat1"
fi
processedSuffix="processed"
designTemplate="$scriptsDir/design_templates/design_template.fsf" #FLAME

# Ground truth data folders and mask
temp="-temp" # TODO: remove when stuff migrated back to orig bucket
cloudDataDir="s3://hcp-openaccess$temp/HCP_${hcpReleaseNo}"
cloudDataDir_contd="MNINonLinear/Results/tfMRI_$task/tfMRI_${task}_hp200_s4_level2vol.feat"
hcpConfigFile="$scriptsDir/config_files/hcp_access_S$hcpReleaseNo"
inputFileSuffix="cope${copeNum}.feat"
subNames="$scriptsDir/hcp_file_names_S${hcpReleaseNo}.txt"
groundTruthFolder="$dataDir_localRepository"
maskDir="${groundTruthFolder}/mask"
groundTruthTstat="${groundTruthFolder}/${processedSuffix}_tstat1.nii.gz"
groundTruthMask="${groundTruthFolder}/${processedSuffix}_clustere_corrp_tstat1.nii.gz" # TODO: come back
groundTruthDcoeff="${groundTruthFolder}/dcoeff.nii.gz"

# Output directories
# Suffix encodes the estimation method (+TFCE, +TESTING) so runs with
# different settings land in distinct folders.
outputDirSuffix=$( [ "$doRandomise" = "true" ] && echo "randomise" || echo "FLAME" )
outputDirSuffix=$( [ "$doTFCE" = "true" ] && echo "${outputDirSuffix}TFCE" || echo "$outputDirSuffix" )
outputDirSuffix=$( [ "$testing" = "true" ] && echo "${outputDirSuffix}TESTING" || echo "$outputDirSuffix" )
outputDir="$dataMasterDir/GroupSize${nSubs_subset}__${outputDirSuffix}"
subjectRandomizations="$outputDir/subIDs"
outputDirRecord="$outputDir/existing_dirs.txt"
resultImgSuffix=".gfeat/cope1.feat/cluster_mask_zstat1.nii.gz"
2 changes: 1 addition & 1 deletion hcpTask/do_second_level__randomise.sh
100755 → 100644
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#!/bin/bash
# check memory requirements for large datasets
# perform nonparametric correction

printf "\n++ Running second level (+/- contrast).\n"

Expand Down
3 changes: 1 addition & 2 deletions hcpTask/get_data_and_ground_truth.sh
100755 → 100644
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
#!/bin/bash
# need to provide setpaths.sh
# make sure instance meets memory requirements
# must provide config file

### Setup

Expand Down
Empty file modified hcpTask/launch_parallel_processes.sh
100755 → 100644
Empty file.
1 change: 0 additions & 1 deletion hcpTask/make_TP_mask.sh
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
#!/bin/bash
#set -x

mkdir -p $maskDir

Expand Down
Empty file added hcpTask/old/Icon
Empty file.
File renamed without changes.
File renamed without changes.
9 changes: 5 additions & 4 deletions hcpTask/run_hcp_cluster_failure.sh → hcpTask/run_hcp_cluster_power_failure.sh
100755 → 100644
Original file line number Diff line number Diff line change
@@ -1,8 +1,9 @@
#!/bin/bash
set +x
#########################################################################################
# Usage: ./scripts/hcpTask/run_hcp_cluster_failure.sh ~/scripts/hcpTask/cfg.sh <1> <2>
# After this, may want to mask results with ground truth
# Usage: ./hcpTask/run_hcp_cluster_power_failure.sh ~/hcpTask/cfg.sh <1> <2>
# Must provide config file (e.g., cfg.sh)
# Make sure instance meets memory requirements
# Run combine_results.sh when finished to combine across perms
#########################################################################################

# Setup
Expand All @@ -25,7 +26,7 @@ for perm in $(seq $first_perm $last_perm); do
. $scriptsDir/calc_true_positives.sh >> $logfile

# clean up
#[[ -d "$permOutputsDir" ]] && rm -r "$permOutputsDir/*"
# Clean up this permutation's outputs.  The glob must be OUTSIDE the quotes:
# quoted, "$permOutputsDir/*" is a literal filename '*' and rm deletes nothing
# (and errors).  ${permOutputsDir:?} aborts rather than globbing '/' if the
# variable is ever empty.
[[ -d "$permOutputsDir" ]] && rm -r -- "${permOutputsDir:?}"/*
done

printf "\n+++ Finished - $perm permutations complete.\n"
Expand Down
3 changes: 1 addition & 2 deletions hcpTask/setup.sh
Original file line number Diff line number Diff line change
@@ -1,8 +1,7 @@
#!/bin/bash
#set -x

### Define paths & params for this subset of perms
# TODO: rename these
# TODO: consider renaming
permDir="$outputDir/perms${first_perm}-${last_perm}"
permOutputsDir="${permDir}/subset_results"
outputDirSummary="${permDir}/Summary"
Expand Down

0 comments on commit f8ed8e0

Please sign in to comment.