From 13dd6c75f07ad68fd2b2fa0e00b2acbb264f9bfb Mon Sep 17 00:00:00 2001
From: "Andrew G. McArthur"
Date: Tue, 6 Feb 2024 13:02:20 -0500
Subject: [PATCH] Digital Alliance

---
 README.rst                    | 117 +---------------------------------
 docs/rgi_digital_alliance.rst | 116 +++++++++++++++++++++++++++++++++
 2 files changed, 117 insertions(+), 116 deletions(-)
 create mode 100644 docs/rgi_digital_alliance.rst

diff --git a/README.rst b/README.rst
index 8b61769..f353c44 100644
--- a/README.rst
+++ b/README.rst
@@ -38,6 +38,7 @@ Overview and Use of RGI
 * `Analyzing Genomes, Genome Assemblies, Metagenomic Contigs, or Proteomes `_ (a.k.a. RGI main)
 * `Analyzing Metagenomic Reads `_ (a.k.a. RGI bwt)
 * `K-mer Prediction of Pathogen-of-Origin for AMR Genes `_ (beta-testing)
+* `Running RGI on Digital Alliance (formerly Compute Canada) Serial Farm `_
 
 License
 --------
@@ -184,119 +185,3 @@ Running RGI Tests
 -------------------
 
-Running RGI on Compute Canada Serial Farm
-=========================================
-
-**Order of operations**
-
-.. code-block:: sh
-
-   ## Running jobs on computecanada using serial farm method
-
-   - `rgi bwt` was used as example.
-
-   ### step 1:
-
-   - update make_table_dat.sh to construct arguments for commands
-
-   ### step 2:
-
-   - update eval command in job_script.sh to match your tool and also load appropriate modules
-
-   ### step 3:
-
-   - create table.dat using script make_table_dat.sh with inputs files in all_samples directory
-   ./make_table_dat.sh ./all_samples/ > table.dat
-
-   ### step 4:
-
-   - submit multiple jobs using for_loop.sh
-
-   ### Resource:
-
-   - https://docs.computecanada.ca/wiki/Running_jobs#Serial_job
-
-
-**Update the make_table_dat.sh**
-
-.. code-block:: sh
-
-   DIR=`find . -mindepth 1 -type d`
-   for D in $DIR; do
-       directory=$(basename $D);
-       for file in $directory/*; do
-           filename=$(basename $file);
-           if [[ $filename = *"_pass_1.fastq.gz"* ]]; then
-               read1=$(basename $filename);
-               base=(${read1//_pass_1.fastq.gz/ });
-               #echo "--read_one $(pwd)/$directory/${base}_pass_1.fastq.gz --read_two $(pwd)/$directory/${base}_pass_2.fastq.gz -o $(pwd)/$directory/${base} -n 16 --aligner bowtie2 --debug"
-               echo "--read_one $(pwd)/$directory/${base}_pass_1.fastq.gz --read_two $(pwd)/$directory/${base}_pass_2.fastq.gz -o $(pwd)/$directory/${base}_wild -n 8 --aligner bowtie2 --debug --include_wildcard"
-           fi
-       done
-   done
-
-This block of code is used to generate the arguments for serial farming. In this example, rgi bwt is used, however depending on the job you are running you may update it according to your specifications.
-
-**Update the job_script.sh to match used tool**
-
-.. code-block:: sh
-
-   #SBATCH --account=def-mcarthur
-   #SBATCH --time=120
-   #SBATCH --job-name=rgi_bwt
-   #SBATCH --cpus-per-task=8
-   #SBATCH --mem-per-cpu=2048M
-   #SBATCH --mail-user=raphenar@mcmaster.ca
-   #SBATCH --mail-type=ALL
-
-   # Extracing the $I_FOR-th line from file $TABLE:
-   LINE=`sed -n ${I_FOR}p "$TABLE"`
-
-   # Echoing the command (optional), with the case number prepended:
-   #echo "$I_FOR; $LINE"
-
-   # load modules
-   module load nixpkgs/16.09 python/3.6.3 gcc/5.4.0 blast+/2.6.0 prodigal diamond/0.8.36 bowtie2 samtools bamtools bedtools bwa
-
-   # execute command
-   #eval "$LINE"
-   #echo "rgi bwt $LINE"
-   eval "rgi bwt $LINE"
-
-Update this block of code according to which tool you want to use. In this example, rgi bwt is shown, however for your use-case, you may update it accordingly.
-
-**Creating the table.dat**
-
-To create the table.dat, use the script made before named make_table_dat.sh along with the path to the directory containing all your inputs as an argument. Output to table.dat.
-
-.. code-block:: sh
-
-   ./make_table_dat.sh ./all_samples/ > table.dat
-
-**Submit multiple jobs using for_loop.sh**
-
-This script is used once all the previous steps are completed. This script allows you to submit multiple jobs into Compute Canada for rgi.
-
-.. code-block:: sh
-
-   # Simplest case - using for loop to submit a serial farm
-   # The input file table.dat contains individual cases - one case per line
-   export TABLE=table.dat
-
-   # Total number of cases (= number of jobs to submit):
-   N_cases=$(cat "$TABLE" | wc -l)
-
-   # Submitting one job per case using the for loop:
-   for ((i=1; i<=$N_cases; i++))
-   do
-     # Using environment variable I_FOR to communicate the case number to individual jobs:
-     export I_FOR=$i
-     sbatch job_script.sh
-   done
-
-**Resources**
-
-More information on serial farming on Compute Canada can be found here_.
-
-.. _here: https://docs.computecanada.ca/wiki/Running_jobs#Serial_job
-
diff --git a/docs/rgi_digital_alliance.rst b/docs/rgi_digital_alliance.rst
new file mode 100644
index 0000000..f684c00
--- /dev/null
+++ b/docs/rgi_digital_alliance.rst
@@ -0,0 +1,116 @@
+Running RGI on Digital Alliance (formerly Compute Canada) Serial Farm
+=====================================================================
+
+**Order of operations**
+
+.. code-block:: sh
+
+   ## Running jobs on the Digital Alliance (formerly Compute Canada) clusters using the serial farm method
+
+   - `rgi bwt` is used as the example throughout.
+
+   ### step 1:
+
+   - update make_table_dat.sh to construct the arguments for each command
+
+   ### step 2:
+
+   - update the eval command in job_script.sh to match your tool and load the appropriate modules
+
+   ### step 3:
+
+   - create table.dat using the script make_table_dat.sh with the input files in the all_samples directory
+   ./make_table_dat.sh ./all_samples/ > table.dat
+
+   ### step 4:
+
+   - submit multiple jobs using for_loop.sh
+
+   ### Resource:
+
+   - https://docs.computecanada.ca/wiki/Running_jobs#Serial_job
+
+
+**Update make_table_dat.sh**
+
+.. code-block:: sh
+
+   DIR=`find . -mindepth 1 -type d`
+   for D in $DIR; do
+       directory=$(basename $D);
+       for file in $directory/*; do
+           filename=$(basename $file);
+           if [[ $filename = *"_pass_1.fastq.gz"* ]]; then
+               read1=$(basename $filename);
+               base=(${read1//_pass_1.fastq.gz/ });
+               #echo "--read_one $(pwd)/$directory/${base}_pass_1.fastq.gz --read_two $(pwd)/$directory/${base}_pass_2.fastq.gz -o $(pwd)/$directory/${base} -n 16 --aligner bowtie2 --debug"
+               echo "--read_one $(pwd)/$directory/${base}_pass_1.fastq.gz --read_two $(pwd)/$directory/${base}_pass_2.fastq.gz -o $(pwd)/$directory/${base}_wild -n 8 --aligner bowtie2 --debug --include_wildcard"
+           fi
+       done
+   done
+
+This block of code generates one line of command-line arguments per sample for serial farming. In this example the arguments are for rgi bwt; depending on the job you are running, update the echoed arguments to match your own specifications.
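+For a hypothetical sample subdirectory named sample1 containing sample1_pass_1.fastq.gz and sample1_pass_2.fastq.gz, the script would emit a line similar to the following into table.dat (the absolute path prefix comes from $(pwd), so the paths below are only illustrative):
+
+.. code-block:: sh
+
+   --read_one /scratch/user/all_samples/sample1/sample1_pass_1.fastq.gz --read_two /scratch/user/all_samples/sample1/sample1_pass_2.fastq.gz -o /scratch/user/all_samples/sample1/sample1_wild -n 8 --aligner bowtie2 --debug --include_wildcard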
+**Update job_script.sh to match the tool used**
+
+.. code-block:: sh
+
+   #!/bin/bash
+   #SBATCH --account=def-mcarthur
+   #SBATCH --time=120
+   #SBATCH --job-name=rgi_bwt
+   #SBATCH --cpus-per-task=8
+   #SBATCH --mem-per-cpu=2048M
+   #SBATCH --mail-user=raphenar@mcmaster.ca
+   #SBATCH --mail-type=ALL
+
+   # Extracting the $I_FOR-th line from file $TABLE:
+   LINE=`sed -n ${I_FOR}p "$TABLE"`
+
+   # Echoing the command (optional), with the case number prepended:
+   #echo "$I_FOR; $LINE"
+
+   # load modules
+   module load nixpkgs/16.09 python/3.6.3 gcc/5.4.0 blast+/2.6.0 prodigal diamond/0.8.36 bowtie2 samtools bamtools bedtools bwa
+
+   # execute command
+   #eval "$LINE"
+   #echo "rgi bwt $LINE"
+   eval "rgi bwt $LINE"
+
+Update this block of code according to the tool you want to use. In this example rgi bwt is shown; adjust the eval command, the requested resources, and the loaded modules to suit your use case.
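+Before submitting the whole farm, it can be worth checking a single case by hand. The following is a minimal sketch, assuming table.dat has already been generated in the current directory as described below:
+
+.. code-block:: sh
+
+   # Preview the command that job_script.sh would run for case 1:
+   export TABLE=table.dat
+   export I_FOR=1
+   LINE=`sed -n ${I_FOR}p "$TABLE"`
+   echo "rgi bwt $LINE"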
+**Creating the table.dat**
+
+To create table.dat, run the make_table_dat.sh script prepared above, passing the path to the directory that contains all your inputs as an argument and redirecting the output to table.dat.
+
+.. code-block:: sh
+
+   ./make_table_dat.sh ./all_samples/ > table.dat
+
+**Submit multiple jobs using for_loop.sh**
+
+Use this script once all the previous steps are completed. It submits one RGI job to the Digital Alliance (formerly Compute Canada) scheduler for each case (line) in table.dat; a sketch for checking on the submitted jobs follows the Resources section below.
+
+.. code-block:: sh
+
+   # Simplest case - using for loop to submit a serial farm
+   # The input file table.dat contains individual cases - one case per line
+   export TABLE=table.dat
+
+   # Total number of cases (= number of jobs to submit):
+   N_cases=$(cat "$TABLE" | wc -l)
+
+   # Submitting one job per case using the for loop:
+   for ((i=1; i<=$N_cases; i++))
+   do
+     # Using environment variable I_FOR to communicate the case number to individual jobs:
+     export I_FOR=$i
+     sbatch job_script.sh
+   done
+
+**Resources**
+
+More information on serial farming on the Digital Alliance (formerly Compute Canada) systems can be found here_.
+
+.. _here: https://docs.computecanada.ca/wiki/Running_jobs#Serial_job
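+**Checking on submitted jobs**
+
+A minimal sketch for keeping an eye on the farm after submission; it uses only standard Slurm tools and the default slurm-<jobid>.out files that sbatch writes in the submission directory:
+
+.. code-block:: sh
+
+   # Cases still pending or running under your account:
+   squeue -u $USER
+
+   # Scan the per-job logs for errors reported by RGI or the scheduler:
+   grep -il "error" slurm-*.out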