-
Notifications
You must be signed in to change notification settings - Fork 0
/
nextflow.config
194 lines (164 loc) · 5.83 KB
/
nextflow.config
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
/**
 * # Parameters for nf-maccoss-trex
 *
 * A NextFlow pipeline for analyzing data-independent acquisition (DIA)
 * mass spectrometry data.
 *
 * NOTE(review): this header says "data-independent" and "nf-maccoss-trex",
 * but the manifest below describes DDA workflows for 'nf-teirex-dda' —
 * confirm which is correct and reconcile.
 */
params {
    /** \group{Input/Output Parameters} */
    /** \type{string} \required
     * Path to the FASTA file containing the subset of proteins sequences to
     * search.
     */
    fasta = null
    /** \type{string} \required
     * The directory containing the mzML or raw files to search
     */
    spectra_dir = null
    /** \type{string} \required
     * Location of the comet params file
     */
    comet_params = 'comet.params'
    result_dir = 'results/nf-teirex-dda' /** \type{str} Where results will be saved. */
    report_dir = 'reports/nf-teirex-dda' /** \type{str} Where reports (timeline, trace, DAG) will be saved. */
    email = null /** \type{str} An email to alert on completion. */
    // Limelight params
    limelight_tags = null               // NOTE(review): presumably tags to attach to the Limelight upload — confirm
    limelight_upload = false            // whether results are uploaded to Limelight (default: off)
    limelight_import_decoys = false     // whether decoys are included in the Limelight import (default: off)
    limelight_entrapment_prefix = null  // NOTE(review): presumably a protein-name prefix marking entrapment sequences — confirm
}
// nf-amazon provides S3 / AWS Batch support used by the 'aws' profile below.
plugins {
    id 'nf-amazon'
}
// All processes run inside Docker containers.
docker {
    enabled = true
}
/*
 * Set up secrets in the environment.
 * Need to do it this way because Nextflow doesn't allow the use of secrets when running on AWS Batch.
 *
 * For each secret: look it up via the SecretsLoader; if present, export its
 * value into the process environment. If absent, the env variable is left unset.
 */
// Fixed: the original chained assignment
// (secret_value = env.LIMELIGHT_SUBMIT_UPLOAD_KEY = ...getSecret(...))
// stored the raw secret-holder object — not its value — in the environment
// variable, and left it there even when the secret was missing. Now matches
// the PANORAMA_API_KEY pattern below.
secret_value = nextflow.secret.SecretsLoader.instance.load().getSecret("LIMELIGHT_SUBMIT_UPLOAD_KEY")
if(secret_value) {
    env.LIMELIGHT_SUBMIT_UPLOAD_KEY = secret_value.value
}
secret_value = nextflow.secret.SecretsLoader.instance.load().getSecret("PANORAMA_API_KEY")
if(secret_value) {
    env.PANORAMA_API_KEY = secret_value.value
}
// AWS settings used when running with the 'aws' profile (AWS Batch executor).
aws {
    batch {
        // NOTE: this setting is only required if the AWS CLI tool is installed in a custom AMI
        cliPath = '/usr/local/aws-cli/v2/current/bin/aws'
        logsGroup = '/batch/tei-nextflow-batch'       // CloudWatch log group for Batch jobs
        maxConnections = 20                           // max parallel S3 transfer connections
        connectionTimeout = 10000                     // S3 connection timeout, milliseconds
        uploadStorageClass = 'INTELLIGENT_TIERING'    // storage class for files uploaded to S3
        storageEncryption = 'AES256'                  // server-side encryption for S3 uploads
        retryMode = 'standard'                        // AWS SDK retry strategy
    }
    region = 'us-west-2'
}
// Execution Profiles
// Each profile selects an executor and sets resource caps plus cache
// locations consumed by check_max() and the msconvert steps.
profiles {
    /*
     * Params for running pipeline on the local computer (e.g.:
     * your laptop). These can be overridden in the local config file.
     */
    standard {
        process.executor = 'local'
        // limit nextflow to running 1 task at a time
        executor.queueSize = 1
        params.max_memory = '12.GB'
        params.max_cpus = 8
        params.max_time = '240.h'
        // where to cache mzml files after running msconvert
        params.mzml_cache_directory = '/data/mass_spec/nextflow/nf-teirex-dda/mzml_cache'
        params.panorama_cache_directory = '/data/mass_spec/nextflow/panorama/raw_cache'
    }
    // Run on AWS Batch; requires the nf-amazon plugin and the aws scope above.
    aws {
        process.executor = 'awsbatch'
        process.queue = 'nextflow_basic_ec2'
        // params for running pipeline on aws batch
        // These can be overridden in local config file
        // max params allowed for your AWS Batch compute environment
        params.max_memory = '124.GB'
        params.max_cpus = 32
        params.max_time = '240.h'
        // where to cache mzml files after running msconvert
        params.mzml_cache_directory = 's3://mc-tei-rex-nextflow-dda/mzml_cache'
        params.panorama_cache_directory = 's3://mc-tei-rex-nextflow-dda/panorama_cache'
    }
    // Run on a SLURM cluster; same resource caps and caches as 'standard'.
    slurm {
        process.executor = 'slurm'
        params.max_memory = '12.GB'
        params.max_cpus = 8
        params.max_time = '240.h'
        // where to cache mzml files after running msconvert
        params.mzml_cache_directory = '/data/mass_spec/nextflow/nf-teirex-dda/mzml_cache'
        params.panorama_cache_directory = '/data/mass_spec/nextflow/panorama/raw_cache'
    }
}
// Manifest
// Pipeline metadata shown by `nextflow info` and used for version checks.
manifest {
    name = 'nf-teirex-dda'
    author = 'Michael Riffle'
    homePage = 'https://github.com/mriffle/nf-teirex-dda'
    description = 'DDA workflows for TEI-REX project'
    mainScript = 'main.nf'
    nextflowVersion = '!>=21.10.3'    // '!' = fail (don't just warn) on older Nextflow
}
// Capture exit codes from upstream processes when piping
process.shell = ['/bin/bash', '-euo', 'pipefail']
// Timestamp baked into report filenames so successive runs don't overwrite
// each other's reports.
def trace_timestamp = new java.util.Date().format( 'yyyy-MM-dd_HH-mm-ss')
timeline {
    enabled = true
    file = "${params.report_dir}/execution_timeline_${trace_timestamp}.html"
}
report {
    enabled = true
    file = "${params.report_dir}/execution_report_${trace_timestamp}.html"
}
trace {
    enabled = true
    file = "${params.report_dir}/execution_trace_${trace_timestamp}.txt"
}
// DAG rendering is off by default; flip 'enabled' to generate the graph.
dag {
    enabled = false
    file = "${params.report_dir}/pipeline_dag_${trace_timestamp}.html"
}
// Load base.config by default for all pipelines
// (per-process resource defaults; paths are relative to this config file)
includeConfig 'conf/base.config'
// Load the images to use for all processes
includeConfig 'container_images.config'
// Function to ensure that resource requirements don't go beyond
// a maximum limit. Copied from the nf-core template.
//
// obj:  the requested resource (a MemoryUnit, Duration, or cpu count)
// type: one of 'memory', 'time' or 'cpus'
// Returns the requested value capped at params.max_memory / params.max_time /
// params.max_cpus; if the configured maximum cannot be parsed, the request is
// returned unchanged and an error is printed.
def check_max(obj, type) {
    if (type == 'memory') {
        try {
            // Fixed: compareTo() only guarantees a *positive* value when obj is
            // greater — comparing with `== 1` could silently skip the cap.
            if (obj.compareTo(params.max_memory as nextflow.util.MemoryUnit) > 0)
                return params.max_memory as nextflow.util.MemoryUnit
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max memory '${params.max_memory}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'time') {
        try {
            // Same fix as above: use > 0, not == 1.
            if (obj.compareTo(params.max_time as nextflow.util.Duration) > 0)
                return params.max_time as nextflow.util.Duration
            else
                return obj
        } catch (all) {
            println " ### ERROR ### Max time '${params.max_time}' is not valid! Using default value: $obj"
            return obj
        }
    } else if (type == 'cpus') {
        try {
            return Math.min( obj, params.max_cpus as int )
        } catch (all) {
            println " ### ERROR ### Max cpus '${params.max_cpus}' is not valid! Using default value: $obj"
            return obj
        }
    }
}