//
// Notes to End Users.
//
// The workflow should run without editing this configuration file;
// however, there may be instances in which you wish to edit this
// file for compute performance or other reasons. Please see:
//
// https://nextflow.io/docs/latest/config.html#configuration
//
// for further help editing this file.
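//
// For example (a sketch only; the override file name and values here are
// illustrative, not defaults of this workflow), a small custom config passed
// on the command line with `nextflow run ... -c my_overrides.config` could
// adjust resources without editing this file:
//
//   params {
//       threads = 8                    // more threads for classification/alignment
//   }
//   process {
//       withLabel:wfmetagenomics {
//           memory = '16 GB'           // raise memory for the main workflow processes
//       }
//   }
//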
params {
help = false
version = false
fastq = null
bam = null
classifier = "kraken2"
sample = null
sample_sheet = null
exclude_host = null
// Advanced_options
max_len = null
min_len = 0
min_read_qual = null
threads = 4
server_threads = 2
kraken_clients = 2
// Databases
taxonomy = null
reference = null
ref2taxid = null
database = null
taxonomic_rank = 'S'
// Minimap
minimap2filter = null
minimap2exclude = false
keep_bam = false
minimap2_by_reference = false
min_percent_identity = 90
min_ref_coverage = 0
// Output
store_dir = "store_dir"
out_dir = "output"
include_read_assignments = false
// Extra features
igv = false
// Kraken
bracken_length = null
bracken_threshold = 10
port = 8080
host = "localhost"
external_kraken2 = false
kraken2_memory_mapping = false
kraken2_confidence = 0
// Real time
batch_size = 0
real_time = false
read_limit = null
// Databases
database_set = "Standard-8"
database_sets = [
'ncbi_16s_18s': [
'reference': 'https://ont-exd-int-s3-euwst1-epi2me-labs.s3.amazonaws.com/wf-metagenomics/ncbi_16s_18s/ncbi_targeted_loci_16s_18s.fna',
// database already includes kmer_dist_file
'database': 'https://ont-exd-int-s3-euwst1-epi2me-labs.s3.amazonaws.com/wf-metagenomics/ncbi_16s_18s/ncbi_targeted_loci_kraken2.tar.gz',
'ref2taxid': 'https://ont-exd-int-s3-euwst1-epi2me-labs.s3.amazonaws.com/wf-metagenomics/ncbi_16s_18s/ref2taxid.targloci.tsv',
'taxonomy': 'https://ftp.ncbi.nlm.nih.gov/pub/taxonomy/taxdump_archive/new_taxdump_2024-09-01.zip'
],
'ncbi_16s_18s_28s_ITS': [
'reference': 'https://ont-exd-int-s3-euwst1-epi2me-labs.s3.amazonaws.com/wf-metagenomics/ncbi_16s_18s_28s_ITS/ncbi_16s_18s_28s_ITS.fna',
'database': 'https://ont-exd-int-s3-euwst1-epi2me-labs.s3.amazonaws.com/wf-metagenomics/ncbi_16s_18s_28s_ITS/ncbi_16s_18s_28s_ITS_kraken2.tar.gz',
'ref2taxid': 'https://ont-exd-int-s3-euwst1-epi2me-labs.s3.amazonaws.com/wf-metagenomics/ncbi_16s_18s_28s_ITS/ref2taxid.ncbi_16s_18s_28s_ITS.tsv',
'taxonomy': 'https://ftp.ncbi.nlm.nih.gov/pub/taxonomy/taxdump_archive/new_taxdump_2024-09-01.zip'
],
'SILVA_138_1': [
// Uses taxids from the SILVA database, which do not match NCBI taxids.
// The database is created from scratch using the kraken2-build command,
// which automatically downloads the required files.
'database': null
],
'Standard-8': [
// database already includes kmer_dist_file
'database': 'https://genome-idx.s3.amazonaws.com/kraken/k2_standard_08gb_20240605.tar.gz', // 7.5GB hash.k2d
'taxonomy': 'https://ftp.ncbi.nlm.nih.gov/pub/taxonomy/taxdump_archive/new_taxdump_2024-09-01.zip'
],
'PlusPF-8': [
// database already includes kmer_dist_file
'database': 'https://genome-idx.s3.amazonaws.com/kraken/k2_pluspf_08gb_20240605.tar.gz',
'taxonomy': 'https://ftp.ncbi.nlm.nih.gov/pub/taxonomy/taxdump_archive/new_taxdump_2024-09-01.zip'
],
'PlusPFP-8': [
'database': 'https://genome-idx.s3.amazonaws.com/kraken/k2_pluspfp_08gb_20240605.tar.gz',
'taxonomy': 'https://ftp.ncbi.nlm.nih.gov/pub/taxonomy/taxdump_archive/new_taxdump_2024-09-01.zip'
]
]
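// The entries above are the bundled database presets. A custom database can
// instead be supplied through the existing `database`, `taxonomy` and
// `ref2taxid` parameters; a hedged example (paths are placeholders, see the
// workflow documentation for the expected formats):
//
//   nextflow run epi2me-labs/wf-metagenomics \
//       --fastq my_reads/ \
//       --database /path/to/kraken2_db.tar.gz \
//       --taxonomy /path/to/new_taxdump.zip
//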
// AMR options
amr = false
amr_db = "resfinder"
amr_minid = 80
amr_mincov = 80
// AWS
aws_image_prefix = null
aws_queue = null
// Other options
disable_ping = false
monochrome_logs = false
validate_params = true
show_hidden_params = false
analyse_unclassified = false
schema_ignore_params = 'show_hidden_params,validate_params,monochrome_logs,aws_queue,aws_image_prefix,database_sets,wf'
// Report options
abundance_threshold = 0
n_taxa_barplot = 9
// Workflows images
wf {
example_cmd = [
"--fastq 'wf-metagenomics-demo/test_data'"
]
agent = null
container_sha = "sha1adcb94088d2397b0d61c0393035f51b1013226a"
common_sha = "shaabceef445fb63214073cbf5836fdd33c04be4ac7"
container_sha_amr = "sha2ae47c3f988b71f7cf81c37c000f6cb046e18357"
}
}
manifest {
name = 'epi2me-labs/wf-metagenomics'
author = 'Oxford Nanopore Technologies'
homePage = 'https://github.com/epi2me-labs/wf-metagenomics'
description = 'Identification of the origin of single reads from both amplicon-targeted and shotgun metagenomics sequencing.'
mainScript = 'main.nf'
nextflowVersion = '>=23.04.2'
version = 'v2.12.1'
}
epi2melabs {
tags = "metagenomics,amr"
}
// used by default for "standard" (docker) and singularity profiles,
// other profiles may override.
process {
withLabel:wfmetagenomics {
container = "ontresearch/wf-metagenomics:${params.wf.container_sha}"
}
withLabel:wf_common {
container = "ontresearch/wf-common:${params.wf.common_sha}"
}
withLabel:amr {
container = "ontresearch/abricate:${params.wf.container_sha_amr}"
}
shell = ['/bin/bash', '-euo', 'pipefail']
}
profiles {
// the "standard" profile is used implicitely by nextflow
// if no other profile is given on the CLI
standard {
docker {
enabled = true
// this ensures the container is run as the host user and group, but
// also adds the host user to the within-container group
runOptions = "--user \$(id -u):\$(id -g) --group-add 100"
}
}
// Using Singularity instead of Docker.
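// A sketch of a typical invocation (the input path is a placeholder):
//   nextflow run epi2me-labs/wf-metagenomics -profile singularity --fastq <reads_dir>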
singularity {
singularity {
enabled = true
autoMounts = true
}
}
conda {
conda.enabled = true
}
// Using AWS Batch.
// May need to set aws.region and aws.batch.cliPath
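// As a rough sketch only (the values are illustrative, not workflow defaults),
// those settings could be added at the top level of this file or in a custom config:
//
//   aws.region = 'eu-west-1'
//   aws.batch.cliPath = '/home/ec2-user/miniconda/bin/aws'
//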
awsbatch {
process {
executor = 'awsbatch'
queue = "${params.aws_queue}"
memory = '32G'
withLabel:wfmetagenomics {
container = "${params.aws_image_prefix}-wf-metagenomics:${params.wf.container_sha}"
}
withLabel:wf_common {
container = "${params.aws_image_prefix}-wf-common:${params.wf.common_sha}"
}
withLabel:amr {
container = "${params.aws_image_prefix}-abricate:${params.wf.container_sha_amr}"
}
shell = ['/bin/bash', '-euo', 'pipefail']
}
}
// local profile for simplified development testing
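// (Profiles can be combined on the command line, e.g. `-profile standard,local`,
// so that a container engine is still enabled; this is a suggestion, not a
// requirement of the workflow.)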
local {
process.executor = 'local'
}
}
timeline {
enabled = true
overwrite = true
file = "${params.out_dir}/execution/timeline.html"
}
report {
enabled = true
overwrite = true
file = "${params.out_dir}/execution/report.html"
}
trace {
enabled = true
overwrite = true
file = "${params.out_dir}/execution/trace.txt"
}
env {
PYTHONNOUSERSITE = 1
JAVA_TOOL_OPTIONS = "-Xlog:disable -Xlog:all=warning:stderr"
}