diff --git a/bin/__pycache__/PatternHandler.cpython-39.pyc b/bin/__pycache__/PatternHandler.cpython-39.pyc
index 7ad348b..4c7e048 100644
Binary files a/bin/__pycache__/PatternHandler.cpython-39.pyc and b/bin/__pycache__/PatternHandler.cpython-39.pyc differ
diff --git a/bin/parseCsv.py b/bin/parseCsv.py
index ecbeeec..3dc4d77 100755
--- a/bin/parseCsv.py
+++ b/bin/parseCsv.py
@@ -34,6 +34,7 @@
         relpath = dictitem[namecol]
         filename = os.path.basename(relpath)
         pretext = os.path.dirname(relpath)
+        if pretext.startswith('/'): pretext = pretext[1:]
         newroot = os.path.join(rootpath, pretext)
         fnameok = True
diff --git a/bin/run_conversion.py b/bin/run_conversion.py
index a235073..a11368a 100644
--- a/bin/run_conversion.py
+++ b/bin/run_conversion.py
@@ -89,13 +89,24 @@
     if "max_workers" in keys:
         cmd += ["--max_workers", '%s' % args.max_workers]
     if "no_nested" in keys:
-        cmd += ["--no-nested"]
+        if args.no_nested in (True, "True"):
+            cmd += ["--no-nested"]
+        elif args.no_nested in (False, "False", None, "None"):
+            pass
+        else:
+            raise ValueError(f"--no-nested cannot have the value {args.no_nested}")
     if "drop_series" in keys:
-        cmd += ["--scale-format-string", '%s' % "'%2$d'"]
+        if args.drop_series in (True, "True"):
+            val = '%2$d'
+            cmd += ["--scale-format-string", val]
+        elif args.drop_series in (False, "False", None, "None"):
+            pass
+        else:
+            raise ValueError(f"--drop_series cannot have the value {args.drop_series}")
     if "overwrite" in keys:
         cmd += ["--overwrite"]
-    cmd.append(f'{inps}')
-    cmd.append(f'{outs}')
+    cmd.append(f"{inps}")
+    cmd.append(f"{outs}")
     # cmdstr = ''.join(cmd)
     print(cmd)
     # sys.stdout.write(cmdstr)
diff --git a/modules/functions.nf b/modules/functions.nf
index c6cea8a..98d1407 100755
--- a/modules/functions.nf
+++ b/modules/functions.nf
@@ -40,7 +40,7 @@ def verify_filenames_fromPath(directory, selby, rejby) {
 //        println((file.toString().contains(selby)))
 //        println(!(file.toString().contains(rejby)))
     }
-    println(files)
+//    println(files)
     truth = true
     files.each {
         if (it.toString().contains(" ")) {
diff --git a/modules/processes.nf b/modules/processes.nf
index 41f12ef..63b15fb 100755
--- a/modules/processes.nf
+++ b/modules/processes.nf
@@ -8,6 +8,9 @@ include { verify_axes; verify_filenames_fromPath; verify_filenames_fromList; get
 
 // Conversion processes
 process Convert_EachFileFromRoot2SeparateOMETIFF {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     if ("${params.dest_type}"=="local") {
         publishDir(
             path: "${params.out_path}",
@@ -35,6 +38,9 @@ process Convert_EachFileFromRoot2SeparateOMETIFF {
 }
 
 process Convert_EachFile2SeparateOMETIFF {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     if ("${params.dest_type}"=="local") {
         publishDir(
             path: "${params.out_path}",
@@ -54,6 +60,9 @@ process Convert_EachFile2SeparateOMETIFF {
 }
 
 process Convert_Concatenate2SingleOMETIFF {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     if ("${params.dest_type}"=="local") {
         publishDir(
             path: "${params.out_path}",
@@ -81,6 +90,9 @@ process Convert_Concatenate2SingleOMETIFF {
 }
 
 process Convert_EachFileFromRoot2SeparateOMEZARR {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     if ("${params.dest_type}"=="local") {
         publishDir(
             path: "${params.out_path}",
@@ -107,6 +119,9 @@ process Convert_EachFileFromRoot2SeparateOMEZARR {
 }
 
 process Convert_EachFile2SeparateOMEZARR {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     if ("${params.dest_type}"=="local") {
         publishDir(
             path: "${params.out_path}",
@@ -125,6 +140,9 @@ process Convert_EachFile2SeparateOMEZARR {
 }
 
 process Convert_Concatenate2SingleOMEZARR{
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     // This process will be probably changed completely. Create hyperstack will probably be a different process
     if ("${params.dest_type}"=="local") {
         publishDir(
@@ -155,6 +173,9 @@ process Convert_Concatenate2SingleOMEZARR{
 
 // Processes for inspecting a remote location:
 process Inspect_S3Path {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     val source
     output:
@@ -170,6 +191,9 @@ process Inspect_S3Path {
 
 // Transfer processes:
 process Transfer_Local2S3Storage {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     path local
     output:
@@ -190,6 +214,9 @@ process Transfer_Local2S3Storage {
 }
 
 process Mirror_S3Storage2Local {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     val source
     output:
@@ -204,6 +231,9 @@
 
 
 process Transfer_S3Storage2Local {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     val s3path
     val s3name
@@ -218,6 +248,9 @@
 }
 
 process Transfer_Local2PrivateBiostudies {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     path local
     output:
@@ -231,6 +264,9 @@
 
 
 process Transfer_PrivateBiostudies2Local {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     val source
     output:
@@ -243,6 +279,9 @@
 }
 
 process Transfer_PublicBiostudies2Local {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     val source
     output:
@@ -255,6 +294,9 @@
 }
 
 process CreatePatternFile1 {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     path inpath
     output:
@@ -274,6 +316,9 @@
 }
 
 process CreatePatternFile2 {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     path inpath
     output:
@@ -293,6 +338,9 @@
 }
 
 process CreatePatternFileFromCsv {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     path inpath
     // input:
@@ -316,6 +364,9 @@
 }
 
 process Csv2Symlink2 {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     path csv_path
     input:
@@ -333,6 +384,9 @@
 }
 
 process Csv2Symlink1 {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     path csv_path
     input:
@@ -350,6 +404,9 @@
 }
 
 process ParseCsv {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     input:
     path csv_path
     input:
@@ -375,6 +432,9 @@ process ParseCsv {
 }
 
 process UpdateCsv {
+    sleep 1000
+    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+    maxRetries 5
     if ("${params.dest_type}"=="local") {
         publishDir(
             path: "${params.out_path}",
@@ -389,6 +449,8 @@
     val input_column
     input:
     val conversion_type
+    input:
+    path proof_of_files
     output:
     path "FileList.csv"
     script:
diff --git a/modules/subworkflows_ometiff.nf b/modules/subworkflows_ometiff.nf
index 9c2e71f..05ca4c4 100755
--- a/modules/subworkflows_ometiff.nf
+++ b/modules/subworkflows_ometiff.nf
@@ -157,7 +157,7 @@ workflow Convert2OMETIFF_FromLocal_CSV { // s3 &! merged && CSV
     }
     else {
         def fpath = file(params.in_path)
-        parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' ) // CAREFUL!
+        parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' )
         ch0 = Csv2Symlink1( parsedCsv, "RootOriginal", "ImageNameOriginal", 'symlinks' ).flatten()
         ch1 = ch0.filter { it.toString().contains(params.pattern) }
         if ( params.reject_pattern.size() > 0 ) {
@@ -167,8 +167,9 @@ workflow Convert2OMETIFF_FromLocal_CSV { // s3 &! merged && CSV
             ch = ch1
         }
         output = Convert_EachFile2SeparateOMETIFF(ch)
-        UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff")
-        if (params.dest_type == "s3") {
+        mock = output.collect().flatten().first()
+        UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff", mock)
+        if ( params.dest_type == "s3" ) {
             Transfer_Local2S3Storage(output)
             Transfer_CSV2S3Storage(UpdateCsv.out)
         }
@@ -191,7 +192,7 @@ workflow Convert2OMETIFF_FromS3_CSV { // s3 &! merged && CSV
     }
     else {
         def fpath = file(params.in_path)
-        println(fpath)
+//        println(fpath)
         parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' ) // CAREFUL!
         ch_ = Channel.fromPath(fpath.toString()).
             splitCsv(header:true)
@@ -208,7 +209,8 @@ workflow Convert2OMETIFF_FromS3_CSV { // s3 &! merged && CSV
         ch1f = ch1.flatMap { file(it).Name }
         ch = Transfer_S3Storage2Local(ch1, ch1f)
         output = Convert_EachFile2SeparateOMETIFF(ch)
-        UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff")
+        mock = output.collect().flatten().first()
+        UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff", mock)
         if (params.dest_type == "s3") {
             Transfer_Local2S3Storage(output)
             Transfer_CSV2S3Storage(UpdateCsv.out)
diff --git a/modules/subworkflows_omezarr.nf b/modules/subworkflows_omezarr.nf
index 53e78cb..395910c 100755
--- a/modules/subworkflows_omezarr.nf
+++ b/modules/subworkflows_omezarr.nf
@@ -157,7 +157,7 @@ workflow Convert2OMEZARR_FromLocal_CSV { // s3 &! merged && CSV
     }
     else {
         def fpath = file(params.in_path)
-        parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' ) // CAREFUL!
+        parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' )
        ch0 = Csv2Symlink1( parsedCsv, "RootOriginal", "ImageNameOriginal", 'symlinks' ).flatten()
         ch1 = ch0.filter { it.toString().contains(params.pattern) }
         if ( params.reject_pattern.size() > 0 ) {
@@ -167,8 +167,9 @@ workflow Convert2OMEZARR_FromLocal_CSV { // s3 &! merged && CSV
             ch = ch1
         }
         output = Convert_EachFile2SeparateOMEZARR(ch)
-        UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff")
-        if (params.dest_type == "s3") {
+        mock = output.collect().flatten().first()
+        UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff", mock)
+        if ( params.dest_type == "s3" ) {
             Transfer_Local2S3Storage(output)
             Transfer_CSV2S3Storage(UpdateCsv.out)
         }
@@ -191,7 +192,7 @@ workflow Convert2OMEZARR_FromS3_CSV { // s3 &! merged && CSV
     }
     else {
         def fpath = file(params.in_path)
-        println(fpath)
+//        println(fpath)
         parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' ) // CAREFUL!
         ch_ = Channel.fromPath(fpath.toString()).
             splitCsv(header:true)
@@ -208,7 +209,8 @@ workflow Convert2OMEZARR_FromS3_CSV { // s3 &! merged && CSV
         ch1f = ch1.flatMap { file(it).Name }
         ch = Transfer_S3Storage2Local(ch1, ch1f)
         output = Convert_EachFile2SeparateOMEZARR(ch)
-        UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff")
+        mock = output.collect().flatten().first()
+        UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff", mock)
         if (params.dest_type == "s3") {
             Transfer_Local2S3Storage(output)
             Transfer_CSV2S3Storage(UpdateCsv.out)
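
Note on the retry directives: every process in modules/processes.nf now pairs a dynamic errorStrategy closure with maxRetries 5, so a failed task is resubmitted after an exponentially growing pause (2^attempt * 200 ms, i.e. 400 ms, 800 ms, 1600 ms, ...). A minimal sketch of the idiom in isolation, not part of this patch; the Demo process and its script body are placeholders:

process Demo {
    // Dynamic directive, evaluated each time a task fails: pause for
    // 2^attempt * 200 ms, then ask Nextflow to resubmit the task.
    errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
    // Give up after five attempts.
    maxRetries 5

    input:
    val x

    output:
    stdout

    script:
    """
    echo "${x}"
    """
}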
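Note on the new UpdateCsv input: the subworkflows now compute mock = output.collect().flatten().first() and feed it to the added proof_of_files input, so UpdateCsv only starts once every conversion task has produced output, instead of running concurrently with them. A sketch of the same ordering trick with hypothetical stand-in processes (ConvertOne and Summarize are placeholders, not part of the patch):

process ConvertOne {
    input:
    val x

    output:
    val x

    script:
    "sleep 1"
}

process Summarize {
    input:
    val token

    output:
    stdout

    script:
    "echo 'ran after all conversions (token: ${token})'"
}

workflow {
    converted = ConvertOne(Channel.of(1, 2, 3))
    // collect() waits for the whole channel to complete; first() reduces the
    // collected list to a single value that acts as a dependency token.
    token = converted.collect().flatten().first()
    Summarize(token)
}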