
Commit

Merge pull request #29 from Euro-BioImaging/dev
Dev
bugraoezdemir authored Feb 19, 2024
2 parents 7a7aa67 + 3fcce94 commit 7a68f7a
Showing 7 changed files with 93 additions and 15 deletions.
Binary file modified bin/__pycache__/PatternHandler.cpython-39.pyc
1 change: 1 addition & 0 deletions bin/parseCsv.py
@@ -34,6 +34,7 @@
relpath = dictitem[namecol]
filename = os.path.basename(relpath)
pretext = os.path.dirname(relpath)
+ if pretext.startswith('/'): pretext = pretext[1:]
newroot = os.path.join(rootpath, pretext)

fnameok = True
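Why the new line in bin/parseCsv.py matters: os.path.join discards every earlier component as soon as a later component is absolute, so a CSV entry whose directory part starts with '/' would silently escape rootpath. A minimal sketch of that behavior with hypothetical values (not part of the commit):

```python
import os

rootpath = "/data/project"   # hypothetical root directory
pretext = "/images/plate1"   # dirname of a CSV entry that carries a leading slash

# Without the guard, the absolute second argument wins and rootpath is dropped:
print(os.path.join(rootpath, pretext))    # /images/plate1

# With the guard from this commit, the result stays anchored under rootpath:
if pretext.startswith('/'):
    pretext = pretext[1:]
print(os.path.join(rootpath, pretext))    # /data/project/images/plate1
```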
19 changes: 15 additions & 4 deletions bin/run_conversion.py
@@ -89,13 +89,24 @@
if "max_workers" in keys:
cmd += ["--max_workers", '%s' % args.max_workers]
if "no_nested" in keys:
cmd += ["--no-nested"]
if args.no_nested in (True, "True"):
cmd += ["--no-nested"]
elif args.no_nested in (False, "False", None, "None"):
pass
else:
raise ValueError(f"--no-nested cannot have the value {args.no_nested}")
if "drop_series" in keys:
cmd += ["--scale-format-string", '%s' % "'%2$d'"]
if args.drop_series in (True, "True"):
val = '%2$d'
cmd += ["--scale-format-string", val]
elif args.drop_series in (False, "False", None, "None"):
pass
else:
raise ValueError(f"--drop_series cannot have the value {args.drop_series}")
if "overwrite" in keys:
cmd += ["--overwrite"]
cmd.append(f'{inps}')
cmd.append(f'{outs}')
cmd.append(f"{inps}")
cmd.append(f"{outs}")
# cmdstr = ''.join(cmd)
print(cmd)
# sys.stdout.write(cmdstr)
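The rewritten branches in bin/run_conversion.py accept flags that reach the script either as real booleans or as the strings "True"/"False" (typical when values are forwarded from the Nextflow layer), and they now raise a ValueError on anything unrecognized instead of appending the flag unconditionally. The drop_series branch also stops wrapping the %2$d scale-format string in an extra pair of literal quotes. A sketch of the same normalization as a reusable helper; the function name and usage are hypothetical, not part of the commit:

```python
def flag_enabled(value, name):
    """Normalize a boolean-ish parameter that may arrive as bool, str, or None."""
    if value in (True, "True"):
        return True
    if value in (False, "False", None, "None"):
        return False
    raise ValueError(f"{name} cannot have the value {value}")

# Usage mirroring the diff:
# if flag_enabled(args.no_nested, "--no-nested"):
#     cmd += ["--no-nested"]
```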
2 changes: 1 addition & 1 deletion modules/functions.nf
@@ -40,7 +40,7 @@ def verify_filenames_fromPath(directory, selby, rejby) {
// println((file.toString().contains(selby)))
// println(!(file.toString().contains(rejby)))
}
- println(files)
+ // println(files)
truth = true
files.each {
if (it.toString().contains(" ")) {
62 changes: 62 additions & 0 deletions modules/processes.nf
@@ -8,6 +8,9 @@ include { verify_axes; verify_filenames_fromPath; verify_filenames_fromList; get
// Conversion processes

process Convert_EachFileFromRoot2SeparateOMETIFF {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
if ("${params.dest_type}"=="local") {
publishDir(
path: "${params.out_path}",
@@ -35,6 +38,9 @@
}

process Convert_EachFile2SeparateOMETIFF {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
if ("${params.dest_type}"=="local") {
publishDir(
path: "${params.out_path}",
@@ -54,6 +60,9 @@
}

process Convert_Concatenate2SingleOMETIFF {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
if ("${params.dest_type}"=="local") {
publishDir(
path: "${params.out_path}",
@@ -81,6 +90,9 @@
}

process Convert_EachFileFromRoot2SeparateOMEZARR {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
if ("${params.dest_type}"=="local") {
publishDir(
path: "${params.out_path}",
@@ -107,6 +119,9 @@
}

process Convert_EachFile2SeparateOMEZARR {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
if ("${params.dest_type}"=="local") {
publishDir(
path: "${params.out_path}",
@@ -125,6 +140,9 @@
}

process Convert_Concatenate2SingleOMEZARR{
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
// This process will be probably changed completely. Create hyperstack will probably be a different process
if ("${params.dest_type}"=="local") {
publishDir(
@@ -155,6 +173,9 @@
// Processes for inspecting a remote location:

process Inspect_S3Path {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
val source
output:
@@ -170,6 +191,9 @@
// Transfer processes:

process Transfer_Local2S3Storage {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
path local
output:
@@ -190,6 +214,9 @@
}

process Mirror_S3Storage2Local {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
val source
output:
@@ -204,6 +231,9 @@


process Transfer_S3Storage2Local {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
val s3path
val s3name
@@ -218,6 +248,9 @@
}

process Transfer_Local2PrivateBiostudies {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
path local
output:
@@ -231,6 +264,9 @@
}

process Transfer_PrivateBiostudies2Local {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
val source
output:
@@ -243,6 +279,9 @@
}

process Transfer_PublicBiostudies2Local {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
val source
output:
@@ -255,6 +294,9 @@
}

process CreatePatternFile1 {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
path inpath
output:
@@ -274,6 +316,9 @@
}

process CreatePatternFile2 {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
path inpath
output:
@@ -293,6 +338,9 @@
}

process CreatePatternFileFromCsv {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
path inpath //
input:
@@ -316,6 +364,9 @@
}

process Csv2Symlink2 {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
path csv_path
input:
@@ -333,6 +384,9 @@
}

process Csv2Symlink1 {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
path csv_path
input:
@@ -350,6 +404,9 @@
}

process ParseCsv {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
input:
path csv_path
input:
@@ -375,6 +432,9 @@
}

process UpdateCsv {
+ sleep 1000
+ errorStrategy { sleep(Math.pow(2, task.attempt) * 200 as long); return 'retry' }
+ maxRetries 5
if ("${params.dest_type}"=="local") {
publishDir(
path: "${params.out_path}",
@@ -389,6 +449,8 @@
val input_column
input:
val conversion_type
+ input:
+ path proof_of_files
output:
path "FileList.csv"
script:
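Every process in modules/processes.nf now opens with the same three-line preamble: a one-second sleep plus an errorStrategy closure that, on failure, waits 2^attempt * 200 ms and returns 'retry', with maxRetries 5 capping the attempts. That is exponential backoff, roughly 0.4 s after the first failure up to 6.4 s after the fifth. A minimal Python sketch of the same schedule (illustrative only; the pipeline itself relies on Nextflow's errorStrategy directive):

```python
import time

def run_with_backoff(task, max_retries=5, base_ms=200):
    """Retry task() with exponential backoff: 2**attempt * base_ms milliseconds."""
    for attempt in range(1, max_retries + 1):
        try:
            return task()
        except Exception:
            if attempt == max_retries:
                raise                                  # attempts exhausted
            time.sleep(2 ** attempt * base_ms / 1000)  # 0.4s, 0.8s, 1.6s, ...
```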
12 changes: 7 additions & 5 deletions modules/subworkflows_ometiff.nf
@@ -157,7 +157,7 @@ workflow Convert2OMETIFF_FromLocal_CSV { // s3 &! merged && CSV
}
else {
def fpath = file(params.in_path)
- parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' ) // CAREFUL!
+ parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' )
ch0 = Csv2Symlink1( parsedCsv, "RootOriginal", "ImageNameOriginal", 'symlinks' ).flatten()
ch1 = ch0.filter { it.toString().contains(params.pattern) }
if ( params.reject_pattern.size() > 0 ) {
@@ -167,8 +167,9 @@
ch = ch1
}
output = Convert_EachFile2SeparateOMETIFF(ch)
- UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff")
- if (params.dest_type == "s3") {
+ mock = output.collect().flatten().first()
+ UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff", mock)
+ if ( params.dest_type == "s3" ) {
Transfer_Local2S3Storage(output)
Transfer_CSV2S3Storage(UpdateCsv.out)
}
@@ -191,7 +192,7 @@ workflow Convert2OMETIFF_FromS3_CSV { // s3 &! merged && CSV
}
else {
def fpath = file(params.in_path)
- println(fpath)
+ // println(fpath)
parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' ) // CAREFUL!
ch_ = Channel.fromPath(fpath.toString()).
splitCsv(header:true)
@@ -208,7 +209,8 @@
ch1f = ch1.flatMap { file(it).Name }
ch = Transfer_S3Storage2Local(ch1, ch1f)
output = Convert_EachFile2SeparateOMETIFF(ch)
- UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff")
+ mock = output.collect().flatten().first()
+ UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff", mock)
if (params.dest_type == "s3") {
Transfer_Local2S3Storage(output)
Transfer_CSV2S3Storage(UpdateCsv.out)
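The workflow-side counterpart of the new proof_of_files input: UpdateCsv used to run as soon as the parsed CSV existed, so the updated FileList.csv could be written, and shipped to S3, before any conversion had finished. Passing mock, the first element of the collected conversion output, as an extra argument forces UpdateCsv to wait, because collect() only emits once the conversion channel has completed. The OMEZARR workflows below receive the identical change. A Python sketch of the barrier idea using futures; all names are hypothetical stand-ins, the real mechanism is Nextflow's dataflow:

```python
from concurrent.futures import ThreadPoolExecutor, wait

def convert(image):            # stands in for Convert_EachFile2SeparateOMETIFF
    return image + ".ome.tiff"

def update_csv(csv, proof):    # stands in for UpdateCsv; proof only enforces ordering
    print("updating", csv, "now that", proof, "exists")

with ThreadPoolExecutor() as pool:
    futures = [pool.submit(convert, img) for img in ("a.tif", "b.tif")]
    wait(futures)                                    # barrier: all conversions done
    update_csv("FileList.csv", futures[0].result())  # this token plays the role of mock
```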
12 changes: 7 additions & 5 deletions modules/subworkflows_omezarr.nf
@@ -157,7 +157,7 @@ workflow Convert2OMEZARR_FromLocal_CSV { // s3 &! merged && CSV
}
else {
def fpath = file(params.in_path)
- parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' ) // CAREFUL!
+ parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' )
ch0 = Csv2Symlink1( parsedCsv, "RootOriginal", "ImageNameOriginal", 'symlinks' ).flatten()
ch1 = ch0.filter { it.toString().contains(params.pattern) }
if ( params.reject_pattern.size() > 0 ) {
@@ -167,8 +167,9 @@
ch = ch1
}
output = Convert_EachFile2SeparateOMEZARR(ch)
- UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff")
- if (params.dest_type == "s3") {
+ mock = output.collect().flatten().first()
+ UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff", mock)
+ if ( params.dest_type == "s3" ) {
Transfer_Local2S3Storage(output)
Transfer_CSV2S3Storage(UpdateCsv.out)
}
@@ -191,7 +192,7 @@ workflow Convert2OMEZARR_FromS3_CSV { // s3 &! merged && CSV
}
else {
def fpath = file(params.in_path)
- println(fpath)
+ // println(fpath)
parsedCsv = ParseCsv( fpath.toString(), params.root_column, params.input_column, 'parsed.txt' ) // CAREFUL!
ch_ = Channel.fromPath(fpath.toString()).
splitCsv(header:true)
@@ -208,7 +209,8 @@
ch1f = ch1.flatMap { file(it).Name }
ch = Transfer_S3Storage2Local(ch1, ch1f)
output = Convert_EachFile2SeparateOMEZARR(ch)
- UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff")
+ mock = output.collect().flatten().first()
+ UpdateCsv(parsedCsv, "RootOriginal", "ImageNameOriginal", "ometiff", mock)
if (params.dest_type == "s3") {
Transfer_Local2S3Storage(output)
Transfer_CSV2S3Storage(UpdateCsv.out)
