
Commit

…into mn1/ersa
Michael Nuhn committed Nov 23, 2015
2 parents 35eff95 + 432c056 commit 5fe1f8a
Showing 2 changed files with 4 additions and 4 deletions.
2 changes: 1 addition & 1 deletion modules/Bio/EnsEMBL/Funcgen/Hive/Config/Collections.pm
@@ -177,7 +177,7 @@ sub pipeline_analyses {
       -module => 'Bio::EnsEMBL::Funcgen::Hive::RunWiggleTools',
       -parameters => {mode => 'RPKM'},
       #-input_ids => [ dataflowed from PreprocessAlignments via branch 2 ]
-      -analysis_capacity => 1000,
+      -analysis_capacity => 100,
       #Change this to hive_capacity as it may be competing with parallel peak jobs
       -rc_name => 'normal_30GB_2cpu',
       # This resource usage is a product of the read depth, so it could be detected in
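The in-diff comment above suggests replacing -analysis_capacity with -hive_capacity so this analysis shares eHive's global worker budget instead of holding a fixed per-analysis cap while peak-calling jobs run in parallel. A minimal sketch of what such an entry inside pipeline_analyses could look like; the logic_name and the capacity value are illustrative, not taken from the commit:

      {-logic_name => 'RunWiggleTools_RPKM',   # hypothetical logic_name, the diff does not show the real one
       -module     => 'Bio::EnsEMBL::Funcgen::Hive::RunWiggleTools',
       -parameters => {mode => 'RPKM'},
       # hive_capacity: each worker counts as 1/hive_capacity of a shared load budget,
       # so this analysis is balanced against other hive_capacity-limited analyses
       # (e.g. parallel peak jobs) rather than claiming its own fixed pool of workers
       # the way analysis_capacity does
       -hive_capacity => 100,                   # illustrative value
       -rc_name       => 'normal_30GB_2cpu'},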
6 changes: 3 additions & 3 deletions modules/Bio/EnsEMBL/Funcgen/Hive/Config/ReadAlignment.pm
@@ -332,21 +332,21 @@ sub pipeline_analyses {
      {-logic_name => 'Run_bwa_samse_control_chunk',
       -module => 'Bio::EnsEMBL::Funcgen::Hive::RunAligner',
       -batch_size => 1, #max parallelisation???
-      -analysis_capacity => 1000,
+      -analysis_capacity => 100,
       -rc_name => 'normal_10gb'},

      {-logic_name => 'Run_bwa_samse_merged_chunk',
       -module => 'Bio::EnsEMBL::Funcgen::Hive::RunAligner',
       -batch_size => 1, #max parallelisation???
-      -analysis_capacity => 1000,
+      -analysis_capacity => 100,
       -rc_name => 'normal_10gb'},

      {-logic_name => 'Run_bwa_samse_replicate_chunk',
       -module => 'Bio::EnsEMBL::Funcgen::Hive::RunAligner',
       # These jobs can be run in parallell... don't put too many since it may generate many jobs...jobs!
       #-limit => 1,#what is this?
       -batch_size => 1, #max parallelisation? Although probably want to up this so we don't hit pending time
-      -analysis_capacity => 1000,
+      -analysis_capacity => 100,
       -rc_name => 'normal_10gb'},


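All three Run_bwa_samse_*_chunk analyses now carry the same literal capacity of 100. One way to keep them in sync is to route the number through a single PipeConfig option via eHive's default_options and $self->o(); this is an illustrative sketch only, not part of the commit, and the option name bwa_samse_capacity is hypothetical:

sub default_options {
  my ($self) = @_;
  return {
    %{ $self->SUPER::default_options },
    bwa_samse_capacity => 100,   # hypothetical option: one knob for all bwa samse chunk analyses
  };
}

# ...then inside pipeline_analyses, each chunk analysis reads the shared value:
      {-logic_name        => 'Run_bwa_samse_control_chunk',
       -module            => 'Bio::EnsEMBL::Funcgen::Hive::RunAligner',
       -batch_size        => 1,   # one chunk per job claim, keeps parallelisation at its maximum
       -analysis_capacity => $self->o('bwa_samse_capacity'),
       -rc_name           => 'normal_10gb'},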
