GR-933 open cache files only once
callunity committed Dec 6, 2019
1 parent 2f80f4d commit c62bc42
Showing 4 changed files with 31 additions and 8 deletions.
26 changes: 26 additions & 0 deletions application/dash_application/utility/df_manipulation.py
@@ -3,6 +3,7 @@
from pandas import DataFrame
from typing import List

+from gsiqcetl import QCETLCache
import gsiqcetl.column
import pinery

@@ -22,6 +23,15 @@
rnaseqqc_ius_columns = [RNASEQQC_COL.Run, RNASEQQC_COL.Lane,
RNASEQQC_COL.Barcodes]

"""
Open a single instance of each cache, and use copies for the reports.
"""
cache = QCETLCache()
_rnaseqqc = cache.rnaseqqc.rnaseqqc
_bamqc = cache.bamqc.bamqc
_bamqc3 = cache.bamqc3.bamqc3
_ichorcna = cache.ichorcna.ichorcna

_pinery_client = pinery.PineryClient()
_provenance_client = pinery.PineryProvenanceClient(provider="pinery-miso-v5")
_pinery_samples = _provenance_client.get_all_samples()
@@ -61,6 +71,22 @@
)


+def get_rnaseqqc():
+    return _rnaseqqc.copy(deep=True)
+
+
+def get_bamqc():
+    return _bamqc.copy(deep=True)
+
+
+def get_bamqc3():
+    return _bamqc3.copy(deep=True)
+
+
+def get_ichorcna():
+    return _ichorcna.copy(deep=True)
+
+
def get_pinery_samples():
    """Get Pinery Sample Provenance DataFrame"""
    return _pinery_samples.copy(deep=True)
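Note: the changes above open the QCETLCache once at module import and hand each report a deep copy, so per-view filtering, merging, or added columns cannot leak back into the shared DataFrames. The sketch below is not part of the commit; it is a minimal illustration of that copy-on-read pattern using a toy pandas DataFrame in place of the real QCETLCache data (the "Run", "Lane", and "Coverage" columns are invented for the example).

# Illustrative sketch only, not from the commit.
import pandas as pd

# Stand-in for the module-level cache: loaded once at import time.
_bamqc = pd.DataFrame({"Run": ["R1", "R2"], "Lane": [1, 2], "Coverage": [30.0, 42.5]})

def get_bamqc():
    # Each caller gets an independent deep copy, so downstream merges,
    # filters, or new columns never mutate the shared DataFrame.
    return _bamqc.copy(deep=True)

report_df = get_bamqc()
report_df["Coverage"] = 0  # change stays local to this report
assert _bamqc["Coverage"].tolist() == [30.0, 42.5]  # shared cache untouched

The deep copy trades a little memory per report for the guarantee that views stay independent, while the expensive cache-file opens happen only once per process.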
3 changes: 1 addition & 2 deletions application/dash_application/views/pre_exome.py
@@ -10,7 +10,6 @@
from ..table_builder import build_table
from ..utility import df_manipulation as util
from ..utility import slider_utils
-from gsiqcetl import QCETLCache
from gsiqcetl.column import BamQcColumn
import pinery

@@ -54,7 +53,7 @@


def get_bamqc_data():
-bamqc_df = QCETLCache().bamqc.bamqc
+bamqc_df = util.get_bamqc()
bamqc_df = util.df_with_normalized_ius_columns(bamqc_df, BAMQC_COL.Run, BAMQC_COL.Lane, BAMQC_COL.Barcodes)

pinery_samples = util.get_pinery_samples_from_active_projects()
3 changes: 1 addition & 2 deletions application/dash_application/views/preqc_rna.py
@@ -10,7 +10,6 @@
from ..utility import df_manipulation as util
from ..plot_builder import fill_in_colour_col, fill_in_shape_col, generate
from ..table_builder import build_table
-from gsiqcetl import QCETLCache
from gsiqcetl.column import RnaSeqQcColumn as RnaColumn
import pinery

@@ -107,7 +106,7 @@ def get_rna_data():
# * QCETLCache().rnaseqqc.rnaseqqc: returns the DataFrame/cache named
# "rnaseqqc" within the rnaseqqc cache (as some caches like bcl2fastq
# contain multiple DataFrame/caches)
-rna_df = QCETLCache().rnaseqqc.rnaseqqc
+rna_df = util.get_rnaseqqc()
# Cast the primary key/join columns to explicit types
rna_df = util.df_with_normalized_ius_columns(rna_df, RNA_COL.Run,
RNA_COL.Lane, RNA_COL.Barcodes)
7 changes: 3 additions & 4 deletions application/dash_application/views/preqc_wgs.py
@@ -7,7 +7,6 @@

import gsiqcetl.column
import pinery
-from gsiqcetl import QCETLCache
from . import navbar
from ..dash_id import init_ids
from ..plot_builder import terminal_output, fill_in_shape_col, fill_in_colour_col, generate
@@ -104,14 +103,14 @@ def get_wgs_data():
* Runs (needed to join Pinery to Instruments)
"""
# Get the BamQC data
-cache = QCETLCache()

-ichorcna_df = cache.ichorcna.ichorcna[[ICHOR_COL.Run,
+ichorcna_df = util.get_ichorcna()
+ichorcna_df = ichorcna_df[[ICHOR_COL.Run,
ICHOR_COL.Lane,
ICHOR_COL.Barcodes,
ICHOR_COL.Ploidy,
ICHOR_COL.TumorFraction]]
-bamqc_df = cache.bamqc3.bamqc3
+bamqc_df = util.get_bamqc3()
wgs_df = bamqc_df.merge(
ichorcna_df, how="left", left_on=[
BAMQC_COL.Run, BAMQC_COL.Lane, BAMQC_COL.Barcodes], right_on=[
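Note: in this view the ichorCNA copy is first narrowed to its key columns plus Ploidy and TumorFraction, then left-joined onto the bamqc3 copy on Run, Lane, and Barcodes, so bamqc3 lanes without an ichorCNA result are kept with missing values. The sketch below is not from the commit; it reproduces that left merge with toy DataFrames, using literal column names as stand-ins for the BAMQC_COL and ICHOR_COL constants.

# Illustrative sketch only, not from the commit.
import pandas as pd

bamqc_df = pd.DataFrame({
    "Run": ["RUN_A", "RUN_A", "RUN_B"],
    "Lane": [1, 2, 1],
    "Barcodes": ["AAA", "CCC", "GGG"],
    "InsertMean": [310.0, 295.0, 305.0],  # invented metric column for the example
})
ichorcna_df = pd.DataFrame({
    "Run": ["RUN_A", "RUN_B"],
    "Lane": [1, 1],
    "Barcodes": ["AAA", "GGG"],
    "Ploidy": [2.0, 3.1],
    "TumorFraction": [0.12, 0.40],
})

# how="left" keeps every bamqc3 row; the RUN_A lane 2 row has no ichorCNA
# match and ends up with NaN Ploidy and TumorFraction.
wgs_df = bamqc_df.merge(
    ichorcna_df, how="left",
    left_on=["Run", "Lane", "Barcodes"],
    right_on=["Run", "Lane", "Barcodes"])
print(wgs_df[["Run", "Lane", "Ploidy", "TumorFraction"]])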
