Skip to content

Commit

Permalink
Reduce size of featureCache following some back-of-the-envelope calculations
Browse files Browse the repository at this point in the history
  • Loading branch information
cmdcolin committed Nov 14, 2024
1 parent 8efc12f commit 00a8c8f
Show file tree
Hide file tree
Showing 3 changed files with 301 additions and 291 deletions.
12 changes: 9 additions & 3 deletions plugins/alignments/src/BamAdapter/BamAdapter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -26,7 +26,13 @@ export default class BamAdapter extends BaseFeatureDataAdapter {

private setupP?: Promise<Header>

private featureCache = new QuickLRU<string, Feature>({ maxSize: 5000 })
// Used to avoid re-creating BamSlightlyLazyFeature objects, keeping their
// pre-computed mismatches in the cache. At an average of 100kb-300kb each,
// keeping even just 500 of these in memory is memory intensive, but it can
// reduce recomputation on these objects
private ultraLongFeatureCache = new QuickLRU<string, Feature>({
maxSize: 500,
})

private configureP?: Promise<{
bam: BamFile
Expand Down Expand Up @@ -223,10 +229,10 @@ export default class BamAdapter extends BaseFeatureDataAdapter {
// retrieve a feature from our feature cache if it is available; the
// features in the cache have pre-computed mismatches objects that
// can be re-used across blocks
const ret = this.featureCache.get(`${record.id}`)
const ret = this.ultraLongFeatureCache.get(`${record.id}`)
if (!ret) {
const elt = new BamSlightlyLazyFeature(record, this, ref)
this.featureCache.set(`${record.id}`, elt)
this.ultraLongFeatureCache.set(`${record.id}`, elt)
observer.next(elt)
} else {
observer.next(ret)
Expand Down
16 changes: 10 additions & 6 deletions plugins/alignments/src/CramAdapter/CramAdapter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,7 +37,13 @@ export default class CramAdapter extends BaseFeatureDataAdapter {
sequenceAdapter: BaseSequenceAdapter
}>

private featureCache = new QuickLRU<string, Feature>({ maxSize: 5000 })
// Used to avoid re-creating BamSlightlyLazyFeature objects, keeping their
// pre-computed mismatches in the cache. At an average of 100kb-300kb each,
// keeping even just 500 of these in memory is fairly intensive, but it can
// reduce recomputation on these objects
private ultraLongFeatureCache = new QuickLRU<string, Feature>({
maxSize: 500,
})

// maps a refname to an id
private seqIdToRefName: string[] | undefined
Expand Down Expand Up @@ -270,13 +276,11 @@ export default class CramAdapter extends BaseFeatureDataAdapter {
if (readName && record.readName !== readName) {
continue
}
// retrieve a feature from our feature cache if it is available, the
// features in the cache have pre-computed mismatches objects that
// can be re-used across blocks
const ret = this.featureCache.get(`${record.uniqueId}`)

const ret = this.ultraLongFeatureCache.get(`${record.uniqueId}`)
if (!ret) {
const elt = this.cramRecordToFeature(record)
this.featureCache.set(`${record.uniqueId}`, elt)
this.ultraLongFeatureCache.set(`${record.uniqueId}`, elt)
observer.next(elt)
} else {
observer.next(ret)
Expand Down
Loading

0 comments on commit 00a8c8f

Please sign in to comment.