From 6688ef373c2b6b0680445dc23dac18b8b0841b1e Mon Sep 17 00:00:00 2001
From: Kenta Sato
Date: Tue, 18 Jul 2017 19:43:09 +0900
Subject: [PATCH 1/7] distributable BAM reader

---
 src/bam/bai.jl     |  5 +++++
 src/bam/overlap.jl |  1 +
 src/bam/reader.jl  | 20 +++++++++++++++++++-
 3 files changed, 25 insertions(+), 1 deletion(-)

diff --git a/src/bam/bai.jl b/src/bam/bai.jl
index a017ca92..6237979f 100644
--- a/src/bam/bai.jl
+++ b/src/bam/bai.jl
@@ -33,6 +33,11 @@ function BAI(input::IO)
     return read_bai(input)
 end
 
+function findbai(filepath::AbstractString)
+    baipath = string(filepath, ".bai")
+    return isfile(baipath) ? Nullable(BAI(baipath)) : Nullable{BAI}()
+end
+
 # Read a BAI object from `input`.
 function read_bai(input::IO)
     # check magic bytes
diff --git a/src/bam/overlap.jl b/src/bam/overlap.jl
index 378fc14b..2ecc5dae 100644
--- a/src/bam/overlap.jl
+++ b/src/bam/overlap.jl
@@ -46,6 +46,7 @@ mutable struct OverlapIteratorState
 end
 
 function Base.start(iter::OverlapIterator)
+    iter.reader.stream = BGZFStreams.BGZFStream(get(iter.reader.filepath))
     refindex = findfirst(iter.reader.refseqnames, iter.refname)
     if refindex == 0
         throw(ArgumentError("sequence name $(iter.refname) is not found in the header"))
diff --git a/src/bam/reader.jl b/src/bam/reader.jl
index e7b08f47..119ec64e 100644
--- a/src/bam/reader.jl
+++ b/src/bam/reader.jl
@@ -17,6 +17,7 @@ mutable struct Reader{T} <: Bio.IO.AbstractReader
     refseqnames::Vector{String}
     refseqlens::Vector{Int}
     index::Nullable{BAI}
+    filepath::Nullable{String}
 end
 
 function Base.eltype{T}(::Type{Reader{T}})
@@ -40,6 +41,22 @@ function Reader(input::IO; index=nothing)
     return reader
 end
 
+function Reader(filepath::AbstractString; index=:auto)
+    if isa(index, Symbol)
+        if index == :auto
+            index = findbai(filepath)
+        else
+            throw(ArgumentError("invalid index: ':$(index)'"))
+        end
+    elseif isa(index, AbstractString)
+        index = BAI(index)
+    end
+    reader = init_bam_reader(open(filepath))
+    reader.index = index
+    reader.filepath = filepath
+    return reader
+end
+
 function Base.show(io::IO, reader::Reader)
     println(io, summary(reader), ":")
     print(io, "  number of contigs: ", length(reader.refseqnames))
@@ -128,7 +145,8 @@ function init_bam_reader(input::BGZFStreams.BGZFStream)
         voffset,
         refseqnames,
         refseqlens,
-        Nullable{BAI}())
+        Nullable{BAI}(),
+        Nullable{String}())
 end
 
 function init_bam_reader(input::IO)

From 163d74cd20d52f86a41f5a0ea30ba53d82a4d115 Mon Sep 17 00:00:00 2001
From: Kenta Sato
Date: Sun, 23 Jul 2017 18:58:06 +0900
Subject: [PATCH 2/7] fixup! distributable BAM reader

---
 src/bam/overlap.jl |  33 ++++----
 src/bam/reader.jl  | 188 +++++++++++++++++++++------------------
 src/bam/record.jl  |  12 ++-
 3 files changed, 118 insertions(+), 115 deletions(-)

diff --git a/src/bam/overlap.jl b/src/bam/overlap.jl
index 2ecc5dae..54904e86 100644
--- a/src/bam/overlap.jl
+++ b/src/bam/overlap.jl
@@ -31,7 +31,10 @@ end
 # Iterator
 # --------
 
-mutable struct OverlapIteratorState
+mutable struct OverlapIteratorState{S}
+    # reader's state
+    readerstate::ReaderState{S,Record}
+
     # reference index
     refindex::Int
 
@@ -40,31 +43,31 @@ end
 
     # current chunk index
     chunkid::Int
-
-    # pre-allocated record
-    record::Record
 end
 
 function Base.start(iter::OverlapIterator)
-    iter.reader.stream = BGZFStreams.BGZFStream(get(iter.reader.filepath))
-    refindex = findfirst(iter.reader.refseqnames, iter.refname)
+    readerstate = ReaderState(iter.reader)
+    reader = readerstate.reader
+    refindex = findfirst(reader.refseqnames, iter.refname)
     if refindex == 0
         throw(ArgumentError("sequence name $(iter.refname) is not found in the header"))
     end
-    @assert !isnull(iter.reader.index)
-    chunks = GenomicFeatures.Indexes.overlapchunks(get(iter.reader.index).index, refindex, iter.interval)
+    @assert !isnull(reader.index)
+    chunks = GenomicFeatures.Indexes.overlapchunks(get(reader.index).index, refindex, iter.interval)
     if !isempty(chunks)
-        seek(iter.reader, first(chunks).start)
+        seek(reader.input, first(chunks).start)
     end
-    return OverlapIteratorState(refindex, chunks, 1, Record())
+    return OverlapIteratorState(readerstate, refindex, chunks, 1)
 end
 
 function Base.done(iter::OverlapIterator, state)
+    reader = state.readerstate.reader
+    record = state.readerstate.record
     while state.chunkid ≤ endof(state.chunks)
         chunk = state.chunks[state.chunkid]
-        while BGZFStreams.virtualoffset(iter.reader.stream) < chunk.stop
-            read!(iter.reader, state.record)
-            c = compare_intervals(state.record, (state.refindex, iter.interval))
+        while BGZFStreams.virtualoffset(reader.input) < chunk.stop
+            read!(reader, record)
+            c = compare_intervals(record, (state.refindex, iter.interval))
             if c == 0
                 # overlapping
                 return false
@@ -75,14 +78,14 @@
         end
         state.chunkid += 1
         if state.chunkid ≤ endof(state.chunks)
-            seek(iter.reader, state.chunks[state.chunkid].start)
+            seek(reader.input, state.chunks[state.chunkid].start)
         end
     end
     return true
 end
 
 function Base.next(::OverlapIterator, state)
-    return copy(state.record), state
+    return copy(state.readerstate.record), state
 end
 
 function compare_intervals(record::Record, interval::Tuple{Int,UnitRange{Int}})
diff --git a/src/bam/reader.jl b/src/bam/reader.jl
index 119ec64e..79abe729 100644
--- a/src/bam/reader.jl
+++ b/src/bam/reader.jl
@@ -2,64 +2,100 @@
 # ==========
 
 """
-    BAM.Reader(input::IO; index=nothing)
+    BAM.Reader(input; index=nothing)
 
 Create a data reader of the BAM file format.
 
 # Arguments
-* `input`: data source
-* `index=nothing`: filepath to a random access index (currently *bai* is Supported)
+* `input`: data source (filepath or readable `IO` object)
+* `index=nothing`: filepath to a random access index (currently *bai* is supported)
 """
-mutable struct Reader{T} <: Bio.IO.AbstractReader
-    stream::BGZFStreams.BGZFStream{T}
+mutable struct Reader{T<:Union{String,BGZFStreams.BGZFStream}} <: Bio.IO.AbstractReader
+    # data source
+    input::T
+
+    # BAM index
+    index::Nullable{BAI}
+
+    # header data
     header::SAM.Header
-    start_offset::BGZFStreams.VirtualOffset
     refseqnames::Vector{String}
     refseqlens::Vector{Int}
-    index::Nullable{BAI}
-    filepath::Nullable{String}
 end
 
-function Base.eltype{T}(::Type{Reader{T}})
-    return Record
-end
+function Reader(input::BGZFStreams.BGZFStream; index=nothing)
+    if index == nothing
+        index = Nullable{BAI}()
+    elseif index isa BAI
+        index = Nullable(index)
+    elseif index isa AbstractString
+        index = Nullable(BAI(index))
+    elseif index isa Nullable{BAI}
+        # ok
+    else
+        error("unrecognizable index argument: $(typeof(index))")
+    end
 
-function Bio.IO.stream(reader::Reader)
-    return reader.stream
+    # magic bytes
+    B = read(input, UInt8)
+    A = read(input, UInt8)
+    M = read(input, UInt8)
+    x = read(input, UInt8)
+    if B != UInt8('B') || A != UInt8('A') || M != UInt8('M') || x != 0x01
+        error("input was not a valid BAM file")
+    end
+
+    # SAM header
+    textlen = read(input, Int32)
+    samreader = SAM.Reader(IOBuffer(read(input, UInt8, textlen)))
+
+    # reference sequences
+    refseqnames = String[]
+    refseqlens = Int[]
+    n_refs = read(input, Int32)
+    for _ in 1:n_refs
+        namelen = read(input, Int32)
+        data = read(input, UInt8, namelen)
+        seqname = unsafe_string(pointer(data))
+        seqlen = read(input, Int32)
+        push!(refseqnames, seqname)
+        push!(refseqlens, seqlen)
+    end
+
+    return Reader(input, index, samreader.header, refseqnames, refseqlens)
 end
 
 function Reader(input::IO; index=nothing)
-    if isa(index, AbstractString)
-        index = BAI(index)
-    else
-        if index != nothing
-            error("unrecognizable index argument")
-        end
-    end
-    reader = init_bam_reader(input)
-    reader.index = index
-    return reader
+    return Reader(BGZFStreams.BGZFStream(input), index=index)
 end
 
 function Reader(filepath::AbstractString; index=:auto)
-    if isa(index, Symbol)
+    if index isa Symbol
         if index == :auto
             index = findbai(filepath)
         else
             throw(ArgumentError("invalid index: ':$(index)'"))
         end
-    elseif isa(index, AbstractString)
+    elseif index isa AbstractString
         index = BAI(index)
     end
-    reader = init_bam_reader(open(filepath))
-    reader.index = index
-    reader.filepath = filepath
-    return reader
+    return Reader(filepath, index, SAM.Header(), String[], Int[])
 end
+
+function Base.open(reader::Reader{String})
+    return Reader(open(reader.input), index=reader.index)
+end
+
+function Base.eltype{T}(::Type{Reader{T}})
+    return Record
+end
+
+function Bio.IO.stream(reader::Reader)
+    return reader.stream
 end
 
 function Base.show(io::IO, reader::Reader)
-    println(io, summary(reader), ":")
-    print(io, "  number of contigs: ", length(reader.refseqnames))
+    print(io, summary(reader), "()")
 end
 
 """
@@ -87,82 +123,36 @@ function Bio.header(reader::Reader)
     return header(reader)
 end
 
-function Base.seek(reader::Reader, voffset::BGZFStreams.VirtualOffset)
-    seek(reader.stream, voffset)
-end
+#function Base.seek(reader::Reader, voffset::BGZFStreams.VirtualOffset)
+#    seek(reader.stream, voffset)
+#end
+#
+#function Base.seekstart(reader::Reader)
+#    seek(reader.stream, reader.start_offset)
+#end
 
-function Base.seekstart(reader::Reader)
-    seek(reader.stream, reader.start_offset)
+struct ReaderState{S,T}
+    reader::S
+    record::T
 end
 
-function Base.start(reader::Reader)
-    return Record()
+function ReaderState(reader::Reader{<:BGZFStreams.BGZFStream})
+    return ReaderState(reader, Record())
 end
 
-function Base.done(reader::Reader, rec)
-    return eof(reader)
+function ReaderState(reader::Reader{String})
+    return ReaderState(open(reader), Record())
 end
 
-function Base.next(reader::Reader, rec)
-    read!(reader, rec)
-    return copy(rec), rec
+function Base.start(reader::Reader)
+    return ReaderState(reader)
 end
 
-# Initialize a BAM reader by reading the header section.
-function init_bam_reader(input::BGZFStreams.BGZFStream)
-    # magic bytes
-    B = read(input, UInt8)
-    A = read(input, UInt8)
-    M = read(input, UInt8)
-    x = read(input, UInt8)
-    if B != UInt8('B') || A != UInt8('A') || M != UInt8('M') || x != 0x01
-        error("input was not a valid BAM file")
-    end
-
-    # SAM header
-    textlen = read(input, Int32)
-    samreader = SAM.Reader(IOBuffer(read(input, UInt8, textlen)))
-
-    # reference sequences
-    refseqnames = String[]
-    refseqlens = Int[]
-    n_refs = read(input, Int32)
-    for _ in 1:n_refs
-        namelen = read(input, Int32)
-        data = read(input, UInt8, namelen)
-        seqname = unsafe_string(pointer(data))
-        seqlen = read(input, Int32)
-        push!(refseqnames, seqname)
-        push!(refseqlens, seqlen)
-    end
+function Base.done(::Reader, state)
+    return eof(state.reader.input)
+end
 
-    voffset = isa(input.io, Pipe) ?
-        BGZFStreams.VirtualOffset(0, 0) :
-        BGZFStreams.virtualoffset(input)
-    return Reader(
-        input,
-        samreader.header,
-        voffset,
-        refseqnames,
-        refseqlens,
-        Nullable{BAI}(),
-        Nullable{String}())
-end
-
-function init_bam_reader(input::IO)
-    return init_bam_reader(BGZFStreams.BGZFStream(input))
-end
-
-function _read!(reader::Reader, record)
-    unsafe_read(
-        reader.stream,
-        pointer_from_objref(record),
-        FIXED_FIELDS_BYTES)
-    dsize = data_size(record)
-    if length(record.data) < dsize
-        resize!(record.data, dsize)
-    end
-    unsafe_read(reader.stream, pointer(record.data), dsize)
-    record.reader = reader
-    return record
+function Base.next(::Reader, state)
+    read!(state.reader, state.record)
+    return copy(state.record), state
 end
diff --git a/src/bam/record.jl b/src/bam/record.jl
index 965eeb28..69c23324 100644
--- a/src/bam/record.jl
+++ b/src/bam/record.jl
@@ -86,7 +86,17 @@ function Base.show(io::IO, record::Record)
 end
 
 function Base.read!(reader::Reader, record::Record)
-    return _read!(reader, record)
+    unsafe_read(
+        reader.input,
+        pointer_from_objref(record),
+        FIXED_FIELDS_BYTES)
+    dsize = data_size(record)
+    if length(record.data) < dsize
+        resize!(record.data, dsize)
+    end
+    unsafe_read(reader.input, pointer(record.data), dsize)
+    record.reader = reader
+    return record
 end
 

From 27616a6440809d5f7ff58e9c69d17e8ba0d14015 Mon Sep 17 00:00:00 2001
From: Kenta Sato
Date: Sun, 23 Jul 2017 19:11:33 +0900
Subject: [PATCH 3/7] add benchmark files

---
 loadfile.jl | 78 +++++++++++++++++++++++++++++++++++++++++++++++++++++
 main.jl     | 29 ++++++++++++++++++++
 2 files changed, 107 insertions(+)
 create mode 100644 loadfile.jl
 create mode 100644 main.jl

diff --git a/loadfile.jl b/loadfile.jl
new file mode 100644
index 00000000..351ec277
--- /dev/null
+++ b/loadfile.jl
@@ -0,0 +1,78 @@
+@everywhere begin
+    using BioAlignments
+
+    function compute_cov(reader, interval)
+        range = interval.first:interval.last
+        cov = zeros(Int, length(range))
+        for record in eachoverlap(reader, interval)
+            if !BAM.ismapped(record) || !BAM.isprimary(record)
+                continue
+            end
+            aln = BAM.alignment(record)
+            for i in 1:BAM.seqlength(record)
+                j, op = seq2ref(aln, i)
+                if ismatchop(op) && j in range
+                    cov[j - first(range) + 1] += 1
+                end
+            end
+        end
+        return cov
+    end
+
+    function transcript_depth0(bamfile, intervals)
+        reader = BAM.Reader(bamfile)
+        ret = map(intervals) do interval
+            return compute_cov(reader, interval)
+        end
+        close(reader)
+        return ret
+    end
+
+    function transcript_depth1(bamfile, intervals)
+        pmap(intervals) do interval
+            reader = BAM.Reader(bamfile)
+            cov = compute_cov(reader, interval)
+            close(reader)
+            return cov
+        end
+    end
+
+    function transcript_depth2(bamfile, intervals, batchsize=10)
+        reader = BAM.Reader(bamfile)
+        ret = pmap(intervals, batch_size=batchsize) do interval
+            return compute_cov(reader, interval)
+        end
+        close(reader)
+        return ret
+    end
+end
+
+#=
+@everywhere function transcript_depth_dagger(bamfile, intervals)
+    function transcript_depth_chunk(bamfile, intervals)
+        open(BAM.Reader, bamfile, index=string(bamfile, ".bai")) do reader
+            map(intervals) do interval
+                range = interval.first:interval.last
+                cov = zeros(Int, length(range))
+                for record in eachoverlap(reader, interval)
+                    if !BAM.ismapped(record) || !BAM.isprimary(record)
+                        continue
+                    end
+                    aln = BAM.alignment(record)
+                    for i in 1:BAM.seqlength(record)
+                        j, op = seq2ref(aln, i)
+                        if ismatchop(op) && j in range
+                            cov[j - first(range) + 1] += 1
+                        end
+                    end
+                end
+                return cov
+            end
+        end
+    end
+    chunks = map(r->intervals[r], Dagger.split_range(1:endof(intervals), 4))
+    vcat(pmap(chunks) do chunk
+        transcript_depth_chunk(bamfile, chunk)
+    end...)
+end
+=#
diff --git a/main.jl b/main.jl
new file mode 100644
index 00000000..b0f42abb
--- /dev/null
+++ b/main.jl
@@ -0,0 +1,29 @@
+using GenomicFeatures
+
+include("loadfile.jl")
+
+bamfile = expanduser("./data/SRR1238088.sort.bam")
+gff3file = expanduser("./data/TAIR10_GFF3_genes.gff")
+chrom = "Chr1"
+intervals = open(GFF3.Reader, gff3file) do reader
+    intervals = Interval{GFF3.Record}[]
+    for record in reader
+        if GFF3.seqid(record) == chrom && GFF3.featuretype(record) == "mRNA"
+            push!(intervals, Interval(record))
+        end
+    end
+    return intervals
+end
+intervals = intervals[1:1000]
+f = transcript_depth2
+println(sum(map(sum, f(bamfile, intervals))))
+out = STDOUT
+println(out, "--- start benchmarking ---")
+for batchsize in [1, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50]
+    @show batchsize
+    for i in 1:3
+        gc()
+        println(out, @elapsed f(bamfile, intervals, batchsize))
+    end
+end
+println(out, "--- finish benchmarking ---")

From ab08ce2fbb6337caa23c6a45fbc92594d0faf5bb Mon Sep 17 00:00:00 2001
From: Kenta Sato
Date: Sun, 23 Jul 2017 19:16:59 +0900
Subject: [PATCH 4/7] fixup! add benchmark files

---
 main.jl | 1 +
 1 file changed, 1 insertion(+)

diff --git a/main.jl b/main.jl
index b0f42abb..70077c12 100644
--- a/main.jl
+++ b/main.jl
@@ -15,6 +15,7 @@ intervals = open(GFF3.Reader, gff3file) do reader
     return intervals
 end
 intervals = intervals[1:1000]
+
 f = transcript_depth2
 println(sum(map(sum, f(bamfile, intervals))))
 out = STDOUT

From e8214da38755e90ae93c5550f3d959e0895a35ea Mon Sep 17 00:00:00 2001
From: Kenta Sato
Date: Sun, 23 Jul 2017 19:21:24 +0900
Subject: [PATCH 5/7] fix close

---
 loadfile.jl       | 3 ---
 src/bam/reader.jl | 2 +-
 2 files changed, 1 insertion(+), 4 deletions(-)

diff --git a/loadfile.jl b/loadfile.jl
index 351ec277..70f0cafd 100644
--- a/loadfile.jl
+++ b/loadfile.jl
@@ -24,7 +24,6 @@
         ret = map(intervals) do interval
             return compute_cov(reader, interval)
         end
-        close(reader)
         return ret
     end
 
@@ -32,7 +31,6 @@
         pmap(intervals) do interval
             reader = BAM.Reader(bamfile)
             cov = compute_cov(reader, interval)
-            close(reader)
             return cov
         end
     end
@@ -42,7 +40,6 @@
         ret = pmap(intervals, batch_size=batchsize) do interval
             return compute_cov(reader, interval)
         end
-        close(reader)
         return ret
     end
 end
diff --git a/src/bam/reader.jl b/src/bam/reader.jl
index 79abe729..bfc39127 100644
--- a/src/bam/reader.jl
+++ b/src/bam/reader.jl
@@ -91,7 +91,7 @@ function Base.eltype{T}(::Type{Reader{T}})
 end
 
 function Bio.IO.stream(reader::Reader)
-    return reader.stream
+    return reader.input
 end
 
 function Base.show(io::IO, reader::Reader)

From 6f136328da8b9c6f49bb05a9497f287b508e39a5 Mon Sep 17 00:00:00 2001
From: Kenta Sato
Date: Tue, 25 Jul 2017 14:18:14 +0900
Subject: [PATCH 6/7] clean up main.jl

---
 loadfile.jl | 75 -----------------------------------------------
 main.jl     | 84 ++++++++++++++++++++++++++++++++++++++---------------
 2 files changed, 61 insertions(+), 98 deletions(-)
 delete mode 100644 loadfile.jl

diff --git a/loadfile.jl b/loadfile.jl
deleted file mode 100644
index 70f0cafd..00000000
--- a/loadfile.jl
+++ /dev/null
@@ -1,75 +0,0 @@
-@everywhere begin
-    using BioAlignments
-
-    function compute_cov(reader, interval)
-        range = interval.first:interval.last
-        cov = zeros(Int, length(range))
-        for record in eachoverlap(reader, interval)
-            if !BAM.ismapped(record) || !BAM.isprimary(record)
-                continue
-            end
-            aln = BAM.alignment(record)
-            for i in 1:BAM.seqlength(record)
-                j, op = seq2ref(aln, i)
-                if ismatchop(op) && j in range
-                    cov[j - first(range) + 1] += 1
-                end
-            end
-        end
-        return cov
-    end
-
-    function transcript_depth0(bamfile, intervals)
-        reader = BAM.Reader(bamfile)
-        ret = map(intervals) do interval
-            return compute_cov(reader, interval)
-        end
-        return ret
-    end
-
-    function transcript_depth1(bamfile, intervals)
-        pmap(intervals) do interval
-            reader = BAM.Reader(bamfile)
-            cov = compute_cov(reader, interval)
-            return cov
-        end
-    end
-
-    function transcript_depth2(bamfile, intervals, batchsize=10)
-        reader = BAM.Reader(bamfile)
-        ret = pmap(intervals, batch_size=batchsize) do interval
-            return compute_cov(reader, interval)
-        end
-        return ret
-    end
-end
-
-#=
-@everywhere function transcript_depth_dagger(bamfile, intervals)
-    function transcript_depth_chunk(bamfile, intervals)
-        open(BAM.Reader, bamfile, index=string(bamfile, ".bai")) do reader
-            map(intervals) do interval
-                range = interval.first:interval.last
-                cov = zeros(Int, length(range))
-                for record in eachoverlap(reader, interval)
-                    if !BAM.ismapped(record) || !BAM.isprimary(record)
-                        continue
-                    end
-                    aln = BAM.alignment(record)
-                    for i in 1:BAM.seqlength(record)
-                        j, op = seq2ref(aln, i)
-                        if ismatchop(op) && j in range
-                            cov[j - first(range) + 1] += 1
-                        end
-                    end
-                end
-                return cov
-            end
-        end
-    end
-    chunks = map(r->intervals[r], Dagger.split_range(1:endof(intervals), 4))
-    vcat(pmap(chunks) do chunk
-        transcript_depth_chunk(bamfile, chunk)
-    end...)
-end
-=#
diff --git a/main.jl b/main.jl
index 70077c12..fc2a55b6 100644
--- a/main.jl
+++ b/main.jl
@@ -1,30 +1,68 @@
-using GenomicFeatures
+@everywhere begin
+    using BioAlignments
+    using GenomicFeatures
 
-include("loadfile.jl")
-
-bamfile = expanduser("./data/SRR1238088.sort.bam")
-gff3file = expanduser("./data/TAIR10_GFF3_genes.gff")
-chrom = "Chr1"
-intervals = open(GFF3.Reader, gff3file) do reader
-    intervals = Interval{GFF3.Record}[]
-    for record in reader
-        if GFF3.seqid(record) == chrom && GFF3.featuretype(record) == "mRNA"
-            push!(intervals, Interval(record))
+    # The main algorithm.
+    function compute_depth(reader, interval)
+        range = interval.first:interval.last
+        depth = zeros(Int, length(range))
+        for record in eachoverlap(reader, interval)
+            if !BAM.ismapped(record) || !BAM.isprimary(record)
+                continue
+            end
+            aln = BAM.alignment(record)
+            for i in 1:BAM.seqlength(record)
+                j, op = seq2ref(aln, i)
+                if ismatchop(op) && j in range
+                    @inbounds depth[j - first(range) + 1] += 1
+                end
+            end
         end
+        return depth
+    end
+end
+
+# Sequential computation.
+function transcript_depth0(bamfile, intervals)
+    reader = BAM.Reader(bamfile)
+    return map(intervals) do interval
+        return compute_depth(reader, interval)
+    end
+end
+
+# Parallel computation using pmap (open BAM.Reader inside the closure).
+function transcript_depth1(bamfile, intervals, batchsize)
+    pmap(intervals, batch_size=batchsize) do interval
+        reader = BAM.Reader(bamfile)
+        return compute_depth(reader, interval)
     end
-    return intervals
 end
-intervals = intervals[1:1000]
 
-f = transcript_depth2
-println(sum(map(sum, f(bamfile, intervals))))
-out = STDOUT
-println(out, "--- start benchmarking ---")
-for batchsize in [1, 5, 10, 15, 20, 25, 30, 35, 40, 45, 50]
-    @show batchsize
-    for i in 1:3
-        gc()
-        println(out, @elapsed f(bamfile, intervals, batchsize))
+# Parallel computation using pmap (open BAM.Reader outside the closure).
+function transcript_depth2(bamfile, intervals, batchsize)
+    reader = BAM.Reader(bamfile)
+    return pmap(intervals, batch_size=batchsize) do interval
+        return compute_depth(reader, interval)
     end
 end
-println(out, "--- finish benchmarking ---")
+
+bamfile = expanduser("./data/SRR1238088.sort.bam")
+gff3file = expanduser("./data/TAIR10_GFF3_genes.gff")
+intervals = collect(Interval, Iterators.filter(r->GFF3.seqid(r)=="Chr1" && GFF3.featuretype(r)=="gene", GFF3.Reader(open(gff3file))))
+
+using DocOpt
+args = docopt("Usage: main.jl [--batch_size=<batch_size>] <function>")
+batch_size = args["--batch_size"]
+if batch_size == nothing
+    batch_size = 30
+else
+    batch_size = parse(Int, batch_size)
+end
+f = eval(parse(args["<function>"]))
+func = () -> f == transcript_depth0 ? f(bamfile, intervals) : f(bamfile, intervals, batch_size)
+
+println(STDERR, sum(map(sum, func())))
+for i in 1:3
+    gc()
+    println(@elapsed func())
+end

From 6d01563330018d31c2f9139d65b060436b50f8df Mon Sep 17 00:00:00 2001
From: Kenta Sato
Date: Tue, 25 Jul 2017 14:22:17 +0900
Subject: [PATCH 7/7] rename

---
 main.jl => transcript_depth.jl | 0
 1 file changed, 0 insertions(+), 0 deletions(-)
 rename main.jl => transcript_depth.jl (100%)

diff --git a/main.jl b/transcript_depth.jl
similarity index 100%
rename from main.jl
rename to transcript_depth.jl
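
Usage sketch of what this series enables (not taken from the patches themselves). It assumes a Julia 0.6 session with worker processes, BioAlignments and GenomicFeatures installed on every worker, a shared filesystem so workers see the same file, and placeholder file names and coordinates; the `eachoverlap(reader, interval)` call mirrors the one used in the benchmark script, and the counting logic here is only an illustration, not the series' compute_depth. Because `BAM.Reader(filepath)` stores the file path and index rather than an open stream, one reader can be captured by a `pmap` closure and each worker opens its own BGZF stream lazily, which is the design behind transcript_depth2 above.

# Sketch only: paths, coordinates, and worker count are hypothetical.
addprocs(4)                          # same effect as starting julia with `-p 4`
@everywhere using BioAlignments
@everywhere using GenomicFeatures

bamfile = "data/example.sort.bam"    # placeholder; a matching "data/example.sort.bam.bai" is assumed
reader = BAM.Reader(bamfile)         # index=:auto picks up the .bai via findbai; no stream opened yet
intervals = [Interval("Chr1", 10_000, 20_000),
             Interval("Chr1", 50_000, 60_000)]

# The closure captures `reader`, which is cheap to ship to workers because it only
# holds the path and the parsed index; each eachoverlap call then opens its own
# BGZFStream through ReaderState, so workers never share file handles.
counts = pmap(intervals) do interval
    n = 0
    for record in eachoverlap(reader, interval)
        if BAM.ismapped(record) && BAM.isprimary(record)
            n += 1
        end
    end
    n
end
println(counts)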