Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

TableReader and TableWriter take in a Path #5785

Merged
merged 5 commits into from
Mar 19, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@
import htsjdk.samtools.SAMTextHeaderCodec;
import htsjdk.samtools.util.BufferedLineReader;
import htsjdk.samtools.util.LineReader;
import java.nio.file.Files;
import java.nio.file.Path;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.tools.copynumber.formats.CopyNumberFormatsUtils;
import org.broadinstitute.hellbender.tools.copynumber.formats.metadata.Metadata;
Expand Down Expand Up @@ -76,7 +78,7 @@ public abstract class AbstractRecordCollection<METADATA extends Metadata, RECORD
this.recordToDataLineEncoder = Utils.nonNull(recordToDataLineEncoder);
Utils.nonEmpty(mandatoryColumns.names());

try (final RecordCollectionReader reader = new RecordCollectionReader(inputFile)) {
try (final RecordCollectionReader reader = new RecordCollectionReader(IOUtils.fileToPath(inputFile))) {
metadata = MetadataUtils.fromHeader(reader.getHeader(), getMetadataType());
TableUtils.checkMandatoryColumns(reader.columns(), mandatoryColumns, UserException.BadInput::new);
records = reader.stream().collect(Collectors.collectingAndThen(Collectors.toList(), ImmutableList::copyOf));
Expand Down Expand Up @@ -164,11 +166,11 @@ static String formatDouble(final double value) {

final class RecordCollectionReader extends TableReader<RECORD> {
private static final String COMMENT_PREFIX = CopyNumberFormatsUtils.COMMENT_PREFIX; //SAMTextHeaderCodec.HEADER_LINE_START; we need TableReader to treat SAM header as comment lines
// Path to the backing record file; also passed to the TableReader super-constructor.
private final Path path;

/**
 * Creates a reader over the given path.
 *
 * @param path the table file to read; must be readable
 * @throws IOException if the underlying TableReader cannot open the path
 */
RecordCollectionReader(final Path path) throws IOException {
    super(path);
    this.path = path;
}

@Override
Expand All @@ -177,8 +179,8 @@ protected RECORD createRecord(final DataLine dataLine) {
return recordFromDataLineDecoder.apply(dataLine);
}

private SAMFileHeader getHeader() throws FileNotFoundException {
final LineReader lineReader = new BufferedLineReader(new FileInputStream(file));
private SAMFileHeader getHeader() throws IOException {
final LineReader lineReader = new BufferedLineReader(Files.newInputStream(path));
return new SAMTextHeaderCodec().decode(lineReader, getSource());
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
import htsjdk.variant.variantcontext.Genotype;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFConstants;
import java.nio.file.Path;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
Expand Down Expand Up @@ -142,7 +143,7 @@ public static void writeOrientationBiasSummaryTable(final List<Pair<String, Tran

// This code is inefficient, since it loops through all variant contexts for each sampleTransition pair.
try (final TableWriter<Pair<String, Transition> > writer =
TableUtils.writer(outFile, OrientationBiasFilterSummaryTableColumn.COLUMNS,
TableUtils.writer(IOUtils.fileToPath(outFile), OrientationBiasFilterSummaryTableColumn.COLUMNS,
//lambda for creating DataLine with sampleName and segment fields
(sampleTransitionPair, dataLine) -> {
// Create instance of a sample artifact mode and then write it.
Expand Down Expand Up @@ -186,16 +187,17 @@ private static<T extends OrientationSampleTransitionSummary> List<T> readOrienta
final Function<DataLine, T> dataLineToSummaryFunction) {
Utils.nonNull(inputFile);
IOUtils.canReadFile(inputFile);
final Path inputPath = IOUtils.fileToPath(inputFile);
final TableColumnCollection mandatoryColumns = OrientationBiasFilterSummaryTableColumn.COLUMNS;
try (final TableReader<T> reader = TableUtils.reader(inputFile,
try (final TableReader<T> reader = TableUtils.reader(inputPath,
(columns, formatExceptionFactory) -> {
TableUtils.checkMandatoryColumns(columns, mandatoryColumns, formatExceptionFactory);
//return the lambda to translate dataLines into called segments
return dataLineToSummaryFunction;
})) {
return reader.stream().collect(Collectors.toList());
} catch (final IOException | UncheckedIOException e) {
throw new UserException.CouldNotReadInputFile(inputFile, e);
throw new UserException.CouldNotReadInputFile(inputPath, e);
}
}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
package org.broadinstitute.hellbender.tools.walkers.contamination;

import java.nio.file.Path;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.tsv.DataLine;
import org.broadinstitute.hellbender.utils.tsv.TableColumnCollection;
import org.broadinstitute.hellbender.utils.tsv.TableReader;
Expand Down Expand Up @@ -37,15 +39,15 @@ public double getError() {

//----- The following two public static methods read and write contamination files
public static void writeToFile(final List<ContaminationRecord> records, final File outputTable) {
try ( ContaminationRecord.ContaminationTableWriter writer = new ContaminationRecord.ContaminationTableWriter(outputTable) ) {
try ( ContaminationRecord.ContaminationTableWriter writer = new ContaminationRecord.ContaminationTableWriter(IOUtils.fileToPath(outputTable)) ) {
writer.writeAllRecords(records);
} catch (IOException e){
throw new UserException(String.format("Encountered an IO exception while writing to %s.", outputTable));
}
}

public static List<ContaminationRecord> readFromFile(final File tableFile) {
try( ContaminationTableReader reader = new ContaminationTableReader(tableFile) ) {
try( ContaminationTableReader reader = new ContaminationTableReader(IOUtils.fileToPath(tableFile)) ) {
return reader.toList();
} catch (IOException e){
throw new UserException(String.format("Encountered an IO exception while reading from %s.", tableFile));
Expand All @@ -54,7 +56,7 @@ public static List<ContaminationRecord> readFromFile(final File tableFile) {

//-------- The following methods are boilerplate for reading and writing contamination tables
private static class ContaminationTableWriter extends TableWriter<ContaminationRecord> {
/** Opens a writer over the given path using the contamination column schema. */
private ContaminationTableWriter(final Path output) throws IOException {
    super(output, ContaminationTableColumn.COLUMNS);
}

Expand All @@ -67,8 +69,8 @@ protected void composeLine(final ContaminationRecord record, final DataLine data
}

private static class ContaminationTableReader extends TableReader<ContaminationRecord> {
/** Opens a reader over the given path; columns are validated by the superclass. */
public ContaminationTableReader(final Path path) throws IOException {
    super(path);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
package org.broadinstitute.hellbender.tools.walkers.contamination;

import htsjdk.samtools.util.Locatable;
import java.nio.file.Path;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.SimpleInterval;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.tsv.*;

import java.io.File;
Expand Down Expand Up @@ -42,7 +44,7 @@ public double getMinorAlleleFraction() {

//----- The following two public static methods read and write contamination files
public static void writeToFile(final String sample, final List<MinorAlleleFractionRecord> records, final File outputTable) {
try ( MinorAlleleFractionTableWriter writer = new MinorAlleleFractionTableWriter(outputTable) ) {
try ( MinorAlleleFractionTableWriter writer = new MinorAlleleFractionTableWriter(IOUtils.fileToPath(outputTable)) ) {
writer.writeMetadata(TableUtils.SAMPLE_METADATA_TAG, sample);
writer.writeAllRecords(records);
} catch (IOException e){
Expand All @@ -51,17 +53,21 @@ public static void writeToFile(final String sample, final List<MinorAlleleFracti
}

public static ImmutablePair<String, List<MinorAlleleFractionRecord>> readFromFile(final File tableFile) {
try( MinorAlleleFractionTableReader reader = new MinorAlleleFractionTableReader(tableFile) ) {
return readFromPath(IOUtils.fileToPath(tableFile));
}

/**
 * Reads minor-allele-fraction records plus the sample name stored in the
 * table's metadata.
 *
 * @param tablePath source path
 * @return pair of (sample name from the {@code SAMPLE_METADATA_TAG} metadata entry, records)
 */
public static ImmutablePair<String, List<MinorAlleleFractionRecord>> readFromPath(final Path tablePath) {
    try( MinorAlleleFractionTableReader reader = new MinorAlleleFractionTableReader(tablePath) ) {
        final List<MinorAlleleFractionRecord> list = reader.toList();
        return ImmutablePair.of(reader.getMetadata().get(TableUtils.SAMPLE_METADATA_TAG), list);
    } catch (IOException e){
        // NOTE(review): cause `e` is discarded; message carries only the path.
        throw new UserException(String.format("Encountered an IO exception while reading from %s.", tablePath));
    }
}

//-------- The following methods are boilerplate for reading and writing contamination tables
private static class MinorAlleleFractionTableWriter extends TableWriter<MinorAlleleFractionRecord> {
/** Opens a writer over the given path using the minor-allele-fraction column schema. */
private MinorAlleleFractionTableWriter(final Path output) throws IOException {
    super(output, MinorAlleleFractionTableColumn.COLUMNS);
}

Expand All @@ -76,8 +82,8 @@ protected void composeLine(final MinorAlleleFractionRecord record, final DataLin
}

public static class MinorAlleleFractionTableReader extends TableReader<MinorAlleleFractionRecord> {
/** Opens a reader over the given path; columns are validated by the superclass. */
public MinorAlleleFractionTableReader(final Path path) throws IOException {
    super(path);
}

@Override
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,14 @@
import htsjdk.samtools.util.Locatable;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.vcf.VCFConstants;
import java.nio.file.Path;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.math3.util.FastMath;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.BaseUtils;
import org.broadinstitute.hellbender.utils.MathUtils;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.pileup.ReadPileup;
import org.broadinstitute.hellbender.utils.tsv.*;

Expand Down Expand Up @@ -90,7 +92,7 @@ public double getMinorAlleleFraction() {

//----- The following two public static methods read and write pileup summary files
public static void writeToFile(final String sample, final List<PileupSummary> records, final File outputTable) {
try ( PileupSummaryTableWriter writer = new PileupSummaryTableWriter(outputTable) ) {
try ( PileupSummaryTableWriter writer = new PileupSummaryTableWriter(IOUtils.fileToPath(outputTable)) ) {
writer.writeMetadata(TableUtils.SAMPLE_METADATA_TAG, sample);
writer.writeAllRecords(records);
} catch (IOException e){
Expand All @@ -99,7 +101,7 @@ public static void writeToFile(final String sample, final List<PileupSummary> re
}

public static ImmutablePair<String, List<PileupSummary>> readFromFile(final File tableFile) {
try( PileupSummaryTableReader reader = new PileupSummaryTableReader(tableFile) ) {
try( PileupSummaryTableReader reader = new PileupSummaryTableReader(IOUtils.fileToPath(tableFile)) ) {
final List<PileupSummary> pileupSummaries = reader.toList();
return ImmutablePair.of(reader.getMetadata().get(TableUtils.SAMPLE_METADATA_TAG), pileupSummaries);
} catch (IOException e){
Expand All @@ -109,7 +111,7 @@ public static ImmutablePair<String, List<PileupSummary>> readFromFile(final File

//-------- The following methods are boilerplate for reading and writing pileup summary tables
private static class PileupSummaryTableWriter extends TableWriter<PileupSummary> {
/** Opens a writer over the given path using the pileup-summary column schema. */
private PileupSummaryTableWriter(final Path output) throws IOException {
    super(output, PileupSummaryTableColumn.COLUMNS);
}

Expand All @@ -125,7 +127,7 @@ protected void composeLine(final PileupSummary record, final DataLine dataLine)
}

private static class PileupSummaryTableReader extends TableReader<PileupSummary> {
/** Opens a reader over the given path; columns are validated by the superclass. */
public PileupSummaryTableReader(final Path path) throws IOException {
    super(path);
}

@Override
protected PileupSummary createRecord(final DataLine dataLine) {
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
package org.broadinstitute.hellbender.tools.walkers.mutect;

import java.nio.file.Path;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.Utils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.tsv.DataLine;
import org.broadinstitute.hellbender.utils.tsv.TableColumnCollection;
import org.broadinstitute.hellbender.utils.tsv.TableReader;
Expand All @@ -27,15 +29,17 @@ public MutectStats(final String statistic, final double value) {

//----- The following two public static methods read and write contamination files
public static void writeToFile(final List<MutectStats> records, final File outputTable) {
try ( MutectStats.MutectStatsWriter writer = new MutectStats.MutectStatsWriter(outputTable) ) {
try ( MutectStats.MutectStatsWriter writer = new MutectStats.MutectStatsWriter(
IOUtils.fileToPath(outputTable)) ) {
writer.writeAllRecords(records);
} catch (IOException e){
throw new UserException(String.format("Encountered an IO exception while writing to %s.", outputTable));
}
}

public static List<MutectStats> readFromFile(final File tableFile) {
try( MutectStats.MutectStatsReader reader = new MutectStats.MutectStatsReader(tableFile) ) {
try( MutectStats.MutectStatsReader reader = new MutectStats.MutectStatsReader(
IOUtils.fileToPath(tableFile)) ) {
return reader.toList();
} catch (IOException e){
throw new UserException(String.format("Encountered an IO exception while reading from %s.", tableFile));
Expand All @@ -44,7 +48,7 @@ public static List<MutectStats> readFromFile(final File tableFile) {

//-------- The following methods are boilerplate for reading and writing contamination tables
private static class MutectStatsWriter extends TableWriter<MutectStats> {
/** Opens a writer over the given path using the Mutect-stats column schema. */
private MutectStatsWriter(final Path output) throws IOException {
    super(output, MutectStats.MutectStatsColumn.COLUMNS);
}

Expand All @@ -56,7 +60,7 @@ protected void composeLine(final MutectStats record, final DataLine dataLine) {
}

private static class MutectStatsReader extends TableReader<MutectStats> {
/** Opens a reader over the given path; columns are validated by the superclass. */
public MutectStatsReader(final Path file) throws IOException {
    super(file);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import htsjdk.variant.variantcontext.writer.VariantContextWriter;
import htsjdk.variant.vcf.VCFHeader;
import htsjdk.variant.vcf.VCFHeaderLine;
import java.nio.file.Path;
import org.broadinstitute.barclay.argparser.Argument;
import org.broadinstitute.barclay.argparser.ArgumentCollection;
import org.broadinstitute.barclay.argparser.CommandLineProgramProperties;
Expand All @@ -12,6 +13,7 @@
import org.broadinstitute.hellbender.engine.*;
import org.broadinstitute.hellbender.exceptions.GATKException;
import org.broadinstitute.hellbender.tools.walkers.mutect.Mutect2;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.param.ParamUtils;
import picard.cmdline.programgroups.VariantFilteringProgramGroup;
import org.broadinstitute.hellbender.engine.FeatureContext;
Expand Down Expand Up @@ -144,8 +146,10 @@ protected void afterNthPass(final int n) {
if (n < NUMBER_OF_LEARNING_PASSES) {
filteringEngine.learnParameters();
} else if (n == NUMBER_OF_LEARNING_PASSES) {
final File filteringStatsFile = new File(filteringStatsOutput != null ? filteringStatsOutput : outputVcf + FILTERING_STATS_EXTENSION);
filteringEngine.writeFilteringStats(filteringStatsFile);
final Path filteringStats = IOUtils.getPath(
filteringStatsOutput != null ? filteringStatsOutput
: outputVcf + FILTERING_STATS_EXTENSION);
filteringEngine.writeFilteringStats(filteringStats);
} else {
throw new GATKException.ShouldNeverReachHereException("This walker should never reach (zero-indexed) pass " + n);
}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,10 @@
package org.broadinstitute.hellbender.tools.walkers.mutect.filtering;

import java.nio.file.Path;
import org.apache.commons.lang3.tuple.Pair;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.MathUtils;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.tsv.DataLine;
import org.broadinstitute.hellbender.utils.tsv.TableColumnCollection;
import org.broadinstitute.hellbender.utils.tsv.TableWriter;
Expand Down Expand Up @@ -62,7 +64,7 @@ private enum M2FilterStatsTableColumn {
}

private static class Mutect2FilterStatsWriter extends TableWriter<FilterStats> {
/** Opens a writer over the given path using the M2 filter-stats column schema. */
private Mutect2FilterStatsWriter(final Path output) throws IOException {
    super(output, M2FilterStatsTableColumn.COLUMNS);
}

Expand All @@ -76,7 +78,7 @@ protected void composeLine(final FilterStats stats, final DataLine dataLine) {
}
}

public static void writeM2FilterSummary(final Collection<FilterStats> filterStats, final File outputTable, List<Pair<String, String>> clusteringMetadata,
public static void writeM2FilterSummary(final Collection<FilterStats> filterStats, final Path outputTable, List<Pair<String, String>> clusteringMetadata,
final double threshold, final double totalCalls, final double expectedTruePositives,
final double expectedFalsePositives, final double expectedFalseNegatives) {
try (Mutect2FilterStatsWriter writer = new Mutect2FilterStatsWriter(outputTable)) {
Expand Down
Loading