Updating htsjdk, disq, and Picard.
* Updating htsjdk 2.18.2 -> 2.19.0
* Updating disq 0.2.0 -> 0.3.0
* Updating Picard 2.18.25 -> 2.19.0

* Disabling a test that relied on broken behavior that has been fixed in htsjdk
* Removing code that has migrated to htsjdk
lbergelson authored and cmnbroad committed Mar 27, 2019
1 parent e4df3d3 commit 8140531
Showing 13 changed files with 25 additions and 1,295 deletions.
6 changes: 3 additions & 3 deletions build.gradle
@@ -58,12 +58,12 @@ repositories {
 }
 
 final requiredJavaVersion = "8"
-final htsjdkVersion = System.getProperty('htsjdk.version','2.18.2')
-final picardVersion = System.getProperty('picard.version','2.18.25')
+final htsjdkVersion = System.getProperty('htsjdk.version','2.19.0')
+final picardVersion = System.getProperty('picard.version','2.19.0')
 final barclayVersion = System.getProperty('barclay.version','2.1.0')
 final sparkVersion = System.getProperty('spark.version', '2.2.0')
 final hadoopVersion = System.getProperty('hadoop.version', '2.8.2')
-final disqVersion = System.getProperty('disq.version','0.2.0')
+final disqVersion = System.getProperty('disq.version','0.3.0')
 final genomicsdbVersion = System.getProperty('genomicsdb.version','1.0.0-rc2')
 final testNGVersion = '6.11'
 // Using the shaded version to avoid conflicts between its protobuf dependency
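Each of these version coordinates reads a system property before falling back to its default, so the build can be pointed at a different release without editing this file; for example, ./gradlew test -Dhtsjdk.version=2.19.0 would exercise the new htsjdk (an illustrative invocation, not part of this commit).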
@@ -125,7 +125,7 @@ private static void createBaiAndSplittingIndex(final File inputBam, final File i
         assertBamIsCoordinateSorted(header);
         final SBIIndexWriter indexer = new SBIIndexWriter(out, granularity);
 
-        final BAMIndexer bamIndexer = new BAMIndexer(IOUtils.replaceExtension(index, BAMIndex.BAMIndexSuffix), header);
+        final BAMIndexer bamIndexer = new BAMIndexer(IOUtils.replaceExtension(index, BAMIndex.BAI_INDEX_SUFFIX), header);
         BAMFileSpan lastFilePointer = null;
         for(final SAMRecord read : reader){
             BAMFileSpan filePointer = (BAMFileSpan) read.getFileSource().getFilePointer();
@@ -149,7 +149,7 @@ private static void createBaiAndSplittingIndex(final File inputBam, final File i
 
     private static void assertBamIsCoordinateSorted(final SAMFileHeader header) {
         if( header.getSortOrder() != SAMFileHeader.SortOrder.coordinate) {
-            throw new UserException.BadInput("Cannot create a " + BAMIndex.BAMIndexSuffix + " index for a file " +
+            throw new UserException.BadInput("Cannot create a " + BAMIndex.BAI_INDEX_SUFFIX + " index for a file " +
                     "that isn't coordinate sorted.");
         }
     }
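The only change in this tool is htsjdk 2.19.0's rename of BAMIndex.BAMIndexSuffix to BAMIndex.BAI_INDEX_SUFFIX (the value is still ".bai"). As a minimal standalone sketch of the same htsjdk indexing API (file name hypothetical; the input must be a coordinate-sorted BAM), a .bai can also be produced with BAMIndexer's static helper:

import htsjdk.samtools.BAMIndex;
import htsjdk.samtools.BAMIndexer;
import htsjdk.samtools.SamReader;
import htsjdk.samtools.SamReaderFactory;

import java.io.File;
import java.io.IOException;

public class BaiSketch {
    public static void main(final String[] args) throws IOException {
        final File bam = new File("input.bam"); // hypothetical, coordinate-sorted BAM
        // BAI_INDEX_SUFFIX is the htsjdk 2.19.0 name for the old BAMIndexSuffix constant
        final File bai = new File(bam.getPath() + BAMIndex.BAI_INDEX_SUFFIX);
        // INCLUDE_SOURCE_IN_RECORDS makes each record carry the file pointer the
        // indexer reads, as the tool above does via read.getFileSource().getFilePointer()
        try (final SamReader reader = SamReaderFactory.makeDefault()
                .enable(SamReaderFactory.Option.INCLUDE_SOURCE_IN_RECORDS)
                .open(bam)) {
            BAMIndexer.createIndex(reader, bai); // rejects BAMs that are not coordinate sorted
        }
    }
}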
@@ -1,23 +1,18 @@
 package org.broadinstitute.hellbender.tools.spark.sv.utils;
 
 import htsjdk.samtools.SAMFlag;
+import htsjdk.samtools.reference.FastaReferenceWriter;
 import org.broadinstitute.hellbender.exceptions.GATKException;
 import org.broadinstitute.hellbender.tools.spark.sv.discovery.alignment.AlignedContig;
 import org.broadinstitute.hellbender.tools.spark.sv.discovery.alignment.AlignmentInterval;
 import org.broadinstitute.hellbender.utils.Utils;
 import org.broadinstitute.hellbender.utils.bwa.BwaMemAligner;
 import org.broadinstitute.hellbender.utils.bwa.BwaMemAlignment;
 import org.broadinstitute.hellbender.utils.bwa.BwaMemIndex;
-import org.broadinstitute.hellbender.utils.reference.FastaReferenceWriter;
 
 import java.io.File;
 import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.LinkedHashMap;
-import java.util.List;
-import java.util.Map;
+import java.util.*;
 import java.util.function.BiFunction;
 import java.util.function.Function;
 import java.util.function.Predicate;
@@ -1,6 +1,8 @@
 package org.broadinstitute.hellbender.tools.walkers.fasta;
 
 import com.google.common.primitives.Bytes;
+import htsjdk.samtools.reference.FastaReferenceWriter;
+import htsjdk.samtools.reference.FastaReferenceWriterBuilder;
 import it.unimi.dsi.fastutil.bytes.ByteArrayList;
 import org.broadinstitute.barclay.argparser.Argument;
 import org.broadinstitute.barclay.argparser.CommandLineProgramProperties;
@@ -13,7 +15,6 @@
 import org.broadinstitute.hellbender.exceptions.UserException;
 import org.broadinstitute.hellbender.utils.SimpleInterval;
 import org.broadinstitute.hellbender.utils.io.IOUtils;
-import org.broadinstitute.hellbender.utils.reference.FastaReferenceWriter;
 import picard.cmdline.programgroups.ReferenceProgramGroup;
 
 import java.io.IOException;
@@ -82,7 +83,10 @@ public class FastaReferenceMaker extends ReferenceWalker {
     public void onTraversalStart() {
         final Path path = IOUtils.getPath(output);
         try {
-            writer = new FastaReferenceWriter(path, basesPerLine, true, true);
+            writer = new FastaReferenceWriterBuilder()
+                    .setFastaFile(path)
+                    .setBasesPerLine(basesPerLine)
+                    .build();
        } catch (IOException e) {
            throw new UserException.CouldNotCreateOutputFile("Couldn't create " + output + ", encountered exception: " + e.getMessage(), e);
        }
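FastaReferenceWriter now lives in htsjdk under htsjdk.samtools.reference, and the removed four-argument constructor, whose trailing booleans controlled the companion .fai and .dict outputs, gives way to FastaReferenceWriterBuilder. A minimal sketch of the builder, under the assumption that startSequence/appendBases remain the writer's streaming calls after the migration (output path and bases are made up):

import htsjdk.samtools.reference.FastaReferenceWriter;
import htsjdk.samtools.reference.FastaReferenceWriterBuilder;

import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;

public class FastaSketch {
    public static void main(final String[] args) throws IOException {
        final Path fasta = Paths.get("tiny.fasta"); // hypothetical output
        // Replaces new FastaReferenceWriter(path, basesPerLine, true, true); the
        // .fai/.dict toggles presumably map to setMakeFaiOutput/setMakeDictOutput.
        try (final FastaReferenceWriter writer = new FastaReferenceWriterBuilder()
                .setFastaFile(fasta)
                .setBasesPerLine(60)
                .build()) {
            writer.startSequence("chr_test");
            writer.appendBases("ACGTACGTACGT".getBytes());
        }
    }
}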
@@ -384,8 +384,7 @@ private OutputAlleleSubset calculateOutputAlleleSubset(final AFCalculationResult
             } else {
                 // we want to keep the NON_REF symbolic allele but only in the absence of a non-symbolic allele, e.g.
                 // if we combined a ref / NON_REF gVCF with a ref / alt gVCF
-                final boolean isNonRefWhichIsLoneAltAllele = alternativeAlleleCount == 1 && allele.equals(
-                        Allele.NON_REF_ALLELE);
+                final boolean isNonRefWhichIsLoneAltAllele = alternativeAlleleCount == 1 && allele.equals(Allele.NON_REF_ALLELE);
                 final boolean isPlausible = afCalculationResult.isPolymorphicPhredScaledQual(allele, configuration.genotypeArgs.STANDARD_CONFIDENCE_FOR_CALLING);
 
                 siteIsMonomorphic &= !isPlausible;
@@ -54,7 +54,7 @@ static void runHooks() {
         for (Path path : toBeDeleted) {
             try {
                 IOUtils.deleteRecursively(path);
-            } catch (IOException | SecurityException e) {
+            } catch (SecurityException e) {
                 // do nothing if cannot be deleted, because it is a shutdown hook
             }
         }
@@ -5,6 +5,7 @@
 import htsjdk.samtools.BamFileIoUtils;
 import htsjdk.samtools.cram.build.CramIO;
 import htsjdk.samtools.util.BlockCompressedInputStream;
+import htsjdk.samtools.util.IOUtil;
 import htsjdk.tribble.Tribble;
 import htsjdk.tribble.util.TabixUtils;
 import org.apache.commons.compress.archivers.tar.TarArchiveEntry;
@@ -702,7 +703,7 @@ public static Path createTempPath(String name, String extension) {
         final String filename = path.getFileName().toString();
         IOUtils.deleteOnExit(path.resolveSibling(filename + Tribble.STANDARD_INDEX_EXTENSION));
         IOUtils.deleteOnExit(path.resolveSibling(filename + TabixUtils.STANDARD_INDEX_EXTENSION));
-        IOUtils.deleteOnExit(path.resolveSibling(filename + BAMIndex.BAMIndexSuffix));
+        IOUtils.deleteOnExit(path.resolveSibling(filename + BAMIndex.BAI_INDEX_SUFFIX));
         IOUtils.deleteOnExit(path.resolveSibling(filename.replaceAll(extension + "$", ".bai")));
         IOUtils.deleteOnExit(path.resolveSibling(filename + ".md5"));
 
@@ -1021,14 +1022,8 @@ public static void deleteOnExit(final Path fileToDelete){
      * Delete rootPath recursively
      * @param rootPath is the file/directory to be deleted
      */
-    public static void deleteRecursively(final Path rootPath) throws IOException {
-        final List<Path> pathsToDelete = Files.walk(rootPath)
-                .sorted(Comparator.reverseOrder())
-                .collect(Collectors.toList());
-
-        for (Path path : pathsToDelete) {
-            Files.deleteIfExists(path);
-        }
+    public static void deleteRecursively(final Path rootPath) {
+        IOUtil.recursiveDelete(rootPath);
     }
 
     /**
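deleteRecursively is now a thin delegation to htsjdk's IOUtil.recursiveDelete and no longer declares IOException, which is why the shutdown-hook catch above narrows to SecurityException alone. A minimal sketch of the htsjdk call on a throwaway directory (paths hypothetical); any failure now surfaces as an unchecked exception rather than a checked IOException:

import htsjdk.samtools.util.IOUtil;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;

public class RecursiveDeleteSketch {
    public static void main(final String[] args) throws IOException {
        final Path scratch = Files.createTempDirectory("scratch"); // throwaway directory
        Files.createFile(scratch.resolve("data.txt"));
        IOUtil.recursiveDelete(scratch); // deletes the tree bottom-up, no checked exception
    }
}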