Make method parameters final where possible.
heuermh committed Sep 12, 2017
1 parent 9293243 commit 4cc7b01
Showing 71 changed files with 513 additions and 420 deletions.
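Every hunk below follows the same mechanical pattern: parameters that are never reassigned gain the `final` modifier. A minimal before-and-after sketch of the pattern (hypothetical class, not from this repository):

```java
// Hypothetical example, not from this repository: the same method before
// and after the kind of change made throughout this commit.
public final class FinalParametersExample {

    // Before: nothing stops accidental reassignment of 'count' in the body.
    static int scaleBefore(int count, int factor) {
        return count * factor;
    }

    // After: 'final' documents that the parameters are never reassigned and
    // makes the compiler reject any assignment to them. It does not change
    // the method signature seen by callers.
    static int scaleAfter(final int count, final int factor) {
        return count * factor;
    }

    public static void main(final String[] args) {
        System.out.println(scaleAfter(3, 7)); // 21
    }
}
```

Because `final` on a parameter is a compile-time check only, the change is binary-compatible, which is why it can be applied wholesale across 71 files.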
4 changes: 2 additions & 2 deletions src/main/java/htsjdk/samtools/LinearBAMIndex.java
@@ -26,11 +26,11 @@
*/
public class LinearBAMIndex extends CachingBAMFileIndex {

-public LinearBAMIndex(SeekableStream stream, SAMSequenceDictionary dict) {
+public LinearBAMIndex(final SeekableStream stream, final SAMSequenceDictionary dict) {
super(stream, dict);
}

-public LinearIndex getLinearIndex(int idx) {
+public LinearIndex getLinearIndex(final int idx) {
return getQueryResults(idx).getLinearIndex();
}
}
14 changes: 7 additions & 7 deletions src/main/java/org/seqdoop/hadoop_bam/AnySAMInputFormat.java
@@ -91,11 +91,11 @@ public AnySAMInputFormat() {
* Creates a new input format, reading {@link #TRUST_EXTS_PROPERTY} from
* the given <code>Configuration</code>.
*/
-public AnySAMInputFormat(Configuration conf) {
+public AnySAMInputFormat(final Configuration conf) {
this(conf, new HashMap<>(), false);
}

-private static boolean trustExtensions(Configuration conf) {
+private static boolean trustExtensions(final Configuration conf) {
return conf.getBoolean(TRUST_EXTS_PROPERTY, true);
}

@@ -107,11 +107,11 @@ private static boolean trustExtensions(Configuration conf) {
* <p>The <code>Map</code> is not copied, so it should not be modified while
* this input format is in use!</p>
*/
-public AnySAMInputFormat(Map<Path, SAMFormat> formatMap) {
+public AnySAMInputFormat(final Map<Path, SAMFormat> formatMap) {
this(null, formatMap, true);
}

-private AnySAMInputFormat(Configuration conf, Map<Path, SAMFormat> formatMap, boolean givenMap) {
+private AnySAMInputFormat(final Configuration conf, final Map<Path, SAMFormat> formatMap, final boolean givenMap) {
this.formatMap = formatMap;
this.givenMap = givenMap;
this.conf = conf;
@@ -176,7 +176,7 @@ public SAMFormat getFormat(final Path path) throws PathNotFoundException {
*/
@Override
public RecordReader<LongWritable, SAMRecordWritable>
-createRecordReader(InputSplit split, TaskAttemptContext ctx)
+createRecordReader(final InputSplit split, final TaskAttemptContext ctx)
throws InterruptedException, IOException {
final Path path;
if (split instanceof FileSplit) {
@@ -218,7 +218,7 @@ else if (split instanceof FileVirtualSplit) {
* {@link SAMInputFormat} as appropriate for the given path.
*/
@Override
-public boolean isSplitable(JobContext job, Path path) {
+public boolean isSplitable(final JobContext job, final Path path) {
if (this.conf == null) {
this.conf = job.getConfiguration();
}
@@ -252,7 +252,7 @@ public boolean isSplitable(JobContext job, Path path) {
* unchanged.
*/
@Override
-public List<InputSplit> getSplits(JobContext job)
+public List<InputSplit> getSplits(final JobContext job)
throws IOException {
if (this.conf == null) {
this.conf = job.getConfiguration();
4 changes: 2 additions & 2 deletions src/main/java/org/seqdoop/hadoop_bam/AnySAMOutputFormat.java
@@ -45,7 +45,7 @@ public abstract class AnySAMOutputFormat<K>
* Creates a new output format, reading {@link #OUTPUT_SAM_FORMAT_PROPERTY}
* from the given <code>Configuration</code>.
*/
-protected AnySAMOutputFormat(Configuration conf) {
+protected AnySAMOutputFormat(final Configuration conf) {
final String fmtStr = conf.get(OUTPUT_SAM_FORMAT_PROPERTY);

format = fmtStr == null ? null : SAMFormat.valueOf(fmtStr);
@@ -54,7 +54,7 @@ protected AnySAMOutputFormat(Configuration conf) {
/**
* Creates a new output format for the given SAM format.
*/
-protected AnySAMOutputFormat(SAMFormat fmt) {
+protected AnySAMOutputFormat(final SAMFormat fmt) {
if (fmt == null) {
throw new IllegalArgumentException("null SAMFormat");
}
65 changes: 35 additions & 30 deletions src/main/java/org/seqdoop/hadoop_bam/BAMInputFormat.java
@@ -115,16 +115,16 @@ public class BAMInputFormat
* @param intervals the intervals to filter by
* @param <T> the {@link Locatable} type
*/
-public static <T extends Locatable> void setIntervals(Configuration conf,
-List<T> intervals) {
+public static <T extends Locatable> void setIntervals(final Configuration conf,
+final List<T> intervals) {
setTraversalParameters(conf, intervals, false);
}

/**
* Enables or disables the split calculator that uses the BAM index to calculate splits.
*/
-public static void setEnableBAISplitCalculator(Configuration conf,
-boolean setEnabled) {
+public static void setEnableBAISplitCalculator(final Configuration conf,
+final boolean setEnabled) {
conf.setBoolean(ENABLE_BAI_SPLIT_CALCULATOR, setEnabled);
}

@@ -139,8 +139,9 @@ public static void setEnableBAISplitCalculator(Configuration conf,
* @param traverseUnplacedUnmapped whether to include unplaced unmapped reads
* @param <T> the {@link Locatable} type
*/
-public static <T extends Locatable> void setTraversalParameters(Configuration conf,
-List<T> intervals, boolean traverseUnplacedUnmapped) {
+public static <T extends Locatable> void setTraversalParameters(final Configuration conf,
+final List<T> intervals,
+final boolean traverseUnplacedUnmapped) {
if (intervals == null && !traverseUnplacedUnmapped) {
throw new IllegalArgumentException("Traversing mapped reads only is not supported.");
}
@@ -164,22 +165,22 @@ public static <T extends Locatable> void setTraversalParameters(Configuration conf,
*
* @param conf the Hadoop configuration to set properties on
*/
-public static void unsetTraversalParameters(Configuration conf) {
+public static void unsetTraversalParameters(final Configuration conf) {
conf.unset(BOUNDED_TRAVERSAL_PROPERTY);
conf.unset(INTERVALS_PROPERTY);
conf.unset(TRAVERSE_UNPLACED_UNMAPPED_PROPERTY);
}

-static boolean isBoundedTraversal(Configuration conf) {
+static boolean isBoundedTraversal(final Configuration conf) {
return conf.getBoolean(BOUNDED_TRAVERSAL_PROPERTY, false) ||
conf.get(INTERVALS_PROPERTY) != null; // backwards compatibility
}

-static boolean traverseUnplacedUnmapped(Configuration conf) {
+static boolean traverseUnplacedUnmapped(final Configuration conf) {
return conf.getBoolean(TRAVERSE_UNPLACED_UNMAPPED_PROPERTY, false);
}

-static List<Interval> getIntervals(Configuration conf) {
+static List<Interval> getIntervals(final Configuration conf) {
String intervalsProperty = conf.get(INTERVALS_PROPERTY);
if (intervalsProperty == null) {
return null;
@@ -197,11 +198,11 @@ static List<Interval> getIntervals(Configuration conf) {
return intervals;
}

-static Path getIdxPath(Path path) {
+static Path getIdxPath(final Path path) {
return path.suffix(SplittingBAMIndexer.OUTPUT_FILE_EXTENSION);
}

-static List<InputSplit> removeIndexFiles(List<InputSplit> splits) {
+static List<InputSplit> removeIndexFiles(final List<InputSplit> splits) {
// Remove any splitting bai files
return splits.stream()
.filter(split -> !((FileSplit) split).getPath().getName().endsWith(
@@ -211,7 +212,7 @@ static List<InputSplit> removeIndexFiles(List<InputSplit> splits) {
.collect(Collectors.toList());
}

-static Path getBAIPath(Path path) {
+static Path getBAIPath(final Path path) {
return path.suffix(BAMIndex.BAMIndexSuffix);
}

@@ -220,7 +221,7 @@ static Path getBAIPath(Path path) {
*/
@Override
public RecordReader<LongWritable, SAMRecordWritable>
-createRecordReader(InputSplit split, TaskAttemptContext ctx)
+createRecordReader(final InputSplit split, final TaskAttemptContext ctx)
throws InterruptedException, IOException {
final RecordReader<LongWritable, SAMRecordWritable> rr =
new BAMRecordReader();
@@ -232,7 +233,7 @@ static Path getBAIPath(Path path) {
* The splits returned are {@link FileVirtualSplit FileVirtualSplits}.
*/
@Override
-public List<InputSplit> getSplits(JobContext job)
+public List<InputSplit> getSplits(final JobContext job)
throws IOException {
return getSplits(super.getSplits(job), job.getConfiguration());
}
@@ -281,9 +282,10 @@ public int compare(InputSplit a, InputSplit b) {

// Handles all the splits that share the Path of the one at index i,
// returning the next index to be used.
-private int addIndexedSplits(
-List<InputSplit> splits, int i, List<InputSplit> newSplits,
-Configuration cfg)
+private int addIndexedSplits(final List<InputSplit> splits,
+final int i,
+final List<InputSplit> newSplits,
+final Configuration cfg)
throws IOException {
final Path file = ((FileSplit) splits.get(i)).getPath();
List<InputSplit> potentialSplits = new ArrayList<InputSplit>();
@@ -341,10 +343,10 @@ private int addIndexedSplits(

// Handles all the splits that share the Path of the one at index i,
// returning the next index to be used.
-private int addBAISplits(List<InputSplit> splits,
-int i,
-List<InputSplit> newSplits,
-Configuration conf) throws IOException {
+private int addBAISplits(final List<InputSplit> splits,
+final int i,
+final List<InputSplit> newSplits,
+final Configuration conf) throws IOException {
final Path path = ((FileSplit) splits.get(i)).getPath();
FileSystem fs = path.getFileSystem(conf);
int splitsEnd = i;
@@ -493,9 +495,10 @@ private int addBAISplits(List<InputSplit> splits,

// Works the same way as addIndexedSplits, to avoid having to reopen the
// file repeatedly and checking addIndexedSplits for an index repeatedly.
-private int addProbabilisticSplits(
-List<InputSplit> splits, int i, List<InputSplit> newSplits,
-Configuration cfg)
+private int addProbabilisticSplits(final List<InputSplit> splits,
+int i,
+final List<InputSplit> newSplits,
+final Configuration cfg)
throws IOException {
final Path path = ((FileSplit) splits.get(i)).getPath();
final SeekableStream sin =
@@ -558,7 +561,8 @@ private int addProbabilisticSplits(
return i;
}

-private List<InputSplit> filterByInterval(List<InputSplit> splits, Configuration conf)
+private List<InputSplit> filterByInterval(final List<InputSplit> splits,
+final Configuration conf)
throws IOException {
if (!isBoundedTraversal(conf)) {
return splits;
@@ -669,8 +673,8 @@ else if (splitStart <= unmappedStart && unmappedStart <= splitEnd) {
* @param rawIntervals SimpleIntervals to be converted
* @return A sorted, merged list of QueryIntervals suitable for passing to the SamReader query API
*/
-static QueryInterval[] prepareQueryIntervals(final List<Interval>
-rawIntervals, final SAMSequenceDictionary sequenceDictionary) {
+static QueryInterval[] prepareQueryIntervals(final List<Interval> rawIntervals,
+final SAMSequenceDictionary sequenceDictionary) {
if (rawIntervals == null || rawIntervals.isEmpty()) {
return null;
}
@@ -694,7 +698,8 @@ static QueryInterval[] prepareQueryIntervals(final List<Interval>
* @param sequenceDictionary sequence dictionary used to perform the conversion
* @return an equivalent interval in QueryInterval format
*/
private static QueryInterval convertSimpleIntervalToQueryInterval(final Interval interval, final SAMSequenceDictionary sequenceDictionary) {
private static QueryInterval convertSimpleIntervalToQueryInterval(final Interval interval,
final SAMSequenceDictionary sequenceDictionary) {
if (interval == null) {
throw new IllegalArgumentException("interval may not be null");
}
@@ -712,7 +717,7 @@ private static QueryInterval convertSimpleIntervalToQueryInterval(final Interval
}

@Override
-public boolean isSplitable(JobContext job, Path path) {
+public boolean isSplitable(final JobContext job, final Path path) {
return true;
}
}
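The setIntervals / setTraversalParameters / unsetTraversalParameters methods touched above form the configuration-side API for bounded traversal. A minimal usage sketch, assuming htsjdk's Interval class (a Locatable, as the signatures above require) and illustrative contig names:

```java
import htsjdk.samtools.util.Interval;
import java.util.Arrays;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.seqdoop.hadoop_bam.BAMInputFormat;

public final class IntervalConfigExample {
    public static void main(final String[] args) {
        final Configuration conf = new Configuration();

        // htsjdk intervals are 1-based and closed; contig names are illustrative.
        final List<Interval> intervals = Arrays.asList(
            new Interval("chr1", 100000, 200000),
            new Interval("chr2", 1, 50000));

        // Restrict record readers to the given intervals only.
        BAMInputFormat.setIntervals(conf, intervals);

        // Alternatively, also traverse unplaced unmapped reads. Passing a
        // null interval list together with 'false' here throws
        // IllegalArgumentException, per the guard in setTraversalParameters.
        BAMInputFormat.setTraversalParameters(conf, intervals, true);

        // Remove all traversal properties again.
        BAMInputFormat.unsetTraversalParameters(conf);
    }
}
```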
11 changes: 6 additions & 5 deletions src/main/java/org/seqdoop/hadoop_bam/BAMRecordReader.java
@@ -111,19 +111,19 @@ public static long getKey(final SAMRecord rec) {
/**
* @param alignmentStart 1-based leftmost coordinate.
*/
-public static long getKey(int refIdx, int alignmentStart) {
+public static long getKey(final int refIdx, final int alignmentStart) {
return getKey0(refIdx, alignmentStart - 1);
}

/**
* @param alignmentStart0 0-based leftmost coordinate.
*/
-public static long getKey0(int refIdx, int alignmentStart0) {
+public static long getKey0(final int refIdx, final int alignmentStart0) {
return (long) refIdx << 32 | alignmentStart0;
}

@Override
-public void initialize(InputSplit spl, TaskAttemptContext ctx)
+public void initialize(final InputSplit spl, final TaskAttemptContext ctx)
throws IOException {
// This method should only be called once (see Hadoop API). However,
// there seems to be disagreement between implementations that call
@@ -188,8 +188,9 @@ else if (boundedTraversal && split.getIntervalFilePointers() == null) {
}
}

-private SamReader createSamReader(SeekableStream in, SeekableStream inIndex,
-ValidationStringency stringency) {
+private SamReader createSamReader(final SeekableStream in,
+final SeekableStream inIndex,
+final ValidationStringency stringency) {
SamReaderFactory readerFactory = SamReaderFactory.makeDefault()
.setOption(SamReaderFactory.Option.CACHE_FILE_BASED_INDEXES, true)
.setOption(SamReaderFactory.Option.EAGERLY_DECODE, false)
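The getKey/getKey0 pair above packs a reference index into the high 32 bits of a long and a 0-based alignment start into the low 32 bits, so sorting by key orders records by reference, then by coordinate. A small self-contained check of the expression from the diff (values are illustrative):

```java
public final class BamKeyExample {
    // Same expression as BAMRecordReader.getKey0 in the hunk above; assumes
    // a non-negative 0-based start (a negative int would sign-extend).
    static long getKey0(final int refIdx, final int alignmentStart0) {
        return (long) refIdx << 32 | alignmentStart0;
    }

    public static void main(final String[] args) {
        final long key = getKey0(2, 1000);      // 2 * 2^32 + 1000
        System.out.println(key);                // 8589935592
        System.out.println(key >>> 32);         // 2: the reference index
        System.out.println(key & 0xffffffffL);  // 1000: the 0-based start
    }
}
```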
32 changes: 19 additions & 13 deletions src/main/java/org/seqdoop/hadoop_bam/BAMRecordWriter.java
@@ -57,8 +57,10 @@ public abstract class BAMRecordWriter<K>
/**
* A SAMFileHeader is read from the input Path.
*/
-public BAMRecordWriter(
-Path output, Path input, boolean writeHeader, TaskAttemptContext ctx)
+public BAMRecordWriter(final Path output,
+final Path input,
+final boolean writeHeader,
+final TaskAttemptContext ctx)
throws IOException {
init(
output,
@@ -72,9 +74,10 @@ public BAMRecordWriter(
}
}

-public BAMRecordWriter(
-Path output, SAMFileHeader header, boolean writeHeader,
-TaskAttemptContext ctx)
+public BAMRecordWriter(final Path output,
+final SAMFileHeader header,
+final boolean writeHeader,
+final TaskAttemptContext ctx)
throws IOException {
init(
output.getFileSystem(ctx.getConfiguration()).create(output),
@@ -87,25 +90,28 @@ public BAMRecordWriter(
}
}

-public BAMRecordWriter(
-OutputStream output, SAMFileHeader header, boolean writeHeader)
+public BAMRecordWriter(final OutputStream output,
+final SAMFileHeader header,
+final boolean writeHeader)
throws IOException {
init(output, header, writeHeader);
}

// Working around not being able to call a constructor other than as the
// first statement...
-private void init(
-Path output, SAMFileHeader header, boolean writeHeader,
-TaskAttemptContext ctx)
+private void init(final Path output,
+final SAMFileHeader header,
+final boolean writeHeader,
+final TaskAttemptContext ctx)
throws IOException {
init(
output.getFileSystem(ctx.getConfiguration()).create(output),
header, writeHeader);
}

-private void init(
-OutputStream output, SAMFileHeader header, boolean writeHeader)
+private void init(final OutputStream output,
+final SAMFileHeader header,
+final boolean writeHeader)
throws IOException {
origOutput = output;

@@ -121,7 +127,7 @@ private void init(
}

@Override
-public void close(TaskAttemptContext ctx) throws IOException {
+public void close(final TaskAttemptContext ctx) throws IOException {
// Don't close the codec, we don't want BlockCompressedOutputStream's
// file terminator to be output. But do flush the stream.
binaryCodec.getOutputStream().flush();
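The "working around not being able to call a constructor other than as the first statement" comment above refers to the Java rule that this(...) or super(...) must be a constructor's first statement: a constructor that needs several setup statements before the shared logic cannot chain to another constructor, so the shared logic lives in a private init method. A generic sketch of the idiom under hypothetical names (the real class reads a SAMFileHeader from the input path instead):

```java
import java.io.IOException;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;

// Hypothetical names throughout; only the shape of the idiom matches.
public final class InitIdiomExample {
    private OutputStream out;
    private boolean wroteHeader;

    public InitIdiomExample(final Path output, final Path headerSource)
            throws IOException {
        // Several dependent statements must run before the shared setup;
        // Java only allows this(...) as the FIRST statement, so this
        // constructor cannot chain to the one below directly.
        final byte[] header = Files.readAllBytes(headerSource);
        final OutputStream stream = Files.newOutputStream(output);
        stream.write(header);
        init(stream, true);
    }

    public InitIdiomExample(final OutputStream output) {
        init(output, false);
    }

    // The shared "constructor body" both constructors funnel into.
    private void init(final OutputStream output, final boolean wroteHeader) {
        this.out = output;
        this.wroteHeader = wroteHeader;
    }
}
```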