Skip to content

Commit

Permalink
Support validation stringency in output formatters.
Browse files Browse the repository at this point in the history
  • Loading branch information
heuermh committed Apr 19, 2018
1 parent 2b4a3d2 commit 4ea7617
Show file tree
Hide file tree
Showing 6 changed files with 21 additions and 11 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -66,10 +66,12 @@ class BcftoolsNormFnArgs extends Args4jBase {
* for use in cannoli-shell or notebooks.
*
* @param args Bcftools norm function arguments.
* @param stringency Validation stringency. Defaults to ValidationStringency.LENIENT.
* @param sc Spark context.
*/
class BcftoolsNormFn(
val args: BcftoolsNormFnArgs,
val stringency: ValidationStringency = ValidationStringency.LENIENT,
sc: SparkContext) extends CannoliFn[VariantContextRDD, VariantContextRDD](sc) with Logging {

override def apply(variantContexts: VariantContextRDD): VariantContextRDD = {
Expand All @@ -96,7 +98,7 @@ class BcftoolsNormFn(
variantContexts, builder.build(), builder.getFiles())

implicit val tFormatter = VCFInFormatter
implicit val uFormatter = new VCFOutFormatter(sc.hadoopConfiguration)
implicit val uFormatter = new VCFOutFormatter(sc.hadoopConfiguration, stringency)

variantContexts.pipe[VariantContext, VariantContextProduct, VariantContextRDD, VCFInFormatter](
cmd = builder.build(),
Expand Down Expand Up @@ -149,7 +151,7 @@ class BcftoolsNorm(protected val args: BcftoolsNormArgs) extends BDGSparkCommand

def run(sc: SparkContext) {
val variantContexts = sc.loadVcf(args.inputPath, stringency = stringency)
val pipedVariantContexts = new BcftoolsNormFn(args, sc).apply(variantContexts)
val pipedVariantContexts = new BcftoolsNormFn(args, stringency, sc).apply(variantContexts)
pipedVariantContexts.saveAsVcf(args, stringency)
}
}
6 changes: 4 additions & 2 deletions cli/src/main/scala/org/bdgenomics/cannoli/cli/Freebayes.scala
Original file line number Diff line number Diff line change
Expand Up @@ -72,10 +72,12 @@ class FreebayesFnArgs extends Args4jBase {
* for use in cannoli-shell or notebooks.
*
* @param args Freebayes function arguments.
* @param stringency Validation stringency. Defaults to ValidationStringency.LENIENT.
* @param sc Spark context.
*/
class FreebayesFn(
val args: FreebayesFnArgs,
val stringency: ValidationStringency = ValidationStringency.LENIENT,
sc: SparkContext) extends CannoliFn[AlignmentRecordRDD, VariantContextRDD](sc) with Logging {

override def apply(alignments: AlignmentRecordRDD): VariantContextRDD = {
Expand Down Expand Up @@ -110,7 +112,7 @@ class FreebayesFn(
val accumulator: CollectionAccumulator[VCFHeaderLine] = sc.collectionAccumulator("headerLines")

implicit val tFormatter = BAMInFormatter
implicit val uFormatter = new VCFOutFormatter(sc.hadoopConfiguration, Some(accumulator))
implicit val uFormatter = new VCFOutFormatter(sc.hadoopConfiguration, stringency, Some(accumulator))

val variantContexts = alignments.pipe[VariantContext, VariantContextProduct, VariantContextRDD, BAMInFormatter](
cmd = builder.build(),
Expand Down Expand Up @@ -166,7 +168,7 @@ class Freebayes(protected val args: FreebayesArgs) extends BDGSparkCommand[Freeb

def run(sc: SparkContext) {
val alignments = sc.loadAlignments(args.inputPath, stringency = stringency)
val variantContexts = new FreebayesFn(args, sc).apply(alignments)
val variantContexts = new FreebayesFn(args, stringency, sc).apply(alignments)

if (isVcfExt(args.outputPath)) {
variantContexts.saveAsVcf(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -64,10 +64,12 @@ class SamtoolsMpileupFnArgs extends Args4jBase {
* for use in cannoli-shell or notebooks.
*
* @param args Samtools mpileup function arguments.
* @param stringency Validation stringency. Defaults to ValidationStringency.LENIENT.
* @param sc Spark context.
*/
class SamtoolsMpileupFn(
val args: SamtoolsMpileupFnArgs,
val stringency: ValidationStringency = ValidationStringency.LENIENT,
sc: SparkContext) extends CannoliFn[AlignmentRecordRDD, VariantContextRDD](sc) with Logging {

override def apply(alignments: AlignmentRecordRDD): VariantContextRDD = {
Expand Down Expand Up @@ -97,7 +99,7 @@ class SamtoolsMpileupFn(
alignments, builder.build(), builder.getFiles())

implicit val tFormatter = BAMInFormatter
implicit val uFormatter = new VCFOutFormatter(sc.hadoopConfiguration)
implicit val uFormatter = new VCFOutFormatter(sc.hadoopConfiguration, stringency)

alignments.pipe[VariantContext, VariantContextProduct, VariantContextRDD, BAMInFormatter](
cmd = builder.build(),
Expand Down Expand Up @@ -150,7 +152,7 @@ class SamtoolsMpileup(protected val args: SamtoolsMpileupArgs) extends BDGSparkC

def run(sc: SparkContext) {
val alignments = sc.loadAlignments(args.inputPath, stringency = stringency)
val variantContexts = new SamtoolsMpileupFn(args, sc).apply(alignments)
val variantContexts = new SamtoolsMpileupFn(args, stringency, sc).apply(alignments)

if (isVcfExt(args.outputPath)) {
variantContexts.saveAsVcf(
Expand Down
6 changes: 4 additions & 2 deletions cli/src/main/scala/org/bdgenomics/cannoli/cli/SnpEff.scala
Original file line number Diff line number Diff line change
Expand Up @@ -62,10 +62,12 @@ class SnpEffFnArgs extends Args4jBase {
* for use in cannoli-shell or notebooks.
*
* @param args SnpEff function arguments.
* @param stringency Validation stringency. Defaults to ValidationStringency.LENIENT.
* @param sc Spark context.
*/
class SnpEffFn(
val args: SnpEffFnArgs,
val stringency: ValidationStringency = ValidationStringency.LENIENT,
sc: SparkContext) extends CannoliFn[VariantContextRDD, VariantContextRDD](sc) with Logging {

override def apply(variantContexts: VariantContextRDD): VariantContextRDD = {
Expand All @@ -85,7 +87,7 @@ class SnpEffFn(
variantContexts, builder.build(), builder.getFiles())

implicit val tFormatter = VCFInFormatter
implicit val uFormatter = new VCFOutFormatter(sc.hadoopConfiguration)
implicit val uFormatter = new VCFOutFormatter(sc.hadoopConfiguration, stringency)

variantContexts.pipe[VariantContext, VariantContextProduct, VariantContextRDD, VCFInFormatter](
cmd = builder.build(),
Expand Down Expand Up @@ -138,7 +140,7 @@ class SnpEff(protected val args: SnpEffArgs) extends BDGSparkCommand[SnpEffArgs]

def run(sc: SparkContext) {
val variantContexts = sc.loadVcf(args.inputPath, stringency = stringency)
val pipedVariantContexts = new SnpEffFn(args, sc).apply(variantContexts)
val pipedVariantContexts = new SnpEffFn(args, stringency, sc).apply(variantContexts)
pipedVariantContexts.saveAsVcf(args, stringency)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -69,10 +69,12 @@ class VtNormalizeFnArgs extends Args4jBase {
* for use in cannoli-shell or notebooks.
*
* @param args Vt normalize function arguments.
* @param stringency Validation stringency. Defaults to ValidationStringency.LENIENT.
* @param sc Spark context.
*/
class VtNormalizeFn(
val args: VtNormalizeFnArgs,
val stringency: ValidationStringency = ValidationStringency.LENIENT,
sc: SparkContext) extends CannoliFn[VariantContextRDD, VariantContextRDD](sc) with Logging {

override def apply(variantContexts: VariantContextRDD): VariantContextRDD = {
Expand All @@ -99,7 +101,7 @@ class VtNormalizeFn(
variantContexts, builder.build(), builder.getFiles())

implicit val tFormatter = VCFInFormatter
implicit val uFormatter = new VCFOutFormatter(sc.hadoopConfiguration)
implicit val uFormatter = new VCFOutFormatter(sc.hadoopConfiguration, stringency)

variantContexts.pipe[VariantContext, VariantContextProduct, VariantContextRDD, VCFInFormatter](
cmd = builder.build(),
Expand Down Expand Up @@ -152,7 +154,7 @@ class VtNormalize(protected val args: VtNormalizeArgs) extends BDGSparkCommand[V

def run(sc: SparkContext) {
val variantContexts = sc.loadVcf(args.inputPath, stringency = stringency)
val pipedVariantContexts = new VtNormalizeFn(args, sc).apply(variantContexts)
val pipedVariantContexts = new VtNormalizeFn(args, stringency, sc).apply(variantContexts)
pipedVariantContexts.saveAsVcf(args, stringency)
}
}
2 changes: 1 addition & 1 deletion pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@
<inceptionYear>2017</inceptionYear>

<properties>
<adam.version>0.24.0</adam.version>
<adam.version>0.25.0-SNAPSHOT</adam.version>
<scala.version>2.11.12</scala.version>
<scala.version.prefix>2.11</scala.version.prefix>
<spark.version>2.2.1</spark.version>
Expand Down

0 comments on commit 4ea7617

Please sign in to comment.