diff --git a/src/derive/command/junction_annotation.rs b/src/derive/command/junction_annotation.rs
index 39809f8..811dd1a 100644
--- a/src/derive/command/junction_annotation.rs
+++ b/src/derive/command/junction_annotation.rs
@@ -5,6 +5,7 @@ use std::path::PathBuf;
 
 use anyhow::Context;
 use clap::Args;
+use noodles::sam::record::MappingQuality;
 use num_format::Locale;
 use num_format::ToFormattedString;
 use tracing::debug;
@@ -45,7 +46,7 @@ pub struct JunctionAnnotationArgs {
     /// Set to 0 to disable this filter and allow reads _without_
     /// a mapping quality to be considered.
     #[arg(short, long, value_name = "U8", default_value = "30")]
-    min_mapq: u8,
+    min_mapq: Option<MappingQuality>,
 
     /// Do not count supplementary alignments.
     #[arg(long)]
@@ -95,6 +96,7 @@ pub fn derive(args: JunctionAnnotationArgs) -> anyhow::Result<()> {
 
     debug!("Done reading GFF.");
 
+    // (1.5) Initialize variables (including opening the BAM).
     let mut counter = RecordCounter::new();
     let mut results = JunctionAnnotationResults::default();
     let params = compute::JunctionAnnotationParameters {
diff --git a/src/derive/command/strandedness.rs b/src/derive/command/strandedness.rs
index 89f7394..3fe1c6a 100644
--- a/src/derive/command/strandedness.rs
+++ b/src/derive/command/strandedness.rs
@@ -10,6 +10,7 @@ use anyhow::Context;
 use clap::Args;
 use noodles::bam;
 use noodles::gff;
+use noodles::sam::record::MappingQuality;
 use rust_lapper::{Interval, Lapper};
 use tracing::debug;
 use tracing::info;
@@ -17,6 +18,7 @@ use tracing::info;
 use crate::derive::strandedness::compute;
 use crate::derive::strandedness::compute::ParsedBAMFile;
 use crate::derive::strandedness::results;
+use crate::utils::args::parse_min_mapq;
 use crate::utils::formats;
 
 /// Clap arguments for the `ngs derive strandedness` subcommand.
@@ -45,10 +47,10 @@ pub struct DeriveStrandednessArgs {
     num_genes: usize,
 
     /// Minimum mapping quality for a record to be considered.
-    /// Set to 0 to disable this filter and allow reads _without_
-    /// a mapping quality to be considered.
-    #[arg(long, value_name = "U8", default_value = "30")]
-    min_mapq: u8,
+    /// Default is to ignore MAPQ values (allowing reads _without_ MAPQs to be considered).
+    /// Specify any u8 value to enable this filter.
+    #[arg(long, value_name = "U8", value_parser = parse_min_mapq)]
+    min_mapq: Option<MappingQuality>,
 
     /// Consider all genes, not just protein coding genes.
     #[arg(long)]
@@ -88,7 +90,7 @@ pub struct DeriveStrandednessArgs {
 pub fn derive(args: DeriveStrandednessArgs) -> anyhow::Result<()> {
     info!("Starting derive strandedness subcommand.");
 
-    // (1) Parse the GFF file and collect all gene features.
+    // (1) Parse the GFF file and collect all gene and exon features.
     debug!("Reading all records in GFF.");
     let mut gff = formats::gff::open(&args.features_gff)
         .with_context(|| format!("opening GFF file: {}", args.features_gff.display()))?;
@@ -130,9 +132,6 @@ pub fn derive(args: DeriveStrandednessArgs) -> anyhow::Result<()> {
             exon_records.push(record);
         }
     }
-
-    debug!("Tabulating GFF gene and exon features.");
-
     if gene_records.is_empty() {
         bail!("No gene records matched criteria. Check your GFF file and `--gene-feature-name` and `--all-genes` options.");
     }
@@ -145,6 +144,9 @@ pub fn derive(args: DeriveStrandednessArgs) -> anyhow::Result<()> {
         "Found {} exon records.",
         exon_records.len()
     );
 
+    // (2) Parse exon features into proper data structure.
+ debug!("Tabulating GFF exon features."); + let mut exon_intervals: HashMap<&str, Vec>> = HashMap::new(); for record in &exon_records { @@ -180,6 +182,7 @@ pub fn derive(args: DeriveStrandednessArgs) -> anyhow::Result<()> { debug!("Done reading GFF."); + // (3) Initialize variables (including opening the BAM). let mut reader = File::open(&args.src) .map(bam::Reader::new) .with_context(|| format!("opening BAM file: {}", args.src.display()))?; @@ -219,8 +222,9 @@ pub fn derive(args: DeriveStrandednessArgs) -> anyhow::Result<()> { exons: exon_metrics, reads: results::ReadRecordMetrics::default(), }; - let mut result: Option = None; + + // (4) Run the strandedness test. for try_num in 1..=args.max_tries { info!("Starting try {} of {}", try_num, args.max_tries); @@ -247,7 +251,7 @@ pub fn derive(args: DeriveStrandednessArgs) -> anyhow::Result<()> { info!("Strandedness test failed after {} tries.", args.max_tries); } - // (4) Print the output to stdout as JSON (more support for different output + // (5) Print the output to stdout as JSON (more support for different output // types may be added in the future, but for now, only JSON). let output = serde_json::to_string_pretty(&result).unwrap(); print!("{}", output); diff --git a/src/derive/junction_annotation/compute.rs b/src/derive/junction_annotation/compute.rs index dbfed06..6d71acd 100644 --- a/src/derive/junction_annotation/compute.rs +++ b/src/derive/junction_annotation/compute.rs @@ -5,11 +5,13 @@ use anyhow::Ok; use noodles::core::Position; use noodles::sam::alignment::Record; use noodles::sam::record::cigar::op::Kind; +use noodles::sam::record::MappingQuality; use noodles::sam::Header; use std::collections::HashMap; use std::collections::HashSet; use crate::derive::junction_annotation::results; +use crate::utils::alignment::filter_by_mapq; /// Struct to hold starts and ends of exons. pub struct ExonSets<'a> { @@ -30,7 +32,7 @@ pub struct JunctionAnnotationParameters { /// Minumum mapping quality for a record to be considered. /// 0 if MAPQ shouldn't be considered. - pub min_mapq: u8, + pub min_mapq: Option, /// Do not count supplementary alignments. pub no_supplementary: bool, @@ -65,6 +67,27 @@ fn increment_junction_map( ); } +/// Function to filter out records based on their flags. +fn filter_by_flags(record: &Record, params: &JunctionAnnotationParameters) -> bool { + let flags = record.flags(); + if flags.is_unmapped() + || (params.no_supplementary && flags.is_supplementary()) + || (!params.count_secondary && flags.is_secondary()) + || (!params.count_duplicates && flags.is_duplicate()) + { + return true; + } + false +} + +/// Function to filter out records that don't have introns. +fn filter_by_cigar(record: &Record) -> bool { + !record + .cigar() + .iter() + .any(|op| matches!(op.kind(), Kind::Skip)) +} + /// Main function to annotate junctions one record at a time. pub fn process( record: &Record, @@ -79,42 +102,24 @@ pub fn process( _ => bail!("Could not parse read name"), }; - // (2) Parse the flags so we can see if the read should be ignored. - let flags = record.flags(); - - if flags.is_unmapped() - || (params.no_supplementary && flags.is_supplementary()) - || (!params.count_secondary && flags.is_secondary()) - || (!params.count_duplicates && flags.is_duplicate()) - { + // (2) Filter by record flags. + if filter_by_flags(record, params) { results.records.filtered_by_flags += 1; return Ok(()); } - // (3) Parse the CIGAR string from the record. + // (3) Filter by CIGAR. 
     // We only care about reads with introns, so if there are no introns
     // we can skip this read.
-    let cigar = record.cigar();
-    if !cigar.iter().any(|op| matches!(op.kind(), Kind::Skip)) {
+    if filter_by_cigar(record) {
         results.records.not_spliced += 1;
         return Ok(());
     }
 
-    // (4) If the user is filtering by MAPQ, check if this read passes.
-    // Log if the read is filtered out for a too low MAPQ or a missing MAPQ.
-    if params.min_mapq > 0 {
-        match record.mapping_quality() {
-            Some(mapq) => {
-                if mapq.get() < params.min_mapq {
-                    results.records.low_mapq += 1;
-                    return Ok(());
-                }
-            }
-            None => {
-                results.records.missing_mapq += 1;
-                return Ok(());
-            }
-        }
+    // (4) Filter by MAPQ
+    if filter_by_mapq(record, params.min_mapq) {
+        results.records.bad_mapq += 1;
+        return Ok(());
     }
 
     // (5) Parse the reference sequence from the record.
@@ -144,7 +149,7 @@ pub fn process(
 
     // (8) Find introns
     let cur_pos = start;
-    for op in cigar.iter() {
+    for op in record.cigar().iter() {
         match op.kind() {
             // This is an intron.
             Kind::Skip => {
@@ -355,6 +360,103 @@ mod tests {
     use noodles::sam::record::ReadName;
     use std::num::NonZeroUsize;
 
+    #[test]
+    fn test_filter_by_flags() {
+        // Setup
+        let mut record = Record::default();
+        let params = JunctionAnnotationParameters {
+            min_intron_length: 10,
+            min_read_support: 2,
+            min_mapq: Some(MappingQuality::new(30).unwrap()),
+            no_supplementary: false,
+            count_secondary: false,
+            count_duplicates: false,
+        };
+
+        // Test that records are filtered out correctly
+        record.flags_mut().set(0x4.into(), true);
+        assert!(filter_by_flags(&record, &params));
+        record.flags_mut().set(0x4.into(), false);
+        record.flags_mut().set(0x800.into(), true);
+        assert!(!filter_by_flags(&record, &params));
+        record.flags_mut().set(0x800.into(), false);
+        record.flags_mut().set(0x100.into(), true);
+        assert!(filter_by_flags(&record, &params));
+        record.flags_mut().set(0x100.into(), false);
+        record.flags_mut().set(0x400.into(), true);
+        assert!(filter_by_flags(&record, &params));
+        record.flags_mut().set(0x400.into(), false);
+        assert!(!filter_by_flags(&record, &params));
+    }
+
+    #[test]
+    fn test_filter_by_cigar() {
+        // Setup
+        let mut record = Record::default();
+
+        // Test that records are filtered out correctly
+        *record.cigar_mut() = "10M10N10M".parse().unwrap();
+        assert!(!filter_by_cigar(&record));
+        *record.cigar_mut() = "10M".parse().unwrap();
+        assert!(filter_by_cigar(&record));
+    }
+
+    #[test]
+    fn test_filter_junction_map() {
+        // Setup
+        let mut junction_map = results::JunctionsMap::default();
+        junction_map.insert(
+            "sq1".to_string(),
+            HashMap::from([
+                ((Position::new(1).unwrap(), Position::new(11).unwrap()), 1),
+                ((Position::new(1).unwrap(), Position::new(5).unwrap()), 1),
+            ]),
+        );
+        junction_map.insert(
+            "sq2".to_string(),
+            HashMap::from([((Position::new(1).unwrap(), Position::new(11).unwrap()), 2)]),
+        );
+        let min_intron_length = 10;
+        let min_read_support = 2;
+        let mut metrics = results::SummaryResults::default();
+
+        // Test that junctions are filtered out correctly
+        filter_junction_map(
+            &mut junction_map,
+            min_intron_length,
+            min_read_support,
+            &mut metrics,
+        );
+        assert_eq!(junction_map.len(), 1);
+        assert_eq!(junction_map.get("sq1"), None);
+        assert_eq!(junction_map.get("sq2").unwrap().len(), 1);
+        assert_eq!(metrics.intron_too_short, 1);
+        assert_eq!(metrics.junctions_with_not_enough_read_support, 2);
+        assert_eq!(metrics.total_rejected_junctions, 2);
+    }
+
+    #[test]
+    fn test_tally_junctions_and_support() {
+        // Setup
+        let mut junction_map = results::JunctionsMap::default();
+        junction_map.insert(
+            "sq1".to_string(),
+            HashMap::from([
+                ((Position::new(1).unwrap(), Position::new(11).unwrap()), 1),
+                ((Position::new(1).unwrap(), Position::new(5).unwrap()), 1),
+            ]),
+        );
+        junction_map.insert(
+            "sq2".to_string(),
+            HashMap::from([((Position::new(1).unwrap(), Position::new(11).unwrap()), 2)]),
+        );
+
+        // Test that junctions are tallied correctly
+        let (juncs, support) = tally_junctions_and_support(&junction_map);
+        assert_eq!(juncs, 3);
+        assert_eq!(support, 4);
+    }
+
     #[test]
     fn test_process_and_summarize() {
         // Setup
@@ -362,7 +464,7 @@ mod tests {
         let params = JunctionAnnotationParameters {
             min_intron_length: 10,
             min_read_support: 2,
-            min_mapq: 30,
+            min_mapq: Some(MappingQuality::new(30).unwrap()),
             no_supplementary: false,
             count_secondary: false,
             count_duplicates: false,
@@ -413,8 +515,7 @@ mod tests {
         assert_eq!(results.records.processed, 1);
         assert_eq!(results.records.filtered_by_flags, 0);
         assert_eq!(results.records.not_spliced, 0);
-        assert_eq!(results.records.low_mapq, 0);
-        assert_eq!(results.records.missing_mapq, 0);
+        assert_eq!(results.records.bad_mapq, 0);
 
         // Test that unmapped gets ignored
         let mut record = Record::default();
@@ -429,8 +530,7 @@ mod tests {
         assert_eq!(results.records.processed, 1);
         assert_eq!(results.records.filtered_by_flags, 1);
         assert_eq!(results.records.not_spliced, 0);
-        assert_eq!(results.records.low_mapq, 0);
-        assert_eq!(results.records.missing_mapq, 0);
+        assert_eq!(results.records.bad_mapq, 0);
 
         // Test partial novel junction
         let mut record = Record::default();
@@ -445,8 +545,7 @@ mod tests {
         assert_eq!(results.records.processed, 2);
         assert_eq!(results.records.filtered_by_flags, 1);
         assert_eq!(results.records.not_spliced, 0);
-        assert_eq!(results.records.low_mapq, 0);
-        assert_eq!(results.records.missing_mapq, 0);
+        assert_eq!(results.records.bad_mapq, 0);
 
         // Test partial novel junction (again for more read support)
         let mut record = Record::default();
@@ -461,8 +560,7 @@ mod tests {
         assert_eq!(results.records.processed, 3);
         assert_eq!(results.records.filtered_by_flags, 1);
         assert_eq!(results.records.not_spliced, 0);
-        assert_eq!(results.records.low_mapq, 0);
-        assert_eq!(results.records.missing_mapq, 0);
+        assert_eq!(results.records.bad_mapq, 0);
 
         // Test that supplementary alignments get counted
         let mut record = Record::default();
@@ -478,8 +576,7 @@ mod tests {
         assert_eq!(results.records.processed, 4);
         assert_eq!(results.records.filtered_by_flags, 1);
         assert_eq!(results.records.not_spliced, 0);
-        assert_eq!(results.records.low_mapq, 0);
-        assert_eq!(results.records.missing_mapq, 0);
+        assert_eq!(results.records.bad_mapq, 0);
 
         // Test that secondary alignments don't get counted
         let mut record = Record::default();
@@ -495,8 +592,7 @@ mod tests {
         assert_eq!(results.records.processed, 4);
         assert_eq!(results.records.filtered_by_flags, 2);
         assert_eq!(results.records.not_spliced, 0);
-        assert_eq!(results.records.low_mapq, 0);
-        assert_eq!(results.records.missing_mapq, 0);
+        assert_eq!(results.records.bad_mapq, 0);
 
         // TODO: Below tests are not working as expected. Need to fix them.
         // Test complete novel junction
diff --git a/src/derive/junction_annotation/results.rs b/src/derive/junction_annotation/results.rs
index 912acc5..a3c945e 100644
--- a/src/derive/junction_annotation/results.rs
+++ b/src/derive/junction_annotation/results.rs
@@ -100,10 +100,8 @@ pub struct RecordMetrics {
 
     /// The number of records with junctions that have been ignored because
     /// they failed the MAPQ filter.
-    pub low_mapq: usize,
-
-    /// The number of records whose MAPQ couldn't be parsed and were thus ignored.
-    pub missing_mapq: usize,
+    /// This could either mean the MAPQ was too low or it was missing.
+    pub bad_mapq: usize,
 }
 
 /// Summary statistics for the junction-annotation subcommand.
diff --git a/src/derive/strandedness/compute.rs b/src/derive/strandedness/compute.rs
index 53b471c..a939065 100644
--- a/src/derive/strandedness/compute.rs
+++ b/src/derive/strandedness/compute.rs
@@ -5,6 +5,7 @@ use noodles::core::Region;
 use noodles::gff;
 use noodles::sam;
 use noodles::sam::record::data::field::Tag;
+use noodles::sam::record::MappingQuality;
 use rand::Rng;
 use rust_lapper::Lapper;
 use std::collections::HashMap;
@@ -12,6 +13,7 @@ use std::collections::HashSet;
 use std::sync::Arc;
 
 use crate::derive::strandedness::results;
+use crate::utils::alignment::filter_by_mapq;
 use crate::utils::read_groups::{validate_read_group_info, UNKNOWN_READ_GROUP};
 
 const STRANDED_THRESHOLD: f64 = 80.0;
@@ -27,6 +29,7 @@ pub struct Counts {
     reverse: usize,
 }
 
+/// Enum for tracking possible strand orientations.
 #[derive(Clone, Copy, Debug)]
 enum Strand {
     Forward,
@@ -55,6 +58,7 @@ impl TryFrom<gff::record::Strand> for Strand {
     }
 }
 
+/// Enum for tracking the order of segments in a record.
 #[derive(Clone, Copy, Debug)]
 enum SegmentOrder {
     First,
@@ -113,7 +117,7 @@ pub struct StrandednessParams {
 
     /// Minumum mapping quality for a record to be considered.
     /// 0 if MAPQ shouldn't be considered.
-    pub min_mapq: u8,
+    pub min_mapq: Option<MappingQuality>,
 
     /// Allow qc failed reads to be counted.
     pub count_qc_failed: bool,
@@ -128,6 +132,7 @@ pub struct StrandednessParams {
     pub count_duplicates: bool,
 }
 
+/// Function to disqualify a gene based on its strand and exons.
 fn disqualify_gene(
     gene: &gff::Record,
     exons: &HashMap<&str, Lapper<usize, gff::record::Strand>>,
@@ -155,6 +160,21 @@ fn disqualify_gene(
     true
 }
 
+/// Function to filter out records based on their flags.
+fn filter_by_flags(record: &sam::alignment::Record, params: &StrandednessParams) -> bool {
+    let flags = record.flags();
+    if (!params.count_qc_failed && flags.is_qc_fail())
+        || (params.no_supplementary && flags.is_supplementary())
+        || (!params.count_secondary && flags.is_secondary())
+        || (!params.count_duplicates && flags.is_duplicate())
+    {
+        return true;
+    }
+    false
+}
+
+/// Function to query the BAM file and filter the records based on the
+/// parameters provided.
 fn query_and_filter(
     parsed_bam: &mut ParsedBAMFile,
     gene: &gff::Record,
@@ -174,31 +194,16 @@ fn query_and_filter(
     for read in query {
         let read = read.unwrap();
 
-        // (1) Parse the flags so we can see if the read should be discarded.
-        let flags = read.flags();
-        if (!params.count_qc_failed && flags.is_qc_fail())
-            || (params.no_supplementary && flags.is_supplementary())
-            || (!params.count_secondary && flags.is_secondary())
-            || (!params.count_duplicates && flags.is_duplicate())
-        {
+        // (1) Filter by flags.
+        if filter_by_flags(&read, params) {
             read_metrics.filtered_by_flags += 1;
             continue;
         }
 
-        // (2) If the user is filtering by MAPQ, check if this read passes.
-        if params.min_mapq > 0 {
-            match read.mapping_quality() {
-                Some(mapq) => {
-                    if mapq.get() < params.min_mapq {
-                        read_metrics.low_mapq += 1;
-                        continue;
-                    }
-                }
-                None => {
-                    read_metrics.missing_mapq += 1;
-                    continue;
-                }
-            }
+        // (2) Filter by MAPQ.
+        if filter_by_mapq(&read, params.min_mapq) {
+            read_metrics.bad_mapq += 1;
+            continue;
         }
 
         filtered_reads.push(read);
@@ -211,6 +216,7 @@ fn query_and_filter(
     filtered_reads
 }
 
+/// Function to classify a read based on its strand and the strand of the gene.
 fn classify_read(
     read: &sam::alignment::Record,
     gene_strand: &Strand,
@@ -475,8 +481,7 @@ mod tests {
         assert_eq!(read_metrics.paired_end_reads, 0);
         assert_eq!(read_metrics.single_end_reads, 1);
        assert_eq!(read_metrics.filtered_by_flags, 0);
-        assert_eq!(read_metrics.low_mapq, 0);
-        assert_eq!(read_metrics.missing_mapq, 0);
+        assert_eq!(read_metrics.bad_mapq, 0);
         let counts = all_counts.counts.get(&counts_key).unwrap();
         assert_eq!(counts.forward, 1);
         assert_eq!(counts.reverse, 0);
@@ -490,8 +495,7 @@ mod tests {
         assert_eq!(read_metrics.paired_end_reads, 1);
         assert_eq!(read_metrics.single_end_reads, 1);
         assert_eq!(read_metrics.filtered_by_flags, 0);
-        assert_eq!(read_metrics.low_mapq, 0);
-        assert_eq!(read_metrics.missing_mapq, 0);
+        assert_eq!(read_metrics.bad_mapq, 0);
         let counts = all_counts.counts.get(&counts_key).unwrap();
         assert_eq!(counts.forward, 2);
         assert_eq!(counts.reverse, 0);
@@ -505,8 +509,7 @@ mod tests {
         assert_eq!(read_metrics.paired_end_reads, 2);
         assert_eq!(read_metrics.single_end_reads, 1);
         assert_eq!(read_metrics.filtered_by_flags, 0);
-        assert_eq!(read_metrics.low_mapq, 0);
-        assert_eq!(read_metrics.missing_mapq, 0);
+        assert_eq!(read_metrics.bad_mapq, 0);
         let counts = all_counts.counts.get(&counts_key).unwrap();
         assert_eq!(counts.forward, 3);
         assert_eq!(counts.reverse, 0);
@@ -520,8 +523,7 @@ mod tests {
         assert_eq!(read_metrics.paired_end_reads, 3);
         assert_eq!(read_metrics.single_end_reads, 1);
         assert_eq!(read_metrics.filtered_by_flags, 0);
-        assert_eq!(read_metrics.low_mapq, 0);
-        assert_eq!(read_metrics.missing_mapq, 0);
+        assert_eq!(read_metrics.bad_mapq, 0);
         let counts = all_counts.counts.get(&counts_key).unwrap();
         assert_eq!(counts.forward, 3);
         assert_eq!(counts.reverse, 1);
diff --git a/src/derive/strandedness/results.rs b/src/derive/strandedness/results.rs
index abe9d99..3b94963 100644
--- a/src/derive/strandedness/results.rs
+++ b/src/derive/strandedness/results.rs
@@ -11,12 +11,8 @@ pub struct ReadRecordMetrics {
     /// These conditions can be toggled on/off with CL flags
     pub filtered_by_flags: usize,
 
-    /// The number of records that have been filtered because
-    /// they failed the MAPQ filter.
-    pub low_mapq: usize,
-
-    /// The number of records whose MAPQ couldn't be parsed and were thus discarded.
-    pub missing_mapq: usize,
+    /// The number of records that have been ignored because they failed the MAPQ filter.
+    pub bad_mapq: usize,
 
     /// The number of records determined to be Paired-End.
     pub paired_end_reads: usize,
diff --git a/src/utils/alignment.rs b/src/utils/alignment.rs
index 1a731f1..c3cd230 100644
--- a/src/utils/alignment.rs
+++ b/src/utils/alignment.rs
@@ -1,11 +1,25 @@
 //! Utilities related to alignment of sequences.
 
 use anyhow::bail;
-use noodles::sam::record::{cigar::op::Kind, sequence::Base, Cigar};
+use noodles::sam::record::{cigar::op::Kind, sequence::Base, Cigar, MappingQuality};
 
 use super::cigar::consumes_reference;
 use super::cigar::consumes_sequence;
 
+/// Filter an alignment record by its mapping quality. `true` means "filter the record" and `false` means "do not filter the record".
+pub fn filter_by_mapq(
+    record: &noodles::sam::alignment::Record,
+    min_mapq: Option<MappingQuality>,
+) -> bool {
+    match min_mapq {
+        Some(min_mapq) => match record.mapping_quality() {
+            Some(mapq) => mapq.get() < min_mapq.get(),
+            None => true,
+        },
+        None => false,
+    }
+}
+
 /// Turns a condensed Cigar representation into a flattened representation. For
 /// example, 10M will become a Vec of length 10 comprised completely of
 /// Kind::MATCH. This utility is useful for generating a representation of a
@@ -127,10 +141,37 @@ impl<'a> ReferenceRecordStepThrough<'a> {
 
 #[cfg(test)]
 mod tests {
-    use noodles::sam::record::{Cigar, Sequence};
+    use noodles::sam::record::{Cigar, MappingQuality, Sequence};
 
     use super::ReferenceRecordStepThrough;
 
+    #[test]
+    pub fn it_filters_by_mapq() -> anyhow::Result<()> {
+        let mut record = noodles::sam::alignment::Record::default();
+        assert!(super::filter_by_mapq(
+            &record,
+            Some(MappingQuality::new(0).unwrap())
+        )); // Get filtered because MAPQ is missing
+        assert!(!super::filter_by_mapq(&record, None)); // Do not get filtered because filter is disabled
+
+        record
+            .mapping_quality_mut()
+            .replace(MappingQuality::new(10).unwrap());
+        assert!(!super::filter_by_mapq(
+            &record,
+            Some(MappingQuality::new(0).unwrap())
+        )); // Do not get filtered because MAPQ is present
+        assert!(!super::filter_by_mapq(
+            &record,
+            Some(MappingQuality::new(1).unwrap())
+        )); // Do not get filtered because MAPQ is greater than 1
+        assert!(super::filter_by_mapq(
+            &record,
+            Some(MappingQuality::new(11).unwrap())
+        )); // Do get filtered because MAPQ is less than 11
+        Ok(())
+    }
+
     #[test]
     pub fn it_correctly_returns_zero_edits_when_sequences_are_identical() -> anyhow::Result<()> {
         let reference = "ACTG".parse::<Sequence>()?;
diff --git a/src/utils/args.rs b/src/utils/args.rs
index 4151cb7..be482ff 100644
--- a/src/utils/args.rs
+++ b/src/utils/args.rs
@@ -2,7 +2,7 @@
 
 use std::fmt::Display;
 
-use noodles::bgzf::writer::CompressionLevel;
+use noodles::{bgzf::writer::CompressionLevel, sam::record::MappingQuality};
 use tracing::debug;
 
 //===================//
@@ -74,9 +74,9 @@ impl From for CompressionLevel {
     }
 }
 
-//==============//
-// Float Parser //
-//==============//
+//=============//
+// Arg Parsers //
+//=============//
 
 /// Utility method to parse command line floats and ensure they are
 /// within the range [MIN, MAX].
@@ -90,3 +90,9 @@ pub fn arg_in_range(arg: f32, range: std::ops::RangeInclusive<f32>) -> anyhow::R
         ),
     }
 }
+
+/// Utility method to parse command line integers and ensure they are
+/// within the range [0, 255) and return them as MappingQualities.
+pub fn parse_min_mapq(s: &str) -> Result<Option<MappingQuality>, std::num::ParseIntError> {
+    s.parse().map(MappingQuality::new)
+}
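
Note on the new MAPQ handling (this note and the sketch below are illustrative only and are not part of the patch): the switch from `u8` to `Option<MappingQuality>` relies on a noodles detail the diff never states outright — 255 is the SAM sentinel for "mapping quality unavailable", and `MappingQuality::new(255)` therefore returns `None`. That is why `parse_min_mapq` can return an `Option<MappingQuality>` directly, and why a record with a missing MAPQ is rejected by an enabled filter (counted as `bad_mapq` in the new results structs). The sketch assumes the same `noodles` crate already used throughout this repository.

    // Illustrative sketch, not part of the patch.
    use noodles::sam::record::MappingQuality;

    fn main() {
        // Any value in 0..=254 yields Some(_) and enables the filter.
        assert!(MappingQuality::new(30).is_some());

        // 255 is the SAM "missing MAPQ" sentinel, so it yields None,
        // which `filter_by_mapq` treats as "filter disabled".
        assert!(MappingQuality::new(255).is_none());

        // A freshly constructed record has no MAPQ either; an enabled
        // filter rejects it, which is tallied as `bad_mapq`.
        let record = noodles::sam::alignment::Record::default();
        assert!(record.mapping_quality().is_none());
    }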