0.6.1 (#69)
Fix: the CLI version string.

---------

Co-authored-by: Mitchell R. Vollger <[email protected]>
mrvollger and Mitchell R. Vollger authored Dec 11, 2024
1 parent ce5f66f commit c776bab
Showing 8 changed files with 304 additions and 81 deletions.
291 changes: 216 additions & 75 deletions Cargo.lock

Large diffs are not rendered by default.

4 changes: 2 additions & 2 deletions Cargo.toml
@@ -10,7 +10,7 @@ license = "MIT"
name = "fibertools-rs"
readme = "README.md"
repository = "https://github.com/fiberseq/fibertools-rs"
version = "0.6.0"
version = "0.6.1"
# exclude py-ft and test data from cargo publish since they are too large
exclude = ["py-ft/", "tests/data/"]

@@ -57,7 +57,7 @@ rand = "0.8.5"

[build-dependencies]
burn-import = {version = "0.12"}
vergen-git2 = { version = "1.0.1", features = ["build"] }
vergen-git2 = { version = "1.0.2", features = ["build", "cargo", "rustc", "si"] }


[dev-dependencies]
15 changes: 13 additions & 2 deletions build.rs
@@ -6,11 +6,22 @@ fn vergen() -> Result<(), Box<dyn Error>> {
// NOTE: See the specific builder documentation for configuration options.
let build = BuildBuilder::all_build()?;
let git2 = Git2Builder::all_git()?;
+let cargo = CargoBuilder::all_cargo()?;

-Emitter::default()
+let status = Emitter::default()
.add_instructions(&build)?
.add_instructions(&git2)?
-.emit()?;
+.add_instructions(&cargo)?
+.fail_on_error()
+.emit();

+// set the env variable myself if the status failed
+if status.is_err() {
+eprintln!("Failed to get version information from git");
+eprintln!("Likely building a published version from cargo or bioconda.");
+std::env::set_var("VERGEN_GIT_DESCRIBE", "unknown");
+}

Ok(())
}

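Why the fallback matters: vergen emits its values from build.rs as cargo:rustc-env instructions, which is what lets env!("VERGEN_GIT_DESCRIBE") resolve at compile time, and the git step has nothing to describe when building from a published crates.io or bioconda tarball that ships without a .git directory. Below is a minimal sketch of that mechanism without vergen; the standalone build script and the `git describe` arguments are illustrative assumptions, not code from this repository.

// Minimal sketch of the underlying mechanism, without vergen (hypothetical
// standalone build.rs; assumes a `git` binary on PATH). Cargo turns each
// `cargo:rustc-env=NAME=VALUE` line printed by a build script into an
// environment variable that `env!()` can read when the crate is compiled.
fn main() {
    let describe = std::process::Command::new("git")
        .args(["describe", "--tags", "--always", "--dirty"])
        .output()
        .ok()
        .filter(|out| out.status.success())
        .map(|out| String::from_utf8_lossy(&out.stdout).trim().to_string())
        // No .git directory (e.g. a crates.io or bioconda source tarball):
        // fall back to a placeholder instead of failing the build.
        .unwrap_or_else(|| "unknown".to_string());
    println!("cargo:rustc-env=VERGEN_GIT_DESCRIBE={describe}");
}
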
4 changes: 3 additions & 1 deletion src/fiber.rs
@@ -62,7 +62,9 @@ impl FiberseqData {
};

// get fiberseq basemods
-let base_mods = BaseMods::new(&record, filters.min_ml_score);
+let mut base_mods = BaseMods::new(&record, filters.min_ml_score);
+base_mods.filter_at_read_ends(filters.strip_starting_basemods);

//let (m6a, cpg) = FiberMods::new(&base_mods);
let m6a = base_mods.m6a();
let cpg = base_mods.cpg();
6 changes: 5 additions & 1 deletion src/lib.rs
@@ -20,7 +20,11 @@ use std::io::Write;

pub const VERSION: &str = env!("CARGO_PKG_VERSION");
lazy_static! {
pub static ref FULL_VERSION: String = format!("{}", env!("VERGEN_GIT_DESCRIBE"));
pub static ref FULL_VERSION: String = format!(
"v{}\tgit-details {}",
env!("CARGO_PKG_VERSION"),
env!("VERGEN_GIT_DESCRIBE")
);
}
// if this string (bar) gets too long it displays weird when writing to stdout
const PROGRESS_STYLE: &str =
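The new FULL_VERSION above pairs the crate version with the git describe string. For illustration, a sketch of the resulting shape with hypothetical stand-in values (not output from the real binary):

// Illustration only: hypothetical values standing in for the two
// compile-time env vars used above.
fn main() {
    let pkg_version = "0.6.1"; // env!("CARGO_PKG_VERSION")
    let git_describe = "v0.6.1-0-gc776bab"; // env!("VERGEN_GIT_DESCRIBE"), or "unknown"
    let full_version = format!("v{pkg_version}\tgit-details {git_describe}");
    // Prints: v0.6.1<TAB>git-details v0.6.1-0-gc776bab
    println!("{full_version}");
}
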
38 changes: 38 additions & 0 deletions src/utils/bamranges.rs
@@ -162,6 +162,44 @@ impl Ranges {
self.reference_lengths = to_keep.iter().map(|&i| self.reference_lengths[i]).collect();
}

/// filter out ranges that are within the first or last X bp of the read
pub fn filter_starts_at_read_ends(&mut self, strip: i64) {
if strip == 0 {
return;
}
let to_keep = self
.starts
.iter()
.enumerate()
.filter_map(|(i, &s)| {
if let Some(s) = s {
if s < strip || s > self.seq_len - strip {
None
} else {
Some(i)
}
} else {
None
}
})
.collect::<Vec<_>>();

if to_keep.len() != self.starts.len() {
log::trace!(
"basemods stripped, {} basemods removed",
self.starts.len() - to_keep.len()
);
}

self.starts = to_keep.iter().map(|&i| self.starts[i]).collect();
self.ends = to_keep.iter().map(|&i| self.ends[i]).collect();
self.lengths = to_keep.iter().map(|&i| self.lengths[i]).collect();
self.qual = to_keep.iter().map(|&i| self.qual[i]).collect();
self.reference_starts = to_keep.iter().map(|&i| self.reference_starts[i]).collect();
self.reference_ends = to_keep.iter().map(|&i| self.reference_ends[i]).collect();
self.reference_lengths = to_keep.iter().map(|&i| self.reference_lengths[i]).collect();
}

pub fn to_strings(&self, reference: bool, skip_none: bool) -> Vec<String> {
let (s, e, l, q) = if reference {
(
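The same end-trimming rule, shown as a standalone sketch on plain vectors (a hypothetical keep_away_from_ends helper, not the crate's Ranges type; assumes 0-based start coordinates and a known read length): an entry is kept only when its start lies at least strip bp from both read ends, and entries with no start are dropped, mirroring the filter above.

// Hypothetical helper mirroring the rule above: keep only entries whose
// start lies at least `strip` bp away from either end of the read.
fn keep_away_from_ends(starts: &[Option<i64>], seq_len: i64, strip: i64) -> Vec<usize> {
    if strip == 0 {
        return (0..starts.len()).collect();
    }
    starts
        .iter()
        .enumerate()
        .filter_map(|(i, &s)| match s {
            Some(p) if p >= strip && p <= seq_len - strip => Some(i),
            _ => None,
        })
        .collect()
}

fn main() {
    // Read of length 100 bp, stripping 10 bp from each end: only the start at
    // position 50 survives; 5 and 95 are too close to the ends, None is dropped.
    let starts = vec![Some(5), Some(50), Some(95), None];
    assert_eq!(keep_away_from_ends(&starts, 100, 10), vec![1]);
}
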
17 changes: 17 additions & 0 deletions src/utils/basemods.rs
@@ -51,6 +51,13 @@ impl BaseMod {
pub fn is_cpg(&self) -> bool {
self.modification_type == 'm'
}

pub fn filter_at_read_ends(&mut self, n_strip: i64) {
if n_strip <= 0 {
return;
}
self.ranges.filter_starts_at_read_ends(n_strip);
}
}

#[derive(Eq, PartialEq, Debug, Clone)]
@@ -257,6 +264,16 @@ impl BaseMods {
.for_each(|bm| bm.ranges.filter_by_qual(min_ml_score));
}

/// filter the basemods at the read ends
pub fn filter_at_read_ends(&mut self, n_strip: i64) {
if n_strip <= 0 {
return;
}
self.base_mods
.iter_mut()
.for_each(|bm| bm.filter_at_read_ends(n_strip));
}

/// combine the forward and reverse m6a data
pub fn m6a(&self) -> Ranges {
let ranges = self
10 changes: 10 additions & 0 deletions src/utils/input_bam.rs
@@ -40,6 +40,15 @@ pub struct FiberFilters {
/// Minimum score in the ML tag to use or include in the output
#[clap(long="ml", alias="min-ml-score", default_value = MIN_ML_SCORE, help_heading = "BAM-Options", env="FT_MIN_ML_SCORE")]
pub min_ml_score: u8,
/// strip basemods in the first or last X bp of the read
#[clap(
global = true,
long,
default_value = "0",
help_heading = "BAM-Options",
hide = true
)]
pub strip_starting_basemods: i64,
}

impl std::default::Default for FiberFilters {
@@ -48,6 +57,7 @@ impl std::default::Default for FiberFilters {
bit_flag: 0,
min_ml_score: MIN_ML_SCORE.parse().unwrap(),
filter_expression: None,
strip_starting_basemods: 0,
}
}
}
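A minimal sketch of how a flag declared with these attributes behaves (hypothetical Filters struct, assuming clap v3/v4 with the derive feature; not the crate's full CLI, and "prog" is just a placeholder program name):

use clap::Parser;

/// Hypothetical stand-in for FiberFilters, reduced to the new option.
#[derive(Parser)]
struct Filters {
    /// Strip basemods in the first or last X bp of the read.
    #[clap(long, default_value = "0", hide = true)]
    strip_starting_basemods: i64,
}

fn main() {
    // The long flag name is derived from the field: --strip-starting-basemods.
    let f = Filters::parse_from(["prog", "--strip-starting-basemods", "100"]);
    assert_eq!(f.strip_starting_basemods, 100);
}

With hide = true the option stays out of --help, while global = true in the real definition allows it to be passed alongside any subcommand.
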
