Skip to content

Commit e88441b

Browse files
committed
REWRITE EVERYTHING!!!!!!
1 parent 98e73af commit e88441b

17 files changed

+451
-206
lines changed

src-tauri/Cargo.toml

+1-1
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@ tauri-build = { version = "1.5.0", features = [] }
1717
[dependencies]
1818
serde_json = "1.0"
1919
serde = { version = "1.0", features = ["derive"] }
20-
tauri = { version = "1.5.2", features = [ "app-all", "path-all", "shell-open", "dialog"] }
20+
tauri = { version = "1.5.2", features = [ "dialog-open", "app-all", "path-all", "shell-open", "dialog"] }
2121
tokio = { version = "1.32.0", features = ["full"] }
2222
reqwest = "0.11.20"
2323
zip-extract = "0.1.2"

src-tauri/src/aggregator.rs

+61
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
use std::io::Cursor;
2+
use std::path::PathBuf;
3+
use csv::ReaderBuilder;
4+
use tokio::fs;
5+
6+
/// One parsed row of a `.RateConst.csv` spreadsheet produced by the external
/// analyzer, plus the number of samples omitted for that spreadsheet.
///
/// All spreadsheet cells are kept as `String`s exactly as read from the CSV
/// (the peptide cell is trimmed by the parser); only `samples_omitted`
/// carries non-textual data.
#[derive(Debug, Clone, PartialEq)]
pub struct Calculation {
    pub protein: String,
    pub peptide: String,
    pub neh: String,
    pub charge: String,
    pub mean: String,
    pub n_ret_1: String,
    pub mpe_0: String,
    pub mpe_1: String,
    pub two_sd_minus: String,
    pub n_ret_2: String,
    pub two_sd_plus: String,
    pub n_ret_3: String,
    // Copied from the (path, samples_omitted) pair the row was parsed from.
    pub samples_omitted: u64,
}
21+
22+
pub async fn aggregate(spreadsheets: &Vec<(PathBuf, u64)>) -> Result<Vec<Calculation>, String> {
23+
let mut calculations = vec![];
24+
25+
for spreadsheet in spreadsheets {
26+
let mut spreadsheet_calculations = parse_calculations(spreadsheet).await?;
27+
calculations.append(&mut spreadsheet_calculations);
28+
}
29+
30+
Ok(calculations)
31+
}
32+
33+
async fn parse_calculations(spreadsheet: &(PathBuf, u64)) -> Result<Vec<Calculation>, String> {
34+
let contents = fs::read(&spreadsheet.0).await.map_err(|err| format!("Error reading calculations file: {}", err))?;
35+
let mut rdr = ReaderBuilder::new()
36+
.from_reader(Cursor::new(contents));
37+
38+
let mut calculations = vec![];
39+
40+
for result in rdr.records() {
41+
let record = result.map_err(|err| format!("Error reading record: {}", err))?;
42+
let calculation = Calculation {
43+
protein: record[0].to_string(),
44+
peptide: record[1].trim().to_string(),
45+
neh: record[2].to_string(),
46+
charge: record[3].to_string(),
47+
mean: record[4].to_string(),
48+
n_ret_1: record[5].to_string(),
49+
mpe_0: record[6].to_string(),
50+
mpe_1: record[7].to_string(),
51+
two_sd_minus: record[8].to_string(),
52+
n_ret_2: record[9].to_string(),
53+
two_sd_plus: record[10].to_string(),
54+
n_ret_3: record[11].to_string(),
55+
samples_omitted: spreadsheet.1,
56+
};
57+
calculations.push(calculation);
58+
}
59+
60+
Ok(calculations)
61+
}

src-tauri/src/analyzer.rs

+35
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
use std::path::{Path, PathBuf};
2+
use tokio::process::Command;
3+
use crate::serializer::Dataset;
4+
5+
6+
pub async fn analyze(deps_dir: &Path, data_dir: &Path, datasets: &Vec<Dataset>) -> Result<Vec<(PathBuf, u64)>, String> {
7+
let mut results = vec![];
8+
9+
for dataset in datasets {
10+
let result = analyze_single(deps_dir, data_dir, dataset).await?;
11+
results.push(result);
12+
}
13+
14+
Ok(results)
15+
}
16+
17+
/// Runs `SRM_Rate.exe` for a single dataset and returns the path of the CSV
/// it is expected to write into `data_dir` (`<spreadsheet stem>.RateConst.csv`),
/// paired with the dataset's `samples_removed` count.
///
/// # Errors
/// Fails if the spreadsheet path has no file stem, the process cannot be
/// spawned, or it exits unsuccessfully (its stderr is included in the message).
async fn analyze_single(deps_dir: &Path, data_dir: &Path, dataset: &Dataset) -> Result<(PathBuf, u64), String> {
    // `Command::arg` accepts anything `AsRef<OsStr>`, so the paths are passed
    // directly — no lossy/panicking `to_str().unwrap()` round-trip needed.
    let mut command = Command::new(deps_dir.join("SRM_Rate.exe"));
    command
        .arg(&dataset.heavy_water)
        .arg(&dataset.spreadsheet);

    let input_file_name = dataset
        .spreadsheet
        .file_stem()
        .ok_or_else(|| {
            format!(
                "Spreadsheet path has no file name: {}",
                dataset.spreadsheet.display()
            )
        })?
        .to_string_lossy();

    let output = command
        .output()
        .await
        .map_err(|err| format!("Command couldn't run: {err}"))?;

    if output.status.success() {
        Ok((data_dir.join(format!("{input_file_name}.RateConst.csv")), dataset.samples_removed))
    } else {
        Err(format!("The command didn't complete successfully: {}", String::from_utf8_lossy(&output.stderr)))
    }
}

src-tauri/src/commands.rs

+40-95
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,15 @@
1-
use crate::processor;
2-
use reqwest::Client;
3-
use serde::{Deserialize, Serialize};
4-
use tokio::io::AsyncWriteExt;
51
use std::io::Cursor;
62
use std::path::Path;
3+
4+
use reqwest::Client;
75
use tauri::api::dialog::blocking::FileDialogBuilder;
86
use tokio::fs;
9-
use crate::processor::parser::parse;
7+
use crate::aggregator::aggregate;
8+
use crate::analyzer::analyze;
9+
10+
use crate::grouper::{group_by_na_columns, group_by_peptides};
11+
use crate::parser::parse;
12+
use crate::serializer::{serialize, serialize_calculations};
1013

1114
#[tauri::command]
1215
pub async fn install_dependencies(app_handle: tauri::AppHandle) -> Result<(), String> {
@@ -38,65 +41,11 @@ pub async fn install_dependencies(app_handle: tauri::AppHandle) -> Result<(), St
3841
Ok(())
3942
}
4043

41-
#[derive(Serialize, Deserialize, Debug)]
42-
pub struct File {
43-
name: String,
44-
path: String,
45-
}
46-
47-
#[tauri::command]
48-
pub async fn select_data(data_input_type: String) -> Result<Option<File>, String> {
49-
match data_input_type.as_str() {
50-
"inputData" => {
51-
let file_path = FileDialogBuilder::new()
52-
.add_filter("Input Data File", &vec!["csv"])
53-
.pick_file();
54-
55-
if let Some(file_path) = file_path {
56-
let file_name = file_path
57-
.file_name()
58-
.unwrap()
59-
.to_string_lossy()
60-
.into_owned();
61-
62-
return Ok(Some(File {
63-
name: file_name,
64-
path: file_path.to_string_lossy().into_owned(),
65-
}));
66-
}
67-
68-
Ok(None)
69-
}
70-
"heavyWaterInputData" => {
71-
let file_path = FileDialogBuilder::new()
72-
.add_filter("Heavy Water File", &vec!["txt"])
73-
.pick_file();
74-
75-
if let Some(file_path) = file_path {
76-
let file_name = file_path
77-
.file_name()
78-
.unwrap()
79-
.to_string_lossy()
80-
.into_owned();
81-
82-
return Ok(Some(File {
83-
name: file_name,
84-
path: file_path.to_string_lossy().into_owned(),
85-
}));
86-
}
87-
88-
Ok(None)
89-
}
90-
_ => Err("Invalid data input type".into()),
91-
}
92-
}
93-
9444
#[tauri::command]
9545
pub async fn process_data(
9646
app_handle: tauri::AppHandle,
9747
should_remove_na_calculations: bool,
9848
input_file_path: String,
99-
heavy_water_file_path: String,
10049
) -> Result<(), String> {
10150
let data_dir = app_handle
10251
.path_resolver()
@@ -109,42 +58,38 @@ pub async fn process_data(
10958
.unwrap()
11059
.join("dependencies");
11160
let input_file_path = Path::new(&input_file_path);
112-
let heavy_water_file_path = Path::new(&heavy_water_file_path);
113-
114-
dbg!(parse(input_file_path).await.unwrap());
115-
116-
// let output_contents = processor::handle(
117-
// should_remove_na_calculations,
118-
// &data_dir,
119-
// &dependencies_dir,
120-
// &input_file_path,
121-
// heavy_water_file_path,
122-
// )
123-
// .await?;
124-
//
125-
// let input_file_name = input_file_path
126-
// .file_stem()
127-
// .unwrap()
128-
// .to_string_lossy()
129-
// .into_owned();
130-
//
131-
// let file_path = FileDialogBuilder::new()
132-
// .set_file_name(&format!("{input_file_name}.RateConst.csv"))
133-
// .add_filter("Output CSV File", &vec!["csv"])
134-
// .save_file();
135-
//
136-
// if let Some(file_path) = file_path {
137-
// // overwrite file if it already exists
138-
// fs::OpenOptions::new()
139-
// .write(true)
140-
// .create(true)
141-
// .open(&file_path)
142-
// .await
143-
// .map_err(|err| format!("Failed to write output file: {err}"))?
144-
// .write(&output_contents)
145-
// .await
146-
// .map_err(|err| format!("Failed to write output file: {err}"))?;
147-
// }
61+
62+
let (days, mice, labels, peptides) = parse(input_file_path).await.unwrap();
63+
64+
let groups = group_by_peptides(peptides);
65+
let groups = group_by_na_columns(groups);
66+
67+
let datasets = serialize(
68+
&data_dir,
69+
days,
70+
mice,
71+
labels,
72+
groups,
73+
).await.unwrap();
74+
75+
let calculations = analyze(&dependencies_dir, &data_dir, &datasets).await.unwrap();
76+
let calculations = aggregate(&calculations).await.unwrap();
77+
78+
let input_file_name = input_file_path
79+
.file_stem()
80+
.unwrap()
81+
.to_string_lossy()
82+
.into_owned();
83+
84+
let file_path = FileDialogBuilder::new()
85+
.set_file_name(&format!("{input_file_name}.RateConst.csv"))
86+
.add_filter("Output CSV File", &vec!["csv"])
87+
.save_file();
88+
89+
if let Some(file_path) = file_path {
90+
serialize_calculations(&file_path, &calculations).unwrap();
91+
}
92+
14893

14994
Ok(())
15095
}

src-tauri/src/grouper.rs

+29-17
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
use std::collections::HashMap;
2+
23
use crate::parser::Peptide;
34

45
#[derive(Debug, Clone)]
@@ -7,7 +8,7 @@ pub struct PeptideGroup {
78
pub na_columns: Vec<bool>,
89
}
910

10-
pub fn group_by_peptides(peptides: Vec<Peptide>) -> (Vec<PeptideGroup>, Vec<NAGroup>) {
11+
pub fn group_by_peptides(peptides: Vec<Peptide>) -> Vec<PeptideGroup> {
1112
let mut sorted_peptides = peptides;
1213
sorted_peptides.sort_by(|a, b| {
1314
a.name.cmp(&b.name).then_with(|| {
@@ -16,7 +17,6 @@ pub fn group_by_peptides(peptides: Vec<Peptide>) -> (Vec<PeptideGroup>, Vec<NAGr
1617
});
1718

1819
let mut groups: Vec<PeptideGroup> = Vec::new();
19-
let mut separated_groups: Vec<NAGroup> = Vec::new();
2020
let mut current_group: Vec<Peptide> = Vec::new();
2121

2222
for peptide in sorted_peptides {
@@ -29,11 +29,7 @@ pub fn group_by_peptides(peptides: Vec<Peptide>) -> (Vec<PeptideGroup>, Vec<NAGr
2929
if (peptide.mass_charge_ratio - last_ratio).abs() < threshold {
3030
current_group.push(peptide);
3131
} else {
32-
let peptide_group = create_peptide_group(&current_group);
33-
separated_groups.push(NAGroup {
34-
groups: vec![peptide_group.clone()],
35-
na_columns: peptide_group.na_columns.clone()
36-
});
32+
groups.push(create_peptide_group(&current_group));
3733
current_group = vec![peptide];
3834
}
3935
} else {
@@ -47,7 +43,7 @@ pub fn group_by_peptides(peptides: Vec<Peptide>) -> (Vec<PeptideGroup>, Vec<NAGr
4743
groups.push(create_peptide_group(&current_group));
4844
}
4945

50-
(groups, separated_groups)
46+
groups
5147
}
5248

5349

@@ -62,7 +58,7 @@ fn dynamic_threshold(peptides: &[Peptide]) -> f64 {
6258
.sum::<f64>() / (peptides.len() - 1) as f64;
6359
let std_deviation = variance.sqrt();
6460

65-
std_deviation // You might want to scale this value based on your data.
61+
std_deviation * 2.0 // You might want to scale this value based on your data.
6662
}
6763

6864

@@ -85,22 +81,38 @@ fn create_peptide_group(peptides: &[Peptide]) -> PeptideGroup {
8581

8682
#[derive(Debug, Clone)]
8783
pub struct NAGroup {
88-
pub groups: Vec<PeptideGroup>,
84+
pub peptides: Vec<Peptide>,
8985
pub na_columns: Vec<bool>,
9086
}
9187

9288
pub fn group_by_na_columns(groups: Vec<PeptideGroup>) -> Vec<NAGroup> {
93-
let mut na_groups: HashMap<Vec<bool>, Vec<PeptideGroup>> = HashMap::new();
89+
let mut na_groups: HashMap<(Vec<bool>, u64), Vec<Peptide>> = HashMap::new();
9490

95-
// Group peptide groups by similar na_columns
9691
for group in groups {
97-
na_groups.entry(group.na_columns.clone())
98-
.or_insert_with(Vec::new)
99-
.push(group);
92+
let mut count = 1;
93+
loop {
94+
let key = (group.na_columns.clone(), count);
95+
if let Some(peptide_group) = na_groups.get(&key) {
96+
let name = group.peptides[0].name.clone();
97+
98+
if let Some(_) = peptide_group.iter().find(|&x| x.name == name) {
99+
count += 1;
100+
} else {
101+
na_groups.entry(key).or_insert_with(Vec::new).extend(group.peptides);
102+
break;
103+
}
104+
} else {
105+
na_groups.insert(key, group.peptides);
106+
break;
107+
}
108+
}
100109
}
101110

102111
// Convert each group into a NAGroup with na_columns computed
103-
na_groups.into_iter().map(|(na_columns, groups)| {
104-
NAGroup { groups, na_columns }
112+
na_groups.iter().map(|((na_cols, _), peptides)| {
113+
NAGroup {
114+
peptides: peptides.to_vec(),
115+
na_columns: na_cols.to_vec(),
116+
}
105117
}).collect()
106118
}

src-tauri/src/lib.rs

+6-1
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,7 @@
11
pub mod commands;
2-
pub mod processor;
2+
pub mod processor;
3+
pub mod parser;
4+
pub mod grouper;
5+
pub mod serializer;
6+
pub mod analyzer;
7+
pub mod aggregator;

src-tauri/src/main.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ use app::commands;
66
#[tokio::main]
77
async fn main() {
88
tauri::Builder::default()
9-
.invoke_handler(tauri::generate_handler![commands::install_dependencies, commands::process_data, commands::select_data])
9+
.invoke_handler(tauri::generate_handler![commands::install_dependencies, commands::process_data])
1010
.run(tauri::generate_context!())
1111
.expect("error while running tauri application");
1212
}

0 commit comments

Comments
 (0)