Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

hotfix: handle unexpected panic situations when fetching feeds #15

Merged
merged 5 commits
Feb 18, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 6 additions & 0 deletions src-tauri/src/error.rs
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,12 @@ pub enum Error {
#[error("failed to parse syndication feed")]
SyndicationParsingFailure,

#[error("failed to fetch feed: {0}")]
FetchFeedFailure(String),

#[error("failed to fetch feed items: {0}")]
FetchFeedItemsFailure(String),

#[error("empty string")]
EmptyString,

Expand Down
94 changes: 58 additions & 36 deletions src-tauri/src/producer.rs
Original file line number Diff line number Diff line change
Expand Up @@ -13,30 +13,50 @@ use crate::{
syndication::fetch_feed_items,
};

pub fn create_new_items(db: &Connection, proxy: Option<&str>) -> Vec<ItemToCreate> {
use crate::error::Result;

pub fn create_new_items(db: &Connection, proxy: Option<&str>) -> Result<Vec<ItemToCreate>> {
let pairs = get_links_to_check(db);

let mut inserted = vec![];
for (feed, link, fetch_old_items) in pairs {
let mut items = fetch_feed_items(&link, proxy).unwrap();

if !fetch_old_items {
let most_recent_items = get_most_recent_items(db);
if let Some(most_recent) = most_recent_items.get(&feed) {
items.retain(|item| {
item.published_at
.map_or(false, |published_at| published_at > *most_recent)
});
} else {
items.truncate(1)
}
}

items.sort_by_key(|x| x.published_at);
inserted.extend(insert_new_items(db, feed, &items));
let feed_ids_to_check: Vec<i32> = pairs
.iter()
.filter_map(|(id, _, fetch_old_items)| if !fetch_old_items { Some(*id) } else { None })
.collect();

let most_recent_items = if !feed_ids_to_check.is_empty() {
get_most_recent_items(db, &feed_ids_to_check).unwrap_or_default()
} else {
HashMap::new()
};

for (feed, link, fetch_old_items) in pairs {
let items = fetch_feed_items(&link, proxy)?;

let mut filtered_items = if !fetch_old_items && most_recent_items.get(&feed).is_none() {
items
.into_iter()
.max_by_key(|x| x.published_at)
.into_iter()
.collect()
} else {
items
.into_iter()
.filter(|item| {
most_recent_items.get(&feed).map_or(true, |most_recent| {
item.published_at
.map_or(false, |published_at| published_at > *most_recent)
}) || fetch_old_items
})
.collect::<Vec<_>>()
};

filtered_items.sort_by_key(|x| x.published_at);
inserted.extend(insert_new_items(db, feed, &filtered_items));
}

inserted
Ok(inserted)
}

fn get_links_to_check(db: &Connection) -> Vec<(i32, String, bool)> {
Expand Down Expand Up @@ -93,25 +113,27 @@ fn insert_new_items(db: &Connection, feed: i32, items: &[RawItem]) -> Vec<ItemTo
inserted
}

fn get_most_recent_items(db: &Connection) -> HashMap<i32, DateTime<FixedOffset>> {
let opt = ItemReadOption {
ids: None,
feed: None,
status: None,
is_saved: None,
order_by: Some(ItemOrder::PublishedDateDesc),
limit: Some(1),
offset: None,
};

let rows = items::read_all(db, &opt).unwrap();

fn get_most_recent_items(
db: &Connection,
feed_ids: &[i32],
) -> Result<HashMap<i32, DateTime<FixedOffset>>> {
let mut most_recent_items = HashMap::new();
for row in rows {
let feed = row.feed.id;
let published_at = row.published_at;
most_recent_items.insert(feed, published_at);

for feed_id in feed_ids {
let opt = ItemReadOption {
ids: None,
feed: Some(*feed_id),
status: None,
is_saved: None,
order_by: Some(ItemOrder::PublishedDateDesc),
limit: Some(1),
offset: None,
};

if let Some(item) = items::read_all(db, &opt)?.first() {
most_recent_items.insert(item.feed.id, item.published_at);
}
}

most_recent_items
Ok(most_recent_items)
}
18 changes: 12 additions & 6 deletions src-tauri/src/worker.rs
Original file line number Diff line number Diff line change
Expand Up @@ -20,13 +20,19 @@ pub fn start(app: &App, app_data_dir: &PathBuf) {
let db = open_connection(&app_data_dir).unwrap();

thread::spawn(move || loop {
let inserted = create_new_items(&db, proxy(&db).as_deref());
if !inserted.is_empty() {
if notification(&db) {
notify(&app_id, &inserted);
}
match create_new_items(&db, proxy(&db).as_deref()) {
Ok(inserted) => {
if !inserted.is_empty() {
if notification(&db) {
notify(&app_id, &inserted);
}

let _ = app_handle.emit_all("feed_updated", ());
let _ = app_handle.emit_all("feed_updated", ());
}
}
Err(err) => {
eprintln!("Error fetching new items: {}", err);
}
}

thread::sleep(time::Duration::from_secs(polling_frequency(&db)));
Expand Down
Loading