diff --git a/README.md b/README.md
index 667492e..31b7f7e 100644
--- a/README.md
+++ b/README.md
@@ -13,7 +13,7 @@ Heavily WIP. Doesn't work yet at all, but does read the stream of posts as they
 - [x] Keep subscription state to not lose messages
 - [x] Serve the feed
 - [x] Handle deleting of posts
-- [ ] Handle errors in the web service gracefully
+- [x] Handle errors in the web service gracefully
 - [x] Handle missing profiles in the profile classifier
 - [ ] Add a way to mark a profile as being from a certain country manually
 - [ ] Handle reconnecting to websocket somehow
diff --git a/src/processes/feed_server.rs b/src/processes/feed_server.rs
index 86187ee..e55d94c 100644
--- a/src/processes/feed_server.rs
+++ b/src/processes/feed_server.rs
@@ -1,5 +1,6 @@
 mod endpoints;
 mod server;
 mod state;
+mod errors;
 
 pub use server::FeedServer;
diff --git a/src/processes/feed_server/endpoints/get_feed_skeleton.rs b/src/processes/feed_server/endpoints/get_feed_skeleton.rs
index bf6171c..833916a 100644
--- a/src/processes/feed_server/endpoints/get_feed_skeleton.rs
+++ b/src/processes/feed_server/endpoints/get_feed_skeleton.rs
@@ -1,4 +1,4 @@
-use anyhow::{anyhow, Result};
+use anyhow::anyhow;
 use atrium_api::app::bsky::feed::defs::SkeletonFeedPost;
 use atrium_api::app::bsky::feed::get_feed_skeleton::{
     Output as FeedSkeleton, Parameters as FeedSkeletonQuery,
@@ -8,29 +8,27 @@ use axum::Json;
 use chrono::{DateTime, TimeZone, Utc};
 
 use crate::processes::feed_server::state::FeedServerState;
+use crate::processes::feed_server::errors::AppError;
 
 pub async fn get_feed_skeleton(
     State(state): State<FeedServerState>,
     query: Query<FeedSkeletonQuery>,
-) -> Json<FeedSkeleton> {
+) -> Result<Json<FeedSkeleton>, AppError> {
     let algo = state
         .algos
         .get_by_name(&query.feed)
-        .ok_or_else(|| anyhow!("Feed {} not found", query.feed))
-        .unwrap(); // TODO: handle error
+        .ok_or_else(|| AppError::FeedNotFound(query.feed.clone()))?;
 
     let limit = query.limit.unwrap_or(20);
     let earlier_than = query
         .cursor
         .as_deref()
         .map(parse_cursor)
-        .transpose()
-        .unwrap(); // TODO: handle error
+        .transpose()?;
 
     let posts = algo
         .fetch_posts(&state.database, limit, earlier_than)
-        .await
-        .unwrap(); // TODO: handle error
+        .await?;
 
     let feed = posts
         .iter()
@@ -42,21 +40,21 @@ pub async fn get_feed_skeleton(
 
     let cursor = posts.last().map(|p| make_cursor(&p.indexed_at, &p.cid));
 
-    Json(FeedSkeleton { cursor, feed })
+    Ok(Json(FeedSkeleton { cursor, feed }))
 }
 
 fn make_cursor(date: &DateTime<Utc>, cid: &str) -> String {
     format!("{}::{}", date.timestamp() * 1000, cid)
 }
 
-fn parse_cursor(cursor: &str) -> Result<(DateTime<Utc>, &str)> {
+fn parse_cursor(cursor: &str) -> anyhow::Result<(DateTime<Utc>, &str)> {
     let mut parts = cursor.split("::");
 
-    let indexed_at = parts.next().ok_or_else(|| anyhow!("Malformed cursor"))?;
-    let cid = parts.next().ok_or_else(|| anyhow!("Malformed cursor"))?;
+    let indexed_at = parts.next().ok_or_else(|| anyhow!("Malformed cursor: {cursor}"))?;
+    let cid = parts.next().ok_or_else(|| anyhow!("Malformed cursor: {cursor}"))?;
 
     if parts.next().is_some() {
-        return Err(anyhow!("Malformed cursor"));
+        return Err(anyhow!("Malformed cursor: {cursor}"));
     }
 
     let indexed_at: i64 = indexed_at.parse()?;
diff --git a/src/processes/feed_server/errors.rs b/src/processes/feed_server/errors.rs
new file mode 100644
index 0000000..db6992c
--- /dev/null
+++ b/src/processes/feed_server/errors.rs
@@ -0,0 +1,28 @@
+use axum::response::{Response, IntoResponse};
+use axum::http::StatusCode;
+
+pub enum AppError {
+    FeedNotFound(String),
+    Other(anyhow::Error),
+}
+
+impl IntoResponse for AppError {
+    fn into_response(self) -> Response {
+        match self {
+            Self::FeedNotFound(name) => (StatusCode::NOT_FOUND, format!("Feed not found: {}", name)),
+            Self::Other(e) => (
+                StatusCode::INTERNAL_SERVER_ERROR,
+                format!("Something went wrong: {}", e),
+            )
+        }.into_response()
+    }
+}
+
+impl<E> From<E> for AppError
+where
+    E: Into<anyhow::Error>,
+{
+    fn from(err: E) -> Self {
+        Self::Other(err.into())
+    }
+}
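
Reviewer note, not part of the diff: a minimal sketch of how the new AppError is expected to behave. The explicit FeedNotFound variant should surface to clients as a 404, and any error convertible into anyhow::Error should propagate through the handler's `?` via the blanket From impl and come back as a 500. The test module below is hypothetical (it assumes it would live alongside src/processes/feed_server/errors.rs); the feed name and error message are placeholders.

    // Hypothetical companion tests for errors.rs (not part of this change).
    #[cfg(test)]
    mod tests {
        use super::AppError;
        use axum::http::StatusCode;
        use axum::response::IntoResponse;

        #[test]
        fn feed_not_found_maps_to_404() {
            // The explicit variant carries the feed name and becomes a 404.
            let resp = AppError::FeedNotFound("example-feed".to_string()).into_response();
            assert_eq!(resp.status(), StatusCode::NOT_FOUND);
        }

        #[test]
        fn other_errors_map_to_500() {
            // Anything convertible to anyhow::Error goes through the blanket
            // From impl and is reported as an internal server error.
            let err: AppError = anyhow::anyhow!("database unavailable").into();
            assert_eq!(err.into_response().status(), StatusCode::INTERNAL_SERVER_ERROR);
        }
    }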
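Also not part of the diff: the feed cursor format is "<unix-millis>::<cid>", so make_cursor and parse_cursor should round-trip. The sketch below assumes the unshown tail of parse_cursor converts the parsed millisecond value back into a DateTime<Utc>, which its return type implies; the CID literal is a placeholder.

    // Hypothetical round-trip check for the cursor helpers in get_feed_skeleton.rs.
    #[cfg(test)]
    mod cursor_tests {
        use super::{make_cursor, parse_cursor};
        use chrono::{TimeZone, Utc};

        #[test]
        fn cursor_round_trips() {
            let indexed_at = Utc.timestamp_opt(1_700_000_000, 0).unwrap();
            let cid = "some-cid"; // placeholder, not a real CID
            let cursor = make_cursor(&indexed_at, cid);
            assert_eq!(cursor, "1700000000000::some-cid");

            let (parsed_at, parsed_cid) = parse_cursor(&cursor).unwrap();
            assert_eq!(parsed_at.timestamp(), indexed_at.timestamp());
            assert_eq!(parsed_cid, cid);
        }
    }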