Upgrade everything to latest versions
This includes a number of small changes to adapt to how atrium-api has changed over time. They're not functional or interesting, just type-level adjustments that the new versions require. The one more involved change is in how profile details are parsed: atrium's typed representation is awkward to work with here, so the values are now pulled manually out of the record's object map instead of going through atrium's types, much like the CBOR parsing already does. Boring maintenance stuff.
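For context, here is the core of the new profile parsing, condensed from the TryFrom<Unknown> impl in the diff below. The diff uses an inline closure; the standalone function form here is only for illustration:

use std::ops::Deref;

use atrium_api::types::Unknown;
use ipld_core::ipld::Ipld;

// Pull a string field straight out of the record's object map,
// falling back to an empty string when the key is missing or not a string.
fn string_or_empty(value: &Unknown, key: &str) -> String {
    match value {
        Unknown::Object(map) => match map.get(key) {
            Some(data) => match data.deref() {
                Ipld::String(s) => s.clone(),
                _ => "".to_owned(),
            },
            _ => "".to_owned(),
        },
        _ => "".to_owned(),
    }
}

// Used as: display_name = string_or_empty(&record_value, "displayName")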
parent 149cd44227
commit b8d1fd7695
File diff suppressed because it is too large.
Cargo.toml: 38 lines changed
@@ -7,28 +7,28 @@ default-run = "nederlandskie"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-anyhow = "1.0.79"
+anyhow = "1.0.86"
-async-trait = "0.1.77"
+async-trait = "0.1.81"
-atrium-api = "0.15.0"
+atrium-api = "0.24.2"
-atrium-xrpc = "0.8.0"
+atrium-xrpc = "0.11.3"
-atrium-xrpc-client = "0.2.0"
+atrium-xrpc-client = "0.5.6"
-axum = "0.7.4"
+axum = "0.7.5"
-chat-gpt-lib-rs = "0.3.2"
+chat-gpt-lib-rs = "0.5.1"
-chrono = "0.4.31"
+chrono = "0.4.38"
-clap = { version = "4.4.16", features = ["derive"] }
+clap = { version = "4.5.16", features = ["derive"] }
 dotenv = "0.15.0"
-env_logger = "0.10.1"
+env_logger = "0.11.5"
-http = "1.0.0"
+http = "1.1.0"
-libipld-core = { version = "0.16.0", features = ["serde-codec"] }
+ipld-core = "0.4.1"
 lingua = "1.6.2"
-log = "0.4.20"
+log = "0.4.22"
 once_cell = "1.19.0"
 rs-car = "0.4.1"
 scooby = "0.5.0"
-serde = "1.0.195"
+serde = "1.0.208"
-serde_ipld_dagcbor = "0.4.2"
+serde_ipld_dagcbor = "0.6.1"
 sk-cbor = "0.1.2"
-sqlx = { version = "0.7.3", default-features = false, features = ["postgres", "runtime-tokio-native-tls", "chrono"] }
+sqlx = { version = "0.8.0", default-features = false, features = ["postgres", "runtime-tokio-native-tls", "chrono"] }
-tokio = { version = "1.35.1", features = ["full"] }
+tokio = { version = "1.39.2", features = ["full"] }
-tokio-stream = "0.1.14"
+tokio-stream = "0.1.15"
-tokio-tungstenite = { version = "0.21.0", features = ["native-tls"] }
+tokio-tungstenite = { version = "0.23.1", features = ["native-tls"] }
@@ -19,7 +19,7 @@ pub trait Algo {
     async fn fetch_posts(
         &self,
         database: &Database,
-        limit: i32,
+        limit: u8,
         earlier_than: Option<(DateTime<Utc>, &str)>,
     ) -> Result<Vec<database::Post>>;
 }
@@ -50,7 +50,7 @@ impl Algo for Nederlandskie {
     async fn fetch_posts(
         &self,
         database: &Database,
-        limit: i32,
+        limit: u8,
         earlier_than: Option<(DateTime<Utc>, &str)>,
     ) -> Result<Vec<database::Post>> {
         Ok(database
@@ -1,12 +1,13 @@
 use anyhow::Result;
+use atrium_api::types::string::Did;
 use dotenv::dotenv;
 use std::env;

 pub struct Config {
     pub chat_gpt_api_key: String,
     pub database_url: String,
-    pub feed_generator_did: String,
+    pub feed_generator_did: Did,
-    pub publisher_did: String,
+    pub publisher_did: Did,
     pub feed_generator_hostname: String,
 }

@@ -18,8 +19,12 @@ impl Config {
             chat_gpt_api_key: env::var("CHAT_GPT_API_KEY")?,
             database_url: env::var("DATABASE_URL")?,
             feed_generator_hostname: env::var("FEED_GENERATOR_HOSTNAME")?,
-            feed_generator_did: format!("did:web:{}", env::var("FEED_GENERATOR_HOSTNAME")?),
-            publisher_did: env::var("PUBLISHER_DID")?,
+            feed_generator_did: format!("did:web:{}", env::var("FEED_GENERATOR_HOSTNAME")?)
+                .parse()
+                .map_err(anyhow::Error::msg)?,
+            publisher_did: env::var("PUBLISHER_DID")?
+                .parse()
+                .map_err(anyhow::Error::msg)?,
         })
     }
 }
@@ -1,8 +1,9 @@
 use std::sync::Arc;

 use atrium_api::app::bsky::feed::describe_feed_generator::{
-    Feed, Output as FeedGeneratorDescription,
+    FeedData, OutputData as FeedGeneratorDescription,
 };
+use atrium_api::types::Object;
 use axum::{extract::State, Json};

 use crate::{algos::Algos, config::Config};
@@ -15,12 +16,14 @@ pub async fn describe_feed_generator(
         did: config.feed_generator_did.clone(),
         feeds: algos
             .iter_names()
-            .map(|name| Feed {
+            .map(|name| FeedData {
                 uri: format!(
                     "at://{}/app.bsky.feed.generator/{}",
-                    config.publisher_did, name
+                    config.publisher_did.as_ref(),
+                    name
                 ),
             })
+            .map(Object::from)
             .collect(),
         links: None,
     })
@@ -25,7 +25,7 @@ pub struct Service {
 pub async fn did_json(State(config): State<Arc<Config>>) -> Json<Did> {
     Json(Did {
         context: vec!["https://www.w3.org/ns/did/v1".to_owned()],
-        id: config.feed_generator_did.clone(),
+        id: config.feed_generator_did.to_string(),
         service: vec![Service {
             id: "#bsky_fg".to_owned(),
             type_: "BskyFeedGenerator".to_owned(),
@@ -1,10 +1,11 @@
 use std::sync::Arc;

 use anyhow::anyhow;
-use atrium_api::app::bsky::feed::defs::SkeletonFeedPost;
+use atrium_api::app::bsky::feed::defs::SkeletonFeedPostData;
 use atrium_api::app::bsky::feed::get_feed_skeleton::{
-    Output as FeedSkeleton, Parameters as FeedSkeletonQuery,
+    OutputData as FeedSkeleton, Parameters as FeedSkeletonQuery,
 };
+use atrium_api::types::{LimitedNonZeroU8, Object};
 use axum::extract::{Query, State};
 use axum::Json;
 use chrono::{DateTime, TimeZone, Utc};
@@ -28,17 +29,23 @@ pub async fn get_feed_skeleton(
         .get_by_name(feed_name)
         .ok_or_else(|| AppError::FeedNotFound(feed_name.to_owned()))?;

-    let limit = query.limit.unwrap_or(20);
+    let limit = query
+        .limit
+        .unwrap_or(LimitedNonZeroU8::try_from(20).expect("this default limit should always work"));
     let earlier_than = query.cursor.as_deref().map(parse_cursor).transpose()?;

-    let posts = algo.fetch_posts(&database, limit, earlier_than).await?;
+    let posts = algo
+        .fetch_posts(&database, limit.into(), earlier_than)
+        .await?;

     let feed = posts
         .iter()
-        .map(|p| SkeletonFeedPost {
+        .map(|p| SkeletonFeedPostData {
             post: p.uri.clone(),
+            feed_context: None,
             reason: None,
         })
+        .map(Object::from)
         .collect();

     let cursor = posts.last().map(|p| make_cursor(&p.indexed_at, &p.cid));
@@ -103,7 +103,9 @@ impl CommitProcessor for PostIndexer {
         if commit.seq % 20 == 0 {
             debug!(
                 "Updating cursor for {} to {} ({})",
-                self.config.feed_generator_did, commit.seq, commit.time
+                self.config.feed_generator_did.as_str(),
+                commit.seq,
+                commit.time
             );
             self.database
                 .update_subscription_cursor(
@@ -37,7 +37,7 @@ impl AI {

         response
             .choices
-            .get(0)
+            .first()
             .map(|choice| choice.message.content.to_lowercase())
             .ok_or_else(|| anyhow!("No choices received from ChatGPT, weird"))
     }
@@ -1,12 +1,12 @@
+use std::fmt::Debug;
 use std::matches;
 use std::time::Duration;

-use anyhow::{anyhow, Result};
+use anyhow::Result;
 use atrium_api::agent::{store::MemorySessionStore, AtpAgent};
-use atrium_api::blob::BlobRef;
-use atrium_api::records::Record;
+use atrium_api::types::string::Datetime;
+use atrium_api::types::{BlobRef, Collection, Object, TryIntoUnknown};
 use atrium_xrpc_client::reqwest::ReqwestClient;
-use chrono::Utc;
 use http::StatusCode;
 use log::error;
 use tokio_stream::StreamExt;
@@ -46,7 +46,7 @@ impl Bluesky {
     pub async fn upload_blob(&self, blob: Vec<u8>) -> Result<BlobRef> {
         let result = self.agent.api.com.atproto.repo.upload_blob(blob).await?;

-        Ok(result.blob)
+        Ok(result.data.blob)
     }

     pub async fn publish_feed(
@@ -58,66 +58,71 @@ impl Bluesky {
         description: &str,
         avatar: Option<BlobRef>,
     ) -> Result<()> {
-        use atrium_api::com::atproto::repo::put_record::Input;
+        use atrium_api::com::atproto::repo::put_record::InputData;

         self.agent
             .api
             .com
             .atproto
             .repo
-            .put_record(Input {
-                collection: "app.bsky.feed.generator".to_owned(),
-                record: Record::AppBskyFeedGenerator(Box::new(
-                    atrium_api::app::bsky::feed::generator::Record {
+            .put_record(
+                InputData {
+                    collection: atrium_api::app::bsky::feed::Generator::nsid(),
+                    record: atrium_api::app::bsky::feed::generator::RecordData {
                         avatar,
-                        created_at: Utc::now().to_rfc3339(),
+                        created_at: Datetime::now(),
                         description: Some(description.to_owned()),
                         description_facets: None,
-                        did: feed_generator_did.to_owned(),
+                        did: feed_generator_did.parse().map_err(anyhow::Error::msg)?,
                         display_name: display_name.to_owned(),
                         labels: None,
-                    },
-                )),
-                repo: publisher_did.to_owned(),
-                rkey: name.to_owned(),
-                swap_commit: None,
-                swap_record: None,
-                validate: None,
-            })
+                        accepts_interactions: None,
+                    }
+                    .try_into_unknown()?,
+                    repo: publisher_did.parse().map_err(anyhow::Error::msg)?,
+                    rkey: name.to_owned(),
+                    swap_commit: None,
+                    swap_record: None,
+                    validate: None,
+                }
+                .into(),
+            )
             .await?;

         Ok(())
     }

     pub async fn fetch_profile_details(&self, did: &str) -> Result<Option<ProfileDetails>> {
+        use atrium_api::com::atproto::repo::get_record::ParametersData;
+
         let result = self
             .agent
             .api
             .com
             .atproto
             .repo
-            .get_record(atrium_api::com::atproto::repo::get_record::Parameters {
-                collection: "app.bsky.actor.profile".to_owned(),
-                cid: None,
-                repo: did.to_owned(),
-                rkey: "self".to_owned(),
-            })
+            .get_record(
+                ParametersData {
+                    collection: atrium_api::app::bsky::actor::Profile::nsid(),
+                    cid: None,
+                    repo: did.parse().map_err(anyhow::Error::msg)?,
+                    rkey: "self".to_owned(),
+                }
+                .into(),
+            )
             .await;

-        let profile_data = match result {
-            Ok(profile_data) => profile_data,
+        let profile_output = match result {
+            Ok(profile_output) => profile_output,
             Err(e) if is_missing_record_error(&e) => return Ok(None),
             Err(e) => return Err(e.into()),
         };

-        match profile_data.value {
-            Record::AppBskyActorProfile(profile) => Ok(Some(ProfileDetails::from(*profile))),
-            _ => Err(anyhow!("Wrong type of record")),
-        }
+        Ok(Some(ProfileDetails::try_from(profile_output.data.value)?))
     }

     pub async fn resolve_handle(&self, handle: &str) -> Result<Option<String>> {
-        use atrium_api::com::atproto::identity::resolve_handle::Parameters;
+        use atrium_api::com::atproto::identity::resolve_handle::ParametersData;

         let result = self
             .agent
@@ -125,13 +130,13 @@ impl Bluesky {
             .com
             .atproto
             .identity
-            .resolve_handle(Parameters {
-                handle: handle.to_owned(),
-            })
+            .resolve_handle(Object::from(ParametersData {
+                handle: handle.parse().map_err(anyhow::Error::msg)?,
+            }))
             .await;

         match result {
-            Ok(result) => Ok(Some(result.did)),
+            Ok(result) => Ok(Some(result.did.to_string())),
             Err(e) if is_unable_to_resolve_handle_error(&e) => Ok(None),
             Err(e) => Err(e.into()),
         }
@@ -168,7 +173,10 @@ impl Bluesky {
     }
 }

-fn is_missing_record_error<T>(error: &atrium_xrpc::error::Error<T>) -> bool {
+fn is_missing_record_error<T>(error: &atrium_xrpc::error::Error<T>) -> bool
+where
+    T: Debug,
+{
     use atrium_xrpc::error::{Error, ErrorResponseBody, XrpcError, XrpcErrorKind};

     matches!(error,
@@ -189,7 +197,10 @@ fn is_missing_record_error<T>(error: &atrium_xrpc::error::Error<T>) -> bool {
     )
 }

-fn is_unable_to_resolve_handle_error<T>(error: &atrium_xrpc::error::Error<T>) -> bool {
+fn is_unable_to_resolve_handle_error<T>(error: &atrium_xrpc::error::Error<T>) -> bool
+where
+    T: Debug,
+{
     use atrium_xrpc::error::{Error, ErrorResponseBody, XrpcError, XrpcErrorKind};

     matches!(error,
@@ -1,4 +1,10 @@
-use atrium_api::app::bsky::actor::profile::Record as ProfileRecord;
+use std::ops::Deref;

+use atrium_api::{
+    app::bsky::actor::profile::RecordData as ProfileRecordData,
+    types::Unknown,
+};
+use ipld_core::ipld::Ipld;
+
 #[derive(Debug)]
 pub struct ProfileDetails {
@@ -6,11 +12,33 @@ pub struct ProfileDetails {
     pub description: String,
 }

-impl From<ProfileRecord> for ProfileDetails {
-    fn from(value: ProfileRecord) -> Self {
+impl From<ProfileRecordData> for ProfileDetails {
+    fn from(value: ProfileRecordData) -> Self {
         Self {
             display_name: value.display_name.unwrap_or_default(),
             description: value.description.unwrap_or_default(),
         }
     }
 }
+
+impl TryFrom<Unknown> for ProfileDetails {
+    type Error = anyhow::Error;
+
+    fn try_from(value: Unknown) -> Result<Self, Self::Error> {
+        let string_or_empty = |value: &Unknown, key: &str| match value {
+            Unknown::Object(map) => match map.get(key) {
+                Some(x) => match x.deref() {
+                    Ipld::String(s) => s.clone(),
+                    _ => "".to_owned(),
+                },
+                _ => "".to_owned(),
+            },
+            _ => "".to_owned(),
+        };
+
+        Ok(ProfileDetails {
+            display_name: string_or_empty(&value, "displayName"),
+            description: string_or_empty(&value, "description"),
+        })
+    }
+}
@@ -1,7 +1,4 @@
-use atrium_api::com::atproto::sync::subscribe_repos::{
-    Commit, Handle, Info, Message, Migrate, Tombstone,
-};
-use libipld_core::ipld::Ipld;
+use ipld_core::ipld::Ipld;
 use std::io::Cursor;

 // original definition:
@@ -52,13 +49,13 @@ impl TryFrom<Ipld> for FrameHeader {

 #[derive(Debug, Clone, PartialEq, Eq)]
 pub enum Frame {
-    Message(Box<MessageFrame>),
+    Message(Option<String>, MessageFrame),
     Error(ErrorFrame),
 }

 #[derive(Debug, Clone, PartialEq, Eq)]
 pub struct MessageFrame {
-    pub body: Message,
+    pub body: Vec<u8>,
 }

 #[derive(Debug, Clone, PartialEq, Eq)]
@@ -81,39 +78,16 @@ impl TryFrom<&[u8]> for Frame {
                 return Err(anyhow::anyhow!("invalid frame type"));
             }
         };
-        let ipld = serde_ipld_dagcbor::from_slice::<Ipld>(left)?;
-        let header = FrameHeader::try_from(ipld)?;
-        match header {
-            FrameHeader::Message(t) => match t.as_deref() {
-                Some("#commit") => Ok(Frame::Message(Box::new(MessageFrame {
-                    body: Message::Commit(Box::new(serde_ipld_dagcbor::from_slice::<Commit>(
-                        right,
-                    )?)),
-                }))),
-                Some("#handle") => Ok(Frame::Message(Box::new(MessageFrame {
-                    body: Message::Handle(Box::new(serde_ipld_dagcbor::from_slice::<Handle>(
-                        right,
-                    )?)),
-                }))),
-                Some("#info") => Ok(Frame::Message(Box::new(MessageFrame {
-                    body: Message::Info(Box::new(serde_ipld_dagcbor::from_slice::<Info>(right)?)),
-                }))),
-                Some("#migrate") => Ok(Frame::Message(Box::new(MessageFrame {
-                    body: Message::Migrate(Box::new(serde_ipld_dagcbor::from_slice::<Migrate>(
-                        right,
-                    )?)),
-                }))),
-                Some("#tombstone") => Ok(Frame::Message(Box::new(MessageFrame {
-                    body: Message::Tombstone(Box::new(
-                        serde_ipld_dagcbor::from_slice::<Tombstone>(right)?,
-                    )),
-                }))),
-                _ => {
-                    let tag = t.as_deref();
-                    Err(anyhow::anyhow!("frame not implemented: tag={tag:?}"))
-                }
-            },
-            FrameHeader::Error => Ok(Frame::Error(ErrorFrame {})),
+        let header = FrameHeader::try_from(serde_ipld_dagcbor::from_slice::<Ipld>(left)?)?;
+        if let FrameHeader::Message(t) = &header {
+            Ok(Frame::Message(
+                t.clone(),
+                MessageFrame {
+                    body: right.to_vec(),
+                },
+            ))
+        } else {
+            Ok(Frame::Error(ErrorFrame {}))
         }
     }
 }
@@ -2,7 +2,8 @@ use std::collections::HashMap;

 use anyhow::Result;
 use async_trait::async_trait;
-use atrium_api::com::atproto::sync::subscribe_repos::{Commit, Message};
+use atrium_api::com::atproto::sync::subscribe_repos::Commit;
+use atrium_api::types::Collection;
 use chrono::{DateTime, Utc};

 use super::{
@@ -11,10 +12,6 @@ use super::{
     internals::ipld::Frame,
 };

-const COLLECTION_POST: &str = "app.bsky.feed.post";
-const COLLECTION_LIKE: &str = "app.bsky.feed.like";
-const COLLECTION_FOLLOW: &str = "app.bsky.graph.follow";
-
 const ACTION_CREATE: &str = "create";
 const ACTION_DELETE: &str = "delete";

@@ -24,7 +21,7 @@ pub trait CommitProcessor {
 }

 pub struct CommitDetails {
-    pub seq: i32,
+    pub seq: i64,
     pub time: DateTime<Utc>,
     pub operations: Vec<Operation>,
 }
@@ -71,7 +68,7 @@ pub async fn handle_message<P: CommitProcessor>(message: &[u8], processor: &P) -
     processor
         .process_commit(&CommitDetails {
             seq: commit.seq,
-            time: commit.time.parse()?,
+            time: (*commit.time.as_ref()).into(),
             operations,
         })
         .await?;
@@ -81,10 +78,14 @@ pub async fn handle_message<P: CommitProcessor>(message: &[u8], processor: &P) -

 fn parse_commit_from_message(message: &[u8]) -> Result<Option<Commit>> {
     match Frame::try_from(message)? {
-        Frame::Message(message) => match message.body {
-            Message::Commit(commit) => Ok(Some(*commit)),
-            _ => Ok(None),
-        },
+        Frame::Message(Some(t), message) => {
+            if t == "#commit" {
+                Ok(serde_ipld_dagcbor::from_reader(message.body.as_slice())?)
+            } else {
+                Ok(None)
+            }
+        }
+        Frame::Message(None, _) => Ok(None),
         Frame::Error(err) => panic!("Frame error: {err:?}"),
     }
 }
@@ -93,17 +94,20 @@ async fn extract_operations(commit: &Commit) -> Result<Vec<Operation>> {
     let mut operations = Vec::new();

     let (blocks, _header) = rs_car::car_read_all(&mut commit.blocks.as_slice(), true).await?;
-    let blocks_by_cid: HashMap<_, _> = blocks.into_iter().collect();
+    let blocks_by_cid: HashMap<_, _> = blocks
+        .into_iter()
+        .map(|(cid, block)| (cid.to_string(), block))
+        .collect();

     for op in &commit.ops {
         let collection = op.path.split('/').next().expect("op.path is empty");
         let action = op.action.as_str();
-        let uri = format!("at://{}/{}", commit.repo, op.path);
+        let uri = format!("at://{}/{}", commit.repo.as_str(), op.path);

         let operation = match action {
             ACTION_CREATE => {
-                let cid = match op.cid {
-                    Some(cid) => cid,
+                let cid = match &op.cid {
+                    Some(cid_link) => cid_link.0.to_string(),
                     None => continue,
                 };

@@ -113,31 +117,31 @@ async fn extract_operations(commit: &Commit) -> Result<Vec<Operation>> {
                 };

                 match collection {
-                    COLLECTION_POST => {
+                    atrium_api::app::bsky::feed::Post::NSID => {
                         let post: PostRecord = read_record(block)?;

                         Operation::CreatePost {
-                            author_did: commit.repo.clone(),
+                            author_did: commit.repo.to_string(),
                             cid: cid.to_string(),
                             uri,
                             post,
                         }
                     }
-                    COLLECTION_LIKE => {
+                    atrium_api::app::bsky::feed::Like::NSID => {
                         let like: LikeRecord = read_record(block)?;

                         Operation::CreateLike {
-                            author_did: commit.repo.clone(),
+                            author_did: commit.repo.to_string(),
                             cid: cid.to_string(),
                             uri,
                             like,
                         }
                     }
-                    COLLECTION_FOLLOW => {
+                    atrium_api::app::bsky::graph::Follow::NSID => {
                         let follow: FollowRecord = read_record(block)?;

                         Operation::CreateFollow {
-                            author_did: commit.repo.clone(),
+                            author_did: commit.repo.to_string(),
                             cid: cid.to_string(),
                             uri,
                             follow,
@@ -147,9 +151,9 @@ async fn extract_operations(commit: &Commit) -> Result<Vec<Operation>> {
                 }
             }
             ACTION_DELETE => match collection {
-                COLLECTION_POST => Operation::DeletePost { uri },
-                COLLECTION_LIKE => Operation::DeleteLike { uri },
-                COLLECTION_FOLLOW => Operation::DeleteFollow { uri },
+                atrium_api::app::bsky::feed::Post::NSID => Operation::DeletePost { uri },
+                atrium_api::app::bsky::feed::Like::NSID => Operation::DeleteLike { uri },
+                atrium_api::app::bsky::graph::Follow::NSID => Operation::DeleteFollow { uri },
                 _ => continue,
             },
             _ => continue,
@@ -251,7 +251,7 @@ impl Database {
         &self,
         host: &str,
         did: &str,
-        cursor: i32,
+        cursor: i64,
     ) -> Result<bool> {
         let mut params = Parameters::new();
