Compare commits

...

4 Commits

Author   SHA1        Message                                     Date
Lennart  a45e0b2efd  carddav: Try out some tests with insta      2025-12-10 12:26:31 +01:00
Lennart  eecc03b7b7  caldav: add debug to principal resource     2025-12-10 12:25:59 +01:00
Lennart  e8303b9c82  main: slight refactoring                    2025-12-10 12:25:13 +01:00
Lennart  a686286d06  sqlite_store: Refactor notification logic   2025-12-10 10:44:41 +01:00
19 changed files with 430 additions and 204 deletions

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "REPLACE INTO davpush_subscriptions (id, topic, expiration, push_resource, public_key, public_key_type, auth_secret) VALUES (?, ?, ?, ?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 7
},
"nullable": []
},
"hash": "583069cbeba5285c63c2b95e989669d3faed66a75fbfc7cd93e5f64b778f45ab"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "INSERT OR REPLACE INTO davpush_subscriptions (id, topic, expiration, push_resource, public_key, public_key_type, auth_secret) VALUES (?, ?, ?, ?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 7
},
"nullable": []
},
"hash": "6d08d3a014743da9b445ab012437ec11f81fd86d3b02fc1df07a036c6b47ace2"
}
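
Note: the two JSON files above are entries in sqlx's offline query cache (the .sqlx/ directory). Each entry is keyed by a hash of the exact query text, so rewriting "INSERT OR REPLACE" as "REPLACE" adds one cache file and removes the old one. In SQLite the two spellings are equivalent: REPLACE is shorthand for INSERT OR REPLACE. Below is a standalone sketch of that equivalence, with a hypothetical table and pool setup that is not code from this change:

// Standalone sketch (hypothetical table, not code from this change): in SQLite,
// REPLACE INTO is shorthand for INSERT OR REPLACE INTO, so the rewritten query
// behaves identically; only the query text, and therefore its hash in .sqlx/, changes.
use sqlx::SqlitePool;

#[tokio::main]
async fn main() -> Result<(), sqlx::Error> {
    let pool = SqlitePool::connect("sqlite::memory:").await?;
    sqlx::query("CREATE TABLE subs (id TEXT PRIMARY KEY, topic TEXT)")
        .execute(&pool)
        .await?;

    // Both spellings upsert the same primary key.
    sqlx::query("INSERT OR REPLACE INTO subs (id, topic) VALUES (?, ?)")
        .bind("sub-1")
        .bind("topic-a")
        .execute(&pool)
        .await?;
    sqlx::query("REPLACE INTO subs (id, topic) VALUES (?, ?)")
        .bind("sub-1")
        .bind("topic-b")
        .execute(&pool)
        .await?;

    let (topic,): (String,) = sqlx::query_as("SELECT topic FROM subs WHERE id = ?")
        .bind("sub-1")
        .fetch_one(&pool)
        .await?;
    assert_eq!(topic, "topic-b"); // exactly one row remains, holding the last value
    Ok(())
}

After editing a sqlx::query! string, the offline cache is regenerated with cargo sqlx prepare (sqlx-cli), which is presumably how the added and removed JSON entries above came about.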

Cargo.lock (generated), 12 changed lines
View File

@@ -1789,6 +1789,17 @@ version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c8fae54786f62fb2918dcfae3d568594e50eb9b5c25bf04371af6fe7516452fb"
[[package]]
name = "insta"
version = "1.44.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b5c943d4415edd8153251b6f197de5eb1640e56d84e8d9159bea190421c73698"
dependencies = [
"console",
"once_cell",
"similar",
]
[[package]]
name = "ipnet"
version = "2.11.0"
@@ -3142,6 +3153,7 @@ dependencies = [
"futures-util",
"http",
"ical",
"insta",
"percent-encoding",
"quick-xml",
"rustical_dav",

View File

@@ -149,6 +149,7 @@ ece = { version = "2.3", default-features = false, features = [
openssl = { version = "0.10", features = ["vendored"] }
async-std = { version = "1.13", features = ["attributes"] }
similar-asserts = "1.7"
insta = "1.44"
[dependencies]
rustical_store.workspace = true

View File

@@ -6,7 +6,7 @@ use rustical_store::auth::PrincipalType;
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
use strum_macros::VariantArray;
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName, Debug)]
#[xml(unit_variants_ident = "PrincipalPropName")]
pub enum PrincipalProp {
// Scheduling Extensions to CalDAV (RFC 6638)
@@ -34,17 +34,17 @@ pub enum PrincipalProp {
CalendarHomeSet(CalendarHomeSet),
}
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, Debug)]
pub struct CalendarHomeSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName, Debug)]
#[xml(unit_variants_ident = "PrincipalPropWrapperName", untagged)]
pub enum PrincipalPropWrapper {
Principal(PrincipalProp),
Common(CommonPropertiesProp),
}
#[derive(XmlSerialize, PartialEq, Eq, Clone, VariantArray)]
#[derive(XmlSerialize, PartialEq, Eq, Debug, Clone, VariantArray)]
pub enum ReportMethod {
// We don't actually support principal-match
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
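
Note: the Debug derives added above (and the matching ones on the carddav and common property types further down) are what let the new insta test snapshot a whole PROPFIND response with assert_debug_snapshot!: every type reachable from the response has to implement Debug. A small illustrative sketch follows; the types are stand-ins, not the real prop enums from this change:

// Illustrative stand-in types, not the real prop enums from this change:
// `{:?}` / `{:#?}` and `insta::assert_debug_snapshot!` only work when every
// nested field implements `Debug`, which is why the derive is added throughout.
#[derive(Debug, Clone, PartialEq, Eq)]
struct HrefElement {
    href: String,
}

#[derive(Debug)]
enum Prop {
    PrincipalUrl(HrefElement),
    Displayname(Option<String>),
}

fn main() {
    let props = vec![
        Prop::PrincipalUrl(HrefElement { href: "/carddav/principal/user/".into() }),
        Prop::Displayname(Some("user".into())),
    ];
    // Pretty-printed Debug output, the same representation insta stores in .snap files.
    println!("{props:#?}");
}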

View File

@@ -35,3 +35,6 @@ percent-encoding.workspace = true
ical.workspace = true
strum.workspace = true
strum_macros.workspace = true
[dev-dependencies]
insta.workspace = true

View File

@@ -11,11 +11,13 @@ mod service;
pub use service::*;
mod prop;
pub use prop::*;
#[cfg(test)]
pub mod tests;
#[derive(Debug, Clone)]
pub struct PrincipalResource {
principal: Principal,
members: Vec<String>,
pub principal: Principal,
pub members: Vec<String>,
}
impl ResourceName for PrincipalResource {

View File

@@ -4,7 +4,7 @@ use rustical_dav::{
};
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName, Debug)]
#[xml(unit_variants_ident = "PrincipalPropName")]
pub enum PrincipalProp {
// WebDAV Access Control (RFC 3744)
@@ -27,10 +27,10 @@ pub enum PrincipalProp {
PrincipalAddress(Option<HrefElement>),
}
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, Debug)]
pub struct AddressbookHomeSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName, Debug)]
#[xml(unit_variants_ident = "PrincipalPropWrapperName", untagged)]
pub enum PrincipalPropWrapper {
Principal(PrincipalProp),

View File

@@ -0,0 +1,125 @@
---
source: crates/carddav/src/principal/tests.rs
expression: response
---
ResponseElement {
href: "/carddav/principal/user/",
status: None,
propstat: [
Normal(
PropstatElement {
prop: PropTagWrapper(
[
Principal(
PrincipalUrl(
HrefElement {
href: "/carddav/principal/user/",
},
),
),
Principal(
GroupMembership(
GroupMembership(
[
HrefElement {
href: "/carddav/principal/group/",
},
],
),
),
),
Principal(
GroupMemberSet(
GroupMemberSet(
[],
),
),
),
Principal(
AlternateUriSet,
),
Principal(
PrincipalCollectionSet(
HrefElement {
href: "/carddav/principal/",
},
),
),
Principal(
AddressbookHomeSet(
AddressbookHomeSet(
[
HrefElement {
href: "/carddav/principal/group/",
},
HrefElement {
href: "/carddav/principal/user/",
},
],
),
),
),
Principal(
PrincipalAddress(
None,
),
),
Common(
Resourcetype(
Resourcetype(
[
ResourcetypeInner(
Some(
Namespace("DAV:"),
),
"collection",
),
ResourcetypeInner(
Some(
Namespace("DAV:"),
),
"principal",
),
],
),
),
),
Common(
Displayname(
Some(
"user",
),
),
),
Common(
CurrentUserPrincipal(
HrefElement {
href: "/carddav/principal/user/",
},
),
),
Common(
CurrentUserPrivilegeSet(
UserPrivilegeSet {
privileges: {
All,
},
},
),
),
Common(
Owner(
Some(
HrefElement {
href: "/carddav/principal/user/",
},
),
),
),
],
),
status: 200,
},
),
],
}

View File

@@ -0,0 +1,45 @@
---
source: crates/carddav/src/principal/tests.rs
expression: response.serialize_to_string().unwrap()
---
<?xml version="1.0" encoding="utf-8"?>
<response xmlns="DAV:" xmlns:CAL="urn:ietf:params:xml:ns:caldav" xmlns:CARD="urn:ietf:params:xml:ns:carddav" xmlns:CS="http://calendarserver.org/ns/" xmlns:PUSH="https://bitfire.at/webdav-push">
<href>/carddav/principal/user/</href>
<propstat>
<prop>
<principal-URL>
<href>/carddav/principal/user/</href>
</principal-URL>
<group-membership>
<href>/carddav/principal/group/</href>
</group-membership>
<group-member-set>
</group-member-set>
<alternate-URI-set/>
<principal-collection-set>
<href>/carddav/principal/</href>
</principal-collection-set>
<CARD:addressbook-home-set>
<href>/carddav/principal/group/</href>
<href>/carddav/principal/user/</href>
</CARD:addressbook-home-set>
<resourcetype>
<collection/>
<principal/>
</resourcetype>
<displayname>user</displayname>
<current-user-principal>
<href>/carddav/principal/user/</href>
</current-user-principal>
<current-user-privilege-set>
<privilege>
<all/>
</privilege>
</current-user-privilege-set>
<owner>
<href>/carddav/principal/user/</href>
</owner>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
</response>

View File

@@ -0,0 +1,8 @@
---
source: crates/carddav/src/principal/tests.rs
expression: propfind
---
PropfindElement {
prop: Allprop,
include: None,
}

View File

@@ -0,0 +1,41 @@
use rustical_dav::resource::Resource;
use rustical_store::auth::Principal;
use rustical_xml::XmlSerializeRoot;
use crate::{CardDavPrincipalUri, principal::PrincipalResource};
#[test]
fn test_propfind() {
let propfind = PrincipalResource::parse_propfind(
r#"<?xml version="1.0" encoding="UTF-8"?><propfind xmlns="DAV:"><allprop/></propfind>"#,
)
.unwrap();
insta::assert_debug_snapshot!(propfind);
let principal = Principal {
id: "user".to_string(),
displayname: None,
principal_type: rustical_store::auth::PrincipalType::Individual,
password: None,
memberships: vec!["group".to_string()],
};
let resource = PrincipalResource {
principal: principal.clone(),
members: vec![],
};
let response = resource
.propfind(
&format!("/carddav/principal/{}", principal.id),
&propfind.prop,
propfind.include.as_ref(),
&CardDavPrincipalUri("/carddav"),
&principal,
)
.unwrap();
insta::assert_debug_snapshot!(response);
insta::assert_snapshot!(response.serialize_to_string().unwrap());
}
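
Note on the workflow: the three .snap files above are the snapshots this test produces. insta stores each asserted value in a snapshot file next to the test, with a small header recording the source file and the snapshotted expression. On the first run the snapshot is written; on later runs the assertion fails if the rendered value differs, and the companion cargo-insta tool (cargo insta test, cargo insta review) is the usual way to inspect and accept changes. The tiny test below only illustrates the mechanism and is not project code:

// Minimal illustration of the insta mechanism (not project code):
#[cfg(test)]
mod tests {
    #[test]
    fn snapshot_examples() {
        // Stored as a plain-text snapshot.
        insta::assert_snapshot!(format!("Hello, {}!", "user"));
        // Stored as the value's pretty-printed Debug representation,
        // which is why the response types need #[derive(Debug)].
        insta::assert_debug_snapshot!(vec![1, 2, 3]);
    }
}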

View File

@@ -6,7 +6,7 @@ use crate::{
};
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, PropName, EnumVariants)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Debug, Clone, PropName, EnumVariants)]
#[xml(unit_variants_ident = "CommonPropertiesPropName")]
pub enum CommonPropertiesProp {
// WebDAV (RFC 2518)

View File

@@ -1,8 +1,8 @@
use crate::xml::HrefElement;
use rustical_xml::{XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, Debug)]
pub struct GroupMembership(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, Debug)]
pub struct GroupMemberSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);

View File

@@ -3,9 +3,9 @@ use headers::{CacheControl, ContentType, HeaderMapExt};
use http::StatusCode;
use quick_xml::name::Namespace;
use rustical_xml::{XmlRootTag, XmlSerialize, XmlSerializeRoot};
use std::collections::HashMap;
use std::{collections::HashMap, fmt::Debug};
#[derive(XmlSerialize)]
#[derive(XmlSerialize, Debug)]
pub struct PropTagWrapper<T: XmlSerialize>(#[xml(flatten, ty = "untagged")] pub Vec<T>);
// RFC 2518
@@ -30,7 +30,7 @@ fn xml_serialize_status(
XmlSerialize::serialize(&format!("HTTP/1.1 {status}"), ns, tag, namespaces, writer)
}
#[derive(XmlSerialize)]
#[derive(XmlSerialize, Debug)]
#[xml(untagged)]
pub enum PropstatWrapper<T: XmlSerialize> {
Normal(PropstatElement<PropTagWrapper<T>>),
@@ -40,7 +40,7 @@ pub enum PropstatWrapper<T: XmlSerialize> {
// RFC 2518
// <!ELEMENT response (href, ((href*, status)|(propstat+)),
// responsedescription?) >
#[derive(XmlSerialize, XmlRootTag)]
#[derive(XmlSerialize, XmlRootTag, Debug)]
#[xml(ns = "crate::namespace::NS_DAV", root = "response")]
#[xml(ns_prefix(
crate::namespace::NS_DAV = "",

View File

@@ -9,7 +9,7 @@ use rustical_store::{
};
use sqlx::{Acquire, Executor, Sqlite, SqlitePool, Transaction};
use tokio::sync::mpsc::Sender;
use tracing::{error, instrument, warn};
use tracing::{error_span, instrument, warn};
pub mod birthday_calendar;
@@ -74,7 +74,7 @@ impl SqliteAddressbookStore {
"Commiting orphaned addressbook object ({},{},{}), deleted={}",
&row.principal, &row.addressbook_id, &row.id, &row.deleted
);
log_object_operation(
Self::log_object_operation(
&mut tx,
&row.principal,
&row.addressbook_id,
@@ -88,6 +88,57 @@ impl SqliteAddressbookStore {
Ok(())
}
// Logs an operation to an address object
async fn log_object_operation(
tx: &mut Transaction<'_, Sqlite>,
principal: &str,
addressbook_id: &str,
object_id: &str,
operation: ChangeOperation,
) -> Result<String, Error> {
struct Synctoken {
synctoken: i64,
}
let Synctoken { synctoken } = sqlx::query_as!(
Synctoken,
r#"
UPDATE addressbooks
SET synctoken = synctoken + 1
WHERE (principal, id) = (?1, ?2)
RETURNING synctoken"#,
principal,
addressbook_id
)
.fetch_one(&mut **tx)
.await
.map_err(crate::Error::from)?;
sqlx::query!(
r#"
INSERT INTO addressobjectchangelog (principal, addressbook_id, object_id, "operation", synctoken)
VALUES (?1, ?2, ?3, ?4, (
SELECT synctoken FROM addressbooks WHERE (principal, id) = (?1, ?2)
))"#,
principal,
addressbook_id,
object_id,
operation
)
.execute(&mut **tx)
.await
.map_err(crate::Error::from)?;
Ok(format_synctoken(synctoken))
}
fn send_push_notification(&self, data: CollectionOperationInfo, topic: String) {
if let Err(err) = self.sender.try_send(CollectionOperation { topic, data }) {
error_span!(
"Error trying to send addressbook update notification:",
err = format!("{err:?}"),
);
}
}
async fn _get_addressbook<'e, E: Executor<'e, Database = Sqlite>>(
executor: E,
principal: &str,
@@ -496,13 +547,8 @@ impl AddressbookStore for SqliteAddressbookStore {
Self::_delete_addressbook(&mut *tx, principal, addressbook_id, use_trashbin).await?;
tx.commit().await.map_err(crate::Error::from)?;
if let Some(addressbook) = addressbook
&& let Err(err) = self.sender.try_send(CollectionOperation {
data: CollectionOperationInfo::Delete,
topic: addressbook.push_topic,
})
{
error!("Push notification about deleted addressbook failed: {err}");
if let Some(addressbook) = addressbook {
self.send_push_notification(CollectionOperationInfo::Delete, addressbook.push_topic);
}
Ok(())
@@ -588,7 +634,7 @@ impl AddressbookStore for SqliteAddressbookStore {
Self::_put_object(&mut *tx, &principal, &addressbook_id, &object, overwrite).await?;
let sync_token = log_object_operation(
let sync_token = Self::log_object_operation(
&mut tx,
&principal,
&addressbook_id,
@@ -600,15 +646,12 @@ impl AddressbookStore for SqliteAddressbookStore {
tx.commit().await.map_err(crate::Error::from)?;
if let Err(err) = self.sender.try_send(CollectionOperation {
data: CollectionOperationInfo::Content { sync_token },
topic: self
.get_addressbook(&principal, &addressbook_id, false)
self.send_push_notification(
CollectionOperationInfo::Content { sync_token },
self.get_addressbook(&principal, &addressbook_id, false)
.await?
.push_topic,
}) {
error!("Push notification about deleted addressbook failed: {err}");
}
);
Ok(())
}
@@ -629,7 +672,7 @@ impl AddressbookStore for SqliteAddressbookStore {
Self::_delete_object(&mut *tx, principal, addressbook_id, object_id, use_trashbin).await?;
let sync_token = log_object_operation(
let sync_token = Self::log_object_operation(
&mut tx,
principal,
addressbook_id,
@@ -641,15 +684,12 @@ impl AddressbookStore for SqliteAddressbookStore {
tx.commit().await.map_err(crate::Error::from)?;
if let Err(err) = self.sender.try_send(CollectionOperation {
data: CollectionOperationInfo::Content { sync_token },
topic: self
.get_addressbook(principal, addressbook_id, false)
self.send_push_notification(
CollectionOperationInfo::Content { sync_token },
self.get_addressbook(principal, addressbook_id, false)
.await?
.push_topic,
}) {
error!("Push notification about deleted addressbook failed: {err}");
}
);
Ok(())
}
@@ -668,7 +708,7 @@ impl AddressbookStore for SqliteAddressbookStore {
Self::_restore_object(&mut *tx, principal, addressbook_id, object_id).await?;
let sync_token = log_object_operation(
let sync_token = Self::log_object_operation(
&mut tx,
principal,
addressbook_id,
@@ -679,15 +719,12 @@ impl AddressbookStore for SqliteAddressbookStore {
.map_err(crate::Error::from)?;
tx.commit().await.map_err(crate::Error::from)?;
if let Err(err) = self.sender.try_send(CollectionOperation {
data: CollectionOperationInfo::Content { sync_token },
topic: self
.get_addressbook(principal, addressbook_id, false)
self.send_push_notification(
CollectionOperationInfo::Content { sync_token },
self.get_addressbook(principal, addressbook_id, false)
.await?
.push_topic,
}) {
error!("Push notification about restored addressbook object failed: {err}");
}
);
Ok(())
}
@@ -732,7 +769,7 @@ impl AddressbookStore for SqliteAddressbookStore {
.await?;
sync_token = Some(
log_object_operation(
Self::log_object_operation(
&mut tx,
&addressbook.principal,
&addressbook.id,
@@ -744,59 +781,14 @@ impl AddressbookStore for SqliteAddressbookStore {
}
tx.commit().await.map_err(crate::Error::from)?;
if let Some(sync_token) = sync_token
&& let Err(err) = self.sender.try_send(CollectionOperation {
data: CollectionOperationInfo::Content { sync_token },
topic: self
.get_addressbook(&addressbook.principal, &addressbook.id, true)
if let Some(sync_token) = sync_token {
self.send_push_notification(
CollectionOperationInfo::Content { sync_token },
self.get_addressbook(&addressbook.principal, &addressbook.id, true)
.await?
.push_topic,
})
{
error!("Push notification about imported addressbook failed: {err}");
);
}
Ok(())
}
}
// Logs an operation to an address object
async fn log_object_operation(
tx: &mut Transaction<'_, Sqlite>,
principal: &str,
addressbook_id: &str,
object_id: &str,
operation: ChangeOperation,
) -> Result<String, Error> {
struct Synctoken {
synctoken: i64,
}
let Synctoken { synctoken } = sqlx::query_as!(
Synctoken,
r#"
UPDATE addressbooks
SET synctoken = synctoken + 1
WHERE (principal, id) = (?1, ?2)
RETURNING synctoken"#,
principal,
addressbook_id
)
.fetch_one(&mut **tx)
.await
.map_err(crate::Error::from)?;
sqlx::query!(
r#"
INSERT INTO addressobjectchangelog (principal, addressbook_id, object_id, "operation", synctoken)
VALUES (?1, ?2, ?3, ?4, (
SELECT synctoken FROM addressbooks WHERE (principal, id) = (?1, ?2)
))"#,
principal,
addressbook_id,
object_id,
operation
)
.execute(&mut **tx)
.await
.map_err(crate::Error::from)?;
Ok(format_synctoken(synctoken))
}
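
Note: the refactor above folds the repeated try_send-plus-error-logging blocks into a single send_push_notification helper and turns the free log_object_operation function into an associated one. The helper sends on a bounded tokio mpsc channel, so it never blocks: if the push worker is slow or gone, the notification is dropped and only the failure is recorded. A self-contained sketch of that behaviour follows; the channel size and payload are illustrative, not project code:

// Illustrative sketch of the non-blocking send the helper wraps (not project code):
use tokio::sync::mpsc::{channel, error::TrySendError};

#[tokio::main]
async fn main() {
    let (tx, mut rx) = channel::<String>(1);

    // Fits into the single-slot buffer.
    tx.try_send("sync_token=42".into()).unwrap();

    // Buffer is full: try_send returns immediately instead of waiting.
    match tx.try_send("sync_token=43".into()) {
        Err(TrySendError::Full(dropped)) => eprintln!("push channel full, dropping {dropped}"),
        Err(TrySendError::Closed(_)) => eprintln!("push worker has shut down"),
        Ok(()) => {}
    }

    assert_eq!(rx.recv().await.as_deref(), Some("sync_token=42"));
}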

View File

@@ -11,7 +11,7 @@ use rustical_store::{CollectionOperation, CollectionOperationInfo};
use sqlx::types::chrono::NaiveDateTime;
use sqlx::{Acquire, Executor, Sqlite, SqlitePool, Transaction};
use tokio::sync::mpsc::Sender;
use tracing::{error, instrument, warn};
use tracing::{error_span, instrument, warn};
#[derive(Debug, Clone)]
struct CalendarObjectRow {
@@ -94,6 +94,57 @@ pub struct SqliteCalendarStore {
}
impl SqliteCalendarStore {
// Logs an operation to the events
async fn log_object_operation(
tx: &mut Transaction<'_, Sqlite>,
principal: &str,
cal_id: &str,
object_id: &str,
operation: ChangeOperation,
) -> Result<String, Error> {
struct Synctoken {
synctoken: i64,
}
let Synctoken { synctoken } = sqlx::query_as!(
Synctoken,
r#"
UPDATE calendars
SET synctoken = synctoken + 1
WHERE (principal, id) = (?1, ?2)
RETURNING synctoken"#,
principal,
cal_id
)
.fetch_one(&mut **tx)
.await
.map_err(crate::Error::from)?;
sqlx::query!(
r#"
INSERT INTO calendarobjectchangelog (principal, cal_id, object_id, "operation", synctoken)
VALUES (?1, ?2, ?3, ?4, (
SELECT synctoken FROM calendars WHERE (principal, id) = (?1, ?2)
))"#,
principal,
cal_id,
object_id,
operation
)
.execute(&mut **tx)
.await
.map_err(crate::Error::from)?;
Ok(format_synctoken(synctoken))
}
fn send_push_notification(&self, data: CollectionOperationInfo, topic: String) {
if let Err(err) = self.sender.try_send(CollectionOperation { topic, data }) {
error_span!(
"Error trying to send calendar update notification:",
err = format!("{err:?}"),
);
}
}
// Commit "orphaned" objects to the changelog table
pub async fn repair_orphans(&self) -> Result<(), Error> {
struct Row {
@@ -134,7 +185,8 @@ impl SqliteCalendarStore {
"Commiting orphaned calendar object ({},{},{}), deleted={}",
&row.principal, &row.cal_id, &row.id, &row.deleted
);
log_object_operation(&mut tx, &row.principal, &row.cal_id, &row.id, operation).await?;
Self::log_object_operation(&mut tx, &row.principal, &row.cal_id, &row.id, operation)
.await?;
}
tx.commit().await.map_err(crate::Error::from)?;
@@ -605,13 +657,8 @@ impl CalendarStore for SqliteCalendarStore {
Self::_delete_calendar(&mut *tx, principal, id, use_trashbin).await?;
tx.commit().await.map_err(crate::Error::from)?;
if let Some(cal) = cal
&& let Err(err) = self.sender.try_send(CollectionOperation {
data: CollectionOperationInfo::Delete,
topic: cal.push_topic,
})
{
error!("Push notification about deleted calendar failed: {err}");
if let Some(cal) = cal {
self.send_push_notification(CollectionOperationInfo::Delete, cal.push_topic);
}
Ok(())
}
@@ -652,7 +699,7 @@ impl CalendarStore for SqliteCalendarStore {
Self::_put_object(&mut *tx, &calendar.principal, &calendar.id, &object, false).await?;
sync_token = Some(
log_object_operation(
Self::log_object_operation(
&mut tx,
&calendar.principal,
&calendar.id,
@@ -665,16 +712,13 @@ impl CalendarStore for SqliteCalendarStore {
tx.commit().await.map_err(crate::Error::from)?;
if let Some(sync_token) = sync_token
&& let Err(err) = self.sender.try_send(CollectionOperation {
data: CollectionOperationInfo::Content { sync_token },
topic: self
.get_calendar(&calendar.principal, &calendar.id, true)
if let Some(sync_token) = sync_token {
self.send_push_notification(
CollectionOperationInfo::Content { sync_token },
self.get_calendar(&calendar.principal, &calendar.id, true)
.await?
.push_topic,
})
{
error!("Push notification about imported calendar failed: {err}");
);
}
Ok(())
}
@@ -755,7 +799,7 @@ impl CalendarStore for SqliteCalendarStore {
Self::_put_object(&mut *tx, &principal, &cal_id, &object, overwrite).await?;
let sync_token = log_object_operation(
let sync_token = Self::log_object_operation(
&mut tx,
&principal,
&cal_id,
@@ -766,15 +810,12 @@ impl CalendarStore for SqliteCalendarStore {
tx.commit().await.map_err(crate::Error::from)?;
if let Err(err) = self.sender.try_send(CollectionOperation {
data: CollectionOperationInfo::Content { sync_token },
topic: self
.get_calendar(&principal, &cal_id, true)
self.send_push_notification(
CollectionOperationInfo::Content { sync_token },
self.get_calendar(&principal, &cal_id, true)
.await?
.push_topic,
}) {
error!("Push notification about deleted calendar failed: {err}");
}
);
Ok(())
}
@@ -795,15 +836,15 @@ impl CalendarStore for SqliteCalendarStore {
Self::_delete_object(&mut *tx, principal, cal_id, id, use_trashbin).await?;
let sync_token =
log_object_operation(&mut tx, principal, cal_id, id, ChangeOperation::Delete).await?;
Self::log_object_operation(&mut tx, principal, cal_id, id, ChangeOperation::Delete)
.await?;
tx.commit().await.map_err(crate::Error::from)?;
if let Err(err) = self.sender.try_send(CollectionOperation {
data: CollectionOperationInfo::Content { sync_token },
topic: self.get_calendar(principal, cal_id, true).await?.push_topic,
}) {
error!("Push notification about deleted calendar failed: {err}");
}
self.send_push_notification(
CollectionOperationInfo::Content { sync_token },
self.get_calendar(principal, cal_id, true).await?.push_topic,
);
Ok(())
}
@@ -823,16 +864,14 @@ impl CalendarStore for SqliteCalendarStore {
Self::_restore_object(&mut *tx, principal, cal_id, object_id).await?;
let sync_token =
log_object_operation(&mut tx, principal, cal_id, object_id, ChangeOperation::Add)
Self::log_object_operation(&mut tx, principal, cal_id, object_id, ChangeOperation::Add)
.await?;
tx.commit().await.map_err(crate::Error::from)?;
if let Err(err) = self.sender.try_send(CollectionOperation {
data: CollectionOperationInfo::Content { sync_token },
topic: self.get_calendar(principal, cal_id, true).await?.push_topic,
}) {
error!("Push notification about restored calendar object failed: {err}");
}
self.send_push_notification(
CollectionOperationInfo::Content { sync_token },
self.get_calendar(principal, cal_id, true).await?.push_topic,
);
Ok(())
}
@@ -850,46 +889,3 @@ impl CalendarStore for SqliteCalendarStore {
false
}
}
// Logs an operation to the events
// TODO: Log multiple updates
async fn log_object_operation(
tx: &mut Transaction<'_, Sqlite>,
principal: &str,
cal_id: &str,
object_id: &str,
operation: ChangeOperation,
) -> Result<String, Error> {
struct Synctoken {
synctoken: i64,
}
let Synctoken { synctoken } = sqlx::query_as!(
Synctoken,
r#"
UPDATE calendars
SET synctoken = synctoken + 1
WHERE (principal, id) = (?1, ?2)
RETURNING synctoken"#,
principal,
cal_id
)
.fetch_one(&mut **tx)
.await
.map_err(crate::Error::from)?;
sqlx::query!(
r#"
INSERT INTO calendarobjectchangelog (principal, cal_id, object_id, "operation", synctoken)
VALUES (?1, ?2, ?3, ?4, (
SELECT synctoken FROM calendars WHERE (principal, id) = (?1, ?2)
))"#,
principal,
cal_id,
object_id,
operation
)
.execute(&mut **tx)
.await
.map_err(crate::Error::from)?;
Ok(format_synctoken(synctoken))
}

View File

@@ -37,7 +37,7 @@ impl SubscriptionStore for SqliteStore {
Err(err) => return Err(err),
};
sqlx::query!(
r#"INSERT OR REPLACE INTO davpush_subscriptions (id, topic, expiration, push_resource, public_key, public_key_type, auth_secret) VALUES (?, ?, ?, ?, ?, ?, ?)"#,
r#"REPLACE INTO davpush_subscriptions (id, topic, expiration, push_resource, public_key, public_key_type, auth_secret) VALUES (?, ?, ?, ?, ?, ?, ?)"#,
sub.id,
sub.topic,
sub.expiration,

View File

@@ -91,21 +91,22 @@ async fn get_data_stores(
async fn main() -> Result<()> {
let args = Args::parse();
let parse_config = || {
Figment::new()
.merge(Toml::file(&args.config_file))
.merge(Env::prefixed("RUSTICAL_").split("__"))
.extract()
};
match args.command {
Some(Command::GenConfig(gen_config_args)) => cmd_gen_config(gen_config_args)?,
Some(Command::Principals(principals_args)) => cmd_principals(principals_args).await?,
Some(Command::Health(health_args)) => {
let config: Config = Figment::new()
.merge(Toml::file(&args.config_file))
.merge(Env::prefixed("RUSTICAL_").split("__"))
.extract()?;
let config: Config = parse_config()?;
cmd_health(config.http, health_args).await?;
}
None => {
let config: Config = Figment::new()
.merge(Toml::file(&args.config_file))
.merge(Env::prefixed("RUSTICAL_").split("__"))
.extract()?;
let config: Config = parse_config()?;
setup_tracing(&config.tracing);
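
Note: the main.rs change above extracts the duplicated Figment setup into a local parse_config closure, so the TOML-plus-environment merge is written once and only the subcommands that need a Config pay for parsing it. A reduced sketch of the same pattern follows; the Config fields and file name are placeholders, not the real rustical configuration:

// Reduced sketch of the parse_config pattern (placeholder Config, not project code):
use figment::{Figment, providers::{Env, Format, Toml}};
use serde::Deserialize;

#[derive(Deserialize, Debug)]
struct Config {
    // Placeholder field; the real Config is defined elsewhere in the crate.
    port: Option<u16>,
}

fn main() -> Result<(), figment::Error> {
    let config_file = "config.toml";
    // Built once, invoked only in the branches that actually need a Config.
    let parse_config = || {
        Figment::new()
            .merge(Toml::file(config_file))
            .merge(Env::prefixed("RUSTICAL_").split("__"))
            .extract::<Config>()
    };

    let config = parse_config()?;
    println!("{config:?}");
    Ok(())
}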