run clippy fix

Lennart K
2025-10-27 19:01:04 +01:00
parent 08041c60be
commit 0d071d3b92
94 changed files with 455 additions and 484 deletions


@@ -45,13 +45,13 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
     // Extract calendar metadata
     let displayname = cal
         .get_property("X-WR-CALNAME")
-        .and_then(|prop| prop.value.to_owned());
+        .and_then(|prop| prop.value.clone());
     let description = cal
         .get_property("X-WR-CALDESC")
-        .and_then(|prop| prop.value.to_owned());
+        .and_then(|prop| prop.value.clone());
     let timezone_id = cal
         .get_property("X-WR-TIMEZONE")
-        .and_then(|prop| prop.value.to_owned());
+        .and_then(|prop| prop.value.clone());
     // These properties should not appear in the expanded calendar objects
     cal.remove_property("X-WR-CALNAME");
     cal.remove_property("X-WR-CALDESC");


@@ -79,8 +79,8 @@ pub async fn route_mkcalendar<C: CalendarStore, S: SubscriptionStore>(
         _ => unreachable!("We never call with another method"),
     };
-    if let Some("") = request.displayname.as_deref() {
-        request.displayname = None
+    if request.displayname.as_deref() == Some("") {
+        request.displayname = None;
     }
     let timezone_id = if let Some(tzid) = request.calendar_timezone_id {
@@ -110,8 +110,8 @@ pub async fn route_mkcalendar<C: CalendarStore, S: SubscriptionStore>(
     };
     let calendar = Calendar {
-        id: cal_id.to_owned(),
-        principal: principal.to_owned(),
+        id: cal_id.clone(),
+        principal: principal.clone(),
         meta: CalendarMetadata {
             order: request.calendar_order.unwrap_or(0),
             displayname: request.displayname,
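The displayname check in this hunk is a case where an if let pattern match is really just an equality test; Clippy (presumably the equatable_if_let lint) prefers the == form, which reads left to right. A rough sketch with made-up request data:

// Hypothetical sketch: treat an empty displayname as "no displayname".
fn normalize(mut displayname: Option<String>) -> Option<String> {
    // old shape: if let Some("") = displayname.as_deref() { ... }
    if displayname.as_deref() == Some("") {
        displayname = None;
    }
    displayname
}

fn main() {
    assert_eq!(normalize(Some(String::new())), None);
    assert_eq!(normalize(Some("Work".into())), Some("Work".into()));
}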


@@ -49,12 +49,11 @@ pub async fn route_post<C: CalendarStore, S: SubscriptionStore>(
     };
     let subscription = Subscription {
-        id: sub_id.to_owned(),
+        id: sub_id.clone(),
         push_resource: request
             .subscription
             .web_push_subscription
-            .push_resource
-            .to_owned(),
+            .push_resource.clone(),
         topic: calendar_resource.cal.push_topic,
         expiration: expires.naive_local(),
         public_key: request


@@ -4,10 +4,10 @@ use rustical_ical::CalendarObject;
 use rustical_store::CalendarStore;
 use rustical_xml::XmlDeserialize;
-#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
+#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
 #[allow(dead_code)]
 // <!ELEMENT calendar-query ((DAV:allprop | DAV:propname | DAV:prop)?, href+)>
-pub(crate) struct CalendarMultigetRequest {
+pub struct CalendarMultigetRequest {
     #[xml(ty = "untagged")]
     pub(crate) prop: PropfindType<CalendarObjectPropWrapperName>,
     #[xml(flatten)]
@@ -27,13 +27,13 @@ pub async fn get_objects_calendar_multiget<C: CalendarStore>(
     for href in &cal_query.href {
         if let Some(filename) = href.strip_prefix(path) {
-            let filename = filename.trim_start_matches("/");
+            let filename = filename.trim_start_matches('/');
             if let Some(object_id) = filename.strip_suffix(".ics") {
                 match store.get_object(principal, cal_id, object_id, false).await {
                     Ok(object) => result.push(object),
                     Err(rustical_store::Error::NotFound) => not_found.push(href.to_owned()),
                     Err(err) => return Err(err.into()),
-                };
+                }
             } else {
                 not_found.push(href.to_owned());
                 continue;
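trim_start_matches("/") and trim_start_matches('/') behave identically here; Clippy's single_char_pattern lint prefers the char literal because a single-character pattern is cheaper to match than a one-character string. A small sketch with an invented href:

fn main() {
    let href = "/calendars/user/personal/event.ics"; // made-up path
    let filename = href
        .strip_prefix("/calendars/user/personal")
        .unwrap()
        .trim_start_matches('/'); // char pattern instead of "/"
    assert_eq!(filename.strip_suffix(".ics"), Some("event"));
}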


@@ -3,18 +3,17 @@ use rustical_dav::xml::PropfindType;
 use rustical_ical::{CalendarObject, UtcDateTime};
 use rustical_store::calendar_store::CalendarQuery;
 use rustical_xml::XmlDeserialize;
-use std::ops::Deref;
-#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
+#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
 #[allow(dead_code)]
-pub(crate) struct TimeRangeElement {
+pub struct TimeRangeElement {
     #[xml(ty = "attr")]
     pub(crate) start: Option<UtcDateTime>,
     #[xml(ty = "attr")]
     pub(crate) end: Option<UtcDateTime>,
 }
-#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
+#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
 #[allow(dead_code)]
 // https://www.rfc-editor.org/rfc/rfc4791#section-9.7.3
 pub struct ParamFilterElement {
@@ -27,7 +26,7 @@ pub struct ParamFilterElement {
     pub(crate) name: String,
 }
-#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
+#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
 #[allow(dead_code)]
 pub struct TextMatchElement {
     #[xml(ty = "attr")]
@@ -40,7 +39,7 @@ pub struct TextMatchElement {
 #[derive(XmlDeserialize, Clone, Debug, PartialEq)]
 #[allow(dead_code)]
 // https://www.rfc-editor.org/rfc/rfc4791#section-9.7.2
-pub(crate) struct PropFilterElement {
+pub struct PropFilterElement {
     #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
     pub(crate) is_not_defined: Option<()>,
     #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
@@ -57,7 +56,7 @@ pub(crate) struct PropFilterElement {
 #[derive(XmlDeserialize, Clone, Debug, PartialEq)]
 #[allow(dead_code)]
 // https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.1
-pub(crate) struct CompFilterElement {
+pub struct CompFilterElement {
     #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
     pub(crate) is_not_defined: Option<()>,
     #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
@@ -81,7 +80,7 @@ impl CompFilterElement {
             // Client is asking for something different than a vcalendar
             (None, false) => return false,
             _ => {}
-        };
+        }
         if self.time_range.is_some() {
             // <time-range> should be applied on VEVENT/VTODO but not on VCALENDAR
@@ -111,20 +110,20 @@ impl CompFilterElement {
             // Client is asking for something different than a vcalendar
             (None, false) => return false,
             _ => {}
-        };
+        }
         // TODO: Implement prop-filter (and comp-filter?) at some point
         if let Some(time_range) = &self.time_range {
             if let Some(start) = &time_range.start
                 && let Some(last_occurence) = cal_object.get_last_occurence().unwrap_or(None)
-                && start.deref() > &last_occurence.utc()
+                && **start > last_occurence.utc()
             {
                 return false;
             }
             if let Some(end) = &time_range.end
                 && let Some(first_occurence) = cal_object.get_first_occurence().unwrap_or(None)
-                && end.deref() < &first_occurence.utc()
+                && **end < first_occurence.utc()
             {
                 return false;
             }
@@ -136,7 +135,7 @@ impl CompFilterElement {
 #[derive(XmlDeserialize, Clone, Debug, PartialEq)]
 #[allow(dead_code)]
 // https://datatracker.ietf.org/doc/html/rfc4791#section-9.7
-pub(crate) struct FilterElement {
+pub struct FilterElement {
     // This comp-filter matches on VCALENDAR
     #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
     pub(crate) comp_filter: CompFilterElement,
@@ -151,7 +150,7 @@ impl FilterElement {
 impl From<&FilterElement> for CalendarQuery {
     fn from(value: &FilterElement) -> Self {
         let comp_filter_vcalendar = &value.comp_filter;
-        for comp_filter in comp_filter_vcalendar.comp_filter.iter() {
+        for comp_filter in &comp_filter_vcalendar.comp_filter {
             // A calendar object cannot contain both VEVENT and VTODO, so we only have to handle
             // whatever we get first
             if matches!(comp_filter.name.as_str(), "VEVENT" | "VTODO")
@@ -159,7 +158,7 @@ impl From<&FilterElement> for CalendarQuery {
             {
                 let start = time_range.start.as_ref().map(|start| start.date_naive());
                 let end = time_range.end.as_ref().map(|end| end.date_naive());
-                return CalendarQuery {
+                return Self {
                     time_start: start,
                     time_end: end,
                 };
@@ -188,7 +187,7 @@ impl From<&CalendarQueryRequest> for CalendarQuery {
         value
             .filter
             .as_ref()
-            .map(CalendarQuery::from)
+            .map(Self::from)
             .unwrap_or_default()
     }
 }
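The start.deref() comparison becomes **start: with the explicit deref operator the std::ops::Deref import is no longer needed, which is why the use line disappears at the top of this file (Clippy's explicit_deref_methods lint drives this). A self-contained sketch with a hypothetical newtype standing in for UtcDateTime:

use std::ops::Deref;

// Stand-in for a date-time newtype that derefs to its inner timestamp.
struct UtcDateTime(i64);

impl Deref for UtcDateTime {
    type Target = i64;
    fn deref(&self) -> &i64 {
        &self.0
    }
}

fn main() {
    let start: &UtcDateTime = &UtcDateTime(100);
    let last_occurence: i64 = 50;
    // old: start.deref() > &last_occurence
    // new: the deref operator says the same thing without the method call
    assert!(**start > last_occurence);
}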


@@ -3,7 +3,7 @@ use rustical_ical::CalendarObject;
 use rustical_store::CalendarStore;
 mod elements;
-pub(crate) use elements::*;
+pub use elements::*;
 pub async fn get_objects_calendar_query<C: CalendarStore>(
     cal_query: &CalendarQueryRequest,


@@ -41,11 +41,11 @@ pub(crate) enum ReportRequest {
 }
 impl ReportRequest {
-    fn props(&self) -> &PropfindType<CalendarObjectPropWrapperName> {
+    const fn props(&self) -> &PropfindType<CalendarObjectPropWrapperName> {
         match &self {
-            ReportRequest::CalendarMultiget(CalendarMultigetRequest { prop, .. }) => prop,
-            ReportRequest::CalendarQuery(CalendarQueryRequest { prop, .. }) => prop,
-            ReportRequest::SyncCollection(SyncCollectionRequest { prop, .. }) => prop,
+            Self::CalendarMultiget(CalendarMultigetRequest { prop, .. }) => prop,
+            Self::CalendarQuery(CalendarQueryRequest { prop, .. }) => prop,
+            Self::SyncCollection(SyncCollectionRequest { prop, .. }) => prop,
         }
     }
 }
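Two more recurring fixes show up in this hunk: match arms that repeat the enum's own name can use Self (clippy's use_self), and accessors whose bodies are const-compatible can be declared const fn (likely missing_const_for_fn). A minimal sketch with an invented enum:

// Hypothetical enum mirroring the shape of ReportRequest.
enum Report {
    Multiget(Vec<String>),
    Query(Vec<String>),
}

impl Report {
    // const fn: the body only borrows a field, so it could also run at compile time.
    const fn props(&self) -> &Vec<String> {
        match self {
            Self::Multiget(props) => props, // Self:: instead of Report::
            Self::Query(props) => props,
        }
    }
}

fn main() {
    let report = Report::Query(vec!["getetag".to_string()]);
    assert_eq!(report.props().len(), 1);
}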


@@ -3,13 +3,13 @@ use rustical_ical::CalendarObjectType;
 use rustical_xml::{XmlDeserialize, XmlSerialize};
 use strum_macros::VariantArray;
-#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, From, Into)]
+#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, Eq, From, Into)]
 pub struct SupportedCalendarComponent {
     #[xml(ty = "attr")]
     pub name: CalendarObjectType,
 }
-#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq)]
+#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, Eq)]
 pub struct SupportedCalendarComponentSet {
     #[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
     pub comp: Vec<SupportedCalendarComponent>,
@@ -36,7 +36,7 @@ impl From<SupportedCalendarComponentSet> for Vec<CalendarObjectType> {
     }
 }
-#[derive(Debug, Clone, XmlSerialize, PartialEq)]
+#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
 pub struct CalendarData {
     #[xml(ty = "attr")]
     content_type: String,
@@ -53,13 +53,13 @@ impl Default for CalendarData {
     }
 }
-#[derive(Debug, Clone, XmlSerialize, Default, PartialEq)]
+#[derive(Debug, Clone, XmlSerialize, Default, PartialEq, Eq)]
 pub struct SupportedCalendarData {
     #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
     calendar_data: CalendarData,
 }
-#[derive(Debug, Clone, XmlSerialize, PartialEq, VariantArray)]
+#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq, VariantArray)]
 pub enum ReportMethod {
     #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
     CalendarQuery,


@@ -71,7 +71,7 @@ pub struct CalendarResource {
 impl ResourceName for CalendarResource {
     fn get_name(&self) -> String {
-        self.cal.id.to_owned()
+        self.cal.id.clone()
     }
 }
@@ -89,7 +89,7 @@ impl SyncTokenExtension for CalendarResource {
 impl DavPushExtension for CalendarResource {
     fn get_topic(&self) -> String {
-        self.cal.push_topic.to_owned()
+        self.cal.push_topic.clone()
     }
 }
@@ -135,7 +135,7 @@ impl Resource for CalendarResource {
             }
             CalendarPropName::CalendarTimezone => {
                 CalendarProp::CalendarTimezone(self.cal.timezone_id.as_ref().and_then(|tzid| {
-                    vtimezones_rs::VTIMEZONES.get(tzid).map(|tz| tz.to_string())
+                    vtimezones_rs::VTIMEZONES.get(tzid).map(|tz| (*tz).to_string())
                 }))
             }
             // chrono_tz uses the IANA database
@@ -159,7 +159,7 @@ impl Resource for CalendarResource {
                 CalendarProp::SupportedReportSet(SupportedReportSet::all())
             }
             CalendarPropName::Source => CalendarProp::Source(
-                self.cal.subscription_url.to_owned().map(HrefElement::from),
+                self.cal.subscription_url.clone().map(HrefElement::from),
             ),
             CalendarPropName::MinDateTime => {
                 CalendarProp::MinDateTime(CalDateTime::from(DateTime::<Utc>::MIN_UTC).format())


@@ -35,7 +35,7 @@ impl<C: CalendarStore, S: SubscriptionStore> Clone for CalendarResourceService<C
 }
 impl<C: CalendarStore, S: SubscriptionStore> CalendarResourceService<C, S> {
-    pub fn new(cal_store: Arc<C>, sub_store: Arc<S>) -> Self {
+    pub const fn new(cal_store: Arc<C>, sub_store: Arc<S>) -> Self {
         Self {
             cal_store,
             sub_store,


@@ -78,12 +78,9 @@ pub async fn put_event<C: CalendarStore>(
         true
     };
-    let object = match CalendarObject::from_ics(body.clone()) {
-        Ok(obj) => obj,
-        Err(_) => {
-            debug!("invalid calendar data:\n{body}");
-            return Err(Error::PreconditionFailed(Precondition::ValidCalendarData));
-        }
+    let object = if let Ok(obj) = CalendarObject::from_ics(body.clone()) { obj } else {
+        debug!("invalid calendar data:\n{body}");
+        return Err(Error::PreconditionFailed(Precondition::ValidCalendarData));
     };
     if object.get_id() != object_id {
         error!(
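A match with one meaningful arm and one fall-through arm is flagged by clippy's single_match_else lint and rewritten as if let ... else, which is what happened to the CalendarObject::from_ics call above. A standalone sketch with a made-up parser:

// Hypothetical stand-in for CalendarObject::from_ics.
fn parse(body: &str) -> Result<usize, ()> {
    if body.starts_with("BEGIN:VCALENDAR") { Ok(body.len()) } else { Err(()) }
}

fn handle(body: &str) -> Result<usize, String> {
    // old shape: match parse(body) { Ok(o) => o, Err(_) => { ...; return Err(...) } }
    let object = if let Ok(obj) = parse(body) {
        obj
    } else {
        eprintln!("invalid calendar data:\n{body}");
        return Err("precondition failed".to_string());
    };
    Ok(object)
}

fn main() {
    assert!(handle("BEGIN:VCALENDAR").is_ok());
    assert!(handle("nonsense").is_err());
}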


@@ -2,7 +2,7 @@ use rustical_dav::extensions::CommonPropertiesProp;
 use rustical_ical::UtcDateTime;
 use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
-#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
+#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
 #[xml(unit_variants_ident = "CalendarObjectPropName")]
 pub enum CalendarObjectProp {
     // WebDAV (RFC 2518)
@@ -25,7 +25,7 @@ pub enum CalendarObjectPropWrapper {
 }
 #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq, Hash)]
-pub(crate) struct ExpandElement {
+pub struct ExpandElement {
     #[xml(ty = "attr")]
     pub(crate) start: UtcDateTime,
     #[xml(ty = "attr")]


@@ -1,4 +1,4 @@
-use super::prop::*;
+use super::prop::{CalendarObjectPropWrapper, CalendarObjectPropWrapperName, CalendarObjectPropName, CalendarObjectProp, CalendarData};
 use crate::Error;
 use derive_more::derive::{From, Into};
 use rustical_dav::{


@@ -35,7 +35,7 @@ impl<C: CalendarStore> Clone for CalendarObjectResourceService<C> {
 }
 impl<C: CalendarStore> CalendarObjectResourceService<C> {
-    pub fn new(cal_store: Arc<C>) -> Self {
+    pub const fn new(cal_store: Arc<C>) -> Self {
         Self { cal_store }
     }
 }


@@ -62,23 +62,23 @@ pub enum Error {
 }
 impl Error {
-    pub fn status_code(&self) -> StatusCode {
+    #[must_use] pub fn status_code(&self) -> StatusCode {
         match self {
-            Error::StoreError(err) => match err {
+            Self::StoreError(err) => match err {
                 rustical_store::Error::NotFound => StatusCode::NOT_FOUND,
                 rustical_store::Error::AlreadyExists => StatusCode::CONFLICT,
                 rustical_store::Error::ReadOnly => StatusCode::FORBIDDEN,
                 _ => StatusCode::INTERNAL_SERVER_ERROR,
             },
-            Error::ChronoParseError(_) => StatusCode::INTERNAL_SERVER_ERROR,
-            Error::DavError(err) => StatusCode::try_from(err.status_code().as_u16())
+            Self::ChronoParseError(_) => StatusCode::INTERNAL_SERVER_ERROR,
+            Self::DavError(err) => StatusCode::try_from(err.status_code().as_u16())
                 .expect("Just converting between versions"),
-            Error::Unauthorized => StatusCode::UNAUTHORIZED,
-            Error::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
-            Error::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
-            Error::NotFound => StatusCode::NOT_FOUND,
-            Error::IcalError(err) => err.status_code(),
-            Error::PreconditionFailed(_err) => StatusCode::PRECONDITION_FAILED,
+            Self::Unauthorized => StatusCode::UNAUTHORIZED,
+            Self::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
+            Self::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
+            Self::NotFound => StatusCode::NOT_FOUND,
+            Self::IcalError(err) => err.status_code(),
+            Self::PreconditionFailed(_err) => StatusCode::PRECONDITION_FAILED,
         }
     }
 }
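The #[must_use] additions throughout this commit look like clippy's must_use_candidate lint: a getter such as status_code has no side effects, so silently discarding its return value is almost certainly a bug, and the attribute makes the compiler warn about that. A small sketch:

struct Error;

impl Error {
    // Calling this and ignoring the result would be pointless, so mark it must_use.
    #[must_use]
    pub fn status_code(&self) -> u16 {
        500
    }
}

fn main() {
    let err = Error;
    let code = err.status_code(); // fine: the value is used
    assert_eq!(code, 500);
    // A bare `err.status_code();` would now trigger an unused_must_use warning.
}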


@@ -38,8 +38,8 @@ pub fn caldav_router<AP: AuthenticationProvider, C: CalendarStore, S: Subscripti
         prefix,
         RootResourceService::<_, Principal, CalDavPrincipalUri>::new(PrincipalResourceService {
             auth_provider: auth_provider.clone(),
-            sub_store: subscription_store.clone(),
-            cal_store: store.clone(),
+            sub_store: subscription_store,
+            cal_store: store,
             simplified_home_set,
         })
         .axum_router()
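Dropping .clone() on subscription_store and store is the redundant_clone pattern: when the clone is the last use of the value, the original can simply be moved, saving an Arc reference-count bump. A rough sketch:

use std::sync::Arc;

struct Service {
    store: Arc<Vec<u8>>,
}

fn build(store: Arc<Vec<u8>>) -> Service {
    // old: Service { store: store.clone() } -- clone of a value never used again
    Service { store } // move the Arc instead of bumping its refcount
}

fn main() {
    let store = Arc::new(vec![1, 2, 3]);
    let service = build(store);
    assert_eq!(service.store.len(), 3);
}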


@@ -24,7 +24,7 @@ pub struct PrincipalResource {
 impl ResourceName for PrincipalResource {
     fn get_name(&self) -> String {
-        self.principal.id.to_owned()
+        self.principal.id.clone()
     }
 }
@@ -56,7 +56,7 @@ impl Resource for PrincipalResource {
             PrincipalPropWrapperName::Principal(prop) => {
                 PrincipalPropWrapper::Principal(match prop {
                     PrincipalPropName::CalendarUserType => {
-                        PrincipalProp::CalendarUserType(self.principal.principal_type.to_owned())
+                        PrincipalProp::CalendarUserType(self.principal.principal_type.clone())
                     }
                     PrincipalPropName::PrincipalUrl => {
                         PrincipalProp::PrincipalUrl(principal_url.into())


@@ -6,7 +6,7 @@ use rustical_store::auth::PrincipalType;
 use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
 use strum_macros::VariantArray;
-#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
+#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
 #[xml(unit_variants_ident = "PrincipalPropName")]
 pub enum PrincipalProp {
     // Scheduling Extensions to CalDAV (RFC 6638)
@@ -34,7 +34,7 @@ pub enum PrincipalProp {
     CalendarHomeSet(CalendarHomeSet),
 }
-#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone)]
+#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
 pub struct CalendarHomeSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
 #[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
@@ -44,7 +44,7 @@ pub enum PrincipalPropWrapper {
     Common(CommonPropertiesProp),
 }
-#[derive(XmlSerialize, PartialEq, Clone, VariantArray)]
+#[derive(XmlSerialize, PartialEq, Eq, Clone, VariantArray)]
 pub enum ReportMethod {
     // We don't actually support principal-match
     #[xml(ns = "rustical_dav::namespace::NS_DAV")]


@@ -1,7 +1,7 @@
 use rustical_dav::extensions::CommonPropertiesProp;
 use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
-#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
+#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
 #[xml(unit_variants_ident = "AddressObjectPropName")]
 pub enum AddressObjectProp {
     // WebDAV (RFC 2518)
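The many PartialEq to PartialEq, Eq derive changes follow clippy's derive_partial_eq_without_eq: if every field already has total equality (no floats or other NaN-like cases), deriving Eq as well documents that and lets the type be used where Eq is required, for example in a HashSet or as a HashMap key. Sketch:

use std::collections::HashSet;

// Hypothetical prop type: all fields have total equality, so Eq can be derived too.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct PropName {
    namespace: Option<String>,
    name: String,
}

fn main() {
    let mut seen = HashSet::new(); // requires Eq + Hash
    seen.insert(PropName { namespace: None, name: "displayname".into() });
    assert!(seen.contains(&PropName { namespace: None, name: "displayname".into() }));
}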


@@ -8,7 +8,7 @@ use rustical_store::{Addressbook, AddressbookStore, SubscriptionStore, auth::Pri
 use rustical_xml::{XmlDeserialize, XmlDocument, XmlRootTag};
 use tracing::instrument;
-#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
+#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
 pub struct Resourcetype {
     #[xml(ns = "rustical_dav::namespace::NS_CARDDAV")]
     addressbook: Option<()>,
@@ -16,7 +16,7 @@ pub struct Resourcetype {
     collection: Option<()>,
 }
-#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
+#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
 pub struct MkcolAddressbookProp {
     #[xml(ns = "rustical_dav::namespace::NS_DAV")]
     resourcetype: Option<Resourcetype>,
@@ -27,7 +27,7 @@ pub struct MkcolAddressbookProp {
     description: Option<String>,
 }
-#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
+#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
 pub struct PropElement<T: XmlDeserialize> {
     #[xml(ns = "rustical_dav::namespace::NS_DAV")]
     prop: T,
@@ -53,13 +53,13 @@ pub async fn route_mkcol<AS: AddressbookStore, S: SubscriptionStore>(
     }
     let mut request = MkcolRequest::parse_str(&body)?.set.prop;
-    if let Some("") = request.displayname.as_deref() {
-        request.displayname = None
+    if request.displayname.as_deref() == Some("") {
+        request.displayname = None;
     }
     let addressbook = Addressbook {
-        id: addressbook_id.to_owned(),
-        principal: principal.to_owned(),
+        id: addressbook_id.clone(),
+        principal: principal.clone(),
         displayname: request.displayname,
         description: request.description,
         deleted_at: None,


@@ -45,12 +45,11 @@ pub async fn route_post<AS: AddressbookStore, S: SubscriptionStore>(
     };
     let subscription = Subscription {
-        id: sub_id.to_owned(),
+        id: sub_id.clone(),
         push_resource: request
             .subscription
             .web_push_subscription
-            .push_resource
-            .to_owned(),
+            .push_resource.clone(),
         topic: addressbook_resource.0.push_topic,
         expiration: expires.naive_local(),
         public_key: request


@@ -13,7 +13,7 @@ use rustical_ical::AddressObject;
 use rustical_store::{AddressbookStore, auth::Principal};
 use rustical_xml::XmlDeserialize;
-#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
+#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
 #[allow(dead_code)]
 #[xml(ns = "rustical_dav::namespace::NS_DAV")]
 pub struct AddressbookMultigetRequest {
@@ -35,7 +35,7 @@ pub async fn get_objects_addressbook_multiget<AS: AddressbookStore>(
     for href in &addressbook_multiget.href {
         if let Some(filename) = href.strip_prefix(path) {
-            let filename = filename.trim_start_matches("/");
+            let filename = filename.trim_start_matches('/');
             if let Some(object_id) = filename.strip_suffix(".vcf") {
                 match store
                     .get_object(principal, addressbook_id, object_id, false)
@@ -44,7 +44,7 @@ pub async fn get_objects_addressbook_multiget<AS: AddressbookStore>(
                     Ok(object) => result.push(object),
                     Err(rustical_store::Error::NotFound) => not_found.push(href.to_owned()),
                     Err(err) => return Err(err.into()),
-                };
+                }
             } else {
                 not_found.push(href.to_owned());
                 continue;


@@ -26,10 +26,10 @@ pub(crate) enum ReportRequest {
 }
 impl ReportRequest {
-    fn props(&self) -> &PropfindType<AddressObjectPropWrapperName> {
+    const fn props(&self) -> &PropfindType<AddressObjectPropWrapperName> {
         match self {
-            ReportRequest::AddressbookMultiget(AddressbookMultigetRequest { prop, .. }) => prop,
-            ReportRequest::SyncCollection(SyncCollectionRequest { prop, .. }) => prop,
+            Self::AddressbookMultiget(AddressbookMultigetRequest { prop, .. }) => prop,
+            Self::SyncCollection(SyncCollectionRequest { prop, .. }) => prop,
         }
     }
 }


@@ -29,7 +29,7 @@ pub enum AddressbookPropWrapper {
     Common(CommonPropertiesProp),
 }
-#[derive(Debug, Clone, XmlSerialize, PartialEq)]
+#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
 pub struct AddressDataType {
     #[xml(ty = "attr")]
     pub content_type: &'static str,
@@ -37,7 +37,7 @@ pub struct AddressDataType {
     pub version: &'static str,
 }
-#[derive(Debug, Clone, XmlSerialize, PartialEq)]
+#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
 pub struct SupportedAddressData {
     #[xml(ns = "rustical_dav::namespace::NS_CARDDAV", flatten)]
     address_data_type: &'static [AddressDataType],
@@ -60,7 +60,7 @@ impl Default for SupportedAddressData {
     }
 }
-#[derive(Debug, Clone, XmlSerialize, PartialEq, VariantArray)]
+#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq, VariantArray)]
 pub enum ReportMethod {
     #[xml(ns = "rustical_dav::namespace::NS_CARDDAV")]
     AddressbookMultiget,


@@ -17,7 +17,7 @@ pub struct AddressbookResource(pub(crate) Addressbook);
 impl ResourceName for AddressbookResource {
     fn get_name(&self) -> String {
-        self.0.id.to_owned()
+        self.0.id.clone()
     }
 }
@@ -29,7 +29,7 @@ impl SyncTokenExtension for AddressbookResource {
 impl DavPushExtension for AddressbookResource {
     fn get_topic(&self) -> String {
-        self.0.push_topic.to_owned()
+        self.0.push_topic.clone()
     }
 }
@@ -65,7 +65,7 @@ impl Resource for AddressbookResource {
                 AddressbookProp::SupportedReportSet(SupportedReportSet::all())
             }
             AddressbookPropName::AddressbookDescription => {
-                AddressbookProp::AddressbookDescription(self.0.description.to_owned())
+                AddressbookProp::AddressbookDescription(self.0.description.clone())
             }
             AddressbookPropName::SupportedAddressData => {
                 AddressbookProp::SupportedAddressData(SupportedAddressData::default())


@@ -26,7 +26,7 @@ pub struct AddressbookResourceService<AS: AddressbookStore, S: SubscriptionStore
 }
 impl<A: AddressbookStore, S: SubscriptionStore> AddressbookResourceService<A, S> {
-    pub fn new(addr_store: Arc<A>, sub_store: Arc<S>) -> Self {
+    pub const fn new(addr_store: Arc<A>, sub_store: Arc<S>) -> Self {
         Self {
             addr_store,
             sub_store,


@@ -30,20 +30,20 @@ pub enum Error {
 }
 impl Error {
-    pub fn status_code(&self) -> StatusCode {
+    #[must_use] pub const fn status_code(&self) -> StatusCode {
         match self {
-            Error::StoreError(err) => match err {
+            Self::StoreError(err) => match err {
                 rustical_store::Error::NotFound => StatusCode::NOT_FOUND,
                 rustical_store::Error::AlreadyExists => StatusCode::CONFLICT,
                 rustical_store::Error::ReadOnly => StatusCode::FORBIDDEN,
                 _ => StatusCode::INTERNAL_SERVER_ERROR,
             },
-            Error::ChronoParseError(_) => StatusCode::INTERNAL_SERVER_ERROR,
-            Error::DavError(err) => err.status_code(),
-            Error::Unauthorized => StatusCode::UNAUTHORIZED,
-            Error::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
-            Error::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
-            Error::NotFound => StatusCode::NOT_FOUND,
+            Self::ChronoParseError(_) => StatusCode::INTERNAL_SERVER_ERROR,
+            Self::DavError(err) => err.status_code(),
+            Self::Unauthorized => StatusCode::UNAUTHORIZED,
+            Self::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
+            Self::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
+            Self::NotFound => StatusCode::NOT_FOUND,
             Self::IcalError(err) => err.status_code(),
         }
     }


@@ -38,15 +38,15 @@ pub fn carddav_router<AP: AuthenticationProvider, A: AddressbookStore, S: Subscr
     subscription_store: Arc<S>,
 ) -> Router {
     let principal_service = PrincipalResourceService::new(
-        store.clone(),
+        store,
         auth_provider.clone(),
-        subscription_store.clone(),
+        subscription_store,
     );
     Router::new()
         .nest(
             prefix,
             RootResourceService::<_, Principal, CardDavPrincipalUri>::new(
-                principal_service.clone(),
+                principal_service,
             )
             .axum_router()
             .layer(AuthenticationLayer::new(auth_provider))


@@ -20,7 +20,7 @@ pub struct PrincipalResource {
 impl ResourceName for PrincipalResource {
     fn get_name(&self) -> String {
-        self.principal.id.to_owned()
+        self.principal.id.clone()
     }
 }


@@ -4,7 +4,7 @@ use rustical_dav::{
 };
 use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
-#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
+#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
 #[xml(unit_variants_ident = "PrincipalPropName")]
 pub enum PrincipalProp {
     // WebDAV Access Control (RFC 3744)
@@ -27,7 +27,7 @@ pub enum PrincipalProp {
     PrincipalAddress(Option<HrefElement>),
 }
-#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone)]
+#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
 pub struct AddressbookHomeSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
 #[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]


@@ -34,7 +34,7 @@ impl<A: AddressbookStore, AP: AuthenticationProvider, S: SubscriptionStore> Clon
 impl<A: AddressbookStore, AP: AuthenticationProvider, S: SubscriptionStore>
     PrincipalResourceService<A, AP, S>
 {
-    pub fn new(addr_store: Arc<A>, auth_provider: Arc<AP>, sub_store: Arc<S>) -> Self {
+    pub const fn new(addr_store: Arc<A>, auth_provider: Arc<AP>, sub_store: Arc<S>) -> Self {
         Self {
             addr_store,
             auth_provider,


@@ -35,7 +35,7 @@ pub enum Error {
 }
 impl Error {
-    pub fn status_code(&self) -> StatusCode {
+    #[must_use] pub const fn status_code(&self) -> StatusCode {
         match self {
             Self::InternalError => StatusCode::INTERNAL_SERVER_ERROR,
             Self::NotFound => StatusCode::NOT_FOUND,
@@ -50,8 +50,8 @@ impl Error {
                 | XmlError::InvalidValue(_) => StatusCode::UNPROCESSABLE_ENTITY,
                 _ => StatusCode::BAD_REQUEST,
             },
-            Error::PropReadOnly => StatusCode::CONFLICT,
-            Error::PreconditionFailed => StatusCode::PRECONDITION_FAILED,
+            Self::PropReadOnly => StatusCode::CONFLICT,
+            Self::PreconditionFailed => StatusCode::PRECONDITION_FAILED,
             Self::IOError(_) => StatusCode::INTERNAL_SERVER_ERROR,
             Self::Forbidden => StatusCode::FORBIDDEN,
         }
@@ -68,7 +68,7 @@ impl axum::response::IntoResponse for Error {
         }
         let mut resp = axum::response::Response::builder().status(self.status_code());
-        if matches!(&self, &Error::Unauthorized) {
+        if matches!(&self, &Self::Unauthorized) {
             resp.headers_mut()
                 .expect("This must always work")
                 .insert("WWW-Authenticate", "Basic".parse().unwrap());


@@ -40,7 +40,7 @@ pub trait CommonPropertiesExtension: Resource {
                 CommonPropertiesProp::Resourcetype(self.get_resourcetype())
             }
             CommonPropertiesPropName::Displayname => {
-                CommonPropertiesProp::Displayname(self.get_displayname().map(|s| s.to_string()))
+                CommonPropertiesProp::Displayname(self.get_displayname().map(std::string::ToString::to_string))
             }
             CommonPropertiesPropName::CurrentUserPrincipal => {
                 CommonPropertiesProp::CurrentUserPrincipal(


@@ -1,6 +1,6 @@
 use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
-#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, PropName, EnumVariants)]
+#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, PropName, EnumVariants)]
 #[xml(unit_variants_ident = "SyncTokenExtensionPropName")]
 pub enum SyncTokenExtensionProp {
     // Collection Synchronization (RFC 6578)


@@ -19,7 +19,7 @@ impl IntoResponse for InvalidDepthHeader {
     }
 }
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub enum Depth {
     Zero,
     One,
@@ -29,9 +29,9 @@ pub enum Depth {
 impl ValueSerialize for Depth {
     fn serialize(&self) -> String {
         match self {
-            Depth::Zero => "0",
-            Depth::One => "1",
-            Depth::Infinity => "infinity",
+            Self::Zero => "0",
+            Self::One => "1",
+            Self::Infinity => "infinity",
         }
         .to_owned()
     }
@@ -55,9 +55,9 @@ impl TryFrom<&[u8]> for Depth {
     fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
         match value {
-            b"0" => Ok(Depth::Zero),
-            b"1" => Ok(Depth::One),
-            b"Infinity" | b"infinity" => Ok(Depth::Infinity),
+            b"0" => Ok(Self::Zero),
+            b"1" => Ok(Self::One),
+            b"Infinity" | b"infinity" => Ok(Self::Infinity),
             _ => Err(InvalidDepthHeader),
         }
     }


@@ -14,7 +14,7 @@ impl IntoResponse for InvalidOverwriteHeader {
     }
 }
-#[derive(Debug, PartialEq)]
+#[derive(Debug, PartialEq, Eq)]
 pub struct Overwrite(pub bool);
 impl Default for Overwrite {


@@ -41,13 +41,13 @@ impl XmlSerialize for UserPrivilegeSet {
     }
 }
-#[derive(Debug, Clone, Default, PartialEq)]
+#[derive(Debug, Clone, Default, PartialEq, Eq)]
 pub struct UserPrivilegeSet {
     privileges: HashSet<UserPrivilege>,
 }
 impl UserPrivilegeSet {
-    pub fn has(&self, privilege: &UserPrivilege) -> bool {
+    #[must_use] pub fn has(&self, privilege: &UserPrivilege) -> bool {
         if (privilege == &UserPrivilege::WriteProperties
             || privilege == &UserPrivilege::WriteContent)
             && self.privileges.contains(&UserPrivilege::Write)
@@ -57,13 +57,13 @@ impl UserPrivilegeSet {
         self.privileges.contains(privilege) || self.privileges.contains(&UserPrivilege::All)
     }
-    pub fn all() -> Self {
+    #[must_use] pub fn all() -> Self {
         Self {
             privileges: HashSet::from([UserPrivilege::All]),
         }
     }
-    pub fn owner_only(is_owner: bool) -> Self {
+    #[must_use] pub fn owner_only(is_owner: bool) -> Self {
         if is_owner {
             Self::all()
         } else {
@@ -71,7 +71,7 @@ impl UserPrivilegeSet {
         }
     }
-    pub fn owner_read(is_owner: bool) -> Self {
+    #[must_use] pub fn owner_read(is_owner: bool) -> Self {
         if is_owner {
             Self::read_only()
         } else {
@@ -79,7 +79,7 @@ impl UserPrivilegeSet {
         }
     }
-    pub fn owner_write_properties(is_owner: bool) -> Self {
+    #[must_use] pub fn owner_write_properties(is_owner: bool) -> Self {
         // Content is read-only but we can write properties
         if is_owner {
             Self::write_properties()
@@ -88,7 +88,7 @@ impl UserPrivilegeSet {
         }
     }
-    pub fn read_only() -> Self {
+    #[must_use] pub fn read_only() -> Self {
         Self {
             privileges: HashSet::from([
                 UserPrivilege::Read,
@@ -98,7 +98,7 @@ impl UserPrivilegeSet {
         }
     }
-    pub fn write_properties() -> Self {
+    #[must_use] pub fn write_properties() -> Self {
         Self {
             privileges: HashSet::from([
                 UserPrivilege::Read,


@@ -9,42 +9,42 @@ pub type MethodFunction<State> =
 pub trait AxumMethods: Sized + Send + Sync + 'static {
     #[inline]
-    fn report() -> Option<MethodFunction<Self>> {
+    #[must_use] fn report() -> Option<MethodFunction<Self>> {
         None
     }
     #[inline]
-    fn get() -> Option<MethodFunction<Self>> {
+    #[must_use] fn get() -> Option<MethodFunction<Self>> {
         None
     }
     #[inline]
-    fn post() -> Option<MethodFunction<Self>> {
+    #[must_use] fn post() -> Option<MethodFunction<Self>> {
         None
     }
     #[inline]
-    fn mkcol() -> Option<MethodFunction<Self>> {
+    #[must_use] fn mkcol() -> Option<MethodFunction<Self>> {
         None
     }
     #[inline]
-    fn mkcalendar() -> Option<MethodFunction<Self>> {
+    #[must_use] fn mkcalendar() -> Option<MethodFunction<Self>> {
         None
     }
     #[inline]
-    fn put() -> Option<MethodFunction<Self>> {
+    #[must_use] fn put() -> Option<MethodFunction<Self>> {
         None
     }
     #[inline]
-    fn import() -> Option<MethodFunction<Self>> {
+    #[must_use] fn import() -> Option<MethodFunction<Self>> {
         None
     }
     #[inline]
-    fn allow_header() -> Allow {
+    #[must_use] fn allow_header() -> Allow {
         let mut allow = vec![
             Method::from_str("PROPFIND").unwrap(),
             Method::from_str("PROPPATCH").unwrap(),


@@ -23,7 +23,7 @@ pub struct AxumService<RS: ResourceService + AxumMethods> {
 }
 impl<RS: ResourceService + AxumMethods> AxumService<RS> {
-    pub fn new(resource_service: RS) -> Self {
+    pub const fn new(resource_service: RS) -> Self {
         Self { resource_service }
     }
 }
@@ -103,7 +103,7 @@ where
             }
         }
         _ => {}
-    };
+    }
     Box::pin(async move {
         Ok(Response::builder()
             .status(StatusCode::METHOD_NOT_ALLOWED)


@@ -12,7 +12,7 @@ use serde::Deserialize;
 use tracing::instrument;
 #[instrument(skip(path, resource_service,))]
-pub(crate) async fn axum_route_copy<R: ResourceService>(
+pub async fn axum_route_copy<R: ResourceService>(
     Path(path): Path<R::PathComponents>,
     State(resource_service): State<R>,
     depth: Option<Depth>,


@@ -7,7 +7,7 @@ use axum_extra::TypedHeader;
 use headers::{IfMatch, IfNoneMatch};
 use http::HeaderMap;
-pub(crate) async fn axum_route_delete<R: ResourceService>(
+pub async fn axum_route_delete<R: ResourceService>(
     Path(path): Path<R::PathComponents>,
     State(resource_service): State<R>,
     principal: R::Principal,
@@ -24,8 +24,7 @@ pub(crate) async fn axum_route_delete<R: ResourceService>(
     }
     let no_trash = header_map
         .get("X-No-Trashbin")
-        .map(|val| matches!(val.to_str(), Ok("1")))
-        .unwrap_or(false);
+        .is_some_and(|val| matches!(val.to_str(), Ok("1")));
     route_delete(
         &path,
         &principal,
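The X-No-Trashbin check above collapses .map(...).unwrap_or(false) into Option::is_some_and (stable since Rust 1.70), which states the intent directly: is there a value, and does it satisfy this predicate? Sketch with a plain Option<&str> instead of a HeaderMap:

fn no_trash(header: Option<&str>) -> bool {
    // old: header.map(|val| val == "1").unwrap_or(false)
    header.is_some_and(|val| val == "1")
}

fn main() {
    assert!(no_trash(Some("1")));
    assert!(!no_trash(Some("0")));
    assert!(!no_trash(None));
}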


@@ -4,8 +4,8 @@ mod mv;
 mod propfind;
 mod proppatch;
-pub(crate) use copy::axum_route_copy;
-pub(crate) use delete::axum_route_delete;
-pub(crate) use mv::axum_route_move;
-pub(crate) use propfind::axum_route_propfind;
-pub(crate) use proppatch::axum_route_proppatch;
+pub use copy::axum_route_copy;
+pub use delete::axum_route_delete;
+pub use mv::axum_route_move;
+pub use propfind::axum_route_propfind;
+pub use proppatch::axum_route_proppatch;


@@ -12,7 +12,7 @@ use serde::Deserialize;
 use tracing::instrument;
 #[instrument(skip(path, resource_service,))]
-pub(crate) async fn axum_route_move<R: ResourceService>(
+pub async fn axum_route_move<R: ResourceService>(
     Path(path): Path<R::PathComponents>,
     State(resource_service): State<R>,
     depth: Option<Depth>,


@@ -15,7 +15,7 @@ type RSMultistatus<R> = MultistatusElement<
 >;
 #[instrument(skip(path, resource_service, puri))]
-pub(crate) async fn axum_route_propfind<R: ResourceService>(
+pub async fn axum_route_propfind<R: ResourceService>(
     Path(path): Path<R::PathComponents>,
     State(resource_service): State<R>,
     depth: Depth,
@@ -36,7 +36,7 @@ pub(crate) async fn axum_route_propfind<R: ResourceService>(
     .await
 }
-pub(crate) async fn route_propfind<R: ResourceService>(
+pub async fn route_propfind<R: ResourceService>(
     path_components: &R::PathComponents,
     path: &str,
     body: &str,


@@ -61,7 +61,7 @@ enum Operation<T: XmlDeserialize> {
#[xml(ns = "crate::namespace::NS_DAV")] #[xml(ns = "crate::namespace::NS_DAV")]
struct PropertyupdateElement<T: XmlDeserialize>(#[xml(ty = "untagged", flatten)] Vec<Operation<T>>); struct PropertyupdateElement<T: XmlDeserialize>(#[xml(ty = "untagged", flatten)] Vec<Operation<T>>);
pub(crate) async fn axum_route_proppatch<R: ResourceService>( pub async fn axum_route_proppatch<R: ResourceService>(
Path(path): Path<R::PathComponents>, Path(path): Path<R::PathComponents>,
State(resource_service): State<R>, State(resource_service): State<R>,
principal: R::Principal, principal: R::Principal,
@@ -71,7 +71,7 @@ pub(crate) async fn axum_route_proppatch<R: ResourceService>(
route_proppatch(&path, uri.path(), &body, &principal, &resource_service).await route_proppatch(&path, uri.path(), &body, &principal, &resource_service).await
} }
pub(crate) async fn route_proppatch<R: ResourceService>( pub async fn route_proppatch<R: ResourceService>(
path_components: &R::PathComponents, path_components: &R::PathComponents,
path: &str, path: &str,
body: &str, body: &str,
@@ -96,7 +96,7 @@ pub(crate) async fn route_proppatch<R: ResourceService>(
let mut props_conflict = Vec::new(); let mut props_conflict = Vec::new();
let mut props_not_found = Vec::new(); let mut props_not_found = Vec::new();
for operation in operations.into_iter() { for operation in operations {
match operation { match operation {
Operation::Set(SetPropertyElement { Operation::Set(SetPropertyElement {
prop: SetPropertyPropWrapperWrapper(properties), prop: SetPropertyPropWrapperWrapper(properties),
@@ -113,7 +113,7 @@ pub(crate) async fn route_proppatch<R: ResourceService>(
Err(Error::PropReadOnly) => props_conflict Err(Error::PropReadOnly) => props_conflict
.push((ns.map(NamespaceOwned::from), propname.to_owned())), .push((ns.map(NamespaceOwned::from), propname.to_owned())),
Err(err) => return Err(err.into()), Err(err) => return Err(err.into()),
}; }
} }
SetPropertyPropWrapper::Invalid(invalid) => { SetPropertyPropWrapper::Invalid(invalid) => {
let propname = invalid.tag_name(); let propname = invalid.tag_name();
@@ -131,7 +131,7 @@ pub(crate) async fn route_proppatch<R: ResourceService>(
// This happens in following cases: // This happens in following cases:
// - read-only properties with #[serde(skip_deserializing)] // - read-only properties with #[serde(skip_deserializing)]
// - internal properties // - internal properties
props_conflict.push(full_propname) props_conflict.push(full_propname);
} else { } else {
props_not_found.push((None, propname)); props_not_found.push((None, propname));
} }
@@ -154,7 +154,7 @@ pub(crate) async fn route_proppatch<R: ResourceService>(
}, },
// I guess removing a nonexisting property should be successful :) // I guess removing a nonexisting property should be successful :)
Err(_) => props_ok.push((None, propname)), Err(_) => props_ok.push((None, propname)),
}; }
} }
} }
} }


@@ -42,7 +42,7 @@ pub trait Resource: Clone + Send + 'static {
     fn get_resourcetype(&self) -> Resourcetype;
-    fn list_props() -> Vec<(Option<Namespace<'static>>, &'static str)> {
+    #[must_use] fn list_props() -> Vec<(Option<Namespace<'static>>, &'static str)> {
         Self::Prop::variant_names()
     }
@@ -106,13 +106,13 @@ pub trait Resource: Clone + Send + 'static {
     fn parse_propfind(
         body: &str,
     ) -> Result<PropfindElement<<Self::Prop as PropName>::Names>, rustical_xml::XmlError> {
-        if !body.is_empty() {
-            PropfindElement::parse_str(body)
-        } else {
+        if body.is_empty() {
             Ok(PropfindElement {
                 prop: PropfindType::Allprop,
                 include: None,
             })
+        } else {
+            PropfindElement::parse_str(body)
         }
     }
@@ -139,7 +139,7 @@ pub trait Resource: Clone + Send + 'static {
             .collect_vec();
         return Ok(ResponseElement {
-            href: path.to_owned(),
+            href: path.clone(),
            propstat: vec![PropstatWrapper::TagList(PropstatElement {
                 prop: TagList::from(props),
                 status: StatusCode::OK,
@@ -181,7 +181,7 @@ pub trait Resource: Clone + Send + 'static {
         }));
     }
     Ok(ResponseElement {
-        href: path.to_owned(),
+        href: path.clone(),
         propstat: propstats,
         ..Default::default()
     })
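The parse_propfind change in the middle hunk above is clippy's if_not_else lint: an if !condition { A } else { B } reads more easily with the branches swapped, so the empty-body default case now comes first. Sketch:

fn parse(body: &str) -> Result<&str, ()> {
    // old: if !body.is_empty() { Ok(body) } else { Ok("allprop") }
    if body.is_empty() {
        Ok("allprop") // default when the client sends no body
    } else {
        Ok(body)
    }
}

fn main() {
    assert_eq!(parse(""), Ok("allprop"));
    assert_eq!(parse("<propfind/>"), Ok("<propfind/>"));
}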


@@ -63,7 +63,7 @@ pub struct RootResourceService<PRS: ResourceService + Clone, P: Principal, PURI:
 impl<PRS: ResourceService + Clone, P: Principal, PURI: PrincipalUri>
     RootResourceService<PRS, P, PURI>
 {
-    pub fn new(principal_resource_service: PRS) -> Self {
+    pub const fn new(principal_resource_service: PRS) -> Self {
         Self(principal_resource_service, PhantomData, PhantomData)
     }
 }
@@ -88,7 +88,7 @@ where
     async fn get_resource(
         &self,
-        _: &(),
+        (): &(),
         _show_deleted: bool,
     ) -> Result<Self::Resource, Self::Error> {
         Ok(RootResource::<PRS::Resource, P>::default())


@@ -1,8 +1,8 @@
 use crate::xml::HrefElement;
 use rustical_xml::{XmlDeserialize, XmlSerialize};
-#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone)]
+#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
 pub struct GroupMembership(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
-#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone)]
+#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
 pub struct GroupMemberSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);


@@ -1,14 +1,14 @@
 use derive_more::From;
 use rustical_xml::{XmlDeserialize, XmlSerialize};
-#[derive(XmlDeserialize, XmlSerialize, Debug, Clone, From, PartialEq)]
+#[derive(XmlDeserialize, XmlSerialize, Debug, Clone, From, PartialEq, Eq)]
 pub struct HrefElement {
     #[xml(ns = "crate::namespace::NS_DAV")]
     pub href: String,
 }
 impl HrefElement {
-    pub fn new(href: String) -> Self {
+    #[must_use] pub const fn new(href: String) -> Self {
         Self { href }
     }
 }


@@ -26,7 +26,7 @@ fn xml_serialize_status(
     namespaces: &HashMap<Namespace, &str>,
     writer: &mut quick_xml::Writer<&mut Vec<u8>>,
 ) -> std::io::Result<()> {
-    XmlSerialize::serialize(&format!("HTTP/1.1 {}", status), ns, tag, namespaces, writer)
+    XmlSerialize::serialize(&format!("HTTP/1.1 {status}"), ns, tag, namespaces, writer)
 }
 #[derive(XmlSerialize)]
@@ -64,7 +64,7 @@ fn xml_serialize_optional_status(
     writer: &mut quick_xml::Writer<&mut Vec<u8>>,
 ) -> std::io::Result<()> {
     XmlSerialize::serialize(
-        &val.map(|status| format!("HTTP/1.1 {}", status)),
+        &val.map(|status| format!("HTTP/1.1 {status}")),
         ns,
         tag,
         namespaces,
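format!("HTTP/1.1 {}", status) becomes format!("HTTP/1.1 {status}") via clippy's uninlined_format_args: Rust 2021 format strings can capture identifiers directly, keeping the value next to its placeholder. Sketch:

fn main() {
    let status = 207; // e.g. Multi-Status
    let old = format!("HTTP/1.1 {}", status);
    let new = format!("HTTP/1.1 {status}");
    assert_eq!(old, new);
}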


@@ -15,7 +15,7 @@ pub struct PropfindElement<PN: XmlDeserialize> {
     pub include: Option<PropElement<PN>>,
 }
-#[derive(Debug, Clone, PartialEq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub struct PropElement<PN: XmlDeserialize>(
     // valid
     pub Vec<PN>,
@@ -82,7 +82,7 @@ impl<PN: XmlDeserialize> XmlDeserialize for PropElement<PN> {
     }
 }
-#[derive(Debug, Clone, XmlDeserialize, PartialEq)]
+#[derive(Debug, Clone, XmlDeserialize, PartialEq, Eq)]
 pub enum PropfindType<PN: XmlDeserialize> {
     #[xml(ns = "crate::namespace::NS_DAV")]
     Propname,


@@ -2,7 +2,7 @@ use rustical_xml::XmlSerialize;
 use strum::VariantArray;
 // RFC 3253 section-3.1.5
-#[derive(Debug, Clone, XmlSerialize, PartialEq)]
+#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
 pub struct SupportedReportSet<T: XmlSerialize + 'static> {
     #[xml(flatten)]
     #[xml(ns = "crate::namespace::NS_DAV")]
@@ -10,7 +10,7 @@ pub struct SupportedReportSet<T: XmlSerialize + 'static> {
 }
 impl<T: XmlSerialize + Clone + 'static> SupportedReportSet<T> {
-    pub fn new(methods: Vec<T>) -> Self {
+    #[must_use] pub fn new(methods: Vec<T>) -> Self {
         Self {
             supported_report: methods
                 .into_iter()
@@ -27,7 +27,7 @@ impl<T: XmlSerialize + Clone + 'static> SupportedReportSet<T> {
     }
 }
-#[derive(Debug, Clone, XmlSerialize, PartialEq)]
+#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
 pub struct ReportWrapper<T: XmlSerialize> {
     #[xml(ns = "crate::namespace::NS_DAV")]
     report: T,


@@ -1,9 +1,9 @@
use rustical_xml::XmlSerialize; use rustical_xml::XmlSerialize;
#[derive(Debug, Clone, PartialEq, XmlSerialize)] #[derive(Debug, Clone, PartialEq, Eq, XmlSerialize)]
pub struct Resourcetype(#[xml(flatten, ty = "untagged")] pub &'static [ResourcetypeInner]); pub struct Resourcetype(#[xml(flatten, ty = "untagged")] pub &'static [ResourcetypeInner]);
#[derive(Debug, Clone, PartialEq, XmlSerialize)] #[derive(Debug, Clone, PartialEq, Eq, XmlSerialize)]
pub struct ResourcetypeInner( pub struct ResourcetypeInner(
#[xml(ty = "namespace")] pub Option<quick_xml::name::Namespace<'static>>, #[xml(ty = "namespace")] pub Option<quick_xml::name::Namespace<'static>>,
#[xml(ty = "tag_name")] pub &'static str, #[xml(ty = "tag_name")] pub &'static str,


@@ -2,7 +2,7 @@ use rustical_xml::{ValueDeserialize, ValueSerialize, XmlDeserialize, XmlRootTag}
use super::PropfindType; use super::PropfindType;
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub enum SyncLevel { pub enum SyncLevel {
One, One,
Infinity, Infinity,
@@ -25,15 +25,15 @@ impl ValueDeserialize for SyncLevel {
impl ValueSerialize for SyncLevel { impl ValueSerialize for SyncLevel {
fn serialize(&self) -> String { fn serialize(&self) -> String {
match self { match self {
SyncLevel::One => "1", Self::One => "1",
SyncLevel::Infinity => "Infinity", Self::Infinity => "Infinity",
} }
.to_owned() .to_owned()
} }
} }
// https://datatracker.ietf.org/doc/html/rfc5323#section-5.17 // https://datatracker.ietf.org/doc/html/rfc5323#section-5.17
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
pub struct LimitElement { pub struct LimitElement {
#[xml(ns = "crate::namespace::NS_DAV")] #[xml(ns = "crate::namespace::NS_DAV")]
pub nresults: NresultsElement, pub nresults: NresultsElement,
@@ -53,7 +53,7 @@ impl From<LimitElement> for u64 {
} }
} }
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
pub struct NresultsElement(#[xml(ty = "text")] u64); pub struct NresultsElement(#[xml(ty = "text")] u64);
#[derive(XmlDeserialize, Clone, Debug, PartialEq, XmlRootTag)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, XmlRootTag)]


@@ -6,7 +6,7 @@ use quick_xml::{
use rustical_xml::{NamespaceOwned, XmlSerialize}; use rustical_xml::{NamespaceOwned, XmlSerialize};
use std::collections::HashMap; use std::collections::HashMap;
#[derive(Clone, Debug, PartialEq, From)] #[derive(Clone, Debug, PartialEq, Eq, From)]
pub struct TagList(Vec<(Option<NamespaceOwned>, String)>); pub struct TagList(Vec<(Option<NamespaceOwned>, String)>);
impl XmlSerialize for TagList { impl XmlSerialize for TagList {
@@ -18,13 +18,12 @@ impl XmlSerialize for TagList {
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
let prefix = ns let prefix = ns
.map(|ns| namespaces.get(&ns)) .and_then(|ns| namespaces.get(&ns))
.unwrap_or(None)
.map(|prefix| { .map(|prefix| {
if !prefix.is_empty() { if prefix.is_empty() {
format!("{prefix}:")
} else {
String::new() String::new()
} else {
format!("{prefix}:")
} }
}); });
let has_prefix = prefix.is_some(); let has_prefix = prefix.is_some();
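The prefix lookup above is one of the recurring clippy simplifications in this commit: `.map(|ns| namespaces.get(&ns)).unwrap_or(None)` becomes `.and_then(…)`, and the `is_empty` branch is inverted. A minimal standalone sketch of the same transformation, using illustrative data rather than the crate's `Namespace` type:

use std::collections::HashMap;

fn main() {
    let namespaces: HashMap<&str, &str> = HashMap::from([("DAV:", "d")]);
    let ns: Option<&str> = Some("DAV:");

    // Before: map() wraps the lookup in a second Option, unwrap_or(None) flattens it.
    let verbose = ns.map(|ns| namespaces.get(ns)).unwrap_or(None);
    // After: and_then() does the lookup and the flattening in one step.
    let concise = ns.and_then(|ns| namespaces.get(ns));
    assert_eq!(verbose, concise);

    // The inverted condition from the diff: an empty prefix means no "prefix:" part.
    let prefix = concise.map(|prefix| {
        if prefix.is_empty() {
            String::new()
        } else {
            format!("{prefix}:")
        }
    });
    assert_eq!(prefix.as_deref(), Some("d:"));
}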


@@ -125,7 +125,7 @@ impl<S: SubscriptionStore> DavPushController<S> {
subsciption.id, subsciption.topic subsciption.id, subsciption.topic
); );
self.try_delete_subscription(&subsciption.id).await; self.try_delete_subscription(&subsciption.id).await;
}; }
} }
if let Err(err) = self.send_payload(&payload, &subsciption).await { if let Err(err) = self.send_payload(&payload, &subsciption).await {
@@ -207,7 +207,7 @@ enum NotifierError {
impl NotifierError { impl NotifierError {
// Decide whether the error should cause the subscription to be removed // Decide whether the error should cause the subscription to be removed
pub fn is_permament_error(&self) -> bool { pub const fn is_permament_error(&self) -> bool {
match self { match self {
Self::InvalidPublicKeyType(_) Self::InvalidPublicKeyType(_)
| Self::InvalidEndpointUrl(_) | Self::InvalidEndpointUrl(_)


@@ -1,7 +1,7 @@
use rustical_dav::header::Depth; use rustical_dav::header::Depth;
use rustical_xml::{Unparsed, XmlDeserialize, XmlSerialize}; use rustical_xml::{Unparsed, XmlDeserialize, XmlSerialize};
#[derive(Debug, Clone, XmlSerialize, PartialEq)] #[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
pub enum Transport { pub enum Transport {
#[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")] #[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")]
WebPush, WebPush,
@@ -33,12 +33,12 @@ pub enum Trigger {
PropertyUpdate(PropertyUpdate), PropertyUpdate(PropertyUpdate),
} }
#[derive(XmlSerialize, XmlDeserialize, PartialEq, Clone, Debug)] #[derive(XmlSerialize, XmlDeserialize, PartialEq, Eq, Clone, Debug)]
pub struct ContentUpdate( pub struct ContentUpdate(
#[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth, #[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth,
); );
#[derive(XmlSerialize, PartialEq, Clone, Debug)] #[derive(XmlSerialize, PartialEq, Eq, Clone, Debug)]
pub struct PropertyUpdate( pub struct PropertyUpdate(
#[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth, #[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth,
); );


@@ -15,7 +15,7 @@ pub struct WebPushSubscription {
pub auth_secret: String, pub auth_secret: String,
} }
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
pub struct SubscriptionPublicKey { pub struct SubscriptionPublicKey {
#[xml(ty = "attr", rename = "type")] #[xml(ty = "attr", rename = "type")]
pub ty: String, pub ty: String,


@@ -1,6 +1,6 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
fn default_true() -> bool { const fn default_true() -> bool {
true true
} }


@@ -106,11 +106,11 @@ pub fn frontend_router<AP: AuthenticationProvider, CS: CalendarStore, AS: Addres
router = router router = router
.layer(AuthenticationLayer::new(auth_provider.clone())) .layer(AuthenticationLayer::new(auth_provider.clone()))
.layer(Extension(auth_provider.clone())) .layer(Extension(auth_provider))
.layer(Extension(cal_store.clone())) .layer(Extension(cal_store))
.layer(Extension(addr_store.clone())) .layer(Extension(addr_store))
.layer(Extension(frontend_config.clone())) .layer(Extension(frontend_config))
.layer(Extension(oidc_config.clone())); .layer(Extension(oidc_config));
Router::new() Router::new()
.nest(prefix, router) .nest(prefix, router)


@@ -58,6 +58,6 @@ pub fn nextcloud_login_router<AP: AuthenticationProvider>(auth_provider: Arc<AP>
.route("/", post(post_nextcloud_login)) .route("/", post(post_nextcloud_login))
.layer(Extension(nextcloud_flows)) .layer(Extension(nextcloud_flows))
.layer(Extension(auth_provider.clone())) .layer(Extension(auth_provider.clone()))
.layer(AuthenticationLayer::new(auth_provider.clone())) .layer(AuthenticationLayer::new(auth_provider))
.layer(middleware::from_fn(unauthorized_handler)) .layer(middleware::from_fn(unauthorized_handler))
} }


@@ -18,7 +18,7 @@ use serde::{Deserialize, Serialize};
use std::sync::Arc; use std::sync::Arc;
use tracing::instrument; use tracing::instrument;
pub(crate) async fn post_nextcloud_login( pub async fn post_nextcloud_login(
Extension(state): Extension<Arc<NextcloudFlows>>, Extension(state): Extension<Arc<NextcloudFlows>>,
TypedHeader(user_agent): TypedHeader<UserAgent>, TypedHeader(user_agent): TypedHeader<UserAgent>,
Host(host): Host, Host(host): Host,
@@ -35,9 +35,9 @@ pub(crate) async fn post_nextcloud_login(
flows.insert( flows.insert(
flow_id.clone(), flow_id.clone(),
NextcloudFlow { NextcloudFlow {
app_name: app_name.to_owned(), app_name: app_name.clone(),
created_at: Utc::now(), created_at: Utc::now(),
token: token.to_owned(), token: token.clone(),
response: None, response: None,
}, },
); );
@@ -52,11 +52,11 @@ pub(crate) async fn post_nextcloud_login(
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")] #[serde(rename_all = "camelCase")]
pub(crate) struct NextcloudPollForm { pub struct NextcloudPollForm {
token: String, token: String,
} }
pub(crate) async fn post_nextcloud_poll<AP: AuthenticationProvider>( pub async fn post_nextcloud_poll<AP: AuthenticationProvider>(
Extension(state): Extension<Arc<NextcloudFlows>>, Extension(state): Extension<Arc<NextcloudFlows>>,
Path(flow_id): Path<String>, Path(flow_id): Path<String>,
Extension(auth_provider): Extension<Arc<AP>>, Extension(auth_provider): Extension<Arc<AP>>,
@@ -75,8 +75,8 @@ pub(crate) async fn post_nextcloud_poll<AP: AuthenticationProvider>(
auth_provider auth_provider
.add_app_token( .add_app_token(
&response.login_name, &response.login_name,
flow.app_name.to_owned(), flow.app_name.clone(),
response.app_password.to_owned(), response.app_password.clone(),
) )
.await?; .await?;
flows.remove(&flow_id); flows.remove(&flow_id);
@@ -98,7 +98,7 @@ struct NextcloudLoginPage {
} }
#[instrument(skip(state))] #[instrument(skip(state))]
pub(crate) async fn get_nextcloud_flow( pub async fn get_nextcloud_flow(
Extension(state): Extension<Arc<NextcloudFlows>>, Extension(state): Extension<Arc<NextcloudFlows>>,
Path(flow_id): Path<String>, Path(flow_id): Path<String>,
user: Principal, user: Principal,
@@ -107,7 +107,7 @@ pub(crate) async fn get_nextcloud_flow(
Ok(Html( Ok(Html(
NextcloudLoginPage { NextcloudLoginPage {
username: user.displayname.unwrap_or(user.id), username: user.displayname.unwrap_or(user.id),
app_name: flow.app_name.to_owned(), app_name: flow.app_name.clone(),
} }
.render() .render()
.unwrap(), .unwrap(),
@@ -119,7 +119,7 @@ pub(crate) async fn get_nextcloud_flow(
} }
#[derive(Debug, Clone, Deserialize, Serialize)] #[derive(Debug, Clone, Deserialize, Serialize)]
pub(crate) struct NextcloudAuthorizeForm { pub struct NextcloudAuthorizeForm {
app_name: String, app_name: String,
} }
@@ -130,7 +130,7 @@ struct NextcloudLoginSuccessPage {
} }
#[instrument(skip(state))] #[instrument(skip(state))]
pub(crate) async fn post_nextcloud_flow( pub async fn post_nextcloud_flow(
user: Principal, user: Principal,
Extension(state): Extension<Arc<NextcloudFlows>>, Extension(state): Extension<Arc<NextcloudFlows>>,
Path(flow_id): Path<String>, Path(flow_id): Path<String>,
@@ -141,12 +141,12 @@ pub(crate) async fn post_nextcloud_flow(
flow.app_name = form.app_name; flow.app_name = form.app_name;
flow.response = Some(NextcloudSuccessResponse { flow.response = Some(NextcloudSuccessResponse {
server: format!("https://{host}"), server: format!("https://{host}"),
login_name: user.id.to_owned(), login_name: user.id.clone(),
app_password: generate_app_token(), app_password: generate_app_token(),
}); });
Ok(Html( Ok(Html(
NextcloudLoginSuccessPage { NextcloudLoginSuccessPage {
app_name: flow.app_name.to_owned(), app_name: flow.app_name.clone(),
} }
.render() .render()
.unwrap(), .unwrap(),


@@ -40,7 +40,7 @@ pub struct AppleConfig {
} }
#[derive(Debug, Clone, Deserialize)] #[derive(Debug, Clone, Deserialize)]
pub(crate) struct PostAppTokenForm { pub struct PostAppTokenForm {
name: String, name: String,
#[serde(default)] #[serde(default)]
apple: bool, apple: bool,
@@ -57,7 +57,7 @@ pub async fn route_post_app_token<AP: AuthenticationProvider>(
assert_eq!(user_id, user.id); assert_eq!(user_id, user.id);
let token = generate_app_token(); let token = generate_app_token();
let mut token_id = auth_provider let mut token_id = auth_provider
.add_app_token(&user.id, name.to_owned(), token.clone()) .add_app_token(&user.id, name.clone(), token.clone())
.await?; .await?;
// Get first 4 characters of token identifier // Get first 4 characters of token identifier
token_id.truncate(4); token_id.truncate(4);
@@ -70,7 +70,7 @@ pub async fn route_post_app_token<AP: AuthenticationProvider>(
hostname: hostname.clone(), hostname: hostname.clone(),
caldav_principal_url: format!("https://{hostname}/caldav-compat/principal/{user_id}"), caldav_principal_url: format!("https://{hostname}/caldav-compat/principal/{user_id}"),
carddav_principal_url: format!("https://{hostname}/carddav/principal/{user_id}"), carddav_principal_url: format!("https://{hostname}/carddav/principal/{user_id}"),
user: user.id.to_owned(), user: user.id.clone(),
token, token,
caldav_profile_uuid: Uuid::new_v4(), caldav_profile_uuid: Uuid::new_v4(),
carddav_profile_uuid: Uuid::new_v4(), carddav_profile_uuid: Uuid::new_v4(),


@@ -45,32 +45,32 @@ impl AddressObject {
Ok(Self { id, vcf, vcard }) Ok(Self { id, vcf, vcard })
} }
pub fn get_id(&self) -> &str { #[must_use] pub fn get_id(&self) -> &str {
&self.id &self.id
} }
pub fn get_etag(&self) -> String { #[must_use] pub fn get_etag(&self) -> String {
let mut hasher = Sha256::new(); let mut hasher = Sha256::new();
hasher.update(self.get_id()); hasher.update(self.get_id());
hasher.update(self.get_vcf()); hasher.update(self.get_vcf());
format!("\"{:x}\"", hasher.finalize()) format!("\"{:x}\"", hasher.finalize())
} }
pub fn get_vcf(&self) -> &str { #[must_use] pub fn get_vcf(&self) -> &str {
&self.vcf &self.vcf
} }
pub fn get_anniversary(&self) -> Option<(CalDateTime, bool)> { #[must_use] pub fn get_anniversary(&self) -> Option<(CalDateTime, bool)> {
let prop = self.vcard.get_property("ANNIVERSARY")?.value.as_deref()?; let prop = self.vcard.get_property("ANNIVERSARY")?.value.as_deref()?;
CalDateTime::parse_vcard(prop).ok() CalDateTime::parse_vcard(prop).ok()
} }
pub fn get_birthday(&self) -> Option<(CalDateTime, bool)> { #[must_use] pub fn get_birthday(&self) -> Option<(CalDateTime, bool)> {
let prop = self.vcard.get_property("BDAY")?.value.as_deref()?; let prop = self.vcard.get_property("BDAY")?.value.as_deref()?;
CalDateTime::parse_vcard(prop).ok() CalDateTime::parse_vcard(prop).ok()
} }
pub fn get_full_name(&self) -> Option<&str> { #[must_use] pub fn get_full_name(&self) -> Option<&str> {
let prop = self.vcard.get_property("FN")?; let prop = self.vcard.get_property("FN")?;
prop.value.as_deref() prop.value.as_deref()
} }
@@ -94,7 +94,7 @@ impl AddressObject {
let year_suffix = year.map(|year| format!(" ({year})")).unwrap_or_default(); let year_suffix = year.map(|year| format!(" ({year})")).unwrap_or_default();
Some(CalendarObject::from_ics(format!( Some(CalendarObject::from_ics(format!(
r#"BEGIN:VCALENDAR r"BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
CALSCALE:GREGORIAN CALSCALE:GREGORIAN
PRODID:-//github.com/lennart-k/rustical birthday calendar//EN PRODID:-//github.com/lennart-k/rustical birthday calendar//EN
@@ -111,7 +111,7 @@ ACTION:DISPLAY
DESCRIPTION:💍 {fullname}{year_suffix} DESCRIPTION:💍 {fullname}{year_suffix}
END:VALARM END:VALARM
END:VEVENT END:VEVENT
END:VCALENDAR"#, END:VCALENDAR",
))?) ))?)
} else { } else {
None None
@@ -135,7 +135,7 @@ END:VCALENDAR"#,
let year_suffix = year.map(|year| format!(" ({year})")).unwrap_or_default(); let year_suffix = year.map(|year| format!(" ({year})")).unwrap_or_default();
Some(CalendarObject::from_ics(format!( Some(CalendarObject::from_ics(format!(
r#"BEGIN:VCALENDAR r"BEGIN:VCALENDAR
VERSION:2.0 VERSION:2.0
CALSCALE:GREGORIAN CALSCALE:GREGORIAN
PRODID:-//github.com/lennart-k/rustical birthday calendar//EN PRODID:-//github.com/lennart-k/rustical birthday calendar//EN
@@ -152,7 +152,7 @@ ACTION:DISPLAY
DESCRIPTION:🎂 {fullname}{year_suffix} DESCRIPTION:🎂 {fullname}{year_suffix}
END:VALARM END:VALARM
END:VEVENT END:VEVENT
END:VCALENDAR"#, END:VCALENDAR",
))?) ))?)
} else { } else {
None None


@@ -24,7 +24,7 @@ pub enum Error {
} }
impl Error { impl Error {
pub fn status_code(&self) -> StatusCode { #[must_use] pub const fn status_code(&self) -> StatusCode {
match self { match self {
Self::InvalidData(_) => StatusCode::BAD_REQUEST, Self::InvalidData(_) => StatusCode::BAD_REQUEST,
Self::MissingCalendar | Self::MissingContact => StatusCode::BAD_REQUEST, Self::MissingCalendar | Self::MissingContact => StatusCode::BAD_REQUEST,


@@ -15,7 +15,7 @@ pub struct EventObject {
} }
impl EventObject { impl EventObject {
pub fn get_uid(&self) -> &str { #[must_use] pub fn get_uid(&self) -> &str {
self.event.get_uid() self.event.get_uid()
} }
@@ -43,7 +43,7 @@ impl EventObject {
if let Some(dtend) = self.get_dtend()? { if let Some(dtend) = self.get_dtend()? {
return Ok(Some(dtend)); return Ok(Some(dtend));
}; }
let duration = self.event.get_duration().unwrap_or(Duration::days(1)); let duration = self.event.get_duration().unwrap_or(Duration::days(1));
@@ -96,7 +96,7 @@ impl EventObject {
&self, &self,
start: Option<DateTime<Utc>>, start: Option<DateTime<Utc>>,
end: Option<DateTime<Utc>>, end: Option<DateTime<Utc>>,
overrides: &[EventObject], overrides: &[Self],
) -> Result<Vec<IcalEvent>, Error> { ) -> Result<Vec<IcalEvent>, Error> {
if let Some(mut rrule_set) = self.recurrence_ruleset()? { if let Some(mut rrule_set) = self.recurrence_ruleset()? {
if let Some(start) = start { if let Some(start) = start {
@@ -150,7 +150,7 @@ impl EventObject {
ev.set_property(Property { ev.set_property(Property {
name: "RECURRENCE-ID".to_string(), name: "RECURRENCE-ID".to_string(),
value: Some(dateformat.to_owned()), value: Some(dateformat.clone()),
params: None, params: None,
}); });
ev.set_property(Property { ev.set_property(Property {


@@ -26,11 +26,11 @@ pub enum CalendarObjectType {
} }
impl CalendarObjectType { impl CalendarObjectType {
pub fn as_str(&self) -> &'static str { #[must_use] pub const fn as_str(&self) -> &'static str {
match self { match self {
CalendarObjectType::Event => "VEVENT", Self::Event => "VEVENT",
CalendarObjectType::Todo => "VTODO", Self::Todo => "VTODO",
CalendarObjectType::Journal => "VJOURNAL", Self::Journal => "VJOURNAL",
} }
} }
} }
@@ -66,9 +66,9 @@ pub enum CalendarObjectComponent {
impl From<&CalendarObjectComponent> for CalendarObjectType { impl From<&CalendarObjectComponent> for CalendarObjectType {
fn from(value: &CalendarObjectComponent) -> Self { fn from(value: &CalendarObjectComponent) -> Self {
match value { match value {
CalendarObjectComponent::Event(..) => CalendarObjectType::Event, CalendarObjectComponent::Event(..) => Self::Event,
CalendarObjectComponent::Todo(..) => CalendarObjectType::Todo, CalendarObjectComponent::Todo(..) => Self::Todo,
CalendarObjectComponent::Journal(..) => CalendarObjectType::Journal, CalendarObjectComponent::Journal(..) => Self::Journal,
} }
} }
} }
@@ -154,10 +154,10 @@ impl CalendarObject {
)); ));
} }
if !cal.events.is_empty() as u8 if u8::from(!cal.events.is_empty())
+ !cal.todos.is_empty() as u8 + u8::from(!cal.todos.is_empty())
+ !cal.journals.is_empty() as u8 + u8::from(!cal.journals.is_empty())
+ !cal.free_busys.is_empty() as u8 + u8::from(!cal.free_busys.is_empty())
!= 1 != 1
{ {
// https://datatracker.ietf.org/doc/html/rfc4791#section-4.1 // https://datatracker.ietf.org/doc/html/rfc4791#section-4.1
@@ -208,15 +208,15 @@ impl CalendarObject {
}) })
} }
pub fn get_vtimezones(&self) -> &HashMap<String, IcalTimeZone> { #[must_use] pub const fn get_vtimezones(&self) -> &HashMap<String, IcalTimeZone> {
&self.vtimezones &self.vtimezones
} }
pub fn get_data(&self) -> &CalendarObjectComponent { #[must_use] pub const fn get_data(&self) -> &CalendarObjectComponent {
&self.data &self.data
} }
pub fn get_id(&self) -> &str { #[must_use] pub fn get_id(&self) -> &str {
match &self.data { match &self.data {
// We've made sure before that the first component exists and all components share the // We've made sure before that the first component exists and all components share the
// same UID // same UID
@@ -226,22 +226,22 @@ impl CalendarObject {
} }
} }
pub fn get_etag(&self) -> String { #[must_use] pub fn get_etag(&self) -> String {
let mut hasher = Sha256::new(); let mut hasher = Sha256::new();
hasher.update(self.get_id()); hasher.update(self.get_id());
hasher.update(self.get_ics()); hasher.update(self.get_ics());
format!("\"{:x}\"", hasher.finalize()) format!("\"{:x}\"", hasher.finalize())
} }
pub fn get_ics(&self) -> &str { #[must_use] pub fn get_ics(&self) -> &str {
&self.ics &self.ics
} }
pub fn get_component_name(&self) -> &str { #[must_use] pub fn get_component_name(&self) -> &str {
self.get_object_type().as_str() self.get_object_type().as_str()
} }
pub fn get_object_type(&self) -> CalendarObjectType { #[must_use] pub fn get_object_type(&self) -> CalendarObjectType {
(&self.data).into() (&self.data).into()
} }
@@ -250,7 +250,7 @@ impl CalendarObject {
CalendarObjectComponent::Event(main_event, overrides) => Ok(overrides CalendarObjectComponent::Event(main_event, overrides) => Ok(overrides
.iter() .iter()
.chain([main_event].into_iter()) .chain([main_event].into_iter())
.map(|event| event.get_dtstart()) .map(super::event::EventObject::get_dtstart)
.collect::<Result<Vec<_>, _>>()? .collect::<Result<Vec<_>, _>>()?
.into_iter() .into_iter()
.flatten() .flatten()
@@ -264,7 +264,7 @@ impl CalendarObject {
CalendarObjectComponent::Event(main_event, overrides) => Ok(overrides CalendarObjectComponent::Event(main_event, overrides) => Ok(overrides
.iter() .iter()
.chain([main_event].into_iter()) .chain([main_event].into_iter())
.map(|event| event.get_last_occurence()) .map(super::event::EventObject::get_last_occurence)
.collect::<Result<Vec<_>, _>>()? .collect::<Result<Vec<_>, _>>()?
.into_iter() .into_iter()
.flatten() .flatten()


@@ -73,7 +73,7 @@ impl From<CalDateTime> for DateTime<rrule::Tz> {
value value
.as_datetime() .as_datetime()
.into_owned() .into_owned()
.with_timezone(&value.timezone().to_owned().into()) .with_timezone(&value.timezone().into())
} }
} }
@@ -102,13 +102,13 @@ impl Ord for CalDateTime {
impl From<DateTime<Local>> for CalDateTime { impl From<DateTime<Local>> for CalDateTime {
fn from(value: DateTime<Local>) -> Self { fn from(value: DateTime<Local>) -> Self {
CalDateTime::DateTime(value.with_timezone(&ICalTimezone::Local)) Self::DateTime(value.with_timezone(&ICalTimezone::Local))
} }
} }
impl From<DateTime<Utc>> for CalDateTime { impl From<DateTime<Utc>> for CalDateTime {
fn from(value: DateTime<Utc>) -> Self { fn from(value: DateTime<Utc>) -> Self {
CalDateTime::DateTime(value.with_timezone(&ICalTimezone::Olson(chrono_tz::UTC))) Self::DateTime(value.with_timezone(&ICalTimezone::Olson(chrono_tz::UTC)))
} }
} }
@@ -158,7 +158,7 @@ impl CalDateTime {
Self::parse(prop_value, timezone) Self::parse(prop_value, timezone)
} }
pub fn format(&self) -> String { #[must_use] pub fn format(&self) -> String {
match self { match self {
Self::DateTime(datetime) => match datetime.timezone() { Self::DateTime(datetime) => match datetime.timezone() {
ICalTimezone::Olson(chrono_tz::UTC) => datetime.format(UTC_DATE_TIME).to_string(), ICalTimezone::Olson(chrono_tz::UTC) => datetime.format(UTC_DATE_TIME).to_string(),
@@ -168,25 +168,25 @@ impl CalDateTime {
} }
} }
pub fn format_date(&self) -> String { #[must_use] pub fn format_date(&self) -> String {
match self { match self {
Self::DateTime(datetime) => datetime.format(LOCAL_DATE).to_string(), Self::DateTime(datetime) => datetime.format(LOCAL_DATE).to_string(),
Self::Date(date, _) => date.format(LOCAL_DATE).to_string(), Self::Date(date, _) => date.format(LOCAL_DATE).to_string(),
} }
} }
pub fn date(&self) -> NaiveDate { #[must_use] pub fn date(&self) -> NaiveDate {
match self { match self {
Self::DateTime(datetime) => datetime.date_naive(), Self::DateTime(datetime) => datetime.date_naive(),
Self::Date(date, _) => date.to_owned(), Self::Date(date, _) => date.to_owned(),
} }
} }
pub fn is_date(&self) -> bool { #[must_use] pub const fn is_date(&self) -> bool {
matches!(&self, Self::Date(_, _)) matches!(&self, Self::Date(_, _))
} }
pub fn as_datetime(&self) -> Cow<'_, DateTime<ICalTimezone>> { #[must_use] pub fn as_datetime(&self) -> Cow<'_, DateTime<ICalTimezone>> {
match self { match self {
Self::DateTime(datetime) => Cow::Borrowed(datetime), Self::DateTime(datetime) => Cow::Borrowed(datetime),
Self::Date(date, tz) => Cow::Owned( Self::Date(date, tz) => Cow::Owned(
@@ -201,14 +201,14 @@ impl CalDateTime {
pub fn parse(value: &str, timezone: Option<Tz>) -> Result<Self, CalDateTimeError> { pub fn parse(value: &str, timezone: Option<Tz>) -> Result<Self, CalDateTimeError> {
if let Ok(datetime) = NaiveDateTime::parse_from_str(value, LOCAL_DATE_TIME) { if let Ok(datetime) = NaiveDateTime::parse_from_str(value, LOCAL_DATE_TIME) {
if let Some(timezone) = timezone { if let Some(timezone) = timezone {
return Ok(CalDateTime::DateTime( return Ok(Self::DateTime(
datetime datetime
.and_local_timezone(timezone.into()) .and_local_timezone(timezone.into())
.earliest() .earliest()
.ok_or(CalDateTimeError::LocalTimeGap)?, .ok_or(CalDateTimeError::LocalTimeGap)?,
)); ));
} }
return Ok(CalDateTime::DateTime( return Ok(Self::DateTime(
datetime datetime
.and_local_timezone(ICalTimezone::Local) .and_local_timezone(ICalTimezone::Local)
.earliest() .earliest()
@@ -220,17 +220,16 @@ impl CalDateTime {
return Ok(datetime.and_utc().into()); return Ok(datetime.and_utc().into());
} }
let timezone = timezone let timezone = timezone
.map(ICalTimezone::Olson) .map_or(ICalTimezone::Local, ICalTimezone::Olson);
.unwrap_or(ICalTimezone::Local);
if let Ok(date) = NaiveDate::parse_from_str(value, LOCAL_DATE) { if let Ok(date) = NaiveDate::parse_from_str(value, LOCAL_DATE) {
return Ok(CalDateTime::Date(date, timezone)); return Ok(Self::Date(date, timezone));
} }
if let Ok(date) = NaiveDate::parse_from_str(value, "%Y-%m-%d") { if let Ok(date) = NaiveDate::parse_from_str(value, "%Y-%m-%d") {
return Ok(CalDateTime::Date(date, timezone)); return Ok(Self::Date(date, timezone));
} }
if let Ok(date) = NaiveDate::parse_from_str(value, "%Y%m%d") { if let Ok(date) = NaiveDate::parse_from_str(value, "%Y%m%d") {
return Ok(CalDateTime::Date(date, timezone)); return Ok(Self::Date(date, timezone));
} }
Err(CalDateTimeError::InvalidDatetimeFormat(value.to_string())) Err(CalDateTimeError::InvalidDatetimeFormat(value.to_string()))
@@ -250,7 +249,7 @@ impl CalDateTime {
let day = captures.name("d").unwrap().as_str().parse().ok().unwrap(); let day = captures.name("d").unwrap().as_str().parse().ok().unwrap();
return Ok(( return Ok((
CalDateTime::Date( Self::Date(
NaiveDate::from_ymd_opt(year, month, day) NaiveDate::from_ymd_opt(year, month, day)
.ok_or(CalDateTimeError::ParseError(value.to_string()))?, .ok_or(CalDateTimeError::ParseError(value.to_string()))?,
ICalTimezone::Local, ICalTimezone::Local,
@@ -261,14 +260,14 @@ impl CalDateTime {
Err(CalDateTimeError::InvalidDatetimeFormat(value.to_string())) Err(CalDateTimeError::InvalidDatetimeFormat(value.to_string()))
} }
pub fn utc(&self) -> DateTime<Utc> { #[must_use] pub fn utc(&self) -> DateTime<Utc> {
self.as_datetime().to_utc() self.as_datetime().to_utc()
} }
pub fn timezone(&self) -> ICalTimezone { #[must_use] pub fn timezone(&self) -> ICalTimezone {
match &self { match &self {
CalDateTime::DateTime(datetime) => datetime.timezone(), Self::DateTime(datetime) => datetime.timezone(),
CalDateTime::Date(_, tz) => tz.to_owned(), Self::Date(_, tz) => tz.to_owned(),
} }
} }
} }
@@ -282,107 +281,107 @@ impl From<CalDateTime> for DateTime<Utc> {
impl Datelike for CalDateTime { impl Datelike for CalDateTime {
fn year(&self) -> i32 { fn year(&self) -> i32 {
match &self { match &self {
CalDateTime::DateTime(datetime) => datetime.year(), Self::DateTime(datetime) => datetime.year(),
CalDateTime::Date(date, _) => date.year(), Self::Date(date, _) => date.year(),
} }
} }
fn month(&self) -> u32 { fn month(&self) -> u32 {
match &self { match &self {
CalDateTime::DateTime(datetime) => datetime.month(), Self::DateTime(datetime) => datetime.month(),
CalDateTime::Date(date, _) => date.month(), Self::Date(date, _) => date.month(),
} }
} }
fn month0(&self) -> u32 { fn month0(&self) -> u32 {
match &self { match &self {
CalDateTime::DateTime(datetime) => datetime.month0(), Self::DateTime(datetime) => datetime.month0(),
CalDateTime::Date(date, _) => date.month0(), Self::Date(date, _) => date.month0(),
} }
} }
fn day(&self) -> u32 { fn day(&self) -> u32 {
match &self { match &self {
CalDateTime::DateTime(datetime) => datetime.day(), Self::DateTime(datetime) => datetime.day(),
CalDateTime::Date(date, _) => date.day(), Self::Date(date, _) => date.day(),
} }
} }
fn day0(&self) -> u32 { fn day0(&self) -> u32 {
match &self { match &self {
CalDateTime::DateTime(datetime) => datetime.day0(), Self::DateTime(datetime) => datetime.day0(),
CalDateTime::Date(date, _) => date.day0(), Self::Date(date, _) => date.day0(),
} }
} }
fn ordinal(&self) -> u32 { fn ordinal(&self) -> u32 {
match &self { match &self {
CalDateTime::DateTime(datetime) => datetime.ordinal(), Self::DateTime(datetime) => datetime.ordinal(),
CalDateTime::Date(date, _) => date.ordinal(), Self::Date(date, _) => date.ordinal(),
} }
} }
fn ordinal0(&self) -> u32 { fn ordinal0(&self) -> u32 {
match &self { match &self {
CalDateTime::DateTime(datetime) => datetime.ordinal0(), Self::DateTime(datetime) => datetime.ordinal0(),
CalDateTime::Date(date, _) => date.ordinal0(), Self::Date(date, _) => date.ordinal0(),
} }
} }
fn weekday(&self) -> chrono::Weekday { fn weekday(&self) -> chrono::Weekday {
match &self { match &self {
CalDateTime::DateTime(datetime) => datetime.weekday(), Self::DateTime(datetime) => datetime.weekday(),
CalDateTime::Date(date, _) => date.weekday(), Self::Date(date, _) => date.weekday(),
} }
} }
fn iso_week(&self) -> chrono::IsoWeek { fn iso_week(&self) -> chrono::IsoWeek {
match &self { match &self {
CalDateTime::DateTime(datetime) => datetime.iso_week(), Self::DateTime(datetime) => datetime.iso_week(),
CalDateTime::Date(date, _) => date.iso_week(), Self::Date(date, _) => date.iso_week(),
} }
} }
fn with_year(&self, year: i32) -> Option<Self> { fn with_year(&self, year: i32) -> Option<Self> {
match &self { match &self {
CalDateTime::DateTime(datetime) => Some(Self::DateTime(datetime.with_year(year)?)), Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_year(year)?)),
CalDateTime::Date(date, tz) => Some(Self::Date(date.with_year(year)?, tz.to_owned())), Self::Date(date, tz) => Some(Self::Date(date.with_year(year)?, tz.to_owned())),
} }
} }
fn with_month(&self, month: u32) -> Option<Self> { fn with_month(&self, month: u32) -> Option<Self> {
match &self { match &self {
CalDateTime::DateTime(datetime) => Some(Self::DateTime(datetime.with_month(month)?)), Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_month(month)?)),
CalDateTime::Date(date, tz) => Some(Self::Date(date.with_month(month)?, tz.to_owned())), Self::Date(date, tz) => Some(Self::Date(date.with_month(month)?, tz.to_owned())),
} }
} }
fn with_month0(&self, month0: u32) -> Option<Self> { fn with_month0(&self, month0: u32) -> Option<Self> {
match &self { match &self {
CalDateTime::DateTime(datetime) => Some(Self::DateTime(datetime.with_month0(month0)?)), Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_month0(month0)?)),
CalDateTime::Date(date, tz) => { Self::Date(date, tz) => {
Some(Self::Date(date.with_month0(month0)?, tz.to_owned())) Some(Self::Date(date.with_month0(month0)?, tz.to_owned()))
} }
} }
} }
fn with_day(&self, day: u32) -> Option<Self> { fn with_day(&self, day: u32) -> Option<Self> {
match &self { match &self {
CalDateTime::DateTime(datetime) => Some(Self::DateTime(datetime.with_day(day)?)), Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_day(day)?)),
CalDateTime::Date(date, tz) => Some(Self::Date(date.with_day(day)?, tz.to_owned())), Self::Date(date, tz) => Some(Self::Date(date.with_day(day)?, tz.to_owned())),
} }
} }
fn with_day0(&self, day0: u32) -> Option<Self> { fn with_day0(&self, day0: u32) -> Option<Self> {
match &self { match &self {
CalDateTime::DateTime(datetime) => Some(Self::DateTime(datetime.with_day0(day0)?)), Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_day0(day0)?)),
CalDateTime::Date(date, tz) => Some(Self::Date(date.with_day0(day0)?, tz.to_owned())), Self::Date(date, tz) => Some(Self::Date(date.with_day0(day0)?, tz.to_owned())),
} }
} }
fn with_ordinal(&self, ordinal: u32) -> Option<Self> { fn with_ordinal(&self, ordinal: u32) -> Option<Self> {
match &self { match &self {
CalDateTime::DateTime(datetime) => { Self::DateTime(datetime) => {
Some(Self::DateTime(datetime.with_ordinal(ordinal)?)) Some(Self::DateTime(datetime.with_ordinal(ordinal)?))
} }
CalDateTime::Date(date, tz) => { Self::Date(date, tz) => {
Some(Self::Date(date.with_ordinal(ordinal)?, tz.to_owned())) Some(Self::Date(date.with_ordinal(ordinal)?, tz.to_owned()))
} }
} }
} }
fn with_ordinal0(&self, ordinal0: u32) -> Option<Self> { fn with_ordinal0(&self, ordinal0: u32) -> Option<Self> {
match &self { match &self {
CalDateTime::DateTime(datetime) => { Self::DateTime(datetime) => {
Some(Self::DateTime(datetime.with_ordinal0(ordinal0)?)) Some(Self::DateTime(datetime.with_ordinal0(ordinal0)?))
} }
CalDateTime::Date(date, tz) => { Self::Date(date, tz) => {
Some(Self::Date(date.with_ordinal0(ordinal0)?, tz.to_owned())) Some(Self::Date(date.with_ordinal0(ordinal0)?, tz.to_owned()))
} }
} }


@@ -26,7 +26,7 @@ impl From<rrule::Tz> for ICalTimezone {
} }
} }
#[derive(Debug, Clone, PartialEq, Display)] #[derive(Debug, Clone, PartialEq, Eq, Display)]
pub enum CalTimezoneOffset { pub enum CalTimezoneOffset {
Local(chrono::FixedOffset), Local(chrono::FixedOffset),
Olson(chrono_tz::TzOffset), Olson(chrono_tz::TzOffset),


@@ -83,7 +83,7 @@ async fn get_oidc_client(
})?; })?;
Ok(CoreClient::from_provider_metadata( Ok(CoreClient::from_provider_metadata(
provider_metadata.clone(), provider_metadata,
client_id.clone(), client_id.clone(),
client_secret.clone(), client_secret.clone(),
) )


@@ -14,7 +14,7 @@ pub struct Addressbook {
} }
impl Addressbook { impl Addressbook {
pub fn format_synctoken(&self) -> String { #[must_use] pub fn format_synctoken(&self) -> String {
format_synctoken(self.synctoken) format_synctoken(self.synctoken)
} }
} }


@@ -23,7 +23,7 @@ impl<AP: AuthenticationProvider> Clone for AuthenticationLayer<AP> {
} }
impl<AP: AuthenticationProvider> AuthenticationLayer<AP> { impl<AP: AuthenticationProvider> AuthenticationLayer<AP> {
pub fn new(auth_provider: Arc<AP>) -> Self { pub const fn new(auth_provider: Arc<AP>) -> Self {
Self { auth_provider } Self { auth_provider }
} }
} }


@@ -35,7 +35,7 @@ impl Principal {
/// Returns true if the user is either /// Returns true if the user is either
/// - the principal itself /// - the principal itself
/// - has full access to the prinicpal (is member) /// - has full access to the prinicpal (is member)
pub fn is_principal(&self, principal: &str) -> bool { #[must_use] pub fn is_principal(&self, principal: &str) -> bool {
if self.id == principal { if self.id == principal {
return true; return true;
} }
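The `#[must_use]` attributes added to getters like `is_principal` only change diagnostics: callers that drop the return value now get a compiler warning. A small sketch with a hypothetical free function, not the method from the store crate:

#[must_use]
fn is_principal(id: &str, principal: &str) -> bool {
    id == principal
}

fn main() {
    // Using the result compiles without complaint.
    if is_principal("alice", "alice") {
        println!("access granted");
    }

    // Dropping the result now produces:
    // warning: unused return value of `is_principal` that must be used
    is_principal("alice", "bob");
}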


@@ -3,8 +3,8 @@ use std::fmt::Display;
use rustical_xml::ValueSerialize; use rustical_xml::ValueSerialize;
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
/// https://datatracker.ietf.org/doc/html/rfc5545#section-3.2.3 /// <https://datatracker.ietf.org/doc/html/rfc5545#section-3.2.3>
#[derive(Debug, Clone, Deserialize, Serialize, Default, PartialEq, clap::ValueEnum)] #[derive(Debug, Clone, Deserialize, Serialize, Default, PartialEq, Eq, clap::ValueEnum)]
#[serde(rename_all = "lowercase")] #[serde(rename_all = "lowercase")]
pub enum PrincipalType { pub enum PrincipalType {
#[default] #[default]
@@ -36,13 +36,13 @@ impl TryFrom<&str> for PrincipalType {
} }
impl PrincipalType { impl PrincipalType {
pub fn as_str(&self) -> &'static str { #[must_use] pub const fn as_str(&self) -> &'static str {
match self { match self {
PrincipalType::Individual => "INDIVIDUAL", Self::Individual => "INDIVIDUAL",
PrincipalType::Group => "GROUP", Self::Group => "GROUP",
PrincipalType::Resource => "RESOURCE", Self::Resource => "RESOURCE",
PrincipalType::Room => "ROOM", Self::Room => "ROOM",
PrincipalType::Unknown => "UNKNOWN", Self::Unknown => "UNKNOWN",
} }
} }
} }
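`as_str` also becomes a `const fn` with `Self::` arms, so the string mapping can be evaluated at compile time. A sketch of the idea with an illustrative enum (not the real `PrincipalType`):

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum PrincipalKind {
    Individual,
    Group,
}

impl PrincipalKind {
    // `match` inside a const fn is stable, so this compiles.
    const fn as_str(self) -> &'static str {
        match self {
            Self::Individual => "INDIVIDUAL",
            Self::Group => "GROUP",
        }
    }
}

// Because as_str is const, it can initialize a const item.
const DEFAULT_KIND: &str = PrincipalKind::Individual.as_str();

fn main() {
    assert_eq!(DEFAULT_KIND, "INDIVIDUAL");
    assert_eq!(PrincipalKind::Group.as_str(), "GROUP");
}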


@@ -32,19 +32,19 @@ pub struct Calendar {
} }
impl Calendar { impl Calendar {
pub fn format_synctoken(&self) -> String { #[must_use] pub fn format_synctoken(&self) -> String {
format_synctoken(self.synctoken) format_synctoken(self.synctoken)
} }
pub fn get_timezone(&self) -> Option<chrono_tz::Tz> { #[must_use] pub fn get_timezone(&self) -> Option<chrono_tz::Tz> {
self.timezone_id self.timezone_id
.as_ref() .as_ref()
.and_then(|tzid| chrono_tz::Tz::from_str(tzid).ok()) .and_then(|tzid| chrono_tz::Tz::from_str(tzid).ok())
} }
pub fn get_vtimezone(&self) -> Option<&'static str> { #[must_use] pub fn get_vtimezone(&self) -> Option<&'static str> {
self.timezone_id self.timezone_id
.as_ref() .as_ref()
.and_then(|tzid| vtimezones_rs::VTIMEZONES.get(tzid).cloned()) .and_then(|tzid| vtimezones_rs::VTIMEZONES.get(tzid).copied())
} }
} }
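`get_vtimezone` swaps `.cloned()` for `.copied()`: the map values are `&'static str`, a `Copy` type, so no clone machinery is needed. A sketch of the distinction on an illustrative map (the VTIMEZONE payload is a placeholder, not real data):

use std::collections::HashMap;

fn main() {
    let vtimezones: HashMap<&str, &'static str> =
        HashMap::from([("Europe/Berlin", "BEGIN:VTIMEZONE ... END:VTIMEZONE")]);

    // get() yields Option<&&str>; both calls turn it into Option<&str>,
    // but copied() states that only a bitwise copy happens.
    let via_clone: Option<&str> = vtimezones.get("Europe/Berlin").cloned();
    let via_copy: Option<&str> = vtimezones.get("Europe/Berlin").copied();
    assert_eq!(via_clone, via_copy);
}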


@@ -8,7 +8,7 @@ use rustical_ical::{AddressObject, CalendarObject, CalendarObjectType};
use sha2::{Digest, Sha256}; use sha2::{Digest, Sha256};
use std::{collections::HashMap, sync::Arc}; use std::{collections::HashMap, sync::Arc};
pub(crate) const BIRTHDAYS_PREFIX: &str = "_birthdays_"; pub const BIRTHDAYS_PREFIX: &str = "_birthdays_";
#[derive(Constructor, Clone)] #[derive(Constructor, Clone)]
pub struct ContactBirthdayStore<AS: AddressbookStore>(Arc<AS>); pub struct ContactBirthdayStore<AS: AddressbookStore>(Arc<AS>);
@@ -43,7 +43,7 @@ fn birthday_calendar(addressbook: Addressbook) -> Calendar {
} }
} }
/// Objects are all prefixed with BIRTHDAYS_PREFIX /// Objects are all prefixed with `BIRTHDAYS_PREFIX`
#[async_trait] #[async_trait]
impl<AS: AddressbookStore> CalendarStore for ContactBirthdayStore<AS> { impl<AS: AddressbookStore> CalendarStore for ContactBirthdayStore<AS> {
async fn get_calendar( async fn get_calendar(
@@ -165,7 +165,7 @@ impl<AS: AddressbookStore> CalendarStore for ContactBirthdayStore<AS> {
let cal_id = cal_id let cal_id = cal_id
.strip_prefix(BIRTHDAYS_PREFIX) .strip_prefix(BIRTHDAYS_PREFIX)
.ok_or(Error::NotFound)?; .ok_or(Error::NotFound)?;
let (addressobject_id, date_type) = object_id.rsplit_once("-").ok_or(Error::NotFound)?; let (addressobject_id, date_type) = object_id.rsplit_once('-').ok_or(Error::NotFound)?;
self.0 self.0
.get_object(principal, cal_id, addressobject_id, show_deleted) .get_object(principal, cal_id, addressobject_id, show_deleted)
.await? .await?


@@ -30,7 +30,7 @@ pub enum Error {
} }
impl Error { impl Error {
pub fn status_code(&self) -> StatusCode { #[must_use] pub const fn status_code(&self) -> StatusCode {
match self { match self {
Self::NotFound => StatusCode::NOT_FOUND, Self::NotFound => StatusCode::NOT_FOUND,
Self::AlreadyExists => StatusCode::CONFLICT, Self::AlreadyExists => StatusCode::CONFLICT,


@@ -1,10 +1,10 @@
const SYNC_NAMESPACE: &str = "github.com/lennart-k/rustical/ns/"; const SYNC_NAMESPACE: &str = "github.com/lennart-k/rustical/ns/";
pub fn format_synctoken(synctoken: i64) -> String { #[must_use] pub fn format_synctoken(synctoken: i64) -> String {
format!("{SYNC_NAMESPACE}{synctoken}") format!("{SYNC_NAMESPACE}{synctoken}")
} }
pub fn parse_synctoken(synctoken: &str) -> Option<i64> { #[must_use] pub fn parse_synctoken(synctoken: &str) -> Option<i64> {
if !synctoken.starts_with(SYNC_NAMESPACE) { if !synctoken.starts_with(SYNC_NAMESPACE) {
return None; return None;
} }


@@ -138,26 +138,23 @@ impl SqliteAddressbookStore {
addressbook_id: &str, addressbook_id: &str,
use_trashbin: bool, use_trashbin: bool,
) -> Result<(), rustical_store::Error> { ) -> Result<(), rustical_store::Error> {
match use_trashbin { if use_trashbin {
true => { sqlx::query!(
sqlx::query!( r#"UPDATE addressbooks SET deleted_at = datetime() WHERE (principal, id) = (?, ?)"#,
r#"UPDATE addressbooks SET deleted_at = datetime() WHERE (principal, id) = (?, ?)"#, principal, addressbook_id
principal, addressbook_id )
) .execute(executor)
.execute(executor) .await.map_err(crate::Error::from)?;
.await.map_err(crate::Error::from)?; } else {
} sqlx::query!(
false => { r#"DELETE FROM addressbooks WHERE (principal, id) = (?, ?)"#,
sqlx::query!( principal,
r#"DELETE FROM addressbooks WHERE (principal, id) = (?, ?)"#, addressbook_id
principal, )
addressbook_id .execute(executor)
) .await
.execute(executor) .map_err(crate::Error::from)?;
.await }
.map_err(crate::Error::from)?;
}
};
Ok(()) Ok(())
} }
@@ -208,8 +205,7 @@ impl SqliteAddressbookStore {
let new_synctoken = changes let new_synctoken = changes
.last() .last()
.map(|&Row { synctoken, .. }| synctoken) .map_or(0, |&Row { synctoken, .. }| synctoken);
.unwrap_or(0);
for Row { object_id, .. } in changes { for Row { object_id, .. } in changes {
match Self::_get_object(&mut *conn, principal, addressbook_id, &object_id, false).await match Self::_get_object(&mut *conn, principal, addressbook_id, &object_id, false).await
@@ -259,7 +255,7 @@ impl SqliteAddressbookStore {
.fetch_all(executor) .fetch_all(executor)
.await.map_err(crate::Error::from)? .await.map_err(crate::Error::from)?
.into_iter() .into_iter()
.map(|row| row.try_into()) .map(std::convert::TryInto::try_into)
.collect() .collect()
} }
@@ -325,28 +321,25 @@ impl SqliteAddressbookStore {
object_id: &str, object_id: &str,
use_trashbin: bool, use_trashbin: bool,
) -> Result<(), rustical_store::Error> { ) -> Result<(), rustical_store::Error> {
match use_trashbin { if use_trashbin {
true => { sqlx::query!(
sqlx::query!( "UPDATE addressobjects SET deleted_at = datetime(), updated_at = datetime() WHERE (principal, addressbook_id, id) = (?, ?, ?)",
"UPDATE addressobjects SET deleted_at = datetime(), updated_at = datetime() WHERE (principal, addressbook_id, id) = (?, ?, ?)", principal,
principal, addressbook_id,
addressbook_id, object_id
object_id )
) .execute(executor)
.execute(executor) .await.map_err(crate::Error::from)?;
.await.map_err(crate::Error::from)?; } else {
} sqlx::query!(
false => { "DELETE FROM addressobjects WHERE addressbook_id = ? AND id = ?",
sqlx::query!( addressbook_id,
"DELETE FROM addressobjects WHERE addressbook_id = ? AND id = ?", object_id
addressbook_id, )
object_id .execute(executor)
) .await
.execute(executor) .map_err(crate::Error::from)?;
.await }
.map_err(crate::Error::from)?;
}
};
Ok(()) Ok(())
} }
@@ -440,7 +433,7 @@ impl AddressbookStore for SqliteAddressbookStore {
}) })
{ {
error!("Push notification about deleted addressbook failed: {err}"); error!("Push notification about deleted addressbook failed: {err}");
}; }
Ok(()) Ok(())
} }
@@ -474,9 +467,9 @@ impl AddressbookStore for SqliteAddressbookStore {
let mut deleted_sizes = vec![]; let mut deleted_sizes = vec![];
for (size, deleted) in Self::_list_objects(&self.db, principal, addressbook_id).await? { for (size, deleted) in Self::_list_objects(&self.db, principal, addressbook_id).await? {
if deleted { if deleted {
deleted_sizes.push(size) deleted_sizes.push(size);
} else { } else {
sizes.push(size) sizes.push(size);
} }
} }
Ok(CollectionMetadata { Ok(CollectionMetadata {
@@ -521,8 +514,8 @@ impl AddressbookStore for SqliteAddressbookStore {
Self::_put_object( Self::_put_object(
&mut *tx, &mut *tx,
principal.to_owned(), principal.clone(),
addressbook_id.to_owned(), addressbook_id.clone(),
object, object,
overwrite, overwrite,
) )
@@ -548,7 +541,7 @@ impl AddressbookStore for SqliteAddressbookStore {
.push_topic, .push_topic,
}) { }) {
error!("Push notification about deleted addressbook failed: {err}"); error!("Push notification about deleted addressbook failed: {err}");
}; }
Ok(()) Ok(())
} }
@@ -585,7 +578,7 @@ impl AddressbookStore for SqliteAddressbookStore {
.push_topic, .push_topic,
}) { }) {
error!("Push notification about deleted addressbook failed: {err}"); error!("Push notification about deleted addressbook failed: {err}");
}; }
Ok(()) Ok(())
} }
@@ -619,7 +612,7 @@ impl AddressbookStore for SqliteAddressbookStore {
.push_topic, .push_topic,
}) { }) {
error!("Push notification about deleted addressbook failed: {err}"); error!("Push notification about deleted addressbook failed: {err}");
}; }
Ok(()) Ok(())
} }
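The trashbin branches in this store are rewritten from `match use_trashbin { true => …, false => … }` to plain `if`/`else` (clippy's `match_bool`); the SQL that runs is unchanged. A compact sketch of the refactor with the queries stubbed out as prints:

fn delete_row(id: &str, use_trashbin: bool) {
    // Before: matching on a bool.
    match use_trashbin {
        true => println!("UPDATE items SET deleted_at = datetime() WHERE id = '{id}'"),
        false => println!("DELETE FROM items WHERE id = '{id}'"),
    }

    // After: the same two branches as if/else, which clippy prefers.
    if use_trashbin {
        println!("UPDATE items SET deleted_at = datetime() WHERE id = '{id}'");
    } else {
        println!("DELETE FROM items WHERE id = '{id}'");
    }
}

fn main() {
    delete_row("abc", true);
    delete_row("abc", false);
}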


@@ -22,7 +22,7 @@ impl TryFrom<CalendarObjectRow> for CalendarObject {
type Error = rustical_store::Error; type Error = rustical_store::Error;
fn try_from(value: CalendarObjectRow) -> Result<Self, Self::Error> { fn try_from(value: CalendarObjectRow) -> Result<Self, Self::Error> {
let object = CalendarObject::from_ics(value.ics)?; let object = Self::from_ics(value.ics)?;
if object.get_id() != value.id { if object.get_id() != value.id {
return Err(rustical_store::Error::IcalError( return Err(rustical_store::Error::IcalError(
rustical_ical::Error::InvalidData(format!( rustical_ical::Error::InvalidData(format!(
@@ -213,24 +213,21 @@ impl SqliteCalendarStore {
id: &str, id: &str,
use_trashbin: bool, use_trashbin: bool,
) -> Result<(), Error> { ) -> Result<(), Error> {
match use_trashbin { if use_trashbin { sqlx::query!(
true => sqlx::query!( r#"UPDATE calendars SET deleted_at = datetime() WHERE (principal, id) = (?, ?)"#,
r#"UPDATE calendars SET deleted_at = datetime() WHERE (principal, id) = (?, ?)"#, principal,
principal, id
id )
) .execute(executor)
.execute(executor) .await
.await .map_err(crate::Error::from)? } else { sqlx::query!(
.map_err(crate::Error::from)?, r#"DELETE FROM calendars WHERE (principal, id) = (?, ?)"#,
false => sqlx::query!( principal,
r#"DELETE FROM calendars WHERE (principal, id) = (?, ?)"#, id
principal, )
id .execute(executor)
) .await
.execute(executor) .map_err(crate::Error::from)? };
.await
.map_err(crate::Error::from)?,
};
Ok(()) Ok(())
} }
@@ -286,7 +283,7 @@ impl SqliteCalendarStore {
.fetch_all(executor) .fetch_all(executor)
.await.map_err(crate::Error::from)? .await.map_err(crate::Error::from)?
.into_iter() .into_iter()
.map(|row| row.try_into()) .map(std::convert::TryInto::try_into)
.collect() .collect()
} }
@@ -320,7 +317,7 @@ impl SqliteCalendarStore {
.await .await
.map_err(crate::Error::from)? .map_err(crate::Error::from)?
.into_iter() .into_iter()
.map(|row| row.try_into()) .map(std::convert::TryInto::try_into)
.collect() .collect()
} }
@@ -411,28 +408,25 @@ impl SqliteCalendarStore {
id: &str, id: &str,
use_trashbin: bool, use_trashbin: bool,
) -> Result<(), Error> { ) -> Result<(), Error> {
match use_trashbin { if use_trashbin {
true => { sqlx::query!(
sqlx::query!( "UPDATE calendarobjects SET deleted_at = datetime(), updated_at = datetime() WHERE (principal, cal_id, id) = (?, ?, ?)",
"UPDATE calendarobjects SET deleted_at = datetime(), updated_at = datetime() WHERE (principal, cal_id, id) = (?, ?, ?)", principal,
principal, cal_id,
cal_id, id
id )
) .execute(executor)
.execute(executor) .await.map_err(crate::Error::from)?;
.await.map_err(crate::Error::from)?; } else {
} sqlx::query!(
false => { "DELETE FROM calendarobjects WHERE cal_id = ? AND id = ?",
sqlx::query!( cal_id,
"DELETE FROM calendarobjects WHERE cal_id = ? AND id = ?", id
cal_id, )
id .execute(executor)
) .await
.execute(executor) .map_err(crate::Error::from)?;
.await }
.map_err(crate::Error::from)?;
}
};
Ok(()) Ok(())
} }
@@ -484,8 +478,7 @@ impl SqliteCalendarStore {
let new_synctoken = changes let new_synctoken = changes
.last() .last()
.map(|&Row { synctoken, .. }| synctoken) .map_or(0, |&Row { synctoken, .. }| synctoken);
.unwrap_or(0);
for Row { object_id, .. } in changes { for Row { object_id, .. } in changes {
match Self::_get_object(&mut *conn, principal, cal_id, &object_id, false).await { match Self::_get_object(&mut *conn, principal, cal_id, &object_id, false).await {
@@ -562,7 +555,7 @@ impl CalendarStore for SqliteCalendarStore {
}) })
{ {
error!("Push notification about deleted calendar failed: {err}"); error!("Push notification about deleted calendar failed: {err}");
}; }
Ok(()) Ok(())
} }
@@ -627,9 +620,9 @@ impl CalendarStore for SqliteCalendarStore {
let mut deleted_sizes = vec![]; let mut deleted_sizes = vec![];
for (size, deleted) in Self::_list_objects(&self.db, principal, cal_id).await? { for (size, deleted) in Self::_list_objects(&self.db, principal, cal_id).await? {
if deleted { if deleted {
deleted_sizes.push(size) deleted_sizes.push(size);
} else { } else {
sizes.push(size) sizes.push(size);
} }
} }
Ok(CollectionMetadata { Ok(CollectionMetadata {
@@ -680,8 +673,8 @@ impl CalendarStore for SqliteCalendarStore {
Self::_put_object( Self::_put_object(
&mut *tx, &mut *tx,
principal.to_owned(), principal.clone(),
cal_id.to_owned(), cal_id.clone(),
object, object,
overwrite, overwrite,
) )
@@ -706,7 +699,7 @@ impl CalendarStore for SqliteCalendarStore {
.push_topic, .push_topic,
}) { }) {
error!("Push notification about deleted calendar failed: {err}"); error!("Push notification about deleted calendar failed: {err}");
}; }
Ok(()) Ok(())
} }
@@ -731,7 +724,7 @@ impl CalendarStore for SqliteCalendarStore {
topic: self.get_calendar(principal, cal_id, true).await?.push_topic, topic: self.get_calendar(principal, cal_id, true).await?.push_topic,
}) { }) {
error!("Push notification about deleted calendar failed: {err}"); error!("Push notification about deleted calendar failed: {err}");
}; }
Ok(()) Ok(())
} }
@@ -756,7 +749,7 @@ impl CalendarStore for SqliteCalendarStore {
topic: self.get_calendar(principal, cal_id, true).await?.push_topic, topic: self.get_calendar(principal, cal_id, true).await?.push_topic,
}) { }) {
error!("Push notification about deleted calendar failed: {err}"); error!("Push notification about deleted calendar failed: {err}");
}; }
Ok(()) Ok(())
} }
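The synctoken fold above now uses `.map_or(0, |&Row { synctoken, .. }| synctoken)` instead of `.map(…).unwrap_or(0)`. The two are equivalent; a sketch with an illustrative `Row` struct rather than the sqlx row type:

struct Row {
    synctoken: i64,
}

fn main() {
    let changes = vec![Row { synctoken: 41 }, Row { synctoken: 42 }];

    // Before: map() then unwrap_or().
    let verbose = changes
        .last()
        .map(|&Row { synctoken, .. }| synctoken)
        .unwrap_or(0);
    // After: map_or() takes the default first, then the mapping.
    let concise = changes.last().map_or(0, |&Row { synctoken, .. }| synctoken);
    assert_eq!(verbose, concise);
    assert_eq!(concise, 42);

    // An empty change list falls back to the default synctoken.
    let empty: Vec<Row> = Vec::new();
    assert_eq!(empty.last().map_or(0, |&Row { synctoken, .. }| synctoken), 0);
}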


@@ -15,16 +15,16 @@ pub enum Error {
impl From<sqlx::Error> for Error { impl From<sqlx::Error> for Error {
fn from(value: sqlx::Error) -> Self { fn from(value: sqlx::Error) -> Self {
match value { match value {
sqlx::Error::RowNotFound => Error::StoreError(rustical_store::Error::NotFound), sqlx::Error::RowNotFound => Self::StoreError(rustical_store::Error::NotFound),
sqlx::Error::Database(err) => { sqlx::Error::Database(err) => {
if err.is_unique_violation() { if err.is_unique_violation() {
warn!("{err:?}"); warn!("{err:?}");
Error::StoreError(rustical_store::Error::AlreadyExists) Self::StoreError(rustical_store::Error::AlreadyExists)
} else { } else {
Error::SqlxError(sqlx::Error::Database(err)) Self::SqlxError(sqlx::Error::Database(err))
} }
} }
err => Error::SqlxError(err), err => Self::SqlxError(err),
} }
} }
} }


@@ -26,7 +26,7 @@ pub struct SqliteStore {
} }
impl SqliteStore { impl SqliteStore {
pub fn new(db: SqlitePool) -> Self { #[must_use] pub const fn new(db: SqlitePool) -> Self {
Self { db } Self { db }
} }
} }


@@ -25,7 +25,7 @@ impl TryFrom<PrincipalRow> for Principal {
type Error = Error; type Error = Error;
fn try_from(value: PrincipalRow) -> Result<Self, Self::Error> { fn try_from(value: PrincipalRow) -> Result<Self, Self::Error> {
Ok(Principal { Ok(Self {
id: value.id, id: value.id,
displayname: value.displayname, displayname: value.displayname,
password: value.password_hash.map(Secret::from), password: value.password_hash.map(Secret::from),


@@ -62,13 +62,13 @@ impl<T: XmlRootTag + XmlDeserialize> XmlDocument for T {
format!("{root_ns:?}"), format!("{root_ns:?}"),
Self::root_tag().to_owned(), Self::root_tag().to_owned(),
)); ));
}; }
return Self::deserialize(&mut reader, &start, empty); return Self::deserialize(&mut reader, &start, empty);
} }
Event::Eof => return Err(XmlError::Eof), Event::Eof => return Err(XmlError::Eof),
_ => return Err(XmlError::UnsupportedEvent("unknown, todo")), _ => return Err(XmlError::UnsupportedEvent("unknown, todo")),
}; }
} }
} }
} }
@@ -88,7 +88,7 @@ impl XmlDeserialize for () {
Event::End(e) if e.name() == start.name() => return Ok(()), Event::End(e) if e.name() == start.name() => return Ok(()),
Event::Eof => return Err(XmlError::Eof), Event::Eof => return Err(XmlError::Eof),
_ => {} _ => {}
}; }
} }
} }
} }


@@ -1,6 +1,6 @@
use quick_xml::name::Namespace; use quick_xml::name::Namespace;
#[derive(Debug, Clone, Default, PartialEq)] #[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct NamespaceOwned(pub Vec<u8>); pub struct NamespaceOwned(pub Vec<u8>);
impl<'a> From<Namespace<'a>> for NamespaceOwned { impl<'a> From<Namespace<'a>> for NamespaceOwned {
@@ -28,7 +28,7 @@ impl<'a> From<&'a Namespace<'a>> for NamespaceOwned {
} }
impl NamespaceOwned { impl NamespaceOwned {
pub fn as_ref(&self) -> Namespace<'_> { #[must_use] pub fn as_ref(&self) -> Namespace<'_> {
Namespace(&self.0) Namespace(&self.0)
} }
} }


@@ -65,13 +65,12 @@ impl XmlSerialize for () {
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
let prefix = ns let prefix = ns
.map(|ns| namespaces.get(&ns)) .and_then(|ns| namespaces.get(&ns))
.unwrap_or(None)
.map(|prefix| { .map(|prefix| {
if !prefix.is_empty() { if prefix.is_empty() {
[*prefix, ":"].concat()
} else {
String::new() String::new()
} else {
[*prefix, ":"].concat()
} }
}); });
let has_prefix = prefix.is_some(); let has_prefix = prefix.is_some();


@@ -5,11 +5,11 @@ use quick_xml::events::BytesStart;
use crate::{XmlDeserialize, XmlError}; use crate::{XmlDeserialize, XmlError};
// TODO: actually implement // TODO: actually implement
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct Unparsed(BytesStart<'static>); pub struct Unparsed(BytesStart<'static>);
impl Unparsed { impl Unparsed {
pub fn tag_name(&self) -> String { #[must_use] pub fn tag_name(&self) -> String {
// TODO: respect namespace? // TODO: respect namespace?
String::from_utf8_lossy(self.0.local_name().as_ref()).to_string() String::from_utf8_lossy(self.0.local_name().as_ref()).to_string()
} }


@@ -60,7 +60,7 @@ impl_value_parse!(usize);
impl ValueSerialize for &str { impl ValueSerialize for &str {
fn serialize(&self) -> String { fn serialize(&self) -> String {
self.to_string() (*self).to_string()
} }
} }
@@ -98,7 +98,7 @@ impl<T: ValueDeserialize> XmlDeserialize for T {
Event::End(_) => break, Event::End(_) => break,
Event::Eof => return Err(XmlError::Eof), Event::Eof => return Err(XmlError::Eof),
_ => return Err(XmlError::UnsupportedEvent("todo")), _ => return Err(XmlError::UnsupportedEvent("todo")),
}; }
} }
} }
@@ -115,13 +115,12 @@ impl<T: ValueSerialize> XmlSerialize for T {
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
let prefix = ns let prefix = ns
.map(|ns| namespaces.get(&ns)) .and_then(|ns| namespaces.get(&ns))
.unwrap_or(None)
.map(|prefix| { .map(|prefix| {
if !prefix.is_empty() { if prefix.is_empty() {
[*prefix, ":"].concat()
} else {
String::new() String::new()
} else {
[*prefix, ":"].concat()
} }
}); });
let has_prefix = prefix.is_some(); let has_prefix = prefix.is_some();


@@ -43,7 +43,7 @@ pub fn make_app<AS: AddressbookStore, CS: CalendarStore, S: SubscriptionStore>(
) -> Router<()> { ) -> Router<()> {
let birthday_store = Arc::new(ContactBirthdayStore::new(addr_store.clone())); let birthday_store = Arc::new(ContactBirthdayStore::new(addr_store.clone()));
let combined_cal_store = let combined_cal_store =
Arc::new(CombinedCalendarStore::new(cal_store.clone()).with_store(birthday_store)); Arc::new(CombinedCalendarStore::new(cal_store).with_store(birthday_store));
let mut router = Router::new() let mut router = Router::new()
.merge(caldav_router( .merge(caldav_router(
@@ -104,24 +104,19 @@ pub fn make_app<AS: AddressbookStore, CS: CalendarStore, S: SubscriptionStore>(
router = router.merge(frontend_router( router = router.merge(frontend_router(
"/frontend", "/frontend",
auth_provider.clone(), auth_provider.clone(),
combined_cal_store.clone(), combined_cal_store,
addr_store.clone(), addr_store,
frontend_config, frontend_config,
oidc_config, oidc_config,
)); ));
} }
if nextcloud_login_config.enabled { if nextcloud_login_config.enabled {
router = router.nest( router = router.nest("/index.php/login/v2", nextcloud_login_router(auth_provider));
"/index.php/login/v2",
nextcloud_login_router(auth_provider.clone()),
);
} }
if dav_push_enabled { if dav_push_enabled {
router = router.merge(rustical_dav_push::subscription_service( router = router.merge(rustical_dav_push::subscription_service(subscription_store));
subscription_store.clone(),
));
} }
router router
@@ -178,11 +173,11 @@ pub fn make_app<AS: AddressbookStore, CS: CalendarStore, S: SubscriptionStore>(
tracing::error!("client error"); tracing::error!("client error");
} }
} }
}; }
}) })
.on_failure( .on_failure(
|_error: ServerErrorsFailureClass, _latency: Duration, _span: &Span| { |_error: ServerErrorsFailureClass, _latency: Duration, _span: &Span| {
tracing::error!("something went wrong") tracing::error!("something went wrong");
}, },
), ),
) )
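Most of this hunk drops .clone() calls on the last use of a value and adds trailing semicolons to side-effect-only statements such as the tracing::error!(...) call. A compressed sketch of the clone-on-last-use part, with stand-in store and router types rather than the real rustical ones:

use std::sync::Arc;

struct CalendarStore;

fn caldav_router(_store: Arc<CalendarStore>) -> &'static str { "caldav" }
fn frontend_router(_store: Arc<CalendarStore>) -> &'static str { "frontend" }

fn make_app(cal_store: Arc<CalendarStore>) -> Vec<&'static str> {
    let mut routes = Vec::new();
    // The Arc is used again below, so this clone has to stay.
    routes.push(caldav_router(cal_store.clone()));
    // Last use: the Arc can simply be moved instead of cloned.
    routes.push(frontend_router(cal_store));
    routes
}

fn main() {
    assert_eq!(make_app(Arc::new(CalendarStore)), vec!["caldav", "frontend"]);
}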

View File

@@ -38,9 +38,9 @@ pub async fn handle_membership_command(
MembershipArgs { command }: MembershipArgs, MembershipArgs { command }: MembershipArgs,
) -> anyhow::Result<()> { ) -> anyhow::Result<()> {
let id = match &command { let id = match &command {
MembershipCommand::Assign(AssignArgs { id, .. }) => id, MembershipCommand::Assign(AssignArgs { id, .. })
MembershipCommand::Remove(RemoveArgs { id, .. }) => id, | MembershipCommand::Remove(RemoveArgs { id, .. })
MembershipCommand::List(ListArgs { id }) => id, | MembershipCommand::List(ListArgs { id }) => id,
}; };
match &command { match &command {
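The three match arms bound the same field and returned it unchanged, so they collapse into a single or-pattern arm. A self-contained sketch with illustrative CLI types; the real args structs carry more fields:

// Stand-in for the membership subcommands.
#[allow(dead_code)]
enum MembershipCommand {
    Assign { id: String },
    Remove { id: String },
    List { id: String },
}

fn main() {
    let command = MembershipCommand::List { id: "alice".into() };
    // One arm with an or-pattern instead of three identical bodies.
    let id = match &command {
        MembershipCommand::Assign { id }
        | MembershipCommand::Remove { id }
        | MembershipCommand::List { id } => id,
    };
    assert_eq!(id, "alice");
}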

View File

@@ -1,5 +1,6 @@
use crate::config::{ use crate::config::{
Config, DataStoreConfig, DavPushConfig, HttpConfig, SqliteDataStoreConfig, TracingConfig, Config, DataStoreConfig, DavPushConfig, HttpConfig, NextcloudLoginConfig,
SqliteDataStoreConfig, TracingConfig,
}; };
use clap::Parser; use clap::Parser;
use rustical_frontend::FrontendConfig; use rustical_frontend::FrontendConfig;
@@ -23,7 +24,7 @@ pub fn cmd_gen_config(_args: GenConfigArgs) -> anyhow::Result<()> {
}, },
oidc: None, oidc: None,
dav_push: DavPushConfig::default(), dav_push: DavPushConfig::default(),
nextcloud_login: Default::default(), nextcloud_login: NextcloudLoginConfig::default(),
}; };
let generated_config = toml::to_string(&config)?; let generated_config = toml::to_string(&config)?;
println!("{generated_config}"); println!("{generated_config}");

View File

@@ -140,7 +140,7 @@ pub async fn cmd_principals(args: PrincipalsArgs) -> anyhow::Result<()> {
.unwrap() .unwrap()
.to_string() .to_string()
.into(), .into(),
) );
} }
if name.is_some() { if name.is_some() {
principal.displayname = name; principal.displayname = name;
@@ -152,7 +152,7 @@ pub async fn cmd_principals(args: PrincipalsArgs) -> anyhow::Result<()> {
println!("Principal {id} updated"); println!("Principal {id} updated");
} }
Command::Membership(args) => { Command::Membership(args) => {
handle_membership_command(principal_store.as_ref(), args).await? handle_membership_command(principal_store.as_ref(), args).await?;
} }
} }
Ok(()) Ok(())

View File

@@ -41,7 +41,7 @@ pub struct TracingConfig {
pub opentelemetry: bool, pub opentelemetry: bool,
} }
fn default_true() -> bool { const fn default_true() -> bool {
true true
} }
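default_true does no runtime work, so it qualifies for the const-fn promotion clippy suggests (likely the missing_const_for_fn lint). A sketch showing that the const helper can then also feed constant contexts; the DEFAULT constant below is illustrative and not part of the real config:

const fn default_true() -> bool {
    true
}

struct TracingConfig {
    opentelemetry: bool,
}

impl TracingConfig {
    // Only possible because default_true is const.
    const DEFAULT: Self = Self {
        opentelemetry: default_true(),
    };
}

fn main() {
    assert!(TracingConfig::DEFAULT.opentelemetry);
}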

View File

@@ -66,7 +66,7 @@ async fn get_data_stores(
let addressbook_store = Arc::new(SqliteAddressbookStore::new(db.clone(), send.clone())); let addressbook_store = Arc::new(SqliteAddressbookStore::new(db.clone(), send.clone()));
let cal_store = Arc::new(SqliteCalendarStore::new(db.clone(), send)); let cal_store = Arc::new(SqliteCalendarStore::new(db.clone(), send));
let subscription_store = Arc::new(SqliteStore::new(db.clone())); let subscription_store = Arc::new(SqliteStore::new(db.clone()));
let principal_store = Arc::new(SqlitePrincipalStore::new(db.clone())); let principal_store = Arc::new(SqlitePrincipalStore::new(db));
( (
addressbook_store, addressbook_store,
cal_store, cal_store,
@@ -128,7 +128,7 @@ async fn main() -> Result<()> {
let listener = tokio::net::TcpListener::bind(&address).await?; let listener = tokio::net::TcpListener::bind(&address).await?;
tasks.push(tokio::spawn(async move { tasks.push(tokio::spawn(async move {
info!("RustiCal serving on http://{address}"); info!("RustiCal serving on http://{address}");
axum::serve(listener, app).await.unwrap() axum::serve(listener, app).await.unwrap();
})); }));
for task in tasks { for task in tasks {