Compare commits

...

33 Commits

Author  SHA1  Message  Date

Lennart  4592afac10  version 0.9.7  2025-09-18 21:11:44 +02:00
Lennart  e7ab7c2987  ical: Fix import UID grouping  2025-09-18 21:08:00 +02:00
Lennart  242f7b9076  calendar export: Fix overrides  2025-09-18 20:38:54 +02:00
Lennart  cb1356acad  ical: Fix data model to allow calendar objects with overrides #125  2025-09-18 20:38:37 +02:00
Lennart  55dadbb06b  update Rust to 1.90  2025-09-18 16:45:48 +02:00
Lennart  4dd12bfe52  version 0.9.6  2025-09-17 11:35:20 +02:00
Lennart  5e004a6edc  calendar import: Enable import to existing calendars (if no objects are overwritten)  2025-09-17 11:33:49 +02:00
Lennart  03e550c2f8  add some debug logging for invalid data in put_event #125  2025-09-17 10:18:46 +02:00
Lennart  b2f5d5486c  version 0.9.5  2025-09-17 10:06:07 +02:00
Lennart  db674d5895  Allow setting HTTP payload limit and set default to 4MB #124  2025-09-17 10:06:07 +02:00
Lennart K  bc98d1be42  document thing to watch out for with Kubernetes #122  2025-09-16 15:34:31 +02:00
Lennart  4bb8cae9ea  docs: Fix typo for env var configuration  2025-09-14 18:55:33 +02:00
Lennart  3774b358a5  version 0.9.4  2025-09-10 23:23:12 +02:00
Lennart  c6b612e5a0  Update dependencies  2025-09-10 23:20:40 +02:00
Lennart  91586ee797  migrate quick-xml to 0.38 (fixes #120)  2025-09-05 15:24:34 +02:00
Lennart K  87adf94947  Update Cargo.toml and Dockerfile  2025-09-04 13:05:14 +02:00
Lennart  f850f9b3a3  version 0.9.3  2025-09-02 23:38:41 +02:00
Lennart  0eb8359e26  rewrite combined calendar store in preparation for sharing  2025-09-02 23:30:16 +02:00
Lennart  7d961ea93b  frontend: make button descriptions shorter to fit mobile screen  2025-09-02 23:19:15 +02:00
Lennart  375caedec6  update docs  2025-09-02 09:32:28 +02:00
Lennart  2d8d2eb194  Update README.md  2025-09-01 00:29:55 +02:00
Lennart  69e788b363  store: prevent objects from being commited to subscription calendar  2025-08-31 12:40:20 +02:00
Lennart  8ea5321503  Merge branch 'main' into sharing  2025-08-30 13:58:50 +02:00
Lennart  76c03fa4d4  clippy appeasement  2025-08-30 11:56:58 +02:00
Lennart  96b63848f0  version 0.9.2  2025-08-30 00:41:50 +02:00
Lennart  16e5cacefe  Docker: Target Rust 1.89 (fixes #116)  2025-08-30 00:21:41 +02:00
Lennart  3819f623a6  update dependencies  2025-08-30 00:20:51 +02:00
Lennart  c4604d4376  xml: Comprehensive refactoring from byte strings to strings  2025-08-28 18:01:41 +02:00
Lennart K  85787e69bc  xml: tiny refactoring  2025-08-28 15:24:19 +02:00
Lennart K  43b4150e28  xml: Change ns_prefix from LitByteStr to LitStr  2025-08-28 15:19:27 +02:00
Lennart K  c38fbe004f  clippy appeasement  2025-08-28 15:09:01 +02:00
Lennart  bf5d874481  frontend tweaks  2025-08-28 14:53:17 +02:00
Lennart  a4285fb2ac  Outsource some Calendar info to CalendarMetadata struct  2025-08-24 12:52:28 +02:00
77 changed files with 1080 additions and 902 deletions

Cargo.lock (generated, 499 changes): file diff suppressed because it is too large.

View File

@@ -2,9 +2,10 @@
members = ["crates/*"] members = ["crates/*"]
[workspace.package] [workspace.package]
version = "0.9.1" version = "0.9.7"
edition = "2024" edition = "2024"
description = "A CalDAV server" description = "A CalDAV server"
documentation = "https://lennart-k.github.io/rustical/"
repository = "https://github.com/lennart-k/rustical" repository = "https://github.com/lennart-k/rustical"
license = "AGPL-3.0-or-later" license = "AGPL-3.0-or-later"
@@ -16,7 +17,7 @@ description.workspace = true
repository.workspace = true repository.workspace = true
license.workspace = true license.workspace = true
resolver = "2" resolver = "2"
publish = false publish = true
[features] [features]
debug = ["opentelemetry"] debug = ["opentelemetry"]
@@ -48,7 +49,7 @@ rand_core = { version = "0.9", features = ["std"] }
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
regex = "1.10" regex = "1.10"
lazy_static = "1.5" lazy_static = "1.5"
rstest = "0.25" rstest = "0.26"
rstest_reuse = "0.7" rstest_reuse = "0.7"
sha2 = "0.10" sha2 = "0.10"
tokio = { version = "1", features = [ tokio = { version = "1", features = [
@@ -61,7 +62,7 @@ tokio = { version = "1", features = [
url = "2.5" url = "2.5"
base64 = "0.22" base64 = "0.22"
thiserror = "2.0" thiserror = "2.0"
quick-xml = { version = "0.37" } quick-xml = { version = "0.38" }
rust-embed = "8.5" rust-embed = "8.5"
tower-sessions = "0.14" tower-sessions = "0.14"
futures-core = "0.3.31" futures-core = "0.3.31"

View File

@@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM rust:1.88-alpine AS chef FROM --platform=$BUILDPLATFORM rust:1.90-alpine AS chef
ARG TARGETPLATFORM ARG TARGETPLATFORM
ARG BUILDPLATFORM ARG BUILDPLATFORM
@@ -45,4 +45,5 @@ CMD ["/usr/local/bin/rustical"]
ENV RUSTICAL_DATA_STORE__SQLITE__DB_URL=/var/lib/rustical/db.sqlite3 ENV RUSTICAL_DATA_STORE__SQLITE__DB_URL=/var/lib/rustical/db.sqlite3
LABEL org.opencontainers.image.authors="Lennart K github.com/lennart-k" LABEL org.opencontainers.image.authors="Lennart K github.com/lennart-k"
LABEL org.opencontainers.image.licenses="AGPL-3.0-or-later"
EXPOSE 4000 EXPOSE 4000

View File

@@ -4,14 +4,15 @@ a CalDAV/CardDAV server
> [!WARNING] > [!WARNING]
RustiCal is under **active development**! RustiCal is under **active development**!
While I've been successfully using RustiCal productively for a few weeks now, While I've been successfully using RustiCal productively for some months now and there seems to be a growing user base,
you'd still be one of the first testers so expect bugs and rough edges. you'd still be one of the first testers so expect bugs and rough edges.
If you still want to play around with it in its current state, absolutely feel free to do so and to open up an issue if something is not working. :) If you still want to use it in its current state, absolutely feel free to do so and to open up an issue if something is not working. :)
## Features ## Features
- easy to backup, everything saved in one SQLite database - easy to backup, everything saved in one SQLite database
- also export feature in the frontend - also export feature in the frontend
- Import your existing calendars in the frontend
- **[WebDAV Push](https://github.com/bitfireAT/webdav-push/)** support, so near-instant synchronisation to DAVx5 - **[WebDAV Push](https://github.com/bitfireAT/webdav-push/)** support, so near-instant synchronisation to DAVx5
- lightweight (the container image contains only one binary) - lightweight (the container image contains only one binary)
- adequately fast (I'd love to say blazingly fast™ :fire: but I don't have any benchmarks) - adequately fast (I'd love to say blazingly fast™ :fire: but I don't have any benchmarks)

View File

@@ -43,24 +43,24 @@ pub async fn route_get<C: CalendarStore, S: SubscriptionStore>(
let mut ical_calendar_builder = IcalCalendarBuilder::version("4.0") let mut ical_calendar_builder = IcalCalendarBuilder::version("4.0")
.gregorian() .gregorian()
.prodid("RustiCal"); .prodid("RustiCal");
if calendar.displayname.is_some() { if let Some(displayname) = calendar.meta.displayname {
ical_calendar_builder = ical_calendar_builder.set(Property { ical_calendar_builder = ical_calendar_builder.set(Property {
name: "X-WR-CALNAME".to_owned(), name: "X-WR-CALNAME".to_owned(),
value: calendar.displayname, value: Some(displayname),
params: None, params: None,
}); });
} }
if calendar.description.is_some() { if let Some(description) = calendar.meta.description {
ical_calendar_builder = ical_calendar_builder.set(Property { ical_calendar_builder = ical_calendar_builder.set(Property {
name: "X-WR-CALDESC".to_owned(), name: "X-WR-CALDESC".to_owned(),
value: calendar.description, value: Some(description),
params: None, params: None,
}); });
} }
if calendar.timezone_id.is_some() { if let Some(timezone_id) = calendar.timezone_id {
ical_calendar_builder = ical_calendar_builder.set(Property { ical_calendar_builder = ical_calendar_builder.set(Property {
name: "X-WR-TIMEZONE".to_owned(), name: "X-WR-TIMEZONE".to_owned(),
value: calendar.timezone_id, value: Some(timezone_id),
params: None, params: None,
}); });
} }
@@ -68,19 +68,32 @@ pub async fn route_get<C: CalendarStore, S: SubscriptionStore>(
for object in &objects { for object in &objects {
vtimezones.extend(object.get_vtimezones()); vtimezones.extend(object.get_vtimezones());
match object.get_data() { match object.get_data() {
CalendarObjectComponent::Event(EventObject { CalendarObjectComponent::Event(
event, EventObject {
timezones: object_timezones, event,
.. timezones: object_timezones,
}) => { ..
},
overrides,
) => {
timezones.extend(object_timezones); timezones.extend(object_timezones);
ical_calendar_builder = ical_calendar_builder.add_event(event.clone()); ical_calendar_builder = ical_calendar_builder.add_event(event.clone());
for _override in overrides {
ical_calendar_builder =
ical_calendar_builder.add_event(_override.event.clone());
}
} }
CalendarObjectComponent::Todo(TodoObject(todo)) => { CalendarObjectComponent::Todo(TodoObject(todo), overrides) => {
ical_calendar_builder = ical_calendar_builder.add_todo(todo.clone()); ical_calendar_builder = ical_calendar_builder.add_todo(todo.clone());
for _override in overrides {
ical_calendar_builder = ical_calendar_builder.add_todo(_override.0.clone());
}
} }
CalendarObjectComponent::Journal(JournalObject(journal)) => { CalendarObjectComponent::Journal(JournalObject(journal), overrides) => {
ical_calendar_builder = ical_calendar_builder.add_journal(journal.clone()); ical_calendar_builder = ical_calendar_builder.add_journal(journal.clone());
for _override in overrides {
ical_calendar_builder = ical_calendar_builder.add_journal(_override.0.clone());
}
} }
} }
} }
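The match arms above destructure a second tuple field: every calendar object now carries its RECURRENCE-ID overrides next to the main component, and the export route emits the overrides right after it. For reference, a simplified sketch of the reworked data model this code matches on (the full definitions appear in the rustical_ical diff further down; the real EventObject also carries its VTIMEZONEs):

```rust
use ical::parser::ical::component::{IcalEvent, IcalJournal, IcalTodo};

// Simplified stand-ins; the actual rustical_ical types have more fields.
pub struct EventObject {
    pub event: IcalEvent,
    // timezones map omitted here
}
pub struct TodoObject(pub IcalTodo);
pub struct JournalObject(pub IcalJournal);

// Each variant pairs the main component with its RECURRENCE-ID overrides.
pub enum CalendarObjectComponent {
    Event(EventObject, Vec<EventObject>),
    Todo(TodoObject, Vec<TodoObject>),
    Journal(JournalObject, Vec<JournalObject>),
}
```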

View File

@@ -9,8 +9,11 @@ use ical::{
generator::Emitter, generator::Emitter,
parser::{Component, ComponentMut}, parser::{Component, ComponentMut},
}; };
use rustical_dav::header::Overwrite;
use rustical_ical::{CalendarObject, CalendarObjectType}; use rustical_ical::{CalendarObject, CalendarObjectType};
use rustical_store::{Calendar, CalendarStore, SubscriptionStore, auth::Principal}; use rustical_store::{
Calendar, CalendarMetadata, CalendarStore, SubscriptionStore, auth::Principal,
};
use std::io::BufReader; use std::io::BufReader;
use tracing::instrument; use tracing::instrument;
@@ -19,6 +22,7 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
Path((principal, cal_id)): Path<(String, String)>, Path((principal, cal_id)): Path<(String, String)>,
user: Principal, user: Principal,
State(resource_service): State<CalendarResourceService<C, S>>, State(resource_service): State<CalendarResourceService<C, S>>,
overwrite: Overwrite,
body: String, body: String,
) -> Result<Response, Error> { ) -> Result<Response, Error> {
if !user.is_principal(&principal) { if !user.is_principal(&principal) {
@@ -83,10 +87,12 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
let new_cal = Calendar { let new_cal = Calendar {
principal, principal,
id: cal_id, id: cal_id,
displayname, meta: CalendarMetadata {
order: 0, displayname,
description, order: 0,
color: None, description,
color: None,
},
timezone_id, timezone_id,
deleted_at: None, deleted_at: None,
synctoken: 0, synctoken: 0,
@@ -96,7 +102,9 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
}; };
let cal_store = resource_service.cal_store; let cal_store = resource_service.cal_store;
cal_store.import_calendar(new_cal, objects, false).await?; cal_store
.import_calendar(new_cal, objects, overwrite.is_true())
.await?;
Ok(StatusCode::OK.into_response()) Ok(StatusCode::OK.into_response())
} }
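The import handler now takes an `Overwrite` extractor and forwards `overwrite.is_true()` to `import_calendar`, so importing into an existing calendar only replaces objects when the client explicitly asks for it. A hypothetical sketch of such an extractor, assuming it reads the standard WebDAV `Overwrite` header (RFC 4918, values `T`/`F`) and an axum 0.8-style `FromRequestParts`; rustical's actual `rustical_dav::header::Overwrite` may be implemented differently:

```rust
use std::convert::Infallible;

use axum::{extract::FromRequestParts, http::request::Parts};

/// Hypothetical extractor for the WebDAV `Overwrite` header (RFC 4918, section 10.6).
pub struct Overwrite(pub bool);

impl Overwrite {
    pub fn is_true(&self) -> bool {
        self.0
    }
}

impl<S: Send + Sync> FromRequestParts<S> for Overwrite {
    type Rejection = Infallible;

    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        let value = parts
            .headers
            .get("Overwrite")
            .and_then(|v| v.to_str().ok())
            .map(|v| v.eq_ignore_ascii_case("T"))
            // Assumed default when the header is absent; rustical may default
            // differently (RFC 4918 defaults to "T" for COPY/MOVE).
            .unwrap_or(false);
        Ok(Overwrite(value))
    }
}
```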

View File

@@ -8,7 +8,7 @@ use ical::IcalParser;
use rustical_dav::xml::HrefElement; use rustical_dav::xml::HrefElement;
use rustical_ical::CalendarObjectType; use rustical_ical::CalendarObjectType;
use rustical_store::auth::Principal; use rustical_store::auth::Principal;
use rustical_store::{Calendar, CalendarStore, SubscriptionStore}; use rustical_store::{Calendar, CalendarMetadata, CalendarStore, SubscriptionStore};
use rustical_xml::{Unparsed, XmlDeserialize, XmlDocument, XmlRootTag}; use rustical_xml::{Unparsed, XmlDeserialize, XmlDocument, XmlRootTag};
use tracing::instrument; use tracing::instrument;
@@ -46,7 +46,7 @@ pub struct PropElement {
} }
#[derive(XmlDeserialize, XmlRootTag, Clone, Debug)] #[derive(XmlDeserialize, XmlRootTag, Clone, Debug)]
#[xml(root = b"mkcalendar")] #[xml(root = "mkcalendar")]
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")] #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
struct MkcalendarRequest { struct MkcalendarRequest {
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
@@ -54,7 +54,7 @@ struct MkcalendarRequest {
} }
#[derive(XmlDeserialize, XmlRootTag, Clone, Debug)] #[derive(XmlDeserialize, XmlRootTag, Clone, Debug)]
#[xml(root = b"mkcol")] #[xml(root = "mkcol")]
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
struct MkcolRequest { struct MkcolRequest {
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
@@ -112,11 +112,13 @@ pub async fn route_mkcalendar<C: CalendarStore, S: SubscriptionStore>(
let calendar = Calendar { let calendar = Calendar {
id: cal_id.to_owned(), id: cal_id.to_owned(),
principal: principal.to_owned(), principal: principal.to_owned(),
order: request.calendar_order.unwrap_or(0), meta: CalendarMetadata {
displayname: request.displayname, order: request.calendar_order.unwrap_or(0),
displayname: request.displayname,
color: request.calendar_color,
description: request.calendar_description,
},
timezone_id, timezone_id,
color: request.calendar_color,
description: request.calendar_description,
deleted_at: None, deleted_at: None,
synctoken: 0, synctoken: 0,
subscription_url: request.source.map(|href| href.href), subscription_url: request.source.map(|href| href.href),
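Both this MKCALENDAR handler and the import handler above now build the display-related fields through a nested `CalendarMetadata` struct instead of flat fields on `Calendar`. A sketch of the split, inferred only from the fields used in this changeset (the exact definitions and types may differ):

```rust
/// Sketch inferred from this changeset; not the exact rustical_store definitions.
pub struct CalendarMetadata {
    pub displayname: Option<String>,
    pub description: Option<String>,
    pub color: Option<String>,
    pub order: i64, // integer type assumed
}

pub struct Calendar {
    pub principal: String,
    pub id: String,
    pub meta: CalendarMetadata, // display-related fields moved here
    pub timezone_id: Option<String>,
    pub subscription_url: Option<String>,
    pub synctoken: i64,                            // integer type assumed
    pub deleted_at: Option<chrono::NaiveDateTime>, // timestamp type assumed
    // further fields (e.g. `components`) elided
}
```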

View File

@@ -116,19 +116,17 @@ impl CompFilterElement {
// TODO: Implement prop-filter (and comp-filter?) at some point // TODO: Implement prop-filter (and comp-filter?) at some point
if let Some(time_range) = &self.time_range { if let Some(time_range) = &self.time_range {
if let Some(start) = &time_range.start { if let Some(start) = &time_range.start
if let Some(last_occurence) = cal_object.get_last_occurence().unwrap_or(None) { && let Some(last_occurence) = cal_object.get_last_occurence().unwrap_or(None)
if start.deref() > &last_occurence.utc() { && start.deref() > &last_occurence.utc()
return false; {
} return false;
};
} }
if let Some(end) = &time_range.end { if let Some(end) = &time_range.end
if let Some(first_occurence) = cal_object.get_first_occurence().unwrap_or(None) { && let Some(first_occurence) = cal_object.get_first_occurence().unwrap_or(None)
if end.deref() < &first_occurence.utc() { && end.deref() < &first_occurence.utc()
return false; {
} return false;
};
} }
} }
true true
@@ -156,15 +154,15 @@ impl From<&FilterElement> for CalendarQuery {
for comp_filter in comp_filter_vcalendar.comp_filter.iter() { for comp_filter in comp_filter_vcalendar.comp_filter.iter() {
// A calendar object cannot contain both VEVENT and VTODO, so we only have to handle // A calendar object cannot contain both VEVENT and VTODO, so we only have to handle
// whatever we get first // whatever we get first
if matches!(comp_filter.name.as_str(), "VEVENT" | "VTODO") { if matches!(comp_filter.name.as_str(), "VEVENT" | "VTODO")
if let Some(time_range) = &comp_filter.time_range { && let Some(time_range) = &comp_filter.time_range
let start = time_range.start.as_ref().map(|start| start.date_naive()); {
let end = time_range.end.as_ref().map(|end| end.date_naive()); let start = time_range.start.as_ref().map(|start| start.date_naive());
return CalendarQuery { let end = time_range.end.as_ref().map(|end| end.date_naive());
time_start: start, return CalendarQuery {
time_end: end, time_start: start,
}; time_end: end,
} };
} }
} }
Default::default() Default::default()
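The collapsed conditionals here (and in several diffs below) use let-chains, which the move to edition 2024 and Rust 1.88+ makes available: `let` bindings and boolean tests can be chained with `&&` inside a single `if`. A standalone illustration of the pattern, not project code:

```rust
fn first_even_above(values: &[Option<i32>], threshold: i32) -> Option<i32> {
    for v in values {
        // Before: nested `if let` plus an inner `if`.
        // if let Some(x) = v {
        //     if x % 2 == 0 && *x > threshold {
        //         return Some(*x);
        //     }
        // }
        // After (Rust 1.88+, edition 2024): a single let-chain.
        if let Some(x) = v
            && x % 2 == 0
            && *x > threshold
        {
            return Some(*x);
        }
    }
    None
}

fn main() {
    assert_eq!(first_even_above(&[None, Some(3), Some(8)], 5), Some(8));
}
```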

View File

@@ -128,10 +128,10 @@ impl Resource for CalendarResource {
Ok(match prop { Ok(match prop {
CalendarPropWrapperName::Calendar(prop) => CalendarPropWrapper::Calendar(match prop { CalendarPropWrapperName::Calendar(prop) => CalendarPropWrapper::Calendar(match prop {
CalendarPropName::CalendarColor => { CalendarPropName::CalendarColor => {
CalendarProp::CalendarColor(self.cal.color.clone()) CalendarProp::CalendarColor(self.cal.meta.color.clone())
} }
CalendarPropName::CalendarDescription => { CalendarPropName::CalendarDescription => {
CalendarProp::CalendarDescription(self.cal.description.clone()) CalendarProp::CalendarDescription(self.cal.meta.description.clone())
} }
CalendarPropName::CalendarTimezone => { CalendarPropName::CalendarTimezone => {
CalendarProp::CalendarTimezone(self.cal.timezone_id.as_ref().and_then(|tzid| { CalendarProp::CalendarTimezone(self.cal.timezone_id.as_ref().and_then(|tzid| {
@@ -146,7 +146,7 @@ impl Resource for CalendarResource {
CalendarProp::CalendarTimezoneId(self.cal.timezone_id.clone()) CalendarProp::CalendarTimezoneId(self.cal.timezone_id.clone())
} }
CalendarPropName::CalendarOrder => { CalendarPropName::CalendarOrder => {
CalendarProp::CalendarOrder(Some(self.cal.order)) CalendarProp::CalendarOrder(Some(self.cal.meta.order))
} }
CalendarPropName::SupportedCalendarComponentSet => { CalendarPropName::SupportedCalendarComponentSet => {
CalendarProp::SupportedCalendarComponentSet(self.cal.components.clone().into()) CalendarProp::SupportedCalendarComponentSet(self.cal.components.clone().into())
@@ -187,11 +187,11 @@ impl Resource for CalendarResource {
match prop { match prop {
CalendarPropWrapper::Calendar(prop) => match prop { CalendarPropWrapper::Calendar(prop) => match prop {
CalendarProp::CalendarColor(color) => { CalendarProp::CalendarColor(color) => {
self.cal.color = color; self.cal.meta.color = color;
Ok(()) Ok(())
} }
CalendarProp::CalendarDescription(description) => { CalendarProp::CalendarDescription(description) => {
self.cal.description = description; self.cal.meta.description = description;
Ok(()) Ok(())
} }
CalendarProp::CalendarTimezone(timezone) => { CalendarProp::CalendarTimezone(timezone) => {
@@ -225,18 +225,18 @@ impl Resource for CalendarResource {
} }
CalendarProp::TimezoneServiceSet(_) => Err(rustical_dav::Error::PropReadOnly), CalendarProp::TimezoneServiceSet(_) => Err(rustical_dav::Error::PropReadOnly),
CalendarProp::CalendarTimezoneId(timezone_id) => { CalendarProp::CalendarTimezoneId(timezone_id) => {
if let Some(tzid) = &timezone_id { if let Some(tzid) = &timezone_id
if !vtimezones_rs::VTIMEZONES.contains_key(tzid) { && !vtimezones_rs::VTIMEZONES.contains_key(tzid)
return Err(rustical_dav::Error::BadRequest(format!( {
"Invalid timezone-id: {tzid}" return Err(rustical_dav::Error::BadRequest(format!(
))); "Invalid timezone-id: {tzid}"
} )));
} }
self.cal.timezone_id = timezone_id; self.cal.timezone_id = timezone_id;
Ok(()) Ok(())
} }
CalendarProp::CalendarOrder(order) => { CalendarProp::CalendarOrder(order) => {
self.cal.order = order.unwrap_or_default(); self.cal.meta.order = order.unwrap_or_default();
Ok(()) Ok(())
} }
CalendarProp::SupportedCalendarComponentSet(comp_set) => { CalendarProp::SupportedCalendarComponentSet(comp_set) => {
@@ -264,11 +264,11 @@ impl Resource for CalendarResource {
match prop { match prop {
CalendarPropWrapperName::Calendar(prop) => match prop { CalendarPropWrapperName::Calendar(prop) => match prop {
CalendarPropName::CalendarColor => { CalendarPropName::CalendarColor => {
self.cal.color = None; self.cal.meta.color = None;
Ok(()) Ok(())
} }
CalendarPropName::CalendarDescription => { CalendarPropName::CalendarDescription => {
self.cal.description = None; self.cal.meta.description = None;
Ok(()) Ok(())
} }
CalendarPropName::CalendarTimezone | CalendarPropName::CalendarTimezoneId => { CalendarPropName::CalendarTimezone | CalendarPropName::CalendarTimezoneId => {
@@ -277,7 +277,7 @@ impl Resource for CalendarResource {
} }
CalendarPropName::TimezoneServiceSet => Err(rustical_dav::Error::PropReadOnly), CalendarPropName::TimezoneServiceSet => Err(rustical_dav::Error::PropReadOnly),
CalendarPropName::CalendarOrder => { CalendarPropName::CalendarOrder => {
self.cal.order = 0; self.cal.meta.order = 0;
Ok(()) Ok(())
} }
CalendarPropName::SupportedCalendarComponentSet => { CalendarPropName::SupportedCalendarComponentSet => {
@@ -300,10 +300,10 @@ impl Resource for CalendarResource {
} }
fn get_displayname(&self) -> Option<&str> { fn get_displayname(&self) -> Option<&str> {
self.cal.displayname.as_deref() self.cal.meta.displayname.as_deref()
} }
fn set_displayname(&mut self, name: Option<String>) -> Result<(), rustical_dav::Error> { fn set_displayname(&mut self, name: Option<String>) -> Result<(), rustical_dav::Error> {
self.cal.displayname = name; self.cal.meta.displayname = name;
Ok(()) Ok(())
} }

View File

@@ -11,7 +11,7 @@ use rustical_ical::CalendarObject;
use rustical_store::CalendarStore; use rustical_store::CalendarStore;
use rustical_store::auth::Principal; use rustical_store::auth::Principal;
use std::str::FromStr; use std::str::FromStr;
use tracing::instrument; use tracing::{debug, instrument};
#[instrument(skip(cal_store))] #[instrument(skip(cal_store))]
pub async fn get_event<C: CalendarStore>( pub async fn get_event<C: CalendarStore>(
@@ -78,9 +78,10 @@ pub async fn put_event<C: CalendarStore>(
true true
}; };
let object = match CalendarObject::from_ics(body) { let object = match CalendarObject::from_ics(body.clone()) {
Ok(obj) => obj, Ok(obj) => obj,
Err(_) => { Err(_) => {
debug!("invalid calendar data:\n{body}");
return Err(Error::PreconditionFailed(Precondition::ValidCalendarData)); return Err(Error::PreconditionFailed(Precondition::ValidCalendarData));
} }
}; };

View File

@@ -16,13 +16,13 @@ pub enum PrincipalProp {
CalendarUserAddressSet(HrefElement), CalendarUserAddressSet(HrefElement),
// WebDAV Access Control (RFC 3744) // WebDAV Access Control (RFC 3744)
#[xml(ns = "rustical_dav::namespace::NS_DAV", rename = b"principal-URL")] #[xml(ns = "rustical_dav::namespace::NS_DAV", rename = "principal-URL")]
PrincipalUrl(HrefElement), PrincipalUrl(HrefElement),
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
GroupMembership(GroupMembership), GroupMembership(GroupMembership),
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
GroupMemberSet(GroupMemberSet), GroupMemberSet(GroupMemberSet),
#[xml(ns = "rustical_dav::namespace::NS_DAV", rename = b"alternate-URI-set")] #[xml(ns = "rustical_dav::namespace::NS_DAV", rename = "alternate-URI-set")]
AlternateUriSet, AlternateUriSet,
// #[xml(ns = "rustical_dav::namespace::NS_DAV")] // #[xml(ns = "rustical_dav::namespace::NS_DAV")]
// PrincipalCollectionSet(HrefElement), // PrincipalCollectionSet(HrefElement),

View File

@@ -79,5 +79,5 @@ async fn test_propfind() {
) )
.unwrap(); .unwrap();
let output = response.serialize_to_string().unwrap(); let _output = response.serialize_to_string().unwrap();
} }

View File

@@ -22,7 +22,7 @@ pub struct MkcolAddressbookProp {
resourcetype: Option<Resourcetype>, resourcetype: Option<Resourcetype>,
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
displayname: Option<String>, displayname: Option<String>,
#[xml(rename = b"addressbook-description")] #[xml(rename = "addressbook-description")]
#[xml(ns = "rustical_dav::namespace::NS_CARDDAV")] #[xml(ns = "rustical_dav::namespace::NS_CARDDAV")]
description: Option<String>, description: Option<String>,
} }
@@ -34,7 +34,7 @@ pub struct PropElement<T: XmlDeserialize> {
} }
#[derive(XmlDeserialize, XmlRootTag, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, XmlRootTag, Clone, Debug, PartialEq)]
#[xml(root = b"mkcol")] #[xml(root = "mkcol")]
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
struct MkcolRequest { struct MkcolRequest {
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]

View File

@@ -8,14 +8,14 @@ use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[xml(unit_variants_ident = "PrincipalPropName")] #[xml(unit_variants_ident = "PrincipalPropName")]
pub enum PrincipalProp { pub enum PrincipalProp {
// WebDAV Access Control (RFC 3744) // WebDAV Access Control (RFC 3744)
#[xml(rename = b"principal-URL")] #[xml(rename = "principal-URL")]
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
PrincipalUrl(HrefElement), PrincipalUrl(HrefElement),
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
GroupMembership(GroupMembership), GroupMembership(GroupMembership),
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
GroupMemberSet(GroupMemberSet), GroupMemberSet(GroupMemberSet),
#[xml(ns = "rustical_dav::namespace::NS_DAV", rename = b"alternate-URI-set")] #[xml(ns = "rustical_dav::namespace::NS_DAV", rename = "alternate-URI-set")]
AlternateUriSet, AlternateUriSet,
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
PrincipalCollectionSet(HrefElement), PrincipalCollectionSet(HrefElement),

View File

@@ -20,13 +20,13 @@ impl XmlSerialize for UserPrivilegeSet {
fn serialize( fn serialize(
&self, &self,
ns: Option<Namespace>, ns: Option<Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &HashMap<Namespace, &[u8]>, namespaces: &HashMap<Namespace, &str>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
#[derive(XmlSerialize)] #[derive(XmlSerialize)]
pub struct FakeUserPrivilegeSet { pub struct FakeUserPrivilegeSet {
#[xml(rename = b"privilege", flatten)] #[xml(rename = "privilege", flatten)]
privileges: Vec<UserPrivilege>, privileges: Vec<UserPrivilege>,
} }
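This is one of several manual XmlSerialize impls touched by the byte-strings-to-strings refactor: tag names and the namespace-prefix map are now `&str` instead of `&[u8]`, matching quick-xml 0.38's string-based `BytesStart`/`BytesEnd` API. Roughly, the method signature used throughout these impls now reads as follows (a sketch; the actual rustical_xml trait may carry additional items):

```rust
use std::collections::HashMap;

// Stand-in for rustical_xml's own namespace type.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
pub struct Namespace;

/// Sketch of the updated signature: `tag` and the prefix values were `&[u8]`
/// before the quick-xml 0.38 migration and are `&str` afterwards.
pub trait XmlSerialize {
    fn serialize(
        &self,
        ns: Option<Namespace>,
        tag: Option<&str>,
        namespaces: &HashMap<Namespace, &str>,
        writer: &mut quick_xml::Writer<&mut Vec<u8>>,
    ) -> std::io::Result<()>;
}
```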

View File

@@ -60,11 +60,11 @@ pub async fn route_delete<R: ResourceService>(
return Err(crate::Error::PreconditionFailed.into()); return Err(crate::Error::PreconditionFailed.into());
} }
} }
if let Some(if_none_match) = if_none_match { if let Some(if_none_match) = if_none_match
if resource.satisfies_if_none_match(&if_none_match) { && resource.satisfies_if_none_match(&if_none_match)
// Precondition failed {
return Err(crate::Error::PreconditionFailed.into()); // Precondition failed
} return Err(crate::Error::PreconditionFailed.into());
} }
resource_service resource_service
.delete_resource(path_components, !no_trash) .delete_resource(path_components, !no_trash)

View File

@@ -57,7 +57,7 @@ enum Operation<T: XmlDeserialize> {
} }
#[derive(XmlDeserialize, XmlRootTag, Clone, Debug)] #[derive(XmlDeserialize, XmlRootTag, Clone, Debug)]
#[xml(root = b"propertyupdate")] #[xml(root = "propertyupdate")]
#[xml(ns = "crate::namespace::NS_DAV")] #[xml(ns = "crate::namespace::NS_DAV")]
struct PropertyupdateElement<T: XmlDeserialize>(#[xml(ty = "untagged", flatten)] Vec<Operation<T>>); struct PropertyupdateElement<T: XmlDeserialize>(#[xml(ty = "untagged", flatten)] Vec<Operation<T>>);

View File

@@ -1,12 +1,12 @@
use rustical_xml::{XmlRootTag, XmlSerialize}; use rustical_xml::{XmlRootTag, XmlSerialize};
#[derive(XmlSerialize, XmlRootTag)] #[derive(XmlSerialize, XmlRootTag)]
#[xml(ns = "crate::namespace::NS_DAV", root = b"error")] #[xml(ns = "crate::namespace::NS_DAV", root = "error")]
#[xml(ns_prefix( #[xml(ns_prefix(
crate::namespace::NS_DAV = b"", crate::namespace::NS_DAV = "",
crate::namespace::NS_CARDDAV = b"CARD", crate::namespace::NS_CARDDAV = "CARD",
crate::namespace::NS_CALDAV = b"CAL", crate::namespace::NS_CALDAV = "CAL",
crate::namespace::NS_CALENDARSERVER = b"CS", crate::namespace::NS_CALENDARSERVER = "CS",
crate::namespace::NS_DAVPUSH = b"PUSH" crate::namespace::NS_DAVPUSH = "PUSH"
))] ))]
pub struct ErrorElement<'t, T: XmlSerialize>(#[xml(ty = "untagged")] pub &'t T); pub struct ErrorElement<'t, T: XmlSerialize>(#[xml(ty = "untagged")] pub &'t T);

View File

@@ -22,8 +22,8 @@ pub struct PropstatElement<PropType: XmlSerialize> {
fn xml_serialize_status( fn xml_serialize_status(
status: &StatusCode, status: &StatusCode,
ns: Option<Namespace>, ns: Option<Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &HashMap<Namespace, &[u8]>, namespaces: &HashMap<Namespace, &str>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
XmlSerialize::serialize(&format!("HTTP/1.1 {}", status), ns, tag, namespaces, writer) XmlSerialize::serialize(&format!("HTTP/1.1 {}", status), ns, tag, namespaces, writer)
@@ -40,13 +40,13 @@ pub enum PropstatWrapper<T: XmlSerialize> {
// <!ELEMENT response (href, ((href*, status)|(propstat+)), // <!ELEMENT response (href, ((href*, status)|(propstat+)),
// responsedescription?) > // responsedescription?) >
#[derive(XmlSerialize, XmlRootTag)] #[derive(XmlSerialize, XmlRootTag)]
#[xml(ns = "crate::namespace::NS_DAV", root = b"response")] #[xml(ns = "crate::namespace::NS_DAV", root = "response")]
#[xml(ns_prefix( #[xml(ns_prefix(
crate::namespace::NS_DAV = b"", crate::namespace::NS_DAV = "",
crate::namespace::NS_CARDDAV = b"CARD", crate::namespace::NS_CARDDAV = "CARD",
crate::namespace::NS_CALDAV = b"CAL", crate::namespace::NS_CALDAV = "CAL",
crate::namespace::NS_CALENDARSERVER = b"CS", crate::namespace::NS_CALENDARSERVER = "CS",
crate::namespace::NS_DAVPUSH = b"PUSH" crate::namespace::NS_DAVPUSH = "PUSH"
))] ))]
pub struct ResponseElement<PropstatType: XmlSerialize> { pub struct ResponseElement<PropstatType: XmlSerialize> {
pub href: String, pub href: String,
@@ -59,8 +59,8 @@ pub struct ResponseElement<PropstatType: XmlSerialize> {
fn xml_serialize_optional_status( fn xml_serialize_optional_status(
val: &Option<StatusCode>, val: &Option<StatusCode>,
ns: Option<Namespace>, ns: Option<Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &HashMap<Namespace, &[u8]>, namespaces: &HashMap<Namespace, &str>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
XmlSerialize::serialize( XmlSerialize::serialize(
@@ -86,18 +86,18 @@ impl<PT: XmlSerialize> Default for ResponseElement<PT> {
// <!ELEMENT multistatus (response+, responsedescription?) > // <!ELEMENT multistatus (response+, responsedescription?) >
// Extended by sync-token as specified in RFC 6578 // Extended by sync-token as specified in RFC 6578
#[derive(XmlSerialize, XmlRootTag)] #[derive(XmlSerialize, XmlRootTag)]
#[xml(root = b"multistatus", ns = "crate::namespace::NS_DAV")] #[xml(root = "multistatus", ns = "crate::namespace::NS_DAV")]
#[xml(ns_prefix( #[xml(ns_prefix(
crate::namespace::NS_DAV = b"", crate::namespace::NS_DAV = "",
crate::namespace::NS_CARDDAV = b"CARD", crate::namespace::NS_CARDDAV = "CARD",
crate::namespace::NS_CALDAV = b"CAL", crate::namespace::NS_CALDAV = "CAL",
crate::namespace::NS_CALENDARSERVER = b"CS", crate::namespace::NS_CALENDARSERVER = "CS",
crate::namespace::NS_DAVPUSH = b"PUSH" crate::namespace::NS_DAVPUSH = "PUSH"
))] ))]
pub struct MultistatusElement<PropType: XmlSerialize, MemberPropType: XmlSerialize> { pub struct MultistatusElement<PropType: XmlSerialize, MemberPropType: XmlSerialize> {
#[xml(rename = b"response", flatten)] #[xml(rename = "response", flatten)]
pub responses: Vec<ResponseElement<PropType>>, pub responses: Vec<ResponseElement<PropType>>,
#[xml(rename = b"response", flatten)] #[xml(rename = "response", flatten)]
pub member_responses: Vec<ResponseElement<MemberPropType>>, pub member_responses: Vec<ResponseElement<MemberPropType>>,
pub sync_token: Option<String>, pub sync_token: Option<String>,
} }

View File

@@ -7,7 +7,7 @@ use rustical_xml::XmlError;
use rustical_xml::XmlRootTag; use rustical_xml::XmlRootTag;
#[derive(Debug, Clone, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, Clone, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"propfind", ns = "crate::namespace::NS_DAV")] #[xml(root = "propfind", ns = "crate::namespace::NS_DAV")]
pub struct PropfindElement<PN: XmlDeserialize> { pub struct PropfindElement<PN: XmlDeserialize> {
#[xml(ty = "untagged")] #[xml(ty = "untagged")]
pub prop: PropfindType<PN>, pub prop: PropfindType<PN>,
@@ -66,6 +66,9 @@ impl<PN: XmlDeserialize> XmlDeserialize for PropElement<PN> {
Event::Text(_) | Event::CData(_) => { Event::Text(_) | Event::CData(_) => {
return Err(XmlError::UnsupportedEvent("Not expecting text here")); return Err(XmlError::UnsupportedEvent("Not expecting text here"));
} }
Event::GeneralRef(_) => {
return Err(::rustical_xml::XmlError::UnsupportedEvent("GeneralRef"));
}
Event::Decl(_) | Event::Comment(_) | Event::DocType(_) | Event::PI(_) => { /* ignore */ Event::Decl(_) | Event::Comment(_) | Event::DocType(_) | Event::PI(_) => { /* ignore */
} }
Event::End(_end) => { Event::End(_end) => {
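The new arm above exists because quick-xml 0.38 reports general entity references (`&name;`) as their own `Event::GeneralRef` event, which this parser rejects as unsupported. A minimal, illustrative read loop handling the same event under 0.38; names and error handling here are assumptions, not rustical code:

```rust
use quick_xml::Reader;
use quick_xml::events::Event;

/// Count start tags, rejecting general entity references the same way the
/// propfind parser above does with its new `Event::GeneralRef` arm.
fn count_start_tags(xml: &str) -> Result<usize, String> {
    let mut reader = Reader::from_str(xml);
    let mut count = 0;
    loop {
        match reader.read_event().map_err(|e| e.to_string())? {
            Event::Start(_) | Event::Empty(_) => count += 1,
            // New event kind handled by the 0.38 migration: `&name;` style references.
            Event::GeneralRef(_) => return Err("general entity references unsupported".into()),
            Event::Eof => break,
            _ => {} // text, comments, declarations, processing instructions, ...
        }
    }
    Ok(count)
}
```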

View File

@@ -16,7 +16,7 @@ mod tests {
use super::{Resourcetype, ResourcetypeInner}; use super::{Resourcetype, ResourcetypeInner};
#[derive(XmlSerialize, XmlRootTag)] #[derive(XmlSerialize, XmlRootTag)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
resourcetype: Resourcetype, resourcetype: Resourcetype,
} }

View File

@@ -60,7 +60,7 @@ pub struct NresultsElement(#[xml(ty = "text")] u64);
// <!ELEMENT sync-collection (sync-token, sync-level, limit?, prop)> // <!ELEMENT sync-collection (sync-token, sync-level, limit?, prop)>
// <!-- DAV:limit defined in RFC 5323, Section 5.17 --> // <!-- DAV:limit defined in RFC 5323, Section 5.17 -->
// <!-- DAV:prop defined in RFC 4918, Section 14.18 --> // <!-- DAV:prop defined in RFC 4918, Section 14.18 -->
#[xml(ns = "crate::namespace::NS_DAV", root = b"sync-collection")] #[xml(ns = "crate::namespace::NS_DAV", root = "sync-collection")]
pub struct SyncCollectionRequest<PN: XmlDeserialize> { pub struct SyncCollectionRequest<PN: XmlDeserialize> {
#[xml(ns = "crate::namespace::NS_DAV")] #[xml(ns = "crate::namespace::NS_DAV")]
pub sync_token: String, pub sync_token: String,

View File

@@ -13,8 +13,8 @@ impl XmlSerialize for TagList {
fn serialize( fn serialize(
&self, &self,
ns: Option<Namespace>, ns: Option<Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &HashMap<Namespace, &[u8]>, namespaces: &HashMap<Namespace, &str>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
let prefix = ns let prefix = ns
@@ -22,23 +22,18 @@ impl XmlSerialize for TagList {
.unwrap_or(None) .unwrap_or(None)
.map(|prefix| { .map(|prefix| {
if !prefix.is_empty() { if !prefix.is_empty() {
[*prefix, b":"].concat() format!("{prefix}:")
} else { } else {
Vec::new() String::new()
} }
}); });
let has_prefix = prefix.is_some(); let has_prefix = prefix.is_some();
let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat()); let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat());
let qname = tagname
.as_ref()
.map(|tagname| ::quick_xml::name::QName(tagname));
if let Some(qname) = &qname { if let Some(tagname) = tagname.as_ref() {
let mut bytes_start = BytesStart::from(qname.to_owned()); let mut bytes_start = BytesStart::new(tagname);
if !has_prefix { if !has_prefix && let Some(ns) = &ns {
if let Some(ns) = &ns { bytes_start.push_attribute((b"xmlns".as_ref(), ns.as_ref()));
bytes_start.push_attribute((b"xmlns".as_ref(), ns.as_ref()));
}
} }
writer.write_event(Event::Start(bytes_start))?; writer.write_event(Event::Start(bytes_start))?;
} }
@@ -51,8 +46,8 @@ impl XmlSerialize for TagList {
el.write_empty()?; el.write_empty()?;
} }
if let Some(qname) = &qname { if let Some(tagname) = tagname.as_ref() {
writer.write_event(Event::End(BytesEnd::from(qname.to_owned())))?; writer.write_event(Event::End(BytesEnd::new(tagname)))?;
} }
Ok(()) Ok(())
} }

View File

@@ -25,10 +25,10 @@ pub struct ContentUpdate {
} }
#[derive(XmlSerialize, XmlRootTag, Debug)] #[derive(XmlSerialize, XmlRootTag, Debug)]
#[xml(root = b"push-message", ns = "rustical_dav::namespace::NS_DAVPUSH")] #[xml(root = "push-message", ns = "rustical_dav::namespace::NS_DAVPUSH")]
#[xml(ns_prefix( #[xml(ns_prefix(
rustical_dav::namespace::NS_DAVPUSH = b"", rustical_dav::namespace::NS_DAVPUSH = "",
rustical_dav::namespace::NS_DAV = b"D", rustical_dav::namespace::NS_DAV = "D",
))] ))]
struct PushMessage { struct PushMessage {
#[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")] #[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")]

View File

@@ -35,12 +35,12 @@ pub enum Trigger {
#[derive(XmlSerialize, XmlDeserialize, PartialEq, Clone, Debug)] #[derive(XmlSerialize, XmlDeserialize, PartialEq, Clone, Debug)]
pub struct ContentUpdate( pub struct ContentUpdate(
#[xml(rename = b"depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth, #[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth,
); );
#[derive(XmlSerialize, PartialEq, Clone, Debug)] #[derive(XmlSerialize, PartialEq, Clone, Debug)]
pub struct PropertyUpdate( pub struct PropertyUpdate(
#[xml(rename = b"depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth, #[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth,
); );
impl XmlDeserialize for PropertyUpdate { impl XmlDeserialize for PropertyUpdate {
@@ -51,8 +51,8 @@ impl XmlDeserialize for PropertyUpdate {
) -> Result<Self, rustical_xml::XmlError> { ) -> Result<Self, rustical_xml::XmlError> {
#[derive(XmlDeserialize, PartialEq, Clone, Debug)] #[derive(XmlDeserialize, PartialEq, Clone, Debug)]
struct FakePropertyUpdate( struct FakePropertyUpdate(
#[xml(rename = b"depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth, #[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth,
#[xml(rename = b"prop", ns = "rustical_dav::namespace::NS_DAV")] pub Unparsed, #[xml(rename = "prop", ns = "rustical_dav::namespace::NS_DAV")] pub Unparsed,
); );
let FakePropertyUpdate(depth, _) = FakePropertyUpdate::deserialize(reader, start, empty)?; let FakePropertyUpdate(depth, _) = FakePropertyUpdate::deserialize(reader, start, empty)?;
Ok(Self(depth)) Ok(Self(depth))

View File

@@ -17,7 +17,7 @@ pub struct WebPushSubscription {
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq)]
pub struct SubscriptionPublicKey { pub struct SubscriptionPublicKey {
#[xml(ty = "attr", rename = b"type")] #[xml(ty = "attr", rename = "type")]
pub ty: String, pub ty: String,
#[xml(ty = "text")] #[xml(ty = "text")]
pub key: String, pub key: String,
@@ -33,7 +33,7 @@ pub struct SubscriptionElement {
pub struct TriggerElement(#[xml(ty = "untagged", flatten)] Vec<Trigger>); pub struct TriggerElement(#[xml(ty = "untagged", flatten)] Vec<Trigger>);
#[derive(XmlDeserialize, XmlRootTag, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, XmlRootTag, Clone, Debug, PartialEq)]
#[xml(root = b"push-register")] #[xml(root = "push-register")]
#[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")] #[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")]
pub struct PushRegister { pub struct PushRegister {
#[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")] #[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")]

View File

@@ -17,7 +17,7 @@ export class DeleteButton extends LitElement {
} }
protected render() { protected render() {
let text = this.trash ? 'Move to trash' : 'Delete' let text = this.trash ? 'Trash' : 'Delete'
return html`<button class="delete" @click=${e => this._onClick(e)}>${text}</button>` return html`<button class="delete" @click=${e => this._onClick(e)}>${text}</button>`
} }

View File

@@ -28,9 +28,9 @@ export class EditAddressbookForm extends LitElement {
override render() { override render() {
return html` return html`
<button @click=${() => this.dialog.value.showModal()}>Edit addressbook</button> <button @click=${() => this.dialog.value.showModal()}>Edit</button>
<dialog ${ref(this.dialog)}> <dialog ${ref(this.dialog)}>
<h3>Create addressbook</h3> <h3>Edit addressbook</h3>
<form @submit=${this.submit} ${ref(this.form)}> <form @submit=${this.submit} ${ref(this.form)}>
<label> <label>
Displayname Displayname

View File

@@ -40,9 +40,9 @@ export class EditCalendarForm extends LitElement {
override render() { override render() {
return html` return html`
<button @click=${() => this.dialog.value.showModal()}>Edit calendar</button> <button @click=${() => this.dialog.value.showModal()}>Edit</button>
<dialog ${ref(this.dialog)}> <dialog ${ref(this.dialog)}>
<h3>Create calendar</h3> <h3>Edit calendar</h3>
<form @submit=${this.submit} ${ref(this.form)}> <form @submit=${this.submit} ${ref(this.form)}>
<label> <label>
Displayname Displayname

View File

@@ -19,7 +19,7 @@ let DeleteButton = class extends i {
return this; return this;
} }
render() { render() {
let text = this.trash ? "Move to trash" : "Delete"; let text = this.trash ? "Trash" : "Delete";
return x`<button class="delete" @click=${(e) => this._onClick(e)}>${text}</button>`; return x`<button class="delete" @click=${(e) => this._onClick(e)}>${text}</button>`;
} }
async _onClick(event) { async _onClick(event) {

View File

@@ -27,9 +27,9 @@ let EditAddressbookForm = class extends i {
} }
render() { render() {
return x` return x`
<button @click=${() => this.dialog.value.showModal()}>Edit addressbook</button> <button @click=${() => this.dialog.value.showModal()}>Edit</button>
<dialog ${n(this.dialog)}> <dialog ${n(this.dialog)}>
<h3>Create addressbook</h3> <h3>Edit addressbook</h3>
<form @submit=${this.submit} ${n(this.form)}> <form @submit=${this.submit} ${n(this.form)}>
<label> <label>
Displayname Displayname

View File

@@ -28,9 +28,9 @@ let EditCalendarForm = class extends i {
} }
render() { render() {
return x` return x`
<button @click=${() => this.dialog.value.showModal()}>Edit calendar</button> <button @click=${() => this.dialog.value.showModal()}>Edit</button>
<dialog ${n(this.dialog)}> <dialog ${n(this.dialog)}>
<h3>Create calendar</h3> <h3>Edit calendar</h3>
<form @submit=${this.submit} ${n(this.form)}> <form @submit=${this.submit} ${n(this.form)}>
<label> <label>
Displayname Displayname

View File

@@ -290,6 +290,7 @@ ul.collection-list {
.color-chip { .color-chip {
background: var(--color); background: var(--color);
grid-area: color-chip; grid-area: color-chip;
margin-left: 8px;
} }
.actions { .actions {
@@ -346,6 +347,17 @@ select {
} }
} }
form {
input[type="text"],
input[type="password"],
input[type="color"],
textarea,
select {
width: 100%;
}
}
svg.icon { svg.icon {
stroke-width: 2px; stroke-width: 2px;
color: var(--text-on-background-color); color: var(--text-on-background-color);

View File

@@ -1,13 +1,13 @@
<h2>{{ user.id }}'s Calendars</h2> <h2>{{ user.id }}'s Calendars</h2>
<ul class="collection-list"> <ul class="collection-list">
{% for (meta, calendar) in calendars %} {% for (meta, calendar) in calendars %}
{% let color = calendar.color.to_owned().unwrap_or("transparent".to_owned()) %} {% let color = calendar.meta.color.to_owned().unwrap_or("transparent".to_owned()) %}
<li class="collection-list-item" style="--color: {{ color }}"> <li class="collection-list-item" style="--color: {{ color }}">
<a href="/frontend/user/{{ calendar.principal }}/calendar/{{ calendar.id }}"></a> <a href="/frontend/user/{{ calendar.principal }}/calendar/{{ calendar.id }}"></a>
<div class="inner"> <div class="inner">
<span class="title"> <span class="title">
{%- if calendar.principal != user.id -%}{{ calendar.principal }}/{%- endif -%} {%- if calendar.principal != user.id -%}{{ calendar.principal }}/{%- endif -%}
{{ calendar.displayname.to_owned().unwrap_or(calendar.id.to_owned()) }} {{ calendar.meta.displayname.to_owned().unwrap_or(calendar.id.to_owned()) }}
<div class="comps"> <div class="comps">
{% for comp in calendar.components %} {% for comp in calendar.components %}
<span>{{ comp }}</span> <span>{{ comp }}</span>
@@ -15,7 +15,7 @@
</div> </div>
</span> </span>
<span class="description"> <span class="description">
{% if let Some(description) = calendar.description %}{{ description }}{% endif %} {% if let Some(description) = calendar.meta.description %}{{ description }}{% endif %}
</span> </span>
{% if let Some(subscription_url) = calendar.subscription_url %} {% if let Some(subscription_url) = calendar.subscription_url %}
<span class="subscription-url">{{ subscription_url }}</span> <span class="subscription-url">{{ subscription_url }}</span>
@@ -29,9 +29,9 @@
principal="{{ calendar.principal }}" principal="{{ calendar.principal }}"
cal_id="{{ calendar.id }}" cal_id="{{ calendar.id }}"
timezone_id="{{ calendar.timezone_id.as_deref().unwrap_or_default() }}" timezone_id="{{ calendar.timezone_id.as_deref().unwrap_or_default() }}"
displayname="{{ calendar.displayname.as_deref().unwrap_or_default() }}" displayname="{{ calendar.meta.displayname.as_deref().unwrap_or_default() }}"
description="{{ calendar.description.as_deref().unwrap_or_default() }}" description="{{ calendar.meta.description.as_deref().unwrap_or_default() }}"
color="{{ calendar.color.as_deref().unwrap_or_default() }}" color="{{ calendar.meta.color.as_deref().unwrap_or_default() }}"
components="{{ calendar.components | json }}" components="{{ calendar.components | json }}"
></edit-calendar-form> ></edit-calendar-form>
<delete-button trash href="/caldav/principal/{{ calendar.principal }}/{{ calendar.id }}"></delete-button> <delete-button trash href="/caldav/principal/{{ calendar.principal }}/{{ calendar.id }}"></delete-button>
@@ -51,13 +51,13 @@
<h3>Deleted Calendars</h3> <h3>Deleted Calendars</h3>
<ul class="collection-list"> <ul class="collection-list">
{% for (meta, calendar) in deleted_calendars %} {% for (meta, calendar) in deleted_calendars %}
{% let color = calendar.color.to_owned().unwrap_or("transparent".to_owned()) %} {% let color = calendar.meta.color.to_owned().unwrap_or("transparent".to_owned()) %}
<li class="collection-list-item" style="--color: {{ color }}"> <li class="collection-list-item" style="--color: {{ color }}">
<a href="/frontend/user/{{ calendar.principal }}/calendar/{{ calendar.id}}"></a> <a href="/frontend/user/{{ calendar.principal }}/calendar/{{ calendar.id}}"></a>
<div class="inner"> <div class="inner">
<span class="title"> <span class="title">
{%- if calendar.principal != user.id -%}{{ calendar.principal }}/{%- endif -%} {%- if calendar.principal != user.id -%}{{ calendar.principal }}/{%- endif -%}
{{ calendar.displayname.to_owned().unwrap_or(calendar.id.to_owned()) }} {{ calendar.meta.displayname.to_owned().unwrap_or(calendar.id.to_owned()) }}
<div class="comps"> <div class="comps">
{% for comp in calendar.components %} {% for comp in calendar.components %}
<span>{{ comp }}</span> <span>{{ comp }}</span>
@@ -65,7 +65,7 @@
</div> </div>
</span> </span>
<span class="description"> <span class="description">
{% if let Some(description) = calendar.description %}{{ description }}{% endif %} {% if let Some(description) = calendar.meta.description %}{{ description }}{% endif %}
</span> </span>
<div class="actions"> <div class="actions">
<form action="/frontend/user/{{ calendar.principal }}/calendar/{{ calendar.id}}/restore" method="POST" <form action="/frontend/user/{{ calendar.principal }}/calendar/{{ calendar.id}}/restore" method="POST"

View File

@@ -4,9 +4,9 @@
{% endblock %} {% endblock %}
{% block content %} {% block content %}
{% let name = calendar.displayname.to_owned().unwrap_or(calendar.id.to_owned()) %} {% let name = calendar.meta.displayname.to_owned().unwrap_or(calendar.id.to_owned()) %}
<h1>{{ calendar.principal }}/{{ name }}</h1> <h1>{{ calendar.principal }}/{{ name }}</h1>
{% if let Some(description) = calendar.description %}<p>{{ description }}</p>{% endif%} {% if let Some(description) = calendar.meta.description %}<p>{{ description }}</p>{% endif%}
{% if let Some(subscription_url) = calendar.subscription_url %} {% if let Some(subscription_url) = calendar.subscription_url %}
<h2>Subscription URL</h2> <h2>Subscription URL</h2>
@@ -25,9 +25,6 @@
{% if let Some(timezone_id) = calendar.timezone_id %} {% if let Some(timezone_id) = calendar.timezone_id %}
<p>{{ timezone_id }}</p> <p>{{ timezone_id }}</p>
{% endif %} {% endif %}
{% if let Some(timezone) = calendar.get_vtimezone() %}
<textarea rows="16" readonly>{{ timezone }}</textarea>
{% endif %}
<pre>{{ calendar|json }}</pre> <pre>{{ calendar|json }}</pre>

View File

@@ -13,6 +13,7 @@ use tower::Service;
#[derive(Clone, RustEmbed, Default)] #[derive(Clone, RustEmbed, Default)]
#[folder = "public/assets"] #[folder = "public/assets"]
#[allow(dead_code)] // Since this is not used with the frontend-dev feature
pub struct Assets; pub struct Assets;
#[derive(Clone, Default)] #[derive(Clone, Default)]

View File

@@ -15,6 +15,10 @@ pub struct EventObject {
} }
impl EventObject { impl EventObject {
pub fn get_uid(&self) -> &str {
self.event.get_uid()
}
pub fn get_dtstart(&self) -> Result<Option<CalDateTime>, Error> { pub fn get_dtstart(&self) -> Result<Option<CalDateTime>, Error> {
if let Some(dtstart) = self.event.get_dtstart() { if let Some(dtstart) = self.event.get_dtstart() {
Ok(Some(CalDateTime::parse_prop(dtstart, &self.timezones)?)) Ok(Some(CalDateTime::parse_prop(dtstart, &self.timezones)?))
@@ -92,6 +96,7 @@ impl EventObject {
&self, &self,
start: Option<DateTime<Utc>>, start: Option<DateTime<Utc>>,
end: Option<DateTime<Utc>>, end: Option<DateTime<Utc>>,
overrides: &[EventObject],
) -> Result<Vec<IcalEvent>, Error> { ) -> Result<Vec<IcalEvent>, Error> {
if let Some(mut rrule_set) = self.recurrence_ruleset()? { if let Some(mut rrule_set) = self.recurrence_ruleset()? {
if let Some(start) = start { if let Some(start) = start {
@@ -107,13 +112,30 @@ impl EventObject {
.get_dtend()? .get_dtend()?
.map(|dtend| dtend.as_datetime().into_owned() - dtstart.as_datetime().into_owned()); .map(|dtend| dtend.as_datetime().into_owned() - dtstart.as_datetime().into_owned());
for date in dates { 'recurrence: for date in dates {
let date = CalDateTime::from(date); let date = CalDateTime::from(date);
let dateformat = if dtstart.is_date() { let dateformat = if dtstart.is_date() {
date.format_date() date.format_date()
} else { } else {
date.format() date.format()
}; };
for _override in overrides {
if let Some(override_id) = &_override
.event
.get_recurrence_id()
.as_ref()
.expect("overrides have a recurrence id")
.value
&& override_id == &dateformat
{
// We have an override for this occurence
//
events.push(_override.event.clone());
continue 'recurrence;
}
}
let mut ev = self.event.clone().mutable(); let mut ev = self.event.clone().mutable();
ev.remove_property("RRULE"); ev.remove_property("RRULE");
ev.remove_property("RDATE"); ev.remove_property("RDATE");
@@ -229,10 +251,18 @@ END:VEVENT\r\n",
#[test] #[test]
fn test_expand_recurrence() { fn test_expand_recurrence() {
let event = CalendarObject::from_ics(ICS.to_string()).unwrap(); let event = CalendarObject::from_ics(ICS.to_string()).unwrap();
let event = event.event().unwrap(); let (event, overrides) = if let crate::CalendarObjectComponent::Event(
main_event,
overrides,
) = event.get_data()
{
(main_event, overrides)
} else {
panic!()
};
let events: Vec<String> = event let events: Vec<String> = event
.expand_recurrence(None, None) .expand_recurrence(None, None, overrides)
.unwrap() .unwrap()
.into_iter() .into_iter()
.map(|event| Emitter::generate(&event)) .map(|event| Emitter::generate(&event))

View File

@@ -3,3 +3,9 @@ use ical::parser::ical::component::IcalJournal;
#[derive(Debug, Clone, From)] #[derive(Debug, Clone, From)]
pub struct JournalObject(pub IcalJournal); pub struct JournalObject(pub IcalJournal);
impl JournalObject {
pub fn get_uid(&self) -> &str {
self.0.get_uid()
}
}

View File

@@ -56,18 +56,75 @@ impl rustical_xml::ValueDeserialize for CalendarObjectType {
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub enum CalendarObjectComponent { pub enum CalendarObjectComponent {
Event(EventObject), Event(EventObject, Vec<EventObject>),
Todo(TodoObject), Todo(TodoObject, Vec<TodoObject>),
Journal(JournalObject), Journal(JournalObject, Vec<JournalObject>),
} }
impl Default for CalendarObjectComponent { impl CalendarObjectComponent {
fn default() -> Self { fn from_events(mut events: Vec<EventObject>) -> Result<Self, Error> {
Self::Event(EventObject::default()) let main_event = events
.extract_if(.., |event| event.event.get_recurrence_id().is_none())
.next()
.expect("there must be one main event");
let overrides = events;
for event in &overrides {
if event.get_uid() != main_event.get_uid() {
return Err(Error::InvalidData(
"Calendar object contains multiple UIDs".to_owned(),
));
}
if event.event.get_recurrence_id().is_none() {
return Err(Error::InvalidData(
"Calendar object can only contain one main component".to_owned(),
));
}
}
Ok(Self::Event(main_event, overrides))
}
fn from_todos(mut todos: Vec<TodoObject>) -> Result<Self, Error> {
let main_todo = todos
.extract_if(.., |todo| todo.0.get_recurrence_id().is_none())
.next()
.expect("there must be one main event");
let overrides = todos;
for todo in &overrides {
if todo.get_uid() != main_todo.get_uid() {
return Err(Error::InvalidData(
"Calendar object contains multiple UIDs".to_owned(),
));
}
if todo.0.get_recurrence_id().is_none() {
return Err(Error::InvalidData(
"Calendar object can only contain one main component".to_owned(),
));
}
}
Ok(Self::Todo(main_todo, overrides))
}
fn from_journals(mut journals: Vec<JournalObject>) -> Result<Self, Error> {
let main_journal = journals
.extract_if(.., |journal| journal.0.get_recurrence_id().is_none())
.next()
.expect("there must be one main event");
let overrides = journals;
for journal in &overrides {
if journal.get_uid() != main_journal.get_uid() {
return Err(Error::InvalidData(
"Calendar object contains multiple UIDs".to_owned(),
));
}
if journal.0.get_recurrence_id().is_none() {
return Err(Error::InvalidData(
"Calendar object can only contain one main component".to_owned(),
));
}
}
Ok(Self::Journal(main_journal, overrides))
} }
} }
#[derive(Debug, Clone, Default)] #[derive(Debug, Clone)]
pub struct CalendarObject { pub struct CalendarObject {
data: CalendarObjectComponent, data: CalendarObjectComponent,
properties: Vec<Property>, properties: Vec<Property>,
@@ -84,16 +141,16 @@ impl CalendarObject {
"multiple calendars, only one allowed".to_owned(), "multiple calendars, only one allowed".to_owned(),
)); ));
} }
if cal.events.len()
+ cal.alarms.len() if !cal.events.is_empty() as u8
+ cal.todos.len() + !cal.todos.is_empty() as u8
+ cal.journals.len() + !cal.journals.is_empty() as u8
+ cal.free_busys.len() + !cal.free_busys.is_empty() as u8
!= 1 != 1
{ {
// https://datatracker.ietf.org/doc/html/rfc4791#section-4.1 // https://datatracker.ietf.org/doc/html/rfc4791#section-4.1
return Err(Error::InvalidData( return Err(Error::InvalidData(
"iCalendar object is only allowed to have exactly one component".to_owned(), "iCalendar object must have exactly one component type".to_owned(),
)); ));
} }
@@ -111,12 +168,27 @@ impl CalendarObject {
.map(|timezone| (timezone.get_tzid().to_owned(), timezone)) .map(|timezone| (timezone.get_tzid().to_owned(), timezone))
.collect(); .collect();
let data = if let Some(event) = cal.events.into_iter().next() { let data = if !cal.events.is_empty() {
CalendarObjectComponent::Event(EventObject { event, timezones }) CalendarObjectComponent::from_events(
} else if let Some(todo) = cal.todos.into_iter().next() { cal.events
CalendarObjectComponent::Todo(todo.into()) .into_iter()
} else if let Some(journal) = cal.journals.into_iter().next() { .map(|event| EventObject {
CalendarObjectComponent::Journal(journal.into()) event,
timezones: timezones.clone(),
})
.collect(),
)?
} else if !cal.todos.is_empty() {
CalendarObjectComponent::from_todos(
cal.todos.into_iter().map(|todo| todo.into()).collect(),
)?
} else if !cal.journals.is_empty() {
CalendarObjectComponent::from_journals(
cal.journals
.into_iter()
.map(|journal| journal.into())
.collect(),
)?
} else { } else {
return Err(Error::InvalidData( return Err(Error::InvalidData(
"iCalendar component type not supported :(".to_owned(), "iCalendar component type not supported :(".to_owned(),
@@ -141,9 +213,11 @@ impl CalendarObject {
pub fn get_id(&self) -> &str { pub fn get_id(&self) -> &str {
match &self.data { match &self.data {
CalendarObjectComponent::Todo(todo) => todo.0.get_uid(), // We've made sure before that the first component exists and all components share the
CalendarObjectComponent::Event(event) => event.event.get_uid(), // same UID
CalendarObjectComponent::Journal(journal) => journal.0.get_uid(), CalendarObjectComponent::Todo(todo, _) => todo.0.get_uid(),
CalendarObjectComponent::Event(event, _) => event.event.get_uid(),
CalendarObjectComponent::Journal(journal, _) => journal.0.get_uid(),
} }
} }
@@ -164,33 +238,40 @@ impl CalendarObject {
     pub fn get_object_type(&self) -> CalendarObjectType {
         match self.data {
-            CalendarObjectComponent::Todo(_) => CalendarObjectType::Todo,
-            CalendarObjectComponent::Event(_) => CalendarObjectType::Event,
-            CalendarObjectComponent::Journal(_) => CalendarObjectType::Journal,
+            CalendarObjectComponent::Todo(_, _) => CalendarObjectType::Todo,
+            CalendarObjectComponent::Event(_, _) => CalendarObjectType::Event,
+            CalendarObjectComponent::Journal(_, _) => CalendarObjectType::Journal,
         }
     }
     pub fn get_first_occurence(&self) -> Result<Option<CalDateTime>, Error> {
         match &self.data {
-            CalendarObjectComponent::Event(event) => event.get_dtstart(),
+            CalendarObjectComponent::Event(main_event, overrides) => Ok(overrides
+                .iter()
+                .chain([main_event].into_iter())
+                .map(|event| event.get_dtstart())
+                .collect::<Result<Vec<_>, _>>()?
+                .into_iter()
+                .flatten()
+                .min()),
             _ => Ok(None),
         }
     }

     pub fn get_last_occurence(&self) -> Result<Option<CalDateTime>, Error> {
         match &self.data {
-            CalendarObjectComponent::Event(event) => event.get_last_occurence(),
+            CalendarObjectComponent::Event(main_event, overrides) => Ok(overrides
+                .iter()
+                .chain([main_event].into_iter())
+                .map(|event| event.get_last_occurence())
+                .collect::<Result<Vec<_>, _>>()?
+                .into_iter()
+                .flatten()
+                .max()),
             _ => Ok(None),
         }
     }
-    pub fn event(&self) -> Option<&EventObject> {
-        match &self.data {
-            CalendarObjectComponent::Event(event) => Some(event),
-            _ => None,
-        }
-    }
-
     pub fn expand_recurrence(
         &self,
         start: Option<DateTime<Utc>>,
@@ -198,10 +279,10 @@ impl CalendarObject {
     ) -> Result<String, Error> {
         // Only events can be expanded
         match &self.data {
-            CalendarObjectComponent::Event(event) => {
+            CalendarObjectComponent::Event(main_event, overrides) => {
                 let cal = IcalCalendar {
                     properties: self.properties.clone(),
-                    events: event.expand_recurrence(start, end)?,
+                    events: main_event.expand_recurrence(start, end, overrides)?,
                     ..Default::default()
                 };
                 Ok(cal.generate())
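Two small idioms above are worth spelling out: casting `!is_empty()` to `u8` and summing counts how many component kinds are present, and chaining the main component with its overrides then taking `min`/`max` over the flattened `Option`s picks the first/last occurrence. A self-contained sketch with illustrative stand-in values (not the crate's types):

```rust
fn main() {
    let events = vec!["main", "override"];
    let todos: Vec<&str> = vec![];
    let journals: Vec<&str> = vec![];

    // `!v.is_empty() as u8` is 1 when the list is non-empty, 0 otherwise,
    // so the sum is the number of component kinds present in the object.
    let kinds = !events.is_empty() as u8 + !todos.is_empty() as u8 + !journals.is_empty() as u8;
    assert_eq!(kinds, 1);

    // Earliest start across the main component and all overrides:
    // chain them, drop the Nones, take the minimum.
    let starts: Vec<Option<u32>> = vec![Some(20041206), None, Some(20041213)];
    let first = starts.into_iter().flatten().min();
    assert_eq!(first, Some(20041206));
}
```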

View File

@@ -3,3 +3,9 @@ use ical::parser::ical::component::IcalTodo;
 #[derive(Debug, Clone, From)]
 pub struct TodoObject(pub IcalTodo);
+
+impl TodoObject {
+    pub fn get_uid(&self) -> &str {
+        self.0.get_uid()
+    }
+}

View File

@@ -0,0 +1,30 @@
use rustical_ical::CalendarObject;
const MULTI_VEVENT: &str = r#"
BEGIN:VCALENDAR
PRODID:-//Example Corp.//CalDAV Client//EN
VERSION:2.0
BEGIN:VEVENT
UID:2@example.com
SUMMARY:Weekly Meeting
DTSTAMP:20041210T183838Z
DTSTART:20041206T120000Z
DTEND:20041206T130000Z
RRULE:FREQ=WEEKLY
END:VEVENT
BEGIN:VEVENT
UID:2@example.com
SUMMARY:Weekly Meeting
RECURRENCE-ID:20041213T120000Z
DTSTAMP:20041210T183838Z
DTSTART:20041213T130000Z
DTEND:20041213T140000Z
END:VEVENT
END:VCALENDAR
"#;
#[test]
fn parse_calendar_object() {
let object = CalendarObject::from_ics(MULTI_VEVENT.to_string()).unwrap();
object.expand_recurrence(None, None).unwrap();
}
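Both VEVENTs above share the UID `2@example.com`, so a natural (hypothetical, not part of this diff) extra assertion inside `parse_calendar_object` would be:

```rust
// Hypothetical extension of the test above: the object's id is the shared UID.
assert_eq!(object.get_id(), "2@example.com");
```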

View File

@@ -192,20 +192,19 @@ pub async fn route_get_oidc_callback<US: UserStore + Clone>(
         .await
         .map_err(|e| OidcError::UserInfo(e.to_string()))?;

-    if let Some(require_group) = &oidc_config.require_group {
-        if !user_info_claims
+    if let Some(require_group) = &oidc_config.require_group
+        && !user_info_claims
             .additional_claims()
             .groups
             .clone()
             .unwrap_or_default()
             .contains(require_group)
         {
             return Ok((
                 StatusCode::UNAUTHORIZED,
                 "User is not in an authorized group to use RustiCal",
             )
                 .into_response());
-        }
     }

     let user_id = match oidc_config.claim_userid {
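This hunk, like several below (the session middleware, the SQLite stores, the derive macros), rewrites nested `if let` blocks as let-chains joined with `&&`; that form needs a recent toolchain with the 2024 edition. A minimal standalone sketch of the pattern:

```rust
// Nested form: every condition adds one level of indentation.
fn nested(input: Option<&str>) -> Option<usize> {
    if let Some(s) = input {
        if !s.is_empty() {
            return Some(s.len());
        }
    }
    None
}

// Let-chain form: the same checks joined with `&&` in a single `if`.
fn chained(input: Option<&str>) -> Option<usize> {
    if let Some(s) = input
        && !s.is_empty()
    {
        return Some(s.len());
    }
    None
}

fn main() {
    assert_eq!(nested(Some("hi")), chained(Some("hi")));
    assert_eq!(nested(Some("")), chained(Some("")));
    assert_eq!(nested(None), chained(None));
}
```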

View File

@@ -72,12 +72,11 @@ where
         let mut inner = self.inner.clone();
         Box::pin(async move {
-            if let Some(session) = request.extensions().get::<Session>() {
-                if let Ok(Some(user_id)) = session.get::<String>("user").await {
-                    if let Ok(Some(user)) = ap.get_principal(&user_id).await {
-                        request.extensions_mut().insert(user);
-                    }
-                }
+            if let Some(session) = request.extensions().get::<Session>()
+                && let Ok(Some(user_id)) = session.get::<String>("user").await
+                && let Ok(Some(user)) = ap.get_principal(&user_id).await
+            {
+                request.extensions_mut().insert(user);
             }

             if let Some(auth) = auth_header {

View File

@@ -6,13 +6,23 @@ use rustical_ical::CalendarObjectType;
 use serde::{Deserialize, Serialize};

 #[derive(Debug, Default, Clone, Serialize, Deserialize)]
-pub struct Calendar {
-    pub principal: String,
-    pub id: String,
+pub struct CalendarMetadata {
+    // Attributes that may be outsourced
     pub displayname: Option<String>,
     pub order: i64,
     pub description: Option<String>,
     pub color: Option<String>,
+}
+
+#[derive(Debug, Default, Clone, Serialize, Deserialize)]
+pub struct Calendar {
+    // Attributes that may be outsourced
+    #[serde(flatten)]
+    pub meta: CalendarMetadata,
+    // Common calendar attributes
+    pub principal: String,
+    pub id: String,
     pub timezone_id: Option<String>,
     pub deleted_at: Option<NaiveDateTime>,
     pub synctoken: i64,
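Because the metadata struct is flattened, the serialized calendar keeps its flat shape and no nested `meta` object appears on the wire. A small sketch of the same pattern with standalone types, assuming `serde` and `serde_json` as dependencies:

```rust
use serde::{Deserialize, Serialize};

#[derive(Debug, Default, Serialize, Deserialize)]
struct Meta {
    displayname: Option<String>,
    order: i64,
}

#[derive(Debug, Default, Serialize, Deserialize)]
struct Collection {
    #[serde(flatten)]
    meta: Meta,
    id: String,
}

fn main() {
    let col = Collection {
        meta: Meta { displayname: Some("Work".into()), order: 1 },
        id: "work".into(),
    };
    let json = serde_json::to_value(&col).unwrap();
    // The flattened fields sit at the top level; there is no nested "meta" key.
    assert!(json.get("meta").is_none());
    assert_eq!(json["displayname"], "Work");
    assert_eq!(json["id"], "work");
}
```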

View File

@@ -1,282 +1,208 @@
+use crate::CalendarStore;
 use async_trait::async_trait;
-use derive_more::Constructor;
-use rustical_ical::CalendarObject;
-use std::sync::Arc;
-
-use crate::{
-    Calendar, CalendarStore, Error, calendar_store::CalendarQuery,
-    contact_birthday_store::BIRTHDAYS_PREFIX,
-};
-
-#[derive(Debug, Constructor)]
-pub struct CombinedCalendarStore<CS: CalendarStore, BS: CalendarStore> {
-    cal_store: Arc<CS>,
-    birthday_store: Arc<BS>,
-}
-
-impl<CS: CalendarStore, BS: CalendarStore> Clone for CombinedCalendarStore<CS, BS> {
-    fn clone(&self) -> Self {
-        Self {
-            cal_store: self.cal_store.clone(),
-            birthday_store: self.birthday_store.clone(),
-        }
-    }
-}
+use std::{collections::HashMap, sync::Arc};
+
+pub trait PrefixedCalendarStore: CalendarStore {
+    const PREFIX: &'static str;
+}
+
+#[derive(Clone)]
+pub struct CombinedCalendarStore {
+    stores: HashMap<&'static str, Arc<dyn CalendarStore>>,
+    default: Arc<dyn CalendarStore>,
+}
+
+impl CombinedCalendarStore {
+    pub fn new(default: Arc<dyn CalendarStore>) -> Self {
+        Self {
+            stores: HashMap::new(),
+            default,
+        }
+    }
+
+    pub fn with_store<CS: PrefixedCalendarStore>(mut self, store: Arc<CS>) -> Self {
+        let store: Arc<dyn CalendarStore> = store;
+        self.stores.insert(CS::PREFIX, store);
+        self
+    }
+
+    fn store_for_id(&self, id: &str) -> Arc<dyn CalendarStore> {
+        self.stores
+            .iter()
+            .find(|&(prefix, _store)| id.starts_with(prefix))
+            .map(|(_prefix, store)| store.clone())
+            .unwrap_or(self.default.clone())
+    }
+}
#[async_trait] #[async_trait]
impl<CS: CalendarStore, BS: CalendarStore> CalendarStore for CombinedCalendarStore<CS, BS> { impl CalendarStore for CombinedCalendarStore {
#[inline] #[inline]
async fn get_calendar( async fn get_calendar(
&self, &self,
principal: &str, principal: &str,
id: &str, id: &str,
show_deleted: bool, show_deleted: bool,
) -> Result<Calendar, Error> { ) -> Result<crate::Calendar, crate::Error> {
if id.starts_with(BIRTHDAYS_PREFIX) { self.store_for_id(id)
self.birthday_store .get_calendar(principal, id, show_deleted)
.get_calendar(principal, id, show_deleted) .await
.await
} else {
self.cal_store
.get_calendar(principal, id, show_deleted)
.await
}
} }
#[inline]
async fn update_calendar( async fn update_calendar(
&self, &self,
principal: String, principal: String,
id: String, id: String,
calendar: Calendar, calendar: crate::Calendar,
) -> Result<(), crate::Error> { ) -> Result<(), crate::Error> {
if id.starts_with(BIRTHDAYS_PREFIX) { self.store_for_id(&id)
self.birthday_store .update_calendar(principal, id, calendar)
.update_calendar(principal, id, calendar) .await
.await
} else {
self.cal_store
.update_calendar(principal, id, calendar)
.await
}
} }
#[inline] async fn insert_calendar(&self, calendar: crate::Calendar) -> Result<(), crate::Error> {
async fn insert_calendar(&self, calendar: Calendar) -> Result<(), Error> { self.store_for_id(&calendar.id)
if calendar.id.starts_with(BIRTHDAYS_PREFIX) { .insert_calendar(calendar)
Err(Error::ReadOnly) .await
} else {
self.cal_store.insert_calendar(calendar).await
}
} }
#[inline] async fn delete_calendar(
async fn get_calendars(&self, principal: &str) -> Result<Vec<Calendar>, Error> { &self,
Ok([ principal: &str,
self.cal_store.get_calendars(principal).await?, name: &str,
self.birthday_store.get_calendars(principal).await?, use_trashbin: bool,
] ) -> Result<(), crate::Error> {
.concat()) self.store_for_id(name)
.delete_calendar(principal, name, use_trashbin)
.await
}
async fn restore_calendar(&self, principal: &str, name: &str) -> Result<(), crate::Error> {
self.store_for_id(name)
.restore_calendar(principal, name)
.await
}
async fn sync_changes(
&self,
principal: &str,
cal_id: &str,
synctoken: i64,
) -> Result<(Vec<rustical_ical::CalendarObject>, Vec<String>, i64), crate::Error> {
self.store_for_id(cal_id)
.sync_changes(principal, cal_id, synctoken)
.await
}
async fn import_calendar(
&self,
calendar: crate::Calendar,
objects: Vec<rustical_ical::CalendarObject>,
merge_existing: bool,
) -> Result<(), crate::Error> {
self.store_for_id(&calendar.id)
.import_calendar(calendar, objects, merge_existing)
.await
}
async fn calendar_query(
&self,
principal: &str,
cal_id: &str,
query: crate::calendar_store::CalendarQuery,
) -> Result<Vec<rustical_ical::CalendarObject>, crate::Error> {
self.store_for_id(cal_id)
.calendar_query(principal, cal_id, query)
.await
}
async fn restore_object(
&self,
principal: &str,
cal_id: &str,
object_id: &str,
) -> Result<(), crate::Error> {
self.store_for_id(cal_id)
.restore_object(principal, cal_id, object_id)
.await
}
async fn calendar_metadata(
&self,
principal: &str,
cal_id: &str,
) -> Result<crate::CollectionMetadata, crate::Error> {
self.store_for_id(cal_id)
.calendar_metadata(principal, cal_id)
.await
}
async fn get_objects(
&self,
principal: &str,
cal_id: &str,
) -> Result<Vec<rustical_ical::CalendarObject>, crate::Error> {
self.store_for_id(cal_id)
.get_objects(principal, cal_id)
.await
}
async fn put_object(
&self,
principal: String,
cal_id: String,
object: rustical_ical::CalendarObject,
overwrite: bool,
) -> Result<(), crate::Error> {
self.store_for_id(&cal_id)
.put_object(principal, cal_id, object, overwrite)
.await
} }
#[inline]
async fn delete_object( async fn delete_object(
&self, &self,
principal: &str, principal: &str,
cal_id: &str, cal_id: &str,
object_id: &str, object_id: &str,
use_trashbin: bool, use_trashbin: bool,
) -> Result<(), Error> { ) -> Result<(), crate::Error> {
if cal_id.starts_with(BIRTHDAYS_PREFIX) { self.store_for_id(cal_id)
self.birthday_store .delete_object(principal, cal_id, object_id, use_trashbin)
.delete_object(principal, cal_id, object_id, use_trashbin) .await
.await
} else {
self.cal_store
.delete_object(principal, cal_id, object_id, use_trashbin)
.await
}
} }
#[inline]
async fn get_object( async fn get_object(
&self, &self,
principal: &str, principal: &str,
cal_id: &str, cal_id: &str,
object_id: &str, object_id: &str,
show_deleted: bool, show_deleted: bool,
) -> Result<CalendarObject, Error> { ) -> Result<rustical_ical::CalendarObject, crate::Error> {
if cal_id.starts_with(BIRTHDAYS_PREFIX) { self.store_for_id(cal_id)
self.birthday_store .get_object(principal, cal_id, object_id, show_deleted)
.get_object(principal, cal_id, object_id, show_deleted) .await
.await
} else {
self.cal_store
.get_object(principal, cal_id, object_id, show_deleted)
.await
}
} }
#[inline] async fn get_calendars(&self, principal: &str) -> Result<Vec<crate::Calendar>, crate::Error> {
async fn sync_changes( let mut calendars = self.default.get_calendars(principal).await?;
for store in self.stores.values() {
calendars.extend(store.get_calendars(principal).await?);
}
Ok(calendars)
}
async fn get_deleted_calendars(
&self, &self,
principal: &str, principal: &str,
cal_id: &str, ) -> Result<Vec<crate::Calendar>, crate::Error> {
synctoken: i64, let mut calendars = self.default.get_deleted_calendars(principal).await?;
) -> Result<(Vec<CalendarObject>, Vec<String>, i64), Error> { for store in self.stores.values() {
if cal_id.starts_with(BIRTHDAYS_PREFIX) { calendars.extend(store.get_deleted_calendars(principal).await?);
self.birthday_store
.sync_changes(principal, cal_id, synctoken)
.await
} else {
self.cal_store
.sync_changes(principal, cal_id, synctoken)
.await
} }
Ok(calendars)
} }
#[inline]
async fn calendar_metadata(
&self,
principal: &str,
cal_id: &str,
) -> Result<crate::CollectionMetadata, Error> {
if cal_id.starts_with(BIRTHDAYS_PREFIX) {
self.birthday_store
.calendar_metadata(principal, cal_id)
.await
} else {
self.cal_store.calendar_metadata(principal, cal_id).await
}
}
#[inline]
async fn get_objects(
&self,
principal: &str,
cal_id: &str,
) -> Result<Vec<CalendarObject>, Error> {
if cal_id.starts_with(BIRTHDAYS_PREFIX) {
self.birthday_store.get_objects(principal, cal_id).await
} else {
self.cal_store.get_objects(principal, cal_id).await
}
}
#[inline]
async fn calendar_query(
&self,
principal: &str,
cal_id: &str,
query: CalendarQuery,
) -> Result<Vec<CalendarObject>, Error> {
if cal_id.starts_with(BIRTHDAYS_PREFIX) {
self.birthday_store
.calendar_query(principal, cal_id, query)
.await
} else {
self.cal_store
.calendar_query(principal, cal_id, query)
.await
}
}
#[inline]
async fn restore_calendar(&self, principal: &str, name: &str) -> Result<(), Error> {
if name.starts_with(BIRTHDAYS_PREFIX) {
self.birthday_store.restore_calendar(principal, name).await
} else {
self.cal_store.restore_calendar(principal, name).await
}
}
#[inline]
async fn import_calendar(
&self,
calendar: Calendar,
objects: Vec<CalendarObject>,
merge_existing: bool,
) -> Result<(), Error> {
if calendar.id.starts_with(BIRTHDAYS_PREFIX) {
self.birthday_store
.import_calendar(calendar, objects, merge_existing)
.await
} else {
self.cal_store
.import_calendar(calendar, objects, merge_existing)
.await
}
}
#[inline]
async fn delete_calendar(
&self,
principal: &str,
name: &str,
use_trashbin: bool,
) -> Result<(), Error> {
if name.starts_with(BIRTHDAYS_PREFIX) {
self.birthday_store
.delete_calendar(principal, name, use_trashbin)
.await
} else {
self.cal_store
.delete_calendar(principal, name, use_trashbin)
.await
}
}
#[inline]
async fn get_deleted_calendars(&self, principal: &str) -> Result<Vec<Calendar>, Error> {
Ok([
self.birthday_store.get_deleted_calendars(principal).await?,
self.cal_store.get_deleted_calendars(principal).await?,
]
.concat())
}
#[inline]
async fn restore_object(
&self,
principal: &str,
cal_id: &str,
object_id: &str,
) -> Result<(), Error> {
if cal_id.starts_with(BIRTHDAYS_PREFIX) {
self.birthday_store
.restore_object(principal, cal_id, object_id)
.await
} else {
self.cal_store
.restore_object(principal, cal_id, object_id)
.await
}
}
#[inline]
async fn put_object(
&self,
principal: String,
cal_id: String,
object: CalendarObject,
overwrite: bool,
) -> Result<(), Error> {
if cal_id.starts_with(BIRTHDAYS_PREFIX) {
self.birthday_store
.put_object(principal, cal_id, object, overwrite)
.await
} else {
self.cal_store
.put_object(principal, cal_id, object, overwrite)
.await
}
}
#[inline]
fn is_read_only(&self, cal_id: &str) -> bool { fn is_read_only(&self, cal_id: &str) -> bool {
if cal_id.starts_with(BIRTHDAYS_PREFIX) { self.store_for_id(cal_id).is_read_only(cal_id)
self.birthday_store.is_read_only(cal_id)
} else {
self.cal_store.is_read_only(cal_id)
}
} }
} }
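All of the delegation boilerplate above funnels through `store_for_id`. A standalone sketch of that routing rule with plain types (not the store traits): ids carrying the `_birthdays_` prefix end up at the birthday store, everything else falls through to the default store.

```rust
// The first registered prefix that matches wins, otherwise the default is used.
fn route<'a>(stores: &[(&'a str, &'a str)], default: &'a str, id: &str) -> &'a str {
    stores
        .iter()
        .find(|(prefix, _)| id.starts_with(prefix))
        .map(|(_, store)| *store)
        .unwrap_or(default)
}

fn main() {
    let stores = [("_birthdays_", "birthday store")];
    assert_eq!(route(&stores, "sqlite store", "_birthdays_contacts"), "birthday store");
    assert_eq!(route(&stores, "sqlite store", "family"), "sqlite store");
}
```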

View File

@@ -1,4 +1,7 @@
-use crate::{Addressbook, AddressbookStore, Calendar, CalendarStore, Error};
+use crate::{
+    Addressbook, AddressbookStore, Calendar, CalendarStore, Error, calendar::CalendarMetadata,
+    combined_calendar_store::PrefixedCalendarStore,
+};
 use async_trait::async_trait;
 use derive_more::derive::Constructor;
 use rustical_ical::{AddressObject, CalendarObject, CalendarObjectType};
@@ -10,16 +13,22 @@ pub(crate) const BIRTHDAYS_PREFIX: &str = "_birthdays_";
 #[derive(Constructor, Clone)]
 pub struct ContactBirthdayStore<AS: AddressbookStore>(Arc<AS>);

+impl<AS: AddressbookStore> PrefixedCalendarStore for ContactBirthdayStore<AS> {
+    const PREFIX: &'static str = BIRTHDAYS_PREFIX;
+}
+
 fn birthday_calendar(addressbook: Addressbook) -> Calendar {
     Calendar {
         principal: addressbook.principal,
         id: format!("{}{}", BIRTHDAYS_PREFIX, addressbook.id),
-        displayname: addressbook
-            .displayname
-            .map(|name| format!("{name} birthdays")),
-        order: 0,
-        description: None,
-        color: None,
+        meta: CalendarMetadata {
+            displayname: addressbook
+                .displayname
+                .map(|name| format!("{name} birthdays")),
+            order: 0,
+            description: None,
+            color: None,
+        },
         timezone_id: None,
         deleted_at: addressbook.deleted_at,
         synctoken: addressbook.synctoken,

View File

@@ -22,7 +22,7 @@ pub use secret::Secret;
 pub use subscription_store::*;

 pub use addressbook::Addressbook;
-pub use calendar::Calendar;
+pub use calendar::{Calendar, CalendarMetadata};

 #[derive(Debug, Clone)]
 pub enum CollectionOperationInfo {

View File

@@ -433,14 +433,14 @@ impl AddressbookStore for SqliteAddressbookStore {
         Self::_delete_addressbook(&mut *tx, principal, addressbook_id, use_trashbin).await?;
         tx.commit().await.map_err(crate::Error::from)?;
-        if let Some(addressbook) = addressbook {
-            if let Err(err) = self.sender.try_send(CollectionOperation {
+        if let Some(addressbook) = addressbook
+            && let Err(err) = self.sender.try_send(CollectionOperation {
                 data: CollectionOperationInfo::Delete,
                 topic: addressbook.push_topic,
-            }) {
-                error!("Push notification about deleted addressbook failed: {err}");
-            };
-        }
+            })
+        {
+            error!("Push notification about deleted addressbook failed: {err}");
+        };
         Ok(())
     }

View File

@@ -5,7 +5,7 @@ use derive_more::derive::Constructor;
 use rustical_ical::{CalDateTime, CalendarObject, CalendarObjectType};
 use rustical_store::calendar_store::CalendarQuery;
 use rustical_store::synctoken::format_synctoken;
-use rustical_store::{Calendar, CalendarStore, CollectionMetadata, Error};
+use rustical_store::{Calendar, CalendarMetadata, CalendarStore, CollectionMetadata, Error};
 use rustical_store::{CollectionOperation, CollectionOperationInfo};
 use sqlx::types::chrono::NaiveDateTime;
 use sqlx::{Acquire, Executor, Sqlite, SqlitePool, Transaction};
@@ -69,10 +69,12 @@ impl From<CalendarRow> for Calendar {
         Self {
             principal: value.principal,
             id: value.id,
-            displayname: value.displayname,
-            order: value.order,
-            description: value.description,
-            color: value.color,
+            meta: CalendarMetadata {
+                displayname: value.displayname,
+                order: value.order,
+                description: value.description,
+                color: value.color,
+            },
             timezone_id: value.timezone_id,
             deleted_at: value.deleted_at,
             synctoken: value.synctoken,
@@ -159,10 +161,10 @@ impl SqliteCalendarStore {
             VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)"#,
             calendar.principal,
             calendar.id,
-            calendar.displayname,
-            calendar.description,
-            calendar.order,
-            calendar.color,
+            calendar.meta.displayname,
+            calendar.meta.description,
+            calendar.meta.order,
+            calendar.meta.color,
             calendar.subscription_url,
             calendar.timezone_id,
             calendar.push_topic,
@@ -189,10 +191,10 @@ impl SqliteCalendarStore {
             WHERE (principal, id) = (?, ?)"#,
             calendar.principal,
             calendar.id,
-            calendar.displayname,
-            calendar.description,
-            calendar.order,
-            calendar.color,
+            calendar.meta.displayname,
+            calendar.meta.description,
+            calendar.meta.order,
+            calendar.meta.color,
             calendar.timezone_id,
             calendar.push_topic,
             comp_event, comp_todo, comp_journal,
@@ -351,7 +353,6 @@ impl SqliteCalendarStore {
         object: CalendarObject,
         overwrite: bool,
     ) -> Result<(), Error> {
-        // TODO: Prevent objects from being commited to a subscription calendar
         let (object_id, ics) = (object.get_id(), object.get_ics());
         let first_occurence = object
@@ -554,14 +555,14 @@ impl CalendarStore for SqliteCalendarStore {
         Self::_delete_calendar(&mut *tx, principal, id, use_trashbin).await?;
         tx.commit().await.map_err(crate::Error::from)?;
-        if let Some(cal) = cal {
-            if let Err(err) = self.sender.try_send(CollectionOperation {
+        if let Some(cal) = cal
+            && let Err(err) = self.sender.try_send(CollectionOperation {
                 data: CollectionOperationInfo::Delete,
                 topic: cal.push_topic,
-            }) {
-                error!("Push notification about deleted calendar failed: {err}");
-            };
-        }
+            })
+        {
+            error!("Push notification about deleted calendar failed: {err}");
+        };
         Ok(())
     }
@@ -667,11 +668,16 @@ impl CalendarStore for SqliteCalendarStore {
         object: CalendarObject,
         overwrite: bool,
     ) -> Result<(), Error> {
-        // TODO: Prevent objects from being commited to a subscription calendar
         let mut tx = self.db.begin().await.map_err(crate::Error::from)?;
         let object_id = object.get_id().to_owned();
+        let calendar = Self::_get_calendar(&mut *tx, &principal, &cal_id, true).await?;
+        if calendar.subscription_url.is_some() {
+            // We cannot commit an object to a subscription calendar
+            return Err(Error::ReadOnly);
+        }
         Self::_put_object(
             &mut *tx,
             principal.to_owned(),
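The new guard reads the calendar inside the same transaction as the write, so the check and the insert see a consistent view. Reduced to its core, the check is just this (simplified stand-in types, not the store's; the real code returns `rustical_store::Error::ReadOnly`):

```rust
struct Calendar {
    subscription_url: Option<String>,
}

#[derive(Debug, PartialEq)]
enum Error {
    ReadOnly,
}

// Objects must not be committed to a subscription (read-only) calendar.
fn ensure_writable(calendar: &Calendar) -> Result<(), Error> {
    if calendar.subscription_url.is_some() {
        return Err(Error::ReadOnly);
    }
    Ok(())
}

fn main() {
    let subscribed = Calendar { subscription_url: Some("https://example.com/feed.ics".into()) };
    let local = Calendar { subscription_url: None };
    assert_eq!(ensure_writable(&subscribed), Err(Error::ReadOnly));
    assert!(ensure_writable(&local).is_ok());
}
```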

View File

@@ -1,11 +1,11 @@
 use std::collections::HashMap;

 use darling::{FromDeriveInput, FromField, FromMeta, FromVariant, util::Flag};
-use syn::{Ident, LitByteStr};
+use syn::{Ident, LitStr};

 #[derive(Debug, Default, FromMeta, Clone)]
 pub struct TagAttrs {
-    pub rename: Option<LitByteStr>,
+    pub rename: Option<LitStr>,
     pub ns: Option<syn::Path>,
 }
@@ -30,10 +30,10 @@ pub struct EnumAttrs {
 #[derive(Default, FromDeriveInput, Clone)]
 #[darling(attributes(xml))]
 pub struct StructAttrs {
-    pub root: Option<LitByteStr>,
+    pub root: Option<LitStr>,
     pub ns: Option<syn::Path>,
     #[darling(default)]
-    pub ns_prefix: HashMap<syn::Path, LitByteStr>,
+    pub ns_prefix: HashMap<syn::Path, LitStr>,
     pub allow_invalid: Flag,
 }

View File

@@ -35,7 +35,7 @@ impl Field {
} }
/// Field name in XML /// Field name in XML
pub fn xml_name(&self) -> syn::LitByteStr { pub fn xml_name(&self) -> syn::LitStr {
if let Some(rename) = self.attrs.common.rename.to_owned() { if let Some(rename) = self.attrs.common.rename.to_owned() {
rename rename
} else { } else {
@@ -43,7 +43,7 @@ impl Field {
.field_ident() .field_ident()
.as_ref() .as_ref()
.expect("unnamed tag fields need a rename attribute"); .expect("unnamed tag fields need a rename attribute");
syn::LitByteStr::new(ident.to_string().to_kebab_case().as_bytes(), ident.span()) syn::LitStr::new(ident.to_string().to_kebab_case().as_str(), ident.span())
} }
} }
@@ -174,6 +174,8 @@ impl Field {
.map(|ns| quote! { if ns == #ns }); .map(|ns| quote! { if ns == #ns });
let field_name = self.xml_name(); let field_name = self.xml_name();
let b_field_name =
syn::LitByteStr::new(self.xml_name().value().as_bytes(), field_name.span());
let builder_field_ident = self.builder_field_ident(); let builder_field_ident = self.builder_field_ident();
let deserializer = self.deserializer_type(); let deserializer = self.deserializer_type();
let value = quote! { <#deserializer as rustical_xml::XmlDeserialize>::deserialize(reader, &start, empty)? }; let value = quote! { <#deserializer as rustical_xml::XmlDeserialize>::deserialize(reader, &start, empty)? };
@@ -186,7 +188,7 @@ impl Field {
}; };
Some(quote! { Some(quote! {
(#namespace_match, #field_name) #namespace_condition => { #assignment; } (#namespace_match, #b_field_name) #namespace_condition => { #assignment; }
}) })
} }
@@ -231,6 +233,8 @@ impl Field {
} }
let builder_field_ident = self.builder_field_ident(); let builder_field_ident = self.builder_field_ident();
let field_name = self.xml_name(); let field_name = self.xml_name();
let b_field_name =
syn::LitByteStr::new(self.xml_name().value().as_bytes(), field_name.span());
let value = wrap_option_if_no_default( let value = wrap_option_if_no_default(
quote! { quote! {
@@ -240,7 +244,7 @@ impl Field {
); );
Some(quote! { Some(quote! {
#field_name => { #b_field_name => {
builder.#builder_field_ident = #value; builder.#builder_field_ident = #value;
} }
}) })
@@ -255,7 +259,6 @@ impl Field {
let value = quote! { let value = quote! {
if let ::quick_xml::name::ResolveResult::Bound(ns) = &ns { if let ::quick_xml::name::ResolveResult::Bound(ns) = &ns {
Some(ns.into()) Some(ns.into())
// Some(rustical_xml::ValueDeserialize::deserialize(&String::from_utf8_lossy(ns.0.as_ref()))?)
} else { } else {
None None
} }

View File

@@ -1,16 +1,12 @@
 pub(crate) fn get_generic_type(ty: &syn::Type) -> Option<&syn::Type> {
-    if let syn::Type::Path(syn::TypePath { path, .. }) = ty {
-        if let Some(seg) = path.segments.last() {
-            if let syn::PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments {
-                args,
-                ..
-            }) = &seg.arguments
-            {
-                if let Some(syn::GenericArgument::Type(t)) = &args.first() {
-                    return Some(t);
-                }
-            }
-        }
-    }
+    if let syn::Type::Path(syn::TypePath { path, .. }) = ty
+        && let Some(seg) = path.segments.last()
+        && let syn::PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments {
+            args, ..
+        }) = &seg.arguments
+        && let Some(syn::GenericArgument::Type(t)) = &args.first()
+    {
+        return Some(t);
+    }
     None
 }

View File

@@ -14,13 +14,13 @@ impl Variant {
&self.variant.ident &self.variant.ident
} }
pub fn xml_name(&self) -> syn::LitByteStr { pub fn xml_name(&self) -> syn::LitStr {
self.attrs self.attrs
.common .common
.rename .rename
.to_owned() .to_owned()
.unwrap_or(syn::LitByteStr::new( .unwrap_or(syn::LitStr::new(
self.ident().to_string().to_kebab_case().as_bytes(), self.ident().to_string().to_kebab_case().as_str(),
self.ident().span(), self.ident().span(),
)) ))
} }
@@ -75,6 +75,8 @@ impl Variant {
} }
let ident = self.ident(); let ident = self.ident();
let variant_name = self.xml_name(); let variant_name = self.xml_name();
let b_variant_name =
syn::LitByteStr::new(self.xml_name().value().as_bytes(), variant_name.span());
let deserializer_type = self.deserializer_type(); let deserializer_type = self.deserializer_type();
Some( Some(
@@ -93,7 +95,7 @@ impl Variant {
panic!("tuple variants should contain exactly one element"); panic!("tuple variants should contain exactly one element");
} }
quote! { quote! {
#variant_name => { #b_variant_name => {
let val = Some(<#deserializer_type as ::rustical_xml::XmlDeserialize>::deserialize(reader, start, empty)?); let val = Some(<#deserializer_type as ::rustical_xml::XmlDeserialize>::deserialize(reader, start, empty)?);
Ok(Self::#ident(val)) Ok(Self::#ident(val))
} }
@@ -104,7 +106,7 @@ impl Variant {
panic!("tuple variants should contain exactly one element"); panic!("tuple variants should contain exactly one element");
} }
quote! { quote! {
#variant_name => { #b_variant_name => {
let val = <#deserializer_type as ::rustical_xml::XmlDeserialize>::deserialize(reader, start, empty)?; let val = <#deserializer_type as ::rustical_xml::XmlDeserialize>::deserialize(reader, start, empty)?;
Ok(Self::#ident(val)) Ok(Self::#ident(val))
} }
@@ -112,7 +114,7 @@ impl Variant {
} }
(false, Fields::Unit, _) => { (false, Fields::Unit, _) => {
quote! { quote! {
#variant_name => { #b_variant_name => {
// Make sure that content is still consumed // Make sure that content is still consumed
<() as ::rustical_xml::XmlDeserialize>::deserialize(reader, start, empty)?; <() as ::rustical_xml::XmlDeserialize>::deserialize(reader, start, empty)?;
Ok(Self::#ident) Ok(Self::#ident)

View File

@@ -111,8 +111,7 @@ impl Enum {
                 Some(ns) => quote! { Some(#ns) },
                 None => quote! { None },
             };
-            let b_xml_name = variant.xml_name().value();
-            let xml_name = String::from_utf8_lossy(&b_xml_name);
+            let xml_name = variant.xml_name().value();
             let out = quote! {(#ns, #xml_name)};
             let ident = &variant.variant.ident;
@@ -134,8 +133,7 @@ impl Enum {
         let str_to_unit_branches = tagged_variants.iter().map(|variant| {
             let ident = &variant.variant.ident;
-            let b_xml_name = variant.xml_name().value();
-            let xml_name = String::from_utf8_lossy(&b_xml_name);
+            let xml_name = variant.xml_name().value();
             if variant.attrs.prop.is_some() {
                 quote! { #xml_name => Ok(Self::#ident (Default::default())) }
             } else {

View File

@@ -16,8 +16,8 @@ impl Enum {
fn serialize( fn serialize(
&self, &self,
ns: Option<::quick_xml::name::Namespace>, ns: Option<::quick_xml::name::Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &::std::collections::HashMap<::quick_xml::name::Namespace, &[u8]>, namespaces: &::std::collections::HashMap<::quick_xml::name::Namespace, &str>,
writer: &mut ::quick_xml::Writer<&mut Vec<u8>> writer: &mut ::quick_xml::Writer<&mut Vec<u8>>
) -> ::std::io::Result<()> { ) -> ::std::io::Result<()> {
use ::quick_xml::events::{BytesEnd, BytesStart, BytesText, Event}; use ::quick_xml::events::{BytesEnd, BytesStart, BytesText, Event};
@@ -25,19 +25,20 @@ impl Enum {
let prefix = ns let prefix = ns
.map(|ns| namespaces.get(&ns)) .map(|ns| namespaces.get(&ns))
.unwrap_or(None) .unwrap_or(None)
.map(|prefix| if !prefix.is_empty() { .map(|prefix| {
[*prefix, b":"].concat() if !prefix.is_empty() {
} else { format!("{prefix}:")
vec![] } else {
}); String::new()
}
});
let has_prefix = prefix.is_some(); let has_prefix = prefix.is_some();
let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat()); let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat());
let qname = tagname.as_ref().map(|tagname| ::quick_xml::name::QName(tagname));
const enum_untagged: bool = #enum_untagged; const enum_untagged: bool = #enum_untagged;
if let Some(qname) = &qname { if let Some(tagname) = tagname.as_ref() {
let mut bytes_start = BytesStart::from(qname.to_owned()); let mut bytes_start = BytesStart::new(tagname);
if !has_prefix { if !has_prefix {
if let Some(ns) = &ns { if let Some(ns) = &ns {
bytes_start.push_attribute((b"xmlns".as_ref(), ns.as_ref())); bytes_start.push_attribute((b"xmlns".as_ref(), ns.as_ref()));
@@ -48,8 +49,8 @@ impl Enum {
#(#variant_serializers);* #(#variant_serializers);*
if let Some(qname) = &qname { if let Some(tagname) = tagname.as_ref() {
writer.write_event(Event::End(BytesEnd::from(qname.to_owned())))?; writer.write_event(Event::End(BytesEnd::new(tagname)))?;
} }
Ok(()) Ok(())
} }

View File

@@ -66,6 +66,9 @@ impl Enum {
                     Event::CData(cdata) => {
                         return Err(::rustical_xml::XmlError::UnsupportedEvent("CDATA"));
                     }
+                    Event::GeneralRef(_) => {
+                        return Err(::rustical_xml::XmlError::UnsupportedEvent("GeneralRef"));
+                    }
                     Event::Decl(_) => { /* <?xml ... ?> ignore this */ }
                     Event::Comment(_) => { /* ignore */ }
                     Event::DocType(_) => { /* ignore */ }
@@ -108,8 +111,7 @@ impl Enum {
                 Some(ns) => quote! { Some(#ns) },
                 None => quote! { None },
             };
-            let b_xml_name = variant.xml_name().value();
-            let xml_name = String::from_utf8_lossy(&b_xml_name);
+            let xml_name = variant.xml_name().value();
             quote! {(#ns, #xml_name)}
         });

View File

@@ -33,7 +33,7 @@ impl NamedStruct {
let field_index = field.target_field_index(); let field_index = field.target_field_index();
quote! { quote! {
::quick_xml::events::attributes::Attribute { ::quick_xml::events::attributes::Attribute {
key: ::quick_xml::name::QName(#field_name), key: ::quick_xml::name::QName(#field_name.as_bytes()),
value: ::std::borrow::Cow::from(::rustical_xml::ValueSerialize::serialize(&self.#field_index).into_bytes()) value: ::std::borrow::Cow::from(::rustical_xml::ValueSerialize::serialize(&self.#field_index).into_bytes())
} }
} }
@@ -47,7 +47,7 @@ impl NamedStruct {
let field_index = field.target_field_index(); let field_index = field.target_field_index();
quote! { quote! {
let tag_str = self.#field_index.to_string(); let tag_str = self.#field_index.to_string();
let tag = Some(tag.unwrap_or(tag_str.as_bytes())); let tag = Some(tag.unwrap_or(tag_str.as_str()));
} }
}); });
@@ -70,13 +70,12 @@ impl NamedStruct {
.ns_prefix .ns_prefix
.iter() .iter()
.map(|(ns, prefix)| { .map(|(ns, prefix)| {
let sep = if !prefix.value().is_empty() { let attr_name = if prefix.value().is_empty() {
b":".to_vec() "xmlns".to_owned()
} else { } else {
b"".to_vec() format!("xmlns:{}", prefix.value())
}; };
let attr_name = [b"xmlns".as_ref(), &sep, &prefix.value()].concat(); let a = syn::LitByteStr::new(attr_name.as_bytes(), prefix.span());
let a = syn::LitByteStr::new(&attr_name, prefix.span());
quote! { quote! {
bytes_start.push_attribute((#a.as_ref(), #ns.as_ref())); bytes_start.push_attribute((#a.as_ref(), #ns.as_ref()));
} }
@@ -91,8 +90,8 @@ impl NamedStruct {
fn serialize( fn serialize(
&self, &self,
ns: Option<::quick_xml::name::Namespace>, ns: Option<::quick_xml::name::Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &::std::collections::HashMap<::quick_xml::name::Namespace, &[u8]>, namespaces: &::std::collections::HashMap<::quick_xml::name::Namespace, &str>,
writer: &mut ::quick_xml::Writer<&mut Vec<u8>> writer: &mut ::quick_xml::Writer<&mut Vec<u8>>
) -> ::std::io::Result<()> { ) -> ::std::io::Result<()> {
use ::quick_xml::events::{BytesEnd, BytesStart, BytesText, Event}; use ::quick_xml::events::{BytesEnd, BytesStart, BytesText, Event};
@@ -105,17 +104,16 @@ impl NamedStruct {
.unwrap_or(None) .unwrap_or(None)
.map(|prefix| { .map(|prefix| {
if !prefix.is_empty() { if !prefix.is_empty() {
[*prefix, b":"].concat() format!("{prefix}:")
} else { } else {
Vec::new() String::new()
} }
}); });
let has_prefix = prefix.is_some(); let has_prefix = prefix.is_some();
let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat()); let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat());
let qname = tagname.as_ref().map(|tagname| ::quick_xml::name::QName(tagname));
// if let Some(tagname) = tagname.as_ref() {
if let Some(qname) = &qname { let mut bytes_start = BytesStart::new(tagname);
let mut bytes_start = BytesStart::from(qname.to_owned());
if !has_prefix { if !has_prefix {
if let Some(ns) = &ns { if let Some(ns) = &ns {
bytes_start.push_attribute((b"xmlns".as_ref(), ns.as_ref())); bytes_start.push_attribute((b"xmlns".as_ref(), ns.as_ref()));
@@ -134,8 +132,8 @@ impl NamedStruct {
} }
if !#is_empty { if !#is_empty {
#(#tag_writers);* #(#tag_writers);*
if let Some(qname) = &qname { if let Some(tagname) = tagname.as_ref() {
writer.write_event(Event::End(BytesEnd::from(qname.to_owned())))?; writer.write_event(Event::End(BytesEnd::new(tagname)))?;
} }
} }
Ok(()) Ok(())
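The prefix handling above reduces to a simple rule: an empty prefix declares the default namespace as `xmlns`, a non-empty prefix declares `xmlns:<prefix>`. A tiny sketch of that attribute-name computation:

```rust
// xmlns attribute name for a namespace prefix declaration.
fn xmlns_attr_name(prefix: &str) -> String {
    if prefix.is_empty() {
        "xmlns".to_owned()
    } else {
        format!("xmlns:{prefix}")
    }
}

fn main() {
    assert_eq!(xmlns_attr_name(""), "xmlns");
    assert_eq!(xmlns_attr_name("D"), "xmlns:D");
}
```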

View File

@@ -68,7 +68,7 @@ impl NamedStruct {
.ns_prefix .ns_prefix
.iter() .iter()
.map(|(ns, prefix)| { .map(|(ns, prefix)| {
quote! { (#ns, #prefix.as_ref()) } quote! { (#ns, #prefix) }
}) })
.collect() .collect()
} else { } else {
@@ -77,9 +77,9 @@ impl NamedStruct {
quote! { quote! {
impl #impl_generics ::rustical_xml::XmlRootTag for #ident #type_generics #where_clause { impl #impl_generics ::rustical_xml::XmlRootTag for #ident #type_generics #where_clause {
fn root_tag() -> &'static [u8] { #root } fn root_tag() -> &'static str { #root }
fn root_ns() -> Option<::quick_xml::name::Namespace<'static>> { #ns } fn root_ns() -> Option<::quick_xml::name::Namespace<'static>> { #ns }
fn root_ns_prefixes() -> ::std::collections::HashMap<::quick_xml::name::Namespace<'static>, &'static [u8]> { fn root_ns_prefixes() -> ::std::collections::HashMap<::quick_xml::name::Namespace<'static>, &'static str> {
::std::collections::HashMap::from_iter(vec![ ::std::collections::HashMap::from_iter(vec![
#(#prefixes),* #(#prefixes),*
]) ])
@@ -148,6 +148,8 @@ impl NamedStruct {
} }
} }
let mut string = String::new();
if !empty { if !empty {
loop { loop {
let event = reader.read_event_into(&mut buf)?; let event = reader.read_event_into(&mut buf)?;
@@ -167,12 +169,23 @@ impl NamedStruct {
} }
} }
Event::Text(bytes_text) => { Event::Text(bytes_text) => {
let text = bytes_text.unescape()?; let text = bytes_text.decode()?;
#(#text_field_branches)* string.push_str(&text);
} }
Event::CData(cdata) => { Event::CData(cdata) => {
let text = String::from_utf8(cdata.to_vec())?; let text = String::from_utf8(cdata.to_vec())?;
#(#text_field_branches)* string.push_str(&text);
}
Event::GeneralRef(gref) => {
if let Some(char) = gref.resolve_char_ref()? {
string.push(char);
} else if let Some(text) =
quick_xml::escape::resolve_xml_entity(&gref.xml_content()?)
{
string.push_str(text);
} else {
return Err(XmlError::UnsupportedEvent("invalid XML ref"));
}
} }
Event::Decl(_) => { /* <?xml ... ?> ignore this */ } Event::Decl(_) => { /* <?xml ... ?> ignore this */ }
Event::Comment(_) => { /* ignore */ } Event::Comment(_) => { /* ignore */ }
@@ -185,6 +198,9 @@ impl NamedStruct {
} }
} }
let text = string;
#(#text_field_branches)*
Ok(Self { Ok(Self {
#(#builder_field_builds),* #(#builder_field_builds),*
}) })
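The reason text is now accumulated into one buffer: with quick-xml reporting entity references as separate `GeneralRef` events, content such as `nice&gt;text` arrives as `Text("nice")`, `GeneralRef(gt)`, `Text("text")`, and only concatenation reproduces `nice>text`. A hedged, standalone sketch assuming quick-xml 0.38 and the entity-resolution APIs used in this changeset:

```rust
use quick_xml::Reader;
use quick_xml::events::Event;

// Collect the text content of a small XML snippet, resolving entity references.
fn text_content(xml: &str) -> Result<String, Box<dyn std::error::Error>> {
    let mut reader = Reader::from_str(xml);
    let mut out = String::new();
    loop {
        match reader.read_event()? {
            Event::Text(t) => out.push_str(&t.decode()?),
            Event::GeneralRef(gref) => {
                if let Some(c) = gref.resolve_char_ref()? {
                    // Numeric character reference, e.g. &#62;
                    out.push(c);
                } else if let Some(s) =
                    quick_xml::escape::resolve_xml_entity(&gref.xml_content()?)
                {
                    // Predefined entity, e.g. &gt;
                    out.push_str(s);
                }
            }
            Event::Eof => break,
            _ => {}
        }
    }
    Ok(out)
}

fn main() {
    assert_eq!(text_content("<okay>nice&gt;text</okay>").unwrap(), "nice>text");
}
```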

View File

@@ -49,7 +49,7 @@ impl<T: XmlRootTag + XmlDeserialize> XmlDocument for T {
         let (ns, name) = reader.resolve_element(start.name());
         let matches = match (Self::root_ns(), &ns, name) {
             // Wrong tag
-            (_, _, name) if name.as_ref() != Self::root_tag() => false,
+            (_, _, name) if name.as_ref() != Self::root_tag().as_bytes() => false,
             // Wrong namespace
             (Some(root_ns), ns, _) if &ResolveResult::Bound(root_ns) != ns => false,
             _ => true,
@@ -60,7 +60,7 @@ impl<T: XmlRootTag + XmlDeserialize> XmlDocument for T {
                 format!("{ns:?}"),
                 String::from_utf8_lossy(name.as_ref()).to_string(),
                 format!("{root_ns:?}"),
-                String::from_utf8_lossy(Self::root_tag()).to_string(),
+                Self::root_tag().to_owned(),
             ));
         };

View File

@@ -8,6 +8,8 @@ pub enum XmlError {
     #[error(transparent)]
     QuickXmlError(#[from] quick_xml::Error),
     #[error(transparent)]
+    QuickXmlEncodingError(#[from] quick_xml::encoding::EncodingError),
+    #[error(transparent)]
     QuickXmlAttrError(#[from] quick_xml::events::attributes::AttrError),
     #[error(transparent)]
     FromUtf8Error(#[from] FromUtf8Error),

View File

@@ -23,9 +23,9 @@ pub use xml_derive::PropName;
 pub use xml_derive::XmlRootTag;

 pub trait XmlRootTag {
-    fn root_tag() -> &'static [u8];
+    fn root_tag() -> &'static str;
     fn root_ns() -> Option<Namespace<'static>>;
-    fn root_ns_prefixes() -> HashMap<Namespace<'static>, &'static [u8]>;
+    fn root_ns_prefixes() -> HashMap<Namespace<'static>, &'static str>;
 }

 #[derive(Debug)]

View File

@@ -28,7 +28,7 @@ impl<'a> From<&'a Namespace<'a>> for NamespaceOwned {
 }

 impl NamespaceOwned {
-    pub fn as_ref(&self) -> Namespace {
+    pub fn as_ref(&self) -> Namespace<'_> {
         Namespace(&self.0)
     }
 }

View File

@@ -1,7 +1,7 @@
use crate::XmlRootTag; use crate::XmlRootTag;
use quick_xml::{ use quick_xml::{
events::{BytesStart, Event, attributes::Attribute}, events::{BytesStart, Event, attributes::Attribute},
name::{Namespace, QName}, name::Namespace,
}; };
use std::collections::HashMap; use std::collections::HashMap;
pub use xml_derive::XmlSerialize; pub use xml_derive::XmlSerialize;
@@ -10,8 +10,8 @@ pub trait XmlSerialize {
fn serialize( fn serialize(
&self, &self,
ns: Option<Namespace>, ns: Option<Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &HashMap<Namespace, &[u8]>, namespaces: &HashMap<Namespace, &str>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()>; ) -> std::io::Result<()>;
@@ -22,8 +22,8 @@ impl<T: XmlSerialize> XmlSerialize for Option<T> {
fn serialize( fn serialize(
&self, &self,
ns: Option<Namespace>, ns: Option<Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &HashMap<Namespace, &[u8]>, namespaces: &HashMap<Namespace, &str>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
if let Some(some) = self { if let Some(some) = self {
@@ -60,8 +60,8 @@ impl XmlSerialize for () {
fn serialize( fn serialize(
&self, &self,
ns: Option<Namespace>, ns: Option<Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &HashMap<Namespace, &[u8]>, namespaces: &HashMap<Namespace, &str>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
let prefix = ns let prefix = ns
@@ -69,20 +69,17 @@ impl XmlSerialize for () {
.unwrap_or(None) .unwrap_or(None)
.map(|prefix| { .map(|prefix| {
if !prefix.is_empty() { if !prefix.is_empty() {
[*prefix, b":"].concat() [*prefix, ":"].concat()
} else { } else {
Vec::new() String::new()
} }
}); });
let has_prefix = prefix.is_some(); let has_prefix = prefix.is_some();
let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat()); let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat());
let qname = tagname.as_ref().map(|tagname| QName(tagname)); if let Some(tagname) = tagname.as_ref() {
if let Some(qname) = &qname { let mut bytes_start = BytesStart::new(tagname);
let mut bytes_start = BytesStart::from(qname.to_owned()); if !has_prefix && let Some(ns) = &ns {
if !has_prefix { bytes_start.push_attribute((b"xmlns".as_ref(), ns.as_ref()));
if let Some(ns) = &ns {
bytes_start.push_attribute((b"xmlns".as_ref(), ns.as_ref()));
}
} }
writer.write_event(Event::Empty(bytes_start))?; writer.write_event(Event::Empty(bytes_start))?;
} }

View File

@@ -1,6 +1,6 @@
use crate::{XmlDeserialize, XmlError, XmlSerialize}; use crate::{XmlDeserialize, XmlError, XmlSerialize};
use quick_xml::events::{BytesEnd, BytesStart, BytesText, Event}; use quick_xml::events::{BytesEnd, BytesStart, BytesText, Event};
use quick_xml::name::{Namespace, QName}; use quick_xml::name::Namespace;
use std::collections::HashMap; use std::collections::HashMap;
use std::num::{ParseFloatError, ParseIntError}; use std::num::{ParseFloatError, ParseIntError};
use std::{convert::Infallible, io::BufRead}; use std::{convert::Infallible, io::BufRead};
@@ -77,20 +77,23 @@ impl<T: ValueDeserialize> XmlDeserialize for T {
         loop {
             match reader.read_event_into(&mut buf)? {
                 Event::Text(bytes_text) => {
-                    let text = bytes_text.unescape()?;
-                    if !string.is_empty() {
-                        // Content already written
-                        return Err(XmlError::UnsupportedEvent("content already written"));
-                    }
-                    string = text.to_string();
+                    let text = bytes_text.decode()?;
+                    string.push_str(&text);
                 }
                 Event::CData(cdata) => {
                     let text = String::from_utf8(cdata.to_vec())?;
-                    if !string.is_empty() {
-                        // Content already written
-                        return Err(XmlError::UnsupportedEvent("content already written"));
-                    }
-                    string = text;
+                    string.push_str(&text);
+                }
+                Event::GeneralRef(gref) => {
+                    if let Some(char) = gref.resolve_char_ref()? {
+                        string.push(char);
+                    } else if let Some(text) =
+                        quick_xml::escape::resolve_xml_entity(&gref.xml_content()?)
+                    {
+                        string.push_str(text);
+                    } else {
+                        return Err(XmlError::UnsupportedEvent("invalid XML ref"));
+                    }
                 }
                 Event::End(_) => break,
                 Event::Eof => return Err(XmlError::Eof),
@@ -107,8 +110,8 @@ impl<T: ValueSerialize> XmlSerialize for T {
fn serialize( fn serialize(
&self, &self,
ns: Option<Namespace>, ns: Option<Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &HashMap<Namespace, &[u8]>, namespaces: &HashMap<Namespace, &str>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
let prefix = ns let prefix = ns
@@ -116,26 +119,23 @@ impl<T: ValueSerialize> XmlSerialize for T {
.unwrap_or(None) .unwrap_or(None)
.map(|prefix| { .map(|prefix| {
if !prefix.is_empty() { if !prefix.is_empty() {
[*prefix, b":"].concat() [*prefix, ":"].concat()
} else { } else {
Vec::new() String::new()
} }
}); });
let has_prefix = prefix.is_some(); let has_prefix = prefix.is_some();
let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat()); let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat());
let qname = tagname.as_ref().map(|tagname| QName(tagname)); if let Some(tagname) = tagname.as_ref() {
if let Some(qname) = &qname { let mut bytes_start = BytesStart::new(tagname);
let mut bytes_start = BytesStart::from(qname.to_owned()); if !has_prefix && let Some(ns) = &ns {
if !has_prefix { bytes_start.push_attribute((b"xmlns".as_ref(), ns.as_ref()));
if let Some(ns) = &ns {
bytes_start.push_attribute((b"xmlns".as_ref(), ns.as_ref()));
}
} }
writer.write_event(Event::Start(bytes_start))?; writer.write_event(Event::Start(bytes_start))?;
} }
writer.write_event(Event::Text(BytesText::new(&self.serialize())))?; writer.write_event(Event::Text(BytesText::new(&self.serialize())))?;
if let Some(qname) = &qname { if let Some(tagname) = tagname {
writer.write_event(Event::End(BytesEnd::from(qname.to_owned())))?; writer.write_event(Event::End(BytesEnd::new(tagname)))?;
} }
Ok(()) Ok(())
} }

View File

@@ -1,9 +1,9 @@
use rustical_xml::{de::XmlDocument, XmlDeserialize, XmlRootTag}; use rustical_xml::{XmlDeserialize, XmlRootTag, de::XmlDocument};
#[test] #[test]
fn test_struct_tagged_enum() { fn test_struct_tagged_enum() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"propfind")] #[xml(root = "propfind")]
struct Propfind { struct Propfind {
prop: Prop, prop: Prop,
} }
@@ -58,7 +58,7 @@ fn test_struct_tagged_enum() {
#[test] #[test]
fn test_tagged_enum_complex() { fn test_tagged_enum_complex() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"propfind")] #[xml(root = "propfind")]
struct Propfind { struct Propfind {
prop: PropStruct, prop: PropStruct,
} }
@@ -116,7 +116,7 @@ fn test_enum_document() {
#[test] #[test]
fn test_untagged_enum() { fn test_untagged_enum() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
prop: PropElement, prop: PropElement,
} }

View File

@@ -6,7 +6,7 @@ use std::collections::HashSet;
#[test] #[test]
fn test_struct_text_field() { fn test_struct_text_field() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(ty = "text")] #[xml(ty = "text")]
text: String, text: String,
@@ -27,7 +27,7 @@ fn test_struct_text_field() {
#[test] #[test]
fn test_struct_document() { fn test_struct_document() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
child: Child, child: Child,
} }
@@ -52,9 +52,9 @@ fn test_struct_document() {
#[test] #[test]
fn test_struct_rename_field() { fn test_struct_rename_field() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(rename = b"ok-wow")] #[xml(rename = "ok-wow")]
child: Child, child: Child,
} }
@@ -78,7 +78,7 @@ fn test_struct_rename_field() {
#[test] #[test]
fn test_struct_optional_field() { fn test_struct_optional_field() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
child: Option<Child>, child: Option<Child>,
} }
@@ -96,9 +96,9 @@ fn test_struct_optional_field() {
#[test] #[test]
fn test_struct_vec() { fn test_struct_vec() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(rename = b"child", flatten)] #[xml(rename = "child", flatten)]
children: Vec<Child>, children: Vec<Child>,
} }
@@ -124,9 +124,9 @@ fn test_struct_vec() {
#[test] #[test]
fn test_struct_set() { fn test_struct_set() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(rename = b"child", flatten)] #[xml(rename = "child", flatten)]
children: HashSet<Child>, children: HashSet<Child>,
} }
@@ -154,7 +154,7 @@ fn test_struct_ns() {
const NS_HELLO: Namespace = Namespace(b"hello"); const NS_HELLO: Namespace = Namespace(b"hello");
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(ns = "NS_HELLO")] #[xml(ns = "NS_HELLO")]
child: (), child: (),
@@ -169,7 +169,7 @@ fn test_struct_attr() {
const NS_HELLO: Namespace = Namespace(b"hello"); const NS_HELLO: Namespace = Namespace(b"hello");
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(ns = "NS_HELLO")] #[xml(ns = "NS_HELLO")]
child: (), child: (),
@@ -196,7 +196,7 @@ fn test_struct_attr() {
#[test] #[test]
fn test_struct_generics() { fn test_struct_generics() {
#[derive(XmlDeserialize, XmlRootTag)] #[derive(XmlDeserialize, XmlRootTag)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document<T: XmlDeserialize> { struct Document<T: XmlDeserialize> {
#[allow(dead_code)] #[allow(dead_code)]
child: T, child: T,
@@ -217,7 +217,7 @@ fn test_struct_generics() {
#[test] #[test]
fn test_struct_unparsed() { fn test_struct_unparsed() {
#[derive(XmlDeserialize, XmlRootTag)] #[derive(XmlDeserialize, XmlRootTag)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[allow(dead_code)] #[allow(dead_code)]
child: Unparsed, child: Unparsed,
@@ -238,7 +238,7 @@ fn test_struct_unparsed() {
#[test] #[test]
fn test_xml_values() { fn test_xml_values() {
#[derive(XmlDeserialize, XmlRootTag, PartialEq, Debug)] #[derive(XmlDeserialize, XmlRootTag, PartialEq, Debug)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
href: String, href: String,
} }
@@ -262,7 +262,7 @@ fn test_xml_values() {
#[test] #[test]
fn test_xml_cdata() { fn test_xml_cdata() {
#[derive(XmlDeserialize, XmlRootTag, PartialEq, Debug)] #[derive(XmlDeserialize, XmlRootTag, PartialEq, Debug)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(ty = "text")] #[xml(ty = "text")]
hello: String, hello: String,
@@ -275,7 +275,7 @@ fn test_xml_cdata() {
<document> <document>
<![CDATA[some text]]> <![CDATA[some text]]>
<href><![CDATA[some stuff]]></href> <href><![CDATA[some stuff]]></href>
<okay>&gt;</okay> <okay>nice&gt;text</okay>
</document> </document>
"#, "#,
) )
@@ -285,15 +285,29 @@ fn test_xml_cdata() {
Document { Document {
hello: "some text".to_owned(), hello: "some text".to_owned(),
href: "some stuff".to_owned(), href: "some stuff".to_owned(),
okay: ">".to_owned() okay: "nice>text".to_owned()
} }
); );
} }
#[test]
fn test_quickxml_bytesref() {
let gt = quick_xml::events::BytesRef::new("gt");
assert!(!gt.is_char_ref());
let result = if !gt.is_char_ref() {
quick_xml::escape::resolve_xml_entity(&gt.xml_content().unwrap())
.unwrap()
.to_string()
} else {
gt.xml_content().unwrap().to_string()
};
assert_eq!(result, ">");
}
#[test] #[test]
fn test_struct_xml_decl() { fn test_struct_xml_decl() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
child: Child, child: Child,
} }
@@ -307,14 +321,14 @@ fn test_struct_xml_decl() {
let doc = Document::parse_str( let doc = Document::parse_str(
r#" r#"
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<document><child>Hello!</child></document>"#, <document><child>Hello!&amp;</child></document>"#,
) )
.unwrap(); .unwrap();
assert_eq!( assert_eq!(
doc, doc,
Document { Document {
child: Child { child: Child {
text: "Hello!".to_owned() text: "Hello!&".to_owned()
} }
} }
); );
@@ -323,7 +337,7 @@ fn test_struct_xml_decl() {
#[test] #[test]
fn test_struct_tuple() { fn test_struct_tuple() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
child: Child, child: Child,
} }
@@ -348,7 +362,7 @@ fn test_struct_tuple() {
#[test] #[test]
fn test_struct_untagged_ns() { fn test_struct_untagged_ns() {
#[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, XmlDeserialize, XmlRootTag, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(ty = "untagged")] #[xml(ty = "untagged")]
child: Child, child: Child,

View File

@@ -29,7 +29,7 @@ enum CalendarProp {
#[allow(dead_code)] #[allow(dead_code)]
Getcontenttype(&'static str), Getcontenttype(&'static str),
#[xml(ns = "NS_DAV", rename = b"principal-URL")] #[xml(ns = "NS_DAV", rename = "principal-URL")]
#[allow(dead_code)] #[allow(dead_code)]
PrincipalUrl, PrincipalUrl,
#[allow(dead_code)] #[allow(dead_code)]

View File

@@ -36,7 +36,7 @@ fn test_propertyupdate() {
} }
#[derive(XmlDeserialize, XmlRootTag)] #[derive(XmlDeserialize, XmlRootTag)]
#[xml(root = b"propertyupdate")] #[xml(root = "propertyupdate")]
struct PropertyupdateElement<T: XmlDeserialize> { struct PropertyupdateElement<T: XmlDeserialize> {
#[xml(ty = "untagged", flatten)] #[xml(ty = "untagged", flatten)]
#[allow(dead_code)] #[allow(dead_code)]

View File

@@ -3,7 +3,7 @@ use rustical_xml::{XmlRootTag, XmlSerialize, XmlSerializeRoot};
#[test] #[test]
fn test_struct_value_tagged() { fn test_struct_value_tagged() {
#[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)] #[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)]
#[xml(root = b"propfind")] #[xml(root = "propfind")]
struct Document { struct Document {
prop: Prop, prop: Prop,
} }

View File

@@ -7,7 +7,7 @@ use xml_derive::XmlDeserialize;
#[test] #[test]
fn test_struct_document() { fn test_struct_document() {
#[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)] #[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
child: Child, child: Child,
} }
@@ -30,7 +30,7 @@ fn test_struct_document() {
#[test] #[test]
fn test_struct_untagged_attr() { fn test_struct_untagged_attr() {
#[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)] #[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(ty = "attr")] #[xml(ty = "attr")]
name: String, name: String,
@@ -57,7 +57,7 @@ fn test_struct_untagged_attr() {
#[test] #[test]
fn test_struct_value_tagged() { fn test_struct_value_tagged() {
#[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)] #[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
href: String, href: String,
num: usize, num: usize,
@@ -82,7 +82,7 @@ fn test_struct_value_tagged() {
#[test] #[test]
fn test_struct_value_untagged() { fn test_struct_value_untagged() {
#[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)] #[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(ty = "untagged")] #[xml(ty = "untagged")]
href: String, href: String,
@@ -103,7 +103,7 @@ fn test_struct_value_untagged() {
#[test] #[test]
fn test_struct_vec() { fn test_struct_vec() {
#[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)] #[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(flatten)] #[xml(flatten)]
href: Vec<String>, href: Vec<String>,
@@ -127,7 +127,7 @@ fn test_struct_vec() {
#[test] #[test]
fn test_struct_serialize_with() { fn test_struct_serialize_with() {
#[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)] #[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(serialize_with = "serialize_href")] #[xml(serialize_with = "serialize_href")]
href: String, href: String,
@@ -136,8 +136,8 @@ fn test_struct_serialize_with() {
fn serialize_href( fn serialize_href(
val: &str, val: &str,
ns: Option<Namespace>, ns: Option<Namespace>,
tag: Option<&[u8]>, tag: Option<&str>,
namespaces: &HashMap<Namespace, &[u8]>, namespaces: &HashMap<Namespace, &str>,
writer: &mut Writer<&mut Vec<u8>>, writer: &mut Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
val.to_uppercase().serialize(ns, tag, namespaces, writer) val.to_uppercase().serialize(ns, tag, namespaces, writer)
@@ -160,7 +160,7 @@ fn test_struct_serialize_with() {
#[test] #[test]
fn test_struct_tag_list() { fn test_struct_tag_list() {
#[derive(Debug, XmlRootTag, XmlSerialize, XmlDeserialize, PartialEq)] #[derive(Debug, XmlRootTag, XmlSerialize, XmlDeserialize, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(ty = "untagged", flatten)] #[xml(ty = "untagged", flatten)]
tags: Vec<Tag>, tags: Vec<Tag>,
@@ -194,9 +194,9 @@ fn test_struct_ns() {
const NS: Namespace = quick_xml::name::Namespace(b"NS:TEST:"); const NS: Namespace = quick_xml::name::Namespace(b"NS:TEST:");
#[derive(Debug, XmlRootTag, XmlSerialize)] #[derive(Debug, XmlRootTag, XmlSerialize)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
#[xml(ns = "NS", rename = b"okay")] #[xml(ns = "NS", rename = "okay")]
child: String, child: String,
} }
@@ -210,7 +210,7 @@ fn test_struct_ns() {
#[test] #[test]
fn test_struct_tuple() { fn test_struct_tuple() {
#[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)] #[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document { struct Document {
child: Child, child: Child,
} }
@@ -230,8 +230,8 @@ fn test_tuple_struct() {
const NS: Namespace = quick_xml::name::Namespace(b"NS:TEST:"); const NS: Namespace = quick_xml::name::Namespace(b"NS:TEST:");
#[derive(Debug, XmlRootTag, XmlSerialize)] #[derive(Debug, XmlRootTag, XmlSerialize)]
#[xml(root = b"document")] #[xml(root = "document")]
struct Document(#[xml(ns = "NS", rename = b"okay")] String); struct Document(#[xml(ns = "NS", rename = "okay")] String);
Document("hello!".to_string()) Document("hello!".to_string())
.serialize_to_string() .serialize_to_string()
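
To make the new `serialize_with` contract concrete: a custom serializer now receives the tag and the namespace prefix map as `&str` rather than `&[u8]`. The following is a hedged, self-contained sketch modeled on `serialize_href` from the test above; the struct, function name, and imports are illustrative and assume the same `rustical_xml` items the surrounding tests use.

```rust
use std::collections::HashMap;

use quick_xml::Writer;
use quick_xml::name::Namespace;
use rustical_xml::{XmlRootTag, XmlSerialize, XmlSerializeRoot};

// Custom serializer under the new signature: tag and namespace map are &str now.
fn serialize_upper(
    val: &str,
    ns: Option<Namespace>,
    tag: Option<&str>,
    namespaces: &HashMap<Namespace, &str>,
    writer: &mut Writer<&mut Vec<u8>>,
) -> std::io::Result<()> {
    // Delegate to the default String serialization after transforming the value.
    val.to_uppercase().serialize(ns, tag, namespaces, writer)
}

#[derive(Debug, XmlRootTag, XmlSerialize, PartialEq)]
#[xml(root = "document")]
struct Document {
    #[xml(serialize_with = "serialize_upper")]
    href: String,
}

fn main() {
    let document = Document { href: "hello".to_owned() };
    // Expected to produce something like <document><href>HELLO</href></document>.
    let _xml = document.serialize_to_string();
}
```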

View File

@@ -3,10 +3,10 @@
a CalDAV/CardDAV server a CalDAV/CardDAV server
!!! warning !!! warning
RustiCal is under **active development**! RustiCal is under **active development**!
While I've been successfully using RustiCal productively for a few weeks now, While I've been successfully using RustiCal productively for some months now and there seems to be a growing user base,
you'd still be one of the first testers so expect bugs and rough edges. you'd still be one of the first testers so expect bugs and rough edges.
If you still want to play around with it in its current state, absolutely feel free to do so and to open up an issue if something is not working. :) If you still want to use it in its current state, absolutely feel free to do so and to open up an issue if something is not working. :)
[Installation](installation/index.md){ .md-button } [Installation](installation/index.md){ .md-button }
@@ -14,6 +14,7 @@ a CalDAV/CardDAV server
- easy to backup, everything saved in one SQLite database - easy to backup, everything saved in one SQLite database
- also export feature in the frontend - also export feature in the frontend
- Import your existing calendars in the frontend
- **[WebDAV Push](https://github.com/bitfireAT/webdav-push/)** support, so near-instant synchronisation to DAVx5 - **[WebDAV Push](https://github.com/bitfireAT/webdav-push/)** support, so near-instant synchronisation to DAVx5
- lightweight (the container image contains only one binary) - lightweight (the container image contains only one binary)
- adequately fast (I'd love to say blazingly fast™ :fire: but I don't have any benchmarks) - adequately fast (I'd love to say blazingly fast™ :fire: but I don't have any benchmarks)

View File

@@ -9,7 +9,7 @@ docker run \
-p 4000:4000 \ -p 4000:4000 \
-v YOUR_DATA_DIR:/var/lib/rustical/ \ -v YOUR_DATA_DIR:/var/lib/rustical/ \
-v OPTIONAL_YOUR_CONFIG_TOML:/etc/rustical/config.toml \ # (1)! -v OPTIONAL_YOUR_CONFIG_TOML:/etc/rustical/config.toml \ # (1)!
-e RUSTICAL__CONFIG_OPTION="asd" \ # (2)! -e RUSTICAL_CONFIG_OPTION="asd" \ # (2)!
ghcr.io/lennart-k/rustical ghcr.io/lennart-k/rustical
``` ```

View File

@@ -0,0 +1,11 @@
# Notes
## Kubernetes setup
If you set up RustiCal with Kubernetes and call the deployment `rustical`,
Kubernetes will by default expose some environment variables starting with `RUSTICAL_`
that will be rejected by RustiCal.
For now, the solutions are either not calling the deployment `rustical` or setting
`enableServiceLinks: false`, see <https://kubernetes.io/docs/tutorials/services/connect-applications-service/#accessing-the-service>.
For the corresponding issue see <https://github.com/lennart-k/rustical/issues/122>
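
Not from the repository, but a hedged sketch of the workaround described in the note above; the deployment name, labels, and image reference are placeholders:

```yaml
apiVersion: apps/v1
kind: Deployment
metadata:
  name: rustical
spec:
  replicas: 1
  selector:
    matchLabels:
      app: rustical
  template:
    metadata:
      labels:
        app: rustical
    spec:
      # Prevents Kubernetes from injecting RUSTICAL_* service-link variables
      # that RustiCal would reject on startup.
      enableServiceLinks: false
      containers:
        - name: rustical
          image: ghcr.io/lennart-k/rustical
```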

View File

@@ -68,6 +68,7 @@ nav:
- Installation: - Installation:
- installation/index.md - installation/index.md
- Configuration: installation/configuration.md - Configuration: installation/configuration.md
- Notes: installation/notes.md
- Client Setup: setup/client.md - Client Setup: setup/client.md
- OpenID Connect: setup/oidc.md - OpenID Connect: setup/oidc.md
- Developers: - Developers:

View File

@@ -1,7 +1,7 @@
use crate::config::NextcloudLoginConfig; use crate::config::NextcloudLoginConfig;
use axum::Router; use axum::Router;
use axum::body::{Body, HttpBody}; use axum::body::{Body, HttpBody};
use axum::extract::Request; use axum::extract::{DefaultBodyLimit, Request};
use axum::middleware::Next; use axum::middleware::Next;
use axum::response::{Redirect, Response}; use axum::response::{Redirect, Response};
use axum::routing::{any, options}; use axum::routing::{any, options};
@@ -39,11 +39,11 @@ pub fn make_app<AS: AddressbookStore, CS: CalendarStore, S: SubscriptionStore>(
nextcloud_login_config: NextcloudLoginConfig, nextcloud_login_config: NextcloudLoginConfig,
dav_push_enabled: bool, dav_push_enabled: bool,
session_cookie_samesite_strict: bool, session_cookie_samesite_strict: bool,
payload_limit_mb: usize,
) -> Router<()> { ) -> Router<()> {
let combined_cal_store = Arc::new(CombinedCalendarStore::new( let birthday_store = Arc::new(ContactBirthdayStore::new(addr_store.clone()));
cal_store.clone(), let combined_cal_store =
ContactBirthdayStore::new(addr_store.clone()).into(), Arc::new(CombinedCalendarStore::new(cal_store.clone()).with_store(birthday_store));
));
let mut router = Router::new() let mut router = Router::new()
.merge(caldav_router( .merge(caldav_router(
@@ -203,4 +203,5 @@ pub fn make_app<AS: AddressbookStore, CS: CalendarStore, S: SubscriptionStore>(
response response
}, },
)) ))
.layer(DefaultBodyLimit::max(payload_limit_mb * 1000 * 1000))
} }
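
As a stand-alone illustration of the new body-size plumbing: only `DefaultBodyLimit::max` and the megabyte conversion mirror the diff, while the route and handler below are hypothetical.

```rust
use axum::extract::DefaultBodyLimit;
use axum::{Router, routing::post};

// Sketch: apply a configurable request body limit (in MB) to a router,
// analogous to the layer added at the end of make_app.
fn app(payload_limit_mb: usize) -> Router {
    Router::new()
        .route("/upload", post(|body: String| async move { body.len().to_string() }))
        .layer(DefaultBodyLimit::max(payload_limit_mb * 1000 * 1000))
}

fn main() {
    // A 4 MB limit, matching the new default.
    let _router = app(4);
}
```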

View File

@@ -8,6 +8,7 @@ pub struct HttpConfig {
pub host: String, pub host: String,
pub port: u16, pub port: u16,
pub session_cookie_samesite_strict: bool, pub session_cookie_samesite_strict: bool,
pub payload_limit_mb: usize,
} }
impl Default for HttpConfig { impl Default for HttpConfig {
@@ -16,6 +17,7 @@ impl Default for HttpConfig {
host: "0.0.0.0".to_owned(), host: "0.0.0.0".to_owned(),
port: 4000, port: 4000,
session_cookie_samesite_strict: false, session_cookie_samesite_strict: false,
payload_limit_mb: 4,
} }
} }
} }
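
A hedged example of how this could look in `config.toml`; the `[http]` section name and the surrounding keys are inferred from the `HttpConfig` fields, not copied from the project docs:

```toml
[http]
host = "0.0.0.0"
port = 4000
session_cookie_samesite_strict = false
# Maximum HTTP payload size in megabytes (default: 4)
payload_limit_mb = 8
```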

View File

@@ -117,6 +117,7 @@ async fn main() -> Result<()> {
config.nextcloud_login.clone(), config.nextcloud_login.clone(),
config.dav_push.enabled, config.dav_push.enabled,
config.http.session_cookie_samesite_strict, config.http.session_cookie_samesite_strict,
config.http.payload_limit_mb,
); );
let app = ServiceExt::<Request>::into_make_service( let app = ServiceExt::<Request>::into_make_service(
NormalizePathLayer::trim_trailing_slash().layer(app), NormalizePathLayer::trim_trailing_slash().layer(app),