Compare commits


11 Commits

Author          SHA1        Message                                                                          Date
Lennart         99287f85f4  version 0.12.0                                                                   2026-01-19 15:48:56 +01:00
Lennart         df3143cd4c  Fix status code for failed preconditions                                        2026-01-19 15:37:41 +01:00
Lennart Kämmle  92a3418f8e  Merge pull request #164 from lennart-k/feat/ical-rewrite (ical-rs overhaul)     2026-01-19 15:14:14 +01:00
Lennart         ea2f841269  ical-rs: Pin version to Git commit                                               2026-01-19 15:04:54 +01:00
Lennart         15e1509fe3  sqlite_store: Add option to skip broken objects and add validation on start-up  2026-01-19 14:48:21 +01:00
Lennart         0eef4ffabf  Add test for uploading invalid calendar object and fix precondition             2026-01-19 13:40:54 +01:00
Lennart         303f9aff68  Remove IcalError from caldav/carddav since it had an ambiguous status code      2026-01-19 12:51:51 +01:00
Lennart         3460a2821e  dav: Check Host matching for MV,COPY                                             2026-01-19 12:37:35 +01:00
Lennart         f73658b32f  Re-enable calendar-query test and fix calendar expansion                        2026-01-19 12:09:34 +01:00
Lennart K       7e099bcd6e  Merge branch 'main' into feat/ical-rewrite                                       2026-01-16 16:47:17 +01:00
Lennart K       dde05d2f45  Workflow: Publish container images for feature branches too                     2026-01-16 16:29:38 +01:00
29 changed files with 435 additions and 271 deletions

View File

@@ -2,7 +2,10 @@ name: Docker
 on:
   push:
-    branches: ["main", "dev"]
+    branches:
+      - main
+      - dev
+      - feat/*
   release:
     types: ["published"]
@@ -45,7 +48,8 @@ jobs:
       with:
         images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
         tags: |
-          type=ref,event=branch
+          ${{ github.ref_name == 'main' && 'type=ref,event=branch' || '' }}
+          type=ref,event=branch,prefix=br-
          type=ref,event=pr
          type=semver,pattern={{version}}
          type=semver,pattern={{major}}.{{minor}}

Cargo.lock (generated, 103 changed lines)
View File

@@ -573,9 +573,9 @@
 [[package]]
 name = "cc"
-version = "1.2.52"
+version = "1.2.53"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cd4932aefd12402b36c60956a4fe0035421f544799057659ff86f923657aada3"
+checksum = "755d2fce177175ffca841e9a06afdb2c4ab0f593d53b4dee48147dfaade85932"
 dependencies = [
  "find-msvc-tools",
  "shlex",
@@ -1241,9 +1241,9 @@ dependencies = [
 [[package]]
 name = "find-msvc-tools"
-version = "0.1.7"
+version = "0.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f449e6c6c08c865631d4890cfacf252b3d396c9bcc83adb6623cdb02a8336c41"
+checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db"
 [[package]]
 name = "flume"
@@ -1771,7 +1771,7 @@ dependencies = [
 [[package]]
 name = "ical"
 version = "0.12.0-dev"
-source = "git+https://github.com/lennart-k/ical-rs?branch=dev#5e61c25646c3785448d349e7d18b2833fc483c53"
+source = "git+https://github.com/lennart-k/ical-rs?rev=f1ad6456fd6cbd1e6da095297febddd2cfe61422#f1ad6456fd6cbd1e6da095297febddd2cfe61422"
 dependencies = [
  "chrono",
  "chrono-tz",
@@ -1781,7 +1781,7 @@ dependencies = [
  "phf 0.13.1",
  "regex",
  "rrule",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
 ]
 [[package]]
@@ -2118,7 +2118,7 @@ dependencies = [
  "matchit 0.9.1",
  "percent-encoding",
  "serde",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
 ]
 [[package]]
@@ -2370,7 +2370,7 @@ dependencies = [
  "futures-sink",
  "js-sys",
  "pin-project-lite",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tracing",
 ]
@@ -2400,7 +2400,7 @@ dependencies = [
  "opentelemetry_sdk",
  "prost",
  "reqwest",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tonic",
  "tracing",
@@ -2437,7 +2437,7 @@ dependencies = [
  "opentelemetry",
  "percent-encoding",
  "rand 0.9.2",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tokio-stream",
 ]
@@ -2891,7 +2891,7 @@ dependencies = [
  "rustc-hash",
  "rustls",
  "socket2",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tracing",
  "web-time",
@@ -2912,7 +2912,7 @@ dependencies = [
  "rustls",
  "rustls-pki-types",
  "slab",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tinyvec",
  "tracing",
  "web-time",
@@ -3187,7 +3187,7 @@ dependencies = [
  "chrono-tz",
  "log",
  "regex",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
 ]
 [[package]]
@@ -3317,7 +3317,7 @@ dependencies = [
 [[package]]
 name = "rustical"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "anyhow",
  "argon2",
@@ -3349,6 +3349,7 @@ dependencies = [
  "rustical_store",
  "rustical_store_sqlite",
  "serde",
+ "similar-asserts",
  "sqlx",
  "tokio",
  "toml 0.9.11+spec-1.1.0",
@@ -3363,7 +3364,7 @@ dependencies = [
 [[package]]
 name = "rustical_caldav"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "async-std",
  "async-trait",
@@ -3393,7 +3394,7 @@ dependencies = [
  "similar-asserts",
  "strum",
  "strum_macros",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tower",
  "tower-http",
@@ -3405,7 +3406,7 @@ dependencies = [
 [[package]]
 name = "rustical_carddav"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "async-trait",
  "axum",
@@ -3428,7 +3429,7 @@ dependencies = [
  "serde",
  "strum",
  "strum_macros",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tower",
  "tower-http",
@@ -3439,7 +3440,7 @@ dependencies = [
 [[package]]
 name = "rustical_dav"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "async-trait",
  "axum",
@@ -3457,7 +3458,7 @@ dependencies = [
  "rustical_xml",
  "serde",
  "strum",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tower",
  "tracing",
@@ -3465,7 +3466,7 @@ dependencies = [
 [[package]]
 name = "rustical_dav_push"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "async-trait",
  "axum",
@@ -3483,14 +3484,14 @@ dependencies = [
  "rustical_store",
  "rustical_xml",
  "serde",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tracing",
 ]
 [[package]]
 name = "rustical_frontend"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "askama",
  "askama_web",
@@ -3513,7 +3514,7 @@ dependencies = [
  "rustical_store",
  "serde",
  "serde_json",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tower",
  "tower-http",
@@ -3526,7 +3527,7 @@ dependencies = [
 [[package]]
 name = "rustical_ical"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "axum",
  "chrono",
@@ -3540,12 +3541,12 @@ dependencies = [
  "serde",
  "sha2",
  "similar-asserts",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
 ]
 [[package]]
 name = "rustical_oidc"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "async-trait",
  "axum",
@@ -3554,14 +3555,14 @@ dependencies = [
  "openidconnect",
  "reqwest",
  "serde",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tower-sessions",
  "tracing",
 ]
 [[package]]
 name = "rustical_store"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "anyhow",
  "async-trait",
@@ -3584,7 +3585,7 @@ dependencies = [
  "rustical_xml",
  "serde",
  "sha2",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tower",
  "tower-sessions",
@@ -3594,7 +3595,7 @@ dependencies = [
 [[package]]
 name = "rustical_store_sqlite"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "async-trait",
  "chrono",
@@ -3611,7 +3612,7 @@ dependencies = [
  "serde",
  "sha2",
  "sqlx",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tracing",
  "uuid",
@@ -3619,10 +3620,10 @@ dependencies = [
 [[package]]
 name = "rustical_xml"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "quick-xml",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "xml_derive",
 ]
@@ -3655,9 +3656,9 @@ dependencies = [
 [[package]]
 name = "rustls-pki-types"
-version = "1.13.3"
+version = "1.14.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4910321ebe4151be888e35fe062169554e74aad01beafed60410131420ceffbc"
+checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd"
 dependencies = [
  "web-time",
  "zeroize",
@@ -3665,9 +3666,9 @@ dependencies = [
 [[package]]
 name = "rustls-webpki"
-version = "0.103.8"
+version = "0.103.9"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52"
+checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53"
 dependencies = [
  "ring",
  "rustls-pki-types",
@@ -4047,7 +4048,7 @@ dependencies = [
  "serde_json",
  "sha2",
  "smallvec",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tokio",
  "tokio-stream",
  "tracing",
@@ -4131,7 +4132,7 @@ dependencies = [
  "smallvec",
  "sqlx-core",
  "stringprep",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tracing",
  "uuid",
  "whoami",
@@ -4170,7 +4171,7 @@ dependencies = [
  "smallvec",
  "sqlx-core",
  "stringprep",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tracing",
  "uuid",
  "whoami",
@@ -4196,7 +4197,7 @@ dependencies = [
  "serde",
  "serde_urlencoded",
  "sqlx-core",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "tracing",
  "url",
  "uuid",
@@ -4315,11 +4316,11 @@ dependencies = [
 [[package]]
 name = "thiserror"
-version = "2.0.17"
+version = "2.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
+checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4"
 dependencies = [
- "thiserror-impl 2.0.17",
+ "thiserror-impl 2.0.18",
 ]
 [[package]]
@@ -4335,9 +4336,9 @@ dependencies = [
 [[package]]
 name = "thiserror-impl"
-version = "2.0.17"
+version = "2.0.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
+checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -4719,7 +4720,7 @@ dependencies = [
  "rand 0.8.5",
  "serde",
  "serde_json",
- "thiserror 2.0.17",
+ "thiserror 2.0.18",
  "time",
  "tokio",
  "tracing",
@@ -5441,7 +5442,7 @@
 [[package]]
 name = "xml_derive"
-version = "0.11.17"
+version = "0.12.0"
 dependencies = [
  "darling 0.23.0",
  "heck",
@@ -5562,6 +5563,6 @@ dependencies = [
 [[package]]
 name = "zmij"
-version = "1.0.14"
+version = "1.0.15"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "bd8f3f50b848df28f887acb68e41201b5aea6bc8a8dacc00fb40635ff9a72fea"
+checksum = "94f63c051f4fe3c1509da62131a678643c5b6fbdc9273b2b79d4378ebda003d2"

View File

@@ -2,7 +2,7 @@
 members = ["crates/*"]
 [workspace.package]
-version = "0.11.17"
+version = "0.12.0"
 rust-version = "1.92"
 edition = "2024"
 description = "A CalDAV server"
@@ -107,7 +107,7 @@ strum = "0.27"
 strum_macros = "0.27"
 serde_json = { version = "1.0", features = ["raw_value"] }
 sqlx-sqlite = { version = "0.8", features = ["bundled"] }
-ical = { git = "https://github.com/lennart-k/ical-rs", branch = "dev", features = [
+ical = { git = "https://github.com/lennart-k/ical-rs", rev = "f1ad6456fd6cbd1e6da095297febddd2cfe61422", features = [
   "chrono-tz",
 ] }
 toml = "0.9"
@@ -153,6 +153,7 @@ criterion = { version = "0.8", features = ["async_tokio"] }
 rstest.workspace = true
 rustical_store_sqlite = { workspace = true, features = ["test"] }
 insta.workspace = true
+similar-asserts.workspace = true
 [dependencies]
 rustical_store.workspace = true

View File

@@ -26,7 +26,10 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
     }
     let parser = ical::IcalParser::from_slice(body.as_bytes());
-    let mut cal = parser.expect_one()?.mutable();
+    let mut cal = match parser.expect_one() {
+        Ok(cal) => cal.mutable(),
+        Err(err) => return Ok((StatusCode::BAD_REQUEST, err.to_string()).into_response()),
+    };
     // Extract calendar metadata
     let displayname = cal
@@ -67,7 +70,10 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
         cal_components.push(CalendarObjectType::Todo);
     }
-    let objects = cal.into_objects()?.into_iter().map(Into::into).collect();
+    let objects = match cal.into_objects() {
+        Ok(objects) => objects.into_iter().map(Into::into).collect(),
+        Err(err) => return Ok((StatusCode::BAD_REQUEST, err.to_string()).into_response()),
+    };
     let new_cal = Calendar {
         principal,
         id: cal_id,

View File

@@ -11,7 +11,7 @@ use rustical_ical::CalendarObject;
 use rustical_store::CalendarStore;
 use rustical_store::auth::Principal;
 use std::str::FromStr;
-use tracing::{debug, instrument};
+use tracing::{instrument, warn};
 #[instrument(skip(cal_store))]
 pub async fn get_event<C: CalendarStore>(
@@ -94,9 +94,13 @@ pub async fn put_event<C: CalendarStore>(
         true
     };
-    let Ok(object) = CalendarObject::from_ics(body.clone()) else {
-        debug!("invalid calendar data:\n{body}");
+    let object = match CalendarObject::from_ics(body.clone()) {
+        Ok(object) => object,
+        Err(err) => {
+            warn!("invalid calendar data:\n{body}");
+            warn!("{err}");
             return Err(Error::PreconditionFailed(Precondition::ValidCalendarData));
+        }
     };
     let etag = object.get_etag();
     cal_store

View File

@@ -23,7 +23,7 @@ impl IntoResponse for Precondition {
         if let Err(err) = error.serialize_root(&mut writer) {
             return rustical_dav::Error::from(err).into_response();
         }
-        let mut res = Response::builder().status(StatusCode::PRECONDITION_FAILED);
+        let mut res = Response::builder().status(StatusCode::FORBIDDEN);
         res.headers_mut().unwrap().typed_insert(ContentType::xml());
         res.body(Body::from(output)).unwrap()
     }
@@ -52,9 +52,6 @@ pub enum Error {
     #[error(transparent)]
     XmlDecodeError(#[from] rustical_xml::XmlError),
-    #[error(transparent)]
-    IcalError(#[from] rustical_ical::Error),
     #[error(transparent)]
     PreconditionFailed(Precondition),
 }
@@ -75,19 +72,20 @@ impl Error {
             Self::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
             Self::ChronoParseError(_) | Self::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
             Self::NotFound => StatusCode::NOT_FOUND,
-            // TODO: Can also be Bad Request, if it's used input
-            Self::IcalError(_err) => StatusCode::INTERNAL_SERVER_ERROR,
-            Self::PreconditionFailed(_err) => StatusCode::PRECONDITION_FAILED,
+            // The correct status code for a failed precondition is not PreconditionFailed but
+            // Forbidden (or Conflict):
+            // https://datatracker.ietf.org/doc/html/rfc4791#section-1.3
+            Self::PreconditionFailed(_err) => StatusCode::FORBIDDEN,
         }
     }
 }
 impl IntoResponse for Error {
     fn into_response(self) -> axum::response::Response {
-        if matches!(
-            self.status_code(),
-            StatusCode::INTERNAL_SERVER_ERROR | StatusCode::PRECONDITION_FAILED
-        ) {
+        if let Self::PreconditionFailed(precondition) = self {
+            return precondition.into_response();
+        }
+        if matches!(self.status_code(), StatusCode::INTERNAL_SERVER_ERROR) {
             error!("{self}");
         }
         (self.status_code(), self.to_string()).into_response()

View File

@@ -103,7 +103,10 @@ pub async fn put_object<AS: AddressbookStore>(
         true
     };
-    let object = AddressObject::from_vcf(body)?;
+    let object = match AddressObject::from_vcf(body) {
+        Ok(object) => object,
+        Err(err) => return Ok((StatusCode::BAD_REQUEST, err.to_string()).into_response()),
+    };
     let etag = object.get_etag();
     addr_store
         .put_object(&principal, &addressbook_id, &object_id, object, overwrite)

View File

@@ -23,9 +23,6 @@ pub enum Error {
     #[error(transparent)]
     XmlDecodeError(#[from] rustical_xml::XmlError),
-    #[error(transparent)]
-    IcalError(#[from] rustical_ical::Error),
 }
 impl Error {
@@ -43,8 +40,6 @@ impl Error {
             Self::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
             Self::ChronoParseError(_) | Self::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
             Self::NotFound => StatusCode::NOT_FOUND,
-            // TODO: Can also be Bad Request, if it's used input
-            Self::IcalError(_err) => StatusCode::INTERNAL_SERVER_ERROR,
         }
     }
 }

View File

@@ -51,19 +51,18 @@ impl Error {
                 _ => StatusCode::BAD_REQUEST,
             },
             Self::PropReadOnly => StatusCode::CONFLICT,
-            Self::PreconditionFailed => StatusCode::PRECONDITION_FAILED,
             Self::InternalError | Self::IOError(_) => StatusCode::INTERNAL_SERVER_ERROR,
-            Self::Forbidden => StatusCode::FORBIDDEN,
+            // The correct status code for a failed precondition is not PreconditionFailed but
+            // Forbidden (or Conflict):
+            // https://datatracker.ietf.org/doc/html/rfc4791#section-1.3
+            Self::PreconditionFailed | Self::Forbidden => StatusCode::FORBIDDEN,
         }
     }
 }
 impl axum::response::IntoResponse for Error {
     fn into_response(self) -> axum::response::Response {
-        if matches!(
-            self.status_code(),
-            StatusCode::INTERNAL_SERVER_ERROR | StatusCode::PRECONDITION_FAILED
-        ) {
+        if matches!(self.status_code(), StatusCode::INTERNAL_SERVER_ERROR) {
             error!("{self}");
         }

View File

@@ -6,12 +6,15 @@ use axum::{
     extract::{MatchedPath, Path, State},
     response::{IntoResponse, Response},
 };
+use axum_extra::TypedHeader;
+use headers::Host;
 use http::{HeaderMap, StatusCode, Uri};
 use matchit_serde::ParamsDeserializer;
 use serde::Deserialize;
 use tracing::instrument;
 #[instrument(skip(path, resource_service,))]
+#[allow(clippy::too_many_arguments)]
 pub async fn axum_route_copy<R: ResourceService>(
     Path(path): Path<R::PathComponents>,
     State(resource_service): State<R>,
@@ -20,6 +23,7 @@ pub async fn axum_route_copy<R: ResourceService>(
     Overwrite(overwrite): Overwrite,
     matched_path: MatchedPath,
     header_map: HeaderMap,
+    TypedHeader(host): TypedHeader<Host>,
 ) -> Result<Response, R::Error> {
     let destination = header_map
         .get("Destination")
@@ -27,7 +31,11 @@ pub async fn axum_route_copy<R: ResourceService>(
         .to_str()
         .map_err(|_| crate::Error::Forbidden)?;
     let destination_uri: Uri = destination.parse().map_err(|_| crate::Error::Forbidden)?;
-    // TODO: Check that host also matches
+    if let Some(authority) = destination_uri.authority()
+        && host != authority.clone().into()
+    {
+        return Err(crate::Error::Forbidden.into());
+    }
     let destination = destination_uri.path();
     let mut router = matchit::Router::new();

View File

@@ -6,12 +6,15 @@ use axum::{
     extract::{MatchedPath, Path, State},
     response::{IntoResponse, Response},
 };
+use axum_extra::TypedHeader;
+use headers::Host;
 use http::{HeaderMap, StatusCode, Uri};
 use matchit_serde::ParamsDeserializer;
 use serde::Deserialize;
 use tracing::instrument;
 #[instrument(skip(path, resource_service,))]
+#[allow(clippy::too_many_arguments)]
 pub async fn axum_route_move<R: ResourceService>(
     Path(path): Path<R::PathComponents>,
     State(resource_service): State<R>,
@@ -20,6 +23,7 @@ pub async fn axum_route_move<R: ResourceService>(
     Overwrite(overwrite): Overwrite,
     matched_path: MatchedPath,
     header_map: HeaderMap,
+    TypedHeader(host): TypedHeader<Host>,
 ) -> Result<Response, R::Error> {
     let destination = header_map
         .get("Destination")
@@ -27,7 +31,11 @@ pub async fn axum_route_move<R: ResourceService>(
         .to_str()
         .map_err(|_| crate::Error::Forbidden)?;
     let destination_uri: Uri = destination.parse().map_err(|_| crate::Error::Forbidden)?;
-    // TODO: Check that host also matches
+    if let Some(authority) = destination_uri.authority()
+        && host != authority.clone().into()
+    {
+        return Err(crate::Error::Forbidden.into());
+    }
     let destination = destination_uri.path();
     let mut router = matchit::Router::new();

View File

@@ -53,9 +53,7 @@ impl IntoResponse for Error {
     fn into_response(self) -> axum::response::Response {
         if matches!(
             self.status_code(),
-            StatusCode::INTERNAL_SERVER_ERROR
-                | StatusCode::PRECONDITION_FAILED
-                | StatusCode::CONFLICT
+            StatusCode::INTERNAL_SERVER_ERROR | StatusCode::CONFLICT
         ) {
             error!("{self}");
         }

View File

@@ -2,6 +2,7 @@ use super::ChangeOperation;
 use crate::BEGIN_IMMEDIATE;
 use async_trait::async_trait;
 use derive_more::derive::Constructor;
+use ical::parser::ParserError;
 use rustical_ical::AddressObject;
 use rustical_store::{
     Addressbook, AddressbookStore, CollectionMetadata, CollectionOperation,
@@ -9,7 +10,7 @@ use rustical_store::{
 };
 use sqlx::{Acquire, Executor, Sqlite, SqlitePool, Transaction};
 use tokio::sync::mpsc::Sender;
-use tracing::{error_span, instrument, warn};
+use tracing::{error, error_span, instrument, warn};
 pub mod birthday_calendar;
@@ -18,6 +19,12 @@ struct AddressObjectRow {
     id: String,
     vcf: String,
 }
+impl From<AddressObjectRow> for (String, Result<AddressObject, ParserError>) {
+    fn from(row: AddressObjectRow) -> Self {
+        let result = AddressObject::from_vcf(row.vcf);
+        (row.id, result)
+    }
+}
 impl TryFrom<AddressObjectRow> for (String, AddressObject) {
     type Error = rustical_store::Error;
@@ -31,6 +38,7 @@ impl TryFrom<AddressObjectRow> for (String, AddressObject) {
 pub struct SqliteAddressbookStore {
     db: SqlitePool,
     sender: Sender<CollectionOperation>,
+    skip_broken: bool,
 }
 impl SqliteAddressbookStore {
@@ -88,6 +96,36 @@ impl SqliteAddressbookStore {
         Ok(())
     }
+    #[allow(clippy::missing_panics_doc)]
+    pub async fn validate_objects(&self, principal: &str) -> Result<(), Error> {
+        let mut success = true;
+        for addressbook in self.get_addressbooks(principal).await? {
+            for (object_id, res) in Self::_get_objects(&self.db, principal, &addressbook.id).await?
+            {
+                if let Err(err) = res {
+                    warn!(
+                        "Invalid address object found at {principal}/{addr_id}/{object_id}.vcf. Error: {err}",
+                        addr_id = addressbook.id
+                    );
+                    success = false;
+                }
+            }
+        }
+        if !success {
+            if self.skip_broken {
+                error!(
+                    "Not all address objects are valid. Since data_store.sqlite.skip_broken=true they will be hidden. You are still advised to manually remove or repair the object. If you need help feel free to open up an issue on GitHub."
+                );
+            } else {
+                error!(
+                    "Not all address objects are valid. Since data_store.sqlite.skip_broken=false this causes a panic. Remove or repair the broken objects manually or set data_store.sqlite.skip_broken=false as a temporary solution to ignore the error. If you need help feel free to open up an issue on GitHub."
+                );
+                panic!();
+            }
+        }
+        Ok(())
+    }
     // Logs an operation to an address object
     async fn log_object_operation(
         tx: &mut Transaction<'_, Sqlite>,
@@ -134,7 +172,7 @@ impl SqliteAddressbookStore {
         if let Err(err) = self.sender.try_send(CollectionOperation { topic, data }) {
             error_span!(
                 "Error trying to send addressbook update notification:",
-                err = format!("{err:?}"),
+                err = format!("{err}"),
             );
         }
     }
@@ -353,8 +391,8 @@ impl SqliteAddressbookStore {
         executor: E,
         principal: &str,
         addressbook_id: &str,
-    ) -> Result<Vec<(String, AddressObject)>, rustical_store::Error> {
-        sqlx::query_as!(
+    ) -> Result<impl Iterator<Item = (String, Result<AddressObject, ParserError>)>, Error> {
+        Ok(sqlx::query_as!(
             AddressObjectRow,
             "SELECT id, vcf FROM addressobjects WHERE principal = ? AND addressbook_id = ? AND deleted_at IS NULL",
             principal,
@@ -363,8 +401,8 @@ impl SqliteAddressbookStore {
         .fetch_all(executor)
         .await.map_err(crate::Error::from)?
         .into_iter()
-        .map(std::convert::TryInto::try_into)
-        .collect()
+        .map(Into::into)
+        )
     }
     async fn _get_object<'e, E: Executor<'e, Database = Sqlite>>(
@@ -607,7 +645,16 @@ impl AddressbookStore for SqliteAddressbookStore {
         principal: &str,
         addressbook_id: &str,
     ) -> Result<Vec<(String, AddressObject)>, rustical_store::Error> {
-        Self::_get_objects(&self.db, principal, addressbook_id).await
+        let objects = Self::_get_objects(&self.db, principal, addressbook_id).await?;
+        if self.skip_broken {
+            Ok(objects
+                .filter_map(|(id, res)| Some((id, res.ok()?)))
+                .collect())
+        } else {
+            Ok(objects
+                .map(|(id, res)| res.map(|obj| (id, obj)))
+                .collect::<Result<Vec<_>, _>>()?)
+        }
     }
     #[instrument]

View File

@@ -3,6 +3,7 @@ use crate::BEGIN_IMMEDIATE;
 use async_trait::async_trait;
 use chrono::TimeDelta;
 use derive_more::derive::Constructor;
+use ical::parser::ParserError;
 use ical::types::CalDateTime;
 use regex::Regex;
 use rustical_ical::{CalendarObject, CalendarObjectType};
@@ -13,7 +14,7 @@ use rustical_store::{CollectionOperation, CollectionOperationInfo};
 use sqlx::types::chrono::NaiveDateTime;
 use sqlx::{Acquire, Executor, Sqlite, SqlitePool, Transaction};
 use tokio::sync::mpsc::Sender;
-use tracing::{error_span, instrument, warn};
+use tracing::{error, error_span, instrument, warn};
 #[derive(Debug, Clone)]
 struct CalendarObjectRow {
@@ -22,6 +23,23 @@ struct CalendarObjectRow {
     uid: String,
 }
+impl From<CalendarObjectRow> for (String, Result<CalendarObject, ParserError>) {
+    fn from(row: CalendarObjectRow) -> Self {
+        let result = CalendarObject::from_ics(row.ics).inspect(|object| {
+            if object.get_uid() != row.uid {
+                warn!(
+                    "Calendar object {}.ics: UID={} and row uid={} do not match",
+                    row.id,
+                    object.get_uid(),
+                    row.uid
+                );
+            }
+        });
+        (row.id, result)
+    }
+}
 impl TryFrom<CalendarObjectRow> for (String, CalendarObject) {
     type Error = rustical_store::Error;
@@ -92,6 +110,7 @@ impl From<CalendarRow> for Calendar {
 pub struct SqliteCalendarStore {
     db: SqlitePool,
     sender: Sender<CollectionOperation>,
+    skip_broken: bool,
 }
 impl SqliteCalendarStore {
@@ -141,11 +160,40 @@ impl SqliteCalendarStore {
         if let Err(err) = self.sender.try_send(CollectionOperation { topic, data }) {
             error_span!(
                 "Error trying to send calendar update notification:",
-                err = format!("{err:?}"),
+                err = format!("{err}"),
             );
         }
     }
+    #[allow(clippy::missing_panics_doc)]
+    pub async fn validate_objects(&self, principal: &str) -> Result<(), Error> {
+        let mut success = true;
+        for calendar in self.get_calendars(principal).await? {
+            for (object_id, res) in Self::_get_objects(&self.db, principal, &calendar.id).await? {
+                if let Err(err) = res {
+                    warn!(
+                        "Invalid calendar object found at {principal}/{cal_id}/{object_id}.ics. Error: {err}",
+                        cal_id = calendar.id
+                    );
+                    success = false;
+                }
+            }
+        }
+        if !success {
+            if self.skip_broken {
+                error!(
+                    "Not all calendar objects are valid. Since data_store.sqlite.skip_broken=true they will be hidden. You are still advised to manually remove or repair the object. If you need help feel free to open up an issue on GitHub."
+                );
+            } else {
+                error!(
+                    "Not all calendar objects are valid. Since data_store.sqlite.skip_broken=false this causes a panic. Remove or repair the broken objects manually or set data_store.sqlite.skip_broken=false as a temporary solution to ignore the error. If you need help feel free to open up an issue on GitHub."
+                );
+                panic!();
+            }
+        }
+        Ok(())
+    }
     /// In the past exports generated objects with invalid VERSION:4.0
     /// This repair sets them to VERSION:2.0
     #[allow(clippy::missing_panics_doc)]
@@ -456,8 +504,8 @@ impl SqliteCalendarStore {
         executor: E,
         principal: &str,
         cal_id: &str,
-    ) -> Result<Vec<(String, CalendarObject)>, Error> {
-        sqlx::query_as!(
+    ) -> Result<impl Iterator<Item = (String, Result<CalendarObject, ParserError>)>, Error> {
+        Ok(sqlx::query_as!(
             CalendarObjectRow,
             "SELECT id, uid, ics FROM calendarobjects WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL",
             principal,
@@ -466,8 +514,8 @@ impl SqliteCalendarStore {
         .fetch_all(executor)
         .await.map_err(crate::Error::from)?
         .into_iter()
-        .map(std::convert::TryInto::try_into)
-        .collect()
+        .map(Into::into)
+        )
     }
     async fn _calendar_query<'e, E: Executor<'e, Database = Sqlite>>(
@@ -475,14 +523,14 @@ impl SqliteCalendarStore {
         principal: &str,
         cal_id: &str,
         query: CalendarQuery,
-    ) -> Result<Vec<(String, CalendarObject)>, Error> {
+    ) -> Result<impl Iterator<Item = (String, Result<CalendarObject, ParserError>)>, Error> {
         // We extend our query interval by one day in each direction since we really don't want to
         // miss any objects because of timezone differences
        // I've previously tried NaiveDate::MIN,MAX, but it seems like sqlite cannot handle these
         let start = query.time_start.map(|start| start - TimeDelta::days(1));
         let end = query.time_end.map(|end| end + TimeDelta::days(1));
-        sqlx::query_as!(
+        Ok(sqlx::query_as!(
             CalendarObjectRow,
             r"SELECT id, uid, ics FROM calendarobjects
             WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL
@@ -500,8 +548,7 @@ impl SqliteCalendarStore {
         .await
         .map_err(crate::Error::from)?
         .into_iter()
-        .map(std::convert::TryInto::try_into)
-        .collect()
+        .map(Into::into))
     }
     async fn _get_object<'e, E: Executor<'e, Database = Sqlite>>(
@@ -641,6 +688,7 @@ impl SqliteCalendarStore {
         principal: &str,
         cal_id: &str,
         synctoken: i64,
+        skip_broken: bool,
     ) -> Result<(Vec<(String, CalendarObject)>, Vec<String>, i64), Error> {
         struct Row {
             object_id: String,
@@ -670,6 +718,8 @@ impl SqliteCalendarStore {
             match Self::_get_object(&mut *conn, principal, cal_id, &object_id, false).await {
                 Ok(object) => objects.push((object_id, object)),
                 Err(rustical_store::Error::NotFound) => deleted_objects.push(object_id),
+                // Skip broken object
+                Err(rustical_store::Error::IcalError(_)) if skip_broken => (),
                 Err(err) => return Err(err),
             }
         }
@@ -820,7 +870,16 @@ impl CalendarStore for SqliteCalendarStore {
         cal_id: &str,
         query: CalendarQuery,
     ) -> Result<Vec<(String, CalendarObject)>, Error> {
-        Self::_calendar_query(&self.db, principal, cal_id, query).await
+        let objects = Self::_calendar_query(&self.db, principal, cal_id, query).await?;
+        if self.skip_broken {
+            Ok(objects
+                .filter_map(|(id, res)| Some((id, res.ok()?)))
+                .collect())
+        } else {
+            Ok(objects
+                .map(|(id, res)| res.map(|obj| (id, obj)))
+                .collect::<Result<Vec<_>, _>>()?)
+        }
     }
     async fn calendar_metadata(
@@ -851,7 +910,16 @@ impl CalendarStore for SqliteCalendarStore {
         principal: &str,
         cal_id: &str,
     ) -> Result<Vec<(String, CalendarObject)>, Error> {
-        Self::_get_objects(&self.db, principal, cal_id).await
+        let objects = Self::_get_objects(&self.db, principal, cal_id).await?;
+        if self.skip_broken {
+            Ok(objects
+                .filter_map(|(id, res)| Some((id, res.ok()?)))
+                .collect())
+        } else {
+            Ok(objects
+                .map(|(id, res)| res.map(|obj| (id, obj)))
+                .collect::<Result<Vec<_>, _>>()?)
+        }
     }
     #[instrument]
@@ -974,7 +1042,7 @@ impl CalendarStore for SqliteCalendarStore {
         cal_id: &str,
         synctoken: i64,
     ) -> Result<(Vec<(String, CalendarObject)>, Vec<String>, i64), Error> {
-        Self::_sync_changes(&self.db, principal, cal_id, synctoken).await
+        Self::_sync_changes(&self.db, principal, cal_id, synctoken, self.skip_broken).await
     }
     fn is_read_only(&self, _cal_id: &str) -> bool {

View File

@@ -18,7 +18,7 @@ impl From<sqlx::Error> for Error {
         sqlx::Error::RowNotFound => Self::StoreError(rustical_store::Error::NotFound),
         sqlx::Error::Database(err) => {
             if err.is_unique_violation() {
-                warn!("{err:?}");
+                warn!("{err}");
                 Self::StoreError(rustical_store::Error::AlreadyExists)
             } else {
                 Self::SqlxError(sqlx::Error::Database(err))

View File

@@ -52,8 +52,8 @@ pub async fn test_store_context() -> TestStoreContext {
     let db = get_test_db().await;
     TestStoreContext {
         db: db.clone(),
-        addr_store: SqliteAddressbookStore::new(db.clone(), send_addr),
-        cal_store: SqliteCalendarStore::new(db.clone(), send_cal),
+        addr_store: SqliteAddressbookStore::new(db.clone(), send_addr, false),
+        cal_store: SqliteCalendarStore::new(db.clone(), send_cal, false),
         principal_store: SqlitePrincipalStore::new(db.clone()),
         sub_store: SqliteStore::new(db),
     }

View File

@@ -18,6 +18,7 @@ pub fn cmd_gen_config(_args: GenConfigArgs) -> anyhow::Result<()> {
         data_store: DataStoreConfig::Sqlite(SqliteDataStoreConfig {
             db_url: "/var/lib/rustical/db.sqlite3".to_owned(),
             run_repairs: true,
+            skip_broken: true,
         }),
         tracing: TracingConfig::default(),
         frontend: FrontendConfig {

View File

@@ -28,6 +28,8 @@ pub struct SqliteDataStoreConfig {
     pub db_url: String,
     #[serde(default = "default_true")]
     pub run_repairs: bool,
+    #[serde(default = "default_true")]
+    pub skip_broken: bool,
 }
 #[derive(Debug, Deserialize, Serialize)]
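Note on the new option: the log messages added in the sqlite stores above refer to it as data_store.sqlite.skip_broken, and the serde default makes it true. A minimal sketch of the corresponding config section follows; the field names come from SqliteDataStoreConfig in this diff, while the exact TOML nesting of the generated config file is an assumption:

```toml
# Sketch of the relevant part of the RustiCal config (assumed layout).
[data_store.sqlite]
db_url = "/var/lib/rustical/db.sqlite3"
run_repairs = true
# New in 0.12.0: hide calendar/address objects that fail to parse instead of
# refusing to start. Defaults to true via #[serde(default = "default_true")].
skip_broken = true
```

With skip_broken disabled, start-up validation panics while broken objects remain in the database, which matches the behaviour of validate_objects in the store changes above.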

View File

@@ -8,7 +8,7 @@ use rustical_store::{CalendarMetadata, CalendarStore};
 use rustical_store_sqlite::tests::{TestStoreContext, test_store_context};
 use tower::ServiceExt;
-fn mkcalendar_template(
+pub fn mkcalendar_template(
     CalendarMetadata {
         displayname,
         order: _order,

View File

@@ -0,0 +1,77 @@
use axum::body::Body;
use headers::{Authorization, HeaderMapExt};
use http::{Request, StatusCode};
use rstest::rstest;
use rustical_store::CalendarMetadata;
use rustical_store_sqlite::tests::{TestStoreContext, test_store_context};
use tower::ServiceExt;
use crate::integration_tests::{
ResponseExtractString, caldav::calendar::mkcalendar_template, get_app,
};
#[rstest]
#[tokio::test]
async fn test_put_invalid(
#[from(test_store_context)]
#[future]
context: TestStoreContext,
) {
let context = context.await;
let app = get_app(context.clone());
let calendar_meta = CalendarMetadata {
displayname: Some("Calendar".to_string()),
description: Some("Description".to_string()),
color: Some("#00FF00".to_string()),
order: 0,
};
let (principal, cal_id) = ("user", "calendar");
let url = format!("/caldav/principal/{principal}/{cal_id}");
let mut request = Request::builder()
.method("MKCALENDAR")
.uri(&url)
.body(Body::from(mkcalendar_template(&calendar_meta)))
.unwrap();
request
.headers_mut()
.typed_insert(Authorization::basic("user", "pass"));
let response = app.clone().oneshot(request).await.unwrap();
assert_eq!(response.status(), StatusCode::CREATED);
// Invalid calendar data
let ical = r"BEGIN:VCALENDAR
VERSION:2.0
PRODID:-//Example Corp.//CalDAV Client//EN
BEGIN:VEVENT
UID:20010712T182145Z-123401@example.com
DTSTAMP:20060712T182145Z
DTSTART:20060714T170000Z
RRULE:UNTIL=123
DTEND:20060715T040000Z
SUMMARY:Bastille Day Party
END:VEVENT
END:VCALENDAR";
let mut request = Request::builder()
.method("PUT")
.uri(format!("{url}/qwue23489.ics"))
.header("If-None-Match", "*")
.header("Content-Type", "text/calendar")
.body(Body::from(ical))
.unwrap();
request
.headers_mut()
.typed_insert(Authorization::basic("user", "pass"));
let response = app.clone().oneshot(request).await.unwrap();
assert_eq!(response.status(), StatusCode::FORBIDDEN);
let body = response.extract_string().await;
insta::assert_snapshot!(body, @r#"
<?xml version="1.0" encoding="utf-8"?>
<error xmlns="DAV:" xmlns:CAL="urn:ietf:params:xml:ns:caldav" xmlns:CARD="urn:ietf:params:xml:ns:carddav" xmlns:CS="http://calendarserver.org/ns/" xmlns:PUSH="https://bitfire.at/webdav-push">
<CAL:valid-calendar-data/>
</error>
"#);
}

View File

@@ -87,70 +87,79 @@ const REPORT_7_8_3: &str = r#"
 </C:calendar-query>
 "#;
-const OUTPUT_7_8_3: &str = r#"
-<D:response>
-<D:href>http://cal.example.com/bernard/work/abcd2.ics</D:href>
-<D:propstat>
-<D:prop>
-<D:getetag>"fffff-abcd2"</D:getetag>
-<C:calendar-data>BEGIN:VCALENDAR
+// Adapted from Example 7.8.3 of RFC 4791
+// In the RFC the output is wrong since it returns DTSTART in UTC as local time, e.g.
+// DTSTART:20060103T170000
+// instead of
+// DTSTART:20060103T170000Z
+// In https://datatracker.ietf.org/doc/html/rfc4791#section-9.6.5
+// it is clearly stated that times with timezone information MUST be returned in UTC.
+// Also, the RECURRENCE-ID needs to include the TIMEZONE, which is fixed here by converting it to
+// UTC
+const OUTPUT_7_8_3: &str = r#"<?xml version="1.0" encoding="utf-8"?>
+<multistatus xmlns="DAV:" xmlns:CAL="urn:ietf:params:xml:ns:caldav" xmlns:CARD="urn:ietf:params:xml:ns:carddav" xmlns:CS="http://calendarserver.org/ns/" xmlns:PUSH="https://bitfire.at/webdav-push">
+<response>
+<href>/caldav/principal/user/calendar/abcd2.ics</href>
+<propstat>
+<prop>
+<CAL:calendar-data>BEGIN:VCALENDAR
 VERSION:2.0
 PRODID:-//Example Corp.//CalDAV Client//EN
 BEGIN:VEVENT
 DTSTAMP:20060206T001121Z
-DTSTART:20060103T170000
+DTSTART:20060103T170000Z
 DURATION:PT1H
-RECURRENCE-ID:20060103T170000
 SUMMARY:Event #2
-UID:00959BC664CA650E933C892C@example.com
+UID:abcd2
+RECURRENCE-ID:20060103T170000Z
 END:VEVENT
 BEGIN:VEVENT
 DTSTAMP:20060206T001121Z
-DTSTART:20060104T190000
+DTSTART:20060104T190000Z
 DURATION:PT1H
-RECURRENCE-ID:20060104T170000
+RECURRENCE-ID:20060104T170000Z
 SUMMARY:Event #2 bis
-UID:00959BC664CA650E933C892C@example.com
+UID:abcd2
 END:VEVENT
 END:VCALENDAR
-</C:calendar-data>
-</D:prop>
-<D:status>HTTP/1.1 200 OK</D:status>
-</D:propstat>
-</D:response>
-<D:response>
-<D:href>http://cal.example.com/bernard/work/abcd3.ics</D:href>
-<D:propstat>
-<D:prop>
-<D:getetag>"fffff-abcd3"</D:getetag>
-<C:calendar-data>BEGIN:VCALENDAR
+</CAL:calendar-data>
+</prop>
+<status>HTTP/1.1 200 OK</status>
+</propstat>
+</response>
+<response>
+<href>/caldav/principal/user/calendar/abcd3.ics</href>
+<propstat>
+<prop>
+<CAL:calendar-data>BEGIN:VCALENDAR
 VERSION:2.0
 PRODID:-//Example Corp.//CalDAV Client//EN
 BEGIN:VEVENT
 ATTENDEE;PARTSTAT=ACCEPTED;ROLE=CHAIR:mailto:cyrus@example.com
 ATTENDEE;PARTSTAT=NEEDS-ACTION:mailto:lisa@example.com
 DTSTAMP:20060206T001220Z
-DTSTART:20060104T150000
+DTSTART:20060104T150000Z
 DURATION:PT1H
 LAST-MODIFIED:20060206T001330Z
 ORGANIZER:mailto:cyrus@example.com
 SEQUENCE:1
 STATUS:TENTATIVE
 SUMMARY:Event #3
-UID:DC6C50A017428C5216A2F1CD@example.com
+UID:abcd3
 X-ABC-GUID:E1CX5Dr-0007ym-Hz@example.com
 END:VEVENT
 END:VCALENDAR
-</C:calendar-data>
-</D:prop>
-<D:status>HTTP/1.1 200 OK</D:status>
-</D:propstat>
-"#;
+</CAL:calendar-data>
+</prop>
+<status>HTTP/1.1 200 OK</status>
+</propstat>
+</response>
+</multistatus>"#;
 #[rstest]
-#[case(0, ICS_1, REPORT_7_8_1)]
-#[case(1, ICS_1, REPORT_7_8_2)]
-#[case(2, ICS_1, REPORT_7_8_3)]
+#[case(0, ICS_1, REPORT_7_8_1, None)]
+#[case(1, ICS_1, REPORT_7_8_2, None)]
+#[case(2, ICS_1, REPORT_7_8_3, Some(OUTPUT_7_8_3))]
 #[tokio::test]
 async fn test_report(
     #[from(test_store_context)]
@@ -159,6 +168,7 @@ async fn test_report(
     #[case] case: usize,
     #[case] ics: &'static str,
     #[case] report: &'static str,
+    #[case] output: Option<&'static str>,
 ) {
     let context = context.await;
     let app = get_app(context.clone());
@@ -193,4 +203,7 @@ async fn test_report(
     assert_eq!(response.status(), StatusCode::MULTI_STATUS);
     let body = response.extract_string().await;
     insta::assert_snapshot!(format!("{case}_report_body"), body);
+    if let Some(output) = output {
+        similar_asserts::assert_eq!(output, body.replace('\r', ""));
+    }
 }

View File

@@ -9,7 +9,8 @@ use tower::ServiceExt;
 mod calendar;
 mod calendar_import;
-// mod calendar_report;
+mod calendar_put;
+mod calendar_report;
 #[rstest]
 #[tokio::test]
View File

@@ -55,6 +55,7 @@ SEQUENCE:1
 STATUS:TENTATIVE
 SUMMARY:Event #3
 UID:abcd3
+X-ABC-GUID:E1CX5Dr-0007ym-Hz@example.com
 END:VEVENT
 BEGIN:VTODO
 DTSTAMP:20060205T235335Z

View File

@@ -60,6 +60,7 @@ SEQUENCE:1
 STATUS:TENTATIVE
 SUMMARY:Event #3
 UID:[UID]
+X-ABC-GUID:[UID]
 END:VEVENT
 BEGIN:VTODO
 DTSTAMP:20060205T235335Z

View File

@@ -56,7 +56,7 @@ END:VCALENDAR
 <href>/caldav/principal/user/calendar/abcd3.ics</href>
 <propstat>
 <prop>
-<getetag>&quot;c6a5b1cf6985805686df99e7f2e1cf286567dcb3383fc6fa1b12ce42d3fbc01c&quot;</getetag>
+<getetag>&quot;a84fd022dfc742bf8f17ac04fca3aad687e9ae724180185e8e0df11e432dae30&quot;</getetag>
 <CAL:calendar-data>BEGIN:VCALENDAR
 VERSION:2.0
 PRODID:-//Example Corp.//CalDAV Client//EN
@@ -90,6 +90,7 @@ SEQUENCE:1
 STATUS:TENTATIVE
 SUMMARY:Event #3
 UID:abcd3
+X-ABC-GUID:E1CX5Dr-0007ym-Hz@example.com
 END:VEVENT
 END:VCALENDAR
 </CAL:calendar-data>

View File

@@ -88,6 +88,7 @@ SEQUENCE:1
 STATUS:TENTATIVE
 SUMMARY:Event #3
 UID:abcd3
+X-ABC-GUID:E1CX5Dr-0007ym-Hz@example.com
 END:VEVENT
 END:VCALENDAR
 </CAL:calendar-data>

View File

@@ -13,19 +13,19 @@ VERSION:2.0
 PRODID:-//Example Corp.//CalDAV Client//EN
 BEGIN:VEVENT
 DTSTAMP:20060206T001121Z
+DTSTART:20060103T170000Z
 DURATION:PT1H
 SUMMARY:Event #2
 UID:abcd2
 RECURRENCE-ID:20060103T170000Z
-DTSTART:20060103T170000Z
 END:VEVENT
 BEGIN:VEVENT
 DTSTAMP:20060206T001121Z
+DTSTART:20060104T190000Z
 DURATION:PT1H
-SUMMARY:Event #2
-UID:abcd2
 RECURRENCE-ID:20060104T170000Z
-DTSTART:20060104T170000Z
+SUMMARY:Event #2 bis
+UID:abcd2
 END:VEVENT
 END:VCALENDAR
 </CAL:calendar-data>
@@ -44,7 +44,7 @@ BEGIN:VEVENT
 ATTENDEE;PARTSTAT=ACCEPTED;ROLE=CHAIR:mailto:cyrus@example.com
 ATTENDEE;PARTSTAT=NEEDS-ACTION:mailto:lisa@example.com
 DTSTAMP:20060206T001220Z
-DTSTART;TZID=US/Eastern:20060104T100000
+DTSTART:20060104T150000Z
 DURATION:PT1H
 LAST-MODIFIED:20060206T001330Z
 ORGANIZER:mailto:cyrus@example.com
@@ -52,6 +52,7 @@ SEQUENCE:1
 STATUS:TENTATIVE
 SUMMARY:Event #3
 UID:abcd3
+X-ABC-GUID:E1CX5Dr-0007ym-Hz@example.com
 END:VEVENT
 END:VCALENDAR
 </CAL:calendar-data>

View File

@@ -34,9 +34,6 @@ mod config;
 pub mod integration_tests;
 mod setup_tracing;
-mod migration_0_12;
-use migration_0_12::{validate_address_objects_0_12, validate_calendar_objects_0_12};
 #[derive(Parser, Debug)]
 #[command(author, version, about, long_about = None)]
 struct Args {
@@ -73,13 +70,18 @@ async fn get_data_stores(
         DataStoreConfig::Sqlite(SqliteDataStoreConfig {
             db_url,
             run_repairs,
+            skip_broken,
         }) => {
             let db = create_db_pool(db_url, migrate).await?;
             // Channel to watch for changes (for DAV Push)
             let (send, recv) = tokio::sync::mpsc::channel(1000);
-            let addressbook_store = Arc::new(SqliteAddressbookStore::new(db.clone(), send.clone()));
-            let cal_store = Arc::new(SqliteCalendarStore::new(db.clone(), send));
+            let addressbook_store = Arc::new(SqliteAddressbookStore::new(
+                db.clone(),
+                send.clone(),
+                *skip_broken,
+            ));
+            let cal_store = Arc::new(SqliteCalendarStore::new(db.clone(), send, *skip_broken));
             if *run_repairs {
                 info!("Running repair tasks");
                 addressbook_store.repair_orphans().await?;
@@ -88,6 +90,13 @@ async fn get_data_stores(
             }
             let subscription_store = Arc::new(SqliteStore::new(db.clone()));
             let principal_store = Arc::new(SqlitePrincipalStore::new(db));
+            // Validate all calendar objects
+            for principal in principal_store.get_principals().await? {
+                cal_store.validate_objects(&principal.id).await?;
+                addressbook_store.validate_objects(&principal.id).await?;
+            }
             (
                 addressbook_store,
                 cal_store,
@@ -125,14 +134,6 @@ async fn main() -> Result<()> {
     let (addr_store, cal_store, subscription_store, principal_store, update_recv) =
         get_data_stores(!args.no_migrations, &config.data_store).await?;
-    warn!(
-        "Validating calendar data against the next-version ical parser.
-In the next major release these will be rejected and cause errors.
-If any errors occur, please open an issue so they can be fixed before the next major release."
-    );
-    validate_calendar_objects_0_12(principal_store.as_ref(), cal_store.as_ref()).await?;
-    validate_address_objects_0_12(principal_store.as_ref(), addr_store.as_ref()).await?;
     let mut tasks = vec![];
     if config.dav_push.enabled {

View File

@@ -1,76 +0,0 @@
use ical::parser::{ical::IcalObjectParser, vcard::VcardParser};
use rustical_store::{AddressbookStore, CalendarStore, auth::AuthenticationProvider};
use tracing::{error, info};
pub async fn validate_calendar_objects_0_12(
principal_store: &impl AuthenticationProvider,
cal_store: &impl CalendarStore,
) -> Result<(), rustical_store::Error> {
let mut success = true;
for principal in principal_store.get_principals().await? {
for calendar in cal_store.get_calendars(&principal.id).await? {
for (object_id, object) in cal_store
.get_objects(&calendar.principal, &calendar.id)
.await?
{
if let Err(err) =
IcalObjectParser::from_slice(object.get_ics().as_bytes()).expect_one()
{
success = false;
error!(
"An error occured parsing a calendar object: principal={principal}, calendar={calendar}, object_id={object_id}: {err}",
principal = principal.id,
calendar = calendar.id,
);
println!("{}", object.get_ics());
}
}
}
}
if success {
info!("Your calendar data seems to be valid in the next major version.");
} else {
error!(
"Not all calendar objects will be successfully parsed in the next major version (v0.12).
This will not cause issues in this version, but please comment under the tracking issue on GitHub:
https://github.com/lennart-k/rustical/issues/165"
);
}
Ok(())
}
pub async fn validate_address_objects_0_12(
principal_store: &impl AuthenticationProvider,
addr_store: &impl AddressbookStore,
) -> Result<(), rustical_store::Error> {
let mut success = true;
for principal in principal_store.get_principals().await? {
for addressbook in addr_store.get_addressbooks(&principal.id).await? {
for (object_id, object) in addr_store
.get_objects(&addressbook.principal, &addressbook.id)
.await?
{
if let Err(err) = VcardParser::from_slice(object.get_vcf().as_bytes()).expect_one()
{
success = false;
error!(
"An error occured parsing an address object: principal={principal}, addressbook={addressbook}, object_id={object_id}: {err}",
principal = principal.id,
addressbook = addressbook.id,
);
println!("{}", object.get_vcf());
}
}
}
}
if success {
info!("Your addressbook data seems to be valid in the next major version.");
} else {
error!(
"Not all address objects will be successfully parsed in the next major version (v0.12).
This will not cause issues in this version, but please comment under the tracking issue on GitHub:
https://github.com/lennart-k/rustical/issues/165"
);
}
Ok(())
}