Compare commits

...

60 Commits

Author SHA1 Message Date
Lennart K
008e40e17f integration tests: Also use health command 2026-01-29 11:26:20 +01:00
Lennart K
0703b7b470 Add another integration test 2026-01-29 10:25:00 +01:00
Lennart K
233cf2ea37 fix test snapshots 2026-01-28 21:05:56 +01:00
Lennart K
494f31f992 Add more abstract integration test 2026-01-28 20:54:55 +01:00
Lennart K
c1758e2cba cmd_default: Add notifier to detect when rustical has started 2026-01-28 20:16:41 +01:00
Lennart K
af60a446ad refactoring of integration tests 2026-01-28 18:38:03 +01:00
Lennart K
c763a682ed update .gitignore 2026-01-27 23:07:19 +01:00
Lennart K
8ab9c61b0f Move commands to lib.rs 2026-01-27 23:06:57 +01:00
Lennart
8b2bb1b0d6 docs: Mention NixOS package 2026-01-26 12:20:04 +01:00
Lennart
da72aa26cb update README.md 2026-01-24 22:53:51 +01:00
Lennart
b89ff1a2b5 version 0.12.3 2026-01-24 22:49:02 +01:00
Lennart
246a1aa738 Add truncation for automatically derived timezones 2026-01-24 22:48:08 +01:00
Lennart
bb0484ac4a version 0.12.2 2026-01-24 20:09:42 +01:00
Lennart
1b3da2a99b update caldata-rs 2026-01-24 20:07:38 +01:00
Lennart
3b01ae1cf6 update test snapshots 2026-01-24 19:52:13 +01:00
Lennart K
d918a255a9 PUT calendar_object: Allow omission of timezones as in RFC7809 2026-01-24 19:44:58 +01:00
Lennart K
6a31d3000c Update vtimezones-rs 2026-01-24 18:05:42 +01:00
Lennart K
d5892ab56b Migrate ical-rs to caldata-rs 2026-01-22 11:01:00 +01:00
Lennart K
11a61cf8b1 version 0.12.1 2026-01-20 13:20:04 +01:00
Lennart Kämmle
227d4bc61a Merge pull request #171 from wrvsrx/fix-anniversayr-typo
Fix a typo about anniversary
2026-01-20 13:17:44 +01:00
wrvsrx
d9afc85222 Fix a typo about anniversary 2026-01-20 19:45:50 +08:00
Lennart
c9fe5706a9 clippy appeasement 2026-01-19 17:03:14 +01:00
Lennart
1b6214d426 MKCALENDAR: Handling of invalid timezones 2026-01-19 16:36:25 +01:00
Lennart
be34cc3091 xml: Implement namespace for Unparsed 2026-01-19 16:22:21 +01:00
Lennart
99287f85f4 version 0.12.0 2026-01-19 15:48:56 +01:00
Lennart
df3143cd4c Fix status code for failed preconditions 2026-01-19 15:37:41 +01:00
Lennart Kämmle
92a3418f8e Merge pull request #164 from lennart-k/feat/ical-rewrite
ical-rs overhaul
2026-01-19 15:14:14 +01:00
Lennart
ea2f841269 ical-rs: Pin version to Git commit 2026-01-19 15:04:54 +01:00
Lennart
15e1509fe3 sqlite_store: Add option to skip broken objects and add validation on start-up 2026-01-19 14:48:21 +01:00
Lennart
0eef4ffabf Add test for uploading invalid calendar object and fix precondition 2026-01-19 13:40:54 +01:00
Lennart
303f9aff68 Remove IcalError from caldav/carddav since it had an ambiguous status code 2026-01-19 12:51:51 +01:00
Lennart
3460a2821e dav: Check Host matching for MV,COPY 2026-01-19 12:37:35 +01:00
Lennart
f73658b32f Re-enable calendar-query test and fix calendar expansion 2026-01-19 12:09:34 +01:00
Lennart K
7e099bcd6e Merge branch 'main' into feat/ical-rewrite 2026-01-16 16:47:17 +01:00
Lennart K
dde05d2f45 Workflow: Publish container images for feature branches too 2026-01-16 16:29:38 +01:00
Lennart K
4adf1818d4 Merge branch 'main' into feat/ical-rewrite 2026-01-16 15:58:17 +01:00
Lennart K
f503bf2bf7 Update quick-xml 2026-01-16 15:47:10 +01:00
Lennart K
7c15976a1a rebase main 2026-01-16 15:41:39 +01:00
Lennart K
669d81aea0 address_object resource: Implement displayname 2026-01-16 15:39:56 +01:00
Lennart K
967d18de95 Fix comp-filter 2026-01-16 15:39:55 +01:00
Lennart K
63373ad525 simplify handling of ical-related errors 2026-01-16 15:39:54 +01:00
Lennart K
2c67890343 Update ical-rs 2026-01-16 15:39:53 +01:00
Lennart K
5ec2787ecf build MVP for birthday calendar 2026-01-16 15:39:53 +01:00
Lennart K
7eecd95757 Remove calendar-query integration test for now 2026-01-16 15:39:52 +01:00
Lennart K
c165e761be update ical-rs 2026-01-16 15:39:51 +01:00
Lennart K
5f68a5ae5c Re-add get_last_occurence for sqlite store 2026-01-16 15:39:50 +01:00
Lennart K
c77b59dcb0 Remove unused code 2026-01-16 15:39:49 +01:00
Lennart K
276fdcacf5 Re-implement calendar imports 2026-01-16 15:39:48 +01:00
Lennart K
43fff63008 Calendar export: Fix PRODID 2026-01-16 15:39:47 +01:00
Lennart K
977fd75500 Re-implement calendar export 2026-01-16 15:39:46 +01:00
Lennart K
5639127782 clean up ical-related stuff 2026-01-16 15:39:44 +01:00
Lennart K
a2255bc7f1 make calendar object id extrinsic 2026-01-16 15:39:34 +01:00
Lennart K
758793a11a Make AddressObject object_id an extrinsic property 2026-01-16 15:39:33 +01:00
Lennart K
a9f3833a32 small fixes 2026-01-16 15:39:30 +01:00
Lennart K
896e934c0a Decrease folder nesting 2026-01-16 15:39:01 +01:00
Lennart K
bb880aa403 incorporate get_first_occurenec 2026-01-16 15:39:00 +01:00
Lennart K
69acde10ba migrate to new ical-rs version 2026-01-16 15:38:57 +01:00
Lennart K
d84158e8ad version 0.11.17 2026-01-16 12:26:43 +01:00
Lennart K
7ef566040a Disable a test that will be fixed in 0.12 2026-01-16 12:16:02 +01:00
Lennart K
1c1f0c6da2 Update ical-rs@dev to fix cargo vendor 2026-01-16 12:10:10 +01:00
151 changed files with 2141 additions and 2662 deletions

View File

@@ -2,7 +2,10 @@ name: Docker
on:
push:
branches: ["main", "dev"]
branches:
- main
- dev
- feat/*
release:
types: ["published"]
@@ -45,7 +48,8 @@ jobs:
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
${{ github.ref_name == 'main' && 'type=ref,event=branch' || '' }}
type=ref,event=branch,prefix=br-
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}

.gitignore vendored (4 changed lines)
View File

@@ -3,7 +3,7 @@ crates/*/target
# For libraries ignore Cargo.lock
crates/*/Cargo.lock
db.sqlite3*
**/*.sqlite3*
config.toml
principals.toml
@@ -16,3 +16,5 @@ site
# Frontend
**/node_modules
**/.vite
**/*.snap.new

Cargo.lock generated (231 changed lines)
View File

@@ -181,9 +181,9 @@ dependencies = [
[[package]]
name = "askama_web"
version = "0.15.0"
version = "0.15.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c0d6576f8e59513752a3e2673ca602fb403be7d0d0aacba5cd8b219838ab58fe"
checksum = "5911a65ac3916ef133167a855d52978f9fbf54680a093e0ef29e20b7e94a4523"
dependencies = [
"askama",
"askama_web_derive",
@@ -565,6 +565,24 @@ version = "1.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b35204fbdc0b3f4446b89fc1ac2cf84a8a68971995d0bf2e925ec7cd960f9cb3"
[[package]]
name = "caldata"
version = "0.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f36de4a8034d98c95e7fe874b828272d823cfbd68e9571fe7bf6c419e852cbe2"
dependencies = [
"chrono",
"chrono-tz",
"derive_more",
"itertools 0.14.0",
"lazy_static",
"phf 0.13.1",
"regex",
"rrule",
"thiserror 2.0.18",
"vtimezones-rs",
]
[[package]]
name = "cast"
version = "0.3.0"
@@ -573,9 +591,9 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5"
[[package]]
name = "cc"
version = "1.2.52"
version = "1.2.54"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cd4932aefd12402b36c60956a4fe0035421f544799057659ff86f923657aada3"
checksum = "6354c81bbfd62d9cfa9cb3c773c2b7b2a3a482d569de977fd0e961f6e7c00583"
dependencies = [
"find-msvc-tools",
"shlex",
@@ -1241,9 +1259,9 @@ dependencies = [
[[package]]
name = "find-msvc-tools"
version = "0.1.7"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f449e6c6c08c865631d4890cfacf252b3d396c9bcc83adb6623cdb02a8336c41"
checksum = "8591b0bcc8a98a64310a2fae1bb3e9b8564dd10e381e6e28010fde8e8e8568db"
[[package]]
name = "flume"
@@ -1768,37 +1786,6 @@ dependencies = [
"cc",
]
[[package]]
name = "ical"
version = "0.11.0"
source = "git+https://github.com/lennart-k/ical-rs?branch=dev#ece5b95ddc20f89d14e162aba3a49038f9989701"
dependencies = [
"chrono",
"chrono-tz",
"derive_more",
"itertools 0.14.0",
"lazy_static",
"phf 0.13.1",
"regex",
"rrule",
"thiserror 2.0.17",
]
[[package]]
name = "ical"
version = "0.11.0"
source = "git+https://github.com/lennart-k/ical-rs?rev=7c2ab1f3#7c2ab1f3abdca768f22d8a36627eebbdd7947e29"
dependencies = [
"chrono",
"chrono-tz",
"derive_more",
"itertools 0.14.0",
"lazy_static",
"regex",
"rrule",
"thiserror 2.0.17",
]
[[package]]
name = "icu_collections"
version = "2.1.1"
@@ -2040,9 +2027,9 @@ checksum = "bcc35a38544a891a5f7c865aca548a982ccb3b8650a5b06d0fd33a10283c56fc"
[[package]]
name = "libm"
version = "0.2.15"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
checksum = "b6d2cec3eae94f9f509c767b45932f1ada8350c4bdb85af2fcab4a3c14807981"
[[package]]
name = "libredox"
@@ -2133,7 +2120,7 @@ dependencies = [
"matchit 0.9.1",
"percent-encoding",
"serde",
"thiserror 2.0.17",
"thiserror 2.0.18",
]
[[package]]
@@ -2215,9 +2202,9 @@ dependencies = [
[[package]]
name = "num-conv"
version = "0.1.0"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "51d515d32fb182ee37cda2ccdcb92950d6a3c2893aa280e540671c2cd0f3b1d9"
checksum = "cf97ec579c3c42f953ef76dbf8d55ac91fb219dde70e49aa4a6b7d74e9919050"
[[package]]
name = "num-integer"
@@ -2385,7 +2372,7 @@ dependencies = [
"futures-sink",
"js-sys",
"pin-project-lite",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tracing",
]
@@ -2415,7 +2402,7 @@ dependencies = [
"opentelemetry_sdk",
"prost",
"reqwest",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tonic",
"tracing",
@@ -2452,7 +2439,7 @@ dependencies = [
"opentelemetry",
"percent-encoding",
"rand 0.9.2",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tokio-stream",
]
@@ -2625,22 +2612,12 @@ dependencies = [
[[package]]
name = "phf_codegen"
version = "0.12.1"
version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "efbdcb6f01d193b17f0b9c3360fa7e0e620991b193ff08702f78b3ce365d7e61"
checksum = "49aa7f9d80421bca176ca8dbfebe668cc7a2684708594ec9f3c0db0805d5d6e1"
dependencies = [
"phf_generator 0.12.1",
"phf_shared 0.12.1",
]
[[package]]
name = "phf_generator"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2cbb1126afed61dd6368748dae63b1ee7dc480191c6262a3b4ff1e29d86a6c5b"
dependencies = [
"fastrand",
"phf_shared 0.12.1",
"phf_generator",
"phf_shared 0.13.1",
]
[[package]]
@@ -2659,7 +2636,7 @@ version = "0.13.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "812f032b54b1e759ccd5f8b6677695d5268c588701effba24601f6932f8269ef"
dependencies = [
"phf_generator 0.13.1",
"phf_generator",
"phf_shared 0.13.1",
"proc-macro2",
"quote",
@@ -2840,9 +2817,9 @@ dependencies = [
[[package]]
name = "proc-macro2"
version = "1.0.105"
version = "1.0.106"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "535d180e0ecab6268a3e718bb9fd44db66bbbc256257165fc699dadf70d16fe7"
checksum = "8fd00f0bb2e90d81d1044c2b32617f68fcb9fa3bb7640c23e9c748e53fb30934"
dependencies = [
"unicode-ident",
]
@@ -2885,9 +2862,9 @@ dependencies = [
[[package]]
name = "quick-xml"
version = "0.38.4"
version = "0.39.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b66c2058c55a409d601666cffe35f04333cf1013010882cec174a7467cd4e21c"
checksum = "f2e3bf4aa9d243beeb01a7b3bc30b77cfe2c44e24ec02d751a7104a53c2c49a1"
dependencies = [
"memchr",
]
@@ -2906,7 +2883,7 @@ dependencies = [
"rustc-hash",
"rustls",
"socket2",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tracing",
"web-time",
@@ -2927,7 +2904,7 @@ dependencies = [
"rustls",
"rustls-pki-types",
"slab",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tinyvec",
"tracing",
"web-time",
@@ -2949,9 +2926,9 @@ dependencies = [
[[package]]
name = "quote"
version = "1.0.43"
version = "1.0.44"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dc74d9a594b72ae6656596548f56f667211f8a97b3d4c3d467150794690dc40a"
checksum = "21b2ebcf727b7760c461f091f9f0f539b77b8e87f2fd88131e7f1b433b3cece4"
dependencies = [
"proc-macro2",
]
@@ -3202,7 +3179,7 @@ dependencies = [
"chrono-tz",
"log",
"regex",
"thiserror 2.0.17",
"thiserror 2.0.18",
]
[[package]]
@@ -3332,18 +3309,19 @@ dependencies = [
[[package]]
name = "rustical"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"anyhow",
"argon2",
"async-trait",
"axum",
"axum-extra",
"caldata",
"clap",
"figment",
"futures-util",
"headers",
"http",
"ical 0.11.0 (git+https://github.com/lennart-k/ical-rs?branch=dev)",
"insta",
"opentelemetry",
"opentelemetry-otlp",
@@ -3364,8 +3342,11 @@ dependencies = [
"rustical_store",
"rustical_store_sqlite",
"serde",
"similar-asserts",
"sqlx",
"tempfile",
"tokio",
"tokio-util",
"toml 0.9.11+spec-1.1.0",
"tower",
"tower-http",
@@ -3378,20 +3359,20 @@ dependencies = [
[[package]]
name = "rustical_caldav"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"async-std",
"async-trait",
"axum",
"axum-extra",
"base64 0.22.1",
"caldata",
"chrono",
"chrono-tz",
"derive_more",
"futures-util",
"headers",
"http",
"ical 0.11.0 (git+https://github.com/lennart-k/ical-rs?rev=7c2ab1f3)",
"insta",
"percent-encoding",
"quick-xml",
@@ -3408,7 +3389,7 @@ dependencies = [
"similar-asserts",
"strum",
"strum_macros",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tower",
"tower-http",
@@ -3420,17 +3401,17 @@ dependencies = [
[[package]]
name = "rustical_carddav"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"async-trait",
"axum",
"axum-extra",
"base64 0.22.1",
"caldata",
"chrono",
"derive_more",
"futures-util",
"http",
"ical 0.11.0 (git+https://github.com/lennart-k/ical-rs?rev=7c2ab1f3)",
"insta",
"percent-encoding",
"quick-xml",
@@ -3443,7 +3424,7 @@ dependencies = [
"serde",
"strum",
"strum_macros",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tower",
"tower-http",
@@ -3454,16 +3435,16 @@ dependencies = [
[[package]]
name = "rustical_dav"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"async-trait",
"axum",
"axum-extra",
"caldata",
"derive_more",
"futures-util",
"headers",
"http",
"ical 0.11.0 (git+https://github.com/lennart-k/ical-rs?rev=7c2ab1f3)",
"itertools 0.14.0",
"log",
"matchit 0.9.1",
@@ -3472,7 +3453,7 @@ dependencies = [
"rustical_xml",
"serde",
"strum",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tower",
"tracing",
@@ -3480,7 +3461,7 @@ dependencies = [
[[package]]
name = "rustical_dav_push"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"async-trait",
"axum",
@@ -3498,14 +3479,14 @@ dependencies = [
"rustical_store",
"rustical_xml",
"serde",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tracing",
]
[[package]]
name = "rustical_frontend"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"askama",
"askama_web",
@@ -3528,7 +3509,7 @@ dependencies = [
"rustical_store",
"serde",
"serde_json",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tower",
"tower-http",
@@ -3541,13 +3522,13 @@ dependencies = [
[[package]]
name = "rustical_ical"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"axum",
"caldata",
"chrono",
"chrono-tz",
"derive_more",
"ical 0.11.0 (git+https://github.com/lennart-k/ical-rs?rev=7c2ab1f3)",
"regex",
"rrule",
"rstest",
@@ -3555,12 +3536,12 @@ dependencies = [
"serde",
"sha2",
"similar-asserts",
"thiserror 2.0.17",
"thiserror 2.0.18",
]
[[package]]
name = "rustical_oidc"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"async-trait",
"axum",
@@ -3569,18 +3550,19 @@ dependencies = [
"openidconnect",
"reqwest",
"serde",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tower-sessions",
"tracing",
]
[[package]]
name = "rustical_store"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"anyhow",
"async-trait",
"axum",
"caldata",
"chrono",
"chrono-tz",
"clap",
@@ -3588,7 +3570,6 @@ dependencies = [
"futures-core",
"headers",
"http",
"ical 0.11.0 (git+https://github.com/lennart-k/ical-rs?rev=7c2ab1f3)",
"regex",
"rrule",
"rstest",
@@ -3599,7 +3580,7 @@ dependencies = [
"rustical_xml",
"serde",
"sha2",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tower",
"tower-sessions",
@@ -3609,9 +3590,10 @@ dependencies = [
[[package]]
name = "rustical_store_sqlite"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"async-trait",
"caldata",
"chrono",
"criterion",
"derive_more",
@@ -3625,7 +3607,7 @@ dependencies = [
"serde",
"sha2",
"sqlx",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tracing",
"uuid",
@@ -3633,10 +3615,10 @@ dependencies = [
[[package]]
name = "rustical_xml"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"quick-xml",
"thiserror 2.0.17",
"thiserror 2.0.18",
"xml_derive",
]
@@ -3669,9 +3651,9 @@ dependencies = [
[[package]]
name = "rustls-pki-types"
version = "1.13.2"
version = "1.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "21e6f2ab2928ca4291b86736a8bd920a277a399bba1589409d72154ff87c1282"
checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd"
dependencies = [
"web-time",
"zeroize",
@@ -3679,9 +3661,9 @@ dependencies = [
[[package]]
name = "rustls-webpki"
version = "0.103.8"
version = "0.103.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2ffdfa2f5286e2247234e03f680868ac2815974dc39e00ea15adc445d0aafe52"
checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53"
dependencies = [
"ring",
"rustls-pki-types",
@@ -3993,9 +3975,9 @@ dependencies = [
[[package]]
name = "socket2"
version = "0.6.1"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "17129e116933cf371d018bb80ae557e889637989d8638274fb25622827b03881"
checksum = "86f4aa3ad99f2088c990dfa82d367e19cb29268ed67c574d10d0a4bfe71f07e0"
dependencies = [
"libc",
"windows-sys 0.60.2",
@@ -4061,7 +4043,7 @@ dependencies = [
"serde_json",
"sha2",
"smallvec",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tokio",
"tokio-stream",
"tracing",
@@ -4145,7 +4127,7 @@ dependencies = [
"smallvec",
"sqlx-core",
"stringprep",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tracing",
"uuid",
"whoami",
@@ -4184,7 +4166,7 @@ dependencies = [
"smallvec",
"sqlx-core",
"stringprep",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tracing",
"uuid",
"whoami",
@@ -4210,7 +4192,7 @@ dependencies = [
"serde",
"serde_urlencoded",
"sqlx-core",
"thiserror 2.0.17",
"thiserror 2.0.18",
"tracing",
"url",
"uuid",
@@ -4329,11 +4311,11 @@ dependencies = [
[[package]]
name = "thiserror"
version = "2.0.17"
version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f63587ca0f12b72a0600bcba1d40081f830876000bb46dd2337a3051618f4fc8"
checksum = "4288b5bcbc7920c07a1149a35cf9590a2aa808e0bc1eafaade0b80947865fbc4"
dependencies = [
"thiserror-impl 2.0.17",
"thiserror-impl 2.0.18",
]
[[package]]
@@ -4349,9 +4331,9 @@ dependencies = [
[[package]]
name = "thiserror-impl"
version = "2.0.17"
version = "2.0.18"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ff15c8ecd7de3849db632e14d18d2571fa09dfc5ed93479bc4485c7a517c913"
checksum = "ebc4ee7f67670e9b64d05fa4253e753e016c6c95ff35b89b7941d6b856dec1d5"
dependencies = [
"proc-macro2",
"quote",
@@ -4369,9 +4351,9 @@ dependencies = [
[[package]]
name = "time"
version = "0.3.45"
version = "0.3.46"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9e442fc33d7fdb45aa9bfeb312c095964abdf596f7567261062b2a7107aaabd"
checksum = "9da98b7d9b7dad93488a84b8248efc35352b0b2657397d4167e7ad67e5d535e5"
dependencies = [
"deranged",
"itoa",
@@ -4384,15 +4366,15 @@ dependencies = [
[[package]]
name = "time-core"
version = "0.1.7"
version = "0.1.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8b36ee98fd31ec7426d599183e8fe26932a8dc1fb76ddb6214d05493377d34ca"
checksum = "7694e1cfe791f8d31026952abf09c69ca6f6fa4e1a1229e18988f06a04a12dca"
[[package]]
name = "time-macros"
version = "0.2.25"
version = "0.2.26"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "71e552d1249bf61ac2a52db88179fd0673def1e1ad8243a00d9ec9ed71fee3dd"
checksum = "78cc610bac2dcee56805c99642447d4c5dbde4d01f752ffea0199aee1f601dc4"
dependencies = [
"num-conv",
"time-core",
@@ -4492,6 +4474,7 @@ dependencies = [
"bytes",
"futures-core",
"futures-sink",
"futures-util",
"pin-project-lite",
"tokio",
]
@@ -4733,7 +4716,7 @@ dependencies = [
"rand 0.8.5",
"serde",
"serde_json",
"thiserror 2.0.17",
"thiserror 2.0.18",
"time",
"tokio",
"tracing",
@@ -4964,12 +4947,12 @@ checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a"
[[package]]
name = "vtimezones-rs"
version = "0.2.0"
version = "0.3.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6728de8767c8dea44f41b88115a205ed23adc3302e1b4342be59d922934dae5"
checksum = "1e4e9cf6888a927b6cec4aa2416f379885b92dd2aa4476bc83718fe58051f67e"
dependencies = [
"glob",
"phf 0.12.1",
"phf 0.13.1",
"phf_codegen",
]
@@ -5455,7 +5438,7 @@ checksum = "9edde0db4769d2dc68579893f2306b26c6ecfbe0ef499b013d731b7b9247e0b9"
[[package]]
name = "xml_derive"
version = "0.11.16"
version = "0.12.3"
dependencies = [
"darling 0.23.0",
"heck",
@@ -5576,6 +5559,6 @@ dependencies = [
[[package]]
name = "zmij"
version = "1.0.14"
version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bd8f3f50b848df28f887acb68e41201b5aea6bc8a8dacc00fb40635ff9a72fea"
checksum = "dfcd145825aace48cff44a8844de64bf75feec3080e0aa5cdbde72961ae51a65"

View File

@@ -2,7 +2,7 @@
members = ["crates/*"]
[workspace.package]
version = "0.11.16"
version = "0.12.3"
rust-version = "1.92"
edition = "2024"
description = "A CalDAV server"
@@ -32,8 +32,11 @@ opentelemetry = [
"dep:tracing-opentelemetry",
]
[profile.dev]
debug = 0
[lib]
doc = true
name = "rustical"
path = "src/lib.rs"
test = true
[workspace.dependencies]
rustical_dav = { path = "./crates/dav/", features = ["ical"] }
@@ -70,10 +73,11 @@ tokio = { version = "1.48", features = [
"rt-multi-thread",
"full",
] }
tokio-util = { version = "0.7", features = ["rt"] }
url = "2.5"
base64 = "0.22"
thiserror = "2.0"
quick-xml = { version = "0.38" }
quick-xml = { version = "0.39" }
rust-embed = "8.9"
tower-sessions = "0.14"
futures-core = "0.3"
@@ -107,7 +111,7 @@ strum = "0.27"
strum_macros = "0.27"
serde_json = { version = "1.0", features = ["raw_value"] }
sqlx-sqlite = { version = "0.8", features = ["bundled"] }
ical = { git = "https://github.com/lennart-k/ical-rs", rev = "7c2ab1f3" }
caldata = { version = "0.14.0", features = ["chrono-tz", "vtimezones-rs"] }
toml = "0.9"
tower = "0.5"
tower-http = { version = "0.6", features = [
@@ -137,7 +141,7 @@ reqwest = { version = "0.12", features = [
openidconnect = "4.0"
clap = { version = "4.5", features = ["derive", "env"] }
matchit-serde = { git = "https://github.com/lennart-k/matchit-serde", rev = "e18e65d7" }
vtimezones-rs = "0.2"
vtimezones-rs = "0.3"
ece = { version = "2.3", default-features = false, features = [
"backend-openssl",
] }
@@ -151,6 +155,8 @@ criterion = { version = "0.8", features = ["async_tokio"] }
rstest.workspace = true
rustical_store_sqlite = { workspace = true, features = ["test"] }
insta.workspace = true
similar-asserts.workspace = true
tempfile = "3.24"
[dependencies]
rustical_store.workspace = true
@@ -158,9 +164,11 @@ rustical_store_sqlite.workspace = true
rustical_caldav.workspace = true
rustical_carddav.workspace = true
rustical_frontend.workspace = true
caldata.workspace = true
toml.workspace = true
serde.workspace = true
tokio.workspace = true
tokio-util.workspace = true
tracing.workspace = true
anyhow.workspace = true
clap.workspace = true
@@ -199,7 +207,4 @@ tower-http.workspace = true
axum-extra.workspace = true
headers.workspace = true
http.workspace = true
# TODO: Remove in next major release
ical_dev = { package = "ical", git = "https://github.com/lennart-k/ical-rs", branch = "dev", features = [
"chrono-tz",
] }
futures-util.workspace = true

View File

@@ -24,6 +24,7 @@ a CalDAV/CardDAV server
- Apple configuration profiles (skip copy-pasting passwords and instead generate the configuration in the frontend)
- **OpenID Connect** support (with option to disable password login)
- Group-based **sharing**
- Partial [RFC 7809](https://datatracker.ietf.org/doc/html/rfc7809) support. RustiCal will accept timezones by reference and handle omitted timezones in objects.
## Getting Started
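To make the RFC 7809 bullet added above concrete: a client may upload an object whose date-times reference a timezone purely by TZID, without embedding a VTIMEZONE definition. A minimal, made-up example of such an object (illustrative only, not taken from RustiCal's tests), wrapped in a small Rust snippet:

// Illustrative only: an RFC 7809 "timezone by reference" object. DTSTART and
// DTEND name Europe/Berlin via TZID, but no VTIMEZONE component is embedded;
// the server is expected to resolve the definition from its own database.
const RFC7809_OBJECT: &str = concat!(
    "BEGIN:VCALENDAR\r\n",
    "VERSION:2.0\r\n",
    "PRODID:-//example//demo//EN\r\n",
    "BEGIN:VEVENT\r\n",
    "UID:rfc7809-demo\r\n",
    "DTSTAMP:20260101T000000Z\r\n",
    "DTSTART;TZID=Europe/Berlin:20260301T090000\r\n",
    "DTEND;TZID=Europe/Berlin:20260301T100000\r\n",
    "SUMMARY:Timezone by reference\r\n",
    "END:VEVENT\r\n",
    "END:VCALENDAR\r\n",
);

fn main() {
    // Valid for an RFC 7809-capable server despite the missing VTIMEZONE.
    assert!(!RFC7809_OBJECT.contains("BEGIN:VTIMEZONE"));
    println!("{RFC7809_OBJECT}");
}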

View File

@@ -34,7 +34,7 @@ rustical_store.workspace = true
chrono.workspace = true
chrono-tz.workspace = true
sha2.workspace = true
ical.workspace = true
caldata.workspace = true
percent-encoding.workspace = true
rustical_xml.workspace = true
uuid.workspace = true

View File

@@ -3,15 +3,13 @@ use crate::calendar::CalendarResourceService;
use axum::body::Body;
use axum::extract::State;
use axum::{extract::Path, response::Response};
use caldata::component::IcalCalendar;
use caldata::generator::Emitter;
use caldata::parser::ContentLine;
use headers::{ContentType, HeaderMapExt};
use http::{HeaderValue, Method, StatusCode, header};
use ical::builder::calendar::IcalCalendarBuilder;
use ical::generator::Emitter;
use ical::property::Property;
use percent_encoding::{CONTROLS, utf8_percent_encode};
use rustical_ical::{CalendarObjectComponent, EventObject};
use rustical_store::{CalendarStore, SubscriptionStore, auth::Principal};
use std::collections::HashMap;
use std::str::FromStr;
use tracing::instrument;
@@ -33,60 +31,45 @@ pub async fn route_get<C: CalendarStore, S: SubscriptionStore>(
return Err(crate::Error::Unauthorized);
}
let mut vtimezones = HashMap::new();
let objects = cal_store.get_objects(&principal, &calendar_id).await?;
let objects = cal_store
.get_objects(&principal, &calendar_id)
.await?
.into_iter()
.map(|(_, object)| object.into())
.collect();
let mut props = vec![];
let mut ical_calendar_builder = IcalCalendarBuilder::version("2.0")
.gregorian()
.prodid("RustiCal");
if let Some(displayname) = calendar.meta.displayname {
ical_calendar_builder = ical_calendar_builder.set(Property {
props.push(ContentLine {
name: "X-WR-CALNAME".to_owned(),
value: Some(displayname),
params: vec![],
params: vec![].into(),
});
}
if let Some(description) = calendar.meta.description {
ical_calendar_builder = ical_calendar_builder.set(Property {
props.push(ContentLine {
name: "X-WR-CALDESC".to_owned(),
value: Some(description),
params: vec![],
params: vec![].into(),
});
}
if let Some(color) = calendar.meta.color {
props.push(ContentLine {
name: "X-WR-CALCOLOR".to_owned(),
value: Some(color),
params: vec![].into(),
});
}
if let Some(timezone_id) = calendar.timezone_id {
ical_calendar_builder = ical_calendar_builder.set(Property {
props.push(ContentLine {
name: "X-WR-TIMEZONE".to_owned(),
value: Some(timezone_id),
params: vec![],
params: vec![].into(),
});
}
for object in &objects {
vtimezones.extend(object.get_vtimezones());
match object.get_data() {
CalendarObjectComponent::Event(EventObject { event, .. }, overrides) => {
ical_calendar_builder = ical_calendar_builder
.add_event(event.clone())
.add_events(overrides.iter().map(|ev| ev.event.clone()));
}
CalendarObjectComponent::Todo(todo, overrides) => {
ical_calendar_builder = ical_calendar_builder
.add_todo(todo.clone())
.add_todos(overrides.iter().cloned());
}
CalendarObjectComponent::Journal(journal, overrides) => {
ical_calendar_builder = ical_calendar_builder
.add_journal(journal.clone())
.add_journals(overrides.iter().cloned());
}
}
}
ical_calendar_builder = ical_calendar_builder.add_timezones(vtimezones.into_values().cloned());
let ical_calendar = ical_calendar_builder
.build()
.map_err(|parser_error| Error::IcalError(parser_error.into()))?;
let export_calendar = IcalCalendar::from_objects("RustiCal Export".to_owned(), objects, props);
let mut resp = Response::builder().status(StatusCode::OK);
let hdrs = resp.headers_mut().unwrap();
@@ -104,6 +87,6 @@ pub async fn route_get<C: CalendarStore, S: SubscriptionStore>(
if matches!(method, Method::HEAD) {
Ok(resp.body(Body::empty()).unwrap())
} else {
Ok(resp.body(Body::new(ical_calendar.generate())).unwrap())
Ok(resp.body(Body::new(export_calendar.generate())).unwrap())
}
}
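For orientation, the ContentLine props assembled above become X-WR-* header lines at the top of the exported VCALENDAR. A rough sketch of that header with made-up values and an assumed ordering (the real serialization comes from caldata's emitter):

// Rough sketch only: how the calendar metadata collected above could surface
// in the exported stream. Values and ordering are invented for illustration.
const EXAMPLE_EXPORT_HEAD: &str = concat!(
    "BEGIN:VCALENDAR\r\n",
    "VERSION:2.0\r\n",
    "X-WR-CALNAME:Personal\r\n",
    "X-WR-CALDESC:My personal calendar\r\n",
    "X-WR-CALCOLOR:#FF2968\r\n",
    "X-WR-TIMEZONE:Europe/Berlin\r\n",
    // ...followed by the collected VTIMEZONEs and calendar objects.
);

fn main() {
    assert!(EXAMPLE_EXPORT_HEAD.starts_with("BEGIN:VCALENDAR"));
}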

View File

@@ -4,17 +4,14 @@ use axum::{
extract::{Path, State},
response::{IntoResponse, Response},
};
use caldata::component::{Component, ComponentMut};
use caldata::{IcalParser, parser::ParserOptions};
use http::StatusCode;
use ical::{
generator::Emitter,
parser::{Component, ComponentMut},
};
use rustical_dav::header::Overwrite;
use rustical_ical::{CalendarObject, CalendarObjectType};
use rustical_ical::CalendarObjectType;
use rustical_store::{
Calendar, CalendarMetadata, CalendarStore, SubscriptionStore, auth::Principal,
};
use std::io::BufReader;
use tracing::instrument;
#[instrument(skip(resource_service))]
@@ -29,18 +26,11 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
return Err(Error::Unauthorized);
}
let mut parser = ical::IcalParser::new(BufReader::new(body.as_bytes()));
let mut cal = parser
.next()
.expect("input must contain calendar")
.unwrap()
.mutable();
if parser.next().is_some() {
return Err(rustical_ical::Error::InvalidData(
"multiple calendars, only one allowed".to_owned(),
)
.into());
}
let parser = IcalParser::from_slice(body.as_bytes());
let mut cal = match parser.expect_one() {
Ok(cal) => cal.mutable(),
Err(err) => return Ok((StatusCode::BAD_REQUEST, err.to_string()).into_response()),
};
// Extract calendar metadata
let displayname = cal
@@ -49,14 +39,19 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
let description = cal
.get_property("X-WR-CALDESC")
.and_then(|prop| prop.value.clone());
let color = cal
.get_property("X-WR-CALCOLOR")
.and_then(|prop| prop.value.clone());
let timezone_id = cal
.get_property("X-WR-TIMEZONE")
.and_then(|prop| prop.value.clone());
// These properties should not appear in the expanded calendar objects
cal.remove_property("X-WR-CALNAME");
cal.remove_property("X-WR-CALDESC");
cal.remove_property("X-WR-CALCOLOR");
cal.remove_property("X-WR-TIMEZONE");
let cal = cal.verify().unwrap();
let cal = cal.build(&ParserOptions::default(), None).unwrap();
// Make sure timezone is valid
if let Some(timezone_id) = timezone_id.as_ref() {
assert!(
@@ -64,8 +59,7 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
"Invalid calendar timezone id"
);
}
// Extract necessary component types
// // Extract necessary component types
let mut cal_components = vec![];
if !cal.events.is_empty() {
cal_components.push(CalendarObjectType::Event);
@@ -77,13 +71,10 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
cal_components.push(CalendarObjectType::Todo);
}
let expanded_cals = cal.expand_calendar();
// Janky way to convert between IcalCalendar and CalendarObject
let objects = expanded_cals
.into_iter()
.map(|cal| cal.generate())
.map(|ics| CalendarObject::from_ics(ics, None))
.collect::<Result<Vec<_>, _>>()?;
let objects = match cal.into_objects() {
Ok(objects) => objects.into_iter().map(Into::into).collect(),
Err(err) => return Ok((StatusCode::BAD_REQUEST, err.to_string()).into_response()),
};
let new_cal = Calendar {
principal,
id: cal_id,
@@ -91,7 +82,7 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
displayname,
order: 0,
description,
color: None,
color,
},
timezone_id,
deleted_at: None,

View File

@@ -1,10 +1,13 @@
use std::str::FromStr;
use crate::Error;
use crate::calendar::CalendarResourceService;
use crate::calendar::prop::SupportedCalendarComponentSet;
use crate::error::Precondition;
use axum::extract::{Path, State};
use axum::response::{IntoResponse, Response};
use caldata::IcalParser;
use http::{Method, StatusCode};
use ical::IcalParser;
use rustical_dav::xml::HrefElement;
use rustical_ical::CalendarObjectType;
use rustical_store::auth::Principal;
@@ -84,20 +87,33 @@ pub async fn route_mkcalendar<C: CalendarStore, S: SubscriptionStore>(
}
let timezone_id = if let Some(tzid) = request.calendar_timezone_id {
if chrono_tz::Tz::from_str(&tzid).is_err() {
return Err(Error::PreconditionFailed(Precondition::CalendarTimezone(
"Invalid timezone ID in calendar-timezone-id",
)));
}
Some(tzid)
} else if let Some(tz) = request.calendar_timezone {
// TODO: Proper error (calendar-timezone precondition)
let calendar = IcalParser::new(tz.as_bytes())
let calendar = IcalParser::from_slice(tz.as_bytes())
.next()
.ok_or_else(|| rustical_dav::Error::BadRequest("No timezone data provided".to_owned()))?
.map_err(|_| rustical_dav::Error::BadRequest("Error parsing timezone".to_owned()))?;
.ok_or(Error::PreconditionFailed(Precondition::CalendarTimezone(
"No timezone data provided",
)))?
.map_err(|_| {
Error::PreconditionFailed(Precondition::CalendarTimezone("Error parsing timezone"))
})?;
let timezone = calendar.timezones.first().ok_or_else(|| {
rustical_dav::Error::BadRequest("No timezone data provided".to_owned())
})?;
let timezone: chrono_tz::Tz = timezone.try_into().map_err(|_| {
rustical_dav::Error::BadRequest("Cannot translate VTIMEZONE into IANA TZID".to_owned())
})?;
let timezone = calendar
.vtimezones
.values()
.next()
.ok_or(Error::PreconditionFailed(Precondition::CalendarTimezone(
"No timezone data provided",
)))?;
let timezone: Option<chrono_tz::Tz> = timezone.into();
let timezone = timezone.ok_or(Error::PreconditionFailed(
Precondition::CalendarTimezone("No timezone data provided"),
))?;
Some(timezone.name().to_owned())
} else {
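The calendar-timezone-id branch above relies on chrono-tz for validation: an identifier is accepted exactly when it parses as an IANA zone. A standalone sketch of that check, assuming only the chrono-tz crate the project already depends on:

use std::str::FromStr;

fn main() {
    // Valid IANA identifiers parse...
    let tz = chrono_tz::Tz::from_str("Europe/Berlin").expect("known zone");
    // ...and the canonical name is what ends up stored as timezone_id.
    assert_eq!(tz.name(), "Europe/Berlin");
    // Anything else trips the calendar-timezone precondition.
    assert!(chrono_tz::Tz::from_str("Not/A_Zone").is_err());
}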

View File

@@ -21,7 +21,7 @@ pub async fn get_objects_calendar_multiget<C: CalendarStore>(
principal: &str,
cal_id: &str,
store: &C,
) -> Result<(Vec<CalendarObject>, Vec<String>), Error> {
) -> Result<(Vec<(String, CalendarObject)>, Vec<String>), Error> {
let mut result = vec![];
let mut not_found = vec![];
@@ -32,7 +32,7 @@ pub async fn get_objects_calendar_multiget<C: CalendarStore>(
let filename = filename.trim_start_matches('/');
if let Some(object_id) = filename.strip_suffix(".ics") {
match store.get_object(principal, cal_id, object_id, false).await {
Ok(object) => result.push(object),
Ok(object) => result.push((object_id.to_owned(), object)),
Err(rustical_store::Error::NotFound) => not_found.push(href.to_string()),
Err(err) => return Err(err.into()),
}

View File

@@ -2,8 +2,13 @@ use crate::calendar::methods::report::calendar_query::{
TimeRangeElement,
prop_filter::{PropFilterElement, PropFilterable},
};
use ical::parser::ical::component::IcalTimeZone;
use rustical_ical::{CalendarObject, CalendarObjectComponent, CalendarObjectType};
use caldata::{
component::{
CalendarInnerData, Component, IcalAlarm, IcalCalendarObject, IcalEvent, IcalTimeZone,
IcalTodo,
},
parser::ContentLine,
};
use rustical_xml::XmlDeserialize;
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
@@ -68,9 +73,92 @@ pub trait CompFilterable: PropFilterable + Sized {
}
}
impl CompFilterable for CalendarObject {
impl CompFilterable for CalendarInnerData {
fn get_comp_name(&self) -> &'static str {
"VCALENDAR"
match self {
Self::Event(main, _) => main.get_comp_name(),
Self::Journal(main, _) => main.get_comp_name(),
Self::Todo(main, _) => main.get_comp_name(),
}
}
fn match_time_range(&self, time_range: &TimeRangeElement) -> bool {
if let Some(start) = &time_range.start
&& let Some(last_end) = self.get_last_occurence()
&& start.to_utc() > last_end.utc()
{
return false;
}
if let Some(end) = &time_range.end
&& let Some(first_start) = self.get_first_occurence()
&& end.to_utc() < first_start.utc()
{
return false;
}
true
}
fn match_subcomponents(&self, comp_filter: &CompFilterElement) -> bool {
match self {
Self::Event(main, overrides) => std::iter::once(main)
.chain(overrides.iter())
.flat_map(IcalEvent::get_alarms)
.any(|alarm| alarm.matches(comp_filter)),
Self::Todo(main, overrides) => std::iter::once(main)
.chain(overrides.iter())
.flat_map(IcalTodo::get_alarms)
.any(|alarm| alarm.matches(comp_filter)),
// VJOURNAL has no subcomponents
Self::Journal(_, _) => comp_filter.is_not_defined.is_some(),
}
}
}
impl PropFilterable for IcalAlarm {
fn get_named_properties<'a>(&'a self, name: &'a str) -> impl Iterator<Item = &'a ContentLine> {
Component::get_named_properties(self, name)
}
}
impl CompFilterable for IcalAlarm {
fn get_comp_name(&self) -> &'static str {
Component::get_comp_name(self)
}
fn match_time_range(&self, _time_range: &TimeRangeElement) -> bool {
true
}
fn match_subcomponents(&self, comp_filter: &CompFilterElement) -> bool {
comp_filter.is_not_defined.is_some()
}
}
impl PropFilterable for CalendarInnerData {
#[allow(refining_impl_trait)]
fn get_named_properties<'a>(
&'a self,
name: &'a str,
) -> Box<dyn Iterator<Item = &'a ContentLine> + 'a> {
// TODO: If we were pedantic, we would have to do recurrence expansion first
// and take into account the overrides :(
match self {
Self::Event(main, _) => Box::new(main.get_named_properties(name)),
Self::Todo(main, _) => Box::new(main.get_named_properties(name)),
Self::Journal(main, _) => Box::new(main.get_named_properties(name)),
}
}
}
impl PropFilterable for IcalCalendarObject {
fn get_named_properties<'a>(&'a self, name: &'a str) -> impl Iterator<Item = &'a ContentLine> {
Component::get_named_properties(self, name)
}
}
impl CompFilterable for IcalCalendarObject {
fn get_comp_name(&self) -> &'static str {
Component::get_comp_name(self)
}
fn match_time_range(&self, _time_range: &TimeRangeElement) -> bool {
@@ -83,54 +171,33 @@ impl CompFilterable for CalendarObject {
.get_vtimezones()
.values()
.map(|tz| tz.matches(comp_filter))
.chain([self.get_data().matches(comp_filter)]);
.chain([self.get_inner().matches(comp_filter)]);
if comp_filter.is_not_defined.is_some() {
matches.all(|x| x)
matches.all(|x| !x)
} else {
matches.any(|x| x)
}
}
}
impl PropFilterable for IcalTimeZone {
fn get_named_properties<'a>(&'a self, name: &'a str) -> impl Iterator<Item = &'a ContentLine> {
Component::get_named_properties(self, name)
}
}
impl CompFilterable for IcalTimeZone {
fn get_comp_name(&self) -> &'static str {
"VTIMEZONE"
Component::get_comp_name(self)
}
fn match_time_range(&self, _time_range: &TimeRangeElement) -> bool {
false
}
fn match_subcomponents(&self, _comp_filter: &CompFilterElement) -> bool {
true
}
}
impl CompFilterable for CalendarObjectComponent {
fn get_comp_name(&self) -> &'static str {
CalendarObjectType::from(self).as_str()
}
fn match_time_range(&self, time_range: &TimeRangeElement) -> bool {
if let Some(start) = &time_range.start
&& let Some(last_occurence) = self.get_last_occurence().unwrap_or(None)
&& **start > last_occurence.utc()
{
return false;
}
if let Some(end) = &time_range.end
&& let Some(first_occurence) = self.get_first_occurence().unwrap_or(None)
&& **end < first_occurence.utc()
{
return false;
}
true
}
fn match_subcomponents(&self, _comp_filter: &CompFilterElement) -> bool {
// TODO: Properly check subcomponents
true
fn match_subcomponents(&self, comp_filter: &CompFilterElement) -> bool {
// VTIMEZONE has no subcomponents
comp_filter.is_not_defined.is_some()
}
}
@@ -147,6 +214,7 @@ mod tests {
const ICS: &str = r"BEGIN:VCALENDAR
CALSCALE:GREGORIAN
VERSION:2.0
PRODID:me
BEGIN:VTIMEZONE
TZID:Europe/Berlin
X-LIC-LOCATION:Europe/Berlin
@@ -166,7 +234,7 @@ END:VCALENDAR";
#[test]
fn test_comp_filter_matching() {
let object = CalendarObject::from_ics(ICS.to_string(), None).unwrap();
let object = CalendarObject::from_ics(ICS.to_string()).unwrap();
let comp_filter = CompFilterElement {
is_not_defined: Some(()),
@@ -175,7 +243,10 @@ END:VCALENDAR";
prop_filter: vec![],
comp_filter: vec![],
};
assert!(!object.matches(&comp_filter), "filter: wants no VCALENDAR");
assert!(
!object.get_inner().matches(&comp_filter),
"filter: wants no VCALENDAR"
);
let comp_filter = CompFilterElement {
is_not_defined: None,
@@ -190,7 +261,10 @@ END:VCALENDAR";
comp_filter: vec![],
}],
};
assert!(!object.matches(&comp_filter), "filter matches VTODO");
assert!(
!object.get_inner().matches(&comp_filter),
"filter matches VTODO"
);
let comp_filter = CompFilterElement {
is_not_defined: None,
@@ -205,7 +279,10 @@ END:VCALENDAR";
comp_filter: vec![],
}],
};
assert!(object.matches(&comp_filter), "filter matches VEVENT");
assert!(
object.get_inner().matches(&comp_filter),
"filter matches VEVENT"
);
let comp_filter = CompFilterElement {
is_not_defined: None,
@@ -252,13 +329,13 @@ END:VCALENDAR";
}],
};
assert!(
object.matches(&comp_filter),
object.get_inner().matches(&comp_filter),
"Some prop filters on VCALENDAR and VEVENT"
);
}
#[test]
fn test_comp_filter_time_range() {
let object = CalendarObject::from_ics(ICS.to_string(), None).unwrap();
let object = CalendarObject::from_ics(ICS.to_string()).unwrap();
let comp_filter = CompFilterElement {
is_not_defined: None,
@@ -281,7 +358,7 @@ END:VCALENDAR";
}],
};
assert!(
object.matches(&comp_filter),
object.get_inner().matches(&comp_filter),
"event should lie in time range"
);
@@ -306,14 +383,14 @@ END:VCALENDAR";
}],
};
assert!(
!object.matches(&comp_filter),
!object.get_inner().matches(&comp_filter),
"event should not lie in time range"
);
}
#[test]
fn test_match_timezone() {
let object = CalendarObject::from_ics(ICS.to_string(), None).unwrap();
let object = CalendarObject::from_ics(ICS.to_string()).unwrap();
let comp_filter = CompFilterElement {
is_not_defined: None,
@@ -340,7 +417,7 @@ END:VCALENDAR";
}],
};
assert!(
object.matches(&comp_filter),
object.get_inner().matches(&comp_filter),
"Timezone should be Europe/Berlin"
);
}
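The match_time_range implementations above all boil down to the same interval test: a component fails a CalDAV time-range filter only if the range starts after its last occurrence or ends before its first. A self-contained sketch of that rule using plain chrono types (a hypothetical helper, not the caldata API):

use chrono::{DateTime, TimeZone, Utc};

// A component matches a time-range filter unless the range starts after its
// last occurrence or ends before its first occurrence; open bounds always pass.
fn matches_time_range(
    range_start: Option<DateTime<Utc>>,
    range_end: Option<DateTime<Utc>>,
    first_occurrence: Option<DateTime<Utc>>,
    last_occurrence: Option<DateTime<Utc>>,
) -> bool {
    if let (Some(start), Some(last)) = (range_start, last_occurrence)
        && start > last
    {
        return false;
    }
    if let (Some(end), Some(first)) = (range_end, first_occurrence)
        && end < first
    {
        return false;
    }
    true
}

fn main() {
    let first = Some(Utc.with_ymd_and_hms(2026, 1, 1, 9, 0, 0).unwrap());
    let last = Some(Utc.with_ymd_and_hms(2026, 1, 1, 10, 0, 0).unwrap());
    // A range covering the occurrence matches...
    assert!(matches_time_range(
        Some(Utc.with_ymd_and_hms(2026, 1, 1, 0, 0, 0).unwrap()),
        Some(Utc.with_ymd_and_hms(2026, 1, 2, 0, 0, 0).unwrap()),
        first,
        last,
    ));
    // ...a range ending before the first occurrence does not.
    assert!(!matches_time_range(
        None,
        Some(Utc.with_ymd_and_hms(2025, 12, 31, 0, 0, 0).unwrap()),
        first,
        last,
    ));
}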

View File

@@ -1,8 +1,8 @@
use super::comp_filter::{CompFilterElement, CompFilterable};
use crate::calendar_object::CalendarObjectPropWrapperName;
use ical::property::Property;
use caldata::{component::IcalCalendarObject, parser::ContentLine};
use rustical_dav::xml::{PropfindType, TextMatchElement};
use rustical_ical::{CalendarObject, UtcDateTime};
use rustical_ical::UtcDateTime;
use rustical_store::calendar_store::CalendarQuery;
use rustical_xml::{XmlDeserialize, XmlRootTag};
@@ -30,8 +30,8 @@ pub struct ParamFilterElement {
impl ParamFilterElement {
#[must_use]
pub fn match_property(&self, prop: &Property) -> bool {
let Some(param) = prop.get_param(&self.name) else {
pub fn match_property(&self, prop: &ContentLine) -> bool {
let Some(param) = prop.params.get_param(&self.name) else {
return self.is_not_defined.is_some();
};
if self.is_not_defined.is_some() {
@@ -57,7 +57,7 @@ pub struct FilterElement {
impl FilterElement {
#[must_use]
pub fn matches(&self, cal_object: &CalendarObject) -> bool {
pub fn matches(&self, cal_object: &IcalCalendarObject) -> bool {
cal_object.matches(&self.comp_filter)
}
}

View File

@@ -11,19 +11,19 @@ mod tests;
pub use comp_filter::{CompFilterElement, CompFilterable};
pub use elements::*;
#[allow(unused_imports)]
pub use prop_filter::{PropFilterElement, PropFilterable};
pub use prop_filter::PropFilterElement;
pub async fn get_objects_calendar_query<C: CalendarStore>(
cal_query: &CalendarQueryRequest,
principal: &str,
cal_id: &str,
store: &C,
) -> Result<Vec<CalendarObject>, Error> {
) -> Result<Vec<(String, CalendarObject)>, Error> {
let mut objects = store
.calendar_query(principal, cal_id, cal_query.into())
.await?;
if let Some(filter) = &cal_query.filter {
objects.retain(|object| filter.matches(object));
objects.retain(|(_id, object)| filter.matches(object.get_inner()));
}
Ok(objects)
}

View File

@@ -1,16 +1,8 @@
use super::{ParamFilterElement, TimeRangeElement};
use ical::{
generator::{IcalCalendar, IcalEvent},
parser::{
Component,
ical::component::{IcalJournal, IcalTimeZone, IcalTodo},
},
property::Property,
};
use caldata::{parser::ContentLine, types::CalDateTime};
use rustical_dav::xml::TextMatchElement;
use rustical_ical::{CalDateTime, CalendarObject, CalendarObjectComponent, UtcDateTime};
use rustical_ical::UtcDateTime;
use rustical_xml::XmlDeserialize;
use std::collections::HashMap;
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
@@ -29,12 +21,16 @@ pub struct PropFilterElement {
pub(crate) name: String,
}
pub trait PropFilterable {
fn get_named_properties<'a>(&'a self, name: &'a str) -> impl Iterator<Item = &'a ContentLine>;
}
impl PropFilterElement {
#[must_use]
pub fn match_property(&self, property: &Property) -> bool {
pub fn match_property(&self, property: &ContentLine) -> bool {
if let Some(TimeRangeElement { start, end }) = &self.time_range {
// TODO: Respect timezones
let Ok(timestamp) = CalDateTime::parse_prop(property, &HashMap::default()) else {
let Ok(timestamp) = CalDateTime::parse_prop(property, None) else {
return false;
};
let timestamp = timestamp.utc();
@@ -69,63 +65,13 @@ impl PropFilterElement {
}
pub fn match_component(&self, comp: &impl PropFilterable) -> bool {
let properties = comp.get_named_properties(&self.name);
let mut properties = comp.get_named_properties(&self.name);
if self.is_not_defined.is_some() {
return properties.is_empty();
return properties.next().is_none();
}
// The filter matches when one property instance matches
// Example where this matters: We have multiple attendees and want to match one
properties.iter().any(|prop| self.match_property(prop))
}
}
pub trait PropFilterable {
fn get_named_properties(&self, name: &str) -> Vec<&Property>;
}
impl PropFilterable for CalendarObject {
fn get_named_properties(&self, name: &str) -> Vec<&Property> {
Self::get_named_properties(self, name)
}
}
impl PropFilterable for IcalEvent {
fn get_named_properties(&self, name: &str) -> Vec<&Property> {
Component::get_named_properties(self, name)
}
}
impl PropFilterable for IcalTodo {
fn get_named_properties(&self, name: &str) -> Vec<&Property> {
Component::get_named_properties(self, name)
}
}
impl PropFilterable for IcalJournal {
fn get_named_properties(&self, name: &str) -> Vec<&Property> {
Component::get_named_properties(self, name)
}
}
impl PropFilterable for IcalCalendar {
fn get_named_properties(&self, name: &str) -> Vec<&Property> {
Component::get_named_properties(self, name)
}
}
impl PropFilterable for IcalTimeZone {
fn get_named_properties(&self, name: &str) -> Vec<&Property> {
Component::get_named_properties(self, name)
}
}
impl PropFilterable for CalendarObjectComponent {
fn get_named_properties(&self, name: &str) -> Vec<&Property> {
match self {
Self::Event(event, _) => PropFilterable::get_named_properties(&event.event, name),
Self::Todo(todo, _) => PropFilterable::get_named_properties(todo, name),
Self::Journal(journal, _) => PropFilterable::get_named_properties(journal, name),
}
properties.any(|prop| self.match_property(prop))
}
}
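The comment above about multiple attendees is why match_component now iterates over every instance of the named property: one matching instance is enough for the filter to match, while is-not-defined holds only when no instance exists at all. A minimal sketch with stand-in types (not the ContentLine API):

struct Prop {
    name: &'static str,
    value: &'static str,
}

// One matching instance of the named property suffices; `is_not_defined`
// succeeds only when the property does not occur at all.
fn prop_filter_matches(props: &[Prop], name: &str, is_not_defined: bool, wanted: &str) -> bool {
    let mut named = props.iter().filter(|p| p.name == name);
    if is_not_defined {
        return named.next().is_none();
    }
    named.any(|p| p.value == wanted)
}

fn main() {
    let props = [
        Prop { name: "ATTENDEE", value: "mailto:a@example.com" },
        Prop { name: "ATTENDEE", value: "mailto:b@example.com" },
    ];
    assert!(prop_filter_matches(&props, "ATTENDEE", false, "mailto:b@example.com"));
    assert!(!prop_filter_matches(&props, "ATTENDEE", true, ""));
    assert!(prop_filter_matches(&props, "LOCATION", true, ""));
}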

View File

@@ -77,7 +77,7 @@ const FILTER_2: &str = r#"
#[case(ICS_1, FILTER_1, true)]
#[case(ICS_1, FILTER_2, false)]
fn yeet(#[case] ics: &str, #[case] filter: &str, #[case] matches: bool) {
let obj = CalendarObject::from_ics(ics.to_owned(), None).unwrap();
let obj = CalendarObject::from_ics(ics.to_owned()).unwrap();
let filter = FilterElement::parse_str(filter).unwrap();
assert_eq!(matches, filter.matches(&obj));
assert_eq!(matches, filter.matches(obj.get_inner()));
}

View File

@@ -51,7 +51,7 @@ impl ReportRequest {
}
fn objects_response(
objects: Vec<CalendarObject>,
objects: Vec<(String, CalendarObject)>,
not_found: Vec<String>,
path: &str,
principal: &str,
@@ -60,11 +60,12 @@ fn objects_response(
prop: &PropfindType<CalendarObjectPropWrapperName>,
) -> Result<MultistatusElement<CalendarObjectPropWrapper, String>, Error> {
let mut responses = Vec::new();
for object in objects {
let path = format!("{}/{}.ics", path, object.get_id());
for (object_id, object) in objects {
let path = format!("{path}/{object_id}.ics");
responses.push(
CalendarObjectResource {
object,
object_id,
principal: principal.to_owned(),
}
.propfind(&path, prop, None, puri, user)?,

View File

@@ -32,11 +32,12 @@ pub async fn handle_sync_collection<C: CalendarStore>(
.await?;
let mut responses = Vec::new();
for object in new_objects {
let path = format!("{}/{}.ics", path, object.get_id());
for (object_id, object) in new_objects {
let path = format!("{}/{}.ics", path, &object_id);
responses.push(
CalendarObjectResource {
object,
object_id,
principal: principal.to_owned(),
}
.propfind(&path, &sync_collection.prop, None, puri, user)?,

View File

@@ -1,9 +1,10 @@
use super::prop::{SupportedCalendarComponentSet, SupportedCalendarData};
use crate::Error;
use crate::calendar::prop::{ReportMethod, SupportedCollationSet};
use caldata::IcalParser;
use caldata::types::CalDateTime;
use chrono::{DateTime, Utc};
use derive_more::derive::{From, Into};
use ical::IcalParser;
use rustical_dav::extensions::{
CommonPropertiesExtension, CommonPropertiesProp, SyncTokenExtension, SyncTokenExtensionProp,
};
@@ -11,7 +12,6 @@ use rustical_dav::privileges::UserPrivilegeSet;
use rustical_dav::resource::{PrincipalUri, Resource, ResourceName};
use rustical_dav::xml::{HrefElement, Resourcetype, ResourcetypeInner, SupportedReportSet};
use rustical_dav_push::{DavPushExtension, DavPushExtensionProp};
use rustical_ical::CalDateTime;
use rustical_store::Calendar;
use rustical_store::auth::Principal;
use rustical_xml::{EnumVariants, PropName};
@@ -202,7 +202,7 @@ impl Resource for CalendarResource {
CalendarProp::CalendarTimezone(timezone) => {
if let Some(tz) = timezone {
// TODO: Proper error (calendar-timezone precondition)
let calendar = IcalParser::new(tz.as_bytes())
let calendar = IcalParser::from_slice(tz.as_bytes())
.next()
.ok_or_else(|| {
rustical_dav::Error::BadRequest(
@@ -215,13 +215,13 @@ impl Resource for CalendarResource {
)
})?;
let timezone = calendar.timezones.first().ok_or_else(|| {
let timezone = calendar.vtimezones.values().next().ok_or_else(|| {
rustical_dav::Error::BadRequest("No timezone data provided".to_owned())
})?;
let timezone: chrono_tz::Tz = timezone.try_into().map_err(|_| {
let timezone: Option<chrono_tz::Tz> = timezone.into();
let timezone = timezone.ok_or_else(|| {
rustical_dav::Error::BadRequest("No timezone data provided".to_owned())
})?;
self.cal.timezone_id = Some(timezone.name().to_owned());
}
Ok(())

View File

@@ -6,7 +6,7 @@ use crate::calendar::methods::report::route_report_calendar;
use crate::calendar::resource::CalendarResource;
use crate::calendar_object::CalendarObjectResourceService;
use crate::calendar_object::resource::CalendarObjectResource;
use crate::{CalDavPrincipalUri, Error};
use crate::{CalDavConfig, CalDavPrincipalUri, Error};
use async_trait::async_trait;
use axum::Router;
use axum::extract::Request;
@@ -23,6 +23,7 @@ use tower::Service;
pub struct CalendarResourceService<C: CalendarStore, S: SubscriptionStore> {
pub(crate) cal_store: Arc<C>,
pub(crate) sub_store: Arc<S>,
pub(crate) config: Arc<CalDavConfig>,
}
impl<C: CalendarStore, S: SubscriptionStore> Clone for CalendarResourceService<C, S> {
@@ -30,15 +31,17 @@ impl<C: CalendarStore, S: SubscriptionStore> Clone for CalendarResourceService<C
Self {
cal_store: self.cal_store.clone(),
sub_store: self.sub_store.clone(),
config: self.config.clone(),
}
}
}
impl<C: CalendarStore, S: SubscriptionStore> CalendarResourceService<C, S> {
pub const fn new(cal_store: Arc<C>, sub_store: Arc<S>) -> Self {
pub const fn new(cal_store: Arc<C>, sub_store: Arc<S>, config: Arc<CalDavConfig>) -> Self {
Self {
cal_store,
sub_store,
config,
}
}
}
@@ -78,8 +81,9 @@ impl<C: CalendarStore, S: SubscriptionStore> ResourceService for CalendarResourc
.get_objects(principal, cal_id)
.await?
.into_iter()
.map(|object| CalendarObjectResource {
.map(|(object_id, object)| CalendarObjectResource {
object,
object_id,
principal: principal.to_owned(),
})
.collect())
@@ -91,7 +95,7 @@ impl<C: CalendarStore, S: SubscriptionStore> ResourceService for CalendarResourc
file: Self::Resource,
) -> Result<(), Self::Error> {
self.cal_store
.update_calendar(principal.to_owned(), cal_id.to_owned(), file.into())
.update_calendar(principal, cal_id, file.into())
.await?;
Ok(())
}
@@ -111,7 +115,8 @@ impl<C: CalendarStore, S: SubscriptionStore> ResourceService for CalendarResourc
Router::new()
.nest(
"/{object_id}",
CalendarObjectResourceService::new(self.cal_store.clone()).axum_router(),
CalendarObjectResourceService::new(self.cal_store.clone(), self.config.clone())
.axum_router(),
)
.route_service("/", self.axum_service())
}

View File

@@ -12,7 +12,7 @@ PRODID:-//github.com/lennart-k/vzic-rs//RustiCal Calendar server//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
LAST-MODIFIED:20250723T190331Z
LAST-MODIFIED:20260124T185655Z
X-LIC-LOCATION:Europe/Berlin
X-PROLEPTIC-TZNAME:LMT
BEGIN:STANDARD

View File

@@ -5,13 +5,14 @@ use axum::body::Body;
use axum::extract::{Path, State};
use axum::response::{IntoResponse, Response};
use axum_extra::TypedHeader;
use caldata::parser::ParserOptions;
use headers::{ContentType, ETag, HeaderMapExt, IfNoneMatch};
use http::{HeaderMap, HeaderValue, Method, StatusCode};
use rustical_ical::CalendarObject;
use rustical_store::CalendarStore;
use rustical_store::auth::Principal;
use std::str::FromStr;
use tracing::{debug, instrument};
use tracing::{instrument, warn};
#[instrument(skip(cal_store))]
pub async fn get_event<C: CalendarStore>(
@@ -20,7 +21,10 @@ pub async fn get_event<C: CalendarStore>(
calendar_id,
object_id,
}): Path<CalendarObjectPathComponents>,
State(CalendarObjectResourceService { cal_store }): State<CalendarObjectResourceService<C>>,
State(CalendarObjectResourceService {
cal_store,
config: _,
}): State<CalendarObjectResourceService<C>>,
user: Principal,
method: Method,
) -> Result<Response, Error> {
@@ -57,7 +61,9 @@ pub async fn put_event<C: CalendarStore>(
calendar_id,
object_id,
}): Path<CalendarObjectPathComponents>,
State(CalendarObjectResourceService { cal_store }): State<CalendarObjectResourceService<C>>,
State(CalendarObjectResourceService { cal_store, config }): State<
CalendarObjectResourceService<C>,
>,
user: Principal,
mut if_none_match: Option<TypedHeader<IfNoneMatch>>,
header_map: HeaderMap,
@@ -94,13 +100,22 @@ pub async fn put_event<C: CalendarStore>(
true
};
let Ok(object) = CalendarObject::from_ics(body.clone(), Some(object_id)) else {
debug!("invalid calendar data:\n{body}");
let object = match CalendarObject::import(
&body,
Some(ParserOptions {
rfc7809: config.rfc7809,
}),
) {
Ok(object) => object,
Err(err) => {
warn!("invalid calendar data:\n{body}");
warn!("{err}");
return Err(Error::PreconditionFailed(Precondition::ValidCalendarData));
}
};
let etag = object.get_etag();
cal_store
.put_object(principal, calendar_id, object, overwrite)
.put_object(&principal, &calendar_id, &object_id, object, overwrite)
.await?;
let mut headers = HeaderMap::new();

View File

@@ -1,10 +1,9 @@
use std::borrow::Cow;
use super::prop::{
CalendarData, CalendarObjectProp, CalendarObjectPropName, CalendarObjectPropWrapper,
CalendarObjectPropWrapperName,
};
use crate::Error;
use caldata::generator::Emitter;
use derive_more::derive::{From, Into};
use rustical_dav::{
extensions::CommonPropertiesExtension,
@@ -14,16 +13,18 @@ use rustical_dav::{
};
use rustical_ical::CalendarObject;
use rustical_store::auth::Principal;
use std::borrow::Cow;
#[derive(Clone, From, Into)]
pub struct CalendarObjectResource {
pub object: CalendarObject,
pub object_id: String,
pub principal: String,
}
impl ResourceName for CalendarObjectResource {
fn get_name(&self) -> Cow<'_, str> {
Cow::from(format!("{}.ics", self.object.get_id()))
Cow::from(format!("{}.ics", self.object_id))
}
}
@@ -53,14 +54,18 @@ impl Resource for CalendarObjectResource {
CalendarObjectProp::Getetag(self.object.get_etag())
}
CalendarObjectPropName::CalendarData(CalendarData { expand, .. }) => {
CalendarObjectProp::CalendarData(if let Some(expand) = expand.as_ref() {
self.object.expand_recurrence(
CalendarObjectProp::CalendarData(expand.as_ref().map_or_else(
|| self.object.get_ics().to_owned(),
|expand| {
self.object
.get_inner()
.expand_recurrence(
Some(expand.start.to_utc()),
Some(expand.end.to_utc()),
)?
} else {
self.object.get_ics().to_owned()
})
)
.generate()
},
))
}
CalendarObjectPropName::Getcontenttype => {
CalendarObjectProp::Getcontenttype("text/calendar;charset=utf-8")

View File

@@ -1,5 +1,5 @@
use crate::{
CalDavPrincipalUri, Error,
CalDavConfig, CalDavPrincipalUri, Error,
calendar_object::{
methods::{get_event, put_event},
resource::CalendarObjectResource,
@@ -24,19 +24,21 @@ pub struct CalendarObjectPathComponents {
pub struct CalendarObjectResourceService<C: CalendarStore> {
pub(crate) cal_store: Arc<C>,
pub(crate) config: Arc<CalDavConfig>,
}
impl<C: CalendarStore> Clone for CalendarObjectResourceService<C> {
fn clone(&self) -> Self {
Self {
cal_store: self.cal_store.clone(),
config: self.config.clone(),
}
}
}
impl<C: CalendarStore> CalendarObjectResourceService<C> {
pub const fn new(cal_store: Arc<C>) -> Self {
Self { cal_store }
pub const fn new(cal_store: Arc<C>, config: Arc<CalDavConfig>) -> Self {
Self { cal_store, config }
}
}
@@ -66,6 +68,7 @@ impl<C: CalendarStore> ResourceService for CalendarObjectResourceService<C> {
.await?;
Ok(CalendarObjectResource {
object,
object_id: object_id.to_owned(),
principal: principal.to_owned(),
})
}

View File

@@ -12,6 +12,9 @@ pub enum Precondition {
#[error("valid-calendar-data")]
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
ValidCalendarData,
#[error("calendar-timezone")]
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
CalendarTimezone(&'static str),
}
impl IntoResponse for Precondition {
@@ -23,7 +26,7 @@ impl IntoResponse for Precondition {
if let Err(err) = error.serialize_root(&mut writer) {
return rustical_dav::Error::from(err).into_response();
}
let mut res = Response::builder().status(StatusCode::PRECONDITION_FAILED);
let mut res = Response::builder().status(StatusCode::FORBIDDEN);
res.headers_mut().unwrap().typed_insert(ContentType::xml());
res.body(Body::from(output)).unwrap()
}
@@ -52,9 +55,6 @@ pub enum Error {
#[error(transparent)]
XmlDecodeError(#[from] rustical_xml::XmlError),
#[error(transparent)]
IcalError(#[from] rustical_ical::Error),
#[error(transparent)]
PreconditionFailed(Precondition),
}
@@ -75,18 +75,20 @@ impl Error {
Self::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
Self::ChronoParseError(_) | Self::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
Self::NotFound => StatusCode::NOT_FOUND,
Self::IcalError(err) => err.status_code(),
Self::PreconditionFailed(_err) => StatusCode::PRECONDITION_FAILED,
// The correct status code for a failed precondition is not 412 Precondition Failed
// but 403 Forbidden (or 409 Conflict):
// https://datatracker.ietf.org/doc/html/rfc4791#section-1.3
Self::PreconditionFailed(_err) => StatusCode::FORBIDDEN,
}
}
}
impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
if matches!(
self.status_code(),
StatusCode::INTERNAL_SERVER_ERROR | StatusCode::PRECONDITION_FAILED
) {
if let Self::PreconditionFailed(precondition) = self {
return precondition.into_response();
}
if matches!(self.status_code(), StatusCode::INTERNAL_SERVER_ERROR) {
error!("{self}");
}
(self.status_code(), self.to_string()).into_response()
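Taken together, the hunks above mean a failed CalDAV precondition now yields 403 Forbidden with an XML DAV:error body naming the precondition, rather than a bare 412. Roughly, a rejected PUT produces a body like the following (element names per RFC 4791; rustical's exact serialization may differ):

```rust
// Illustrative only: the shape of the error body paired with "403 Forbidden"
// when the valid-calendar-data precondition fails.
const VALID_CALENDAR_DATA_ERROR: &str = r#"<?xml version="1.0" encoding="utf-8"?>
<error xmlns="DAV:" xmlns:C="urn:ietf:params:xml:ns:caldav">
  <C:valid-calendar-data/>
</error>"#;
```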

View File

@@ -8,6 +8,7 @@ use rustical_dav::resources::RootResourceService;
use rustical_store::auth::middleware::AuthenticationLayer;
use rustical_store::auth::{AuthenticationProvider, Principal};
use rustical_store::{CalendarStore, SubscriptionStore};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
pub mod calendar;
@@ -34,6 +35,7 @@ pub fn caldav_router<AP: AuthenticationProvider, C: CalendarStore, S: Subscripti
store: Arc<C>,
subscription_store: Arc<S>,
simplified_home_set: bool,
config: Arc<CalDavConfig>,
) -> Router {
Router::new().nest(
prefix,
@@ -42,9 +44,27 @@ pub fn caldav_router<AP: AuthenticationProvider, C: CalendarStore, S: Subscripti
sub_store: subscription_store,
cal_store: store,
simplified_home_set,
config,
})
.axum_router()
.layer(AuthenticationLayer::new(auth_provider))
.layer(Extension(CalDavPrincipalUri(prefix))),
)
}
const fn default_true() -> bool {
true
}
#[derive(Debug, Clone, Deserialize, Serialize)]
#[serde(deny_unknown_fields, default)]
pub struct CalDavConfig {
#[serde(default = "default_true")]
rfc7809: bool,
}
impl Default for CalDavConfig {
fn default() -> Self {
Self { rfc7809: true }
}
}
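A self-contained mirror of `CalDavConfig` illustrates the resulting serde behaviour: `rfc7809` defaults to true when the key (or the whole section) is omitted, and unknown keys are rejected. The `toml` crate is used here purely for demonstration; how rustical actually loads its configuration is not part of this diff.

```rust
// Stand-alone mirror of the config struct above, for illustration only.
use serde::Deserialize;

const fn default_true() -> bool {
    true
}

#[derive(Debug, Deserialize)]
#[serde(deny_unknown_fields, default)]
struct CalDavConfig {
    #[serde(default = "default_true")]
    rfc7809: bool,
}

impl Default for CalDavConfig {
    fn default() -> Self {
        Self { rfc7809: true }
    }
}

fn main() {
    // Omitted key -> default true
    let cfg: CalDavConfig = toml::from_str("").unwrap();
    assert!(cfg.rfc7809);
    // Explicitly disabled
    let cfg: CalDavConfig = toml::from_str("rfc7809 = false").unwrap();
    assert!(!cfg.rfc7809);
    // Unknown keys are rejected because of deny_unknown_fields
    assert!(toml::from_str::<CalDavConfig>("rfc_7809 = false").is_err());
}
```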

View File

@@ -1,7 +1,7 @@
use crate::calendar::CalendarResourceService;
use crate::calendar::resource::CalendarResource;
use crate::principal::PrincipalResource;
use crate::{CalDavPrincipalUri, Error};
use crate::{CalDavConfig, CalDavPrincipalUri, Error};
use async_trait::async_trait;
use axum::Router;
use rustical_dav::resource::{AxumMethods, ResourceService};
@@ -20,6 +20,7 @@ pub struct PrincipalResourceService<
pub(crate) cal_store: Arc<CS>,
// If true, only return the principal as the calendar home set; otherwise also include groups
pub(crate) simplified_home_set: bool,
pub(crate) config: Arc<CalDavConfig>,
}
impl<AP: AuthenticationProvider, S: SubscriptionStore, CS: CalendarStore> Clone
@@ -31,6 +32,7 @@ impl<AP: AuthenticationProvider, S: SubscriptionStore, CS: CalendarStore> Clone
sub_store: self.sub_store.clone(),
cal_store: self.cal_store.clone(),
simplified_home_set: self.simplified_home_set,
config: self.config.clone(),
}
}
}
@@ -84,7 +86,11 @@ impl<AP: AuthenticationProvider, S: SubscriptionStore, CS: CalendarStore> Resour
Router::new()
.nest(
"/{calendar_id}",
CalendarResourceService::new(self.cal_store.clone(), self.sub_store.clone())
CalendarResourceService::new(
self.cal_store.clone(),
self.sub_store.clone(),
self.config.clone(),
)
.axum_router(),
)
.route_service("/", self.axum_service())

View File

@@ -27,6 +27,7 @@ async fn test_principal_resource(
sub_store: Arc::new(sub_store),
auth_provider: Arc::new(auth_provider),
simplified_home_set: false,
config: Default::default(),
};
// We don't have any calendars here

View File

@@ -32,7 +32,7 @@ rustical_ical.workspace = true
http.workspace = true
tower-http.workspace = true
percent-encoding.workspace = true
ical.workspace = true
caldata.workspace = true
strum.workspace = true
strum_macros.workspace = true
rstest.workspace = true

View File

@@ -103,10 +103,13 @@ pub async fn put_object<AS: AddressbookStore>(
true
};
let object = AddressObject::from_vcf(object_id, body)?;
let object = match AddressObject::from_vcf(body) {
Ok(object) => object,
Err(err) => return Ok((StatusCode::BAD_REQUEST, err.to_string()).into_response()),
};
let etag = object.get_etag();
addr_store
.put_object(principal, addressbook_id, object, overwrite)
.put_object(&principal, &addressbook_id, &object_id, object, overwrite)
.await?;
let mut headers = HeaderMap::new();

View File

@@ -7,6 +7,7 @@ use crate::{
AddressObjectPropWrapperName,
},
};
use caldata::property::VcardFNProperty;
use derive_more::derive::{From, Into};
use rustical_dav::{
extensions::CommonPropertiesExtension,
@@ -21,11 +22,12 @@ use rustical_store::auth::Principal;
pub struct AddressObjectResource {
pub object: AddressObject,
pub principal: String,
pub object_id: String,
}
impl ResourceName for AddressObjectResource {
fn get_name(&self) -> Cow<'_, str> {
Cow::from(format!("{}.vcf", self.object.get_id()))
Cow::from(format!("{}.vcf", self.object_id))
}
}
@@ -69,7 +71,11 @@ impl Resource for AddressObjectResource {
}
fn get_displayname(&self) -> Option<&str> {
self.object.get_full_name()
self.object
.get_vcard()
.full_name
.first()
.map(|VcardFNProperty(name, _)| name.as_str())
}
fn get_owner(&self) -> Option<&str> {

View File

@@ -57,6 +57,7 @@ impl<AS: AddressbookStore> ResourceService for AddressObjectResourceService<AS>
.await?;
Ok(AddressObjectResource {
object,
object_id: object_id.to_owned(),
principal: principal.to_owned(),
})
}

View File

@@ -9,7 +9,6 @@ use http::{HeaderValue, Method, StatusCode, header};
use percent_encoding::{CONTROLS, utf8_percent_encode};
use rustical_dav::privileges::UserPrivilege;
use rustical_dav::resource::Resource;
use rustical_ical::AddressObject;
use rustical_store::auth::Principal;
use rustical_store::{AddressbookStore, SubscriptionStore};
use std::str::FromStr;
@@ -40,7 +39,7 @@ pub async fn route_get<AS: AddressbookStore, S: SubscriptionStore>(
let objects = addr_store.get_objects(&principal, &addressbook_id).await?;
let vcf = objects
.iter()
.map(AddressObject::get_vcf)
.map(|(_id, obj)| obj.get_vcf())
.collect::<Vec<_>>()
.join("\r\n");

View File

@@ -1,16 +1,15 @@
use std::io::BufReader;
use crate::Error;
use crate::addressbook::AddressbookResourceService;
use axum::{
extract::{Path, State},
response::{IntoResponse, Response},
};
use http::StatusCode;
use ical::{
parser::{Component, ComponentMut, vcard},
property::Property,
use caldata::{
VcardParser,
component::{Component, ComponentMut},
parser::{ContentLine, ParserOptions},
};
use http::StatusCode;
use rustical_store::{Addressbook, AddressbookStore, SubscriptionStore, auth::Principal};
use tracing::instrument;
@@ -25,7 +24,7 @@ pub async fn route_import<AS: AddressbookStore, S: SubscriptionStore>(
return Err(Error::Unauthorized);
}
let parser = vcard::VcardParser::new(BufReader::new(body.as_bytes()));
let parser = VcardParser::from_slice(body.as_bytes());
let mut objects = vec![];
for res in parser {
@@ -33,15 +32,16 @@ pub async fn route_import<AS: AddressbookStore, S: SubscriptionStore>(
let uid = card.get_uid();
if uid.is_none() {
let mut card_mut = card.mutable();
card_mut.set_property(Property {
card_mut.add_content_line(ContentLine {
name: "UID".to_owned(),
value: Some(uuid::Uuid::new_v4().to_string()),
params: vec![],
params: vec![].into(),
});
card = card_mut.verify().unwrap();
card = card_mut.build(&ParserOptions::default(), None).unwrap();
}
objects.push(card.try_into().unwrap());
// TODO: Make nicer
let uid = card.get_uid().unwrap();
objects.push((uid.to_owned(), card.into()));
}
if objects.is_empty() {
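The import loop above guarantees that every imported card ends up with a UID, generating one via `uuid::Uuid::new_v4()` and adding it as a content line before rebuilding the card with the default parser options. The real code goes through caldata's `ComponentMut`/`ContentLine` API; the text-level sketch below only restates the rule itself and assumes the `uuid` crate with its `v4` feature.

```rust
use uuid::Uuid;

/// Text-level illustration only: ensure a raw vCard has a UID property,
/// inserting a freshly generated one right before END:VCARD if it is missing.
fn ensure_uid(vcf: &str) -> String {
    let has_uid = vcf.lines().any(|line| {
        let upper = line.to_ascii_uppercase();
        upper.starts_with("UID:") || upper.starts_with("UID;")
    });
    if has_uid {
        return vcf.to_owned();
    }
    vcf.replacen(
        "END:VCARD",
        &format!("UID:{}\r\nEND:VCARD", Uuid::new_v4()),
        1,
    )
}

fn main() {
    let card = "BEGIN:VCARD\r\nVERSION:4.0\r\nFN:Jane Doe\r\nEND:VCARD\r\n";
    assert!(ensure_uid(card).contains("UID:"));
}
```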

View File

@@ -29,7 +29,7 @@ pub async fn get_objects_addressbook_multiget<AS: AddressbookStore>(
principal: &str,
addressbook_id: &str,
store: &AS,
) -> Result<(Vec<AddressObject>, Vec<String>), Error> {
) -> Result<(Vec<(String, AddressObject)>, Vec<String>), Error> {
let mut result = vec![];
let mut not_found = vec![];
@@ -43,7 +43,7 @@ pub async fn get_objects_addressbook_multiget<AS: AddressbookStore>(
.get_object(principal, addressbook_id, object_id, false)
.await
{
Ok(object) => result.push(object),
Ok(object) => result.push((object_id.to_owned(), object)),
Err(rustical_store::Error::NotFound) => not_found.push(href.to_string()),
Err(err) => return Err(err.into()),
}
@@ -74,11 +74,12 @@ pub async fn handle_addressbook_multiget<AS: AddressbookStore>(
.await?;
let mut responses = Vec::new();
for object in objects {
let path = format!("{}/{}.vcf", path, object.get_id());
for (object_id, object) in objects {
let path = format!("{path}/{object_id}.vcf");
responses.push(
AddressObjectResource {
object,
object_id,
principal: principal.to_owned(),
}
.propfind(&path, prop, None, puri, user)?,

View File

@@ -2,8 +2,8 @@ use crate::{
address_object::AddressObjectPropWrapperName,
addressbook::methods::report::addressbook_query::PropFilterElement,
};
use caldata::parser::ContentLine;
use derive_more::{From, Into};
use ical::property::Property;
use rustical_dav::xml::{PropfindType, TextMatchElement};
use rustical_ical::{AddressObject, UtcDateTime};
use rustical_xml::{ValueDeserialize, XmlDeserialize, XmlRootTag};
@@ -32,8 +32,8 @@ pub struct ParamFilterElement {
impl ParamFilterElement {
#[must_use]
pub fn match_property(&self, prop: &Property) -> bool {
let Some(param) = prop.get_param(&self.name) else {
pub fn match_property(&self, prop: &ContentLine) -> bool {
let Some(param) = prop.params.get_param(&self.name) else {
return self.is_not_defined.is_some();
};
if self.is_not_defined.is_some() {

View File

@@ -15,8 +15,8 @@ pub async fn get_objects_addressbook_query<AS: AddressbookStore>(
principal: &str,
addressbook_id: &str,
store: &AS,
) -> Result<Vec<AddressObject>, Error> {
) -> Result<Vec<(String, AddressObject)>, Error> {
let mut objects = store.get_objects(principal, addressbook_id).await?;
objects.retain(|object| addr_query.filter.matches(object));
objects.retain(|(_id, object)| addr_query.filter.matches(object));
Ok(objects)
}

View File

@@ -1,5 +1,5 @@
use super::{Allof, ParamFilterElement};
use ical::{parser::Component, property::Property};
use caldata::{component::Component, parser::ContentLine};
use rustical_dav::xml::TextMatchElement;
use rustical_ical::AddressObject;
use rustical_xml::XmlDeserialize;
@@ -31,7 +31,7 @@ pub struct PropFilterElement {
impl PropFilterElement {
#[must_use]
pub fn match_property(&self, property: &Property) -> bool {
pub fn match_property(&self, property: &ContentLine) -> bool {
if self.param_filter.is_empty() && self.text_match.is_empty() {
// Filter empty
return true;
@@ -56,22 +56,22 @@ impl PropFilterElement {
}
pub fn match_component(&self, comp: &impl PropFilterable) -> bool {
let properties = comp.get_named_properties(&self.name);
let mut properties = comp.get_named_properties(&self.name);
if self.is_not_defined.is_some() {
return properties.is_empty();
return properties.next().is_none();
}
// The filter matches when one property instance matches
properties.iter().any(|prop| self.match_property(prop))
properties.any(|prop| self.match_property(prop))
}
}
pub trait PropFilterable {
fn get_named_properties(&self, name: &str) -> Vec<&Property>;
fn get_named_properties<'a>(&'a self, name: &'a str) -> impl Iterator<Item = &'a ContentLine>;
}
impl PropFilterable for AddressObject {
fn get_named_properties(&self, name: &str) -> Vec<&Property> {
fn get_named_properties<'a>(&'a self, name: &'a str) -> impl Iterator<Item = &'a ContentLine> {
self.get_vcard().get_named_properties(name)
}
}
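With `get_named_properties` returning an iterator, `match_component` no longer collects every matching content line up front: `is-not-defined` just checks whether the iterator yields anything, and a positive filter can stop at the first matching property instance. The standalone sketch below restates that rule over plain strings rather than caldata types.

```rust
/// Generic restatement of the prop-filter rule above: with is-not-defined the
/// filter matches only when no property of that name exists; otherwise a single
/// matching property instance is enough.
fn filter_matches<'a, I>(
    mut properties: I,
    is_not_defined: bool,
    property_matches: impl Fn(&str) -> bool,
) -> bool
where
    I: Iterator<Item = &'a str>,
{
    if is_not_defined {
        return properties.next().is_none();
    }
    properties.any(|value| property_matches(value))
}

fn main() {
    let emails = ["mailto:jane@example.com", "mailto:jane@example.org"];
    assert!(filter_matches(emails.iter().copied(), false, |v| v.ends_with(".org")));
    assert!(!filter_matches(emails.iter().copied(), true, |_| true));
}
```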

View File

@@ -64,7 +64,7 @@ const FILTER_2: &str = r#"
#[case(VCF_2, FILTER_2, true)]
fn test_filter(#[case] vcf: &str, #[case] filter: &str, #[case] matches: bool) {
dbg!(vcf);
let obj = AddressObject::from_vcf(String::new(), vcf.to_owned()).unwrap();
let obj = AddressObject::from_vcf(vcf.to_owned()).unwrap();
let filter = FilterElement::parse_str(filter).unwrap();
assert_eq!(matches, filter.matches(&obj));
}

View File

@@ -55,7 +55,7 @@ impl ReportRequest {
}
fn objects_response(
objects: Vec<AddressObject>,
objects: Vec<(String, AddressObject)>,
not_found: Vec<String>,
path: &str,
principal: &str,
@@ -64,11 +64,12 @@ fn objects_response(
prop: &PropfindType<AddressObjectPropWrapperName>,
) -> Result<MultistatusElement<AddressObjectPropWrapper, String>, Error> {
let mut responses = Vec::new();
for object in objects {
let path = format!("{}/{}.vcf", path, object.get_id());
for (object_id, object) in objects {
let path = format!("{}/{}.vcf", path, &object_id);
responses.push(
AddressObjectResource {
object,
object_id,
principal: principal.to_owned(),
}
.propfind(&path, prop, None, puri, user)?,

View File

@@ -32,11 +32,12 @@ pub async fn handle_sync_collection<AS: AddressbookStore>(
.await?;
let mut responses = Vec::new();
for object in new_objects {
let path = format!("{}/{}.vcf", path.trim_end_matches('/'), object.get_id());
for (object_id, object) in new_objects {
let path = format!("{}/{}.vcf", path.trim_end_matches('/'), object_id);
responses.push(
AddressObjectResource {
object,
object_id,
principal: principal.to_owned(),
}
.propfind(&path, &sync_collection.prop, None, puri, user)?,

View File

@@ -78,7 +78,8 @@ impl<AS: AddressbookStore, S: SubscriptionStore> ResourceService
.get_objects(principal, addressbook_id)
.await?
.into_iter()
.map(|object| AddressObjectResource {
.map(|(object_id, object)| AddressObjectResource {
object_id,
object,
principal: principal.to_owned(),
})
@@ -91,7 +92,7 @@ impl<AS: AddressbookStore, S: SubscriptionStore> ResourceService
file: Self::Resource,
) -> Result<(), Self::Error> {
self.addr_store
.update_addressbook(principal.to_owned(), addressbook_id.to_owned(), file.into())
.update_addressbook(principal, addressbook_id, file.into())
.await?;
Ok(())
}

View File

@@ -23,9 +23,6 @@ pub enum Error {
#[error(transparent)]
XmlDecodeError(#[from] rustical_xml::XmlError),
#[error(transparent)]
IcalError(#[from] rustical_ical::Error),
}
impl Error {
@@ -43,7 +40,6 @@ impl Error {
Self::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
Self::ChronoParseError(_) | Self::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
Self::NotFound => StatusCode::NOT_FOUND,
Self::IcalError(err) => err.status_code(),
}
}
}

View File

@@ -28,7 +28,7 @@ headers.workspace = true
strum.workspace = true
matchit.workspace = true
matchit-serde.workspace = true
ical = { workspace = true, optional = true }
caldata = { workspace = true, optional = true }
[features]
ical = ["dep:ical"]
ical = ["dep:caldata"]

View File

@@ -51,19 +51,18 @@ impl Error {
_ => StatusCode::BAD_REQUEST,
},
Self::PropReadOnly => StatusCode::CONFLICT,
Self::PreconditionFailed => StatusCode::PRECONDITION_FAILED,
Self::InternalError | Self::IOError(_) => StatusCode::INTERNAL_SERVER_ERROR,
Self::Forbidden => StatusCode::FORBIDDEN,
// The correct status code for a failed precondition is not 412 Precondition Failed
// but 403 Forbidden (or 409 Conflict):
// https://datatracker.ietf.org/doc/html/rfc4791#section-1.3
Self::PreconditionFailed | Self::Forbidden => StatusCode::FORBIDDEN,
}
}
}
impl axum::response::IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
if matches!(
self.status_code(),
StatusCode::INTERNAL_SERVER_ERROR | StatusCode::PRECONDITION_FAILED
) {
if matches!(self.status_code(), StatusCode::INTERNAL_SERVER_ERROR) {
error!("{self}");
}

View File

@@ -6,12 +6,15 @@ use axum::{
extract::{MatchedPath, Path, State},
response::{IntoResponse, Response},
};
use axum_extra::TypedHeader;
use headers::Host;
use http::{HeaderMap, StatusCode, Uri};
use matchit_serde::ParamsDeserializer;
use serde::Deserialize;
use tracing::instrument;
#[instrument(skip(path, resource_service,))]
#[allow(clippy::too_many_arguments)]
pub async fn axum_route_copy<R: ResourceService>(
Path(path): Path<R::PathComponents>,
State(resource_service): State<R>,
@@ -20,6 +23,7 @@ pub async fn axum_route_copy<R: ResourceService>(
Overwrite(overwrite): Overwrite,
matched_path: MatchedPath,
header_map: HeaderMap,
TypedHeader(host): TypedHeader<Host>,
) -> Result<Response, R::Error> {
let destination = header_map
.get("Destination")
@@ -27,7 +31,11 @@ pub async fn axum_route_copy<R: ResourceService>(
.to_str()
.map_err(|_| crate::Error::Forbidden)?;
let destination_uri: Uri = destination.parse().map_err(|_| crate::Error::Forbidden)?;
// TODO: Check that host also matches
if let Some(authority) = destination_uri.authority()
&& host != authority.clone().into()
{
return Err(crate::Error::Forbidden.into());
}
let destination = destination_uri.path();
let mut router = matchit::Router::new();
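The added check closes a gap: the Destination header of a COPY (and of a MOVE, in the identical hunk below) must not point at a different host than the one the request was addressed to, otherwise the request is refused. A simplified, self-contained version of that comparison using only `http::Uri` and case-insensitive string matching (the handler above compares against the typed `headers::Host` instead):

```rust
use http::Uri;

// Simplified stand-in for the Destination/Host cross-check above.
fn destination_allowed(request_host: &str, destination: &str) -> bool {
    let Ok(uri) = destination.parse::<Uri>() else {
        return false;
    };
    match uri.authority() {
        // Absolute Destination: its authority must match the request's Host
        Some(authority) => authority.as_str().eq_ignore_ascii_case(request_host),
        // Path-only Destination: nothing to cross-check
        None => true,
    }
}

fn main() {
    assert!(destination_allowed("dav.example.com", "/caldav/user/cal/1.ics"));
    assert!(destination_allowed(
        "dav.example.com",
        "https://dav.example.com/caldav/user/cal/1.ics"
    ));
    assert!(!destination_allowed(
        "dav.example.com",
        "https://evil.example.org/caldav/user/cal/1.ics"
    ));
}
```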

View File

@@ -6,12 +6,15 @@ use axum::{
extract::{MatchedPath, Path, State},
response::{IntoResponse, Response},
};
use axum_extra::TypedHeader;
use headers::Host;
use http::{HeaderMap, StatusCode, Uri};
use matchit_serde::ParamsDeserializer;
use serde::Deserialize;
use tracing::instrument;
#[instrument(skip(path, resource_service,))]
#[allow(clippy::too_many_arguments)]
pub async fn axum_route_move<R: ResourceService>(
Path(path): Path<R::PathComponents>,
State(resource_service): State<R>,
@@ -20,6 +23,7 @@ pub async fn axum_route_move<R: ResourceService>(
Overwrite(overwrite): Overwrite,
matched_path: MatchedPath,
header_map: HeaderMap,
TypedHeader(host): TypedHeader<Host>,
) -> Result<Response, R::Error> {
let destination = header_map
.get("Destination")
@@ -27,7 +31,11 @@ pub async fn axum_route_move<R: ResourceService>(
.to_str()
.map_err(|_| crate::Error::Forbidden)?;
let destination_uri: Uri = destination.parse().map_err(|_| crate::Error::Forbidden)?;
// TODO: Check that host also matches
if let Some(authority) = destination_uri.authority()
&& host != authority.clone().into()
{
return Err(crate::Error::Forbidden.into());
}
let destination = destination_uri.path();
let mut router = matchit::Router::new();

View File

@@ -71,6 +71,7 @@ pub async fn axum_route_proppatch<R: ResourceService>(
route_proppatch(&path, uri.path(), &body, &principal, &resource_service).await
}
#[allow(clippy::too_many_lines)]
pub async fn route_proppatch<R: ResourceService>(
path_components: &R::PathComponents,
path: &str,
@@ -116,12 +117,14 @@ pub async fn route_proppatch<R: ResourceService>(
}
}
SetPropertyPropWrapper::Invalid(invalid) => {
let propname = invalid.tag_name();
let Unparsed(propns, propname) = invalid;
if let Some(full_propname) = <R::Resource as Resource>::list_props()
.into_iter()
.find_map(|(ns, tag)| {
if tag == propname.as_str() {
if (ns, tag)
== (propns.as_ref().map(NamespaceOwned::as_ref), &propname)
{
Some((ns.map(NamespaceOwned::from), tag.to_owned()))
} else {
None
@@ -133,7 +136,7 @@ pub async fn route_proppatch<R: ResourceService>(
// - internal properties
props_conflict.push(full_propname);
} else {
props_not_found.push((None, propname));
props_not_found.push((propns, propname));
}
}
}

View File

@@ -45,7 +45,7 @@ impl<PN: XmlDeserialize> XmlDeserialize for PropElement<PN> {
// start of a child element
Event::Start(start) | Event::Empty(start) => {
let empty = matches!(event, Event::Empty(_));
let (ns, name) = reader.resolve_element(start.name());
let (ns, name) = reader.resolver().resolve_element(start.name());
let ns = match ns {
ResolveResult::Bound(ns) => Some(NamespaceOwned::from(ns)),
ResolveResult::Unknown(_ns) => todo!("handle error"),

View File

@@ -1,4 +1,4 @@
use ical::property::Property;
use caldata::parser::ContentLine;
use rustical_xml::{ValueDeserialize, XmlDeserialize};
use std::borrow::Cow;
@@ -128,7 +128,7 @@ impl TextMatchElement {
negate_condition.0 ^ matches
}
#[must_use]
pub fn match_property(&self, property: &Property) -> bool {
pub fn match_property(&self, property: &ContentLine) -> bool {
let text = property.value.as_deref().unwrap_or("");
self.match_text(text)
}

View File

@@ -15,7 +15,7 @@ chrono-tz.workspace = true
thiserror.workspace = true
derive_more.workspace = true
rustical_xml.workspace = true
ical.workspace = true
caldata.workspace = true
regex.workspace = true
rrule.workspace = true
serde.workspace = true

View File

@@ -1,59 +1,48 @@
use crate::{CalDateTime, LOCAL_DATE};
use crate::{CalendarObject, Error};
use chrono::Datelike;
use ical::generator::Emitter;
use ical::parser::{
Component,
vcard::{self, component::VcardContact},
use caldata::{
VcardParser,
component::{
CalendarInnerDataBuilder, ComponentMut, IcalAlarmBuilder, IcalCalendarObjectBuilder,
IcalEventBuilder, VcardContact,
},
generator::Emitter,
parser::{ContentLine, ParserOptions},
property::{
Calscale, IcalCALSCALEProperty, IcalDTENDProperty, IcalDTSTAMPProperty,
IcalDTSTARTProperty, IcalPRODIDProperty, IcalRRULEProperty, IcalSUMMARYProperty,
IcalUIDProperty, IcalVERSIONProperty, IcalVersion, VcardANNIVERSARYProperty,
VcardBDAYProperty, VcardFNProperty,
},
types::{CalDate, PartialDate, Timezone},
};
use chrono::{NaiveDate, Utc};
use sha2::{Digest, Sha256};
use std::{collections::HashMap, io::BufReader};
use std::collections::BTreeMap;
use std::str::FromStr;
#[derive(Debug, Clone)]
pub struct AddressObject {
id: String,
vcf: String,
vcard: VcardContact,
}
impl TryFrom<VcardContact> for AddressObject {
type Error = Error;
fn try_from(vcard: VcardContact) -> Result<Self, Self::Error> {
let uid = vcard
.get_uid()
.ok_or_else(|| Error::InvalidData("missing UID".to_owned()))?
.to_owned();
impl From<VcardContact> for AddressObject {
fn from(vcard: VcardContact) -> Self {
let vcf = vcard.generate();
Ok(Self {
vcf,
vcard,
id: uid,
})
Self { vcf, vcard }
}
}
impl AddressObject {
pub fn from_vcf(id: String, vcf: String) -> Result<Self, Error> {
let mut parser = vcard::VcardParser::new(BufReader::new(vcf.as_bytes()));
let vcard = parser.next().ok_or(Error::MissingContact)??;
if parser.next().is_some() {
return Err(Error::InvalidData(
"multiple vcards, only one allowed".to_owned(),
));
}
Ok(Self { id, vcf, vcard })
}
#[must_use]
pub fn get_id(&self) -> &str {
&self.id
pub fn from_vcf(vcf: String) -> Result<Self, Error> {
let parser = VcardParser::from_slice(vcf.as_bytes());
let vcard = parser.expect_one()?;
Ok(Self { vcf, vcard })
}
#[must_use]
pub fn get_etag(&self) -> String {
let mut hasher = Sha256::new();
hasher.update(self.get_id());
hasher.update(self.get_vcf());
format!("\"{:x}\"", hasher.finalize())
}
@@ -63,121 +52,115 @@ impl AddressObject {
&self.vcf
}
#[must_use]
pub fn get_anniversary(&self) -> Option<(CalDateTime, bool)> {
let prop = self.vcard.get_property("ANNIVERSARY")?.value.as_deref()?;
CalDateTime::parse_vcard(prop).ok()
}
fn get_significant_date_object(
&self,
date: &PartialDate,
summary_prefix: &str,
suffix: &str,
) -> Result<Option<CalendarObject>, Error> {
let Some(uid) = self.vcard.get_uid() else {
return Ok(None);
};
let uid = format!("{uid}{suffix}");
let year = date.get_year();
let year_suffix = year.map(|year| format!(" {year}")).unwrap_or_default();
let Some(month) = date.get_month() else {
return Ok(None);
};
let Some(day) = date.get_day() else {
return Ok(None);
};
let Some(dtstart) = NaiveDate::from_ymd_opt(year.unwrap_or(1900), month, day) else {
return Ok(None);
};
let start_date = CalDate(dtstart, Timezone::Local);
let Some(end_date) = start_date.succ_opt() else {
// start_date is the maximum representable date; this should never happen, but for
// all practical purposes we return None here rather than raise an error
return Ok(None);
};
let Some(VcardFNProperty(fullname, _)) = self.vcard.full_name.first() else {
return Ok(None);
};
let summary = format!("{summary_prefix} {fullname}{year_suffix}");
#[must_use]
pub fn get_birthday(&self) -> Option<(CalDateTime, bool)> {
let prop = self.vcard.get_property("BDAY")?.value.as_deref()?;
CalDateTime::parse_vcard(prop).ok()
}
let event = IcalEventBuilder {
properties: vec![
IcalDTSTAMPProperty(Utc::now().into(), vec![].into()).into(),
IcalDTSTARTProperty(start_date.into(), vec![].into()).into(),
IcalDTENDProperty(end_date.into(), vec![].into()).into(),
IcalUIDProperty(uid, vec![].into()).into(),
IcalRRULEProperty(
rrule::RRule::from_str("FREQ=YEARLY").unwrap(),
vec![].into(),
)
.into(),
IcalSUMMARYProperty(summary.clone(), vec![].into()).into(),
ContentLine {
name: "TRANSP".to_owned(),
value: Some("TRANSPARENT".to_owned()),
..Default::default()
},
],
alarms: vec![IcalAlarmBuilder {
properties: vec![
ContentLine {
name: "TRIGGER".to_owned(),
value: Some("-PT0M".to_owned()),
params: vec![("VALUE".to_owned(), vec!["DURATION".to_owned()])].into(),
},
ContentLine {
name: "ACTION".to_owned(),
value: Some("DISPLAY".to_owned()),
..Default::default()
},
ContentLine {
name: "DESCRIPTION".to_owned(),
value: Some(summary),
..Default::default()
},
],
}],
};
#[must_use]
pub fn get_full_name(&self) -> Option<&str> {
let prop = self.vcard.get_property("FN")?;
prop.value.as_deref()
Ok(Some(
IcalCalendarObjectBuilder {
properties: vec![
IcalVERSIONProperty(IcalVersion::Version2_0, vec![].into()).into(),
IcalCALSCALEProperty(Calscale::Gregorian, vec![].into()).into(),
IcalPRODIDProperty(
"-//github.com/lennart-k/rustical birthday calendar//EN".to_owned(),
vec![].into(),
)
.into(),
],
inner: Some(CalendarInnerDataBuilder::Event(vec![event])),
vtimezones: BTreeMap::default(),
}
.build(&ParserOptions::default(), None)?
.into(),
))
}
pub fn get_anniversary_object(&self) -> Result<Option<CalendarObject>, Error> {
Ok(
if let Some((anniversary, contains_year)) = self.get_anniversary() {
let Some(fullname) = self.get_full_name() else {
let Some(VcardANNIVERSARYProperty(anniversary, _)) = &self.vcard.anniversary else {
return Ok(None);
};
let Some(date) = &anniversary.date else {
return Ok(None);
};
let anniversary = anniversary.date();
let year = contains_year.then_some(anniversary.year());
let anniversary_start = anniversary.format(LOCAL_DATE);
let anniversary_end = anniversary
.succ_opt()
.unwrap_or(anniversary)
.format(LOCAL_DATE);
let uid = format!("{}-anniversary", self.get_id());
let year_suffix = year.map(|year| format!(" ({year})")).unwrap_or_default();
Some(CalendarObject::from_ics(
format!(
r"BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//github.com/lennart-k/rustical birthday calendar//EN
BEGIN:VEVENT
DTSTART;VALUE=DATE:{anniversary_start}
DTEND;VALUE=DATE:{anniversary_end}
UID:{uid}
RRULE:FREQ=YEARLY
SUMMARY:💍 {fullname}{year_suffix}
TRANSP:TRANSPARENT
BEGIN:VALARM
TRIGGER;VALUE=DURATION:-PT0M
ACTION:DISPLAY
DESCRIPTION:💍 {fullname}{year_suffix}
END:VALARM
END:VEVENT
END:VCALENDAR",
),
None,
)?)
} else {
None
},
)
self.get_significant_date_object(date, "💍", "-anniversary")
}
pub fn get_birthday_object(&self) -> Result<Option<CalendarObject>, Error> {
Ok(
if let Some((birthday, contains_year)) = self.get_birthday() {
let Some(fullname) = self.get_full_name() else {
let Some(VcardBDAYProperty(bday, _)) = &self.vcard.birthday else {
return Ok(None);
};
let Some(date) = &bday.date else {
return Ok(None);
};
let birthday = birthday.date();
let year = contains_year.then_some(birthday.year());
let birthday_start = birthday.format(LOCAL_DATE);
let birthday_end = birthday.succ_opt().unwrap_or(birthday).format(LOCAL_DATE);
let uid = format!("{}-birthday", self.get_id());
let year_suffix = year.map(|year| format!(" ({year})")).unwrap_or_default();
Some(CalendarObject::from_ics(
format!(
r"BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:-//github.com/lennart-k/rustical birthday calendar//EN
BEGIN:VEVENT
DTSTART;VALUE=DATE:{birthday_start}
DTEND;VALUE=DATE:{birthday_end}
UID:{uid}
RRULE:FREQ=YEARLY
SUMMARY:🎂 {fullname}{year_suffix}
TRANSP:TRANSPARENT
BEGIN:VALARM
TRIGGER;VALUE=DURATION:-PT0M
ACTION:DISPLAY
DESCRIPTION:🎂 {fullname}{year_suffix}
END:VALARM
END:VEVENT
END:VCALENDAR",
),
None,
)?)
} else {
None
},
)
}
/// Get significant dates associated with this address object
pub fn get_significant_dates(&self) -> Result<HashMap<&'static str, CalendarObject>, Error> {
let mut out = HashMap::new();
if let Some(birthday) = self.get_birthday_object()? {
out.insert("birthday", birthday);
}
if let Some(anniversary) = self.get_anniversary_object()? {
out.insert("anniversary", anniversary);
}
Ok(out)
self.get_significant_date_object(date, "🎂", "-birthday")
}
#[must_use]
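Birthday and anniversary events are now assembled through caldata's builders instead of formatting raw ICS strings: the vCard date becomes a one-day, all-day slot (with 1900 as the placeholder year when the vCard gives no year) that recurs via RRULE:FREQ=YEARLY. The chrono-only sketch below isolates just that date arithmetic.

```rust
use chrono::NaiveDate;

/// Derive the DTSTART/DTEND pair for a yearly significant-date event, mirroring
/// the fallback-to-1900 and next-day logic in get_significant_date_object above.
fn significant_date_range(
    year: Option<i32>,
    month: u32,
    day: u32,
) -> Option<(NaiveDate, NaiveDate)> {
    let start = NaiveDate::from_ymd_opt(year.unwrap_or(1900), month, day)?;
    let end = start.succ_opt()?; // all-day events end on the following day
    Some((start, end))
}

fn main() {
    // e.g. BDAY:--0214 (no year): placeholder year 1900, recurring yearly
    let (start, end) = significant_date_range(None, 2, 14).unwrap();
    assert_eq!(start, NaiveDate::from_ymd_opt(1900, 2, 14).unwrap());
    assert_eq!(end, NaiveDate::from_ymd_opt(1900, 2, 15).unwrap());
}
```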

View File

@@ -0,0 +1,144 @@
use std::sync::OnceLock;
use crate::Error;
use caldata::{
IcalObjectParser,
component::{CalendarInnerData, IcalCalendarObject},
generator::Emitter,
parser::ParserOptions,
};
use derive_more::Display;
use serde::Deserialize;
use serde::Serialize;
use sha2::{Digest, Sha256};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Display)]
// specified in https://datatracker.ietf.org/doc/html/rfc5545#section-3.6
pub enum CalendarObjectType {
#[serde(rename = "VEVENT")]
Event = 0,
#[serde(rename = "VTODO")]
Todo = 1,
#[serde(rename = "VJOURNAL")]
Journal = 2,
}
impl From<&IcalCalendarObject> for CalendarObjectType {
fn from(value: &IcalCalendarObject) -> Self {
match value.get_inner() {
CalendarInnerData::Event(_, _) => Self::Event,
CalendarInnerData::Todo(_, _) => Self::Todo,
CalendarInnerData::Journal(_, _) => Self::Journal,
}
}
}
impl CalendarObjectType {
#[must_use]
pub const fn as_str(&self) -> &'static str {
match self {
Self::Event => "VEVENT",
Self::Todo => "VTODO",
Self::Journal => "VJOURNAL",
}
}
}
impl rustical_xml::ValueSerialize for CalendarObjectType {
fn serialize(&self) -> String {
self.as_str().to_owned()
}
}
impl rustical_xml::ValueDeserialize for CalendarObjectType {
fn deserialize(val: &str) -> std::result::Result<Self, rustical_xml::XmlError> {
match <String as rustical_xml::ValueDeserialize>::deserialize(val)?.as_str() {
"VEVENT" => Ok(Self::Event),
"VTODO" => Ok(Self::Todo),
"VJOURNAL" => Ok(Self::Journal),
_ => Err(rustical_xml::XmlError::InvalidValue(
rustical_xml::ParseValueError::Other(format!(
"Invalid value '{val}', must be VEVENT, VTODO, or VJOURNAL"
)),
)),
}
}
}
#[derive(Debug, Clone)]
pub struct CalendarObject {
inner: IcalCalendarObject,
ics: OnceLock<String>,
}
impl CalendarObject {
// Parses iCalendar data without caching the raw input.
// This is meant for iCalendar data coming from outside that might need to be normalised;
// for example, omitted timezones can be fixed up here.
pub fn import(ics: &str, options: Option<ParserOptions>) -> Result<Self, Error> {
let parser =
IcalObjectParser::from_slice(ics.as_bytes()).with_options(options.unwrap_or_default());
let inner = parser.expect_one()?;
Ok(Self {
inner,
ics: OnceLock::new(),
})
}
// Parses iCalendar data and caches the original ICS string as-is.
// Only meant for loading data from a data store where we know the iCalendar
// is already in the desired form.
pub fn from_ics(ics: String) -> Result<Self, Error> {
let parser = IcalObjectParser::from_slice(ics.as_bytes());
let inner = parser.expect_one()?;
Ok(Self {
inner,
ics: ics.into(),
})
}
#[must_use]
pub const fn get_inner(&self) -> &IcalCalendarObject {
&self.inner
}
#[must_use]
pub fn get_uid(&self) -> &str {
self.inner.get_uid()
}
#[must_use]
pub fn get_etag(&self) -> String {
let mut hasher = Sha256::new();
hasher.update(self.get_uid());
hasher.update(self.get_ics());
format!("\"{:x}\"", hasher.finalize())
}
#[must_use]
pub fn get_ics(&self) -> &str {
self.ics.get_or_init(|| self.inner.generate())
}
#[must_use]
pub fn get_object_type(&self) -> CalendarObjectType {
(&self.inner).into()
}
}
impl From<CalendarObject> for IcalCalendarObject {
fn from(value: CalendarObject) -> Self {
value.inner
}
}
impl From<IcalCalendarObject> for CalendarObject {
fn from(value: IcalCalendarObject) -> Self {
Self {
ics: value.generate().into(),
inner: value,
}
}
}
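The new `CalendarObject` keeps the parsed `IcalCalendarObject` as the source of truth and caches the serialized ICS lazily in a `OnceLock`; the ETag is then derived from the UID plus that serialization. The self-contained snippet below illustrates this ETag scheme, using the `sha2` crate the module already depends on.

```rust
use sha2::{Digest, Sha256};

/// SHA-256 over the UID followed by the serialized body, rendered as a quoted
/// hex string so it can be used directly as an HTTP ETag.
fn compute_etag(uid: &str, body: &str) -> String {
    let mut hasher = Sha256::new();
    hasher.update(uid);
    hasher.update(body);
    format!("\"{:x}\"", hasher.finalize())
}

fn main() {
    let etag = compute_etag("abcd2", "BEGIN:VCALENDAR\r\n...\r\nEND:VCALENDAR\r\n");
    assert!(etag.starts_with('"') && etag.ends_with('"'));
    println!("{etag}");
}
```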

View File

@@ -1,42 +0,0 @@
use axum::{http::StatusCode, response::IntoResponse};
use crate::CalDateTimeError;
#[derive(Debug, thiserror::Error, PartialEq, Eq)]
pub enum Error {
#[error("Invalid ics/vcf input: {0}")]
InvalidData(String),
#[error("Missing calendar")]
MissingCalendar,
#[error("Missing contact")]
MissingContact,
#[error(transparent)]
ParserError(#[from] ical::parser::ParserError),
#[error(transparent)]
CalDateTimeError(#[from] CalDateTimeError),
#[error(transparent)]
RRuleError(#[from] rrule::RRuleError),
}
impl Error {
#[must_use]
pub const fn status_code(&self) -> StatusCode {
match self {
Self::InvalidData(_) | Self::MissingCalendar | Self::MissingContact => {
StatusCode::BAD_REQUEST
}
_ => StatusCode::INTERNAL_SERVER_ERROR,
}
}
}
impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
(self.status_code(), self.to_string()).into_response()
}
}

View File

@@ -1,385 +0,0 @@
use crate::CalDateTime;
use crate::Error;
use chrono::{DateTime, Duration, Utc};
use ical::parser::ComponentMut;
use ical::{generator::IcalEvent, parser::Component, property::Property};
use rrule::{RRule, RRuleSet};
use std::{collections::HashMap, str::FromStr};
#[derive(Debug, Clone, Default)]
pub struct EventObject {
pub event: IcalEvent,
// If a timezone is None, that means the VCALENDAR object defines a timezone
// with that name but it's not from the Olson DB
pub timezones: HashMap<String, Option<chrono_tz::Tz>>,
}
impl EventObject {
#[must_use]
pub fn get_uid(&self) -> &str {
self.event.get_uid()
}
pub fn get_dtstart(&self) -> Result<Option<CalDateTime>, Error> {
if let Some(dtstart) = self.event.get_dtstart() {
Ok(Some(CalDateTime::parse_prop(dtstart, &self.timezones)?))
} else {
Ok(None)
}
}
pub fn get_dtend(&self) -> Result<Option<CalDateTime>, Error> {
if let Some(dtend) = self.event.get_dtend() {
Ok(Some(CalDateTime::parse_prop(dtend, &self.timezones)?))
} else {
Ok(None)
}
}
pub fn get_last_occurence(&self) -> Result<Option<CalDateTime>, Error> {
if self.event.get_rrule().is_some() {
// TODO: understand recurrence rules
return Ok(None);
}
if let Some(dtend) = self.get_dtend()? {
return Ok(Some(dtend));
}
let duration = self.event.get_duration().unwrap_or(Duration::days(1));
let first_occurence = self.get_dtstart()?;
Ok(first_occurence.map(|first_occurence| first_occurence + duration))
}
pub fn recurrence_ruleset(&self) -> Result<Option<rrule::RRuleSet>, Error> {
let dtstart: DateTime<rrule::Tz> = if let Some(dtstart) = self.get_dtstart()? {
if let Some(dtend) = self.get_dtend()? {
// DTSTART and DTEND MUST have the same timezone
assert_eq!(dtstart.timezone(), dtend.timezone());
}
dtstart
.as_datetime()
.with_timezone(&dtstart.timezone().into())
} else {
return Ok(None);
};
let mut rrule_set = RRuleSet::new(dtstart);
// TODO: Make nice, this is just a bodge to get correct behaviour
let mut empty = true;
for prop in &self.event.properties {
rrule_set = match prop.name.as_str() {
"RRULE" => {
let rrule = RRule::from_str(prop.value.as_ref().ok_or_else(|| {
Error::RRuleError(rrule::ParseError::MissingDateGenerationRules.into())
})?)?
.validate(dtstart)
.unwrap();
empty = false;
rrule_set.rrule(rrule)
}
"RDATE" => {
let rdate = CalDateTime::parse_prop(prop, &self.timezones)?.into();
empty = false;
rrule_set.rdate(rdate)
}
"EXDATE" => {
let exdate = CalDateTime::parse_prop(prop, &self.timezones)?.into();
empty = false;
rrule_set.exdate(exdate)
}
_ => rrule_set,
}
}
if empty {
return Ok(None);
}
Ok(Some(rrule_set))
}
// The returned calendar components MUST NOT use recurrence
// properties (i.e., EXDATE, EXRULE, RDATE, and RRULE) and MUST NOT
// have reference to or include VTIMEZONE components. Date and local
// time with reference to time zone information MUST be converted
// into date with UTC time.
pub fn expand_recurrence(
&self,
start: Option<DateTime<Utc>>,
end: Option<DateTime<Utc>>,
overrides: &[Self],
) -> Result<Vec<IcalEvent>, Error> {
let mut events = vec![];
let dtstart = self.get_dtstart()?.expect("We must have a DTSTART here");
let computed_duration = self
.get_dtend()?
.map(|dtend| dtend.as_datetime().into_owned() - dtstart.as_datetime().as_ref());
let Some(mut rrule_set) = self.recurrence_ruleset()? else {
// If ruleset empty simply return main event AND all overrides
return Ok(std::iter::once(self.clone())
.chain(overrides.iter().cloned())
.map(|event| event.event)
.collect());
};
if let Some(start) = start {
rrule_set = rrule_set.after(start.with_timezone(&rrule::Tz::UTC));
}
if let Some(end) = end {
rrule_set = rrule_set.before(end.with_timezone(&rrule::Tz::UTC));
}
let dates = rrule_set.all(2048).dates;
'recurrence: for date in dates {
let date = CalDateTime::from(date.to_utc());
let recurrence_id = if dtstart.is_date() {
date.format_date()
} else {
date.format()
};
for ev_override in overrides {
if let Some(override_id) = &ev_override
.event
.get_recurrence_id()
.as_ref()
.expect("overrides have a recurrence id")
.value
&& override_id == &recurrence_id
{
// We have an override for this occurence
//
events.push(ev_override.event.clone());
continue 'recurrence;
}
}
let mut ev = self.event.clone().mutable();
ev.remove_property("RRULE");
ev.remove_property("RDATE");
ev.remove_property("EXDATE");
ev.remove_property("EXRULE");
let dtstart_prop = ev
.get_property("DTSTART")
.expect("We must have a DTSTART here")
.clone();
ev.remove_property("DTSTART");
ev.remove_property("DTEND");
ev.set_property(Property {
name: "RECURRENCE-ID".to_string(),
value: Some(recurrence_id.clone()),
params: vec![],
});
ev.set_property(Property {
name: "DTSTART".to_string(),
value: Some(recurrence_id),
params: vec![],
});
if let Some(duration) = computed_duration {
let dtend = date + duration;
let dtendformat = if dtstart.is_date() {
dtend.format_date()
} else {
dtend.format()
};
ev.set_property(Property {
name: "DTEND".to_string(),
value: Some(dtendformat),
params: dtstart_prop.params,
});
}
events.push(ev.verify()?);
}
Ok(events)
}
}
#[cfg(test)]
mod tests {
use crate::{CalDateTime, CalendarObject};
use chrono::{DateTime, Utc};
use ical::generator::Emitter;
use rstest::rstest;
const ICS_1: &str = r"BEGIN:VCALENDAR
CALSCALE:GREGORIAN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
X-LIC-LOCATION:Europe/Berlin
END:VTIMEZONE
BEGIN:VEVENT
UID:318ec6503573d9576818daf93dac07317058d95c
DTSTAMP:20250502T132758Z
DTSTART;TZID=Europe/Berlin:20250506T090000
DTEND;TZID=Europe/Berlin:20250506T092500
SEQUENCE:2
SUMMARY:weekly stuff
TRANSP:OPAQUE
RRULE:FREQ=WEEKLY;COUNT=4;INTERVAL=2;BYDAY=TU,TH,SU
END:VEVENT
END:VCALENDAR";
const EXPANDED_1: &[&str] = &[
"BEGIN:VEVENT\r
UID:318ec6503573d9576818daf93dac07317058d95c\r
DTSTAMP:20250502T132758Z\r
SEQUENCE:2\r
SUMMARY:weekly stuff\r
TRANSP:OPAQUE\r
RECURRENCE-ID:20250506T070000Z\r
DTSTART:20250506T070000Z\r
DTEND:20250506T072500Z\r
END:VEVENT\r\n",
"BEGIN:VEVENT\r
UID:318ec6503573d9576818daf93dac07317058d95c\r
DTSTAMP:20250502T132758Z\r
SEQUENCE:2\r
SUMMARY:weekly stuff\r
TRANSP:OPAQUE\r
RECURRENCE-ID:20250508T070000Z\r
DTSTART:20250508T070000Z\r
DTEND:20250508T072500Z\r
END:VEVENT\r\n",
"BEGIN:VEVENT\r
UID:318ec6503573d9576818daf93dac07317058d95c\r
DTSTAMP:20250502T132758Z\r
SEQUENCE:2\r
SUMMARY:weekly stuff\r
TRANSP:OPAQUE\r
RECURRENCE-ID:20250511T090000\r
DTSTART:20250511T070000Z\r
DTEND:20250511T072500Z\r
END:VEVENT\r\n",
"BEGIN:VEVENT\r
UID:318ec6503573d9576818daf93dac07317058d95c\r
DTSTAMP:20250502T132758Z\r
SEQUENCE:2\r
SUMMARY:weekly stuff\r
TRANSP:OPAQUE\r
RECURRENCE-ID:20250520T090000\r
DTSTART:20250520T070000Z\r
DTEND:20250520T072500Z\r
END:VEVENT\r\n",
];
const ICS_2: &str = r"BEGIN:VCALENDAR
CALSCALE:GREGORIAN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:US/Eastern
END:VTIMEZONE
BEGIN:VEVENT
DTSTAMP:20060206T001121Z
DTSTART;TZID=US/Eastern:20060102T120000
DURATION:PT1H
RRULE:FREQ=DAILY;COUNT=5
SUMMARY:Event #2
UID:abcd2
END:VEVENT
BEGIN:VEVENT
DTSTAMP:20060206T001121Z
DTSTART;TZID=US/Eastern:20060104T140000
DURATION:PT1H
RECURRENCE-ID;TZID=US/Eastern:20060104T120000
SUMMARY:Event #2 bis
UID:abcd2
END:VEVENT
END:VCALENDAR
";
const EXPANDED_2: &[&str] = &[
"BEGIN:VEVENT\r
DTSTAMP:20060206T001121Z\r
DURATION:PT1H\r
SUMMARY:Event #2\r
UID:abcd2\r
RECURRENCE-ID:20060103T170000\r
DTSTART:20060103T170000\r
END:VEVENT\r\n",
"BEGIN:VEVENT\r
DTSTAMP:20060206T001121Z\r
DURATION:PT1H\r
SUMMARY:Event #2 bis\r
UID:abcd2\r
RECURRENCE-ID:20060104T170000\r
DTSTART:20060104T190000\r
END:VEVENT\r
END:VCALENDAR\r\n",
];
const ICS_3: &str = r"BEGIN:VCALENDAR
CALSCALE:GREGORIAN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:US/Eastern
END:VTIMEZONE
BEGIN:VEVENT
ATTENDEE;PARTSTAT=ACCEPTED;ROLE=CHAIR:mailto:cyrus@example.com
ATTENDEE;PARTSTAT=NEEDS-ACTION:mailto:lisa@example.com
DTSTAMP:20060206T001220Z
DTSTART;TZID=US/Eastern:20060104T100000
DURATION:PT1H
LAST-MODIFIED:20060206T001330Z
ORGANIZER:mailto:cyrus@example.com
SEQUENCE:1
STATUS:TENTATIVE
SUMMARY:Event #3
UID:abcd3
END:VEVENT
END:VCALENDAR
";
const EXPANDED_3: &[&str] = &["BEGIN:VEVENT
ATTENDEE;PARTSTAT=ACCEPTED;ROLE=CHAIR:mailto:cyrus@example.com
ATTENDEE;PARTSTAT=NEEDS-ACTION:mailto:lisa@example.com
DTSTAMP:20060206T001220Z
DTSTART:20060104T150000
DURATION:PT1H
LAST-MODIFIED:20060206T001330Z
ORGANIZER:mailto:cyrus@example.com
SEQUENCE:1
STATUS:TENTATIVE
SUMMARY:Event #3
UID:abcd3
X-ABC-GUID:E1CX5Dr-0007ym-Hz@example.com
END:VEVENT"];
#[rstest]
#[case(ICS_1, EXPANDED_1, None, None)]
// from https://datatracker.ietf.org/doc/html/rfc4791#section-7.8.3
#[case(ICS_2, EXPANDED_2,
Some(CalDateTime::parse("20060103T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc()),
Some(CalDateTime::parse("20060105T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc())
)]
#[case(ICS_3, EXPANDED_3,
Some(CalDateTime::parse("20060103T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc()),
Some(CalDateTime::parse("20060105T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc())
)]
fn test_expand_recurrence(
#[case] ics: &'static str,
#[case] expanded: &[&str],
#[case] from: Option<DateTime<Utc>>,
#[case] to: Option<DateTime<Utc>>,
) {
let event = CalendarObject::from_ics(ics.to_string(), None).unwrap();
let crate::CalendarObjectComponent::Event(event, overrides) = event.get_data() else {
panic!()
};
let events: Vec<String> = event
.expand_recurrence(from, to, overrides)
.unwrap()
.into_iter()
.map(|event| Emitter::generate(&event))
.collect();
assert_eq!(events.len(), expanded.len());
for (output, reference) in events.iter().zip(expanded) {
similar_asserts::assert_eq!(output, reference);
}
}
}

View File

@@ -1,5 +0,0 @@
mod event;
mod object;
pub use event::*;
pub use object::*;

View File

@@ -1,366 +0,0 @@
use super::EventObject;
use crate::CalDateTime;
use crate::Error;
use chrono::DateTime;
use chrono::Utc;
use derive_more::Display;
use ical::generator::{Emitter, IcalCalendar};
use ical::parser::ical::component::IcalJournal;
use ical::parser::ical::component::IcalTimeZone;
use ical::parser::ical::component::IcalTodo;
use ical::property::Property;
use serde::Deserialize;
use serde::Serialize;
use sha2::{Digest, Sha256};
use std::{collections::HashMap, io::BufReader};
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Display)]
// specified in https://datatracker.ietf.org/doc/html/rfc5545#section-3.6
pub enum CalendarObjectType {
#[serde(rename = "VEVENT")]
Event = 0,
#[serde(rename = "VTODO")]
Todo = 1,
#[serde(rename = "VJOURNAL")]
Journal = 2,
}
impl CalendarObjectType {
#[must_use]
pub const fn as_str(&self) -> &'static str {
match self {
Self::Event => "VEVENT",
Self::Todo => "VTODO",
Self::Journal => "VJOURNAL",
}
}
}
impl rustical_xml::ValueSerialize for CalendarObjectType {
fn serialize(&self) -> String {
self.as_str().to_owned()
}
}
impl rustical_xml::ValueDeserialize for CalendarObjectType {
fn deserialize(val: &str) -> std::result::Result<Self, rustical_xml::XmlError> {
match <String as rustical_xml::ValueDeserialize>::deserialize(val)?.as_str() {
"VEVENT" => Ok(Self::Event),
"VTODO" => Ok(Self::Todo),
"VJOURNAL" => Ok(Self::Journal),
_ => Err(rustical_xml::XmlError::InvalidValue(
rustical_xml::ParseValueError::Other(format!(
"Invalid value '{val}', must be VEVENT, VTODO, or VJOURNAL"
)),
)),
}
}
}
#[derive(Debug, Clone)]
pub enum CalendarObjectComponent {
Event(EventObject, Vec<EventObject>),
Todo(IcalTodo, Vec<IcalTodo>),
Journal(IcalJournal, Vec<IcalJournal>),
}
impl CalendarObjectComponent {
#[must_use]
pub fn get_uid(&self) -> &str {
match &self {
// We've made sure before that the first component exists and all components share the
// same UID
Self::Todo(todo, _) => todo.get_uid(),
Self::Event(event, _) => event.event.get_uid(),
Self::Journal(journal, _) => journal.get_uid(),
}
}
}
impl From<&CalendarObjectComponent> for CalendarObjectType {
fn from(value: &CalendarObjectComponent) -> Self {
match value {
CalendarObjectComponent::Event(..) => Self::Event,
CalendarObjectComponent::Todo(..) => Self::Todo,
CalendarObjectComponent::Journal(..) => Self::Journal,
}
}
}
impl CalendarObjectComponent {
fn from_events(mut events: Vec<EventObject>) -> Result<Self, Error> {
// A calendar object does not necessarily have to contain a main VOBJECT
if events.is_empty() {
return Err(Error::MissingCalendar);
}
#[allow(clippy::option_if_let_else)]
let main_event = if let Some(main) = events
.extract_if(.., |event| event.event.get_recurrence_id().is_none())
.next()
{
main
} else {
events.remove(0)
};
let overrides = events;
for event in &overrides {
if event.get_uid() != main_event.get_uid() {
return Err(Error::InvalidData(
"Calendar object contains multiple UIDs".to_owned(),
));
}
if event.event.get_recurrence_id().is_none() {
return Err(Error::InvalidData(
"Calendar object can only contain one main component".to_owned(),
));
}
}
Ok(Self::Event(main_event, overrides))
}
fn from_todos(mut todos: Vec<IcalTodo>) -> Result<Self, Error> {
// A calendar object does not necessarily have to contain a main VOBJECT
if todos.is_empty() {
return Err(Error::MissingCalendar);
}
#[allow(clippy::option_if_let_else)]
let main_todo = if let Some(main) = todos
.extract_if(.., |todo| todo.get_recurrence_id().is_none())
.next()
{
main
} else {
todos.remove(0)
};
let overrides = todos;
for todo in &overrides {
if todo.get_uid() != main_todo.get_uid() {
return Err(Error::InvalidData(
"Calendar object contains multiple UIDs".to_owned(),
));
}
if todo.get_recurrence_id().is_none() {
return Err(Error::InvalidData(
"Calendar object can only contain one main component".to_owned(),
));
}
}
Ok(Self::Todo(main_todo, overrides))
}
fn from_journals(mut journals: Vec<IcalJournal>) -> Result<Self, Error> {
// A calendar object does not necessarily have to contain a main VOBJECT
if journals.is_empty() {
return Err(Error::MissingCalendar);
}
#[allow(clippy::option_if_let_else)]
let main_journal = if let Some(main) = journals
.extract_if(.., |journal| journal.get_recurrence_id().is_none())
.next()
{
main
} else {
journals.remove(0)
};
let overrides = journals;
for journal in &overrides {
if journal.get_uid() != main_journal.get_uid() {
return Err(Error::InvalidData(
"Calendar object contains multiple UIDs".to_owned(),
));
}
if journal.get_recurrence_id().is_none() {
return Err(Error::InvalidData(
"Calendar object can only contain one main component".to_owned(),
));
}
}
Ok(Self::Journal(main_journal, overrides))
}
pub fn get_first_occurence(&self) -> Result<Option<CalDateTime>, Error> {
match &self {
Self::Event(main_event, overrides) => Ok(overrides
.iter()
.chain(std::iter::once(main_event))
.map(super::event::EventObject::get_dtstart)
.collect::<Result<Vec<_>, _>>()?
.into_iter()
.flatten()
.min()),
_ => Ok(None),
}
}
pub fn get_last_occurence(&self) -> Result<Option<CalDateTime>, Error> {
match &self {
Self::Event(main_event, overrides) => Ok(overrides
.iter()
.chain(std::iter::once(main_event))
.map(super::event::EventObject::get_last_occurence)
.collect::<Result<Vec<_>, _>>()?
.into_iter()
.flatten()
.max()),
_ => Ok(None),
}
}
}
#[derive(Debug, Clone)]
pub struct CalendarObject {
data: CalendarObjectComponent,
properties: Vec<Property>,
id: String,
ics: String,
vtimezones: HashMap<String, IcalTimeZone>,
}
impl CalendarObject {
pub fn from_ics(ics: String, id: Option<String>) -> Result<Self, Error> {
let mut parser = ical::IcalParser::new(BufReader::new(ics.as_bytes()));
let cal = parser.next().ok_or(Error::MissingCalendar)??;
if parser.next().is_some() {
return Err(Error::InvalidData(
"multiple calendars, only one allowed".to_owned(),
));
}
if u8::from(!cal.events.is_empty())
+ u8::from(!cal.todos.is_empty())
+ u8::from(!cal.journals.is_empty())
+ u8::from(!cal.free_busys.is_empty())
!= 1
{
// https://datatracker.ietf.org/doc/html/rfc4791#section-4.1
return Err(Error::InvalidData(
"iCalendar object must have exactly one component type".to_owned(),
));
}
let timezones: HashMap<String, Option<chrono_tz::Tz>> = cal
.timezones
.clone()
.into_iter()
.map(|timezone| (timezone.get_tzid().to_owned(), (&timezone).try_into().ok()))
.collect();
let vtimezones = cal
.timezones
.clone()
.into_iter()
.map(|timezone| (timezone.get_tzid().to_owned(), timezone))
.collect();
let data = if !cal.events.is_empty() {
CalendarObjectComponent::from_events(
cal.events
.into_iter()
.map(|event| EventObject {
event,
timezones: timezones.clone(),
})
.collect(),
)?
} else if !cal.todos.is_empty() {
CalendarObjectComponent::from_todos(cal.todos)?
} else if !cal.journals.is_empty() {
CalendarObjectComponent::from_journals(cal.journals)?
} else {
return Err(Error::InvalidData(
"iCalendar component type not supported :(".to_owned(),
));
};
Ok(Self {
id: id.unwrap_or_else(|| data.get_uid().to_owned()),
data,
properties: cal.properties,
ics,
vtimezones,
})
}
#[must_use]
pub const fn get_vtimezones(&self) -> &HashMap<String, IcalTimeZone> {
&self.vtimezones
}
#[must_use]
pub const fn get_data(&self) -> &CalendarObjectComponent {
&self.data
}
#[must_use]
pub fn get_uid(&self) -> &str {
self.data.get_uid()
}
#[must_use]
pub fn get_id(&self) -> &str {
&self.id
}
#[must_use]
pub fn get_etag(&self) -> String {
let mut hasher = Sha256::new();
hasher.update(self.get_uid());
hasher.update(self.get_ics());
format!("\"{:x}\"", hasher.finalize())
}
#[must_use]
pub fn get_ics(&self) -> &str {
&self.ics
}
#[must_use]
pub fn get_component_name(&self) -> &str {
self.get_object_type().as_str()
}
#[must_use]
pub fn get_object_type(&self) -> CalendarObjectType {
(&self.data).into()
}
pub fn get_first_occurence(&self) -> Result<Option<CalDateTime>, Error> {
self.data.get_first_occurence()
}
pub fn get_last_occurence(&self) -> Result<Option<CalDateTime>, Error> {
self.data.get_last_occurence()
}
pub fn expand_recurrence(
&self,
start: Option<DateTime<Utc>>,
end: Option<DateTime<Utc>>,
) -> Result<String, Error> {
// Only events can be expanded
match &self.data {
CalendarObjectComponent::Event(main_event, overrides) => {
let cal = IcalCalendar {
properties: self.properties.clone(),
events: main_event.expand_recurrence(start, end, overrides)?,
..Default::default()
};
Ok(cal.generate())
}
_ => Ok(self.get_ics().to_string()),
}
}
#[must_use]
pub fn get_property(&self, name: &str) -> Option<&Property> {
self.properties
.iter()
.find(|property| property.name == name)
}
#[must_use]
pub fn get_named_properties(&self, name: &str) -> Vec<&Property> {
self.properties
.iter()
.filter(|property| property.name == name)
.collect()
}
}

View File

@@ -1,15 +1,13 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
mod timestamp;
mod timezone;
use caldata::parser::ParserError;
pub use timestamp::*;
pub use timezone::*;
mod icalendar;
pub use icalendar::*;
mod error;
pub use error::Error;
mod calendar_object;
pub use calendar_object::*;
mod address_object;
pub use address_object::AddressObject;
pub type Error = ParserError;

View File

@@ -1,35 +1,8 @@
use super::timezone::ICalTimezone;
use chrono::{DateTime, Datelike, Duration, Local, NaiveDate, NaiveDateTime, NaiveTime, Utc};
use chrono_tz::Tz;
use chrono::{DateTime, NaiveDateTime, Utc};
use derive_more::derive::Deref;
use ical::property::Property;
use rustical_xml::{ValueDeserialize, ValueSerialize};
use std::{borrow::Cow, collections::HashMap, ops::Add, sync::LazyLock};
static RE_VCARD_DATE_MM_DD: LazyLock<regex::Regex> =
LazyLock::new(|| regex::Regex::new(r"^--(?<m>\d{2})(?<d>\d{2})$").unwrap());
const LOCAL_DATE_TIME: &str = "%Y%m%dT%H%M%S";
const UTC_DATE_TIME: &str = "%Y%m%dT%H%M%SZ";
pub const LOCAL_DATE: &str = "%Y%m%d";
#[derive(Debug, thiserror::Error, PartialEq, Eq)]
pub enum CalDateTimeError {
#[error(
"Timezone has X-LIC-LOCATION property to specify a timezone from the Olson database, however its value {0} is invalid"
)]
InvalidOlson(String),
#[error("TZID {0} does not refer to a valid timezone")]
InvalidTZID(String),
#[error("Timestamp doesn't exist because of gap in local time")]
LocalTimeGap,
#[error("Datetime string {0} has an invalid format")]
InvalidDatetimeFormat(String),
#[error("Could not parse datetime {0}")]
ParseError(String),
#[error("Duration string {0} has an invalid format")]
InvalidDurationFormat(String),
}
#[derive(Debug, Clone, Deref, PartialEq, Eq, Hash)]
pub struct UtcDateTime(pub DateTime<Utc>);
@@ -54,375 +27,3 @@ impl ValueSerialize for UtcDateTime {
format!("{}", self.0.format(UTC_DATE_TIME))
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum CalDateTime {
// Form 1, example: 19980118T230000 -> Local
// Form 2, example: 19980119T070000Z -> UTC
// Form 3, example: TZID=America/New_York:19980119T020000 -> Olson
// https://en.wikipedia.org/wiki/Tz_database
DateTime(DateTime<ICalTimezone>),
Date(NaiveDate, ICalTimezone),
}
impl From<CalDateTime> for DateTime<rrule::Tz> {
fn from(value: CalDateTime) -> Self {
value
.as_datetime()
.into_owned()
.with_timezone(&value.timezone().into())
}
}
impl From<DateTime<rrule::Tz>> for CalDateTime {
fn from(value: DateTime<rrule::Tz>) -> Self {
Self::DateTime(value.with_timezone(&value.timezone().into()))
}
}
impl PartialOrd for CalDateTime {
fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
Some(self.cmp(other))
}
}
impl Ord for CalDateTime {
fn cmp(&self, other: &Self) -> std::cmp::Ordering {
match (&self, &other) {
(Self::DateTime(a), Self::DateTime(b)) => a.cmp(b),
(Self::DateTime(a), Self::Date(..)) => a.cmp(&other.as_datetime()),
(Self::Date(..), Self::DateTime(b)) => self.as_datetime().as_ref().cmp(b),
(Self::Date(..), Self::Date(..)) => self.as_datetime().cmp(&other.as_datetime()),
}
}
}
impl From<DateTime<Local>> for CalDateTime {
fn from(value: DateTime<Local>) -> Self {
Self::DateTime(value.with_timezone(&ICalTimezone::Local))
}
}
impl From<DateTime<Utc>> for CalDateTime {
fn from(value: DateTime<Utc>) -> Self {
Self::DateTime(value.with_timezone(&ICalTimezone::Olson(chrono_tz::UTC)))
}
}
impl Add<Duration> for CalDateTime {
type Output = Self;
fn add(self, duration: Duration) -> Self::Output {
match self {
Self::DateTime(datetime) => Self::DateTime(datetime + duration),
Self::Date(date, tz) => Self::DateTime(
date.and_time(NaiveTime::default())
.and_local_timezone(tz)
.earliest()
.expect("Local timezone has constant offset")
+ duration,
),
}
}
}
impl CalDateTime {
pub fn parse_prop(
prop: &Property,
timezones: &HashMap<String, Option<chrono_tz::Tz>>,
) -> Result<Self, CalDateTimeError> {
let prop_value = prop
.value
.as_ref()
.ok_or_else(|| CalDateTimeError::InvalidDatetimeFormat("empty property".into()))?;
let timezone = if let Some(tzid) = prop.get_param("TZID") {
if let Some(timezone) = timezones.get(tzid) {
timezone.to_owned()
} else {
// TZID refers to timezone that does not exist
return Err(CalDateTimeError::InvalidTZID(tzid.to_string()));
}
} else {
// No explicit timezone specified.
// This is valid and will be localtime or UTC depending on the value
// We will stick to this default as documented in https://github.com/lennart-k/rustical/issues/102
None
};
Self::parse(prop_value, timezone)
}
#[must_use]
pub fn format(&self) -> String {
match self {
Self::DateTime(datetime) => match datetime.timezone() {
ICalTimezone::Olson(chrono_tz::UTC) => datetime.format(UTC_DATE_TIME).to_string(),
_ => datetime.format(LOCAL_DATE_TIME).to_string(),
},
Self::Date(date, _) => date.format(LOCAL_DATE).to_string(),
}
}
#[must_use]
pub fn format_date(&self) -> String {
match self {
Self::DateTime(datetime) => datetime.format(LOCAL_DATE).to_string(),
Self::Date(date, _) => date.format(LOCAL_DATE).to_string(),
}
}
#[must_use]
pub fn date(&self) -> NaiveDate {
match self {
Self::DateTime(datetime) => datetime.date_naive(),
Self::Date(date, _) => date.to_owned(),
}
}
#[must_use]
pub const fn is_date(&self) -> bool {
matches!(&self, Self::Date(_, _))
}
#[must_use]
pub fn as_datetime(&self) -> Cow<'_, DateTime<ICalTimezone>> {
match self {
Self::DateTime(datetime) => Cow::Borrowed(datetime),
Self::Date(date, tz) => Cow::Owned(
date.and_time(NaiveTime::default())
.and_local_timezone(tz.to_owned())
.earliest()
.expect("Midnight always exists"),
),
}
}
#[must_use]
pub fn with_timezone(&self, tz: &ICalTimezone) -> Self {
match self {
Self::DateTime(datetime) => Self::DateTime(datetime.with_timezone(tz)),
Self::Date(date, _) => Self::Date(date.to_owned(), tz.to_owned()),
}
}
pub fn parse(value: &str, timezone: Option<Tz>) -> Result<Self, CalDateTimeError> {
if let Ok(datetime) = NaiveDateTime::parse_from_str(value, LOCAL_DATE_TIME) {
if let Some(timezone) = timezone {
return Ok(Self::DateTime(
datetime
.and_local_timezone(timezone.into())
.earliest()
.ok_or(CalDateTimeError::LocalTimeGap)?,
));
}
return Ok(Self::DateTime(
datetime
.and_local_timezone(ICalTimezone::Local)
.earliest()
.ok_or(CalDateTimeError::LocalTimeGap)?,
));
}
if let Ok(datetime) = NaiveDateTime::parse_from_str(value, UTC_DATE_TIME) {
return Ok(datetime.and_utc().into());
}
let timezone = timezone.map_or(ICalTimezone::Local, ICalTimezone::Olson);
if let Ok(date) = NaiveDate::parse_from_str(value, LOCAL_DATE) {
return Ok(Self::Date(date, timezone));
}
if let Ok(date) = NaiveDate::parse_from_str(value, "%Y-%m-%d") {
return Ok(Self::Date(date, timezone));
}
if let Ok(date) = NaiveDate::parse_from_str(value, "%Y%m%d") {
return Ok(Self::Date(date, timezone));
}
Err(CalDateTimeError::InvalidDatetimeFormat(value.to_string()))
}
// Also returns whether the date contains a year
pub fn parse_vcard(value: &str) -> Result<(Self, bool), CalDateTimeError> {
if let Ok(datetime) = Self::parse(value, None) {
return Ok((datetime, true));
}
if let Some(captures) = RE_VCARD_DATE_MM_DD.captures(value) {
// Because 1972 is a leap year
let year = 1972;
// Cannot fail because of the regex
let month = captures.name("m").unwrap().as_str().parse().ok().unwrap();
let day = captures.name("d").unwrap().as_str().parse().ok().unwrap();
return Ok((
Self::Date(
NaiveDate::from_ymd_opt(year, month, day)
.ok_or_else(|| CalDateTimeError::ParseError(value.to_string()))?,
ICalTimezone::Local,
),
false,
));
}
Err(CalDateTimeError::InvalidDatetimeFormat(value.to_string()))
}
#[must_use]
pub fn utc(&self) -> DateTime<Utc> {
self.as_datetime().to_utc()
}
#[must_use]
pub fn timezone(&self) -> ICalTimezone {
match &self {
Self::DateTime(datetime) => datetime.timezone(),
Self::Date(_, tz) => tz.to_owned(),
}
}
}
impl From<CalDateTime> for DateTime<Utc> {
fn from(value: CalDateTime) -> Self {
value.utc()
}
}
impl Datelike for CalDateTime {
fn year(&self) -> i32 {
match &self {
Self::DateTime(datetime) => datetime.year(),
Self::Date(date, _) => date.year(),
}
}
fn month(&self) -> u32 {
match &self {
Self::DateTime(datetime) => datetime.month(),
Self::Date(date, _) => date.month(),
}
}
fn month0(&self) -> u32 {
match &self {
Self::DateTime(datetime) => datetime.month0(),
Self::Date(date, _) => date.month0(),
}
}
fn day(&self) -> u32 {
match &self {
Self::DateTime(datetime) => datetime.day(),
Self::Date(date, _) => date.day(),
}
}
fn day0(&self) -> u32 {
match &self {
Self::DateTime(datetime) => datetime.day0(),
Self::Date(date, _) => date.day0(),
}
}
fn ordinal(&self) -> u32 {
match &self {
Self::DateTime(datetime) => datetime.ordinal(),
Self::Date(date, _) => date.ordinal(),
}
}
fn ordinal0(&self) -> u32 {
match &self {
Self::DateTime(datetime) => datetime.ordinal0(),
Self::Date(date, _) => date.ordinal0(),
}
}
fn weekday(&self) -> chrono::Weekday {
match &self {
Self::DateTime(datetime) => datetime.weekday(),
Self::Date(date, _) => date.weekday(),
}
}
fn iso_week(&self) -> chrono::IsoWeek {
match &self {
Self::DateTime(datetime) => datetime.iso_week(),
Self::Date(date, _) => date.iso_week(),
}
}
fn with_year(&self, year: i32) -> Option<Self> {
match &self {
Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_year(year)?)),
Self::Date(date, tz) => Some(Self::Date(date.with_year(year)?, tz.to_owned())),
}
}
fn with_month(&self, month: u32) -> Option<Self> {
match &self {
Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_month(month)?)),
Self::Date(date, tz) => Some(Self::Date(date.with_month(month)?, tz.to_owned())),
}
}
fn with_month0(&self, month0: u32) -> Option<Self> {
match &self {
Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_month0(month0)?)),
Self::Date(date, tz) => Some(Self::Date(date.with_month0(month0)?, tz.to_owned())),
}
}
fn with_day(&self, day: u32) -> Option<Self> {
match &self {
Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_day(day)?)),
Self::Date(date, tz) => Some(Self::Date(date.with_day(day)?, tz.to_owned())),
}
}
fn with_day0(&self, day0: u32) -> Option<Self> {
match &self {
Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_day0(day0)?)),
Self::Date(date, tz) => Some(Self::Date(date.with_day0(day0)?, tz.to_owned())),
}
}
fn with_ordinal(&self, ordinal: u32) -> Option<Self> {
match &self {
Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_ordinal(ordinal)?)),
Self::Date(date, tz) => Some(Self::Date(date.with_ordinal(ordinal)?, tz.to_owned())),
}
}
fn with_ordinal0(&self, ordinal0: u32) -> Option<Self> {
match &self {
Self::DateTime(datetime) => Some(Self::DateTime(datetime.with_ordinal0(ordinal0)?)),
Self::Date(date, tz) => Some(Self::Date(date.with_ordinal0(ordinal0)?, tz.to_owned())),
}
}
}
#[cfg(test)]
mod tests {
use crate::CalDateTime;
use chrono::NaiveDate;
#[test]
fn test_vcard_date() {
assert_eq!(
CalDateTime::parse_vcard("19850412").unwrap(),
(
CalDateTime::Date(
NaiveDate::from_ymd_opt(1985, 4, 12).unwrap(),
crate::ICalTimezone::Local
),
true
)
);
assert_eq!(
CalDateTime::parse_vcard("1985-04-12").unwrap(),
(
CalDateTime::Date(
NaiveDate::from_ymd_opt(1985, 4, 12).unwrap(),
crate::ICalTimezone::Local
),
true
)
);
assert_eq!(
CalDateTime::parse_vcard("--0412").unwrap(),
(
CalDateTime::Date(
NaiveDate::from_ymd_opt(1972, 4, 12).unwrap(),
crate::ICalTimezone::Local
),
false
)
);
}
}
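
To make the branches in `CalDateTime::parse` above easier to follow, here is a minimal sketch (not part of this changeset) exercising the three RFC 5545 DATE-TIME forms. It assumes it sits next to the existing test module, so the `crate::` paths, the module name, and the test name are illustrative assumptions rather than code from the diff.

```rust
#[cfg(test)]
mod parse_sketch {
    use crate::{CalDateTime, ICalTimezone};

    // Illustrative sketch only, not part of the diff: the three RFC 5545
    // DATE-TIME forms as handled by CalDateTime::parse above.
    #[test]
    fn sketch_parse_forms() {
        // Form 1: floating local time (no TZID parameter, no trailing Z)
        let floating = CalDateTime::parse("19980118T230000", None).unwrap();
        assert!(matches!(floating.timezone(), ICalTimezone::Local));

        // Form 2: UTC, marked by the trailing Z
        let utc = CalDateTime::parse("19980119T070000Z", None).unwrap();
        assert!(matches!(utc.timezone(), ICalTimezone::Olson(chrono_tz::UTC)));

        // Form 3: a value carrying a TZID, here already resolved to an Olson
        // timezone (parse_prop performs that resolution for real properties)
        let nyc =
            CalDateTime::parse("19980119T020000", Some(chrono_tz::America::New_York)).unwrap();
        assert!(matches!(nyc.timezone(), ICalTimezone::Olson(_)));

        // VALUE=DATE values become the Date variant
        let date = CalDateTime::parse("19980119", None).unwrap();
        assert!(date.is_date());
    }
}
```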

View File

@@ -1,92 +0,0 @@
use chrono::{Local, NaiveDate, NaiveDateTime, TimeZone};
use chrono_tz::Tz;
use derive_more::{Display, From};
#[derive(Debug, Clone, From, PartialEq, Eq)]
pub enum ICalTimezone {
Local,
Olson(Tz),
}
impl From<ICalTimezone> for rrule::Tz {
fn from(value: ICalTimezone) -> Self {
match value {
ICalTimezone::Local => Self::LOCAL,
ICalTimezone::Olson(tz) => Self::Tz(tz),
}
}
}
impl From<rrule::Tz> for ICalTimezone {
fn from(value: rrule::Tz) -> Self {
match value {
rrule::Tz::Local(_) => Self::Local,
rrule::Tz::Tz(tz) => Self::Olson(tz),
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Display)]
pub enum CalTimezoneOffset {
Local(chrono::FixedOffset),
Olson(chrono_tz::TzOffset),
}
impl chrono::Offset for CalTimezoneOffset {
fn fix(&self) -> chrono::FixedOffset {
match self {
Self::Local(local) => local.fix(),
Self::Olson(olson) => olson.fix(),
}
}
}
impl TimeZone for ICalTimezone {
type Offset = CalTimezoneOffset;
fn from_offset(offset: &Self::Offset) -> Self {
match offset {
CalTimezoneOffset::Local(_) => Self::Local,
CalTimezoneOffset::Olson(offset) => Self::Olson(Tz::from_offset(offset)),
}
}
fn offset_from_local_date(&self, local: &NaiveDate) -> chrono::MappedLocalTime<Self::Offset> {
match self {
Self::Local => Local
.offset_from_local_date(local)
.map(CalTimezoneOffset::Local),
Self::Olson(tz) => tz
.offset_from_local_date(local)
.map(CalTimezoneOffset::Olson),
}
}
fn offset_from_local_datetime(
&self,
local: &NaiveDateTime,
) -> chrono::MappedLocalTime<Self::Offset> {
match self {
Self::Local => Local
.offset_from_local_datetime(local)
.map(CalTimezoneOffset::Local),
Self::Olson(tz) => tz
.offset_from_local_datetime(local)
.map(CalTimezoneOffset::Olson),
}
}
fn offset_from_utc_datetime(&self, utc: &NaiveDateTime) -> Self::Offset {
match self {
Self::Local => CalTimezoneOffset::Local(Local.offset_from_utc_datetime(utc)),
Self::Olson(tz) => CalTimezoneOffset::Olson(tz.offset_from_utc_datetime(utc)),
}
}
fn offset_from_utc_date(&self, utc: &NaiveDate) -> Self::Offset {
match self {
Self::Local => CalTimezoneOffset::Local(Local.offset_from_utc_date(utc)),
Self::Olson(tz) => CalTimezoneOffset::Olson(tz.offset_from_utc_date(utc)),
}
}
}

View File

@@ -25,6 +25,6 @@ END:VCALENDAR
#[test]
fn parse_calendar_object() {
let object = CalendarObject::from_ics(MULTI_VEVENT.to_string(), None).unwrap();
object.expand_recurrence(None, None).unwrap();
let object = CalendarObject::from_ics(MULTI_VEVENT.to_string()).unwrap();
object.get_inner().expand_recurrence(None, None);
}

View File

@@ -13,7 +13,7 @@ anyhow.workspace = true
async-trait.workspace = true
serde.workspace = true
sha2.workspace = true
ical.workspace = true
caldata.workspace = true
chrono.workspace = true
regex.workspace = true
thiserror.workspace = true

View File

@@ -15,8 +15,8 @@ pub trait AddressbookStore: Send + Sync + 'static {
async fn update_addressbook(
&self,
principal: String,
id: String,
principal: &str,
id: &str,
addressbook: Addressbook,
) -> Result<(), Error>;
async fn insert_addressbook(&self, addressbook: Addressbook) -> Result<(), Error>;
@@ -33,7 +33,7 @@ pub trait AddressbookStore: Send + Sync + 'static {
principal: &str,
addressbook_id: &str,
synctoken: i64,
) -> Result<(Vec<AddressObject>, Vec<String>, i64), Error>;
) -> Result<(Vec<(String, AddressObject)>, Vec<String>, i64), Error>;
async fn addressbook_metadata(
&self,
@@ -45,7 +45,7 @@ pub trait AddressbookStore: Send + Sync + 'static {
&self,
principal: &str,
addressbook_id: &str,
) -> Result<Vec<AddressObject>, Error>;
) -> Result<Vec<(String, AddressObject)>, Error>;
async fn get_object(
&self,
principal: &str,
@@ -55,8 +55,9 @@ pub trait AddressbookStore: Send + Sync + 'static {
) -> Result<AddressObject, Error>;
async fn put_object(
&self,
principal: String,
addressbook_id: String,
principal: &str,
addressbook_id: &str,
object_id: &str,
object: AddressObject,
overwrite: bool,
) -> Result<(), Error>;
@@ -77,7 +78,7 @@ pub trait AddressbookStore: Send + Sync + 'static {
async fn import_addressbook(
&self,
addressbook: Addressbook,
objects: Vec<AddressObject>,
objects: Vec<(String, AddressObject)>,
merge_existing: bool,
) -> Result<(), Error>;
}

View File

@@ -22,8 +22,8 @@ pub trait CalendarStore: Send + Sync + 'static {
async fn update_calendar(
&self,
principal: String,
id: String,
principal: &str,
id: &str,
calendar: Calendar,
) -> Result<(), Error>;
async fn insert_calendar(&self, calendar: Calendar) -> Result<(), Error>;
@@ -46,7 +46,7 @@ pub trait CalendarStore: Send + Sync + 'static {
principal: &str,
cal_id: &str,
synctoken: i64,
) -> Result<(Vec<CalendarObject>, Vec<String>, i64), Error>;
) -> Result<(Vec<(String, CalendarObject)>, Vec<String>, i64), Error>;
/// Since the <calendar-query> rules are rather complex this function
/// is only meant to do some prefiltering
@@ -55,7 +55,7 @@ pub trait CalendarStore: Send + Sync + 'static {
principal: &str,
cal_id: &str,
_query: CalendarQuery,
) -> Result<Vec<CalendarObject>, Error> {
) -> Result<Vec<(String, CalendarObject)>, Error> {
self.get_objects(principal, cal_id).await
}
@@ -69,7 +69,7 @@ pub trait CalendarStore: Send + Sync + 'static {
&self,
principal: &str,
cal_id: &str,
) -> Result<Vec<CalendarObject>, Error>;
) -> Result<Vec<(String, CalendarObject)>, Error>;
async fn get_object(
&self,
principal: &str,
@@ -79,19 +79,25 @@ pub trait CalendarStore: Send + Sync + 'static {
) -> Result<CalendarObject, Error>;
async fn put_objects(
&self,
principal: String,
cal_id: String,
objects: Vec<CalendarObject>,
principal: &str,
cal_id: &str,
objects: Vec<(String, CalendarObject)>,
overwrite: bool,
) -> Result<(), Error>;
async fn put_object(
&self,
principal: String,
cal_id: String,
principal: &str,
cal_id: &str,
object_id: &str,
object: CalendarObject,
overwrite: bool,
) -> Result<(), Error> {
self.put_objects(principal, cal_id, vec![object], overwrite)
self.put_objects(
principal,
cal_id,
vec![(object_id.to_owned(), object)],
overwrite,
)
.await
}
async fn delete_object(

View File

@@ -1,5 +1,6 @@
use crate::CalendarStore;
use crate::{Calendar, CalendarStore, calendar_store::CalendarQuery};
use async_trait::async_trait;
use rustical_ical::CalendarObject;
use std::{collections::HashMap, sync::Arc};
pub trait PrefixedCalendarStore: CalendarStore {
@@ -51,11 +52,11 @@ impl CalendarStore for CombinedCalendarStore {
async fn update_calendar(
&self,
principal: String,
id: String,
calendar: crate::Calendar,
principal: &str,
id: &str,
calendar: Calendar,
) -> Result<(), crate::Error> {
self.store_for_id(&id)
self.store_for_id(id)
.update_calendar(principal, id, calendar)
.await
}
@@ -88,7 +89,7 @@ impl CalendarStore for CombinedCalendarStore {
principal: &str,
cal_id: &str,
synctoken: i64,
) -> Result<(Vec<rustical_ical::CalendarObject>, Vec<String>, i64), crate::Error> {
) -> Result<(Vec<(String, CalendarObject)>, Vec<String>, i64), crate::Error> {
self.store_for_id(cal_id)
.sync_changes(principal, cal_id, synctoken)
.await
@@ -97,7 +98,7 @@ impl CalendarStore for CombinedCalendarStore {
async fn import_calendar(
&self,
calendar: crate::Calendar,
objects: Vec<rustical_ical::CalendarObject>,
objects: Vec<CalendarObject>,
merge_existing: bool,
) -> Result<(), crate::Error> {
self.store_for_id(&calendar.id)
@@ -109,8 +110,8 @@ impl CalendarStore for CombinedCalendarStore {
&self,
principal: &str,
cal_id: &str,
query: crate::calendar_store::CalendarQuery,
) -> Result<Vec<rustical_ical::CalendarObject>, crate::Error> {
query: CalendarQuery,
) -> Result<Vec<(String, CalendarObject)>, crate::Error> {
self.store_for_id(cal_id)
.calendar_query(principal, cal_id, query)
.await
@@ -141,7 +142,7 @@ impl CalendarStore for CombinedCalendarStore {
&self,
principal: &str,
cal_id: &str,
) -> Result<Vec<rustical_ical::CalendarObject>, crate::Error> {
) -> Result<Vec<(String, CalendarObject)>, crate::Error> {
self.store_for_id(cal_id)
.get_objects(principal, cal_id)
.await
@@ -149,12 +150,12 @@ impl CalendarStore for CombinedCalendarStore {
async fn put_objects(
&self,
principal: String,
cal_id: String,
objects: Vec<rustical_ical::CalendarObject>,
principal: &str,
cal_id: &str,
objects: Vec<(String, CalendarObject)>,
overwrite: bool,
) -> Result<(), crate::Error> {
self.store_for_id(&cal_id)
self.store_for_id(cal_id)
.put_objects(principal, cal_id, objects, overwrite)
.await
}

View File

@@ -26,7 +26,7 @@ pub enum Error {
Other(#[from] anyhow::Error),
#[error(transparent)]
IcalError(#[from] rustical_ical::Error),
IcalError(#[from] caldata::parser::ParserError),
}
impl Error {
@@ -36,7 +36,7 @@ impl Error {
Self::NotFound => StatusCode::NOT_FOUND,
Self::AlreadyExists => StatusCode::CONFLICT,
Self::ReadOnly => StatusCode::FORBIDDEN,
Self::IcalError(err) => err.status_code(),
Self::IcalError(_err) => StatusCode::INTERNAL_SERVER_ERROR,
Self::InvalidPrincipalType(_) => StatusCode::BAD_REQUEST,
_ => StatusCode::INTERNAL_SERVER_ERROR,
}
@@ -52,9 +52,7 @@ impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
if matches!(
self.status_code(),
StatusCode::INTERNAL_SERVER_ERROR
| StatusCode::PRECONDITION_FAILED
| StatusCode::CONFLICT
StatusCode::INTERNAL_SERVER_ERROR | StatusCode::CONFLICT
) {
error!("{self}");
}

View File

@@ -20,6 +20,7 @@ rstest.workspace = true
criterion.workspace = true
[dependencies]
caldata.workspace = true
tokio.workspace = true
rustical_store.workspace = true
async-trait.workspace = true

View File

@@ -34,16 +34,19 @@ fn benchmark(c: &mut Criterion) {
cal_store
});
let object = CalendarObject::from_ics(include_str!("ical_event.ics").to_owned(), None).unwrap();
let row = (
"asd".to_owned(),
CalendarObject::from_ics(include_str!("ical_event.ics").to_owned()).unwrap(),
);
let batch_size = 1000;
let objects: Vec<_> = std::iter::repeat_n(object.clone(), batch_size).collect();
let objects: Vec<_> = std::iter::repeat_n(row.clone(), batch_size).collect();
c.bench_function("put_batch", |b| {
b.to_async(&runtime).iter(async || {
// yeet
cal_store
.put_objects("user".to_owned(), "okwow".to_owned(), objects.clone(), true)
.put_objects("user", "okwow", objects.clone(), true)
.await
.unwrap();
});
@@ -54,7 +57,7 @@ fn benchmark(c: &mut Criterion) {
// yeet
for _ in 0..1000 {
cal_store
.put_object("user".to_owned(), "okwow".to_owned(), object.clone(), true)
.put_object("user", "okwow", &row.0, row.1.clone(), true)
.await
.unwrap();
}

View File

@@ -1,14 +1,13 @@
use crate::addressbook_store::SqliteAddressbookStore;
use async_trait::async_trait;
use chrono::NaiveDateTime;
use rustical_ical::{AddressObject, CalendarObject, CalendarObjectType};
use rustical_ical::{CalendarObject, CalendarObjectType};
use rustical_store::{
Addressbook, AddressbookStore, Calendar, CalendarMetadata, CalendarStore, CollectionMetadata,
Error, PrefixedCalendarStore,
};
use sha2::{Digest, Sha256};
use sqlx::{Executor, Sqlite};
use std::collections::HashMap;
use tracing::instrument;
pub const BIRTHDAYS_PREFIX: &str = "_birthdays_";
@@ -269,17 +268,18 @@ impl CalendarStore for SqliteAddressbookStore {
#[instrument]
async fn update_calendar(
&self,
principal: String,
id: String,
principal: &str,
id: &str,
mut calendar: Calendar,
) -> Result<(), Error> {
assert_eq!(principal, calendar.principal);
assert_eq!(id, calendar.id);
calendar.id = calendar
.id
.strip_prefix(BIRTHDAYS_PREFIX)
.ok_or(Error::NotFound)?
.to_string();
Self::_update_birthday_calendar(&self.db, &principal, &calendar).await
Self::_update_birthday_calendar(&self.db, principal, &calendar).await
}
#[instrument]
@@ -324,19 +324,35 @@ impl CalendarStore for SqliteAddressbookStore {
principal: &str,
cal_id: &str,
synctoken: i64,
) -> Result<(Vec<CalendarObject>, Vec<String>, i64), Error> {
) -> Result<(Vec<(String, CalendarObject)>, Vec<String>, i64), Error> {
let cal_id = cal_id
.strip_prefix(BIRTHDAYS_PREFIX)
.ok_or(Error::NotFound)?;
let (objects, deleted_objects, new_synctoken) =
AddressbookStore::sync_changes(self, principal, cal_id, synctoken).await?;
let objects: Result<Vec<Option<CalendarObject>>, rustical_ical::Error> = objects
.iter()
.map(AddressObject::get_birthday_object)
.collect();
let objects = objects?.into_iter().flatten().collect();
Ok((objects, deleted_objects, new_synctoken))
let mut out_objects = vec![];
for (object_id, object) in objects {
if let Some(birthday) = object.get_birthday_object()? {
out_objects.push((format!("{object_id}-birthday"), birthday));
}
if let Some(anniversary) = object.get_anniversary_object()? {
out_objects.push((format!("{object_id}-anniversary"), anniversary));
}
}
let deleted_objects = deleted_objects
.into_iter()
.flat_map(|object_id| {
[
format!("{object_id}-birthday"),
format!("{object_id}-anniversary"),
]
})
.collect();
Ok((out_objects, deleted_objects, new_synctoken))
}
#[instrument]
@@ -356,21 +372,19 @@ impl CalendarStore for SqliteAddressbookStore {
&self,
principal: &str,
cal_id: &str,
) -> Result<Vec<CalendarObject>, Error> {
) -> Result<Vec<(String, CalendarObject)>, Error> {
let mut objects = vec![];
let cal_id = cal_id
.strip_prefix(BIRTHDAYS_PREFIX)
.ok_or(Error::NotFound)?;
let objects: Result<Vec<HashMap<&'static str, CalendarObject>>, rustical_ical::Error> =
AddressbookStore::get_objects(self, principal, cal_id)
.await?
.iter()
.map(AddressObject::get_significant_dates)
.collect();
let objects = objects?
.into_iter()
.flat_map(HashMap::into_values)
.collect();
for (object_id, object) in AddressbookStore::get_objects(self, principal, cal_id).await? {
if let Some(birthday) = object.get_birthday_object()? {
objects.push((format!("{object_id}-birthday"), birthday));
}
if let Some(anniversary) = object.get_anniversary_object()? {
objects.push((format!("{object_id}-anniversary"), anniversary));
}
}
Ok(objects)
}
@@ -386,19 +400,22 @@ impl CalendarStore for SqliteAddressbookStore {
.strip_prefix(BIRTHDAYS_PREFIX)
.ok_or(Error::NotFound)?;
let (addressobject_id, date_type) = object_id.rsplit_once('-').ok_or(Error::NotFound)?;
let obj =
AddressbookStore::get_object(self, principal, cal_id, addressobject_id, show_deleted)
.await?
.get_significant_dates()?
.remove(date_type)
.ok_or(Error::NotFound)
.await?;
match date_type {
"birthday" => Ok(obj.get_birthday_object()?.ok_or(Error::NotFound)?),
"anniversary" => Ok(obj.get_anniversary_object()?.ok_or(Error::NotFound)?),
_ => Err(Error::NotFound),
}
}
#[instrument]
async fn put_objects(
&self,
_principal: String,
_cal_id: String,
_objects: Vec<CalendarObject>,
_principal: &str,
_cal_id: &str,
_objects: Vec<(String, CalendarObject)>,
_overwrite: bool,
) -> Result<(), Error> {
Err(Error::ReadOnly)

View File

@@ -1,6 +1,7 @@
use super::ChangeOperation;
use crate::BEGIN_IMMEDIATE;
use async_trait::async_trait;
use caldata::parser::ParserError;
use derive_more::derive::Constructor;
use rustical_ical::AddressObject;
use rustical_store::{
@@ -9,7 +10,7 @@ use rustical_store::{
};
use sqlx::{Acquire, Executor, Sqlite, SqlitePool, Transaction};
use tokio::sync::mpsc::Sender;
use tracing::{error_span, instrument, warn};
use tracing::{error, error_span, instrument, warn};
pub mod birthday_calendar;
@@ -18,12 +19,18 @@ struct AddressObjectRow {
id: String,
vcf: String,
}
impl From<AddressObjectRow> for (String, Result<AddressObject, ParserError>) {
fn from(row: AddressObjectRow) -> Self {
let result = AddressObject::from_vcf(row.vcf);
(row.id, result)
}
}
impl TryFrom<AddressObjectRow> for AddressObject {
impl TryFrom<AddressObjectRow> for (String, AddressObject) {
type Error = rustical_store::Error;
fn try_from(value: AddressObjectRow) -> Result<Self, Self::Error> {
Ok(Self::from_vcf(value.id, value.vcf)?)
Ok((value.id, AddressObject::from_vcf(value.vcf)?))
}
}
@@ -31,6 +38,7 @@ impl TryFrom<AddressObjectRow> for AddressObject {
pub struct SqliteAddressbookStore {
db: SqlitePool,
sender: Sender<CollectionOperation>,
skip_broken: bool,
}
impl SqliteAddressbookStore {
@@ -88,6 +96,36 @@ impl SqliteAddressbookStore {
Ok(())
}
#[allow(clippy::missing_panics_doc)]
pub async fn validate_objects(&self, principal: &str) -> Result<(), Error> {
let mut success = true;
for addressbook in self.get_addressbooks(principal).await? {
for (object_id, res) in Self::_get_objects(&self.db, principal, &addressbook.id).await?
{
if let Err(err) = res {
warn!(
"Invalid address object found at {principal}/{addr_id}/{object_id}.vcf. Error: {err}",
addr_id = addressbook.id
);
success = false;
}
}
}
if !success {
if self.skip_broken {
error!(
"Not all address objects are valid. Since data_store.sqlite.skip_broken=true they will be hidden. You are still advised to manually remove or repair the object. If you need help feel free to open up an issue on GitHub."
);
} else {
error!(
"Not all address objects are valid. Since data_store.sqlite.skip_broken=false this causes a panic. Remove or repair the broken objects manually or set data_store.sqlite.skip_broken=false as a temporary solution to ignore the error. If you need help feel free to open up an issue on GitHub."
);
panic!();
}
}
Ok(())
}
// Logs an operation to an address object
async fn log_object_operation(
tx: &mut Transaction<'_, Sqlite>,
@@ -134,7 +172,7 @@ impl SqliteAddressbookStore {
if let Err(err) = self.sender.try_send(CollectionOperation { topic, data }) {
error_span!(
"Error trying to send addressbook update notification:",
err = format!("{err:?}"),
err = format!("{err}"),
);
}
}
@@ -290,7 +328,7 @@ impl SqliteAddressbookStore {
principal: &str,
addressbook_id: &str,
synctoken: i64,
) -> Result<(Vec<AddressObject>, Vec<String>, i64), rustical_store::Error> {
) -> Result<(Vec<(String, AddressObject)>, Vec<String>, i64), rustical_store::Error> {
struct Row {
object_id: String,
synctoken: i64,
@@ -318,7 +356,7 @@ impl SqliteAddressbookStore {
for Row { object_id, .. } in changes {
match Self::_get_object(&mut *conn, principal, addressbook_id, &object_id, false).await
{
Ok(object) => objects.push(object),
Ok(object) => objects.push((object_id, object)),
Err(rustical_store::Error::NotFound) => deleted_objects.push(object_id),
Err(err) => return Err(err),
}
@@ -353,8 +391,8 @@ impl SqliteAddressbookStore {
executor: E,
principal: &str,
addressbook_id: &str,
) -> Result<Vec<AddressObject>, rustical_store::Error> {
sqlx::query_as!(
) -> Result<impl Iterator<Item = (String, Result<AddressObject, ParserError>)>, Error> {
Ok(sqlx::query_as!(
AddressObjectRow,
"SELECT id, vcf FROM addressobjects WHERE principal = ? AND addressbook_id = ? AND deleted_at IS NULL",
principal,
@@ -363,8 +401,8 @@ impl SqliteAddressbookStore {
.fetch_all(executor)
.await.map_err(crate::Error::from)?
.into_iter()
.map(std::convert::TryInto::try_into)
.collect()
.map(Into::into)
)
}
async fn _get_object<'e, E: Executor<'e, Database = Sqlite>>(
@@ -374,7 +412,7 @@ impl SqliteAddressbookStore {
object_id: &str,
show_deleted: bool,
) -> Result<AddressObject, rustical_store::Error> {
sqlx::query_as!(
let (id, object) = sqlx::query_as!(
AddressObjectRow,
"SELECT id, vcf FROM addressobjects WHERE (principal, addressbook_id, id) = (?, ?, ?) AND ((deleted_at IS NULL) OR ?)",
principal,
@@ -385,17 +423,20 @@ impl SqliteAddressbookStore {
.fetch_one(executor)
.await
.map_err(crate::Error::from)?
.try_into()
.try_into()?;
assert_eq!(id, object_id);
Ok(object)
}
async fn _put_object<'e, E: Executor<'e, Database = Sqlite>>(
executor: E,
principal: &str,
addressbook_id: &str,
object_id: &str,
object: &AddressObject,
overwrite: bool,
) -> Result<(), rustical_store::Error> {
let (object_id, vcf) = (object.get_id(), object.get_vcf());
let vcf = object.get_vcf();
(if overwrite {
sqlx::query!(
@@ -500,11 +541,13 @@ impl AddressbookStore for SqliteAddressbookStore {
#[instrument]
async fn update_addressbook(
&self,
principal: String,
id: String,
principal: &str,
id: &str,
addressbook: Addressbook,
) -> Result<(), rustical_store::Error> {
Self::_update_addressbook(&self.db, &principal, &id, &addressbook).await
assert_eq!(principal, &addressbook.principal);
assert_eq!(id, &addressbook.id);
Self::_update_addressbook(&self.db, principal, id, &addressbook).await
}
#[instrument]
@@ -569,7 +612,7 @@ impl AddressbookStore for SqliteAddressbookStore {
principal: &str,
addressbook_id: &str,
synctoken: i64,
) -> Result<(Vec<AddressObject>, Vec<String>, i64), rustical_store::Error> {
) -> Result<(Vec<(String, AddressObject)>, Vec<String>, i64), rustical_store::Error> {
Self::_sync_changes(&self.db, principal, addressbook_id, synctoken).await
}
@@ -601,8 +644,17 @@ impl AddressbookStore for SqliteAddressbookStore {
&self,
principal: &str,
addressbook_id: &str,
) -> Result<Vec<AddressObject>, rustical_store::Error> {
Self::_get_objects(&self.db, principal, addressbook_id).await
) -> Result<Vec<(String, AddressObject)>, rustical_store::Error> {
let objects = Self::_get_objects(&self.db, principal, addressbook_id).await?;
if self.skip_broken {
Ok(objects
.filter_map(|(id, res)| Some((id, res.ok()?)))
.collect())
} else {
Ok(objects
.map(|(id, res)| res.map(|obj| (id, obj)))
.collect::<Result<Vec<_>, _>>()?)
}
}
#[instrument]
@@ -619,8 +671,9 @@ impl AddressbookStore for SqliteAddressbookStore {
#[instrument]
async fn put_object(
&self,
principal: String,
addressbook_id: String,
principal: &str,
addressbook_id: &str,
object_id: &str,
object: AddressObject,
overwrite: bool,
) -> Result<(), rustical_store::Error> {
@@ -630,15 +683,21 @@ impl AddressbookStore for SqliteAddressbookStore {
.await
.map_err(crate::Error::from)?;
let object_id = object.get_id().to_owned();
Self::_put_object(&mut *tx, &principal, &addressbook_id, &object, overwrite).await?;
Self::_put_object(
&mut *tx,
principal,
addressbook_id,
object_id,
&object,
overwrite,
)
.await?;
let sync_token = Self::log_object_operation(
&mut tx,
&principal,
&addressbook_id,
&object_id,
principal,
addressbook_id,
object_id,
ChangeOperation::Add,
)
.await
@@ -648,7 +707,7 @@ impl AddressbookStore for SqliteAddressbookStore {
self.send_push_notification(
CollectionOperationInfo::Content { sync_token },
self.get_addressbook(&principal, &addressbook_id, false)
self.get_addressbook(principal, addressbook_id, false)
.await?
.push_topic,
);
@@ -733,7 +792,7 @@ impl AddressbookStore for SqliteAddressbookStore {
async fn import_addressbook(
&self,
addressbook: Addressbook,
objects: Vec<AddressObject>,
objects: Vec<(String, AddressObject)>,
merge_existing: bool,
) -> Result<(), Error> {
let mut tx = self
@@ -758,11 +817,12 @@ impl AddressbookStore for SqliteAddressbookStore {
}
let mut sync_token = None;
for object in objects {
for (object_id, object) in objects {
Self::_put_object(
&mut *tx,
&addressbook.principal,
&addressbook.id,
&object_id,
&object,
false,
)
@@ -773,7 +833,7 @@ impl AddressbookStore for SqliteAddressbookStore {
&mut tx,
&addressbook.principal,
&addressbook.id,
object.get_id(),
&object_id,
ChangeOperation::Add,
)
.await?,

View File

@@ -1,10 +1,12 @@
use super::ChangeOperation;
use crate::BEGIN_IMMEDIATE;
use async_trait::async_trait;
use caldata::parser::ParserError;
use caldata::types::CalDateTime;
use chrono::TimeDelta;
use derive_more::derive::Constructor;
use regex::Regex;
use rustical_ical::{CalDateTime, CalendarObject, CalendarObjectType};
use rustical_ical::{CalendarObject, CalendarObjectType};
use rustical_store::calendar_store::CalendarQuery;
use rustical_store::synctoken::format_synctoken;
use rustical_store::{Calendar, CalendarMetadata, CalendarStore, CollectionMetadata, Error};
@@ -12,7 +14,7 @@ use rustical_store::{CollectionOperation, CollectionOperationInfo};
use sqlx::types::chrono::NaiveDateTime;
use sqlx::{Acquire, Executor, Sqlite, SqlitePool, Transaction};
use tokio::sync::mpsc::Sender;
use tracing::{error_span, instrument, warn};
use tracing::{error, error_span, instrument, warn};
#[derive(Debug, Clone)]
struct CalendarObjectRow {
@@ -21,21 +23,37 @@ struct CalendarObjectRow {
uid: String,
}
impl TryFrom<CalendarObjectRow> for CalendarObject {
impl From<CalendarObjectRow> for (String, Result<CalendarObject, ParserError>) {
fn from(row: CalendarObjectRow) -> Self {
let result = CalendarObject::from_ics(row.ics).inspect(|object| {
if object.get_uid() != row.uid {
warn!(
"Calendar object {}.ics: UID={} and row uid={} do not match",
row.id,
object.get_uid(),
row.uid
);
}
});
(row.id, result)
}
}
impl TryFrom<CalendarObjectRow> for (String, CalendarObject) {
type Error = rustical_store::Error;
fn try_from(value: CalendarObjectRow) -> Result<Self, Self::Error> {
let object = Self::from_ics(value.ics, Some(value.id))?;
if object.get_uid() != value.uid {
return Err(rustical_store::Error::IcalError(
rustical_ical::Error::InvalidData(format!(
"uid={} and UID={} don't match",
value.uid,
object.get_uid()
)),
));
fn try_from(row: CalendarObjectRow) -> Result<Self, Self::Error> {
let object = CalendarObject::from_ics(row.ics)?;
if object.get_uid() != row.uid {
warn!(
"Calendar object {}.ics: UID={} and row uid={} do not match",
row.id,
object.get_uid(),
row.uid
);
}
Ok(object)
Ok((row.id, object))
}
}
@@ -92,6 +110,7 @@ impl From<CalendarRow> for Calendar {
pub struct SqliteCalendarStore {
db: SqlitePool,
sender: Sender<CollectionOperation>,
skip_broken: bool,
}
impl SqliteCalendarStore {
@@ -141,11 +160,40 @@ impl SqliteCalendarStore {
if let Err(err) = self.sender.try_send(CollectionOperation { topic, data }) {
error_span!(
"Error trying to send calendar update notification:",
err = format!("{err:?}"),
err = format!("{err}"),
);
}
}
#[allow(clippy::missing_panics_doc)]
pub async fn validate_objects(&self, principal: &str) -> Result<(), Error> {
let mut success = true;
for calendar in self.get_calendars(principal).await? {
for (object_id, res) in Self::_get_objects(&self.db, principal, &calendar.id).await? {
if let Err(err) = res {
warn!(
"Invalid calendar object found at {principal}/{cal_id}/{object_id}.ics. Error: {err}",
cal_id = calendar.id
);
success = false;
}
}
}
if !success {
if self.skip_broken {
error!(
"Not all calendar objects are valid. Since data_store.sqlite.skip_broken=true they will be hidden. You are still advised to manually remove or repair the object. If you need help feel free to open up an issue on GitHub."
);
} else {
error!(
"Not all calendar objects are valid. Since data_store.sqlite.skip_broken=false this causes a panic. Remove or repair the broken objects manually or set data_store.sqlite.skip_broken=false as a temporary solution to ignore the error. If you need help feel free to open up an issue on GitHub."
);
panic!();
}
}
Ok(())
}
/// In the past exports generated objects with invalid VERSION:4.0
/// This repair sets them to VERSION:2.0
#[allow(clippy::missing_panics_doc)]
@@ -357,8 +405,8 @@ impl SqliteCalendarStore {
async fn _update_calendar<'e, E: Executor<'e, Database = Sqlite>>(
executor: E,
principal: String,
id: String,
principal: &str,
id: &str,
calendar: Calendar,
) -> Result<(), Error> {
let comp_event = calendar.components.contains(&CalendarObjectType::Event);
@@ -456,8 +504,8 @@ impl SqliteCalendarStore {
executor: E,
principal: &str,
cal_id: &str,
) -> Result<Vec<CalendarObject>, Error> {
sqlx::query_as!(
) -> Result<impl Iterator<Item = (String, Result<CalendarObject, ParserError>)>, Error> {
Ok(sqlx::query_as!(
CalendarObjectRow,
"SELECT id, uid, ics FROM calendarobjects WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL",
principal,
@@ -466,8 +514,8 @@ impl SqliteCalendarStore {
.fetch_all(executor)
.await.map_err(crate::Error::from)?
.into_iter()
.map(std::convert::TryInto::try_into)
.collect()
.map(Into::into)
)
}
async fn _calendar_query<'e, E: Executor<'e, Database = Sqlite>>(
@@ -475,14 +523,14 @@ impl SqliteCalendarStore {
principal: &str,
cal_id: &str,
query: CalendarQuery,
) -> Result<Vec<CalendarObject>, Error> {
) -> Result<impl Iterator<Item = (String, Result<CalendarObject, ParserError>)>, Error> {
// We extend our query interval by one day in each direction since we really don't want to
// miss any objects because of timezone differences
// I've previously tried NaiveDate::MIN,MAX, but it seems like sqlite cannot handle these
let start = query.time_start.map(|start| start - TimeDelta::days(1));
let end = query.time_end.map(|end| end + TimeDelta::days(1));
sqlx::query_as!(
Ok(sqlx::query_as!(
CalendarObjectRow,
r"SELECT id, uid, ics FROM calendarobjects
WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL
@@ -500,8 +548,7 @@ impl SqliteCalendarStore {
.await
.map_err(crate::Error::from)?
.into_iter()
.map(std::convert::TryInto::try_into)
.collect()
.map(Into::into))
}
async fn _get_object<'e, E: Executor<'e, Database = Sqlite>>(
@@ -511,7 +558,7 @@ impl SqliteCalendarStore {
object_id: &str,
show_deleted: bool,
) -> Result<CalendarObject, Error> {
sqlx::query_as!(
let (row_id, object) = sqlx::query_as!(
CalendarObjectRow,
"SELECT id, uid, ics FROM calendarobjects WHERE (principal, cal_id, id) = (?, ?, ?) AND ((deleted_at IS NULL) OR ?)",
principal,
@@ -522,7 +569,9 @@ impl SqliteCalendarStore {
.fetch_one(executor)
.await
.map_err(crate::Error::from)?
.try_into()
.try_into()?;
assert_eq!(object_id, row_id);
Ok(object)
}
#[instrument]
@@ -530,23 +579,24 @@ impl SqliteCalendarStore {
executor: E,
principal: &str,
cal_id: &str,
object_id: &str,
object: &CalendarObject,
overwrite: bool,
) -> Result<(), Error> {
let (object_id, uid, ics) = (object.get_id(), object.get_uid(), object.get_ics());
let (uid, ics) = (object.get_uid(), object.get_ics());
let first_occurence = object
.get_inner()
.get_inner()
.get_first_occurence()
.ok()
.flatten()
.as_ref()
.map(CalDateTime::date);
.map(CalDateTime::date_floor);
let last_occurence = object
.get_inner()
.get_inner()
.get_last_occurence()
.ok()
.flatten()
.as_ref()
.map(CalDateTime::date);
.map(CalDateTime::date_ceil);
let etag = object.get_etag();
let object_type = object.get_object_type() as u8;
@@ -638,7 +688,8 @@ impl SqliteCalendarStore {
principal: &str,
cal_id: &str,
synctoken: i64,
) -> Result<(Vec<CalendarObject>, Vec<String>, i64), Error> {
skip_broken: bool,
) -> Result<(Vec<(String, CalendarObject)>, Vec<String>, i64), Error> {
struct Row {
object_id: String,
synctoken: i64,
@@ -665,8 +716,10 @@ impl SqliteCalendarStore {
for Row { object_id, .. } in changes {
match Self::_get_object(&mut *conn, principal, cal_id, &object_id, false).await {
Ok(object) => objects.push(object),
Ok(object) => objects.push((object_id, object)),
Err(rustical_store::Error::NotFound) => deleted_objects.push(object_id),
// Skip broken object
Err(rustical_store::Error::IcalError(_)) if skip_broken => (),
Err(err) => return Err(err),
}
}
@@ -705,8 +758,8 @@ impl CalendarStore for SqliteCalendarStore {
#[instrument]
async fn update_calendar(
&self,
principal: String,
id: String,
principal: &str,
id: &str,
calendar: Calendar,
) -> Result<(), Error> {
Self::_update_calendar(&self.db, principal, id, calendar).await
@@ -774,14 +827,23 @@ impl CalendarStore for SqliteCalendarStore {
let mut sync_token = None;
for object in objects {
Self::_put_object(&mut *tx, &calendar.principal, &calendar.id, &object, false).await?;
let object_id = object.get_uid();
Self::_put_object(
&mut *tx,
&calendar.principal,
&calendar.id,
object_id,
&object,
false,
)
.await?;
sync_token = Some(
Self::log_object_operation(
&mut tx,
&calendar.principal,
&calendar.id,
object.get_id(),
object_id,
ChangeOperation::Add,
)
.await?,
@@ -807,8 +869,17 @@ impl CalendarStore for SqliteCalendarStore {
principal: &str,
cal_id: &str,
query: CalendarQuery,
) -> Result<Vec<CalendarObject>, Error> {
Self::_calendar_query(&self.db, principal, cal_id, query).await
) -> Result<Vec<(String, CalendarObject)>, Error> {
let objects = Self::_calendar_query(&self.db, principal, cal_id, query).await?;
if self.skip_broken {
Ok(objects
.filter_map(|(id, res)| Some((id, res.ok()?)))
.collect())
} else {
Ok(objects
.map(|(id, res)| res.map(|obj| (id, obj)))
.collect::<Result<Vec<_>, _>>()?)
}
}
async fn calendar_metadata(
@@ -838,8 +909,17 @@ impl CalendarStore for SqliteCalendarStore {
&self,
principal: &str,
cal_id: &str,
) -> Result<Vec<CalendarObject>, Error> {
Self::_get_objects(&self.db, principal, cal_id).await
) -> Result<Vec<(String, CalendarObject)>, Error> {
let objects = Self::_get_objects(&self.db, principal, cal_id).await?;
if self.skip_broken {
Ok(objects
.filter_map(|(id, res)| Some((id, res.ok()?)))
.collect())
} else {
Ok(objects
.map(|(id, res)| res.map(|obj| (id, obj)))
.collect::<Result<Vec<_>, _>>()?)
}
}
#[instrument]
@@ -856,9 +936,9 @@ impl CalendarStore for SqliteCalendarStore {
#[instrument]
async fn put_objects(
&self,
principal: String,
cal_id: String,
objects: Vec<CalendarObject>,
principal: &str,
cal_id: &str,
objects: Vec<(String, CalendarObject)>,
overwrite: bool,
) -> Result<(), Error> {
let mut tx = self
@@ -867,25 +947,25 @@ impl CalendarStore for SqliteCalendarStore {
.await
.map_err(crate::Error::from)?;
let calendar = Self::_get_calendar(&mut *tx, &principal, &cal_id, true).await?;
let calendar = Self::_get_calendar(&mut *tx, principal, cal_id, true).await?;
if calendar.subscription_url.is_some() {
// We cannot commit an object to a subscription calendar
return Err(Error::ReadOnly);
}
let mut sync_token = None;
for object in objects {
for (object_id, object) in objects {
sync_token = Some(
Self::log_object_operation(
&mut tx,
&principal,
&cal_id,
object.get_id(),
principal,
cal_id,
&object_id,
ChangeOperation::Add,
)
.await?,
);
Self::_put_object(&mut *tx, &principal, &cal_id, &object, overwrite).await?;
Self::_put_object(&mut *tx, principal, cal_id, &object_id, &object, overwrite).await?;
}
tx.commit().await.map_err(crate::Error::from)?;
@@ -893,9 +973,7 @@ impl CalendarStore for SqliteCalendarStore {
if let Some(sync_token) = sync_token {
self.send_push_notification(
CollectionOperationInfo::Content { sync_token },
self.get_calendar(&principal, &cal_id, true)
.await?
.push_topic,
self.get_calendar(principal, cal_id, true).await?.push_topic,
);
}
Ok(())
@@ -963,8 +1041,8 @@ impl CalendarStore for SqliteCalendarStore {
principal: &str,
cal_id: &str,
synctoken: i64,
) -> Result<(Vec<CalendarObject>, Vec<String>, i64), Error> {
Self::_sync_changes(&self.db, principal, cal_id, synctoken).await
) -> Result<(Vec<(String, CalendarObject)>, Vec<String>, i64), Error> {
Self::_sync_changes(&self.db, principal, cal_id, synctoken, self.skip_broken).await
}
fn is_read_only(&self, _cal_id: &str) -> bool {

View File

@@ -18,7 +18,7 @@ impl From<sqlx::Error> for Error {
sqlx::Error::RowNotFound => Self::StoreError(rustical_store::Error::NotFound),
sqlx::Error::Database(err) => {
if err.is_unique_violation() {
warn!("{err:?}");
warn!("{err}");
Self::StoreError(rustical_store::Error::AlreadyExists)
} else {
Self::SqlxError(sqlx::Error::Database(err))

View File

@@ -37,11 +37,11 @@ impl SqliteStore {
}
pub async fn create_db_pool(db_url: &str, migrate: bool) -> Result<Pool<Sqlite>, sqlx::Error> {
let options: SqliteConnectOptions = db_url.parse()?;
let db = SqlitePool::connect_with(
SqliteConnectOptions::new()
options
.journal_mode(sqlx::sqlite::SqliteJournalMode::Wal)
.synchronous(sqlx::sqlite::SqliteSynchronous::Normal)
.filename(db_url)
.create_if_missing(true),
)
.await?;

View File

@@ -1,6 +1,6 @@
use crate::{
SqliteStore, addressbook_store::SqliteAddressbookStore, calendar_store::SqliteCalendarStore,
principal_store::SqlitePrincipalStore,
create_db_pool, principal_store::SqlitePrincipalStore,
};
use rstest::fixture;
use rustical_store::auth::{AuthenticationProvider, Principal, PrincipalType};
@@ -9,12 +9,23 @@ use sqlx::SqlitePool;
mod addressbook_store;
mod calendar_store;
async fn get_test_db() -> SqlitePool {
let db = SqlitePool::connect("sqlite::memory:").await.unwrap();
sqlx::migrate!("./migrations").run(&db).await.unwrap();
#[derive(Debug, Clone)]
pub struct TestStoreContext {
pub db: SqlitePool,
pub addr_store: SqliteAddressbookStore,
pub cal_store: SqliteCalendarStore,
pub principal_store: SqlitePrincipalStore,
pub sub_store: SqliteStore,
}
#[fixture]
pub async fn test_store_context() -> TestStoreContext {
let (send_addr, _recv) = tokio::sync::mpsc::channel(1);
let (send_cal, _recv) = tokio::sync::mpsc::channel(1);
let db = create_db_pool(":memory:", true).await.unwrap();
// Populate with test data
let principal_store = SqlitePrincipalStore::new(db.clone());
// Populate with test data
principal_store
.insert_principal(
Principal {
@@ -33,28 +44,11 @@ async fn get_test_db() -> SqlitePool {
.await
.unwrap();
db
}
#[derive(Debug, Clone)]
pub struct TestStoreContext {
pub db: SqlitePool,
pub addr_store: SqliteAddressbookStore,
pub cal_store: SqliteCalendarStore,
pub principal_store: SqlitePrincipalStore,
pub sub_store: SqliteStore,
}
#[fixture]
pub async fn test_store_context() -> TestStoreContext {
let (send_addr, _recv) = tokio::sync::mpsc::channel(1);
let (send_cal, _recv) = tokio::sync::mpsc::channel(1);
let db = get_test_db().await;
TestStoreContext {
db: db.clone(),
addr_store: SqliteAddressbookStore::new(db.clone(), send_addr),
cal_store: SqliteCalendarStore::new(db.clone(), send_cal),
principal_store: SqlitePrincipalStore::new(db.clone()),
addr_store: SqliteAddressbookStore::new(db.clone(), send_addr, false),
cal_store: SqliteCalendarStore::new(db.clone(), send_cal, false),
principal_store,
sub_store: SqliteStore::new(db),
}
}

View File

@@ -136,7 +136,7 @@ impl NamedStruct {
#(#builder_field_inits),*
};
let (ns, name) = reader.resolve_element(start.name());
let (ns, name) = reader.resolver().resolve_element(start.name());
#(#tagname_field_branches);*
#(#namespace_field_branches);*
@@ -161,7 +161,7 @@ impl NamedStruct {
// start of a child element
Event::Start(start) | Event::Empty(start) => {
let empty = matches!(event, Event::Empty(_));
let (ns, name) = reader.resolve_element(start.name());
let (ns, name) = reader.resolver().resolve_element(start.name());
match (ns, name.as_ref()) {
#(#named_field_branches),*
#(#untagged_field_branches),*

View File

@@ -42,7 +42,7 @@ impl<T: XmlRootTag + XmlDeserialize> XmlDocument for T {
match event {
Event::Decl(_) | Event::Comment(_) => { /* ignore this */ }
Event::Start(start) | Event::Empty(start) => {
let (ns, name) = reader.resolve_element(start.name());
let (ns, name) = reader.resolver().resolve_element(start.name());
let matches = match (Self::root_ns(), &ns, name) {
// Wrong tag
(_, _, name) if name.as_ref() != Self::root_tag().as_bytes() => false,

View File

@@ -1,6 +1,6 @@
use quick_xml::name::Namespace;
#[derive(Debug, Clone, Default, PartialEq, Eq)]
#[derive(Debug, Clone, Default, PartialEq, Eq, Hash)]
pub struct NamespaceOwned(pub Vec<u8>);
impl<'a> From<Namespace<'a>> for NamespaceOwned {

View File

@@ -1,18 +1,21 @@
use std::io::BufRead;
use quick_xml::events::BytesStart;
use quick_xml::{events::BytesStart, name::ResolveResult};
use crate::{XmlDeserialize, XmlError};
use crate::{NamespaceOwned, XmlDeserialize, XmlError};
// TODO: actually implement
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct Unparsed(String);
pub struct Unparsed(pub Option<NamespaceOwned>, pub String);
impl Unparsed {
#[must_use]
pub fn tag_name(&self) -> String {
// TODO: respect namespace?
self.0.clone()
pub const fn ns(&self) -> Option<&NamespaceOwned> {
self.0.as_ref()
}
#[must_use]
pub const fn tag_name(&self) -> &str {
self.1.as_str()
}
}
@@ -27,7 +30,12 @@ impl XmlDeserialize for Unparsed {
let mut buf = vec![];
reader.read_to_end_into(start.name(), &mut buf)?;
}
let tag_name = String::from_utf8_lossy(start.local_name().as_ref()).to_string();
Ok(Self(tag_name))
let (ns, tag_name) = reader.resolver().resolve_element(start.name());
let ns: Option<NamespaceOwned> = match ns {
ResolveResult::Bound(ns) => Some(ns.into()),
ResolveResult::Unbound | ResolveResult::Unknown(_) => None,
};
let tag_name = String::from_utf8_lossy(tag_name.as_ref()).to_string();
Ok(Self(ns, tag_name))
}
}
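
A rough consumer-side sketch (not part of the diff) of the now namespace-aware `Unparsed`: the `is_dav_prop` helper and the `DAV:` check are hypothetical, but they show how the new `ns()` and `tag_name()` accessors would be used together, assuming the helper lives in the same module as the definitions above.

```rust
// Illustrative helper only, not part of the diff: with the namespace now
// retained, callers can distinguish same-named tags from different namespaces.
fn is_dav_prop(unparsed: &Unparsed) -> bool {
    let in_dav_ns = unparsed
        .ns()
        .is_some_and(|ns| ns.0.as_slice() == b"DAV:".as_slice());
    in_dav_ns && unparsed.tag_name() == "prop"
}
```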

View File

@@ -48,3 +48,26 @@ Since the app tokens are random they use the faster `pbkdf2` algorithm.
```sh
cargo install --locked --git https://github.com/lennart-k/rustical
```
## NixOS (community-maintained by [@PopeRigby](https://github.com/PopeRigby))
!!! warning
The NixOS package is not maintained by me, but since I appreciate [@PopeRigby](https://github.com/PopeRigby)'s work on it I want to mention it here.
Since rustical's development is still quite active I **strongly** recommend installing from the `nixpkgs-unstable` branch.
In the `nixpkgs-unstable` branch you'll find a `rustical` package you can install.
There's also a NixOS service module that has not been merged yet. If you know how to add modules from pending PRs in Nix,
you can already install it from <https://github.com/NixOS/nixpkgs/pull/424188>
and then set up rustical as a service:
```nix title="In your configuration.nix"
services.rustical = {
enable = true;
package = inputs.rustical.legacyPackages.${pkgs.stdenv.hostPlatform.system}.rustical;
settings = {
# Settings the same as in config.toml but in Nix syntax
# http.port = 3002;
};
};
```

View File

@@ -9,7 +9,7 @@ use axum_extra::TypedHeader;
use headers::{HeaderMapExt, UserAgent};
use http::header::CONNECTION;
use http::{HeaderValue, StatusCode};
use rustical_caldav::caldav_router;
use rustical_caldav::{CalDavConfig, caldav_router};
use rustical_carddav::carddav_router;
use rustical_frontend::nextcloud_login::nextcloud_login_router;
use rustical_frontend::{FrontendConfig, frontend_router};
@@ -32,7 +32,8 @@ use tracing::field::display;
#[allow(
clippy::too_many_arguments,
clippy::too_many_lines,
clippy::cognitive_complexity
clippy::cognitive_complexity,
clippy::missing_panics_doc
)]
pub fn make_app<
AS: AddressbookStore + PrefixedCalendarStore,
@@ -45,6 +46,7 @@ pub fn make_app<
auth_provider: Arc<impl AuthenticationProvider>,
frontend_config: FrontendConfig,
oidc_config: Option<OidcConfig>,
caldav_config: CalDavConfig,
nextcloud_login_config: &NextcloudLoginConfig,
dav_push_enabled: bool,
session_cookie_samesite_strict: bool,
@@ -54,6 +56,8 @@ pub fn make_app<
let combined_cal_store =
Arc::new(CombinedCalendarStore::new(cal_store).with_store(birthday_store));
let caldav_config = Arc::new(caldav_config);
let mut router = Router::new()
// endpoint to be used by healthcheck to see if rustical is online
.route("/ping", axum::routing::get(async || "Pong!"))
@@ -63,6 +67,7 @@ pub fn make_app<
combined_cal_store.clone(),
subscription_store.clone(),
false,
caldav_config.clone(),
))
.merge(caldav_router(
"/caldav-compat",
@@ -70,6 +75,7 @@ pub fn make_app<
combined_cal_store.clone(),
subscription_store.clone(),
true,
caldav_config,
))
.route(
"/.well-known/caldav",
@@ -104,9 +110,9 @@ pub fn make_app<
options(async || {
let mut resp = Response::builder().status(StatusCode::OK);
resp.headers_mut()
.unwrap()
.expect("this always works")
.insert("DAV", HeaderValue::from_static("1"));
resp.body(Body::empty()).unwrap()
resp.body(Body::empty()).expect("empty body always works")
}),
);

View File

@@ -2,11 +2,12 @@ use crate::config::HttpConfig;
use clap::Parser;
use http::Method;
#[derive(Parser, Debug)]
#[derive(Parser, Debug, Default)]
pub struct HealthArgs {}
/// Healthcheck for a running rustical instance
/// Currently just pings to see if it's reachable via HTTP
#[allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
pub async fn cmd_health(http_config: HttpConfig, _health_args: HealthArgs) -> anyhow::Result<()> {
let client = reqwest::ClientBuilder::new().build().unwrap();

View File

@@ -33,7 +33,8 @@ pub struct MembershipArgs {
command: MembershipCommand,
}
pub async fn handle_membership_command(
#[allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
pub async fn cmd_membership(
user_store: &impl AuthenticationProvider,
MembershipArgs { command }: MembershipArgs,
) -> anyhow::Result<()> {

View File

@@ -3,21 +3,28 @@ use crate::config::{
SqliteDataStoreConfig, TracingConfig,
};
use clap::Parser;
use rustical_caldav::CalDavConfig;
use rustical_frontend::FrontendConfig;
pub mod health;
mod health;
pub mod membership;
pub mod principals;
pub use health::{HealthArgs, cmd_health};
pub use principals::{PrincipalsArgs, cmd_principals};
#[derive(Debug, Parser)]
pub struct GenConfigArgs {}
#[allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
pub fn cmd_gen_config(_args: GenConfigArgs) -> anyhow::Result<()> {
let config = Config {
http: HttpConfig::default(),
caldav: CalDavConfig::default(),
data_store: DataStoreConfig::Sqlite(SqliteDataStoreConfig {
db_url: "/var/lib/rustical/db.sqlite3".to_owned(),
run_repairs: true,
skip_broken: true,
}),
tracing: TracingConfig::default(),
frontend: FrontendConfig {

View File

@@ -1,56 +1,49 @@
use super::membership::{MembershipArgs, handle_membership_command};
use crate::{config::Config, get_data_stores};
use super::membership::MembershipArgs;
use crate::{config::Config, get_data_stores, membership::cmd_membership};
use clap::{Parser, Subcommand};
use figment::{
Figment,
providers::{Env, Format, Toml},
};
use password_hash::{PasswordHasher, SaltString, rand_core::OsRng};
use rustical_store::auth::{AuthenticationProvider, Principal, PrincipalType};
#[derive(Parser, Debug)]
pub struct PrincipalsArgs {
#[arg(short, long, env, default_value = "/etc/rustical/config.toml")]
config_file: String,
#[command(subcommand)]
command: Command,
pub command: PrincipalsCommand,
}
#[derive(Parser, Debug)]
struct CreateArgs {
id: String,
pub struct CreateArgs {
pub id: String,
#[arg(value_enum, short, long)]
principal_type: Option<PrincipalType>,
pub principal_type: Option<PrincipalType>,
#[arg(short, long)]
name: Option<String>,
pub name: Option<String>,
#[arg(long, help = "Ask for password input")]
password: bool,
pub password: bool,
}
#[derive(Parser, Debug)]
struct RemoveArgs {
id: String,
pub struct RemoveArgs {
pub id: String,
}
#[derive(Parser, Debug)]
struct EditArgs {
id: String,
pub struct EditArgs {
pub id: String,
#[arg(long, help = "Ask for password input")]
password: bool,
pub password: bool,
#[arg(
long,
help = "Remove password (If you only want to use OIDC for example)"
)]
remove_password: bool,
pub remove_password: bool,
#[arg(short, long, help = "Change principal displayname")]
name: Option<String>,
pub name: Option<String>,
#[arg(value_enum, short, long, help = "Change the principal type")]
principal_type: Option<PrincipalType>,
pub principal_type: Option<PrincipalType>,
}
#[derive(Debug, Subcommand)]
enum Command {
pub enum PrincipalsCommand {
List,
Create(CreateArgs),
Remove(RemoveArgs),
@@ -58,16 +51,12 @@ enum Command {
Membership(MembershipArgs),
}
pub async fn cmd_principals(args: PrincipalsArgs) -> anyhow::Result<()> {
let config: Config = Figment::new()
.merge(Toml::file(&args.config_file))
.merge(Env::prefixed("RUSTICAL_").split("__"))
.extract()?;
#[allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
pub async fn cmd_principals(args: PrincipalsArgs, config: Config) -> anyhow::Result<()> {
let (_, _, _, principal_store, _) = get_data_stores(true, &config.data_store).await?;
match args.command {
Command::List => {
PrincipalsCommand::List => {
for principal in principal_store.get_principals().await? {
println!(
"{} (displayname={}) [{}]",
@@ -77,7 +66,7 @@ pub async fn cmd_principals(args: PrincipalsArgs) -> anyhow::Result<()> {
);
}
}
Command::Create(CreateArgs {
PrincipalsCommand::Create(CreateArgs {
id,
principal_type,
name,
@@ -111,11 +100,11 @@ pub async fn cmd_principals(args: PrincipalsArgs) -> anyhow::Result<()> {
.await?;
println!("Principal created");
}
Command::Remove(RemoveArgs { id }) => {
PrincipalsCommand::Remove(RemoveArgs { id }) => {
principal_store.remove_principal(&id).await?;
println!("Principal {id} removed");
}
Command::Edit(EditArgs {
PrincipalsCommand::Edit(EditArgs {
id,
remove_password,
password,
@@ -151,8 +140,8 @@ pub async fn cmd_principals(args: PrincipalsArgs) -> anyhow::Result<()> {
principal_store.insert_principal(principal, true).await?;
println!("Principal {id} updated");
}
Command::Membership(args) => {
handle_membership_command(principal_store.as_ref(), args).await?;
PrincipalsCommand::Membership(args) => {
cmd_membership(principal_store.as_ref(), args).await?;
}
}
Ok(())

View File

@@ -1,3 +1,4 @@
use rustical_caldav::CalDavConfig;
use rustical_frontend::FrontendConfig;
use rustical_oidc::OidcConfig;
use serde::{Deserialize, Serialize};
@@ -28,6 +29,8 @@ pub struct SqliteDataStoreConfig {
pub db_url: String,
#[serde(default = "default_true")]
pub run_repairs: bool,
#[serde(default = "default_true")]
pub skip_broken: bool,
}
#[derive(Debug, Deserialize, Serialize)]
@@ -95,4 +98,6 @@ pub struct Config {
pub dav_push: DavPushConfig,
#[serde(default)]
pub nextcloud_login: NextcloudLoginConfig,
#[serde(default)]
pub caldav: CalDavConfig,
}

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/caldav/calendar.rs
expression: body
---

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/caldav/calendar.rs
expression: body
---

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/caldav/calendar_import.rs
expression: body
---

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/caldav/calendar_import.rs
expression: body
---

View File

@@ -1,107 +0,0 @@
---
source: src/integration_tests/caldav/calendar_import.rs
expression: body
---
BEGIN:VCALENDAR
VERSION:2.0
CALSCALE:GREGORIAN
PRODID:RustiCal
BEGIN:VTIMEZONE
LAST-MODIFIED:20040110T032845Z
TZID:US/Eastern
BEGIN:DAYLIGHT
DTSTART:20000404T020000
RRULE:FREQ=YEARLY;BYDAY=1SU;BYMONTH=4
TZNAME:EDT
TZOFFSETFROM:-0500
TZOFFSETTO:-0400
END:DAYLIGHT
BEGIN:STANDARD
DTSTART:20001026T020000
RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10
TZNAME:EST
TZOFFSETFROM:-0400
TZOFFSETTO:-0500
END:STANDARD
END:VTIMEZONE
BEGIN:VEVENT
DTSTAMP:20060206T001121Z
DTSTART;TZID=US/Eastern:20060104T140000
DURATION:PT1H
RECURRENCE-ID;TZID=US/Eastern:20060104T120000
SUMMARY:Event #2 bis
UID:[UID]
END:VEVENT
BEGIN:VEVENT
DTSTAMP:20060206T001102Z
DTSTART;TZID=US/Eastern:20060102T100000
DURATION:PT1H
SUMMARY:Event #1
Description:Go Steelers!
UID:[UID]
END:VEVENT
BEGIN:VEVENT
DTSTAMP:20060206T001121Z
DTSTART;TZID=US/Eastern:20060102T120000
DURATION:PT1H
RRULE:FREQ=DAILY;COUNT=5
SUMMARY:Event #2
UID:[UID]
END:VEVENT
BEGIN:VEVENT
ATTENDEE;PARTSTAT=ACCEPTED;ROLE=CHAIR:mailto:cyrus@example.com
ATTENDEE;PARTSTAT=NEEDS-ACTION:mailto:lisa@example.com
DTSTAMP:20060206T001220Z
DTSTART;TZID=US/Eastern:20060104T100000
DURATION:PT1H
LAST-MODIFIED:20060206T001330Z
ORGANIZER:mailto:cyrus@example.com
SEQUENCE:1
STATUS:TENTATIVE
SUMMARY:Event #3
UID:[UID]
END:VEVENT
BEGIN:VTODO
DTSTAMP:20060205T235335Z
DUE;VALUE=DATE:20060104
STATUS:NEEDS-ACTION
SUMMARY:Task #1
UID:[UID]
BEGIN:VALARM
ACTION:AUDIO
TRIGGER;RELATED=START:-PT10M
END:VALARM
END:VTODO
BEGIN:VTODO
DTSTAMP:20060205T235300Z
DUE;VALUE=DATE:20060106
LAST-MODIFIED:20060205T235308Z
SEQUENCE:1
STATUS:NEEDS-ACTION
SUMMARY:Task #2
UID:[UID]
BEGIN:VALARM
ACTION:AUDIO
TRIGGER;RELATED=START:-PT10M
END:VALARM
END:VTODO
BEGIN:VTODO
COMPLETED:20051223T122322Z
DTSTAMP:20060205T235400Z
DUE;VALUE=DATE:20051225
LAST-MODIFIED:20060205T235308Z
SEQUENCE:1
STATUS:COMPLETED
SUMMARY:Task #3
UID:[UID]
END:VTODO
BEGIN:VTODO
DTSTAMP:20060205T235600Z
DUE;VALUE=DATE:20060101
LAST-MODIFIED:20060205T235308Z
SEQUENCE:1
STATUS:CANCELLED
SUMMARY:Task #4
UID:[UID]
END:VTODO
END:VCALENDAR

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/caldav/calendar_import.rs
expression: body
---

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/carddav/addressbook.rs
expression: body
---

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/carddav/addressbook.rs
expression: body
---

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/carddav/addressbook.rs
expression: body
---

View File

@@ -1,13 +0,0 @@
---
source: src/integration_tests/carddav/addressbook_import.rs
expression: body
---
BEGIN:VCARD
VERSION:4.0
FN:Simon Perreault
N:Perreault;Simon;;;ing. jr,M.Sc.
BDAY:--0203
GENDER:M
EMAIL;TYPE=work:simon.perreault@viagenie.ca
UID:[UID]
END:VCARD

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/carddav/addressbook_import.rs
expression: body
---

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/carddav/mod.rs
expression: body
---

View File

@@ -1,5 +0,0 @@
---
source: src/integration_tests/carddav/mod.rs
expression: body
---

src/lib.rs (Normal file, 165 lines)
View File

@@ -0,0 +1,165 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
use crate::config::Config;
use anyhow::Result;
use app::make_app;
use axum::ServiceExt;
use axum::extract::Request;
use clap::{Parser, Subcommand};
use config::{DataStoreConfig, SqliteDataStoreConfig};
use rustical_dav_push::DavPushController;
use rustical_store::auth::AuthenticationProvider;
use rustical_store::{
AddressbookStore, CalendarStore, CollectionOperation, PrefixedCalendarStore, SubscriptionStore,
};
use rustical_store_sqlite::addressbook_store::SqliteAddressbookStore;
use rustical_store_sqlite::calendar_store::SqliteCalendarStore;
use rustical_store_sqlite::principal_store::SqlitePrincipalStore;
use rustical_store_sqlite::{SqliteStore, create_db_pool};
use setup_tracing::setup_tracing;
use std::sync::Arc;
use tokio::sync::Notify;
use tokio::sync::mpsc::Receiver;
use tower::Layer;
use tower_http::normalize_path::NormalizePathLayer;
use tracing::{info, warn};
pub mod app;
mod commands;
pub use commands::*;
pub mod config;
mod setup_tracing;
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Args {
#[arg(short, long, env, default_value = "/etc/rustical/config.toml")]
pub config_file: String,
#[arg(long, env, help = "Do no run database migrations (only for sql store)")]
pub no_migrations: bool,
#[command(subcommand)]
pub command: Option<Command>,
}
#[derive(Debug, Subcommand)]
pub enum Command {
GenConfig(commands::GenConfigArgs),
Principals(PrincipalsArgs),
#[command(
about = "Healthcheck for running instance (Used for HEALTHCHECK in Docker container)"
)]
Health(HealthArgs),
}
#[allow(clippy::missing_errors_doc)]
pub async fn get_data_stores(
migrate: bool,
config: &DataStoreConfig,
) -> Result<(
Arc<impl AddressbookStore + PrefixedCalendarStore>,
Arc<impl CalendarStore>,
Arc<impl SubscriptionStore>,
Arc<impl AuthenticationProvider>,
Receiver<CollectionOperation>,
)> {
Ok(match &config {
DataStoreConfig::Sqlite(SqliteDataStoreConfig {
db_url,
run_repairs,
skip_broken,
}) => {
let db = create_db_pool(db_url, migrate).await?;
// Channel to watch for changes (for DAV Push)
let (send, recv) = tokio::sync::mpsc::channel(1000);
let addressbook_store = Arc::new(SqliteAddressbookStore::new(
db.clone(),
send.clone(),
*skip_broken,
));
let cal_store = Arc::new(SqliteCalendarStore::new(db.clone(), send, *skip_broken));
if *run_repairs {
info!("Running repair tasks");
addressbook_store.repair_orphans().await?;
cal_store.repair_invalid_version_4_0().await?;
cal_store.repair_orphans().await?;
}
let subscription_store = Arc::new(SqliteStore::new(db.clone()));
let principal_store = Arc::new(SqlitePrincipalStore::new(db));
// Validate all calendar objects
for principal in principal_store.get_principals().await? {
cal_store.validate_objects(&principal.id).await?;
addressbook_store.validate_objects(&principal.id).await?;
}
(
addressbook_store,
cal_store,
subscription_store,
principal_store,
recv,
)
}
})
}
#[allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
pub async fn cmd_default(
args: Args,
config: Config,
start_notifier: Option<Arc<Notify>>,
tracing: bool,
) -> Result<()> {
if tracing {
setup_tracing(&config.tracing);
}
let (addr_store, cal_store, subscription_store, principal_store, update_recv) =
get_data_stores(!args.no_migrations, &config.data_store).await?;
let mut tasks = vec![];
if config.dav_push.enabled {
let dav_push_controller = DavPushController::new(
config.dav_push.allowed_push_servers,
subscription_store.clone(),
);
tasks.push(tokio::spawn(async move {
dav_push_controller.notifier(update_recv).await;
}));
}
let app = make_app(
addr_store.clone(),
cal_store.clone(),
subscription_store.clone(),
principal_store.clone(),
config.frontend.clone(),
config.oidc.clone(),
config.caldav,
&config.nextcloud_login,
config.dav_push.enabled,
config.http.session_cookie_samesite_strict,
config.http.payload_limit_mb,
);
let app = ServiceExt::<Request>::into_make_service(
NormalizePathLayer::trim_trailing_slash().layer(app),
);
let address = format!("{}:{}", config.http.host, config.http.port);
let listener = tokio::net::TcpListener::bind(&address).await?;
tasks.push(tokio::spawn(async move {
info!("RustiCal serving on http://{address}");
if let Some(start_notifier) = start_notifier {
start_notifier.notify_waiters();
}
axum::serve(listener, app).await.unwrap();
}));
for task in tasks {
task.await?;
}
Ok(())
}
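Because the commands now live in lib.rs, an embedder (for example the integration tests) can run the whole server in-process and use the new start_notifier parameter to wait until the listener is bound. A minimal sketch of that pattern, assuming cmd_default's future can be tokio::spawned as-is; the helper name spawn_for_tests and the Args::parse_from construction are illustrative, not APIs from this repo:

use std::sync::Arc;

use clap::Parser;
use rustical::{Args, cmd_default, config::Config};
use tokio::sync::Notify;

async fn spawn_for_tests(config: Config) {
    // Default Args: no subcommand, default config path, migrations enabled.
    let args = Args::parse_from(["rustical"]);
    let started = Arc::new(Notify::new());
    // Create the Notified future before spawning: tokio guarantees it receives
    // wakeups from notify_waiters() even if it has not been polled yet, so the
    // notification inside cmd_default cannot be missed.
    let ready = started.notified();
    // tracing = false so the test binary does not install a global subscriber.
    tokio::spawn(cmd_default(args, config, Some(started.clone()), false));
    // Resolves once the TCP listener is bound and axum::serve is running.
    ready.await;
}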

View File

@@ -1,103 +1,12 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
use crate::commands::health::{HealthArgs, cmd_health};
use crate::config::Config;
use anyhow::Result;
use app::make_app;
use axum::ServiceExt;
use axum::extract::Request;
use clap::{Parser, Subcommand};
use commands::cmd_gen_config;
use commands::principals::{PrincipalsArgs, cmd_principals};
use config::{DataStoreConfig, SqliteDataStoreConfig};
use clap::Parser;
use figment::Figment;
use figment::providers::{Env, Format, Toml};
use rustical_dav_push::DavPushController;
use rustical_store::auth::AuthenticationProvider;
use rustical_store::{
AddressbookStore, CalendarStore, CollectionOperation, PrefixedCalendarStore, SubscriptionStore,
};
use rustical_store_sqlite::addressbook_store::SqliteAddressbookStore;
use rustical_store_sqlite::calendar_store::SqliteCalendarStore;
use rustical_store_sqlite::principal_store::SqlitePrincipalStore;
use rustical_store_sqlite::{SqliteStore, create_db_pool};
use setup_tracing::setup_tracing;
use std::sync::Arc;
use tokio::sync::mpsc::Receiver;
use tower::Layer;
use tower_http::normalize_path::NormalizePathLayer;
use tracing::{info, warn};
mod app;
mod commands;
mod config;
#[cfg(test)]
pub mod integration_tests;
mod setup_tracing;
mod migration_0_12;
use migration_0_12::{validate_address_objects_0_12, validate_calendar_objects_0_12};
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
#[arg(short, long, env, default_value = "/etc/rustical/config.toml")]
config_file: String,
#[arg(long, env, help = "Do no run database migrations (only for sql store)")]
no_migrations: bool,
#[command(subcommand)]
command: Option<Command>,
}
#[derive(Debug, Subcommand)]
enum Command {
GenConfig(commands::GenConfigArgs),
Principals(PrincipalsArgs),
#[command(
about = "Healthcheck for running instance (Used for HEALTHCHECK in Docker container)"
)]
Health(HealthArgs),
}
async fn get_data_stores(
migrate: bool,
config: &DataStoreConfig,
) -> Result<(
Arc<impl AddressbookStore + PrefixedCalendarStore>,
Arc<impl CalendarStore>,
Arc<impl SubscriptionStore>,
Arc<impl AuthenticationProvider>,
Receiver<CollectionOperation>,
)> {
Ok(match &config {
DataStoreConfig::Sqlite(SqliteDataStoreConfig {
db_url,
run_repairs,
}) => {
let db = create_db_pool(db_url, migrate).await?;
// Channel to watch for changes (for DAV Push)
let (send, recv) = tokio::sync::mpsc::channel(1000);
let addressbook_store = Arc::new(SqliteAddressbookStore::new(db.clone(), send.clone()));
let cal_store = Arc::new(SqliteCalendarStore::new(db.clone(), send));
if *run_repairs {
info!("Running repair tasks");
addressbook_store.repair_orphans().await?;
cal_store.repair_invalid_version_4_0().await?;
cal_store.repair_orphans().await?;
}
let subscription_store = Arc::new(SqliteStore::new(db.clone()));
let principal_store = Arc::new(SqlitePrincipalStore::new(db));
(
addressbook_store,
cal_store,
subscription_store,
principal_store,
recv,
)
}
})
}
use rustical::config::Config;
use rustical::{Args, Command};
use rustical::{cmd_default, cmd_gen_config, cmd_health, cmd_principals};
use tracing::warn;
#[tokio::main(flavor = "multi_thread")]
async fn main() -> Result<()> {
@@ -111,67 +20,17 @@ async fn main() -> Result<()> {
};
match args.command {
Some(Command::GenConfig(gen_config_args)) => cmd_gen_config(gen_config_args)?,
Some(Command::Principals(principals_args)) => cmd_principals(principals_args).await?,
Some(Command::GenConfig(gen_config_args)) => cmd_gen_config(gen_config_args),
Some(Command::Principals(principals_args)) => {
cmd_principals(principals_args, parse_config()?).await
}
Some(Command::Health(health_args)) => {
let config: Config = parse_config()?;
cmd_health(config.http, health_args).await?;
cmd_health(config.http, health_args).await
}
None => {
let config: Config = parse_config()?;
setup_tracing(&config.tracing);
let (addr_store, cal_store, subscription_store, principal_store, update_recv) =
get_data_stores(!args.no_migrations, &config.data_store).await?;
warn!(
"Validating calendar data against the next-version ical parser.
In the next major release these will be rejected and cause errors.
If any errors occur, please open an issue so they can be fixed before the next major release."
);
validate_calendar_objects_0_12(principal_store.as_ref(), cal_store.as_ref()).await?;
validate_address_objects_0_12(principal_store.as_ref(), addr_store.as_ref()).await?;
let mut tasks = vec![];
if config.dav_push.enabled {
let dav_push_controller = DavPushController::new(
config.dav_push.allowed_push_servers,
subscription_store.clone(),
);
tasks.push(tokio::spawn(async move {
dav_push_controller.notifier(update_recv).await;
}));
}
let app = make_app(
addr_store.clone(),
cal_store.clone(),
subscription_store.clone(),
principal_store.clone(),
config.frontend.clone(),
config.oidc.clone(),
&config.nextcloud_login,
config.dav_push.enabled,
config.http.session_cookie_samesite_strict,
config.http.payload_limit_mb,
);
let app = ServiceExt::<Request>::into_make_service(
NormalizePathLayer::trim_trailing_slash().layer(app),
);
let address = format!("{}:{}", config.http.host, config.http.port);
let listener = tokio::net::TcpListener::bind(&address).await?;
tasks.push(tokio::spawn(async move {
info!("RustiCal serving on http://{address}");
axum::serve(listener, app).await.unwrap();
}));
for task in tasks {
task.await?;
cmd_default(args, config, None, true).await
}
}
}
Ok(())
}
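main() now only loads the configuration and dispatches to the library commands. The parse_config() helper itself is outside this hunk; a plausible shape, inferred from the figment providers imported above (the environment-variable prefix and merge order are assumptions and may differ from the repo):

use figment::Figment;
use figment::providers::{Env, Format, Toml};
use rustical::config::Config;

fn parse_config(config_file: &str) -> Result<Config, figment::Error> {
    Figment::new()
        // The TOML file given via --config-file (default /etc/rustical/config.toml)
        .merge(Toml::file(config_file))
        // Assumed prefix: environment variables override values from the file
        .merge(Env::prefixed("RUSTICAL_"))
        .extract()
}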

View File

@@ -1,81 +0,0 @@
use rustical_store::{AddressbookStore, CalendarStore, auth::AuthenticationProvider};
use tracing::{error, info};
pub async fn validate_calendar_objects_0_12(
principal_store: &impl AuthenticationProvider,
cal_store: &impl CalendarStore,
) -> Result<(), rustical_store::Error> {
let mut success = true;
for principal in principal_store.get_principals().await? {
for calendar in cal_store.get_calendars(&principal.id).await? {
for object in cal_store
.get_objects(&calendar.principal, &calendar.id)
.await?
{
if let Err(err) = ical_dev::parser::ical::IcalObjectParser::from_slice(
object.get_ics().as_bytes(),
)
.expect_one()
{
success = false;
error!(
"An error occured parsing a calendar object: principal={principal}, calendar={calendar}, object_id={object_id}: {err}",
principal = principal.id,
calendar = calendar.id,
object_id = object.get_id()
);
println!("{}", object.get_ics());
}
}
}
}
if success {
info!("Your calendar data seems to be valid in the next major version.");
} else {
error!(
"Not all calendar objects will be successfully parsed in the next major version (v0.12).
This will not cause issues in this version, but please comment under the tracking issue on GitHub:
https://github.com/lennart-k/rustical/issues/165"
);
}
Ok(())
}
pub async fn validate_address_objects_0_12(
principal_store: &impl AuthenticationProvider,
addr_store: &impl AddressbookStore,
) -> Result<(), rustical_store::Error> {
let mut success = true;
for principal in principal_store.get_principals().await? {
for addressbook in addr_store.get_addressbooks(&principal.id).await? {
for object in addr_store
.get_objects(&addressbook.principal, &addressbook.id)
.await?
{
if let Err(err) =
ical_dev::parser::vcard::VcardParser::from_slice(object.get_vcf().as_bytes())
.expect_one()
{
success = false;
error!(
"An error occured parsing an address object: principal={principal}, addressbook={addressbook}, object_id={object_id}: {err}",
principal = principal.id,
addressbook = addressbook.id,
object_id = object.get_id()
);
println!("{}", object.get_vcf());
}
}
}
}
if success {
info!("Your addressbook data seems to be valid in the next major version.");
} else {
error!(
"Not all address objects will be successfully parsed in the next major version (v0.12).
This will not cause issues in this version, but please comment under the tracking issue on GitHub:
https://github.com/lennart-k/rustical/issues/165"
);
}
Ok(())
}

Some files were not shown because too many files have changed in this diff.