Compare commits

1 Commit

Author: Lennart
SHA1: cc384b6124
Message: Merge pull request #97 from lennart-k/feature/sharing (Fix issues with group collections)
Date: 2025-07-18 14:14:23 +02:00
255 changed files with 7485 additions and 8399 deletions

.github/workflows/ci.yml (vendored, new file, 20 lines)
View File

@@ -0,0 +1,20 @@
name: Rust CI
on:
push:
branches: ["main"]
pull_request:
branches: ["main"]
env:
CARGO_TERM_COLOR: always
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --verbose --workspace

View File

@@ -1,57 +0,0 @@
name: "CICD"
on: [push, pull_request]
permissions:
contents: read
pull-requests: write
env:
CARGO_TERM_COLOR: always
jobs:
check:
name: Check
runs-on: ubuntu-latest
steps:
- run: rustup update
- name: Checkout sources
uses: actions/checkout@v4
- run: cargo check
test:
name: Test Suite
runs-on: ubuntu-latest
steps:
- run: rustup update
- name: Checkout sources
uses: actions/checkout@v4
- run: cargo test --all-features --verbose --workspace
coverage:
name: Test Coverage
runs-on: ubuntu-latest
steps:
- run: rustup update
- name: Install tarpaulin
run: cargo install cargo-tarpaulin
- name: Checkout sources
uses: actions/checkout@v4
- name: Run tarpaulin
run: cargo tarpaulin --workspace --all-features --exclude xml_derive --coveralls ${{ secrets.COVERALLS_REPO_TOKEN }}
lints:
name: Lints
runs-on: ubuntu-latest
steps:
- run: rustup update
- run: rustup component add rustfmt clippy
- name: Checkout sources
uses: actions/checkout@v4
- name: Run cargo fmt
run: cargo fmt --all -- --check
- name: Run cargo clippy
run: cargo clippy -- -D warnings

View File

@@ -17,8 +17,6 @@ jobs:
with:
python-version: 3.x
- run: rustup update
- run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
- name: Set up build cache

View File

@@ -1,38 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT principal, cal_id, id, (deleted_at IS NOT NULL) AS \"deleted: bool\"\n FROM calendarobjects\n WHERE (principal, cal_id, id) NOT IN (\n SELECT DISTINCT principal, cal_id, object_id FROM calendarobjectchangelog\n )\n ;\n ",
"describe": {
"columns": [
{
"name": "principal",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "cal_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "deleted: bool",
"ordinal": 3,
"type_info": "Integer"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "053c17f3b54ae3e153137926115486eb19a801bd73a74230bcf72a9a7254824a"
}

View File

@@ -1,32 +0,0 @@
{
"db_name": "SQLite",
"query": "SELECT id, uid, ics FROM calendarobjects\n WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL\n AND (last_occurence IS NULL OR ? IS NULL OR last_occurence >= date(?))\n AND (first_occurence IS NULL OR ? IS NULL OR first_occurence <= date(?))\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "uid",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "ics",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 6
},
"nullable": [
false,
false,
false
]
},
"hash": "3a29efff3d3f6e1e05595d1a2d095af5fc963572c90bd10a6616af78757f8c39"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "REPLACE INTO calendarobjects (principal, cal_id, id, ics, first_occurence, last_occurence, etag, object_type) VALUES (?, ?, ?, ?, date(?), date(?), ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 8
},
"nullable": []
},
"hash": "3e1cca532372e891ab3e604ecb79311d8cd64108d4f238db4c79e9467a3b6d2e"
}
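
These .sqlx/query-<hash>.json files are sqlx's offline query cache, written by cargo sqlx prepare: each entry records the statement text, the result columns, and the parameter count that the corresponding compile-time-checked query must match when building with SQLX_OFFLINE. As a hedged sketch, a runtime-checked equivalent of the entry above could look roughly like this (the helper name and the parameter types are illustrative assumptions, not taken from the repository):

use sqlx::SqlitePool;

// Unchecked runtime counterpart of the cached statement above; the eight binds
// line up with the "Right": 8 parameter count in the JSON. Parameter types are
// assumptions for illustration only.
async fn upsert_calendar_object(
    pool: &SqlitePool,
    principal: &str,
    cal_id: &str,
    id: &str,
    ics: &str,
    first_occurence: Option<&str>,
    last_occurence: Option<&str>,
    etag: &str,
    object_type: &str,
) -> Result<(), sqlx::Error> {
    sqlx::query(
        "REPLACE INTO calendarobjects \
         (principal, cal_id, id, ics, first_occurence, last_occurence, etag, object_type) \
         VALUES (?, ?, ?, ?, date(?), date(?), ?, ?)",
    )
    .bind(principal)
    .bind(cal_id)
    .bind(id)
    .bind(ics)
    .bind(first_occurence)
    .bind(last_occurence)
    .bind(etag)
    .bind(object_type)
    .execute(pool)
    .await?;
    Ok(())
}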

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "UPDATE calendars SET principal = ?, id = ?, displayname = ?, description = ?, \"order\" = ?, color = ?, timezone_id = ?, push_topic = ?, comp_event = ?, comp_todo = ?, comp_journal = ?\n WHERE (principal, id) = (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 13
},
"nullable": []
},
"hash": "46ae176a06e314492f661c28436d6370883052c854da43475d7ced60cf8326e3"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "UPDATE birthday_calendars SET principal = ?, id = ?, displayname = ?, description = ?, \"order\" = ?, color = ?, timezone_id = ?, push_topic = ?\n WHERE (principal, id) = (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 10
},
"nullable": []
},
"hash": "4a05eda4e23e8652312548b179a1cc16f43768074ab9e7ab7b7783395384984e"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO calendars (principal, id, displayname, description, \"order\", color, subscription_url, timezone, timezone_id, push_topic, comp_event, comp_todo, comp_journal)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 13
},
"nullable": []
},
"hash": "5132ee8198f155242aa332a10019c48ec334884bcf7841c8aa03fd5eb11351d9"
}

View File

@@ -1,74 +0,0 @@
{
"db_name": "SQLite",
"query": "SELECT principal, id, displayname, description, \"order\", color, timezone_id, deleted_at, addr_synctoken, push_topic\n FROM birthday_calendars\n INNER JOIN (\n SELECT principal AS addr_principal,\n id AS addr_id,\n synctoken AS addr_synctoken\n FROM addressbooks\n ) ON (principal, id) = (addr_principal, addr_id)\n WHERE (principal, id) = (?, ?)\n AND ((deleted_at IS NULL) OR ?)\n ",
"describe": {
"columns": [
{
"name": "principal",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "displayname",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "description",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "order",
"ordinal": 4,
"type_info": "Integer"
},
{
"name": "color",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "timezone_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "deleted_at",
"ordinal": 7,
"type_info": "Datetime"
},
{
"name": "addr_synctoken",
"ordinal": 8,
"type_info": "Integer"
},
{
"name": "push_topic",
"ordinal": 9,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
false,
true,
true,
false,
true,
true,
true,
false,
false
]
},
"hash": "525fc4eab8a0f3eacff7e3c78ce809943f817abf8c8f9ae50073924bccdea2dc"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
"query": "SELECT id, uid, ics FROM calendarobjects WHERE (principal, cal_id, id) = (?, ?, ?) AND ((deleted_at IS NULL) OR ?)",
"query": "SELECT id, ics FROM calendarobjects WHERE (principal, cal_id, id) = (?, ?, ?) AND ((deleted_at IS NULL) OR ?)",
"describe": {
"columns": [
{
@@ -8,14 +8,9 @@
"ordinal": 0,
"type_info": "Text"
},
{
"name": "uid",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "ics",
"ordinal": 2,
"ordinal": 1,
"type_info": "Text"
}
],
@@ -23,10 +18,9 @@
"Right": 4
},
"nullable": [
false,
false,
false
]
},
"hash": "505ebe8e64ac709b230dce7150240965e45442aca6c5f3b3115738ef508939ed"
"hash": "543838c030550cb09d1af08adfeade8b7ce3575d92fddbc6e9582d141bc9e49d"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
"query": "SELECT id, uid, ics FROM calendarobjects WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL",
"query": "SELECT id, ics FROM calendarobjects WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL",
"describe": {
"columns": [
{
@@ -8,14 +8,9 @@
"ordinal": 0,
"type_info": "Text"
},
{
"name": "uid",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "ics",
"ordinal": 2,
"ordinal": 1,
"type_info": "Text"
}
],
@@ -23,10 +18,9 @@
"Right": 2
},
"nullable": [
false,
false,
false
]
},
"hash": "804ed2a4a7032e9605d1871297498f5a96de0fc816ce660c705fb28318be0d42"
"hash": "54c9c0e36a52e6963f11c6aa27f13aafb4204b8aa34b664fd825bd447db80e86"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "INSERT INTO calendars (principal, id, displayname, description, \"order\", color, subscription_url, timezone_id, push_topic, comp_event, comp_todo, comp_journal)\n VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 12
},
"nullable": []
},
"hash": "60b940ff493e7c0fcb2ffe8ae97172c6444525ffeec21b194bd7443d11d06113"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO calendarobjects (principal, cal_id, id, ics, first_occurence, last_occurence, etag, object_type) VALUES (?, ?, ?, ?, date(?), date(?), ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 8
},
"nullable": []
},
"hash": "6327bee90e5df01536a0ddb15adcc37af3027f6902aa3786365c5ab2fbf06bda"
}

View File

@@ -1,74 +0,0 @@
{
"db_name": "SQLite",
"query": "SELECT principal, id, displayname, description, \"order\", color, timezone_id, deleted_at, addr_synctoken, push_topic\n FROM birthday_calendars\n INNER JOIN (\n SELECT principal AS addr_principal,\n id AS addr_id,\n synctoken AS addr_synctoken\n FROM addressbooks\n ) ON (principal, id) = (addr_principal, addr_id)\n WHERE principal = ?\n AND (\n (deleted_at IS NULL AND NOT ?) -- not deleted, want not deleted\n OR (deleted_at IS NOT NULL AND ?) -- deleted, want deleted\n )\n ",
"describe": {
"columns": [
{
"name": "principal",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "displayname",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "description",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "order",
"ordinal": 4,
"type_info": "Integer"
},
{
"name": "color",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "timezone_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "deleted_at",
"ordinal": 7,
"type_info": "Datetime"
},
{
"name": "addr_synctoken",
"ordinal": 8,
"type_info": "Integer"
},
{
"name": "push_topic",
"ordinal": 9,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
false,
true,
true,
false,
true,
true,
true,
false,
false
]
},
"hash": "66d57f2c99ef37b383a478aff99110e1efbc7ce9332f10da4fa69f7594fb7455"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "UPDATE birthday_calendars SET deleted_at = NULL WHERE (principal, id) = (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "6c039308ad2ec29570ab492d7a0e85fb79c0a4d3b882b74ff1c2786c12324896"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "UPDATE birthday_calendars SET deleted_at = datetime() WHERE (principal, id) = (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "83f0aaf406785e323ac12019ac24f603c53125a1b2326f324c1e2d7b6c690adc"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "REPLACE INTO calendarobjects (principal, cal_id, id, uid, ics, first_occurence, last_occurence, etag, object_type) VALUES (?, ?, ?, ?, ?, date(?), date(?), ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 9
},
"nullable": []
},
"hash": "a68a1b96189b854a7ba2a3cd866ba583af5ad84bc1cd8b20cb805e9ce3bad820"
}

View File

@@ -39,39 +39,44 @@
"type_info": "Text"
},
{
"name": "timezone_id",
"name": "timezone",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "deleted_at",
"name": "timezone_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "deleted_at",
"ordinal": 9,
"type_info": "Datetime"
},
{
"name": "subscription_url",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "push_topic",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "comp_event",
"name": "push_topic",
"ordinal": 11,
"type_info": "Bool"
"type_info": "Text"
},
{
"name": "comp_todo",
"name": "comp_event",
"ordinal": 12,
"type_info": "Bool"
},
{
"name": "comp_journal",
"name": "comp_todo",
"ordinal": 13,
"type_info": "Bool"
},
{
"name": "comp_journal",
"ordinal": 14,
"type_info": "Bool"
}
],
"parameters": {
@@ -88,6 +93,7 @@
true,
true,
true,
true,
false,
false,
false,

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "INSERT INTO birthday_calendars (principal, id, displayname, push_topic)\n VALUES (?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "bfdf662cd03e741b7a36f5e2ac01d32ac367c52ce41bd70394f754248b29749c"
}

View File

@@ -1,38 +0,0 @@
{
"db_name": "SQLite",
"query": "\n SELECT principal, addressbook_id, id, (deleted_at IS NOT NULL) AS \"deleted: bool\"\n FROM addressobjects\n WHERE (principal, addressbook_id, id) NOT IN (\n SELECT DISTINCT principal, addressbook_id, object_id FROM addressobjectchangelog\n )\n ;\n ",
"describe": {
"columns": [
{
"name": "principal",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "addressbook_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "deleted: bool",
"ordinal": 3,
"type_info": "Integer"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "c138b1143ac04af4930266ffae0990e82005911c11a683ad565e92335e085f4d"
}

View File

@@ -0,0 +1,26 @@
{
"db_name": "SQLite",
"query": "SELECT id, ics FROM calendarobjects\n WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL\n AND (last_occurence IS NULL OR ? IS NULL OR last_occurence >= date(?))\n AND (first_occurence IS NULL OR ? IS NULL OR first_occurence <= date(?))\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "ics",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 6
},
"nullable": [
false,
false
]
},
"hash": "c550dbf3d5ce7069f28d767ea9045e477ef8d29d6186851760757a06dec42339"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "DELETE FROM birthday_calendars WHERE (principal, id) = (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "cadc4ac16b7ac22b71c91ab36ad9dbb1dec943798d795fcbc811f4c651fea02a"
}

View File

@@ -1,6 +1,6 @@
{
"db_name": "SQLite",
"query": "SELECT principal, id, displayname, \"order\", description, color, timezone_id, deleted_at, synctoken, subscription_url, push_topic, comp_event, comp_todo, comp_journal\n FROM calendars\n WHERE principal = ? AND deleted_at IS NOT NULL",
"query": "SELECT *\n FROM calendars\n WHERE principal = ? AND deleted_at IS NOT NULL",
"describe": {
"columns": [
{
@@ -14,14 +14,14 @@
"type_info": "Text"
},
{
"name": "displayname",
"name": "synctoken",
"ordinal": 2,
"type_info": "Text"
"type_info": "Integer"
},
{
"name": "order",
"name": "displayname",
"ordinal": 3,
"type_info": "Integer"
"type_info": "Text"
},
{
"name": "description",
@@ -29,49 +29,54 @@
"type_info": "Text"
},
{
"name": "color",
"name": "order",
"ordinal": 5,
"type_info": "Text"
"type_info": "Integer"
},
{
"name": "timezone_id",
"name": "color",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "deleted_at",
"name": "timezone",
"ordinal": 7,
"type_info": "Datetime"
},
{
"name": "synctoken",
"ordinal": 8,
"type_info": "Integer"
},
{
"name": "subscription_url",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "push_topic",
"name": "timezone_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "deleted_at",
"ordinal": 9,
"type_info": "Datetime"
},
{
"name": "subscription_url",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "comp_event",
"name": "push_topic",
"ordinal": 11,
"type_info": "Bool"
"type_info": "Text"
},
{
"name": "comp_todo",
"name": "comp_event",
"ordinal": 12,
"type_info": "Bool"
},
{
"name": "comp_journal",
"name": "comp_todo",
"ordinal": 13,
"type_info": "Bool"
},
{
"name": "comp_journal",
"ordinal": 14,
"type_info": "Bool"
}
],
"parameters": {
@@ -80,13 +85,14 @@
"nullable": [
false,
false,
false,
true,
true,
false,
true,
true,
true,
true,
false,
true,
false,
false,
@@ -94,5 +100,5 @@
false
]
},
"hash": "27ac68a4eea40c1cac663cad034028cf6c373354b29e3a5290c18f58101913cd"
"hash": "cce62f7829bd688cd8c7928b587bc31f0e50865c214b1df113350bea2c254237"
}

View File

@@ -39,39 +39,44 @@
"type_info": "Text"
},
{
"name": "timezone_id",
"name": "timezone",
"ordinal": 7,
"type_info": "Text"
},
{
"name": "deleted_at",
"name": "timezone_id",
"ordinal": 8,
"type_info": "Text"
},
{
"name": "deleted_at",
"ordinal": 9,
"type_info": "Datetime"
},
{
"name": "subscription_url",
"ordinal": 9,
"type_info": "Text"
},
{
"name": "push_topic",
"ordinal": 10,
"type_info": "Text"
},
{
"name": "comp_event",
"name": "push_topic",
"ordinal": 11,
"type_info": "Bool"
"type_info": "Text"
},
{
"name": "comp_todo",
"name": "comp_event",
"ordinal": 12,
"type_info": "Bool"
},
{
"name": "comp_journal",
"name": "comp_todo",
"ordinal": 13,
"type_info": "Bool"
},
{
"name": "comp_journal",
"ordinal": 14,
"type_info": "Bool"
}
],
"parameters": {
@@ -88,6 +93,7 @@
true,
true,
true,
true,
false,
false,
false,

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "INSERT INTO calendarobjects (principal, cal_id, id, uid, ics, first_occurence, last_occurence, etag, object_type) VALUES (?, ?, ?, ?, ?, date(?), date(?), ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 9
},
"nullable": []
},
"hash": "d498a758ed707408b00b7d2675250ea739a681ce1f009f05e97f2e101bd7e556"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "UPDATE calendars SET principal = ?, id = ?, displayname = ?, description = ?, \"order\" = ?, color = ?, timezone = ?, timezone_id = ?, push_topic = ?, comp_event = ?, comp_todo = ?, comp_journal = ?\n WHERE (principal, id) = (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 14
},
"nullable": []
},
"hash": "d65c9c40606e59dd816a51b9b9ac60fd2ff81aaa358fcc038134e9a68ba45ad7"
}

Cargo.lock (generated, 1529 lines changed)

File diff suppressed because it is too large.

View File

@@ -2,24 +2,21 @@
members = ["crates/*"]
[workspace.package]
version = "0.10.5"
rust-version = "1.91"
version = "0.4.13"
edition = "2024"
description = "A CalDAV server"
documentation = "https://lennart-k.github.io/rustical/"
repository = "https://github.com/lennart-k/rustical"
license = "AGPL-3.0-or-later"
[package]
name = "rustical"
version.workspace = true
rust-version.workspace = true
edition.workspace = true
description.workspace = true
repository.workspace = true
license.workspace = true
resolver = "2"
publish = true
publish = false
[features]
debug = ["opentelemetry"]
@@ -37,18 +34,7 @@ opentelemetry = [
debug = 0
[workspace.dependencies]
rustical_dav = { path = "./crates/dav/" }
rustical_dav_push = { path = "./crates/dav_push/" }
rustical_store = { path = "./crates/store/" }
rustical_store_sqlite = { path = "./crates/store_sqlite/" }
rustical_caldav = { path = "./crates/caldav/" }
rustical_carddav = { path = "./crates/carddav/" }
rustical_frontend = { path = "./crates/frontend/" }
rustical_xml = { path = "./crates/xml/" }
rustical_oidc = { path = "./crates/oidc/" }
rustical_ical = { path = "./crates/ical/" }
matchit = "0.9"
matchit = "0.8"
uuid = { version = "1.11", features = ["v4", "fast-rng"] }
async-trait = "0.1"
axum = "0.8"
@@ -61,7 +47,8 @@ pbkdf2 = { version = "0.12", features = ["simple"] }
rand_core = { version = "0.9", features = ["std"] }
chrono = { version = "0.4", features = ["serde"] }
regex = "1.10"
rstest = "0.26"
lazy_static = "1.5"
rstest = "0.25"
rstest_reuse = "0.7"
sha2 = "0.10"
tokio = { version = "1", features = [
@@ -74,7 +61,7 @@ tokio = { version = "1", features = [
url = "2.5"
base64 = "0.22"
thiserror = "2.0"
quick-xml = { version = "0.38" }
quick-xml = { version = "0.37" }
rust-embed = "8.5"
tower-sessions = "0.14"
futures-core = "0.3.31"
@@ -108,12 +95,8 @@ strum = "0.27"
strum_macros = "0.27"
serde_json = { version = "1.0", features = ["raw_value"] }
sqlx-sqlite = { version = "0.8", features = ["bundled"] }
ical = { git = "https://github.com/lennart-k/ical-rs", features = [
"generator",
"serde",
"chrono-tz",
] }
toml = "0.9"
ical = { version = "0.11", features = ["generator", "serde"] }
toml = "0.8"
tower = "0.5"
tower-http = { version = "0.6", features = [
"trace",
@@ -121,10 +104,20 @@ tower-http = { version = "0.6", features = [
"catch-panic",
] }
percent-encoding = "2.3"
rustical_dav = { path = "./crates/dav/" }
rustical_dav_push = { path = "./crates/dav_push/" }
rustical_store = { path = "./crates/store/" }
rustical_store_sqlite = { path = "./crates/store_sqlite/" }
rustical_caldav = { path = "./crates/caldav/" }
rustical_carddav = { path = "./crates/carddav/" }
rustical_frontend = { path = "./crates/frontend/" }
rustical_xml = { path = "./crates/xml/" }
rustical_oidc = { path = "./crates/oidc/" }
rustical_ical = { path = "./crates/ical/" }
chrono-tz = "0.10"
chrono-humanize = "0.2"
rand = "0.9"
axum-extra = { version = "0.12", features = ["typed-header"] }
axum-extra = { version = "0.10", features = ["typed-header"] }
rrule = "0.14"
argon2 = "0.5"
rpassword = "7.3"
@@ -133,7 +126,7 @@ syn = { version = "2.0", features = ["full"] }
quote = "1.0"
proc-macro2 = "1.0"
heck = "0.5"
darling = "0.23"
darling = "0.20"
reqwest = { version = "0.12", features = [
"rustls-tls",
"charset",
@@ -141,41 +134,39 @@ reqwest = { version = "0.12", features = [
], default-features = false }
openidconnect = "4.0"
clap = { version = "4.5", features = ["derive", "env"] }
matchit-serde = { git = "https://github.com/lennart-k/matchit-serde", rev = "e18e65d7" }
vtimezones-rs = "0.2"
matchit-serde = { git = "https://github.com/lennart-k/matchit-serde", rev = "f0591d13" }
ece = { version = "2.3", default-features = false, features = [
"backend-openssl",
] }
openssl = { version = "0.10", features = ["vendored"] }
async-std = { version = "1.13", features = ["attributes"] }
similar-asserts = "1.7"
[dependencies]
rustical_store.workspace = true
rustical_store_sqlite.workspace = true
rustical_caldav.workspace = true
rustical_store = { workspace = true }
rustical_store_sqlite = { workspace = true }
rustical_caldav = { workspace = true }
rustical_carddav.workspace = true
rustical_frontend.workspace = true
toml.workspace = true
serde.workspace = true
tokio.workspace = true
tracing.workspace = true
anyhow.workspace = true
rustical_frontend = { workspace = true }
toml = { workspace = true }
serde = { workspace = true }
tokio = { workspace = true }
tracing = { workspace = true }
anyhow = { workspace = true }
clap.workspace = true
sqlx.workspace = true
async-trait.workspace = true
sqlx = { workspace = true }
async-trait = { workspace = true }
uuid.workspace = true
axum.workspace = true
opentelemetry = { version = "0.31", optional = true }
opentelemetry-otlp = { version = "0.31", optional = true, features = [
opentelemetry = { version = "0.30", optional = true }
opentelemetry-otlp = { version = "0.30", optional = true, features = [
"grpc-tonic",
] }
opentelemetry_sdk = { version = "0.31", features = [
opentelemetry_sdk = { version = "0.30", features = [
"rt-tokio",
], optional = true }
opentelemetry-semantic-conventions = { version = "0.31", optional = true }
tracing-opentelemetry = { version = "0.32", optional = true }
opentelemetry-semantic-conventions = { version = "0.30", optional = true }
tracing-opentelemetry = { version = "0.31", optional = true }
tracing-subscriber = { version = "0.3", features = [
"env-filter",
"fmt",

View File

@@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM rust:1.91-alpine AS chef
FROM --platform=$BUILDPLATFORM rust:1.88-alpine AS chef
ARG TARGETPLATFORM
ARG BUILDPLATFORM
@@ -45,7 +45,4 @@ CMD ["/usr/local/bin/rustical"]
ENV RUSTICAL_DATA_STORE__SQLITE__DB_URL=/var/lib/rustical/db.sqlite3
LABEL org.opencontainers.image.authors="Lennart K github.com/lennart-k"
LABEL org.opencontainers.image.licenses="AGPL-3.0-or-later"
EXPOSE 4000
HEALTHCHECK --interval=30s --timeout=30s --start-period=3s --retries=3 CMD ["/usr/local/bin/rustical", "health"]

View File

@@ -12,6 +12,3 @@ docs:
docs-dev:
mkdocs serve
coverage:
cargo tarpaulin --workspace --exclude xml_derive

View File

@@ -4,23 +4,21 @@ a CalDAV/CardDAV server
> [!WARNING]
RustiCal is under **active development**!
While I've been successfully using RustiCal productively for some months now and there seems to be a growing user base,
While I've been successfully using RustiCal productively for a few weeks now,
you'd still be one of the first testers so expect bugs and rough edges.
If you still want to use it in its current state, absolutely feel free to do so and to open up an issue if something is not working. :)
If you still want to play around with it in its current state, absolutely feel free to do so and to open up an issue if something is not working. :)
## Features
- easy to backup, everything saved in one SQLite database
- also export feature in the frontend
- Import your existing calendars in the frontend
- **[WebDAV Push](https://github.com/bitfireAT/webdav-push/)** support, so near-instant synchronisation to DAVx5
- [WebDAV Push](https://github.com/bitfireAT/webdav-push/) support, so near-instant synchronisation to DAVx5
- lightweight (the container image contains only one binary)
- adequately fast (I'd love to say blazingly fast™ :fire: but I don't have any benchmarks)
- deleted calendars are recoverable
- Nextcloud login flow (In DAVx5 you can login through the Nextcloud flow and automatically generate an app token)
- Apple configuration profiles (skip copy-pasting passwords and instead generate the configuration in the frontend)
- **OpenID Connect** support (with option to disable password login)
- Group-based **sharing**
- OpenID Connect support (with option to disable password login)
## Getting Started

View File

@@ -7,7 +7,6 @@ accepted = [
"CDLA-Permissive-2.0",
"Zlib",
"AGPL-3.0",
"GPL-3.0",
"MPL-2.0",
]
workarounds = ["ring", "chrono", "rustls"]

View File

@@ -1,22 +0,0 @@
services:
rustical:
image: ghcr.io/lennart-k/rustical:latest
restart: unless-stopped
environment:
RUSTICAL_FRONTEND__ALLOW_PASSWORD_LOGIN: "false"
RUSTICAL_OIDC__NAME: "Authelia"
RUSTICAL_OIDC__ISSUER: "https://auth.example.com"
RUSTICAL_OIDC__CLIENT_ID: "{{ rustical_oidc_client_id }}"
RUSTICAL_OIDC__CLIENT_SECRET: "{{ rustical_oidc_client_secret }}"
RUSTICAL_OIDC__CLAIM_USERID: "preferred_username"
RUSTICAL_OIDC__SCOPES: '["openid", "profile", "groups"]'
RUSTICAL_OIDC__REQUIRE_GROUP: "app:rustical" # optional
RUSTICAL_OIDC__ALLOW_SIGN_UP: "true"
volumes:
- data:/var/lib/rustical
# Here you probably want to use expose instead
ports:
- 4000:4000
volumes:
data:

View File

@@ -1,7 +1,6 @@
[package]
name = "rustical_caldav"
version.workspace = true
rust-version.workspace = true
edition.workspace = true
description.workspace = true
repository.workspace = true
@@ -12,27 +11,26 @@ publish = false
rustical_store_sqlite = { workspace = true, features = ["test"] }
rstest.workspace = true
async-std.workspace = true
serde_json.workspace = true
[dependencies]
axum.workspace = true
axum-extra.workspace = true
tower.workspace = true
async-trait.workspace = true
thiserror.workspace = true
quick-xml.workspace = true
tracing.workspace = true
futures-util.workspace = true
derive_more.workspace = true
base64.workspace = true
serde.workspace = true
tokio.workspace = true
url.workspace = true
rustical_dav.workspace = true
rustical_store.workspace = true
chrono.workspace = true
chrono-tz.workspace = true
sha2.workspace = true
async-trait = { workspace = true }
thiserror = { workspace = true }
quick-xml = { workspace = true }
tracing = { workspace = true }
futures-util = { workspace = true }
derive_more = { workspace = true }
base64 = { workspace = true }
serde = { workspace = true }
tokio = { workspace = true }
url = { workspace = true }
rustical_dav = { workspace = true }
rustical_store = { workspace = true }
chrono = { workspace = true }
chrono-tz = { workspace = true }
sha2 = { workspace = true }
ical.workspace = true
percent-encoding.workspace = true
rustical_xml.workspace = true
@@ -44,5 +42,3 @@ headers.workspace = true
tower-http.workspace = true
strum.workspace = true
strum_macros.workspace = true
vtimezones-rs.workspace = true
similar-asserts.workspace = true

View File

@@ -8,7 +8,7 @@ use http::{HeaderValue, Method, StatusCode, header};
use ical::generator::{Emitter, IcalCalendarBuilder};
use ical::property::Property;
use percent_encoding::{CONTROLS, utf8_percent_encode};
use rustical_ical::{CalendarObjectComponent, EventObject};
use rustical_ical::{CalendarObjectComponent, EventObject, JournalObject, TodoObject};
use rustical_store::{CalendarStore, SubscriptionStore, auth::Principal};
use std::collections::HashMap;
use std::str::FromStr;
@@ -32,67 +32,58 @@ pub async fn route_get<C: CalendarStore, S: SubscriptionStore>(
return Err(crate::Error::Unauthorized);
}
let mut vtimezones = HashMap::new();
let calendar = cal_store
.get_calendar(&principal, &calendar_id, true)
.await?;
let mut timezones = HashMap::new();
let objects = cal_store.get_objects(&principal, &calendar_id).await?;
let mut ical_calendar_builder = IcalCalendarBuilder::version("4.0")
.gregorian()
.prodid("RustiCal");
if let Some(displayname) = calendar.meta.displayname {
if calendar.displayname.is_some() {
ical_calendar_builder = ical_calendar_builder.set(Property {
name: "X-WR-CALNAME".to_owned(),
value: Some(displayname),
value: calendar.displayname,
params: None,
});
}
if let Some(description) = calendar.meta.description {
if calendar.description.is_some() {
ical_calendar_builder = ical_calendar_builder.set(Property {
name: "X-WR-CALDESC".to_owned(),
value: Some(description),
value: calendar.description,
params: None,
});
}
if let Some(timezone_id) = calendar.timezone_id {
if calendar.timezone_id.is_some() {
ical_calendar_builder = ical_calendar_builder.set(Property {
name: "X-WR-TIMEZONE".to_owned(),
value: Some(timezone_id),
value: calendar.timezone_id,
params: None,
});
}
let mut ical_calendar = ical_calendar_builder.build();
for object in &objects {
vtimezones.extend(object.get_vtimezones());
match object.get_data() {
CalendarObjectComponent::Event(EventObject { event, .. }, overrides) => {
ical_calendar_builder = ical_calendar_builder.add_event(event.clone());
for ev_override in overrides {
ical_calendar_builder =
ical_calendar_builder.add_event(ev_override.event.clone());
}
CalendarObjectComponent::Event(EventObject {
event,
timezones: object_timezones,
..
}) => {
timezones.extend(object_timezones);
ical_calendar.events.push(event.clone());
}
CalendarObjectComponent::Todo(todo, overrides) => {
ical_calendar_builder = ical_calendar_builder.add_todo(todo.clone());
for ev_override in overrides {
ical_calendar_builder = ical_calendar_builder.add_todo(ev_override.clone());
}
CalendarObjectComponent::Todo(TodoObject { todo, .. }) => {
ical_calendar.todos.push(todo.clone());
}
CalendarObjectComponent::Journal(journal, overrides) => {
ical_calendar_builder = ical_calendar_builder.add_journal(journal.clone());
for ev_override in overrides {
ical_calendar_builder = ical_calendar_builder.add_journal(ev_override.clone());
}
CalendarObjectComponent::Journal(JournalObject { journal, .. }) => {
ical_calendar.journals.push(journal.clone());
}
}
}
for vtimezone in vtimezones.into_values() {
ical_calendar_builder = ical_calendar_builder.add_tz(vtimezone.to_owned());
}
let ical_calendar = ical_calendar_builder
.build()
.map_err(|parser_error| Error::IcalError(parser_error.into()))?;
let mut resp = Response::builder().status(StatusCode::OK);
let hdrs = resp.headers_mut().unwrap();
hdrs.typed_insert(ContentType::from_str("text/calendar").unwrap());
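
Both sides of this hunk set the optional calendar metadata as X-WR-* properties using the same Property shape; only the field access differs (calendar.meta.displayname versus calendar.displayname). A small hedged helper distilling that pattern, with an illustrative function name:

use ical::property::Property;

// Build an X-WR-* property only when the metadata value is present; each
// Some(property) can then be handed to IcalCalendarBuilder::set as in the
// handler above.
fn wr_property(name: &str, value: Option<String>) -> Option<Property> {
    value.map(|value| Property {
        name: name.to_owned(),
        value: Some(value),
        params: None,
    })
}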

View File

@@ -1,110 +0,0 @@
use crate::Error;
use crate::calendar::CalendarResourceService;
use axum::{
extract::{Path, State},
response::{IntoResponse, Response},
};
use http::StatusCode;
use ical::{
generator::Emitter,
parser::{Component, ComponentMut},
};
use rustical_dav::header::Overwrite;
use rustical_ical::{CalendarObject, CalendarObjectType};
use rustical_store::{
Calendar, CalendarMetadata, CalendarStore, SubscriptionStore, auth::Principal,
};
use std::io::BufReader;
use tracing::instrument;
#[instrument(skip(resource_service))]
pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
Path((principal, cal_id)): Path<(String, String)>,
user: Principal,
State(resource_service): State<CalendarResourceService<C, S>>,
Overwrite(overwrite): Overwrite,
body: String,
) -> Result<Response, Error> {
if !user.is_principal(&principal) {
return Err(Error::Unauthorized);
}
let mut parser = ical::IcalParser::new(BufReader::new(body.as_bytes()));
let mut cal = parser
.next()
.expect("input must contain calendar")
.unwrap()
.mutable();
if parser.next().is_some() {
return Err(rustical_ical::Error::InvalidData(
"multiple calendars, only one allowed".to_owned(),
)
.into());
}
// Extract calendar metadata
let displayname = cal
.get_property("X-WR-CALNAME")
.and_then(|prop| prop.value.clone());
let description = cal
.get_property("X-WR-CALDESC")
.and_then(|prop| prop.value.clone());
let timezone_id = cal
.get_property("X-WR-TIMEZONE")
.and_then(|prop| prop.value.clone());
// These properties should not appear in the expanded calendar objects
cal.remove_property("X-WR-CALNAME");
cal.remove_property("X-WR-CALDESC");
cal.remove_property("X-WR-TIMEZONE");
let cal = cal.verify().unwrap();
// Make sure timezone is valid
if let Some(timezone_id) = timezone_id.as_ref() {
assert!(
vtimezones_rs::VTIMEZONES.contains_key(timezone_id),
"Invalid calendar timezone id"
);
}
// Extract necessary component types
let mut cal_components = vec![];
if !cal.events.is_empty() {
cal_components.push(CalendarObjectType::Event);
}
if !cal.journals.is_empty() {
cal_components.push(CalendarObjectType::Journal);
}
if !cal.todos.is_empty() {
cal_components.push(CalendarObjectType::Todo);
}
let expanded_cals = cal.expand_calendar();
// Janky way to convert between IcalCalendar and CalendarObject
let objects = expanded_cals
.into_iter()
.map(|cal| cal.generate())
.map(|ics| CalendarObject::from_ics(ics, None))
.collect::<Result<Vec<_>, _>>()?;
let new_cal = Calendar {
principal,
id: cal_id,
meta: CalendarMetadata {
displayname,
order: 0,
description,
color: None,
},
timezone_id,
deleted_at: None,
synctoken: 0,
subscription_url: None,
push_topic: uuid::Uuid::new_v4().to_string(),
components: cal_components,
};
let cal_store = resource_service.cal_store;
cal_store
.import_calendar(new_cal, objects, overwrite)
.await?;
Ok(StatusCode::OK.into_response())
}

View File

@@ -4,11 +4,10 @@ use crate::calendar::prop::SupportedCalendarComponentSet;
use axum::extract::{Path, State};
use axum::response::{IntoResponse, Response};
use http::{Method, StatusCode};
use ical::IcalParser;
use rustical_dav::xml::HrefElement;
use rustical_ical::CalendarObjectType;
use rustical_store::auth::Principal;
use rustical_store::{Calendar, CalendarMetadata, CalendarStore, SubscriptionStore};
use rustical_store::{Calendar, CalendarStore, SubscriptionStore};
use rustical_xml::{Unparsed, XmlDeserialize, XmlDocument, XmlRootTag};
use tracing::instrument;
@@ -46,7 +45,7 @@ pub struct PropElement {
}
#[derive(XmlDeserialize, XmlRootTag, Clone, Debug)]
#[xml(root = "mkcalendar")]
#[xml(root = b"mkcalendar")]
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
struct MkcalendarRequest {
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
@@ -54,7 +53,7 @@ struct MkcalendarRequest {
}
#[derive(XmlDeserialize, XmlRootTag, Clone, Debug)]
#[xml(root = "mkcol")]
#[xml(root = b"mkcol")]
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
struct MkcolRequest {
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
@@ -79,55 +78,31 @@ pub async fn route_mkcalendar<C: CalendarStore, S: SubscriptionStore>(
_ => unreachable!("We never call with another method"),
};
if request.displayname.as_deref() == Some("") {
request.displayname = None;
if let Some("") = request.displayname.as_deref() {
request.displayname = None
}
let timezone_id = if let Some(tzid) = request.calendar_timezone_id {
Some(tzid)
} else if let Some(tz) = request.calendar_timezone {
// TODO: Proper error (calendar-timezone precondition)
let calendar = IcalParser::new(tz.as_bytes())
.next()
.ok_or_else(|| rustical_dav::Error::BadRequest("No timezone data provided".to_owned()))?
.map_err(|_| rustical_dav::Error::BadRequest("No timezone data provided".to_owned()))?;
let timezone = calendar.timezones.first().ok_or_else(|| {
rustical_dav::Error::BadRequest("No timezone data provided".to_owned())
})?;
let timezone: chrono_tz::Tz = timezone
.try_into()
.map_err(|_| rustical_dav::Error::BadRequest("No timezone data provided".to_owned()))?;
Some(timezone.name().to_owned())
} else {
None
};
let calendar = Calendar {
id: cal_id.clone(),
principal: principal.clone(),
meta: CalendarMetadata {
order: request.calendar_order.unwrap_or(0),
displayname: request.displayname,
color: request.calendar_color,
description: request.calendar_description,
},
timezone_id,
id: cal_id.to_owned(),
principal: principal.to_owned(),
order: request.calendar_order.unwrap_or(0),
displayname: request.displayname,
timezone: request.calendar_timezone,
timezone_id: request.calendar_timezone_id,
color: request.calendar_color,
description: request.calendar_description,
deleted_at: None,
synctoken: 0,
subscription_url: request.source.map(|href| href.href),
push_topic: uuid::Uuid::new_v4().to_string(),
components: request.supported_calendar_component_set.map_or_else(
|| {
vec![
CalendarObjectType::Event,
CalendarObjectType::Todo,
CalendarObjectType::Journal,
]
},
Into::into,
),
components: request
.supported_calendar_component_set
.map(Into::into)
.unwrap_or(vec![
CalendarObjectType::Event,
CalendarObjectType::Todo,
CalendarObjectType::Journal,
]),
};
cal_store.insert_calendar(calendar).await?;

View File

@@ -1,5 +1,4 @@
pub mod get;
pub mod import;
pub mod mkcalendar;
pub mod post;
pub mod report;

View File

@@ -49,12 +49,12 @@ pub async fn route_post<C: CalendarStore, S: SubscriptionStore>(
};
let subscription = Subscription {
id: sub_id.clone(),
id: sub_id.to_owned(),
push_resource: request
.subscription
.web_push_subscription
.push_resource
.clone(),
.to_owned(),
topic: calendar_resource.cal.push_topic,
expiration: expires.naive_local(),
public_key: request

View File

@@ -4,10 +4,10 @@ use rustical_ical::CalendarObject;
use rustical_store::CalendarStore;
use rustical_xml::XmlDeserialize;
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// <!ELEMENT calendar-query ((DAV:allprop | DAV:propname | DAV:prop)?, href+)>
pub struct CalendarMultigetRequest {
pub(crate) struct CalendarMultigetRequest {
#[xml(ty = "untagged")]
pub(crate) prop: PropfindType<CalendarObjectPropWrapperName>,
#[xml(flatten)]
@@ -26,21 +26,21 @@ pub async fn get_objects_calendar_multiget<C: CalendarStore>(
let mut not_found = vec![];
for href in &cal_query.href {
if let Ok(href) = percent_encoding::percent_decode_str(href).decode_utf8()
&& let Some(filename) = href.strip_prefix(path)
{
let filename = filename.trim_start_matches('/');
if let Some(filename) = href.strip_prefix(path) {
let filename = filename.trim_start_matches("/");
if let Some(object_id) = filename.strip_suffix(".ics") {
match store.get_object(principal, cal_id, object_id, false).await {
Ok(object) => result.push(object),
Err(rustical_store::Error::NotFound) => not_found.push(href.to_string()),
Err(rustical_store::Error::NotFound) => not_found.push(href.to_owned()),
Err(err) => return Err(err.into()),
}
};
} else {
not_found.push(href.to_string());
not_found.push(href.to_owned());
continue;
}
} else {
not_found.push(href.to_owned());
continue;
}
}
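
The href handling above reduces to one small pure step: percent-decode the href, strip the collection path, and take the object id from the trailing ".ics". A hedged sketch with an illustrative helper name:

use percent_encoding::percent_decode_str;

// Recover the calendar object id from a multiget href, mirroring the handler
// above; returns None for hrefs outside the collection or without ".ics".
fn object_id_from_href(href: &str, path: &str) -> Option<String> {
    let decoded = percent_decode_str(href).decode_utf8().ok()?;
    let filename = decoded.strip_prefix(path)?.trim_start_matches('/');
    filename.strip_suffix(".ics").map(|id| id.to_owned())
}

// e.g. object_id_from_href("/user/cal/event%201.ics", "/user/cal") yields Some("event 1")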

View File

@@ -0,0 +1,205 @@
use crate::{Error, calendar_object::CalendarObjectPropWrapperName};
use rustical_dav::xml::PropfindType;
use rustical_ical::{CalendarObject, UtcDateTime};
use rustical_store::{CalendarStore, calendar_store::CalendarQuery};
use rustical_xml::XmlDeserialize;
use std::ops::Deref;
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
pub(crate) struct TimeRangeElement {
#[xml(ty = "attr")]
pub(crate) start: Option<UtcDateTime>,
#[xml(ty = "attr")]
pub(crate) end: Option<UtcDateTime>,
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
struct ParamFilterElement {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
is_not_defined: Option<()>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
text_match: Option<TextMatchElement>,
#[xml(ty = "attr")]
name: String,
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
struct TextMatchElement {
#[xml(ty = "attr")]
collation: String,
#[xml(ty = "attr")]
negate_collation: String,
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
pub(crate) struct PropFilterElement {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
is_not_defined: Option<()>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
time_range: Option<TimeRangeElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
text_match: Option<TextMatchElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
param_filter: Vec<ParamFilterElement>,
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.1
pub(crate) struct CompFilterElement {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) is_not_defined: Option<()>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) time_range: Option<TimeRangeElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
pub(crate) prop_filter: Vec<PropFilterElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
pub(crate) comp_filter: Vec<CompFilterElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", ty = "attr")]
pub(crate) name: String,
}
impl CompFilterElement {
// match the VCALENDAR part
pub fn matches_root(&self, cal_object: &CalendarObject) -> bool {
let comp_vcal = self.name == "VCALENDAR";
match (self.is_not_defined, comp_vcal) {
// Client wants VCALENDAR to not exist but we are a VCALENDAR
(Some(()), true) => return false,
// Client is asking for something different than a vcalendar
(None, false) => return false,
_ => {}
};
if self.time_range.is_some() {
// <time-range> should be applied on VEVENT/VTODO but not on VCALENDAR
return false;
}
// TODO: Implement prop-filter at some point
// Apply sub-comp-filters on VEVENT/VTODO/VJOURNAL component
if self
.comp_filter
.iter()
.all(|filter| filter.matches(cal_object))
{
return true;
}
false
}
// match the VEVENT/VTODO/VJOURNAL part
pub fn matches(&self, cal_object: &CalendarObject) -> bool {
let comp_name_matches = self.name == cal_object.get_component_name();
match (self.is_not_defined, comp_name_matches) {
// Client wants this component type to not exist but it is present
(Some(()), true) => return false,
// Client is asking for a different component type than this one
(None, false) => return false,
_ => {}
};
// TODO: Implement prop-filter (and comp-filter?) at some point
if let Some(time_range) = &self.time_range {
if let Some(start) = &time_range.start {
if let Some(last_occurence) = cal_object.get_last_occurence().unwrap_or(None) {
if start.deref() > &last_occurence.utc() {
return false;
}
};
}
if let Some(end) = &time_range.end {
if let Some(first_occurence) = cal_object.get_first_occurence().unwrap_or(None) {
if end.deref() < &first_occurence.utc() {
return false;
}
};
}
}
true
}
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// https://datatracker.ietf.org/doc/html/rfc4791#section-9.7
pub(crate) struct FilterElement {
// This comp-filter matches on VCALENDAR
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) comp_filter: CompFilterElement,
}
impl FilterElement {
pub fn matches(&self, cal_object: &CalendarObject) -> bool {
self.comp_filter.matches_root(cal_object)
}
}
impl From<&FilterElement> for CalendarQuery {
fn from(value: &FilterElement) -> Self {
let comp_filter_vcalendar = &value.comp_filter;
for comp_filter in comp_filter_vcalendar.comp_filter.iter() {
// A calendar object cannot contain both VEVENT and VTODO, so we only have to handle
// whatever we get first
if matches!(comp_filter.name.as_str(), "VEVENT" | "VTODO") {
if let Some(time_range) = &comp_filter.time_range {
let start = time_range.start.as_ref().map(|start| start.date_naive());
let end = time_range.end.as_ref().map(|end| end.date_naive());
return CalendarQuery {
time_start: start,
time_end: end,
};
}
}
}
Default::default()
}
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// <!ELEMENT calendar-query ((DAV:allprop | DAV:propname | DAV:prop)?, filter, timezone?)>
pub struct CalendarQueryRequest {
#[xml(ty = "untagged")]
pub prop: PropfindType<CalendarObjectPropWrapperName>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) filter: Option<FilterElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) timezone: Option<String>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) timezone_id: Option<String>,
}
impl From<&CalendarQueryRequest> for CalendarQuery {
fn from(value: &CalendarQueryRequest) -> Self {
value
.filter
.as_ref()
.map(CalendarQuery::from)
.unwrap_or_default()
}
}
pub async fn get_objects_calendar_query<C: CalendarStore>(
cal_query: &CalendarQueryRequest,
principal: &str,
cal_id: &str,
store: &C,
) -> Result<Vec<CalendarObject>, Error> {
let mut objects = store
.calendar_query(principal, cal_id, cal_query.into())
.await?;
if let Some(filter) = &cal_query.filter {
objects.retain(|object| filter.matches(object));
}
Ok(objects)
}
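
The From<&FilterElement> impl above only lifts the first VEVENT/VTODO time-range into the coarse store-level CalendarQuery; anything finer is re-checked by filter.matches in get_objects_calendar_query. A hedged sketch of that mapping, written as an in-module test because the element structs are pub(crate); it assumes CalendarQuery derives PartialEq, as the existing tests elsewhere in this compare suggest.

#[cfg(test)]
mod time_range_mapping {
    use super::*;
    use chrono::{NaiveDate, TimeZone, Utc};
    use rustical_ical::UtcDateTime;
    use rustical_store::calendar_store::CalendarQuery;

    #[test]
    fn vevent_time_range_becomes_calendar_query() {
        // A VCALENDAR comp-filter wrapping a VEVENT comp-filter with a time-range.
        let filter = FilterElement {
            comp_filter: CompFilterElement {
                name: "VCALENDAR".to_string(),
                is_not_defined: None,
                time_range: None,
                prop_filter: vec![],
                comp_filter: vec![CompFilterElement {
                    name: "VEVENT".to_string(),
                    is_not_defined: None,
                    time_range: Some(TimeRangeElement {
                        start: Some(UtcDateTime(
                            Utc.with_ymd_and_hms(2024, 4, 1, 0, 0, 0).unwrap(),
                        )),
                        end: Some(UtcDateTime(
                            Utc.with_ymd_and_hms(2024, 8, 1, 0, 0, 0).unwrap(),
                        )),
                    }),
                    prop_filter: vec![],
                    comp_filter: vec![],
                }],
            },
        };
        // Only the date parts survive into the store query.
        assert_eq!(
            CalendarQuery::from(&filter),
            CalendarQuery {
                time_start: Some(NaiveDate::from_ymd_opt(2024, 4, 1).unwrap()),
                time_end: Some(NaiveDate::from_ymd_opt(2024, 8, 1).unwrap()),
            }
        );
    }
}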

View File

@@ -1,345 +0,0 @@
use crate::calendar::methods::report::calendar_query::{
TimeRangeElement,
prop_filter::{PropFilterElement, PropFilterable},
};
use ical::parser::ical::component::IcalTimeZone;
use rustical_ical::{CalendarObject, CalendarObjectComponent, CalendarObjectType};
use rustical_xml::XmlDeserialize;
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.1
pub struct CompFilterElement {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) is_not_defined: Option<()>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) time_range: Option<TimeRangeElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
pub(crate) prop_filter: Vec<PropFilterElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
pub(crate) comp_filter: Vec<CompFilterElement>,
#[xml(ty = "attr")]
pub(crate) name: String,
}
pub trait CompFilterable: PropFilterable + Sized {
fn get_comp_name(&self) -> &'static str;
fn match_time_range(&self, time_range: &TimeRangeElement) -> bool;
fn match_subcomponents(&self, comp_filter: &CompFilterElement) -> bool;
// https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.1
// The scope of the
// CALDAV:comp-filter XML element is the calendar object when used as
// a child of the CALDAV:filter XML element. The scope of the
// CALDAV:comp-filter XML element is the enclosing calendar component
// when used as a child of another CALDAV:comp-filter XML element
fn matches(&self, comp_filter: &CompFilterElement) -> bool {
let name_matches = self.get_comp_name() == comp_filter.name;
match (comp_filter.is_not_defined.is_some(), name_matches) {
// We are the component that's not supposed to be defined
(true, true)
// We don't match
| (false, false) => return false,
// We shall not be and indeed we aren't
(true, false) => return true,
_ => {}
}
if let Some(time_range) = comp_filter.time_range.as_ref()
&& !self.match_time_range(time_range)
{
return false;
}
for prop_filter in &comp_filter.prop_filter {
if !prop_filter.match_component(self) {
return false;
}
}
comp_filter
.comp_filter
.iter()
.all(|filter| self.match_subcomponents(filter))
}
}
impl CompFilterable for CalendarObject {
fn get_comp_name(&self) -> &'static str {
"VCALENDAR"
}
fn match_time_range(&self, _time_range: &TimeRangeElement) -> bool {
// VCALENDAR has no concept of time range
false
}
fn match_subcomponents(&self, comp_filter: &CompFilterElement) -> bool {
let mut matches = self
.get_vtimezones()
.values()
.map(|tz| tz.matches(comp_filter))
.chain([self.get_data().matches(comp_filter)]);
if comp_filter.is_not_defined.is_some() {
matches.all(|x| x)
} else {
matches.any(|x| x)
}
}
}
impl CompFilterable for IcalTimeZone {
fn get_comp_name(&self) -> &'static str {
"VTIMEZONE"
}
fn match_time_range(&self, _time_range: &TimeRangeElement) -> bool {
false
}
fn match_subcomponents(&self, _comp_filter: &CompFilterElement) -> bool {
true
}
}
impl CompFilterable for CalendarObjectComponent {
fn get_comp_name(&self) -> &'static str {
CalendarObjectType::from(self).as_str()
}
fn match_time_range(&self, time_range: &TimeRangeElement) -> bool {
if let Some(start) = &time_range.start
&& let Some(last_occurence) = self.get_last_occurence().unwrap_or(None)
&& **start > last_occurence.utc()
{
return false;
}
if let Some(end) = &time_range.end
&& let Some(first_occurence) = self.get_first_occurence().unwrap_or(None)
&& **end < first_occurence.utc()
{
return false;
}
true
}
fn match_subcomponents(&self, _comp_filter: &CompFilterElement) -> bool {
// TODO: Properly check subcomponents
true
}
}
#[cfg(test)]
mod tests {
use chrono::{TimeZone, Utc};
use rustical_ical::{CalendarObject, UtcDateTime};
use crate::calendar::methods::report::calendar_query::{
CompFilterable, TextMatchElement, TimeRangeElement,
comp_filter::CompFilterElement,
prop_filter::PropFilterElement,
text_match::{NegateCondition, TextCollation},
};
const ICS: &str = r"BEGIN:VCALENDAR
CALSCALE:GREGORIAN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
X-LIC-LOCATION:Europe/Berlin
END:VTIMEZONE
BEGIN:VEVENT
UID:318ec6503573d9576818daf93dac07317058d95c
DTSTAMP:20250502T132758Z
DTSTART;TZID=Europe/Berlin:20250506T090000
DTEND;TZID=Europe/Berlin:20250506T092500
SEQUENCE:2
SUMMARY:weekly stuff
TRANSP:OPAQUE
RRULE:FREQ=WEEKLY;COUNT=4;INTERVAL=2;BYDAY=TU,TH,SU
END:VEVENT
END:VCALENDAR";
#[test]
fn test_comp_filter_matching() {
let object = CalendarObject::from_ics(ICS.to_string(), None).unwrap();
let comp_filter = CompFilterElement {
is_not_defined: Some(()),
name: "VCALENDAR".to_string(),
time_range: None,
prop_filter: vec![],
comp_filter: vec![],
};
assert!(!object.matches(&comp_filter), "filter: wants no VCALENDAR");
let comp_filter = CompFilterElement {
is_not_defined: None,
name: "VCALENDAR".to_string(),
time_range: None,
prop_filter: vec![],
comp_filter: vec![CompFilterElement {
name: "VTODO".to_string(),
is_not_defined: None,
time_range: None,
prop_filter: vec![],
comp_filter: vec![],
}],
};
assert!(!object.matches(&comp_filter), "filter matches VTODO");
let comp_filter = CompFilterElement {
is_not_defined: None,
name: "VCALENDAR".to_string(),
time_range: None,
prop_filter: vec![],
comp_filter: vec![CompFilterElement {
name: "VEVENT".to_string(),
is_not_defined: None,
time_range: None,
prop_filter: vec![],
comp_filter: vec![],
}],
};
assert!(object.matches(&comp_filter), "filter matches VEVENT");
let comp_filter = CompFilterElement {
is_not_defined: None,
name: "VCALENDAR".to_string(),
time_range: None,
prop_filter: vec![
PropFilterElement {
is_not_defined: None,
name: "VERSION".to_string(),
time_range: None,
text_match: Some(TextMatchElement {
needle: "2.0".to_string(),
collation: TextCollation::default(),
negate_condition: NegateCondition::default(),
}),
param_filter: vec![],
},
PropFilterElement {
is_not_defined: Some(()),
name: "STUFF".to_string(),
time_range: None,
text_match: None,
param_filter: vec![],
},
],
comp_filter: vec![CompFilterElement {
name: "VEVENT".to_string(),
is_not_defined: None,
time_range: None,
prop_filter: vec![PropFilterElement {
is_not_defined: None,
name: "SUMMARY".to_string(),
time_range: None,
text_match: Some(TextMatchElement {
collation: TextCollation::default(),
negate_condition: NegateCondition(false),
needle: "weekly".to_string(),
}),
param_filter: vec![],
}],
comp_filter: vec![],
}],
};
assert!(
object.matches(&comp_filter),
"Some prop filters on VCALENDAR and VEVENT"
);
}
#[test]
fn test_comp_filter_time_range() {
let object = CalendarObject::from_ics(ICS.to_string(), None).unwrap();
let comp_filter = CompFilterElement {
is_not_defined: None,
name: "VCALENDAR".to_string(),
time_range: None,
prop_filter: vec![],
comp_filter: vec![CompFilterElement {
name: "VEVENT".to_string(),
is_not_defined: None,
time_range: Some(TimeRangeElement {
start: Some(UtcDateTime(
Utc.with_ymd_and_hms(2025, 4, 1, 0, 0, 0).unwrap(),
)),
end: Some(UtcDateTime(
Utc.with_ymd_and_hms(2025, 8, 1, 0, 0, 0).unwrap(),
)),
}),
prop_filter: vec![],
comp_filter: vec![],
}],
};
assert!(
object.matches(&comp_filter),
"event should lie in time range"
);
let comp_filter = CompFilterElement {
is_not_defined: None,
name: "VCALENDAR".to_string(),
time_range: None,
prop_filter: vec![],
comp_filter: vec![CompFilterElement {
name: "VEVENT".to_string(),
is_not_defined: None,
time_range: Some(TimeRangeElement {
start: Some(UtcDateTime(
Utc.with_ymd_and_hms(2024, 4, 1, 0, 0, 0).unwrap(),
)),
end: Some(UtcDateTime(
Utc.with_ymd_and_hms(2024, 8, 1, 0, 0, 0).unwrap(),
)),
}),
prop_filter: vec![],
comp_filter: vec![],
}],
};
assert!(
!object.matches(&comp_filter),
"event should not lie in time range"
);
}
#[test]
fn test_match_timezone() {
let object = CalendarObject::from_ics(ICS.to_string(), None).unwrap();
let comp_filter = CompFilterElement {
is_not_defined: None,
name: "VCALENDAR".to_string(),
time_range: None,
prop_filter: vec![],
comp_filter: vec![CompFilterElement {
name: "VTIMEZONE".to_string(),
is_not_defined: None,
time_range: None,
prop_filter: vec![PropFilterElement {
is_not_defined: None,
name: "TZID".to_string(),
time_range: None,
text_match: Some(TextMatchElement {
collation: TextCollation::AsciiCasemap,
negate_condition: NegateCondition::default(),
needle: "Europe/Berlin".to_string(),
}),
param_filter: vec![],
}],
comp_filter: vec![],
}],
};
assert!(
object.matches(&comp_filter),
"Timezone should be Europe/Berlin"
);
}
}

View File

@@ -1,132 +0,0 @@
use crate::{
calendar::methods::report::calendar_query::{
TextMatchElement,
comp_filter::{CompFilterElement, CompFilterable},
},
calendar_object::CalendarObjectPropWrapperName,
};
use rustical_dav::xml::PropfindType;
use rustical_ical::{CalendarObject, UtcDateTime};
use rustical_store::calendar_store::CalendarQuery;
use rustical_xml::XmlDeserialize;
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
pub struct TimeRangeElement {
#[xml(ty = "attr")]
pub(crate) start: Option<UtcDateTime>,
#[xml(ty = "attr")]
pub(crate) end: Option<UtcDateTime>,
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
// https://www.rfc-editor.org/rfc/rfc4791#section-9.7.3
pub struct ParamFilterElement {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) is_not_defined: Option<()>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) text_match: Option<TextMatchElement>,
#[xml(ty = "attr")]
pub(crate) name: String,
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// https://datatracker.ietf.org/doc/html/rfc4791#section-9.7
pub struct FilterElement {
// This comp-filter matches on VCALENDAR
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) comp_filter: CompFilterElement,
}
impl FilterElement {
#[must_use]
pub fn matches(&self, cal_object: &CalendarObject) -> bool {
cal_object.matches(&self.comp_filter)
}
}
impl From<&FilterElement> for CalendarQuery {
fn from(value: &FilterElement) -> Self {
let comp_filter_vcalendar = &value.comp_filter;
for comp_filter in &comp_filter_vcalendar.comp_filter {
// A calendar object cannot contain both VEVENT and VTODO, so we only have to handle
// whatever we get first
if matches!(comp_filter.name.as_str(), "VEVENT" | "VTODO")
&& let Some(time_range) = &comp_filter.time_range
{
let start = time_range.start.as_ref().map(|start| start.date_naive());
let end = time_range.end.as_ref().map(|end| end.date_naive());
return Self {
time_start: start,
time_end: end,
};
}
}
Self::default()
}
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// <!ELEMENT calendar-query ((DAV:allprop | DAV:propname | DAV:prop)?, filter, timezone?)>
pub struct CalendarQueryRequest {
#[xml(ty = "untagged")]
pub prop: PropfindType<CalendarObjectPropWrapperName>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) filter: Option<FilterElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) timezone: Option<String>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) timezone_id: Option<String>,
}
impl From<&CalendarQueryRequest> for CalendarQuery {
fn from(value: &CalendarQueryRequest) -> Self {
value.filter.as_ref().map(Self::from).unwrap_or_default()
}
}
#[cfg(test)]
mod tests {
use crate::calendar::methods::report::calendar_query::{
CompFilterElement, FilterElement, TimeRangeElement,
};
use chrono::{NaiveDate, TimeZone, Utc};
use rustical_ical::UtcDateTime;
use rustical_store::calendar_store::CalendarQuery;
#[test]
fn test_filter_element_calendar_query() {
let filter = FilterElement {
comp_filter: CompFilterElement {
name: "VCALENDAR".to_string(),
is_not_defined: None,
time_range: None,
prop_filter: vec![],
comp_filter: vec![CompFilterElement {
name: "VEVENT".to_string(),
is_not_defined: None,
time_range: Some(TimeRangeElement {
start: Some(UtcDateTime(
Utc.with_ymd_and_hms(2024, 4, 1, 0, 0, 0).unwrap(),
)),
end: Some(UtcDateTime(
Utc.with_ymd_and_hms(2024, 8, 1, 0, 0, 0).unwrap(),
)),
}),
prop_filter: vec![],
comp_filter: vec![],
}],
},
};
let derived_query: CalendarQuery = (&filter).into();
let query = CalendarQuery {
time_start: Some(NaiveDate::from_ymd_opt(2024, 4, 1).unwrap()),
time_end: Some(NaiveDate::from_ymd_opt(2024, 8, 1).unwrap()),
};
assert_eq!(derived_query, query);
}
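
    // A minimal sketch using only the fields shown above: a filter whose VCALENDAR
    // comp-filter carries no VEVENT/VTODO time-range derives the default (unbounded)
    // CalendarQuery, so the store-side time narrowing is effectively skipped.
    #[test]
    fn test_filter_element_without_time_range() {
        let filter = FilterElement {
            comp_filter: CompFilterElement {
                name: "VCALENDAR".to_string(),
                is_not_defined: None,
                time_range: None,
                prop_filter: vec![],
                comp_filter: vec![],
            },
        };
        let derived_query: CalendarQuery = (&filter).into();
        assert_eq!(derived_query, CalendarQuery::default());
    }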
}

View File

@@ -1,133 +0,0 @@
use crate::Error;
use rustical_ical::CalendarObject;
use rustical_store::CalendarStore;
mod comp_filter;
mod elements;
mod prop_filter;
pub mod text_match;
#[allow(unused_imports)]
pub use comp_filter::{CompFilterElement, CompFilterable};
pub use elements::*;
#[allow(unused_imports)]
pub use prop_filter::{PropFilterElement, PropFilterable};
#[allow(unused_imports)]
pub use text_match::TextMatchElement;
pub async fn get_objects_calendar_query<C: CalendarStore>(
cal_query: &CalendarQueryRequest,
principal: &str,
cal_id: &str,
store: &C,
) -> Result<Vec<CalendarObject>, Error> {
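    // Two-stage filtering: the store first narrows results with the coarse time-range
    // query derived from the request (see `From<&CalendarQueryRequest> for CalendarQuery`),
    // then the full RFC 4791 filter is re-applied in memory so comp-, prop- and
    // text-match rules are honoured as well.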
let mut objects = store
.calendar_query(principal, cal_id, cal_query.into())
.await?;
if let Some(filter) = &cal_query.filter {
objects.retain(|object| filter.matches(object));
}
Ok(objects)
}
#[cfg(test)]
mod tests {
use rustical_dav::xml::PropElement;
use rustical_xml::XmlDocument;
use crate::{
calendar::methods::report::{
ReportRequest,
calendar_query::{
CalendarQueryRequest, FilterElement, ParamFilterElement, TextMatchElement,
comp_filter::CompFilterElement,
prop_filter::PropFilterElement,
text_match::{NegateCondition, TextCollation},
},
},
calendar_object::{CalendarData, CalendarObjectPropName, CalendarObjectPropWrapperName},
};
#[test]
fn calendar_query_7_8_7() {
const INPUT: &str = r#"
<?xml version="1.0" encoding="utf-8" ?>
<C:calendar-query xmlns:C="urn:ietf:params:xml:ns:caldav">
<D:prop xmlns:D="DAV:">
<D:getetag/>
<C:calendar-data/>
</D:prop>
<C:filter>
<C:comp-filter name="VCALENDAR">
<C:comp-filter name="VEVENT">
<C:prop-filter name="ATTENDEE">
<C:text-match collation="i;ascii-casemap">mailto:lisa@example.com</C:text-match>
<C:param-filter name="PARTSTAT">
<C:text-match collation="i;ascii-casemap">NEEDS-ACTION</C:text-match>
</C:param-filter>
</C:prop-filter>
</C:comp-filter>
</C:comp-filter>
</C:filter>
</C:calendar-query>
"#;
let report = ReportRequest::parse_str(INPUT).unwrap();
let calendar_query: CalendarQueryRequest =
if let ReportRequest::CalendarQuery(query) = report {
query
} else {
panic!()
};
assert_eq!(
calendar_query,
CalendarQueryRequest {
prop: rustical_dav::xml::PropfindType::Prop(PropElement(
vec![
CalendarObjectPropWrapperName::CalendarObject(
CalendarObjectPropName::Getetag,
),
CalendarObjectPropWrapperName::CalendarObject(
CalendarObjectPropName::CalendarData(CalendarData::default())
),
],
vec![]
)),
filter: Some(FilterElement {
comp_filter: CompFilterElement {
is_not_defined: None,
time_range: None,
prop_filter: vec![],
comp_filter: vec![CompFilterElement {
prop_filter: vec![PropFilterElement {
name: "ATTENDEE".to_owned(),
text_match: Some(TextMatchElement {
collation: TextCollation::AsciiCasemap,
negate_condition: NegateCondition(false),
needle: "mailto:lisa@example.com".to_string()
}),
is_not_defined: None,
param_filter: vec![ParamFilterElement {
is_not_defined: None,
name: "PARTSTAT".to_owned(),
text_match: Some(TextMatchElement {
collation: TextCollation::AsciiCasemap,
negate_condition: NegateCondition(false),
needle: "NEEDS-ACTION".to_string()
}),
}],
time_range: None
}],
comp_filter: vec![],
is_not_defined: None,
name: "VEVENT".to_owned(),
time_range: None
}],
name: "VCALENDAR".to_owned()
}
}),
timezone: None,
timezone_id: None
}
);
}
}

View File

@@ -1,127 +0,0 @@
use std::collections::HashMap;
use ical::{
generator::{IcalCalendar, IcalEvent},
parser::{
Component,
ical::component::{IcalJournal, IcalTimeZone, IcalTodo},
},
property::Property,
};
use rustical_ical::{CalDateTime, CalendarObject, CalendarObjectComponent, UtcDateTime};
use rustical_xml::XmlDeserialize;
use crate::calendar::methods::report::calendar_query::{
ParamFilterElement, TextMatchElement, TimeRangeElement,
};
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
// https://www.rfc-editor.org/rfc/rfc4791#section-9.7.2
pub struct PropFilterElement {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) is_not_defined: Option<()>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) time_range: Option<TimeRangeElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
pub(crate) text_match: Option<TextMatchElement>,
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
pub(crate) param_filter: Vec<ParamFilterElement>,
#[xml(ty = "attr")]
pub(crate) name: String,
}
impl PropFilterElement {
pub fn match_component(&self, comp: &impl PropFilterable) -> bool {
let property = comp.get_property(&self.name);
let property = match (self.is_not_defined.is_some(), property) {
// The property that must not be defined is present: no match
(true, Some(_))
// The property we filter on is missing: no match
| (false, None) => return false,
// The property must be absent and indeed it is
(true, None) => return true,
(false, Some(property)) => property
};
if let Some(TimeRangeElement { start, end }) = &self.time_range {
// TODO: Respect timezones
let Ok(timestamp) = CalDateTime::parse_prop(property, &HashMap::default()) else {
return false;
};
let timestamp = timestamp.utc();
if let Some(UtcDateTime(start)) = start
&& start > &timestamp
{
return false;
}
if let Some(UtcDateTime(end)) = end
&& end < &timestamp
{
return false;
}
return true;
}
if let Some(text_match) = &self.text_match
&& !text_match.match_property(property)
{
return false;
}
// TODO: param-filter
true
}
}
pub trait PropFilterable {
fn get_property(&self, name: &str) -> Option<&Property>;
}
impl PropFilterable for CalendarObject {
fn get_property(&self, name: &str) -> Option<&Property> {
Self::get_property(self, name)
}
}
impl PropFilterable for IcalEvent {
fn get_property(&self, name: &str) -> Option<&Property> {
Component::get_property(self, name)
}
}
impl PropFilterable for IcalTodo {
fn get_property(&self, name: &str) -> Option<&Property> {
Component::get_property(self, name)
}
}
impl PropFilterable for IcalJournal {
fn get_property(&self, name: &str) -> Option<&Property> {
Component::get_property(self, name)
}
}
impl PropFilterable for IcalCalendar {
fn get_property(&self, name: &str) -> Option<&Property> {
Component::get_property(self, name)
}
}
impl PropFilterable for IcalTimeZone {
fn get_property(&self, name: &str) -> Option<&Property> {
Component::get_property(self, name)
}
}
impl PropFilterable for CalendarObjectComponent {
fn get_property(&self, name: &str) -> Option<&Property> {
match self {
Self::Event(event, _) => PropFilterable::get_property(&event.event, name),
Self::Todo(todo, _) => PropFilterable::get_property(todo, name),
Self::Journal(journal, _) => PropFilterable::get_property(journal, name),
}
}
}
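
// A minimal sketch: exercising `PropFilterElement::match_component` against a
// hand-rolled `PropFilterable` so the text-match branch is visible without parsing a
// full iCalendar document. `SingleProp` is an illustrative stand-in, not a type from
// the crate.
#[cfg(test)]
mod prop_filter_sketch {
    use super::{PropFilterElement, PropFilterable};
    use crate::calendar::methods::report::calendar_query::{
        TextMatchElement,
        text_match::{NegateCondition, TextCollation},
    };
    use ical::property::Property;

    // Stand-in component exposing a single property.
    struct SingleProp(Property);

    impl PropFilterable for SingleProp {
        fn get_property(&self, name: &str) -> Option<&Property> {
            (self.0.name == name).then_some(&self.0)
        }
    }

    #[test]
    fn matches_attendee_case_insensitively() {
        let comp = SingleProp(Property {
            name: "ATTENDEE".to_owned(),
            params: None,
            value: Some("mailto:Lisa@example.com".to_owned()),
        });
        let filter = PropFilterElement {
            is_not_defined: None,
            time_range: None,
            text_match: Some(TextMatchElement {
                collation: TextCollation::AsciiCasemap,
                negate_condition: NegateCondition(false),
                needle: "lisa@example.com".to_owned(),
            }),
            param_filter: vec![],
            name: "ATTENDEE".to_owned(),
        };
        assert!(filter.match_component(&comp));
    }
}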

View File

@@ -1,103 +0,0 @@
use ical::property::Property;
use rustical_xml::{ValueDeserialize, XmlDeserialize};
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub enum TextCollation {
#[default]
AsciiCasemap,
Octet,
}
impl TextCollation {
// Check whether a haystack contains a needle respecting the collation
#[must_use]
pub fn match_text(&self, needle: &str, haystack: &str) -> bool {
match self {
// https://datatracker.ietf.org/doc/html/rfc4790#section-9.2
Self::AsciiCasemap => haystack
.to_ascii_uppercase()
.contains(&needle.to_ascii_uppercase()),
Self::Octet => haystack.contains(needle),
}
}
}
impl AsRef<str> for TextCollation {
fn as_ref(&self) -> &str {
match self {
Self::AsciiCasemap => "i;ascii-casemap",
Self::Octet => "i;octet",
}
}
}
impl ValueDeserialize for TextCollation {
fn deserialize(val: &str) -> Result<Self, rustical_xml::XmlError> {
match val {
"i;ascii-casemap" => Ok(Self::AsciiCasemap),
"i;octet" => Ok(Self::Octet),
_ => Err(rustical_xml::XmlError::InvalidVariant(format!(
"Invalid collation: {val}"
))),
}
}
}
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub struct NegateCondition(pub bool);
impl ValueDeserialize for NegateCondition {
fn deserialize(val: &str) -> Result<Self, rustical_xml::XmlError> {
match val {
"yes" => Ok(Self(true)),
"no" => Ok(Self(false)),
_ => Err(rustical_xml::XmlError::InvalidVariant(format!(
"Invalid negate-condition parameter: {val}"
))),
}
}
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
pub struct TextMatchElement {
#[xml(ty = "attr", default = "Default::default")]
pub collation: TextCollation,
#[xml(ty = "attr", default = "Default::default")]
pub(crate) negate_condition: NegateCondition,
#[xml(ty = "text")]
pub(crate) needle: String,
}
impl TextMatchElement {
#[must_use]
pub fn match_property(&self, property: &Property) -> bool {
let Self {
collation,
negate_condition,
needle,
} = self;
let matches = property
.value
.as_ref()
.is_some_and(|haystack| collation.match_text(needle, haystack));
// XOR
negate_condition.0 ^ matches
}
}
#[cfg(test)]
mod tests {
use crate::calendar::methods::report::calendar_query::text_match::TextCollation;
#[test]
fn test_collation() {
assert!(TextCollation::AsciiCasemap.match_text("GrüN", "grün"));
assert!(!TextCollation::AsciiCasemap.match_text("GrÜN", "grün"));
assert!(!TextCollation::Octet.match_text("GrÜN", "grün"));
assert!(TextCollation::Octet.match_text("hallo", "hallo"));
assert!(TextCollation::AsciiCasemap.match_text("HaLlo", "hAllo"));
}
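
    // Sketch: negate-condition="yes" inverts the result of the collation match (the XOR
    // above). The `Property` literal mirrors the fields of `ical::property::Property`
    // and is for illustration only.
    #[test]
    fn test_negate_condition() {
        use crate::calendar::methods::report::calendar_query::text_match::{
            NegateCondition, TextMatchElement,
        };
        use ical::property::Property;

        let property = Property {
            name: "SUMMARY".to_owned(),
            params: None,
            value: Some("Weekly sync".to_owned()),
        };
        let positive = TextMatchElement {
            collation: TextCollation::AsciiCasemap,
            negate_condition: NegateCondition(false),
            needle: "weekly".to_owned(),
        };
        let negated = TextMatchElement {
            negate_condition: NegateCondition(true),
            ..positive.clone()
        };
        assert!(positive.match_property(&property));
        assert!(!negated.match_property(&property));
    }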
}

View File

@@ -27,7 +27,7 @@ use sync_collection::handle_sync_collection;
use tracing::instrument;
mod calendar_multiget;
pub mod calendar_query;
mod calendar_query;
mod sync_collection;
#[derive(XmlDeserialize, XmlDocument, Clone, Debug, PartialEq)]
@@ -41,11 +41,11 @@ pub(crate) enum ReportRequest {
}
impl ReportRequest {
const fn props(&self) -> &PropfindType<CalendarObjectPropWrapperName> {
fn props(&self) -> &PropfindType<CalendarObjectPropWrapperName> {
match &self {
Self::CalendarMultiget(CalendarMultigetRequest { prop, .. })
| Self::CalendarQuery(CalendarQueryRequest { prop, .. })
| Self::SyncCollection(SyncCollectionRequest { prop, .. }) => prop,
ReportRequest::CalendarMultiget(CalendarMultigetRequest { prop, .. }) => prop,
ReportRequest::CalendarQuery(CalendarQueryRequest { prop, .. }) => prop,
ReportRequest::SyncCollection(SyncCollectionRequest { prop, .. }) => prop,
}
}
}
@@ -184,7 +184,7 @@ mod tests {
"/caldav/user/user/6f787542-5256-401a-8db97003260da/ae7a998fdfd1d84a20391168962c62b".to_owned()
]
})
);
)
}
#[test]
@@ -241,7 +241,7 @@ mod tests {
timezone: None,
timezone_id: None,
})
);
)
}
#[test]
@@ -269,6 +269,6 @@ mod tests {
"/caldav/user/user/6f787542-5256-401a-8db97003260da/ae7a998fdfd1d84a20391168962c62b".to_owned()
]
})
);
)
}
}

View File

@@ -4,6 +4,3 @@ pub mod resource;
mod service;
pub use service::CalendarResourceService;
#[cfg(test)]
pub mod tests;

View File

@@ -3,15 +3,13 @@ use rustical_ical::CalendarObjectType;
use rustical_xml::{XmlDeserialize, XmlSerialize};
use strum_macros::VariantArray;
use crate::calendar::methods::report::calendar_query::text_match::TextCollation;
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, Eq, From, Into)]
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, From, Into)]
pub struct SupportedCalendarComponent {
#[xml(ty = "attr")]
pub name: CalendarObjectType,
}
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, Eq)]
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq)]
pub struct SupportedCalendarComponentSet {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
pub comp: Vec<SupportedCalendarComponent>,
@@ -38,29 +36,7 @@ impl From<SupportedCalendarComponentSet> for Vec<CalendarObjectType> {
}
}
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, Eq, From, Into)]
pub struct SupportedCollation(#[xml(ty = "text")] pub TextCollation);
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, Eq)]
pub struct SupportedCollationSet(
#[xml(
ns = "rustical_dav::namespace::NS_CALDAV",
flatten,
rename = "supported-collation"
)]
pub Vec<SupportedCollation>,
);
impl Default for SupportedCollationSet {
fn default() -> Self {
Self(vec![
SupportedCollation(TextCollation::AsciiCasemap),
SupportedCollation(TextCollation::Octet),
])
}
}
#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
#[derive(Debug, Clone, XmlSerialize, PartialEq)]
pub struct CalendarData {
#[xml(ty = "attr")]
content_type: String,
@@ -77,13 +53,13 @@ impl Default for CalendarData {
}
}
#[derive(Debug, Clone, XmlSerialize, Default, PartialEq, Eq)]
#[derive(Debug, Clone, XmlSerialize, Default, PartialEq)]
pub struct SupportedCalendarData {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
calendar_data: CalendarData,
}
#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq, VariantArray)]
#[derive(Debug, Clone, XmlSerialize, PartialEq, VariantArray)]
pub enum ReportMethod {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
CalendarQuery,

View File

@@ -1,9 +1,8 @@
use super::prop::{SupportedCalendarComponentSet, SupportedCalendarData};
use crate::Error;
use crate::calendar::prop::{ReportMethod, SupportedCollationSet};
use crate::calendar::prop::ReportMethod;
use chrono::{DateTime, Utc};
use derive_more::derive::{From, Into};
use ical::IcalParser;
use rustical_dav::extensions::{
CommonPropertiesExtension, CommonPropertiesProp, SyncTokenExtension, SyncTokenExtensionProp,
};
@@ -16,9 +15,9 @@ use rustical_store::Calendar;
use rustical_store::auth::Principal;
use rustical_xml::{EnumVariants, PropName};
use rustical_xml::{XmlDeserialize, XmlSerialize};
use serde::Deserialize;
use std::str::FromStr;
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "CalendarPropName")]
pub enum CalendarProp {
// CalDAV (RFC 4791)
@@ -35,12 +34,10 @@ pub enum CalendarProp {
CalendarTimezoneId(Option<String>),
#[xml(ns = "rustical_dav::namespace::NS_ICAL")]
CalendarOrder(Option<i64>),
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", skip_deserializing)]
SupportedCalendarComponentSet(SupportedCalendarComponentSet),
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", skip_deserializing)]
SupportedCalendarData(SupportedCalendarData),
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", skip_deserializing)]
SupportedCollationSet(SupportedCollationSet),
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
MaxResourceSize(i64),
#[xml(skip_deserializing)]
@@ -56,7 +53,7 @@ pub enum CalendarProp {
MaxDateTime(String),
}
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "CalendarPropWrapperName", untagged)]
pub enum CalendarPropWrapper {
Calendar(CalendarProp),
@@ -65,7 +62,7 @@ pub enum CalendarPropWrapper {
Common(CommonPropertiesProp),
}
#[derive(Clone, Debug, From, Into, Deserialize)]
#[derive(Clone, Debug, From, Into)]
pub struct CalendarResource {
pub cal: Calendar,
pub read_only: bool,
@@ -73,7 +70,7 @@ pub struct CalendarResource {
impl ResourceName for CalendarResource {
fn get_name(&self) -> String {
self.cal.id.clone()
self.cal.id.to_owned()
}
}
@@ -91,7 +88,7 @@ impl SyncTokenExtension for CalendarResource {
impl DavPushExtension for CalendarResource {
fn get_topic(&self) -> String {
self.cal.push_topic.clone()
self.cal.push_topic.to_owned()
}
}
@@ -130,17 +127,13 @@ impl Resource for CalendarResource {
Ok(match prop {
CalendarPropWrapperName::Calendar(prop) => CalendarPropWrapper::Calendar(match prop {
CalendarPropName::CalendarColor => {
CalendarProp::CalendarColor(self.cal.meta.color.clone())
CalendarProp::CalendarColor(self.cal.color.clone())
}
CalendarPropName::CalendarDescription => {
CalendarProp::CalendarDescription(self.cal.meta.description.clone())
CalendarProp::CalendarDescription(self.cal.description.clone())
}
CalendarPropName::CalendarTimezone => {
CalendarProp::CalendarTimezone(self.cal.timezone_id.as_ref().and_then(|tzid| {
vtimezones_rs::VTIMEZONES
.get(tzid)
.map(|tz| (*tz).to_string())
}))
CalendarProp::CalendarTimezone(self.cal.timezone.clone())
}
// chrono_tz uses the IANA database
CalendarPropName::TimezoneServiceSet => CalendarProp::TimezoneServiceSet(
@@ -150,7 +143,7 @@ impl Resource for CalendarResource {
CalendarProp::CalendarTimezoneId(self.cal.timezone_id.clone())
}
CalendarPropName::CalendarOrder => {
CalendarProp::CalendarOrder(Some(self.cal.meta.order))
CalendarProp::CalendarOrder(Some(self.cal.order))
}
CalendarPropName::SupportedCalendarComponentSet => {
CalendarProp::SupportedCalendarComponentSet(self.cal.components.clone().into())
@@ -158,16 +151,13 @@ impl Resource for CalendarResource {
CalendarPropName::SupportedCalendarData => {
CalendarProp::SupportedCalendarData(SupportedCalendarData::default())
}
CalendarPropName::SupportedCollationSet => {
CalendarProp::SupportedCollationSet(SupportedCollationSet::default())
}
CalendarPropName::MaxResourceSize => CalendarProp::MaxResourceSize(10_000_000),
CalendarPropName::MaxResourceSize => CalendarProp::MaxResourceSize(10000000),
CalendarPropName::SupportedReportSet => {
CalendarProp::SupportedReportSet(SupportedReportSet::all())
}
CalendarPropName::Source => {
CalendarProp::Source(self.cal.subscription_url.clone().map(HrefElement::from))
}
CalendarPropName::Source => CalendarProp::Source(
self.cal.subscription_url.to_owned().map(HrefElement::from),
),
CalendarPropName::MinDateTime => {
CalendarProp::MinDateTime(CalDateTime::from(DateTime::<Utc>::MIN_UTC).format())
}
@@ -194,67 +184,45 @@ impl Resource for CalendarResource {
match prop {
CalendarPropWrapper::Calendar(prop) => match prop {
CalendarProp::CalendarColor(color) => {
self.cal.meta.color = color;
self.cal.color = color;
Ok(())
}
CalendarProp::CalendarDescription(description) => {
self.cal.meta.description = description;
self.cal.description = description;
Ok(())
}
CalendarProp::CalendarTimezone(timezone) => {
if let Some(tz) = timezone {
// TODO: Proper error (calendar-timezone precondition)
let calendar = IcalParser::new(tz.as_bytes())
.next()
.ok_or_else(|| {
rustical_dav::Error::BadRequest(
"No timezone data provided".to_owned(),
)
})?
.map_err(|_| {
rustical_dav::Error::BadRequest(
"No timezone data provided".to_owned(),
)
})?;
let timezone = calendar.timezones.first().ok_or_else(|| {
rustical_dav::Error::BadRequest("No timezone data provided".to_owned())
})?;
let timezone: chrono_tz::Tz = timezone.try_into().map_err(|_| {
rustical_dav::Error::BadRequest("No timezone data provided".to_owned())
})?;
self.cal.timezone_id = Some(timezone.name().to_owned());
}
// TODO: Ensure that timezone-id is also updated
self.cal.timezone = timezone;
Ok(())
}
CalendarProp::TimezoneServiceSet(_) => Err(rustical_dav::Error::PropReadOnly),
CalendarProp::CalendarTimezoneId(timezone_id) => {
if let Some(tzid) = &timezone_id
&& !vtimezones_rs::VTIMEZONES.contains_key(tzid)
{
return Err(rustical_dav::Error::BadRequest(format!(
"Invalid timezone-id: {tzid}"
)));
if let Some(tzid) = &timezone_id {
// Validate timezone id
chrono_tz::Tz::from_str(tzid).map_err(|_| {
rustical_dav::Error::BadRequest(format!("Invalid timezone-id: {tzid}"))
})?;
// TODO: Ensure that timezone is also updated (For now hope that clients play nice)
}
self.cal.timezone_id = timezone_id;
Ok(())
}
CalendarProp::CalendarOrder(order) => {
self.cal.meta.order = order.unwrap_or_default();
self.cal.order = order.unwrap_or_default();
Ok(())
}
CalendarProp::SupportedCalendarComponentSet(comp_set) => {
self.cal.components = comp_set.into();
Ok(())
}
CalendarProp::TimezoneServiceSet(_)
| CalendarProp::SupportedCalendarData(_)
| CalendarProp::SupportedCollationSet(_)
| CalendarProp::MaxResourceSize(_)
| CalendarProp::SupportedReportSet(_)
| CalendarProp::Source(_)
| CalendarProp::MinDateTime(_)
| CalendarProp::MaxDateTime(_) => Err(rustical_dav::Error::PropReadOnly),
CalendarProp::SupportedCalendarData(_) => Err(rustical_dav::Error::PropReadOnly),
CalendarProp::MaxResourceSize(_) => Err(rustical_dav::Error::PropReadOnly),
CalendarProp::SupportedReportSet(_) => Err(rustical_dav::Error::PropReadOnly),
// Converting between a subscription calendar and a normal calendar would be weird
CalendarProp::Source(_) => Err(rustical_dav::Error::PropReadOnly),
CalendarProp::MinDateTime(_) => Err(rustical_dav::Error::PropReadOnly),
CalendarProp::MaxDateTime(_) => Err(rustical_dav::Error::PropReadOnly),
},
CalendarPropWrapper::SyncToken(prop) => SyncTokenExtension::set_prop(self, prop),
CalendarPropWrapper::DavPush(prop) => DavPushExtension::set_prop(self, prop),
@@ -269,32 +237,36 @@ impl Resource for CalendarResource {
match prop {
CalendarPropWrapperName::Calendar(prop) => match prop {
CalendarPropName::CalendarColor => {
self.cal.meta.color = None;
self.cal.color = None;
Ok(())
}
CalendarPropName::CalendarDescription => {
self.cal.meta.description = None;
self.cal.description = None;
Ok(())
}
CalendarPropName::CalendarTimezone | CalendarPropName::CalendarTimezoneId => {
CalendarPropName::CalendarTimezone => {
self.cal.timezone = None;
Ok(())
}
CalendarPropName::TimezoneServiceSet => Err(rustical_dav::Error::PropReadOnly),
CalendarPropName::CalendarTimezoneId => {
self.cal.timezone_id = None;
Ok(())
}
CalendarPropName::CalendarOrder => {
self.cal.meta.order = 0;
self.cal.order = 0;
Ok(())
}
CalendarPropName::SupportedCalendarComponentSet => {
Err(rustical_dav::Error::PropReadOnly)
}
CalendarPropName::TimezoneServiceSet
| CalendarPropName::SupportedCalendarData
| CalendarPropName::SupportedCollationSet
| CalendarPropName::MaxResourceSize
| CalendarPropName::SupportedReportSet
| CalendarPropName::Source
| CalendarPropName::MinDateTime
| CalendarPropName::MaxDateTime => Err(rustical_dav::Error::PropReadOnly),
CalendarPropName::SupportedCalendarData => Err(rustical_dav::Error::PropReadOnly),
CalendarPropName::MaxResourceSize => Err(rustical_dav::Error::PropReadOnly),
CalendarPropName::SupportedReportSet => Err(rustical_dav::Error::PropReadOnly),
// Converting a subscription calendar into a normal calendar would be weird
CalendarPropName::Source => Err(rustical_dav::Error::PropReadOnly),
CalendarPropName::MinDateTime => Err(rustical_dav::Error::PropReadOnly),
CalendarPropName::MaxDateTime => Err(rustical_dav::Error::PropReadOnly),
},
CalendarPropWrapperName::SyncToken(prop) => SyncTokenExtension::remove_prop(self, prop),
CalendarPropWrapperName::DavPush(prop) => DavPushExtension::remove_prop(self, prop),
@@ -305,10 +277,10 @@ impl Resource for CalendarResource {
}
fn get_displayname(&self) -> Option<&str> {
self.cal.meta.displayname.as_deref()
self.cal.displayname.as_deref()
}
fn set_displayname(&mut self, name: Option<String>) -> Result<(), rustical_dav::Error> {
self.cal.meta.displayname = name;
self.cal.displayname = name;
Ok(())
}
@@ -317,26 +289,19 @@ impl Resource for CalendarResource {
}
fn get_user_privileges(&self, user: &Principal) -> Result<UserPrivilegeSet, Self::Error> {
if self.cal.subscription_url.is_some() || self.read_only {
if self.cal.subscription_url.is_some() {
return Ok(UserPrivilegeSet::owner_write_properties(
user.is_principal(&self.cal.principal),
));
}
if self.read_only {
return Ok(UserPrivilegeSet::owner_read(
user.is_principal(&self.cal.principal),
));
}
Ok(UserPrivilegeSet::owner_only(
user.is_principal(&self.cal.principal),
))
}
}
#[cfg(test)]
mod tests {
#[test]
fn test_tzdb_version() {
// Ensure that chrono_tz and vtimezones_rs use the same tzdb version
assert_eq!(
chrono_tz::IANA_TZDB_VERSION,
vtimezones_rs::IANA_TZDB_VERSION
);
}
}

View File

@@ -1,5 +1,4 @@
use crate::calendar::methods::get::route_get;
use crate::calendar::methods::import::route_import;
use crate::calendar::methods::mkcalendar::route_mkcalendar;
use crate::calendar::methods::post::route_post;
use crate::calendar::methods::report::route_report_calendar;
@@ -35,7 +34,7 @@ impl<C: CalendarStore, S: SubscriptionStore> Clone for CalendarResourceService<C
}
impl<C: CalendarStore, S: SubscriptionStore> CalendarResourceService<C, S> {
pub const fn new(cal_store: Arc<C>, sub_store: Arc<S>) -> Self {
pub fn new(cal_store: Arc<C>, sub_store: Arc<S>) -> Self {
Self {
cal_store,
sub_store,
@@ -52,7 +51,7 @@ impl<C: CalendarStore, S: SubscriptionStore> ResourceService for CalendarResourc
type Principal = Principal;
type PrincipalUri = CalDavPrincipalUri;
const DAV_HEADER: &str = "1, 3, access-control, calendar-access, webdav-push";
const DAV_HEADER: &str = "1, 3, access-control, calendar-access, calendar-proxy, webdav-push";
async fn get_resource(
&self,
@@ -139,13 +138,6 @@ impl<C: CalendarStore, S: SubscriptionStore> AxumMethods for CalendarResourceSer
})
}
fn import() -> Option<rustical_dav::resource::MethodFunction<Self>> {
Some(|state, req| {
let mut service = Handler::with_state(route_import::<C, S>, state);
Box::pin(Service::call(&mut service, req))
})
}
fn mkcalendar() -> Option<fn(Self, Request) -> BoxFuture<'static, Result<Response, Infallible>>>
{
Some(|state, req| {

View File

@@ -1,230 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<response xmlns="DAV:" xmlns:CAL="urn:ietf:params:xml:ns:caldav" xmlns:CARD="urn:ietf:params:xml:ns:carddav" xmlns:CS="http://calendarserver.org/ns/" xmlns:PUSH="https://bitfire.at/webdav-push">
<href>/caldav/principal/user/calendar/</href>
<propstat>
<prop>
<calendar-color xmlns="http://apple.com/ns/ical/"/>
<calendar-description xmlns="urn:ietf:params:xml:ns:caldav"/>
<calendar-timezone xmlns="urn:ietf:params:xml:ns:caldav"/>
<timezone-service-set xmlns="urn:ietf:params:xml:ns:caldav"/>
<calendar-timezone-id xmlns="urn:ietf:params:xml:ns:caldav"/>
<calendar-order xmlns="http://apple.com/ns/ical/"/>
<supported-calendar-component-set xmlns="urn:ietf:params:xml:ns:caldav"/>
<supported-calendar-data xmlns="urn:ietf:params:xml:ns:caldav"/>
<supported-collation-set xmlns="urn:ietf:params:xml:ns:caldav"/>
<max-resource-size xmlns="DAV:"/>
<supported-report-set xmlns="DAV:"/>
<source xmlns="http://calendarserver.org/ns/"/>
<min-date-time xmlns="urn:ietf:params:xml:ns:caldav"/>
<max-date-time xmlns="urn:ietf:params:xml:ns:caldav"/>
<sync-token xmlns="DAV:"/>
<getctag xmlns="http://calendarserver.org/ns/"/>
<transports xmlns="https://bitfire.at/webdav-push"/>
<topic xmlns="https://bitfire.at/webdav-push"/>
<supported-triggers xmlns="https://bitfire.at/webdav-push"/>
<resourcetype xmlns="DAV:"/>
<displayname xmlns="DAV:"/>
<current-user-principal xmlns="DAV:"/>
<current-user-privilege-set xmlns="DAV:"/>
<owner xmlns="DAV:"/>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
</response>
<?xml version="1.0" encoding="utf-8"?>
<response xmlns="DAV:" xmlns:CAL="urn:ietf:params:xml:ns:caldav" xmlns:CARD="urn:ietf:params:xml:ns:carddav" xmlns:CS="http://calendarserver.org/ns/" xmlns:PUSH="https://bitfire.at/webdav-push">
<href>/caldav/principal/user/calendar/</href>
<propstat>
<prop>
<CAL:calendar-timezone>BEGIN:VCALENDAR
PRODID:-//github.com/lennart-k/vzic-rs//RustiCal Calendar server//EN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
LAST-MODIFIED:20250723T190331Z
X-LIC-LOCATION:Europe/Berlin
X-PROLEPTIC-TZNAME:LMT
BEGIN:STANDARD
TZNAME:CET
TZOFFSETFROM:+005328
TZOFFSETTO:+0100
DTSTART:18930401T000000
END:STANDARD
BEGIN:DAYLIGHT
TZNAME:CEST
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
DTSTART:19160430T230000
RDATE:19400401T020000
RDATE:19430329T020000
RDATE:19460414T020000
RDATE:19470406T030000
RDATE:19480418T020000
RDATE:19490410T020000
RDATE:19800406T020000
END:DAYLIGHT
BEGIN:STANDARD
TZNAME:CET
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
DTSTART:19161001T010000
RDATE:19421102T030000
RDATE:19431004T030000
RDATE:19441002T030000
RDATE:19451118T030000
RDATE:19461007T030000
END:STANDARD
BEGIN:DAYLIGHT
TZNAME:CEST
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
DTSTART:19170416T020000
RRULE:FREQ=YEARLY;BYMONTH=4;BYDAY=3MO;UNTIL=19180415T010000Z
END:DAYLIGHT
BEGIN:STANDARD
TZNAME:CET
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
DTSTART:19170917T030000
RRULE:FREQ=YEARLY;BYMONTH=9;BYDAY=3MO;UNTIL=19180916T010000Z
END:STANDARD
BEGIN:DAYLIGHT
TZNAME:CEST
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
DTSTART:19440403T020000
RRULE:FREQ=YEARLY;BYMONTH=4;BYDAY=1MO;UNTIL=19450402T010000Z
END:DAYLIGHT
BEGIN:DAYLIGHT
TZNAME:CEMT
TZOFFSETFROM:+0200
TZOFFSETTO:+0300
DTSTART:19450524T020000
RDATE:19470511T030000
END:DAYLIGHT
BEGIN:DAYLIGHT
TZNAME:CEST
TZOFFSETFROM:+0300
TZOFFSETTO:+0200
DTSTART:19450924T030000
RDATE:19470629T030000
END:DAYLIGHT
BEGIN:STANDARD
TZNAME:CET
TZOFFSETFROM:+0100
TZOFFSETTO:+0100
DTSTART:19460101T000000
RDATE:19800101T000000
END:STANDARD
BEGIN:STANDARD
TZNAME:CET
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
DTSTART:19471005T030000
RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=1SU;UNTIL=19491002T010000Z
END:STANDARD
BEGIN:STANDARD
TZNAME:CET
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
DTSTART:19800928T030000
RRULE:FREQ=YEARLY;BYMONTH=9;BYDAY=-1SU;UNTIL=19950924T010000Z
END:STANDARD
BEGIN:DAYLIGHT
TZNAME:CEST
TZOFFSETFROM:+0100
TZOFFSETTO:+0200
DTSTART:19810329T020000
RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=-1SU
END:DAYLIGHT
BEGIN:STANDARD
TZNAME:CET
TZOFFSETFROM:+0200
TZOFFSETTO:+0100
DTSTART:19961027T030000
RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU
END:STANDARD
END:VTIMEZONE
END:VCALENDAR
</CAL:calendar-timezone>
<CAL:timezone-service-set>
<href>https://www.iana.org/time-zones</href>
</CAL:timezone-service-set>
<CAL:calendar-timezone-id>Europe/Berlin</CAL:calendar-timezone-id>
<calendar-order xmlns="http://apple.com/ns/ical/">0</calendar-order>
<CAL:supported-calendar-component-set>
<CAL:comp name="VEVENT"/>
<CAL:comp name="VTODO"/>
</CAL:supported-calendar-component-set>
<CAL:supported-calendar-data>
<CAL:calendar-data content-type="text/calendar" version="2.0"/>
</CAL:supported-calendar-data>
<CAL:supported-collation-set>
<CAL:supported-collation>i;ascii-casemap</CAL:supported-collation>
<CAL:supported-collation>i;octet</CAL:supported-collation>
</CAL:supported-collation-set>
<max-resource-size>10000000</max-resource-size>
<supported-report-set>
<supported-report>
<report>
<CAL:calendar-query/>
</report>
</supported-report>
<supported-report>
<report>
<CAL:calendar-multiget/>
</report>
</supported-report>
<supported-report>
<report>
<sync-collection/>
</report>
</supported-report>
</supported-report-set>
<CAL:min-date-time>-2621430101T000000Z</CAL:min-date-time>
<CAL:max-date-time>+2621421231T235959Z</CAL:max-date-time>
<sync-token>github.com/lennart-k/rustical/ns/12</sync-token>
<CS:getctag>github.com/lennart-k/rustical/ns/12</CS:getctag>
<PUSH:transports>
<PUSH:web-push/>
</PUSH:transports>
<PUSH:topic>b28b41e9-8801-4fc5-ae29-8efb5fadeb36</PUSH:topic>
<PUSH:supported-triggers>
<PUSH:content-update>
<depth>1</depth>
</PUSH:content-update>
<PUSH:property-update>
<depth>1</depth>
</PUSH:property-update>
</PUSH:supported-triggers>
<resourcetype>
<collection/>
<CAL:calendar/>
</resourcetype>
<displayname>Calendar</displayname>
<current-user-principal>
<href>/caldav/principal/user/</href>
</current-user-principal>
<current-user-privilege-set>
<privilege>
<read/>
</privilege>
<privilege>
<write-properties/>
</privilege>
<privilege>
<read-acl/>
</privilege>
<privilege>
<read-current-user-privilege-set/>
</privilege>
</current-user-privilege-set>
<owner>
<href>/caldav/principal/user/</href>
</owner>
</prop>
<status>HTTP/1.1 200 OK</status>
</propstat>
</response>

View File

@@ -1,11 +0,0 @@
[
{
"id": "user",
"displayname": null,
"principal_type": "individual",
"password": null,
"memberships": [
"group"
]
}
]

View File

@@ -1,6 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<propfind xmlns="DAV:"><propname/></propfind>
<?xml version="1.0" encoding="UTF-8"?>
<propfind xmlns="DAV:"><allprop/></propfind>

View File

@@ -1,42 +0,0 @@
[
{
"cal": {
"principal": "user",
"id": "calendar",
"displayname": "Calendar",
"order": 0,
"description": null,
"color": null,
"timezone_id": "Europe/Berlin",
"deleted_at": null,
"synctoken": 12,
"subscription_url": null,
"push_topic": "b28b41e9-8801-4fc5-ae29-8efb5fadeb36",
"components": [
"VEVENT",
"VTODO"
]
},
"read_only": true
},
{
"cal": {
"principal": "user",
"id": "calendar",
"displayname": "Calendar",
"order": 0,
"description": null,
"color": null,
"timezone_id": "Europe/Berlin",
"deleted_at": null,
"synctoken": 12,
"subscription_url": null,
"push_topic": "b28b41e9-8801-4fc5-ae29-8efb5fadeb36",
"components": [
"VEVENT",
"VTODO"
]
},
"read_only": true
}
]

View File

@@ -1,45 +0,0 @@
use crate::{CalDavPrincipalUri, calendar::resource::CalendarResource};
use rustical_dav::resource::Resource;
use rustical_store::auth::Principal;
use rustical_xml::XmlSerializeRoot;
use serde_json::from_str;
#[tokio::test]
async fn test_propfind() {
let requests: Vec<_> = include_str!("./test_files/propfind.requests")
.trim()
.split("\n\n")
.collect();
let principals: Vec<Principal> =
from_str(include_str!("./test_files/propfind.principals.json")).unwrap();
let resources: Vec<CalendarResource> =
from_str(include_str!("./test_files/propfind.resources.json")).unwrap();
let outputs: Vec<_> = include_str!("./test_files/propfind.outputs")
.trim()
.split("\n\n")
.collect();
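    // Requests, resources and expected outputs are paired index-wise, and every
    // principal is checked against each (request, resource, output) triple.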
for principal in principals {
for ((request, resource), &expected_output) in requests.iter().zip(&resources).zip(&outputs)
{
let propfind = CalendarResource::parse_propfind(request).unwrap();
let response = resource
.propfind(
&format!("/caldav/principal/{}/{}", principal.id, resource.cal.id),
&propfind.prop,
propfind.include.as_ref(),
&CalDavPrincipalUri("/caldav"),
&principal,
)
.unwrap();
let expected_output = expected_output.trim();
let output = response
.serialize_to_string()
.unwrap()
.trim()
.replace("\r\n", "\n");
similar_asserts::assert_eq!(expected_output, output);
}
}
}

View File

@@ -11,7 +11,7 @@ use rustical_ical::CalendarObject;
use rustical_store::CalendarStore;
use rustical_store::auth::Principal;
use std::str::FromStr;
use tracing::{debug, instrument};
use tracing::instrument;
#[instrument(skip(cal_store))]
pub async fn get_event<C: CalendarStore>(
@@ -78,9 +78,11 @@ pub async fn put_event<C: CalendarStore>(
true
};
let Ok(object) = CalendarObject::from_ics(body.clone(), Some(object_id)) else {
debug!("invalid calendar data:\n{body}");
return Err(Error::PreconditionFailed(Precondition::ValidCalendarData));
let object = match CalendarObject::from_ics(object_id, body) {
Ok(obj) => obj,
Err(_) => {
return Err(Error::PreconditionFailed(Precondition::ValidCalendarData));
}
};
cal_store
.put_object(principal, calendar_id, object, overwrite)

View File

@@ -2,7 +2,7 @@ use rustical_dav::extensions::CommonPropertiesProp;
use rustical_ical::UtcDateTime;
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "CalendarObjectPropName")]
pub enum CalendarObjectProp {
// WebDAV (RFC 2518)
@@ -17,7 +17,7 @@ pub enum CalendarObjectProp {
CalendarData(String),
}
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "CalendarObjectPropWrapperName", untagged)]
pub enum CalendarObjectPropWrapper {
CalendarObject(CalendarObjectProp),
@@ -25,7 +25,7 @@ pub enum CalendarObjectPropWrapper {
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq, Hash)]
pub struct ExpandElement {
pub(crate) struct ExpandElement {
#[xml(ty = "attr")]
pub(crate) start: UtcDateTime,
#[xml(ty = "attr")]

View File

@@ -1,7 +1,4 @@
use super::prop::{
CalendarData, CalendarObjectProp, CalendarObjectPropName, CalendarObjectPropWrapper,
CalendarObjectPropWrapperName,
};
use super::prop::*;
use crate::Error;
use derive_more::derive::{From, Into};
use rustical_dav::{

View File

@@ -35,7 +35,7 @@ impl<C: CalendarStore> Clone for CalendarObjectResourceService<C> {
}
impl<C: CalendarStore> CalendarObjectResourceService<C> {
pub const fn new(cal_store: Arc<C>) -> Self {
pub fn new(cal_store: Arc<C>) -> Self {
Self { cal_store }
}
}
@@ -106,8 +106,9 @@ where
D: Deserializer<'de>,
{
let name: String = Deserialize::deserialize(deserializer)?;
name.strip_suffix(".ics").map_or_else(
|| Err(serde::de::Error::custom("Missing .ics extension")),
|object_id| Ok(object_id.to_owned()),
)
if let Some(object_id) = name.strip_suffix(".ics") {
Ok(object_id.to_owned())
} else {
Err(serde::de::Error::custom("Missing .ics extension"))
}
}

View File

@@ -60,35 +60,29 @@ pub enum Error {
}
impl Error {
#[must_use]
pub fn status_code(&self) -> StatusCode {
match self {
Self::StoreError(err) => match err {
Error::StoreError(err) => match err {
rustical_store::Error::NotFound => StatusCode::NOT_FOUND,
rustical_store::Error::AlreadyExists => StatusCode::CONFLICT,
rustical_store::Error::ReadOnly => StatusCode::FORBIDDEN,
_ => StatusCode::INTERNAL_SERVER_ERROR,
},
Self::DavError(err) => StatusCode::try_from(err.status_code().as_u16())
Error::ChronoParseError(_) => StatusCode::INTERNAL_SERVER_ERROR,
Error::DavError(err) => StatusCode::try_from(err.status_code().as_u16())
.expect("Just converting between versions"),
Self::Unauthorized => StatusCode::UNAUTHORIZED,
Self::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
Self::ChronoParseError(_) | Self::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
Self::NotFound => StatusCode::NOT_FOUND,
Self::IcalError(err) => err.status_code(),
Self::PreconditionFailed(_err) => StatusCode::PRECONDITION_FAILED,
Error::Unauthorized => StatusCode::UNAUTHORIZED,
Error::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
Error::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
Error::NotFound => StatusCode::NOT_FOUND,
Error::IcalError(err) => err.status_code(),
Error::PreconditionFailed(_err) => StatusCode::PRECONDITION_FAILED,
}
}
}
impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
if matches!(
self.status_code(),
StatusCode::INTERNAL_SERVER_ERROR | StatusCode::PRECONDITION_FAILED
) {
error!("{self}");
}
(self.status_code(), self.to_string()).into_response()
}
}

View File

@@ -1,5 +1,3 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
use axum::{Extension, Router};
use derive_more::Constructor;
use principal::PrincipalResourceService;
@@ -39,8 +37,8 @@ pub fn caldav_router<AP: AuthenticationProvider, C: CalendarStore, S: Subscripti
prefix,
RootResourceService::<_, Principal, CalDavPrincipalUri>::new(PrincipalResourceService {
auth_provider: auth_provider.clone(),
sub_store: subscription_store,
cal_store: store,
sub_store: subscription_store.clone(),
cal_store: store.clone(),
simplified_home_set,
})
.axum_router()

View File

@@ -24,7 +24,7 @@ pub struct PrincipalResource {
impl ResourceName for PrincipalResource {
fn get_name(&self) -> String {
self.principal.id.clone()
self.principal.id.to_owned()
}
}
@@ -41,6 +41,11 @@ impl Resource for PrincipalResource {
Resourcetype(&[
ResourcetypeInner(Some(rustical_dav::namespace::NS_DAV), "collection"),
ResourcetypeInner(Some(rustical_dav::namespace::NS_DAV), "principal"),
// https://github.com/apple/ccs-calendarserver/blob/13c706b985fb728b9aab42dc0fef85aae21921c3/doc/Extensions/caldav-proxy.txt
// ResourcetypeInner(
// Some(rustical_dav::namespace::NS_CALENDARSERVER),
// "calendar-proxy-write",
// ),
])
}
@@ -56,7 +61,7 @@ impl Resource for PrincipalResource {
PrincipalPropWrapperName::Principal(prop) => {
PrincipalPropWrapper::Principal(match prop {
PrincipalPropName::CalendarUserType => {
PrincipalProp::CalendarUserType(self.principal.principal_type.clone())
PrincipalProp::CalendarUserType(self.principal.principal_type.to_owned())
}
PrincipalPropName::PrincipalUrl => {
PrincipalProp::PrincipalUrl(principal_url.into())
@@ -121,7 +126,7 @@ impl Resource for PrincipalResource {
}
fn get_user_privileges(&self, user: &Principal) -> Result<UserPrivilegeSet, Self::Error> {
Ok(UserPrivilegeSet::owner_only(
Ok(UserPrivilegeSet::owner_read(
user.is_principal(&self.principal.id),
))
}

View File

@@ -6,7 +6,7 @@ use rustical_store::auth::PrincipalType;
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
use strum_macros::VariantArray;
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "PrincipalPropName")]
pub enum PrincipalProp {
// Scheduling Extensions to CalDAV (RFC 6638)
@@ -16,13 +16,13 @@ pub enum PrincipalProp {
CalendarUserAddressSet(HrefElement),
// WebDAV Access Control (RFC 3744)
#[xml(ns = "rustical_dav::namespace::NS_DAV", rename = "principal-URL")]
#[xml(ns = "rustical_dav::namespace::NS_DAV", rename = b"principal-URL")]
PrincipalUrl(HrefElement),
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
GroupMembership(GroupMembership),
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
GroupMemberSet(GroupMemberSet),
#[xml(ns = "rustical_dav::namespace::NS_DAV", rename = "alternate-URI-set")]
#[xml(ns = "rustical_dav::namespace::NS_DAV", rename = b"alternate-URI-set")]
AlternateUriSet,
// #[xml(ns = "rustical_dav::namespace::NS_DAV")]
// PrincipalCollectionSet(HrefElement),
@@ -34,17 +34,17 @@ pub enum PrincipalProp {
CalendarHomeSet(CalendarHomeSet),
}
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone)]
pub struct CalendarHomeSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "PrincipalPropWrapperName", untagged)]
pub enum PrincipalPropWrapper {
Principal(PrincipalProp),
Common(CommonPropertiesProp),
}
#[derive(XmlSerialize, PartialEq, Eq, Clone, VariantArray)]
#[derive(XmlSerialize, PartialEq, Clone, VariantArray)]
pub enum ReportMethod {
// We don't actually support principal-match
#[xml(ns = "rustical_dav::namespace::NS_DAV")]

View File

@@ -46,7 +46,7 @@ impl<AP: AuthenticationProvider, S: SubscriptionStore, CS: CalendarStore> Resour
type Principal = Principal;
type PrincipalUri = CalDavPrincipalUri;
const DAV_HEADER: &str = "1, 3, access-control, calendar-access";
const DAV_HEADER: &str = "1, 3, access-control, calendar-access, calendar-proxy";
async fn get_resource(
&self,

View File

@@ -1,19 +1,14 @@
use std::sync::Arc;
use crate::{
CalDavPrincipalUri,
principal::{PrincipalResource, PrincipalResourceService},
};
use crate::principal::PrincipalResourceService;
use rstest::rstest;
use rustical_dav::resource::{Resource, ResourceService};
use rustical_store::auth::{Principal, PrincipalType::Individual};
use rustical_dav::resource::ResourceService;
use rustical_store_sqlite::{
SqliteStore,
calendar_store::SqliteCalendarStore,
principal_store::SqlitePrincipalStore,
tests::{get_test_calendar_store, get_test_principal_store, get_test_subscription_store},
};
use rustical_xml::XmlSerializeRoot;
#[rstest]
#[tokio::test]
@@ -35,15 +30,6 @@ async fn test_principal_resource(
simplified_home_set: false,
};
// We don't have any calendars here
assert!(
service
.get_members(&("user".to_owned(),))
.await
.unwrap()
.is_empty()
);
assert!(matches!(
service
.get_resource(&("invalid-user".to_owned(),), true)
@@ -58,35 +44,4 @@ async fn test_principal_resource(
}
#[tokio::test]
async fn test_propfind() {
let propfind = PrincipalResource::parse_propfind(
r#"<?xml version="1.0" encoding="UTF-8"?><propfind xmlns="DAV:"><allprop/></propfind>"#,
)
.unwrap();
let principal = Principal {
id: "user".to_string(),
displayname: None,
principal_type: Individual,
password: None,
memberships: vec!["group".to_string()],
};
let resource = PrincipalResource {
principal: principal.clone(),
members: vec![],
simplified_home_set: false,
};
let response = resource
.propfind(
&format!("/caldav/principal/{}", principal.id),
&propfind.prop,
propfind.include.as_ref(),
&CalDavPrincipalUri("/caldav"),
&principal,
)
.unwrap();
let _output = response.serialize_to_string().unwrap();
}
async fn test_propfind() {}

View File

@@ -1,7 +1,6 @@
[package]
name = "rustical_carddav"
version.workspace = true
rust-version.workspace = true
edition.workspace = true
description.workspace = true
repository.workspace = true
@@ -12,19 +11,19 @@ publish = false
axum.workspace = true
axum-extra.workspace = true
tower.workspace = true
async-trait.workspace = true
thiserror.workspace = true
quick-xml.workspace = true
tracing.workspace = true
futures-util.workspace = true
derive_more.workspace = true
base64.workspace = true
serde.workspace = true
tokio.workspace = true
url.workspace = true
rustical_dav.workspace = true
rustical_store.workspace = true
chrono.workspace = true
async-trait = { workspace = true }
thiserror = { workspace = true }
quick-xml = { workspace = true }
tracing = { workspace = true }
futures-util = { workspace = true }
derive_more = { workspace = true }
base64 = { workspace = true }
serde = { workspace = true }
tokio = { workspace = true }
url = { workspace = true }
rustical_dav = { workspace = true }
rustical_store = { workspace = true }
chrono = { workspace = true }
rustical_xml.workspace = true
uuid.workspace = true
rustical_dav_push.workspace = true

View File

@@ -1,7 +1,7 @@
use rustical_dav::extensions::CommonPropertiesProp;
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "AddressObjectPropName")]
pub enum AddressObjectProp {
// WebDAV (RFC 2518)
@@ -15,7 +15,7 @@ pub enum AddressObjectProp {
AddressData(String),
}
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "AddressObjectPropWrapperName", untagged)]
pub enum AddressObjectPropWrapper {
AddressObject(AddressObjectProp),

View File

@@ -98,8 +98,9 @@ where
D: Deserializer<'de>,
{
let name: String = Deserialize::deserialize(deserializer)?;
name.strip_suffix(".vcf").map_or_else(
|| Err(serde::de::Error::custom("Missing .vcf extension")),
|object_id| Ok(object_id.to_owned()),
)
if let Some(object_id) = name.strip_suffix(".vcf") {
Ok(object_id.to_owned())
} else {
Err(serde::de::Error::custom("Missing .vcf extension"))
}
}

View File

@@ -1,67 +0,0 @@
use std::io::BufReader;
use crate::Error;
use crate::addressbook::AddressbookResourceService;
use axum::{
extract::{Path, State},
response::{IntoResponse, Response},
};
use http::StatusCode;
use ical::{
parser::{Component, ComponentMut, vcard},
property::Property,
};
use rustical_store::{Addressbook, AddressbookStore, SubscriptionStore, auth::Principal};
use tracing::instrument;
#[instrument(skip(resource_service))]
pub async fn route_import<AS: AddressbookStore, S: SubscriptionStore>(
Path((principal, addressbook_id)): Path<(String, String)>,
user: Principal,
State(resource_service): State<AddressbookResourceService<AS, S>>,
body: String,
) -> Result<Response, Error> {
if !user.is_principal(&principal) {
return Err(Error::Unauthorized);
}
let parser = vcard::VcardParser::new(BufReader::new(body.as_bytes()));
let mut objects = vec![];
for res in parser {
let mut card = res.unwrap();
let uid = card.get_uid();
if uid.is_none() {
let mut card_mut = card.mutable();
card_mut.set_property(Property {
name: "UID".to_owned(),
value: Some(uuid::Uuid::new_v4().to_string()),
params: None,
});
card = card_mut.verify().unwrap();
}
objects.push(card.try_into().unwrap());
}
if objects.is_empty() {
return Ok((StatusCode::BAD_REQUEST, "empty addressbook data").into_response());
}
let addressbook = Addressbook {
principal,
id: addressbook_id,
displayname: None,
description: None,
deleted_at: None,
synctoken: 0,
push_topic: uuid::Uuid::new_v4().to_string(),
};
let addr_store = resource_service.addr_store;
addr_store
.import_addressbook(addressbook, objects, false)
.await?;
Ok(StatusCode::OK.into_response())
}

View File

@@ -8,7 +8,7 @@ use rustical_store::{Addressbook, AddressbookStore, SubscriptionStore, auth::Pri
use rustical_xml::{XmlDeserialize, XmlDocument, XmlRootTag};
use tracing::instrument;
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
pub struct Resourcetype {
#[xml(ns = "rustical_dav::namespace::NS_CARDDAV")]
addressbook: Option<()>,
@@ -16,25 +16,25 @@ pub struct Resourcetype {
collection: Option<()>,
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
pub struct MkcolAddressbookProp {
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
resourcetype: Option<Resourcetype>,
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
displayname: Option<String>,
#[xml(rename = "addressbook-description")]
#[xml(rename = b"addressbook-description")]
#[xml(ns = "rustical_dav::namespace::NS_CARDDAV")]
description: Option<String>,
}
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
pub struct PropElement<T: XmlDeserialize> {
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
prop: T,
}
#[derive(XmlDeserialize, XmlRootTag, Clone, Debug, PartialEq)]
#[xml(root = "mkcol")]
#[xml(root = b"mkcol")]
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
struct MkcolRequest {
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
@@ -53,13 +53,13 @@ pub async fn route_mkcol<AS: AddressbookStore, S: SubscriptionStore>(
}
let mut request = MkcolRequest::parse_str(&body)?.set.prop;
if request.displayname.as_deref() == Some("") {
request.displayname = None;
if let Some("") = request.displayname.as_deref() {
request.displayname = None
}
let addressbook = Addressbook {
id: addressbook_id.clone(),
principal: principal.clone(),
id: addressbook_id.to_owned(),
principal: principal.to_owned(),
displayname: request.displayname,
description: request.description,
deleted_at: None,
@@ -127,6 +127,6 @@ mod tests {
}
}
}
);
)
}
}

View File

@@ -1,5 +1,5 @@
pub mod get;
pub mod import;
pub mod mkcol;
pub mod post;
pub mod put;
pub mod report;

View File

@@ -45,12 +45,12 @@ pub async fn route_post<AS: AddressbookStore, S: SubscriptionStore>(
};
let subscription = Subscription {
id: sub_id.clone(),
id: sub_id.to_owned(),
push_resource: request
.subscription
.web_push_subscription
.push_resource
.clone(),
.to_owned(),
topic: addressbook_resource.0.push_topic,
expiration: expires.naive_local(),
public_key: request

View File

@@ -0,0 +1,47 @@
use crate::Error;
use crate::addressbook::AddressbookResourceService;
use axum::response::IntoResponse;
use axum::{
extract::{Path, State},
response::Response,
};
use http::StatusCode;
use ical::VcardParser;
use rustical_ical::AddressObject;
use rustical_store::Addressbook;
use rustical_store::{AddressbookStore, SubscriptionStore, auth::Principal};
use tracing::instrument;
#[instrument(skip(addr_store))]
pub async fn route_put<AS: AddressbookStore, S: SubscriptionStore>(
Path((principal, addressbook_id)): Path<(String, String)>,
State(AddressbookResourceService { addr_store, .. }): State<AddressbookResourceService<AS, S>>,
user: Principal,
body: String,
) -> Result<Response, Error> {
if !user.is_principal(&principal) {
return Err(Error::Unauthorized);
}
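    // Every vCard in the request body becomes one AddressObject; a parse error aborts
    // the request before anything is stored.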
let mut objects = vec![];
for object in VcardParser::new(body.as_bytes()) {
let object = object.map_err(rustical_ical::Error::from)?;
objects.push(AddressObject::try_from(object)?);
}
let addressbook = Addressbook {
id: addressbook_id.clone(),
principal: principal.clone(),
displayname: None,
description: None,
deleted_at: None,
synctoken: Default::default(),
push_topic: uuid::Uuid::new_v4().to_string(),
};
addr_store
.import_addressbook(principal.clone(), addressbook, objects)
.await?;
Ok(StatusCode::CREATED.into_response())
}

View File

@@ -13,7 +13,7 @@ use rustical_ical::AddressObject;
use rustical_store::{AddressbookStore, auth::Principal};
use rustical_xml::XmlDeserialize;
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
pub struct AddressbookMultigetRequest {
@@ -34,24 +34,24 @@ pub async fn get_objects_addressbook_multiget<AS: AddressbookStore>(
let mut not_found = vec![];
for href in &addressbook_multiget.href {
if let Ok(href) = percent_encoding::percent_decode_str(href).decode_utf8()
&& let Some(filename) = href.strip_prefix(path)
{
let filename = filename.trim_start_matches('/');
if let Some(filename) = href.strip_prefix(path) {
let filename = filename.trim_start_matches("/");
if let Some(object_id) = filename.strip_suffix(".vcf") {
match store
.get_object(principal, addressbook_id, object_id, false)
.await
{
Ok(object) => result.push(object),
Err(rustical_store::Error::NotFound) => not_found.push(href.to_string()),
Err(rustical_store::Error::NotFound) => not_found.push(href.to_owned()),
Err(err) => return Err(err.into()),
}
};
} else {
not_found.push(href.to_string());
not_found.push(href.to_owned());
continue;
}
} else {
not_found.push(href.to_owned());
continue;
}
}

View File

@@ -26,10 +26,10 @@ pub(crate) enum ReportRequest {
}
impl ReportRequest {
const fn props(&self) -> &PropfindType<AddressObjectPropWrapperName> {
fn props(&self) -> &PropfindType<AddressObjectPropWrapperName> {
match self {
Self::AddressbookMultiget(AddressbookMultigetRequest { prop, .. })
| Self::SyncCollection(SyncCollectionRequest { prop, .. }) => prop,
ReportRequest::AddressbookMultiget(AddressbookMultigetRequest { prop, .. }) => prop,
ReportRequest::SyncCollection(SyncCollectionRequest { prop, .. }) => prop,
}
}
}
@@ -101,7 +101,7 @@ mod tests {
assert_eq!(
report_request,
ReportRequest::SyncCollection(SyncCollectionRequest {
sync_token: String::new(),
sync_token: "".to_owned(),
sync_level: SyncLevel::One,
prop: rustical_dav::xml::PropfindType::Prop(PropElement(
vec![AddressObjectPropWrapperName::AddressObject(
@@ -111,7 +111,7 @@ mod tests {
)),
limit: None
})
);
)
}
#[test]
@@ -142,6 +142,6 @@ mod tests {
"/carddav/user/user/6f787542-5256-401a-8db97003260da/ae7a998fdfd1d84a20391168962c62b".to_owned()
]
})
);
)
}
}

View File

@@ -6,7 +6,7 @@ use rustical_dav_push::DavPushExtensionProp;
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
use strum_macros::VariantArray;
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "AddressbookPropName")]
pub enum AddressbookProp {
// CardDAV (RFC 6352)
@@ -20,7 +20,7 @@ pub enum AddressbookProp {
MaxResourceSize(i64),
}
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "AddressbookPropWrapperName", untagged)]
pub enum AddressbookPropWrapper {
Addressbook(AddressbookProp),
@@ -29,7 +29,7 @@ pub enum AddressbookPropWrapper {
Common(CommonPropertiesProp),
}
#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
#[derive(Debug, Clone, XmlSerialize, PartialEq)]
pub struct AddressDataType {
#[xml(ty = "attr")]
pub content_type: &'static str,
@@ -37,7 +37,7 @@ pub struct AddressDataType {
pub version: &'static str,
}
#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
#[derive(Debug, Clone, XmlSerialize, PartialEq)]
pub struct SupportedAddressData {
#[xml(ns = "rustical_dav::namespace::NS_CARDDAV", flatten)]
address_data_type: &'static [AddressDataType],
@@ -60,7 +60,7 @@ impl Default for SupportedAddressData {
}
}
#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq, VariantArray)]
#[derive(Debug, Clone, XmlSerialize, PartialEq, VariantArray)]
pub enum ReportMethod {
#[xml(ns = "rustical_dav::namespace::NS_CARDDAV")]
AddressbookMultiget,

View File

@@ -17,7 +17,7 @@ pub struct AddressbookResource(pub(crate) Addressbook);
impl ResourceName for AddressbookResource {
fn get_name(&self) -> String {
self.0.id.clone()
self.0.id.to_owned()
}
}
@@ -29,7 +29,7 @@ impl SyncTokenExtension for AddressbookResource {
impl DavPushExtension for AddressbookResource {
fn get_topic(&self) -> String {
self.0.push_topic.clone()
self.0.push_topic.to_owned()
}
}
@@ -59,13 +59,13 @@ impl Resource for AddressbookResource {
AddressbookPropWrapperName::Addressbook(prop) => {
AddressbookPropWrapper::Addressbook(match prop {
AddressbookPropName::MaxResourceSize => {
AddressbookProp::MaxResourceSize(10_000_000)
AddressbookProp::MaxResourceSize(10000000)
}
AddressbookPropName::SupportedReportSet => {
AddressbookProp::SupportedReportSet(SupportedReportSet::all())
}
AddressbookPropName::AddressbookDescription => {
AddressbookProp::AddressbookDescription(self.0.description.clone())
AddressbookProp::AddressbookDescription(self.0.description.to_owned())
}
AddressbookPropName::SupportedAddressData => {
AddressbookProp::SupportedAddressData(SupportedAddressData::default())
@@ -92,11 +92,9 @@ impl Resource for AddressbookResource {
self.0.description = description;
Ok(())
}
AddressbookProp::MaxResourceSize(_)
| AddressbookProp::SupportedReportSet(_)
| AddressbookProp::SupportedAddressData(_) => {
Err(rustical_dav::Error::PropReadOnly)
}
AddressbookProp::MaxResourceSize(_) => Err(rustical_dav::Error::PropReadOnly),
AddressbookProp::SupportedReportSet(_) => Err(rustical_dav::Error::PropReadOnly),
AddressbookProp::SupportedAddressData(_) => Err(rustical_dav::Error::PropReadOnly),
},
AddressbookPropWrapper::SyncToken(prop) => SyncTokenExtension::set_prop(self, prop),
AddressbookPropWrapper::DavPush(prop) => DavPushExtension::set_prop(self, prop),
@@ -114,11 +112,9 @@ impl Resource for AddressbookResource {
self.0.description = None;
Ok(())
}
AddressbookPropName::MaxResourceSize
| AddressbookPropName::SupportedReportSet
| AddressbookPropName::SupportedAddressData => {
Err(rustical_dav::Error::PropReadOnly)
}
AddressbookPropName::MaxResourceSize => Err(rustical_dav::Error::PropReadOnly),
AddressbookPropName::SupportedReportSet => Err(rustical_dav::Error::PropReadOnly),
AddressbookPropName::SupportedAddressData => Err(rustical_dav::Error::PropReadOnly),
},
AddressbookPropWrapperName::SyncToken(prop) => {
SyncTokenExtension::remove_prop(self, prop)

View File

@@ -3,8 +3,8 @@ use super::methods::report::route_report_addressbook;
use crate::address_object::AddressObjectResourceService;
use crate::address_object::resource::AddressObjectResource;
use crate::addressbook::methods::get::route_get;
use crate::addressbook::methods::import::route_import;
use crate::addressbook::methods::post::route_post;
use crate::addressbook::methods::put::route_put;
use crate::addressbook::resource::AddressbookResource;
use crate::{CardDavPrincipalUri, Error};
use async_trait::async_trait;
@@ -26,7 +26,7 @@ pub struct AddressbookResourceService<AS: AddressbookStore, S: SubscriptionStore
}
impl<A: AddressbookStore, S: SubscriptionStore> AddressbookResourceService<A, S> {
pub const fn new(addr_store: Arc<A>, sub_store: Arc<S>) -> Self {
pub fn new(addr_store: Arc<A>, sub_store: Arc<S>) -> Self {
Self {
addr_store,
sub_store,
@@ -139,9 +139,9 @@ impl<AS: AddressbookStore, S: SubscriptionStore> AxumMethods for AddressbookReso
})
}
fn import() -> Option<fn(Self, Request) -> BoxFuture<'static, Result<Response, Infallible>>> {
fn put() -> Option<fn(Self, Request) -> BoxFuture<'static, Result<Response, Infallible>>> {
Some(|state, req| {
let mut service = Handler::with_state(route_import::<AS, S>, state);
let mut service = Handler::with_state(route_put::<AS, S>, state);
Box::pin(Service::call(&mut service, req))
})
}

View File

@@ -30,20 +30,20 @@ pub enum Error {
}
impl Error {
#[must_use]
pub const fn status_code(&self) -> StatusCode {
pub fn status_code(&self) -> StatusCode {
match self {
Self::StoreError(err) => match err {
Error::StoreError(err) => match err {
rustical_store::Error::NotFound => StatusCode::NOT_FOUND,
rustical_store::Error::AlreadyExists => StatusCode::CONFLICT,
rustical_store::Error::ReadOnly => StatusCode::FORBIDDEN,
_ => StatusCode::INTERNAL_SERVER_ERROR,
},
Self::DavError(err) => err.status_code(),
Self::Unauthorized => StatusCode::UNAUTHORIZED,
Self::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
Self::ChronoParseError(_) | Self::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
Self::NotFound => StatusCode::NOT_FOUND,
Error::ChronoParseError(_) => StatusCode::INTERNAL_SERVER_ERROR,
Error::DavError(err) => err.status_code(),
Error::Unauthorized => StatusCode::UNAUTHORIZED,
Error::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
Error::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
Error::NotFound => StatusCode::NOT_FOUND,
Self::IcalError(err) => err.status_code(),
}
}
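
The hunk above toggles between `Error::`-qualified and `Self::`-qualified match arms and (un)groups arms that map to the same status code. A hedged, standalone sketch of the grouped form, assuming the `http` and `thiserror` crates; `ApiError`/`StoreError` are simplified stand-ins, not the crate's real error types:

use http::StatusCode;
use thiserror::Error;

#[derive(Debug, Error)]
enum StoreError {
    #[error("not found")]
    NotFound,
    #[error("already exists")]
    AlreadyExists,
}

#[derive(Debug, Error)]
enum ApiError {
    #[error(transparent)]
    Store(#[from] StoreError),
    #[error("unauthorized")]
    Unauthorized,
    #[error("not implemented")]
    NotImplemented,
    #[error("internal error")]
    Internal,
}

impl ApiError {
    fn status_code(&self) -> StatusCode {
        match self {
            Self::Store(StoreError::NotFound) => StatusCode::NOT_FOUND,
            Self::Store(StoreError::AlreadyExists) => StatusCode::CONFLICT,
            Self::Unauthorized => StatusCode::UNAUTHORIZED,
            // Identical arms can be merged with `|`, as one side of the diff does.
            Self::NotImplemented | Self::Internal => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}

fn main() {
    assert_eq!(ApiError::from(StoreError::NotFound).status_code(), StatusCode::NOT_FOUND);
}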

View File

@@ -1,5 +1,3 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
use axum::response::Redirect;
use axum::routing::any;
use axum::{Extension, Router};
@@ -38,15 +36,20 @@ pub fn carddav_router<AP: AuthenticationProvider, A: AddressbookStore, S: Subscr
store: Arc<A>,
subscription_store: Arc<S>,
) -> Router {
let principal_service =
PrincipalResourceService::new(store, auth_provider.clone(), subscription_store);
let principal_service = PrincipalResourceService::new(
store.clone(),
auth_provider.clone(),
subscription_store.clone(),
);
Router::new()
.nest(
prefix,
RootResourceService::<_, Principal, CardDavPrincipalUri>::new(principal_service)
.axum_router()
.layer(AuthenticationLayer::new(auth_provider))
.layer(Extension(CardDavPrincipalUri(prefix))),
RootResourceService::<_, Principal, CardDavPrincipalUri>::new(
principal_service.clone(),
)
.axum_router()
.layer(AuthenticationLayer::new(auth_provider))
.layer(Extension(CardDavPrincipalUri(prefix))),
)
.route(
"/.well-known/carddav",

View File

@@ -20,7 +20,7 @@ pub struct PrincipalResource {
impl ResourceName for PrincipalResource {
fn get_name(&self) -> String {
self.principal.id.clone()
self.principal.id.to_owned()
}
}

View File

@@ -4,18 +4,18 @@ use rustical_dav::{
};
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "PrincipalPropName")]
pub enum PrincipalProp {
// WebDAV Access Control (RFC 3744)
#[xml(rename = "principal-URL")]
#[xml(rename = b"principal-URL")]
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
PrincipalUrl(HrefElement),
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
GroupMembership(GroupMembership),
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
GroupMemberSet(GroupMemberSet),
#[xml(ns = "rustical_dav::namespace::NS_DAV", rename = "alternate-URI-set")]
#[xml(ns = "rustical_dav::namespace::NS_DAV", rename = b"alternate-URI-set")]
AlternateUriSet,
#[xml(ns = "rustical_dav::namespace::NS_DAV")]
PrincipalCollectionSet(HrefElement),
@@ -27,10 +27,10 @@ pub enum PrincipalProp {
PrincipalAddress(Option<HrefElement>),
}
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone)]
pub struct AddressbookHomeSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "PrincipalPropWrapperName", untagged)]
pub enum PrincipalPropWrapper {
Principal(PrincipalProp),

View File

@@ -34,7 +34,7 @@ impl<A: AddressbookStore, AP: AuthenticationProvider, S: SubscriptionStore> Clon
impl<A: AddressbookStore, AP: AuthenticationProvider, S: SubscriptionStore>
PrincipalResourceService<A, AP, S>
{
pub const fn new(addr_store: Arc<A>, auth_provider: Arc<AP>, sub_store: Arc<S>) -> Self {
pub fn new(addr_store: Arc<A>, auth_provider: Arc<AP>, sub_store: Arc<S>) -> Self {
Self {
addr_store,
auth_provider,

View File

@@ -1,7 +1,6 @@
[package]
name = "rustical_dav"
version.workspace = true
rust-version.workspace = true
edition.workspace = true
description.workspace = true
repository.workspace = true
@@ -12,6 +11,7 @@ publish = false
axum.workspace = true
tower.workspace = true
axum-extra.workspace = true
rustical_xml.workspace = true
async-trait.workspace = true
futures-util.workspace = true

View File

@@ -1,4 +1,3 @@
use axum::body::Body;
use http::StatusCode;
use rustical_xml::XmlError;
use thiserror::Error;
@@ -35,9 +34,9 @@ pub enum Error {
}
impl Error {
#[must_use]
pub const fn status_code(&self) -> StatusCode {
pub fn status_code(&self) -> StatusCode {
match self {
Self::InternalError => StatusCode::INTERNAL_SERVER_ERROR,
Self::NotFound => StatusCode::NOT_FOUND,
Self::BadRequest(_) => StatusCode::BAD_REQUEST,
Self::Unauthorized => StatusCode::UNAUTHORIZED,
@@ -50,9 +49,9 @@ impl Error {
| XmlError::InvalidValue(_) => StatusCode::UNPROCESSABLE_ENTITY,
_ => StatusCode::BAD_REQUEST,
},
Self::PropReadOnly => StatusCode::CONFLICT,
Self::PreconditionFailed => StatusCode::PRECONDITION_FAILED,
Self::InternalError | Self::IOError(_) => StatusCode::INTERNAL_SERVER_ERROR,
Error::PropReadOnly => StatusCode::CONFLICT,
Error::PreconditionFailed => StatusCode::PRECONDITION_FAILED,
Self::IOError(_) => StatusCode::INTERNAL_SERVER_ERROR,
Self::Forbidden => StatusCode::FORBIDDEN,
}
}
@@ -60,15 +59,10 @@ impl Error {
impl axum::response::IntoResponse for Error {
fn into_response(self) -> axum::response::Response {
if matches!(
self.status_code(),
StatusCode::INTERNAL_SERVER_ERROR | StatusCode::PRECONDITION_FAILED
) {
error!("{self}");
}
use axum::body::Body;
let mut resp = axum::response::Response::builder().status(self.status_code());
if matches!(&self, &Self::Unauthorized) {
if matches!(&self, &Error::Unauthorized) {
resp.headers_mut()
.expect("This must always work")
.insert("WWW-Authenticate", "Basic".parse().unwrap());

View File

@@ -6,7 +6,7 @@ use crate::{
};
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, PropName, EnumVariants)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, PropName, EnumVariants)]
#[xml(unit_variants_ident = "CommonPropertiesPropName")]
pub enum CommonPropertiesProp {
// WebDAV (RFC 2518)
@@ -39,9 +39,9 @@ pub trait CommonPropertiesExtension: Resource {
CommonPropertiesPropName::Resourcetype => {
CommonPropertiesProp::Resourcetype(self.get_resourcetype())
}
CommonPropertiesPropName::Displayname => CommonPropertiesProp::Displayname(
self.get_displayname().map(std::string::ToString::to_string),
),
CommonPropertiesPropName::Displayname => {
CommonPropertiesProp::Displayname(self.get_displayname().map(|s| s.to_string()))
}
CommonPropertiesPropName::CurrentUserPrincipal => {
CommonPropertiesProp::CurrentUserPrincipal(
principal_uri.principal_uri(principal.get_id()).into(),

View File

@@ -1,6 +1,6 @@
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, PropName, EnumVariants)]
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, PropName, EnumVariants)]
#[xml(unit_variants_ident = "SyncTokenExtensionPropName")]
pub enum SyncTokenExtensionProp {
// Collection Synchronization (RFC 6578)

View File

@@ -19,7 +19,7 @@ impl IntoResponse for InvalidDepthHeader {
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
#[derive(Debug, Clone, PartialEq)]
pub enum Depth {
Zero,
One,
@@ -29,9 +29,9 @@ pub enum Depth {
impl ValueSerialize for Depth {
fn serialize(&self) -> String {
match self {
Self::Zero => "0",
Self::One => "1",
Self::Infinity => "infinity",
Depth::Zero => "0",
Depth::One => "1",
Depth::Infinity => "infinity",
}
.to_owned()
}
@@ -55,9 +55,9 @@ impl TryFrom<&[u8]> for Depth {
fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
match value {
b"0" => Ok(Self::Zero),
b"1" => Ok(Self::One),
b"Infinity" | b"infinity" => Ok(Self::Infinity),
b"0" => Ok(Depth::Zero),
b"1" => Ok(Depth::One),
b"Infinity" | b"infinity" => Ok(Depth::Infinity),
_ => Err(InvalidDepthHeader),
}
}
@@ -85,11 +85,10 @@ impl<S: Send + Sync> FromRequestParts<S> for Depth {
parts: &mut axum::http::request::Parts,
_state: &S,
) -> Result<Self, Self::Rejection> {
parts
.headers
.get("Depth")
.map_or(Ok(Self::Zero), |depth_header| {
depth_header.as_bytes().try_into()
})
if let Some(depth_header) = parts.headers.get("Depth") {
depth_header.as_bytes().try_into()
} else {
Ok(Self::Zero)
}
}
}
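
Both sides of the hunk above implement the same fallback: a missing `Depth` header means depth `0`, otherwise the raw header bytes are parsed. A minimal std-only sketch of that behaviour (simplified types, not the crate's axum extractor):

#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum Depth {
    Zero,
    One,
    Infinity,
}

#[derive(Debug)]
struct InvalidDepthHeader;

impl TryFrom<&[u8]> for Depth {
    type Error = InvalidDepthHeader;

    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
        match value {
            b"0" => Ok(Self::Zero),
            b"1" => Ok(Self::One),
            b"Infinity" | b"infinity" => Ok(Self::Infinity),
            _ => Err(InvalidDepthHeader),
        }
    }
}

// `map_or` and the explicit `if let` from the hunk are equivalent here.
fn depth_from_header(raw: Option<&[u8]>) -> Result<Depth, InvalidDepthHeader> {
    raw.map_or(Ok(Depth::Zero), |bytes| bytes.try_into())
}

fn main() {
    assert_eq!(depth_from_header(None).unwrap(), Depth::Zero);
    assert_eq!(depth_from_header(Some(b"infinity".as_slice())).unwrap(), Depth::Infinity);
}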

View File

@@ -14,12 +14,16 @@ impl IntoResponse for InvalidOverwriteHeader {
}
}
#[derive(Debug, PartialEq, Eq)]
pub struct Overwrite(pub bool);
#[derive(Debug, PartialEq, Default)]
pub enum Overwrite {
#[default]
T,
F,
}
impl Default for Overwrite {
fn default() -> Self {
Self(true)
impl Overwrite {
pub fn is_true(&self) -> bool {
matches!(self, Self::T)
}
}
@@ -30,10 +34,11 @@ impl<S: Send + Sync> FromRequestParts<S> for Overwrite {
parts: &mut axum::http::request::Parts,
_state: &S,
) -> Result<Self, Self::Rejection> {
parts.headers.get("Overwrite").map_or_else(
|| Ok(Self::default()),
|overwrite_header| overwrite_header.as_bytes().try_into(),
)
if let Some(overwrite_header) = parts.headers.get("Overwrite") {
overwrite_header.as_bytes().try_into()
} else {
Ok(Self::default())
}
}
}
@@ -42,48 +47,9 @@ impl TryFrom<&[u8]> for Overwrite {
fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
match value {
b"T" => Ok(Self(true)),
b"F" => Ok(Self(false)),
b"T" => Ok(Overwrite::T),
b"F" => Ok(Overwrite::F),
_ => Err(InvalidOverwriteHeader),
}
}
}
#[cfg(test)]
mod tests {
use axum::{extract::FromRequestParts, response::IntoResponse};
use http::Request;
use crate::header::Overwrite;
#[tokio::test]
async fn test_overwrite_default() {
let request = Request::put("asd").body(()).unwrap();
let (mut parts, ()) = request.into_parts();
let overwrite = Overwrite::from_request_parts(&mut parts, &())
.await
.unwrap();
assert_eq!(
Overwrite(true),
overwrite,
"By default we want to overwrite!"
);
}
#[test]
fn test_overwrite() {
assert_eq!(
Overwrite(true),
Overwrite::try_from(b"T".as_slice()).unwrap()
);
assert_eq!(
Overwrite(false),
Overwrite::try_from(b"F".as_slice()).unwrap()
);
if let Err(err) = Overwrite::try_from(b"aslkdjlad".as_slice()) {
let _ = err.into_response();
} else {
unreachable!("should return error")
}
}
}
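
The hunk above swaps between two encodings of the WebDAV `Overwrite` header: a `bool` newtype with a `Default` of `true`, and a two-variant `T`/`F` enum with an `is_true()` helper. A std-only sketch of the newtype form, including the RFC 4918 rule that an absent header means overwrite:

#[derive(Debug, PartialEq, Eq)]
struct Overwrite(pub bool);

impl Default for Overwrite {
    // An absent Overwrite header is treated as "T".
    fn default() -> Self {
        Self(true)
    }
}

#[derive(Debug)]
struct InvalidOverwriteHeader;

impl TryFrom<&[u8]> for Overwrite {
    type Error = InvalidOverwriteHeader;

    fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
        match value {
            b"T" => Ok(Self(true)),
            b"F" => Ok(Self(false)),
            _ => Err(InvalidOverwriteHeader),
        }
    }
}

fn main() {
    assert_eq!(Overwrite::default(), Overwrite(true));
    assert_eq!(Overwrite::try_from(b"F".as_slice()).unwrap(), Overwrite(false));
    assert!(Overwrite::try_from(b"x".as_slice()).is_err());
}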

View File

@@ -1,5 +1,3 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc)]
pub mod error;
pub mod extensions;
pub mod header;

View File

@@ -1,10 +1,9 @@
use itertools::Itertools;
use quick_xml::name::Namespace;
use rustical_xml::{XmlDeserialize, XmlSerialize};
use std::collections::{HashMap, HashSet};
// https://datatracker.ietf.org/doc/html/rfc3744
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, Eq, Hash, PartialEq, PartialOrd, Ord)]
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, Eq, Hash, PartialEq)]
pub enum UserPrivilege {
Read,
Write,
@@ -20,18 +19,18 @@ impl XmlSerialize for UserPrivilegeSet {
fn serialize(
&self,
ns: Option<Namespace>,
tag: Option<&str>,
namespaces: &HashMap<Namespace, &str>,
tag: Option<&[u8]>,
namespaces: &HashMap<Namespace, &[u8]>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> {
#[derive(XmlSerialize)]
pub struct FakeUserPrivilegeSet {
#[xml(rename = "privilege", flatten)]
#[xml(rename = b"privilege", flatten)]
privileges: Vec<UserPrivilege>,
}
FakeUserPrivilegeSet {
privileges: self.privileges.iter().cloned().sorted().collect(),
privileges: self.privileges.iter().cloned().collect(),
}
.serialize(ns, tag, namespaces, writer)
}
@@ -41,13 +40,12 @@ impl XmlSerialize for UserPrivilegeSet {
}
}
#[derive(Debug, Clone, Default, PartialEq, Eq)]
#[derive(Debug, Clone, Default, PartialEq)]
pub struct UserPrivilegeSet {
privileges: HashSet<UserPrivilege>,
}
impl UserPrivilegeSet {
#[must_use]
pub fn has(&self, privilege: &UserPrivilege) -> bool {
if (privilege == &UserPrivilege::WriteProperties
|| privilege == &UserPrivilege::WriteContent)
@@ -58,14 +56,12 @@ impl UserPrivilegeSet {
self.privileges.contains(privilege) || self.privileges.contains(&UserPrivilege::All)
}
#[must_use]
pub fn all() -> Self {
Self {
privileges: HashSet::from([UserPrivilege::All]),
}
}
#[must_use]
pub fn owner_only(is_owner: bool) -> Self {
if is_owner {
Self::all()
@@ -74,7 +70,6 @@ impl UserPrivilegeSet {
}
}
#[must_use]
pub fn owner_read(is_owner: bool) -> Self {
if is_owner {
Self::read_only()
@@ -83,7 +78,6 @@ impl UserPrivilegeSet {
}
}
#[must_use]
pub fn owner_write_properties(is_owner: bool) -> Self {
// Content is read-only but we can write properties
if is_owner {
@@ -93,7 +87,6 @@ impl UserPrivilegeSet {
}
}
#[must_use]
pub fn read_only() -> Self {
Self {
privileges: HashSet::from([
@@ -104,7 +97,6 @@ impl UserPrivilegeSet {
}
}
#[must_use]
pub fn write_properties() -> Self {
Self {
privileges: HashSet::from([

View File

@@ -9,49 +9,36 @@ pub type MethodFunction<State> =
pub trait AxumMethods: Sized + Send + Sync + 'static {
#[inline]
#[must_use]
fn report() -> Option<MethodFunction<Self>> {
None
}
#[inline]
#[must_use]
fn get() -> Option<MethodFunction<Self>> {
None
}
#[inline]
#[must_use]
fn post() -> Option<MethodFunction<Self>> {
None
}
#[inline]
#[must_use]
fn mkcol() -> Option<MethodFunction<Self>> {
None
}
#[inline]
#[must_use]
fn mkcalendar() -> Option<MethodFunction<Self>> {
None
}
#[inline]
#[must_use]
fn put() -> Option<MethodFunction<Self>> {
None
}
#[inline]
#[must_use]
fn import() -> Option<MethodFunction<Self>> {
None
}
#[inline]
#[must_use]
fn allow_header() -> Allow {
let mut allow = vec![
Method::from_str("PROPFIND").unwrap(),
@@ -80,9 +67,6 @@ pub trait AxumMethods: Sized + Send + Sync + 'static {
if Self::put().is_some() {
allow.push(Method::PUT);
}
if Self::import().is_some() {
allow.push(Method::from_str("IMPORT").unwrap());
}
allow.into_iter().collect()
}
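
The trait above advertises optional HTTP handlers and derives the `Allow` header from which ones are implemented (the hunk removes the custom `IMPORT` method from that list). A hedged sketch of the pattern, assuming the `http` crate; booleans stand in for the real `Option<MethodFunction<Self>>` hooks:

use std::str::FromStr;

use http::Method;

trait AxumMethodsSketch {
    fn get() -> bool {
        false
    }
    fn put() -> bool {
        false
    }

    // Always-supported methods first, then whatever the implementor opts into.
    fn allow_header() -> Vec<Method> {
        let mut allow = vec![Method::from_str("PROPFIND").unwrap(), Method::OPTIONS];
        if Self::get() {
            allow.push(Method::GET);
        }
        if Self::put() {
            allow.push(Method::PUT);
        }
        allow
    }
}

struct ReadOnlyResource;

impl AxumMethodsSketch for ReadOnlyResource {
    fn get() -> bool {
        true
    }
}

fn main() {
    assert_eq!(
        ReadOnlyResource::allow_header(),
        vec![Method::from_str("PROPFIND").unwrap(), Method::OPTIONS, Method::GET]
    );
}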

View File

@@ -23,7 +23,7 @@ pub struct AxumService<RS: ResourceService + AxumMethods> {
}
impl<RS: ResourceService + AxumMethods> AxumService<RS> {
pub const fn new(resource_service: RS) -> Self {
pub fn new(resource_service: RS) -> Self {
Self { resource_service }
}
}
@@ -97,13 +97,8 @@ where
return svc(self.resource_service.clone(), req);
}
}
"IMPORT" => {
if let Some(svc) = RS::import() {
return svc(self.resource_service.clone(), req);
}
}
_ => {}
}
};
Box::pin(async move {
Ok(Response::builder()
.status(StatusCode::METHOD_NOT_ALLOWED)

View File

@@ -12,12 +12,12 @@ use serde::Deserialize;
use tracing::instrument;
#[instrument(skip(path, resource_service,))]
pub async fn axum_route_copy<R: ResourceService>(
pub(crate) async fn axum_route_copy<R: ResourceService>(
Path(path): Path<R::PathComponents>,
State(resource_service): State<R>,
depth: Option<Depth>,
principal: R::Principal,
Overwrite(overwrite): Overwrite,
overwrite: Overwrite,
matched_path: MatchedPath,
header_map: HeaderMap,
) -> Result<Response, R::Error> {
@@ -39,7 +39,7 @@ pub async fn axum_route_copy<R: ResourceService>(
.map_err(|_| crate::Error::Forbidden)?;
if resource_service
.copy_resource(&path, &dest_path, &principal, overwrite)
.copy_resource(&path, &dest_path, &principal, overwrite.is_true())
.await?
{
// Overwritten

View File

@@ -7,7 +7,7 @@ use axum_extra::TypedHeader;
use headers::{IfMatch, IfNoneMatch};
use http::HeaderMap;
pub async fn axum_route_delete<R: ResourceService>(
pub(crate) async fn axum_route_delete<R: ResourceService>(
Path(path): Path<R::PathComponents>,
State(resource_service): State<R>,
principal: R::Principal,
@@ -24,7 +24,8 @@ pub async fn axum_route_delete<R: ResourceService>(
}
let no_trash = header_map
.get("X-No-Trashbin")
.is_some_and(|val| matches!(val.to_str(), Ok("1")));
.map(|val| matches!(val.to_str(), Ok("1")))
.unwrap_or(false);
route_delete(
&path,
&principal,
@@ -59,11 +60,11 @@ pub async fn route_delete<R: ResourceService>(
return Err(crate::Error::PreconditionFailed.into());
}
}
if let Some(if_none_match) = if_none_match
&& resource.satisfies_if_none_match(&if_none_match)
{
// Precondition failed
return Err(crate::Error::PreconditionFailed.into());
if let Some(if_none_match) = if_none_match {
if resource.satisfies_if_none_match(&if_none_match) {
// Precondition failed
return Err(crate::Error::PreconditionFailed.into());
}
}
resource_service
.delete_resource(path_components, !no_trash)
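
Apart from toggling `pub(crate)` and the nested `if let` vs. let-chain style of the `If-None-Match` check, the hunk above rewrites the `X-No-Trashbin` header test between `is_some_and` and `map(..).unwrap_or(false)`. A std-only sketch showing the two spellings agree (a plain `Option<&str>` stands in for the real `HeaderMap` lookup):

fn main() {
    let header: Option<&str> = Some("1");

    // Newer spelling (Rust 1.70+).
    let no_trash_a = header.is_some_and(|v| v == "1");
    // Older spelling from the other side of the diff.
    let no_trash_b = header.map(|v| v == "1").unwrap_or(false);

    assert_eq!(no_trash_a, no_trash_b);
    assert!(no_trash_a);
}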

View File

@@ -4,8 +4,8 @@ mod mv;
mod propfind;
mod proppatch;
pub use copy::axum_route_copy;
pub use delete::axum_route_delete;
pub use mv::axum_route_move;
pub use propfind::axum_route_propfind;
pub use proppatch::axum_route_proppatch;
pub(crate) use copy::axum_route_copy;
pub(crate) use delete::axum_route_delete;
pub(crate) use mv::axum_route_move;
pub(crate) use propfind::axum_route_propfind;
pub(crate) use proppatch::axum_route_proppatch;

View File

@@ -12,12 +12,12 @@ use serde::Deserialize;
use tracing::instrument;
#[instrument(skip(path, resource_service,))]
pub async fn axum_route_move<R: ResourceService>(
pub(crate) async fn axum_route_move<R: ResourceService>(
Path(path): Path<R::PathComponents>,
State(resource_service): State<R>,
depth: Option<Depth>,
principal: R::Principal,
Overwrite(overwrite): Overwrite,
overwrite: Overwrite,
matched_path: MatchedPath,
header_map: HeaderMap,
) -> Result<Response, R::Error> {
@@ -39,7 +39,7 @@ pub async fn axum_route_move<R: ResourceService>(
.map_err(|_| crate::Error::Forbidden)?;
if resource_service
.copy_resource(&path, &dest_path, &principal, overwrite)
.copy_resource(&path, &dest_path, &principal, overwrite.is_true())
.await?
{
// Overwritten

View File

@@ -6,7 +6,11 @@ use crate::resource::Resource;
use crate::resource::ResourceName;
use crate::resource::ResourceService;
use crate::xml::MultistatusElement;
use crate::xml::PropfindElement;
use crate::xml::PropfindType;
use axum::extract::{Extension, OriginalUri, Path, State};
use rustical_xml::PropName;
use rustical_xml::XmlDocument;
use tracing::instrument;
type RSMultistatus<R> = MultistatusElement<
@@ -15,7 +19,7 @@ type RSMultistatus<R> = MultistatusElement<
>;
#[instrument(skip(path, resource_service, puri))]
pub async fn axum_route_propfind<R: ResourceService>(
pub(crate) async fn axum_route_propfind<R: ResourceService>(
Path(path): Path<R::PathComponents>,
State(resource_service): State<R>,
depth: Depth,
@@ -36,7 +40,7 @@ pub async fn axum_route_propfind<R: ResourceService>(
.await
}
pub async fn route_propfind<R: ResourceService>(
pub(crate) async fn route_propfind<R: ResourceService>(
path_components: &R::PathComponents,
path: &str,
body: &str,
@@ -54,8 +58,24 @@ pub async fn route_propfind<R: ResourceService>(
}
// A request body is optional. If empty we MUST return all props
let propfind_self = R::Resource::parse_propfind(body).map_err(Error::XmlError)?;
let propfind_member = R::MemberType::parse_propfind(body).map_err(Error::XmlError)?;
let propfind_self: PropfindElement<<<R::Resource as Resource>::Prop as PropName>::Names> =
if !body.is_empty() {
PropfindElement::parse_str(body).map_err(Error::XmlError)?
} else {
PropfindElement {
prop: PropfindType::Allprop,
include: None,
}
};
let propfind_member: PropfindElement<<<R::MemberType as Resource>::Prop as PropName>::Names> =
if !body.is_empty() {
PropfindElement::parse_str(body).map_err(Error::XmlError)?
} else {
PropfindElement {
prop: PropfindType::Allprop,
include: None,
}
};
let mut member_responses = Vec::new();
if depth != &Depth::Zero {
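
Both sides of the hunk above implement the same WebDAV rule: an empty PROPFIND body must be treated as `<allprop/>`; they differ only in whether that fallback lives behind a `parse_propfind` helper or is spelled out per resource type. A std-only sketch of the fallback; `PropfindElement`/`PropfindType` here are simplified stand-ins, not the rustical_dav types:

#[derive(Debug, PartialEq)]
enum PropfindType {
    Allprop,
    Prop(Vec<String>),
}

#[derive(Debug, PartialEq)]
struct PropfindElement {
    prop: PropfindType,
}

fn parse_propfind(body: &str) -> PropfindElement {
    if body.is_empty() {
        // RFC 4918: a missing or empty request body means allprop.
        PropfindElement {
            prop: PropfindType::Allprop,
        }
    } else {
        // The real code XML-parses the body here; this sketch just stubs that out.
        PropfindElement {
            prop: PropfindType::Prop(vec!["displayname".to_owned()]),
        }
    }
}

fn main() {
    assert_eq!(parse_propfind("").prop, PropfindType::Allprop);
    assert_ne!(parse_propfind("<propfind/>").prop, PropfindType::Allprop);
}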

Some files were not shown because too many files have changed in this diff.