Compare commits

..

111 Commits

Author SHA1 Message Date
Lennart
d2f5f7c89b version 0.11.0 2025-12-05 15:06:01 +01:00
Lennart Kämmle
15e431ce12 Merge pull request #138 from lennart-k/feature/birthday-calendar
Feature/birthday calendar
2025-12-05 15:03:56 +01:00
Lennart
96a16951f4 sqlx prepare 2025-12-05 14:55:30 +01:00
Lennart
a32b766c0c Merge branch 'main' into feature/birthday-calendar 2025-12-05 14:51:51 +01:00
Lennart
7a101b7364 frontend: Fix cursor for anchors 2025-12-05 14:51:34 +01:00
Lennart
57275a10b4 Add birthday calendar creation to frontend 2025-12-05 14:50:02 +01:00
Lennart
af239e34bf birthday calendar store: Support manual birthday calendar creation 2025-12-05 14:49:09 +01:00
Lennart
e99b1d9123 calendar resource: Remove prop write guards 2025-12-05 14:48:35 +01:00
Lennart
e39657eb29 PROPPATCH: Fix privileges 2025-12-05 14:48:11 +01:00
Lennart
607db62859 Merge branch 'main' into feature/birthday-calendar 2025-12-05 11:47:42 +01:00
Lennart
eba377b980 update dependencies 2025-12-05 11:47:11 +01:00
Lennart
d5c1ddc590 caldav: Update test_propfind regression test 2025-11-22 18:49:32 +01:00
Lennart
a79e1901b8 test_propfind: Revert assert_eq order 2025-11-22 18:48:36 +01:00
Lennart
f29c8fa925 Merge branch 'main' into feature/birthday-calendar 2025-11-22 18:46:59 +01:00
Lennart
54f1ee0788 use similar-asserts for regression tests 2025-11-22 18:46:47 +01:00
Lennart
96f221f721 birthday_calendar: Refactor insert_birthday_calendar 2025-11-22 18:35:26 +01:00
Lennart
ba3b64a9c4 Merge branch 'main' into feature/birthday-calendar 2025-11-22 18:30:44 +01:00
Lennart
22a0337375 version 0.10.5 2025-11-17 19:14:17 +01:00
Lennart
21902e108a fix some error messages 2025-11-17 19:13:13 +01:00
Lennart
08f526fa5b Add startup routine to fix orphaned objects
fixes #145, related to #142
2025-11-17 19:11:30 +01:00
Lennart
ac73f3aaff addressbook_store: Commit import addressbooks to changelog 2025-11-17 18:35:10 +01:00
Lennart
9fdc8434db calendar import: log added events 2025-11-17 18:22:33 +01:00
Lennart
85f3d89235 version 0.10.4 2025-11-17 01:21:55 +01:00
Lennart
092604694a multiget: percent-decode hrefs 2025-11-17 01:21:20 +01:00
Lennart
8ef24668ba version 0.10.3 2025-11-14 11:02:27 +01:00
Lennart
416658d069 frontend: Fix missing getTimezones import in create-calendar-form
fixes #141
2025-11-14 11:01:59 +01:00
Lennart
80eae5db9e version 0.10.2 2025-11-09 21:39:09 +01:00
Lennart
66f541f1c7 Drop log level for 404 to info
fixes #139
2025-11-09 21:36:17 +01:00
Lennart
ea7196501e docs: Add verification for Google Search Console (not analytics) 2025-11-06 00:19:33 +01:00
Lennart
33d14a9ba0 sqlite_store: Add some more basic tests 2025-11-05 23:17:59 +01:00
Lennart
d843909084 Update Cargo.toml 2025-11-05 16:16:01 +01:00
Lennart
873b40ad10 stylesheet: Add flex-wrap to actions 2025-11-05 16:05:55 +01:00
Lennart
5588137f73 sqlx prepare 2025-11-04 17:01:54 +01:00
Lennart
7bf00da0e5 implement deleting and restoring birthday calendars 2025-11-04 16:56:17 +01:00
Lennart
be08275cd3 Merge branch 'main' into feature/birthday-calendar 2025-11-04 16:28:08 +01:00
Lennart
3a10a695f5 frontend: Only show logout button when logged in 2025-11-04 15:33:13 +01:00
Lennart
53c6e3b1f4 frontend: Update calendar,addressbook pages 2025-11-04 15:32:00 +01:00
Lennart
6838e8e379 frontend: update stylesheet 2025-11-04 15:31:35 +01:00
Lennart
9f28aaec41 frontend: Update deno dependencies 2025-11-04 15:31:18 +01:00
Lennart
381af1b877 run .sqlx prepare 2025-11-03 15:37:40 +01:00
Lennart
7ec62bc6ab attempt to fix docs build 2025-11-02 22:57:29 +01:00
Lennart
9538b68e77 version 0.10.1 2025-11-02 22:21:25 +01:00
Lennart
ea5175387b update licenses 2025-11-02 22:21:16 +01:00
Lennart
0095491a20 frontend: dumb test for timezones 2025-11-02 22:17:23 +01:00
Lennart
e9392cc00b frontend: Add dropdown for timezone selection 2025-11-02 22:08:28 +01:00
Lennart
425d10cb99 CalendarStore::is_read_only now refers to its content only and not its metadata 2025-11-02 21:07:06 +01:00
Lennart
5cdbb3b9d3 migrate birthday store to sqlite 2025-11-02 21:06:43 +01:00
Lennart
547e477eca make sure a birthday calendar will be created for each addressbook 2025-11-02 21:05:31 +01:00
Lennart
c19c3492c3 frontend: Remove birthday calendar guard 2025-11-02 20:45:58 +01:00
Lennart
5878b93d62 add birthday_calendar table migrations 2025-11-02 20:45:31 +01:00
Lennart
888591c952 add test case for converting filter to calendar query 2025-11-02 19:17:59 +01:00
Lennart
de77223170 Merge pull request #137 from lennart-k/feature/comp-filter
Re-implement comp-filter for calendar-query
2025-11-02 18:56:56 +01:00
Lennart
c42f8e5614 clippy appeasement 2025-11-02 18:42:55 +01:00
Lennart
f72559d027 caldav: Add supported-collation-set property 2025-11-02 18:33:54 +01:00
Lennart
167492318f xml: serialize: Support non-string text fields 2025-11-02 18:33:30 +01:00
Lennart
32f43951ac refactor text-match to support collations 2025-11-02 17:48:35 +01:00
Lennart
cd9993cd97 implement comp-filter matching for VTIMEZONE 2025-11-02 17:21:44 +01:00
Lennart
9f911fe5d7 prop-filter: Add time-range checking 2025-11-02 15:09:31 +01:00
Lennart
6361907152 re-implement comp-filter and add property filtering 2025-11-02 15:00:53 +01:00
Lennart
0c0be859f9 calendar object: Move occurence methods to CalendarObjectComponent and add get_property method 2025-11-02 15:00:13 +01:00
Lennart
d2c786eba6 merge main into feature/comp-filter 2025-11-02 13:10:56 +01:00
Lennart
dabddc6282 version 0.10.0 2025-11-01 21:49:44 +01:00
Lennart
76b4194b94 lift restriction on object_id and UID having to match
addresses #135
2025-11-01 21:48:37 +01:00
Lennart
db144ebcae calendarobject: Rename get_id to get_uid 2025-11-01 21:23:55 +01:00
Lennart
a53c333f1f version 0.9.14 2025-11-01 15:10:06 +01:00
Lennart
a05baea472 sqlite_store: Mark write transactions with BEGIN IMMEDIATE
Hopefully addresses SQLITE_BUSY error, see #131
2025-11-01 15:09:42 +01:00
Lennart
f34f7e420e Dockerfile: Update Rust to 1.91 2025-11-01 15:08:36 +01:00
Lennart
24ab323aa0 clippy appeasement 2025-11-01 14:21:44 +01:00
Lennart
f34f56ca89 update dependencies 2025-11-01 14:17:13 +01:00
Lennart
8c2025b674 version 0.9.13 2025-10-27 21:14:31 +01:00
Lennart
77d8f5dacc add ping endpoint and healthcheck command 2025-10-27 21:12:43 +01:00
Lennart
5d142289b3 tokio: Use multi-threaded runtime 2025-10-27 20:34:20 +01:00
Lennart
255282893a update matchit 2025-10-27 20:15:38 +01:00
Lennart
86cf490fa9 Lots of clippy appeasement 2025-10-27 20:12:21 +01:00
Lennart K
0d071d3b92 run clippy fix 2025-10-27 19:01:04 +01:00
Lennart
8ed4db5824 work on new comp-filter implementation 2025-10-27 18:59:00 +01:00
Lennart K
08041c60be clippy: Enable more warnings 2025-10-27 11:39:24 +01:00
Lennart
43d7aabf28 version 0.9.12 2025-10-21 21:06:32 +02:00
Lennart
2fc51fac66 remove duplicate statement 2025-10-21 21:04:41 +02:00
Lennart
18882b2175 version 0.9.11 2025-10-07 22:15:24 +02:00
Lennart
580922fd6b improve error output 2025-10-07 22:14:40 +02:00
Lennart
69274a9f5d chore: Update opentelemetry 2025-10-05 17:17:56 +02:00
Lennart
ef9642ae81 version 0.9.10 2025-10-02 21:05:32 +02:00
Lennart
1c192a452f oidc: Output error when provider discovery fails 2025-10-02 21:04:59 +02:00
Lennart
8c67c8c0e9 version 0.9.9 2025-09-25 19:51:41 +02:00
Lennart
0990342590 frontend: update and reduce dependencies 2025-09-25 19:50:48 +02:00
Lennart
ffef7608ac update licenses.html 2025-09-25 19:48:05 +02:00
Lennart
a28ff967e5 update Cargo.lock 2025-09-25 19:47:09 +02:00
Lennart
8bec653099 dav root: Add some new tests 2025-09-25 19:47:00 +02:00
Lennart
b0091d66d1 remove ci.yml since testing is included in cicd.yml 2025-09-23 11:47:08 +02:00
Lennart
4919514d09 dav: refactor overwrite header 2025-09-23 11:43:42 +02:00
Lennart
602c511c90 increase test coverage :D 2025-09-21 21:58:11 +02:00
Lennart
b208fbaac6 cicd: Update toolchain 2025-09-21 21:33:41 +02:00
Lennart
eef45ef612 clean up cicd 2025-09-21 21:24:18 +02:00
Lennart
dc860a9768 coverage: Exclude xml_derive 2025-09-21 21:10:56 +02:00
Lennart
dd52fd120c GitHub Workflows: Set permissions 2025-09-21 21:03:55 +02:00
Lennart
bc4c6489ff ci: Make sure whole workspace is tested 2025-09-21 20:59:25 +02:00
Lennart
944462ff5e clippy appeasement 2025-09-21 20:56:14 +02:00
Lennart
d51c44c2e7 Add some automated coverage testing 2025-09-21 20:52:31 +02:00
Lennart
8bbc03601a Enable a test for propfind responses 2025-09-21 20:40:03 +02:00
Lennart
1d2b90f7c3 xml: Sort namespaces
Fixes #104
2025-09-21 20:39:23 +02:00
Lennart
979a863b2d some calendar query refactoring 2025-09-21 20:37:24 +02:00
Lennart
660ac9b121 ical: Refactor calendar object type 2025-09-21 20:31:45 +02:00
Lennart
1e9be6c134 Dockerfile: Update Rust to 1.90 2025-09-21 20:15:07 +02:00
Lennart
b6bfb5a620 ical: Remove abstraction structs around journal and todo 2025-09-19 14:47:44 +02:00
Lennart
53f30fce3f version 0.9.8: revert to Rust 1.89 since 1.90 isn't fully online yet 2025-09-18 21:20:07 +02:00
Lennart
4592afac10 version 0.9.7 2025-09-18 21:11:44 +02:00
Lennart
e7ab7c2987 ical: Fix import UID grouping 2025-09-18 21:08:00 +02:00
Lennart
242f7b9076 calendar export: Fix overrides 2025-09-18 20:38:54 +02:00
Lennart
cb1356acad ical: Fix data model to allow calendar objects with overrides
#125
2025-09-18 20:38:37 +02:00
Lennart
55dadbb06b update Rust to 1.90 2025-09-18 16:45:48 +02:00
202 changed files with 5311 additions and 3633 deletions

View File

@@ -1,20 +0,0 @@
name: Rust CI
on:
push:
branches: ["main"]
pull_request:
branches: ["main"]
env:
CARGO_TERM_COLOR: always
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --verbose --workspace

57
.github/workflows/cicd.yml vendored Normal file
View File

@@ -0,0 +1,57 @@
name: "CICD"
on: [push, pull_request]
permissions:
contents: read
pull-requests: write
env:
CARGO_TERM_COLOR: always
jobs:
check:
name: Check
runs-on: ubuntu-latest
steps:
- run: rustup update
- name: Checkout sources
uses: actions/checkout@v4
- run: cargo check
test:
name: Test Suite
runs-on: ubuntu-latest
steps:
- run: rustup update
- name: Checkout sources
uses: actions/checkout@v4
- run: cargo test --all-features --verbose --workspace
coverage:
name: Test Coverage
runs-on: ubuntu-latest
steps:
- run: rustup update
- name: Install tarpaulin
run: cargo install cargo-tarpaulin
- name: Checkout sources
uses: actions/checkout@v4
- name: Run tarpaulin
run: cargo tarpaulin --workspace --all-features --exclude xml_derive --coveralls ${{ secrets.COVERALLS_REPO_TOKEN }}
lints:
name: Lints
runs-on: ubuntu-latest
steps:
- run: rustup update
- run: rustup component add rustfmt clippy
- name: Checkout sources
uses: actions/checkout@v4
- name: Run cargo fmt
run: cargo fmt --all -- --check
- name: Run cargo clippy
run: cargo clippy -- -D warnings

View File

@@ -17,6 +17,8 @@ jobs:
with: with:
python-version: 3.x python-version: 3.x
- run: rustup update
- run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV - run: echo "cache_id=$(date --utc '+%V')" >> $GITHUB_ENV
- name: Set up build cache - name: Set up build cache

View File

@@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "\n SELECT principal, cal_id, id, (deleted_at IS NOT NULL) AS \"deleted: bool\"\n FROM calendarobjects\n WHERE (principal, cal_id, id) NOT IN (\n SELECT DISTINCT principal, cal_id, object_id FROM calendarobjectchangelog\n )\n ;\n ",
"describe": {
"columns": [
{
"name": "principal",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "cal_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "deleted: bool",
"ordinal": 3,
"type_info": "Integer"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "053c17f3b54ae3e153137926115486eb19a801bd73a74230bcf72a9a7254824a"
}

View File

@@ -0,0 +1,32 @@
{
"db_name": "SQLite",
"query": "SELECT id, uid, ics FROM calendarobjects\n WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL\n AND (last_occurence IS NULL OR ? IS NULL OR last_occurence >= date(?))\n AND (first_occurence IS NULL OR ? IS NULL OR first_occurence <= date(?))\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "uid",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "ics",
"ordinal": 2,
"type_info": "Text"
}
],
"parameters": {
"Right": 6
},
"nullable": [
false,
false,
false
]
},
"hash": "3a29efff3d3f6e1e05595d1a2d095af5fc963572c90bd10a6616af78757f8c39"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "REPLACE INTO calendarobjects (principal, cal_id, id, ics, first_occurence, last_occurence, etag, object_type) VALUES (?, ?, ?, ?, date(?), date(?), ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 8
},
"nullable": []
},
"hash": "3e1cca532372e891ab3e604ecb79311d8cd64108d4f238db4c79e9467a3b6d2e"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "UPDATE birthday_calendars SET principal = ?, id = ?, displayname = ?, description = ?, \"order\" = ?, color = ?, timezone_id = ?, push_topic = ?\n WHERE (principal, id) = (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 10
},
"nullable": []
},
"hash": "4a05eda4e23e8652312548b179a1cc16f43768074ab9e7ab7b7783395384984e"
}

View File

@@ -1,6 +1,6 @@
{ {
"db_name": "SQLite", "db_name": "SQLite",
"query": "SELECT id, ics FROM calendarobjects WHERE (principal, cal_id, id) = (?, ?, ?) AND ((deleted_at IS NULL) OR ?)", "query": "SELECT id, uid, ics FROM calendarobjects WHERE (principal, cal_id, id) = (?, ?, ?) AND ((deleted_at IS NULL) OR ?)",
"describe": { "describe": {
"columns": [ "columns": [
{ {
@@ -9,18 +9,24 @@
"type_info": "Text" "type_info": "Text"
}, },
{ {
"name": "ics", "name": "uid",
"ordinal": 1, "ordinal": 1,
"type_info": "Text" "type_info": "Text"
},
{
"name": "ics",
"ordinal": 2,
"type_info": "Text"
} }
], ],
"parameters": { "parameters": {
"Right": 4 "Right": 4
}, },
"nullable": [ "nullable": [
false,
false, false,
false false
] ]
}, },
"hash": "543838c030550cb09d1af08adfeade8b7ce3575d92fddbc6e9582d141bc9e49d" "hash": "505ebe8e64ac709b230dce7150240965e45442aca6c5f3b3115738ef508939ed"
} }

View File

@@ -0,0 +1,74 @@
{
"db_name": "SQLite",
"query": "SELECT principal, id, displayname, description, \"order\", color, timezone_id, deleted_at, addr_synctoken, push_topic\n FROM birthday_calendars\n INNER JOIN (\n SELECT principal AS addr_principal,\n id AS addr_id,\n synctoken AS addr_synctoken\n FROM addressbooks\n ) ON (principal, id) = (addr_principal, addr_id)\n WHERE (principal, id) = (?, ?)\n AND ((deleted_at IS NULL) OR ?)\n ",
"describe": {
"columns": [
{
"name": "principal",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "displayname",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "description",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "order",
"ordinal": 4,
"type_info": "Integer"
},
{
"name": "color",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "timezone_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "deleted_at",
"ordinal": 7,
"type_info": "Datetime"
},
{
"name": "addr_synctoken",
"ordinal": 8,
"type_info": "Integer"
},
{
"name": "push_topic",
"ordinal": 9,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
false,
true,
true,
false,
true,
true,
true,
false,
false
]
},
"hash": "525fc4eab8a0f3eacff7e3c78ce809943f817abf8c8f9ae50073924bccdea2dc"
}

View File

@@ -1,12 +0,0 @@
{
"db_name": "SQLite",
"query": "INSERT INTO calendarobjects (principal, cal_id, id, ics, first_occurence, last_occurence, etag, object_type) VALUES (?, ?, ?, ?, date(?), date(?), ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 8
},
"nullable": []
},
"hash": "6327bee90e5df01536a0ddb15adcc37af3027f6902aa3786365c5ab2fbf06bda"
}

View File

@@ -0,0 +1,74 @@
{
"db_name": "SQLite",
"query": "SELECT principal, id, displayname, description, \"order\", color, timezone_id, deleted_at, addr_synctoken, push_topic\n FROM birthday_calendars\n INNER JOIN (\n SELECT principal AS addr_principal,\n id AS addr_id,\n synctoken AS addr_synctoken\n FROM addressbooks\n ) ON (principal, id) = (addr_principal, addr_id)\n WHERE principal = ?\n AND (\n (deleted_at IS NULL AND NOT ?) -- not deleted, want not deleted\n OR (deleted_at IS NOT NULL AND ?) -- deleted, want deleted\n )\n ",
"describe": {
"columns": [
{
"name": "principal",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "displayname",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "description",
"ordinal": 3,
"type_info": "Text"
},
{
"name": "order",
"ordinal": 4,
"type_info": "Integer"
},
{
"name": "color",
"ordinal": 5,
"type_info": "Text"
},
{
"name": "timezone_id",
"ordinal": 6,
"type_info": "Text"
},
{
"name": "deleted_at",
"ordinal": 7,
"type_info": "Datetime"
},
{
"name": "addr_synctoken",
"ordinal": 8,
"type_info": "Integer"
},
{
"name": "push_topic",
"ordinal": 9,
"type_info": "Text"
}
],
"parameters": {
"Right": 3
},
"nullable": [
false,
false,
true,
true,
false,
true,
true,
true,
false,
false
]
},
"hash": "66d57f2c99ef37b383a478aff99110e1efbc7ce9332f10da4fa69f7594fb7455"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "UPDATE birthday_calendars SET deleted_at = NULL WHERE (principal, id) = (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "6c039308ad2ec29570ab492d7a0e85fb79c0a4d3b882b74ff1c2786c12324896"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO birthday_calendars (principal, id, displayname, description, \"order\", color, push_topic)\n VALUES (?, ?, ?, ?, ?, ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 7
},
"nullable": []
},
"hash": "72c7c67f4952ad669ecd54d96bbcb717815081f74575f0a65987163faf9fe30a"
}

View File

@@ -1,6 +1,6 @@
{ {
"db_name": "SQLite", "db_name": "SQLite",
"query": "SELECT id, ics FROM calendarobjects WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL", "query": "SELECT id, uid, ics FROM calendarobjects WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL",
"describe": { "describe": {
"columns": [ "columns": [
{ {
@@ -9,18 +9,24 @@
"type_info": "Text" "type_info": "Text"
}, },
{ {
"name": "ics", "name": "uid",
"ordinal": 1, "ordinal": 1,
"type_info": "Text" "type_info": "Text"
},
{
"name": "ics",
"ordinal": 2,
"type_info": "Text"
} }
], ],
"parameters": { "parameters": {
"Right": 2 "Right": 2
}, },
"nullable": [ "nullable": [
false,
false, false,
false false
] ]
}, },
"hash": "54c9c0e36a52e6963f11c6aa27f13aafb4204b8aa34b664fd825bd447db80e86" "hash": "804ed2a4a7032e9605d1871297498f5a96de0fc816ce660c705fb28318be0d42"
} }

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "UPDATE birthday_calendars SET deleted_at = datetime() WHERE (principal, id) = (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "83f0aaf406785e323ac12019ac24f603c53125a1b2326f324c1e2d7b6c690adc"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "REPLACE INTO calendarobjects (principal, cal_id, id, uid, ics, first_occurence, last_occurence, etag, object_type) VALUES (?, ?, ?, ?, ?, date(?), date(?), ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 9
},
"nullable": []
},
"hash": "a68a1b96189b854a7ba2a3cd866ba583af5ad84bc1cd8b20cb805e9ce3bad820"
}

View File

@@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "\n SELECT principal, addressbook_id, id, (deleted_at IS NOT NULL) AS \"deleted: bool\"\n FROM addressobjects\n WHERE (principal, addressbook_id, id) NOT IN (\n SELECT DISTINCT principal, addressbook_id, object_id FROM addressobjectchangelog\n )\n ;\n ",
"describe": {
"columns": [
{
"name": "principal",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "addressbook_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "deleted: bool",
"ordinal": 3,
"type_info": "Integer"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "c138b1143ac04af4930266ffae0990e82005911c11a683ad565e92335e085f4d"
}

View File

@@ -1,26 +0,0 @@
{
"db_name": "SQLite",
"query": "SELECT id, ics FROM calendarobjects\n WHERE principal = ? AND cal_id = ? AND deleted_at IS NULL\n AND (last_occurence IS NULL OR ? IS NULL OR last_occurence >= date(?))\n AND (first_occurence IS NULL OR ? IS NULL OR first_occurence <= date(?))\n ",
"describe": {
"columns": [
{
"name": "id",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "ics",
"ordinal": 1,
"type_info": "Text"
}
],
"parameters": {
"Right": 6
},
"nullable": [
false,
false
]
},
"hash": "c550dbf3d5ce7069f28d767ea9045e477ef8d29d6186851760757a06dec42339"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "DELETE FROM birthday_calendars WHERE (principal, id) = (?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 2
},
"nullable": []
},
"hash": "cadc4ac16b7ac22b71c91ab36ad9dbb1dec943798d795fcbc811f4c651fea02a"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "INSERT INTO calendarobjects (principal, cal_id, id, uid, ics, first_occurence, last_occurence, etag, object_type) VALUES (?, ?, ?, ?, ?, date(?), date(?), ?, ?)",
"describe": {
"columns": [],
"parameters": {
"Right": 9
},
"nullable": []
},
"hash": "d498a758ed707408b00b7d2675250ea739a681ce1f009f05e97f2e101bd7e556"
}

1164
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -2,7 +2,8 @@
members = ["crates/*"] members = ["crates/*"]
[workspace.package] [workspace.package]
version = "0.9.6" version = "0.11.0"
rust-version = "1.91"
edition = "2024" edition = "2024"
description = "A CalDAV server" description = "A CalDAV server"
documentation = "https://lennart-k.github.io/rustical/" documentation = "https://lennart-k.github.io/rustical/"
@@ -12,6 +13,7 @@ license = "AGPL-3.0-or-later"
[package] [package]
name = "rustical" name = "rustical"
version.workspace = true version.workspace = true
rust-version.workspace = true
edition.workspace = true edition.workspace = true
description.workspace = true description.workspace = true
repository.workspace = true repository.workspace = true
@@ -35,7 +37,18 @@ opentelemetry = [
debug = 0 debug = 0
[workspace.dependencies] [workspace.dependencies]
matchit = "0.8" rustical_dav = { path = "./crates/dav/" }
rustical_dav_push = { path = "./crates/dav_push/" }
rustical_store = { path = "./crates/store/" }
rustical_store_sqlite = { path = "./crates/store_sqlite/" }
rustical_caldav = { path = "./crates/caldav/" }
rustical_carddav = { path = "./crates/carddav/" }
rustical_frontend = { path = "./crates/frontend/" }
rustical_xml = { path = "./crates/xml/" }
rustical_oidc = { path = "./crates/oidc/" }
rustical_ical = { path = "./crates/ical/" }
matchit = "0.9"
uuid = { version = "1.11", features = ["v4", "fast-rng"] } uuid = { version = "1.11", features = ["v4", "fast-rng"] }
async-trait = "0.1" async-trait = "0.1"
axum = "0.8" axum = "0.8"
@@ -48,7 +61,6 @@ pbkdf2 = { version = "0.12", features = ["simple"] }
rand_core = { version = "0.9", features = ["std"] } rand_core = { version = "0.9", features = ["std"] }
chrono = { version = "0.4", features = ["serde"] } chrono = { version = "0.4", features = ["serde"] }
regex = "1.10" regex = "1.10"
lazy_static = "1.5"
rstest = "0.26" rstest = "0.26"
rstest_reuse = "0.7" rstest_reuse = "0.7"
sha2 = "0.10" sha2 = "0.10"
@@ -109,20 +121,10 @@ tower-http = { version = "0.6", features = [
"catch-panic", "catch-panic",
] } ] }
percent-encoding = "2.3" percent-encoding = "2.3"
rustical_dav = { path = "./crates/dav/" }
rustical_dav_push = { path = "./crates/dav_push/" }
rustical_store = { path = "./crates/store/" }
rustical_store_sqlite = { path = "./crates/store_sqlite/" }
rustical_caldav = { path = "./crates/caldav/" }
rustical_carddav = { path = "./crates/carddav/" }
rustical_frontend = { path = "./crates/frontend/" }
rustical_xml = { path = "./crates/xml/" }
rustical_oidc = { path = "./crates/oidc/" }
rustical_ical = { path = "./crates/ical/" }
chrono-tz = "0.10" chrono-tz = "0.10"
chrono-humanize = "0.2" chrono-humanize = "0.2"
rand = "0.9" rand = "0.9"
axum-extra = { version = "0.10", features = ["typed-header"] } axum-extra = { version = "0.12", features = ["typed-header"] }
rrule = "0.14" rrule = "0.14"
argon2 = "0.5" argon2 = "0.5"
rpassword = "7.3" rpassword = "7.3"
@@ -131,7 +133,7 @@ syn = { version = "2.0", features = ["full"] }
quote = "1.0" quote = "1.0"
proc-macro2 = "1.0" proc-macro2 = "1.0"
heck = "0.5" heck = "0.5"
darling = "0.21" darling = "0.23"
reqwest = { version = "0.12", features = [ reqwest = { version = "0.12", features = [
"rustls-tls", "rustls-tls",
"charset", "charset",
@@ -139,40 +141,41 @@ reqwest = { version = "0.12", features = [
], default-features = false } ], default-features = false }
openidconnect = "4.0" openidconnect = "4.0"
clap = { version = "4.5", features = ["derive", "env"] } clap = { version = "4.5", features = ["derive", "env"] }
matchit-serde = { git = "https://github.com/lennart-k/matchit-serde", rev = "f0591d13" } matchit-serde = { git = "https://github.com/lennart-k/matchit-serde", rev = "e18e65d7" }
vtimezones-rs = "0.2" vtimezones-rs = "0.2"
ece = { version = "2.3", default-features = false, features = [ ece = { version = "2.3", default-features = false, features = [
"backend-openssl", "backend-openssl",
] } ] }
openssl = { version = "0.10", features = ["vendored"] } openssl = { version = "0.10", features = ["vendored"] }
async-std = { version = "1.13", features = ["attributes"] } async-std = { version = "1.13", features = ["attributes"] }
similar-asserts = "1.7"
[dependencies] [dependencies]
rustical_store = { workspace = true } rustical_store.workspace = true
rustical_store_sqlite = { workspace = true } rustical_store_sqlite.workspace = true
rustical_caldav = { workspace = true } rustical_caldav.workspace = true
rustical_carddav.workspace = true rustical_carddav.workspace = true
rustical_frontend = { workspace = true } rustical_frontend.workspace = true
toml = { workspace = true } toml.workspace = true
serde = { workspace = true } serde.workspace = true
tokio = { workspace = true } tokio.workspace = true
tracing = { workspace = true } tracing.workspace = true
anyhow = { workspace = true } anyhow.workspace = true
clap.workspace = true clap.workspace = true
sqlx = { workspace = true } sqlx.workspace = true
async-trait = { workspace = true } async-trait.workspace = true
uuid.workspace = true uuid.workspace = true
axum.workspace = true axum.workspace = true
opentelemetry = { version = "0.30", optional = true } opentelemetry = { version = "0.31", optional = true }
opentelemetry-otlp = { version = "0.30", optional = true, features = [ opentelemetry-otlp = { version = "0.31", optional = true, features = [
"grpc-tonic", "grpc-tonic",
] } ] }
opentelemetry_sdk = { version = "0.30", features = [ opentelemetry_sdk = { version = "0.31", features = [
"rt-tokio", "rt-tokio",
], optional = true } ], optional = true }
opentelemetry-semantic-conventions = { version = "0.30", optional = true } opentelemetry-semantic-conventions = { version = "0.31", optional = true }
tracing-opentelemetry = { version = "0.31", optional = true } tracing-opentelemetry = { version = "0.32", optional = true }
tracing-subscriber = { version = "0.3", features = [ tracing-subscriber = { version = "0.3", features = [
"env-filter", "env-filter",
"fmt", "fmt",

View File

@@ -1,4 +1,4 @@
FROM --platform=$BUILDPLATFORM rust:1.89-alpine AS chef FROM --platform=$BUILDPLATFORM rust:1.91-alpine AS chef
ARG TARGETPLATFORM ARG TARGETPLATFORM
ARG BUILDPLATFORM ARG BUILDPLATFORM
@@ -47,3 +47,5 @@ ENV RUSTICAL_DATA_STORE__SQLITE__DB_URL=/var/lib/rustical/db.sqlite3
LABEL org.opencontainers.image.authors="Lennart K github.com/lennart-k" LABEL org.opencontainers.image.authors="Lennart K github.com/lennart-k"
LABEL org.opencontainers.image.licenses="AGPL-3.0-or-later" LABEL org.opencontainers.image.licenses="AGPL-3.0-or-later"
EXPOSE 4000 EXPOSE 4000
HEALTHCHECK --interval=30s --timeout=30s --start-period=3s --retries=3 CMD ["/usr/local/bin/rustical", "health"]

View File

@@ -12,3 +12,6 @@ docs:
docs-dev: docs-dev:
mkdocs serve mkdocs serve
coverage:
cargo tarpaulin --workspace --exclude xml_derive

View File

@@ -1,6 +1,7 @@
[package] [package]
name = "rustical_caldav" name = "rustical_caldav"
version.workspace = true version.workspace = true
rust-version.workspace = true
edition.workspace = true edition.workspace = true
description.workspace = true description.workspace = true
repository.workspace = true repository.workspace = true
@@ -17,21 +18,21 @@ serde_json.workspace = true
axum.workspace = true axum.workspace = true
axum-extra.workspace = true axum-extra.workspace = true
tower.workspace = true tower.workspace = true
async-trait = { workspace = true } async-trait.workspace = true
thiserror = { workspace = true } thiserror.workspace = true
quick-xml = { workspace = true } quick-xml.workspace = true
tracing = { workspace = true } tracing.workspace = true
futures-util = { workspace = true } futures-util.workspace = true
derive_more = { workspace = true } derive_more.workspace = true
base64 = { workspace = true } base64.workspace = true
serde = { workspace = true } serde.workspace = true
tokio = { workspace = true } tokio.workspace = true
url = { workspace = true } url.workspace = true
rustical_dav = { workspace = true } rustical_dav.workspace = true
rustical_store = { workspace = true } rustical_store.workspace = true
chrono = { workspace = true } chrono.workspace = true
chrono-tz = { workspace = true } chrono-tz.workspace = true
sha2 = { workspace = true } sha2.workspace = true
ical.workspace = true ical.workspace = true
percent-encoding.workspace = true percent-encoding.workspace = true
rustical_xml.workspace = true rustical_xml.workspace = true
@@ -44,3 +45,4 @@ tower-http.workspace = true
strum.workspace = true strum.workspace = true
strum_macros.workspace = true strum_macros.workspace = true
vtimezones-rs.workspace = true vtimezones-rs.workspace = true
similar-asserts.workspace = true

View File

@@ -8,7 +8,7 @@ use http::{HeaderValue, Method, StatusCode, header};
use ical::generator::{Emitter, IcalCalendarBuilder}; use ical::generator::{Emitter, IcalCalendarBuilder};
use ical::property::Property; use ical::property::Property;
use percent_encoding::{CONTROLS, utf8_percent_encode}; use percent_encoding::{CONTROLS, utf8_percent_encode};
use rustical_ical::{CalendarObjectComponent, EventObject, JournalObject, TodoObject}; use rustical_ical::{CalendarObjectComponent, EventObject};
use rustical_store::{CalendarStore, SubscriptionStore, auth::Principal}; use rustical_store::{CalendarStore, SubscriptionStore, auth::Principal};
use std::collections::HashMap; use std::collections::HashMap;
use std::str::FromStr; use std::str::FromStr;
@@ -32,11 +32,6 @@ pub async fn route_get<C: CalendarStore, S: SubscriptionStore>(
return Err(crate::Error::Unauthorized); return Err(crate::Error::Unauthorized);
} }
let calendar = cal_store
.get_calendar(&principal, &calendar_id, true)
.await?;
let mut timezones = HashMap::new();
let mut vtimezones = HashMap::new(); let mut vtimezones = HashMap::new();
let objects = cal_store.get_objects(&principal, &calendar_id).await?; let objects = cal_store.get_objects(&principal, &calendar_id).await?;
@@ -68,19 +63,24 @@ pub async fn route_get<C: CalendarStore, S: SubscriptionStore>(
for object in &objects { for object in &objects {
vtimezones.extend(object.get_vtimezones()); vtimezones.extend(object.get_vtimezones());
match object.get_data() { match object.get_data() {
CalendarObjectComponent::Event(EventObject { CalendarObjectComponent::Event(EventObject { event, .. }, overrides) => {
event,
timezones: object_timezones,
..
}) => {
timezones.extend(object_timezones);
ical_calendar_builder = ical_calendar_builder.add_event(event.clone()); ical_calendar_builder = ical_calendar_builder.add_event(event.clone());
for ev_override in overrides {
ical_calendar_builder =
ical_calendar_builder.add_event(ev_override.event.clone());
}
} }
CalendarObjectComponent::Todo(TodoObject(todo)) => { CalendarObjectComponent::Todo(todo, overrides) => {
ical_calendar_builder = ical_calendar_builder.add_todo(todo.clone()); ical_calendar_builder = ical_calendar_builder.add_todo(todo.clone());
for ev_override in overrides {
ical_calendar_builder = ical_calendar_builder.add_todo(ev_override.clone());
}
} }
CalendarObjectComponent::Journal(JournalObject(journal)) => { CalendarObjectComponent::Journal(journal, overrides) => {
ical_calendar_builder = ical_calendar_builder.add_journal(journal.clone()); ical_calendar_builder = ical_calendar_builder.add_journal(journal.clone());
for ev_override in overrides {
ical_calendar_builder = ical_calendar_builder.add_journal(ev_override.clone());
}
} }
} }
} }

View File

@@ -22,7 +22,7 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
Path((principal, cal_id)): Path<(String, String)>, Path((principal, cal_id)): Path<(String, String)>,
user: Principal, user: Principal,
State(resource_service): State<CalendarResourceService<C, S>>, State(resource_service): State<CalendarResourceService<C, S>>,
overwrite: Overwrite, Overwrite(overwrite): Overwrite,
body: String, body: String,
) -> Result<Response, Error> { ) -> Result<Response, Error> {
if !user.is_principal(&principal) { if !user.is_principal(&principal) {
@@ -45,13 +45,13 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
// Extract calendar metadata // Extract calendar metadata
let displayname = cal let displayname = cal
.get_property("X-WR-CALNAME") .get_property("X-WR-CALNAME")
.and_then(|prop| prop.value.to_owned()); .and_then(|prop| prop.value.clone());
let description = cal let description = cal
.get_property("X-WR-CALDESC") .get_property("X-WR-CALDESC")
.and_then(|prop| prop.value.to_owned()); .and_then(|prop| prop.value.clone());
let timezone_id = cal let timezone_id = cal
.get_property("X-WR-TIMEZONE") .get_property("X-WR-TIMEZONE")
.and_then(|prop| prop.value.to_owned()); .and_then(|prop| prop.value.clone());
// These properties should not appear in the expanded calendar objects // These properties should not appear in the expanded calendar objects
cal.remove_property("X-WR-CALNAME"); cal.remove_property("X-WR-CALNAME");
cal.remove_property("X-WR-CALDESC"); cal.remove_property("X-WR-CALDESC");
@@ -82,7 +82,7 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
let objects = expanded_cals let objects = expanded_cals
.into_iter() .into_iter()
.map(|cal| cal.generate()) .map(|cal| cal.generate())
.map(CalendarObject::from_ics) .map(|ics| CalendarObject::from_ics(ics, None))
.collect::<Result<Vec<_>, _>>()?; .collect::<Result<Vec<_>, _>>()?;
let new_cal = Calendar { let new_cal = Calendar {
principal, principal,
@@ -103,7 +103,7 @@ pub async fn route_import<C: CalendarStore, S: SubscriptionStore>(
let cal_store = resource_service.cal_store; let cal_store = resource_service.cal_store;
cal_store cal_store
.import_calendar(new_cal, objects, overwrite.is_true()) .import_calendar(new_cal, objects, overwrite)
.await?; .await?;
Ok(StatusCode::OK.into_response()) Ok(StatusCode::OK.into_response())

View File

@@ -79,8 +79,8 @@ pub async fn route_mkcalendar<C: CalendarStore, S: SubscriptionStore>(
_ => unreachable!("We never call with another method"), _ => unreachable!("We never call with another method"),
}; };
if let Some("") = request.displayname.as_deref() { if request.displayname.as_deref() == Some("") {
request.displayname = None request.displayname = None;
} }
let timezone_id = if let Some(tzid) = request.calendar_timezone_id { let timezone_id = if let Some(tzid) = request.calendar_timezone_id {
@@ -89,17 +89,12 @@ pub async fn route_mkcalendar<C: CalendarStore, S: SubscriptionStore>(
// TODO: Proper error (calendar-timezone precondition) // TODO: Proper error (calendar-timezone precondition)
let calendar = IcalParser::new(tz.as_bytes()) let calendar = IcalParser::new(tz.as_bytes())
.next() .next()
.ok_or(rustical_dav::Error::BadRequest( .ok_or_else(|| rustical_dav::Error::BadRequest("No timezone data provided".to_owned()))?
"No timezone data provided".to_owned(),
))?
.map_err(|_| rustical_dav::Error::BadRequest("No timezone data provided".to_owned()))?; .map_err(|_| rustical_dav::Error::BadRequest("No timezone data provided".to_owned()))?;
let timezone = calendar let timezone = calendar.timezones.first().ok_or_else(|| {
.timezones rustical_dav::Error::BadRequest("No timezone data provided".to_owned())
.first() })?;
.ok_or(rustical_dav::Error::BadRequest(
"No timezone data provided".to_owned(),
))?;
let timezone: chrono_tz::Tz = timezone let timezone: chrono_tz::Tz = timezone
.try_into() .try_into()
.map_err(|_| rustical_dav::Error::BadRequest("No timezone data provided".to_owned()))?; .map_err(|_| rustical_dav::Error::BadRequest("No timezone data provided".to_owned()))?;
@@ -110,8 +105,8 @@ pub async fn route_mkcalendar<C: CalendarStore, S: SubscriptionStore>(
}; };
let calendar = Calendar { let calendar = Calendar {
id: cal_id.to_owned(), id: cal_id.clone(),
principal: principal.to_owned(), principal: principal.clone(),
meta: CalendarMetadata { meta: CalendarMetadata {
order: request.calendar_order.unwrap_or(0), order: request.calendar_order.unwrap_or(0),
displayname: request.displayname, displayname: request.displayname,
@@ -123,14 +118,16 @@ pub async fn route_mkcalendar<C: CalendarStore, S: SubscriptionStore>(
synctoken: 0, synctoken: 0,
subscription_url: request.source.map(|href| href.href), subscription_url: request.source.map(|href| href.href),
push_topic: uuid::Uuid::new_v4().to_string(), push_topic: uuid::Uuid::new_v4().to_string(),
components: request components: request.supported_calendar_component_set.map_or_else(
.supported_calendar_component_set || {
.map(Into::into) vec![
.unwrap_or(vec![ CalendarObjectType::Event,
CalendarObjectType::Event, CalendarObjectType::Todo,
CalendarObjectType::Todo, CalendarObjectType::Journal,
CalendarObjectType::Journal, ]
]), },
Into::into,
),
}; };
cal_store.insert_calendar(calendar).await?; cal_store.insert_calendar(calendar).await?;

View File

@@ -49,12 +49,12 @@ pub async fn route_post<C: CalendarStore, S: SubscriptionStore>(
}; };
let subscription = Subscription { let subscription = Subscription {
id: sub_id.to_owned(), id: sub_id.clone(),
push_resource: request push_resource: request
.subscription .subscription
.web_push_subscription .web_push_subscription
.push_resource .push_resource
.to_owned(), .clone(),
topic: calendar_resource.cal.push_topic, topic: calendar_resource.cal.push_topic,
expiration: expires.naive_local(), expiration: expires.naive_local(),
public_key: request public_key: request

View File

@@ -4,10 +4,10 @@ use rustical_ical::CalendarObject;
use rustical_store::CalendarStore; use rustical_store::CalendarStore;
use rustical_xml::XmlDeserialize; use rustical_xml::XmlDeserialize;
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)] #[allow(dead_code)]
// <!ELEMENT calendar-query ((DAV:allprop | DAV:propname | DAV:prop)?, href+)> // <!ELEMENT calendar-query ((DAV:allprop | DAV:propname | DAV:prop)?, href+)>
pub(crate) struct CalendarMultigetRequest { pub struct CalendarMultigetRequest {
#[xml(ty = "untagged")] #[xml(ty = "untagged")]
pub(crate) prop: PropfindType<CalendarObjectPropWrapperName>, pub(crate) prop: PropfindType<CalendarObjectPropWrapperName>,
#[xml(flatten)] #[xml(flatten)]
@@ -26,21 +26,21 @@ pub async fn get_objects_calendar_multiget<C: CalendarStore>(
let mut not_found = vec![]; let mut not_found = vec![];
for href in &cal_query.href { for href in &cal_query.href {
if let Some(filename) = href.strip_prefix(path) { if let Ok(href) = percent_encoding::percent_decode_str(href).decode_utf8()
let filename = filename.trim_start_matches("/"); && let Some(filename) = href.strip_prefix(path)
{
let filename = filename.trim_start_matches('/');
if let Some(object_id) = filename.strip_suffix(".ics") { if let Some(object_id) = filename.strip_suffix(".ics") {
match store.get_object(principal, cal_id, object_id, false).await { match store.get_object(principal, cal_id, object_id, false).await {
Ok(object) => result.push(object), Ok(object) => result.push(object),
Err(rustical_store::Error::NotFound) => not_found.push(href.to_owned()), Err(rustical_store::Error::NotFound) => not_found.push(href.to_string()),
Err(err) => return Err(err.into()), Err(err) => return Err(err.into()),
}; }
} else { } else {
not_found.push(href.to_owned()); not_found.push(href.to_string());
continue;
} }
} else { } else {
not_found.push(href.to_owned()); not_found.push(href.to_owned());
continue;
} }
} }

View File

@@ -1,308 +0,0 @@
use crate::{Error, calendar_object::CalendarObjectPropWrapperName};
use rustical_dav::xml::PropfindType;
use rustical_ical::{CalendarObject, UtcDateTime};
use rustical_store::{CalendarStore, calendar_store::CalendarQuery};
use rustical_xml::XmlDeserialize;
use std::ops::Deref;
/// Optional start/end bounds of a CALDAV:time-range filter.
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
pub(crate) struct TimeRangeElement {
    #[xml(ty = "attr")]
    pub(crate) start: Option<UtcDateTime>,
    #[xml(ty = "attr")]
    pub(crate) end: Option<UtcDateTime>,
}

/// Filter on a parameter of a property.
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// https://www.rfc-editor.org/rfc/rfc4791#section-9.7.3
struct ParamFilterElement {
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    is_not_defined: Option<()>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    text_match: Option<TextMatchElement>,
    #[xml(ty = "attr")]
    name: String,
}

/// CALDAV:text-match element (collation plus optional negation flag).
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
struct TextMatchElement {
    #[xml(ty = "attr")]
    collation: String,
    #[xml(ty = "attr")]
    // "yes" or "no", default: "no"
    negate_condition: Option<String>,
}

/// Filter on a property of a component.
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// https://www.rfc-editor.org/rfc/rfc4791#section-9.7.2
pub(crate) struct PropFilterElement {
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    is_not_defined: Option<()>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    time_range: Option<TimeRangeElement>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    text_match: Option<TextMatchElement>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
    param_filter: Vec<ParamFilterElement>,
    #[xml(ty = "attr")]
    name: String,
}

/// Recursive filter on a calendar component; `name` selects the component type
/// (e.g. "VCALENDAR", "VEVENT").
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.1
pub(crate) struct CompFilterElement {
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) is_not_defined: Option<()>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) time_range: Option<TimeRangeElement>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
    pub(crate) prop_filter: Vec<PropFilterElement>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
    pub(crate) comp_filter: Vec<CompFilterElement>,
    #[xml(ty = "attr")]
    pub(crate) name: String,
}
impl CompFilterElement {
    /// Match this filter against the top-level VCALENDAR of `cal_object`.
    // match the VCALENDAR part
    pub fn matches_root(&self, cal_object: &CalendarObject) -> bool {
        let comp_vcal = self.name == "VCALENDAR";
        match (self.is_not_defined, comp_vcal) {
            // Client wants VCALENDAR to not exist but we are a VCALENDAR
            (Some(()), true) => return false,
            // Client is asking for something different than a vcalendar
            (None, false) => return false,
            // NOTE(review): (Some(()), false) falls through — a negated filter on a
            // non-VCALENDAR name still evaluates the sub-filters below; confirm intended.
            _ => {}
        };
        if self.time_range.is_some() {
            // <time-range> should be applied on VEVENT/VTODO but not on VCALENDAR
            return false;
        }
        // TODO: Implement prop-filter at some point
        // Apply sub-comp-filters on VEVENT/VTODO/VJOURNAL component
        if self
            .comp_filter
            .iter()
            .all(|filter| filter.matches(cal_object))
        {
            return true;
        }
        false
    }

    /// Match this filter against the object's inner component.
    // match the VEVENT/VTODO/VJOURNAL part
    pub fn matches(&self, cal_object: &CalendarObject) -> bool {
        let comp_name_matches = self.name == cal_object.get_component_name();
        match (self.is_not_defined, comp_name_matches) {
            // Filter demands this component type to be absent, but it is present
            (Some(()), true) => return false,
            // Filter targets a different component type than this object's
            (None, false) => return false,
            _ => {}
        };
        // TODO: Implement prop-filter (and comp-filter?) at some point
        if let Some(time_range) = &self.time_range {
            // Reject when the object's last occurrence ends before the range start
            if let Some(start) = &time_range.start
                && let Some(last_occurence) = cal_object.get_last_occurence().unwrap_or(None)
                && start.deref() > &last_occurence.utc()
            {
                return false;
            }
            // ...or its first occurrence begins after the range end
            if let Some(end) = &time_range.end
                && let Some(first_occurence) = cal_object.get_first_occurence().unwrap_or(None)
                && end.deref() < &first_occurence.utc()
            {
                return false;
            }
        }
        true
    }
}
/// Root CALDAV:filter element wrapping the VCALENDAR comp-filter.
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// https://datatracker.ietf.org/doc/html/rfc4791#section-9.7
pub(crate) struct FilterElement {
    // This comp-filter matches on VCALENDAR
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) comp_filter: CompFilterElement,
}

impl FilterElement {
    /// Whether `cal_object` passes this filter.
    pub fn matches(&self, cal_object: &CalendarObject) -> bool {
        self.comp_filter.matches_root(cal_object)
    }
}

/// Derive a coarse store-level time window from the filter so the store can
/// pre-select candidate objects; precise filtering then happens in memory.
impl From<&FilterElement> for CalendarQuery {
    fn from(value: &FilterElement) -> Self {
        let comp_filter_vcalendar = &value.comp_filter;
        for comp_filter in comp_filter_vcalendar.comp_filter.iter() {
            // A calendar object cannot contain both VEVENT and VTODO, so we only have to handle
            // whatever we get first
            if matches!(comp_filter.name.as_str(), "VEVENT" | "VTODO")
                && let Some(time_range) = &comp_filter.time_range
            {
                let start = time_range.start.as_ref().map(|start| start.date_naive());
                let end = time_range.end.as_ref().map(|end| end.date_naive());
                return CalendarQuery {
                    time_start: start,
                    time_end: end,
                };
            }
        }
        Default::default()
    }
}

/// Parsed CALDAV:calendar-query REPORT request body.
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// <!ELEMENT calendar-query ((DAV:allprop | DAV:propname | DAV:prop)?, filter, timezone?)>
pub struct CalendarQueryRequest {
    #[xml(ty = "untagged")]
    pub prop: PropfindType<CalendarObjectPropWrapperName>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) filter: Option<FilterElement>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) timezone: Option<String>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) timezone_id: Option<String>,
}

impl From<&CalendarQueryRequest> for CalendarQuery {
    // No filter means an unbounded query.
    fn from(value: &CalendarQueryRequest) -> Self {
        value
            .filter
            .as_ref()
            .map(CalendarQuery::from)
            .unwrap_or_default()
    }
}
/// Fetch the objects of a calendar that satisfy `cal_query`.
///
/// The store performs a coarse pre-selection (time-window only); the precise
/// RFC 4791 filter is then applied in memory.
///
/// # Errors
/// Propagates store errors from the underlying `calendar_query` call.
pub async fn get_objects_calendar_query<C: CalendarStore>(
    cal_query: &CalendarQueryRequest,
    principal: &str,
    cal_id: &str,
    store: &C,
) -> Result<Vec<CalendarObject>, Error> {
    let candidates = store
        .calendar_query(principal, cal_id, cal_query.into())
        .await?;
    Ok(match &cal_query.filter {
        // Keep only objects that pass the full filter
        Some(filter) => candidates
            .into_iter()
            .filter(|object| filter.matches(object))
            .collect(),
        // No filter: every candidate is returned
        None => candidates,
    })
}
// Regression tests for parsing a CALDAV:calendar-query REPORT request.
#[cfg(test)]
mod tests {
    use rustical_dav::xml::PropElement;
    use rustical_xml::XmlDocument;

    use crate::{
        calendar::methods::report::{
            ReportRequest,
            calendar_query::{
                CalendarQueryRequest, CompFilterElement, FilterElement, ParamFilterElement,
                PropFilterElement, TextMatchElement,
            },
        },
        calendar_object::{CalendarObjectPropName, CalendarObjectPropWrapperName},
    };

    // Example request taken from RFC 4791, section 7.8.7.
    #[test]
    fn calendar_query_7_8_7() {
        const INPUT: &str = r#"
<?xml version="1.0" encoding="utf-8" ?>
<C:calendar-query xmlns:C="urn:ietf:params:xml:ns:caldav">
<D:prop xmlns:D="DAV:">
<D:getetag/>
<C:calendar-data/>
</D:prop>
<C:filter>
<C:comp-filter name="VCALENDAR">
<C:comp-filter name="VEVENT">
<C:prop-filter name="ATTENDEE">
<C:text-match collation="i;ascii-casemap">mailto:lisa@example.com</C:text-match>
<C:param-filter name="PARTSTAT">
<C:text-match collation="i;ascii-casemap">NEEDS-ACTION</C:text-match>
</C:param-filter>
</C:prop-filter>
</C:comp-filter>
</C:comp-filter>
</C:filter>
</C:calendar-query>
"#;
        let report = ReportRequest::parse_str(INPUT).unwrap();
        let calendar_query: CalendarQueryRequest =
            if let ReportRequest::CalendarQuery(query) = report {
                query
            } else {
                panic!()
            };
        // Compare against the fully spelled-out expected request structure.
        assert_eq!(
            calendar_query,
            CalendarQueryRequest {
                prop: rustical_dav::xml::PropfindType::Prop(PropElement(
                    vec![
                        CalendarObjectPropWrapperName::CalendarObject(
                            CalendarObjectPropName::Getetag,
                        ),
                        CalendarObjectPropWrapperName::CalendarObject(
                            CalendarObjectPropName::CalendarData(Default::default())
                        ),
                    ],
                    vec![]
                )),
                filter: Some(FilterElement {
                    comp_filter: CompFilterElement {
                        is_not_defined: None,
                        time_range: None,
                        prop_filter: vec![],
                        comp_filter: vec![CompFilterElement {
                            prop_filter: vec![PropFilterElement {
                                name: "ATTENDEE".to_owned(),
                                text_match: Some(TextMatchElement {
                                    collation: "i;ascii-casemap".to_owned(),
                                    negate_condition: None
                                }),
                                is_not_defined: None,
                                param_filter: vec![ParamFilterElement {
                                    is_not_defined: None,
                                    name: "PARTSTAT".to_owned(),
                                    text_match: Some(TextMatchElement {
                                        collation: "i;ascii-casemap".to_owned(),
                                        negate_condition: None
                                    }),
                                }],
                                time_range: None
                            }],
                            comp_filter: vec![],
                            is_not_defined: None,
                            name: "VEVENT".to_owned(),
                            time_range: None
                        }],
                        name: "VCALENDAR".to_owned()
                    }
                }),
                timezone: None,
                timezone_id: None
            }
        )
    }
}

View File

@@ -0,0 +1,345 @@
use crate::calendar::methods::report::calendar_query::{
TimeRangeElement,
prop_filter::{PropFilterElement, PropFilterable},
};
use ical::parser::ical::component::IcalTimeZone;
use rustical_ical::{CalendarObject, CalendarObjectComponent, CalendarObjectType};
use rustical_xml::XmlDeserialize;
/// Recursive CALDAV:comp-filter; `name` selects which component type to match.
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.1
pub struct CompFilterElement {
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) is_not_defined: Option<()>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) time_range: Option<TimeRangeElement>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
    pub(crate) prop_filter: Vec<PropFilterElement>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
    pub(crate) comp_filter: Vec<CompFilterElement>,
    #[xml(ty = "attr")]
    pub(crate) name: String,
}
/// A component that can be matched against a CALDAV:comp-filter.
pub trait CompFilterable: PropFilterable + Sized {
    /// iCalendar component name this type is matched as (e.g. "VCALENDAR").
    fn get_comp_name(&self) -> &'static str;

    /// Whether this component overlaps the given time range.
    fn match_time_range(&self, time_range: &TimeRangeElement) -> bool;

    /// Apply a child comp-filter against this component's subcomponents.
    fn match_subcomponents(&self, comp_filter: &CompFilterElement) -> bool;

    // https://datatracker.ietf.org/doc/html/rfc4791#section-9.7.1
    // The scope of the
    // CALDAV:comp-filter XML element is the calendar object when used as
    // a child of the CALDAV:filter XML element. The scope of the
    // CALDAV:comp-filter XML element is the enclosing calendar component
    // when used as a child of another CALDAV:comp-filter XML element
    fn matches(&self, comp_filter: &CompFilterElement) -> bool {
        let name_matches = self.get_comp_name() == comp_filter.name;
        match (comp_filter.is_not_defined.is_some(), name_matches) {
            // We are the component that's not supposed to be defined
            (true, true)
            // We don't match
            | (false, false) => return false,
            // We shall not be and indeed we aren't
            (true, false) => return true,
            _ => {}
        }
        // Name matched: time-range, every prop-filter and every sub-comp-filter
        // must all hold for the overall match to succeed.
        if let Some(time_range) = comp_filter.time_range.as_ref()
            && !self.match_time_range(time_range)
        {
            return false;
        }
        for prop_filter in &comp_filter.prop_filter {
            if !prop_filter.match_component(self) {
                return false;
            }
        }
        comp_filter
            .comp_filter
            .iter()
            .all(|filter| self.match_subcomponents(filter))
    }
}
impl CompFilterable for CalendarObject {
    /// A calendar object always filters as the enclosing VCALENDAR.
    fn get_comp_name(&self) -> &'static str {
        "VCALENDAR"
    }

    fn match_time_range(&self, _time_range: &TimeRangeElement) -> bool {
        // A VCALENDAR itself has no time range; only its components do.
        false
    }

    fn match_subcomponents(&self, comp_filter: &CompFilterElement) -> bool {
        // Evaluate the filter against each subcomponent: every VTIMEZONE plus
        // the data component (VEVENT/VTODO/VJOURNAL).
        let mut results: Vec<bool> = self
            .get_vtimezones()
            .values()
            .map(|tz| tz.matches(comp_filter))
            .collect();
        results.push(self.get_data().matches(comp_filter));
        if comp_filter.is_not_defined.is_some() {
            // Negated filter: every subcomponent must satisfy it
            results.into_iter().all(|matched| matched)
        } else {
            // Positive filter: a single matching subcomponent suffices
            results.into_iter().any(|matched| matched)
        }
    }
}
impl CompFilterable for IcalTimeZone {
    fn get_comp_name(&self) -> &'static str {
        "VTIMEZONE"
    }

    fn match_time_range(&self, _time_range: &TimeRangeElement) -> bool {
        // A VTIMEZONE has no meaningful time range to match against.
        false
    }

    fn match_subcomponents(&self, _comp_filter: &CompFilterElement) -> bool {
        // Timezone subcomponents are not inspected; any child filter matches.
        true
    }
}
impl CompFilterable for CalendarObjectComponent {
    /// Name derived from the wrapped component type (VEVENT/VTODO/VJOURNAL).
    fn get_comp_name(&self) -> &'static str {
        CalendarObjectType::from(self).as_str()
    }

    /// True unless the component's occurrences lie entirely outside the range.
    fn match_time_range(&self, time_range: &TimeRangeElement) -> bool {
        // Reject when the last occurrence ends before the requested start.
        if let Some(start) = &time_range.start {
            if let Some(last_occurence) = self.get_last_occurence().unwrap_or(None) {
                if **start > last_occurence.utc() {
                    return false;
                }
            }
        }
        // Reject when the first occurrence begins after the requested end.
        if let Some(end) = &time_range.end {
            if let Some(first_occurence) = self.get_first_occurence().unwrap_or(None) {
                if **end < first_occurence.utc() {
                    return false;
                }
            }
        }
        true
    }

    fn match_subcomponents(&self, _comp_filter: &CompFilterElement) -> bool {
        // TODO: Properly check subcomponents
        true
    }
}
// Unit tests for comp-filter matching against a real parsed calendar object.
#[cfg(test)]
mod tests {
    use chrono::{TimeZone, Utc};
    use rustical_ical::{CalendarObject, UtcDateTime};

    use crate::calendar::methods::report::calendar_query::{
        CompFilterable, TextMatchElement, TimeRangeElement,
        comp_filter::CompFilterElement,
        prop_filter::PropFilterElement,
        text_match::{NegateCondition, TextCollation},
    };

    // Recurring VEVENT with a Europe/Berlin VTIMEZONE, shared by all tests below.
    const ICS: &str = r"BEGIN:VCALENDAR
CALSCALE:GREGORIAN
VERSION:2.0
BEGIN:VTIMEZONE
TZID:Europe/Berlin
X-LIC-LOCATION:Europe/Berlin
END:VTIMEZONE
BEGIN:VEVENT
UID:318ec6503573d9576818daf93dac07317058d95c
DTSTAMP:20250502T132758Z
DTSTART;TZID=Europe/Berlin:20250506T090000
DTEND;TZID=Europe/Berlin:20250506T092500
SEQUENCE:2
SUMMARY:weekly stuff
TRANSP:OPAQUE
RRULE:FREQ=WEEKLY;COUNT=4;INTERVAL=2;BYDAY=TU,TH,SU
END:VEVENT
END:VCALENDAR";

    // is-not-defined handling, component-name matching and nested prop-filters.
    #[test]
    fn test_comp_filter_matching() {
        let object = CalendarObject::from_ics(ICS.to_string(), None).unwrap();
        let comp_filter = CompFilterElement {
            is_not_defined: Some(()),
            name: "VCALENDAR".to_string(),
            time_range: None,
            prop_filter: vec![],
            comp_filter: vec![],
        };
        assert!(!object.matches(&comp_filter), "filter: wants no VCALENDAR");
        let comp_filter = CompFilterElement {
            is_not_defined: None,
            name: "VCALENDAR".to_string(),
            time_range: None,
            prop_filter: vec![],
            comp_filter: vec![CompFilterElement {
                name: "VTODO".to_string(),
                is_not_defined: None,
                time_range: None,
                prop_filter: vec![],
                comp_filter: vec![],
            }],
        };
        assert!(!object.matches(&comp_filter), "filter matches VTODO");
        let comp_filter = CompFilterElement {
            is_not_defined: None,
            name: "VCALENDAR".to_string(),
            time_range: None,
            prop_filter: vec![],
            comp_filter: vec![CompFilterElement {
                name: "VEVENT".to_string(),
                is_not_defined: None,
                time_range: None,
                prop_filter: vec![],
                comp_filter: vec![],
            }],
        };
        assert!(object.matches(&comp_filter), "filter matches VEVENT");
        let comp_filter = CompFilterElement {
            is_not_defined: None,
            name: "VCALENDAR".to_string(),
            time_range: None,
            prop_filter: vec![
                PropFilterElement {
                    is_not_defined: None,
                    name: "VERSION".to_string(),
                    time_range: None,
                    text_match: Some(TextMatchElement {
                        needle: "2.0".to_string(),
                        collation: TextCollation::default(),
                        negate_condition: NegateCondition::default(),
                    }),
                    param_filter: vec![],
                },
                PropFilterElement {
                    is_not_defined: Some(()),
                    name: "STUFF".to_string(),
                    time_range: None,
                    text_match: None,
                    param_filter: vec![],
                },
            ],
            comp_filter: vec![CompFilterElement {
                name: "VEVENT".to_string(),
                is_not_defined: None,
                time_range: None,
                prop_filter: vec![PropFilterElement {
                    is_not_defined: None,
                    name: "SUMMARY".to_string(),
                    time_range: None,
                    text_match: Some(TextMatchElement {
                        collation: TextCollation::default(),
                        negate_condition: NegateCondition(false),
                        needle: "weekly".to_string(),
                    }),
                    param_filter: vec![],
                }],
                comp_filter: vec![],
            }],
        };
        assert!(
            object.matches(&comp_filter),
            "Some prop filters on VCALENDAR and VEVENT"
        );
    }

    // time-range sub-filter on the VEVENT component.
    #[test]
    fn test_comp_filter_time_range() {
        let object = CalendarObject::from_ics(ICS.to_string(), None).unwrap();
        let comp_filter = CompFilterElement {
            is_not_defined: None,
            name: "VCALENDAR".to_string(),
            time_range: None,
            prop_filter: vec![],
            comp_filter: vec![CompFilterElement {
                name: "VEVENT".to_string(),
                is_not_defined: None,
                time_range: Some(TimeRangeElement {
                    start: Some(UtcDateTime(
                        Utc.with_ymd_and_hms(2025, 4, 1, 0, 0, 0).unwrap(),
                    )),
                    end: Some(UtcDateTime(
                        Utc.with_ymd_and_hms(2025, 8, 1, 0, 0, 0).unwrap(),
                    )),
                }),
                prop_filter: vec![],
                comp_filter: vec![],
            }],
        };
        assert!(
            object.matches(&comp_filter),
            "event should lie in time range"
        );
        let comp_filter = CompFilterElement {
            is_not_defined: None,
            name: "VCALENDAR".to_string(),
            time_range: None,
            prop_filter: vec![],
            comp_filter: vec![CompFilterElement {
                name: "VEVENT".to_string(),
                is_not_defined: None,
                time_range: Some(TimeRangeElement {
                    start: Some(UtcDateTime(
                        Utc.with_ymd_and_hms(2024, 4, 1, 0, 0, 0).unwrap(),
                    )),
                    end: Some(UtcDateTime(
                        Utc.with_ymd_and_hms(2024, 8, 1, 0, 0, 0).unwrap(),
                    )),
                }),
                prop_filter: vec![],
                comp_filter: vec![],
            }],
        };
        assert!(
            !object.matches(&comp_filter),
            "event should not lie in time range"
        );
    }

    // VTIMEZONE subcomponents are also candidates for comp-filter matching.
    #[test]
    fn test_match_timezone() {
        let object = CalendarObject::from_ics(ICS.to_string(), None).unwrap();
        let comp_filter = CompFilterElement {
            is_not_defined: None,
            name: "VCALENDAR".to_string(),
            time_range: None,
            prop_filter: vec![],
            comp_filter: vec![CompFilterElement {
                name: "VTIMEZONE".to_string(),
                is_not_defined: None,
                time_range: None,
                prop_filter: vec![PropFilterElement {
                    is_not_defined: None,
                    name: "TZID".to_string(),
                    time_range: None,
                    text_match: Some(TextMatchElement {
                        collation: TextCollation::AsciiCasemap,
                        negate_condition: NegateCondition::default(),
                        needle: "Europe/Berlin".to_string(),
                    }),
                    param_filter: vec![],
                }],
                comp_filter: vec![],
            }],
        };
        assert!(
            object.matches(&comp_filter),
            "Timezone should be Europe/Berlin"
        );
    }
}

View File

@@ -0,0 +1,132 @@
use crate::{
calendar::methods::report::calendar_query::{
TextMatchElement,
comp_filter::{CompFilterElement, CompFilterable},
},
calendar_object::CalendarObjectPropWrapperName,
};
use rustical_dav::xml::PropfindType;
use rustical_ical::{CalendarObject, UtcDateTime};
use rustical_store::calendar_store::CalendarQuery;
use rustical_xml::XmlDeserialize;
/// Optional start/end bounds of a CALDAV:time-range filter.
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
pub struct TimeRangeElement {
    #[xml(ty = "attr")]
    pub(crate) start: Option<UtcDateTime>,
    #[xml(ty = "attr")]
    pub(crate) end: Option<UtcDateTime>,
}

/// Filter on a parameter of a property.
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
// https://www.rfc-editor.org/rfc/rfc4791#section-9.7.3
pub struct ParamFilterElement {
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) is_not_defined: Option<()>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) text_match: Option<TextMatchElement>,
    #[xml(ty = "attr")]
    pub(crate) name: String,
}
/// Root CALDAV:filter element wrapping the VCALENDAR comp-filter.
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// https://datatracker.ietf.org/doc/html/rfc4791#section-9.7
pub struct FilterElement {
    // This comp-filter matches on VCALENDAR
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) comp_filter: CompFilterElement,
}

impl FilterElement {
    /// Whether `cal_object` passes this filter.
    #[must_use]
    pub fn matches(&self, cal_object: &CalendarObject) -> bool {
        cal_object.matches(&self.comp_filter)
    }
}
impl From<&FilterElement> for CalendarQuery {
fn from(value: &FilterElement) -> Self {
let comp_filter_vcalendar = &value.comp_filter;
for comp_filter in &comp_filter_vcalendar.comp_filter {
// A calendar object cannot contain both VEVENT and VTODO, so we only have to handle
// whatever we get first
if matches!(comp_filter.name.as_str(), "VEVENT" | "VTODO")
&& let Some(time_range) = &comp_filter.time_range
{
let start = time_range.start.as_ref().map(|start| start.date_naive());
let end = time_range.end.as_ref().map(|end| end.date_naive());
return Self {
time_start: start,
time_end: end,
};
}
}
Self::default()
}
}
/// Parsed CALDAV:calendar-query REPORT request body.
#[derive(XmlDeserialize, Clone, Debug, PartialEq)]
#[allow(dead_code)]
// <!ELEMENT calendar-query ((DAV:allprop | DAV:propname | DAV:prop)?, filter, timezone?)>
pub struct CalendarQueryRequest {
    #[xml(ty = "untagged")]
    pub prop: PropfindType<CalendarObjectPropWrapperName>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) filter: Option<FilterElement>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) timezone: Option<String>,
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) timezone_id: Option<String>,
}

impl From<&CalendarQueryRequest> for CalendarQuery {
    // No filter means an unbounded query.
    fn from(value: &CalendarQueryRequest) -> Self {
        value.filter.as_ref().map(Self::from).unwrap_or_default()
    }
}
// Unit test: a VEVENT time-range filter is converted into the coarse
// store-level CalendarQuery time window.
#[cfg(test)]
mod tests {
    use crate::calendar::methods::report::calendar_query::{
        CompFilterElement, FilterElement, TimeRangeElement,
    };
    use chrono::{NaiveDate, TimeZone, Utc};
    use rustical_ical::UtcDateTime;
    use rustical_store::calendar_store::CalendarQuery;

    #[test]
    fn test_filter_element_calendar_query() {
        let filter = FilterElement {
            comp_filter: CompFilterElement {
                name: "VCALENDAR".to_string(),
                is_not_defined: None,
                time_range: None,
                prop_filter: vec![],
                comp_filter: vec![CompFilterElement {
                    name: "VEVENT".to_string(),
                    is_not_defined: None,
                    time_range: Some(TimeRangeElement {
                        start: Some(UtcDateTime(
                            Utc.with_ymd_and_hms(2024, 4, 1, 0, 0, 0).unwrap(),
                        )),
                        end: Some(UtcDateTime(
                            Utc.with_ymd_and_hms(2024, 8, 1, 0, 0, 0).unwrap(),
                        )),
                    }),
                    prop_filter: vec![],
                    comp_filter: vec![],
                }],
            },
        };
        let derived_query: CalendarQuery = (&filter).into();
        // The datetimes are truncated to naive dates for the store query.
        let query = CalendarQuery {
            time_start: Some(NaiveDate::from_ymd_opt(2024, 4, 1).unwrap()),
            time_end: Some(NaiveDate::from_ymd_opt(2024, 8, 1).unwrap()),
        };
        assert_eq!(derived_query, query);
    }
}

View File

@@ -0,0 +1,133 @@
use crate::Error;
use rustical_ical::CalendarObject;
use rustical_store::CalendarStore;
mod comp_filter;
mod elements;
mod prop_filter;
pub mod text_match;
#[allow(unused_imports)]
pub use comp_filter::{CompFilterElement, CompFilterable};
pub use elements::*;
#[allow(unused_imports)]
pub use prop_filter::{PropFilterElement, PropFilterable};
#[allow(unused_imports)]
pub use text_match::TextMatchElement;
/// Fetch the objects of a calendar that satisfy `cal_query`.
///
/// The store performs a coarse pre-selection (time-window only); the precise
/// RFC 4791 filter is then applied in memory.
///
/// # Errors
/// Propagates store errors from the underlying `calendar_query` call.
pub async fn get_objects_calendar_query<C: CalendarStore>(
    cal_query: &CalendarQueryRequest,
    principal: &str,
    cal_id: &str,
    store: &C,
) -> Result<Vec<CalendarObject>, Error> {
    let candidates = store
        .calendar_query(principal, cal_id, cal_query.into())
        .await?;
    Ok(match &cal_query.filter {
        // Keep only objects that pass the full filter
        Some(filter) => candidates
            .into_iter()
            .filter(|object| filter.matches(object))
            .collect(),
        // No filter: every candidate is returned
        None => candidates,
    })
}
// Regression tests for parsing a CALDAV:calendar-query REPORT request.
#[cfg(test)]
mod tests {
    use rustical_dav::xml::PropElement;
    use rustical_xml::XmlDocument;

    use crate::{
        calendar::methods::report::{
            ReportRequest,
            calendar_query::{
                CalendarQueryRequest, FilterElement, ParamFilterElement, TextMatchElement,
                comp_filter::CompFilterElement,
                prop_filter::PropFilterElement,
                text_match::{NegateCondition, TextCollation},
            },
        },
        calendar_object::{CalendarData, CalendarObjectPropName, CalendarObjectPropWrapperName},
    };

    // Example request taken from RFC 4791, section 7.8.7.
    #[test]
    fn calendar_query_7_8_7() {
        const INPUT: &str = r#"
<?xml version="1.0" encoding="utf-8" ?>
<C:calendar-query xmlns:C="urn:ietf:params:xml:ns:caldav">
<D:prop xmlns:D="DAV:">
<D:getetag/>
<C:calendar-data/>
</D:prop>
<C:filter>
<C:comp-filter name="VCALENDAR">
<C:comp-filter name="VEVENT">
<C:prop-filter name="ATTENDEE">
<C:text-match collation="i;ascii-casemap">mailto:lisa@example.com</C:text-match>
<C:param-filter name="PARTSTAT">
<C:text-match collation="i;ascii-casemap">NEEDS-ACTION</C:text-match>
</C:param-filter>
</C:prop-filter>
</C:comp-filter>
</C:comp-filter>
</C:filter>
</C:calendar-query>
"#;
        let report = ReportRequest::parse_str(INPUT).unwrap();
        let calendar_query: CalendarQueryRequest =
            if let ReportRequest::CalendarQuery(query) = report {
                query
            } else {
                panic!()
            };
        // Compare against the fully spelled-out expected request structure.
        assert_eq!(
            calendar_query,
            CalendarQueryRequest {
                prop: rustical_dav::xml::PropfindType::Prop(PropElement(
                    vec![
                        CalendarObjectPropWrapperName::CalendarObject(
                            CalendarObjectPropName::Getetag,
                        ),
                        CalendarObjectPropWrapperName::CalendarObject(
                            CalendarObjectPropName::CalendarData(CalendarData::default())
                        ),
                    ],
                    vec![]
                )),
                filter: Some(FilterElement {
                    comp_filter: CompFilterElement {
                        is_not_defined: None,
                        time_range: None,
                        prop_filter: vec![],
                        comp_filter: vec![CompFilterElement {
                            prop_filter: vec![PropFilterElement {
                                name: "ATTENDEE".to_owned(),
                                text_match: Some(TextMatchElement {
                                    collation: TextCollation::AsciiCasemap,
                                    negate_condition: NegateCondition(false),
                                    needle: "mailto:lisa@example.com".to_string()
                                }),
                                is_not_defined: None,
                                param_filter: vec![ParamFilterElement {
                                    is_not_defined: None,
                                    name: "PARTSTAT".to_owned(),
                                    text_match: Some(TextMatchElement {
                                        collation: TextCollation::AsciiCasemap,
                                        negate_condition: NegateCondition(false),
                                        needle: "NEEDS-ACTION".to_string()
                                    }),
                                }],
                                time_range: None
                            }],
                            comp_filter: vec![],
                            is_not_defined: None,
                            name: "VEVENT".to_owned(),
                            time_range: None
                        }],
                        name: "VCALENDAR".to_owned()
                    }
                }),
                timezone: None,
                timezone_id: None
            }
        );
    }
}

View File

@@ -0,0 +1,127 @@
use std::collections::HashMap;
use ical::{
generator::{IcalCalendar, IcalEvent},
parser::{
Component,
ical::component::{IcalJournal, IcalTimeZone, IcalTodo},
},
property::Property,
};
use rustical_ical::{CalDateTime, CalendarObject, CalendarObjectComponent, UtcDateTime};
use rustical_xml::XmlDeserialize;
use crate::calendar::methods::report::calendar_query::{
ParamFilterElement, TextMatchElement, TimeRangeElement,
};
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
// https://www.rfc-editor.org/rfc/rfc4791#section-9.7.2
// CALDAV:prop-filter — restricts query matches based on a single named
// iCalendar property of a component.
pub struct PropFilterElement {
    // CALDAV:is-not-defined: when present, match only if the property is absent.
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) is_not_defined: Option<()>,
    // CALDAV:time-range: match when the property's timestamp lies in range.
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) time_range: Option<TimeRangeElement>,
    // CALDAV:text-match: collation-aware substring test on the value.
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
    pub(crate) text_match: Option<TextMatchElement>,
    // CALDAV:param-filter children (parsed, but not yet applied by
    // `match_component` — see the TODO there).
    #[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
    pub(crate) param_filter: Vec<ParamFilterElement>,
    // The `name` attribute: which iCalendar property this filter targets.
    #[xml(ty = "attr")]
    pub(crate) name: String,
}
impl PropFilterElement {
    /// Evaluate this prop-filter against a single component.
    ///
    /// Semantics follow RFC 4791 §9.7.2: an `is-not-defined` filter matches
    /// only absent properties; a `time-range` filter compares the parsed
    /// timestamp against the optional bounds; a `text-match` filter defers
    /// to the collation-aware substring test.
    pub fn match_component(&self, comp: &impl PropFilterable) -> bool {
        let found = comp.get_property(&self.name);

        // The filter demands the property to be absent.
        if self.is_not_defined.is_some() {
            return found.is_none();
        }
        // Otherwise a missing property can never match.
        let Some(property) = found else {
            return false;
        };

        if let Some(TimeRangeElement { start, end }) = &self.time_range {
            // TODO: Respect timezones
            let Ok(parsed) = CalDateTime::parse_prop(property, &HashMap::default()) else {
                return false;
            };
            let stamp = parsed.utc();
            // An absent bound is treated as unbounded on that side.
            let lower_ok = match start {
                Some(UtcDateTime(s)) => s <= &stamp,
                None => true,
            };
            let upper_ok = match end {
                Some(UtcDateTime(e)) => e >= &stamp,
                None => true,
            };
            return lower_ok && upper_ok;
        }

        // TODO: param-filter
        match &self.text_match {
            Some(text_match) => text_match.match_property(property),
            None => true,
        }
    }
}
/// Anything exposing iCalendar properties that a prop-filter can inspect.
pub trait PropFilterable {
    /// Look up a property by name, returning `None` when it is not present.
    fn get_property(&self, name: &str) -> Option<&Property>;
}

impl PropFilterable for CalendarObject {
    fn get_property(&self, name: &str) -> Option<&Property> {
        // Delegates to CalendarObject's inherent method of the same name.
        Self::get_property(self, name)
    }
}

// The following impls simply delegate to the `ical` crate's
// `Component::get_property` for each concrete component type.
impl PropFilterable for IcalEvent {
    fn get_property(&self, name: &str) -> Option<&Property> {
        Component::get_property(self, name)
    }
}

impl PropFilterable for IcalTodo {
    fn get_property(&self, name: &str) -> Option<&Property> {
        Component::get_property(self, name)
    }
}

impl PropFilterable for IcalJournal {
    fn get_property(&self, name: &str) -> Option<&Property> {
        Component::get_property(self, name)
    }
}

impl PropFilterable for IcalCalendar {
    fn get_property(&self, name: &str) -> Option<&Property> {
        Component::get_property(self, name)
    }
}

impl PropFilterable for IcalTimeZone {
    fn get_property(&self, name: &str) -> Option<&Property> {
        Component::get_property(self, name)
    }
}

impl PropFilterable for CalendarObjectComponent {
    fn get_property(&self, name: &str) -> Option<&Property> {
        // Dispatch on the concrete component variant.
        match self {
            Self::Event(event, _) => PropFilterable::get_property(&event.event, name),
            Self::Todo(todo, _) => PropFilterable::get_property(todo, name),
            Self::Journal(journal, _) => PropFilterable::get_property(journal, name),
        }
    }
}

View File

@@ -0,0 +1,103 @@
use ical::property::Property;
use rustical_xml::{ValueDeserialize, XmlDeserialize};
/// Text collations supported for CalDAV text matching (RFC 4790).
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub enum TextCollation {
    /// `i;ascii-casemap`: case-insensitive over ASCII letters only.
    #[default]
    AsciiCasemap,
    /// `i;octet`: exact byte-wise comparison.
    Octet,
}

impl TextCollation {
    /// Returns whether `haystack` contains `needle` under this collation.
    #[must_use]
    pub fn match_text(&self, needle: &str, haystack: &str) -> bool {
        match self {
            // https://datatracker.ietf.org/doc/html/rfc4790#section-9.2
            // Only ASCII bytes are case-folded; non-ASCII must match exactly.
            Self::AsciiCasemap => {
                let folded_haystack = haystack.to_ascii_uppercase();
                let folded_needle = needle.to_ascii_uppercase();
                folded_haystack.contains(&folded_needle)
            }
            Self::Octet => haystack.contains(needle),
        }
    }
}
impl AsRef<str> for TextCollation {
    // Maps each variant back to its on-the-wire collation identifier
    // (the inverse of `ValueDeserialize::deserialize` below).
    fn as_ref(&self) -> &str {
        match self {
            Self::AsciiCasemap => "i;ascii-casemap",
            Self::Octet => "i;octet",
        }
    }
}
impl ValueDeserialize for TextCollation {
    // Parses the `collation` attribute value; only the two identifiers we
    // implement are accepted, anything else is rejected as invalid.
    fn deserialize(val: &str) -> Result<Self, rustical_xml::XmlError> {
        match val {
            "i;ascii-casemap" => Ok(Self::AsciiCasemap),
            "i;octet" => Ok(Self::Octet),
            _ => Err(rustical_xml::XmlError::InvalidVariant(format!(
                "Invalid collation: {val}"
            ))),
        }
    }
}
/// Wrapper for the `negate-condition` attribute of a text-match element.
/// `true` ("yes") inverts the match result; defaults to `false`.
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub struct NegateCondition(pub bool);

impl ValueDeserialize for NegateCondition {
    // Only the literal strings "yes" and "no" are accepted.
    fn deserialize(val: &str) -> Result<Self, rustical_xml::XmlError> {
        match val {
            "yes" => Ok(Self(true)),
            "no" => Ok(Self(false)),
            _ => Err(rustical_xml::XmlError::InvalidVariant(format!(
                "Invalid negate-condition parameter: {val}"
            ))),
        }
    }
}
// CALDAV:text-match — a collation-aware substring test on a property value.
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)]
pub struct TextMatchElement {
    // `collation` attribute; falls back to i;ascii-casemap when omitted.
    #[xml(ty = "attr", default = "Default::default")]
    pub collation: TextCollation,
    // `negate-condition` attribute; defaults to "no" (do not invert).
    #[xml(ty = "attr", default = "Default::default")]
    pub(crate) negate_condition: NegateCondition,
    // Element text content: the string searched for in the property value.
    #[xml(ty = "text")]
    pub(crate) needle: String,
}
impl TextMatchElement {
    /// Test this text-match against one property.
    ///
    /// A property without a value never matches; the `negate-condition`
    /// attribute inverts the final outcome.
    #[must_use]
    pub fn match_property(&self, property: &Property) -> bool {
        let hit = match property.value.as_ref() {
            Some(haystack) => self.collation.match_text(&self.needle, haystack),
            None => false,
        };
        // negate-condition="yes" flips the result (boolean XOR).
        hit != self.negate_condition.0
    }
}
#[cfg(test)]
mod tests {
    use crate::calendar::methods::report::calendar_query::text_match::TextCollation;

    #[test]
    fn test_collation() {
        // i;ascii-casemap folds only ASCII letters, so identical 'ü' matches…
        assert!(TextCollation::AsciiCasemap.match_text("GrüN", "grün"));
        // …while 'Ü' vs 'ü' differ: non-ASCII bytes are never case-folded.
        assert!(!TextCollation::AsciiCasemap.match_text("GrÜN", "grün"));
        // i;octet is an exact byte-wise comparison — no folding at all.
        assert!(!TextCollation::Octet.match_text("GrÜN", "grün"));
        assert!(TextCollation::Octet.match_text("hallo", "hallo"));
        // ASCII case differences are ignored under i;ascii-casemap.
        assert!(TextCollation::AsciiCasemap.match_text("HaLlo", "hAllo"));
    }
}

View File

@@ -27,7 +27,7 @@ use sync_collection::handle_sync_collection;
use tracing::instrument; use tracing::instrument;
mod calendar_multiget; mod calendar_multiget;
mod calendar_query; pub mod calendar_query;
mod sync_collection; mod sync_collection;
#[derive(XmlDeserialize, XmlDocument, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, XmlDocument, Clone, Debug, PartialEq)]
@@ -41,11 +41,11 @@ pub(crate) enum ReportRequest {
} }
impl ReportRequest { impl ReportRequest {
fn props(&self) -> &PropfindType<CalendarObjectPropWrapperName> { const fn props(&self) -> &PropfindType<CalendarObjectPropWrapperName> {
match &self { match &self {
ReportRequest::CalendarMultiget(CalendarMultigetRequest { prop, .. }) => prop, Self::CalendarMultiget(CalendarMultigetRequest { prop, .. })
ReportRequest::CalendarQuery(CalendarQueryRequest { prop, .. }) => prop, | Self::CalendarQuery(CalendarQueryRequest { prop, .. })
ReportRequest::SyncCollection(SyncCollectionRequest { prop, .. }) => prop, | Self::SyncCollection(SyncCollectionRequest { prop, .. }) => prop,
} }
} }
} }
@@ -184,7 +184,7 @@ mod tests {
"/caldav/user/user/6f787542-5256-401a-8db97003260da/ae7a998fdfd1d84a20391168962c62b".to_owned() "/caldav/user/user/6f787542-5256-401a-8db97003260da/ae7a998fdfd1d84a20391168962c62b".to_owned()
] ]
}) })
) );
} }
#[test] #[test]
@@ -241,7 +241,7 @@ mod tests {
timezone: None, timezone: None,
timezone_id: None, timezone_id: None,
}) })
) );
} }
#[test] #[test]
@@ -269,6 +269,6 @@ mod tests {
"/caldav/user/user/6f787542-5256-401a-8db97003260da/ae7a998fdfd1d84a20391168962c62b".to_owned() "/caldav/user/user/6f787542-5256-401a-8db97003260da/ae7a998fdfd1d84a20391168962c62b".to_owned()
] ]
}) })
) );
} }
} }

View File

@@ -3,13 +3,15 @@ use rustical_ical::CalendarObjectType;
use rustical_xml::{XmlDeserialize, XmlSerialize}; use rustical_xml::{XmlDeserialize, XmlSerialize};
use strum_macros::VariantArray; use strum_macros::VariantArray;
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, From, Into)] use crate::calendar::methods::report::calendar_query::text_match::TextCollation;
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, Eq, From, Into)]
pub struct SupportedCalendarComponent { pub struct SupportedCalendarComponent {
#[xml(ty = "attr")] #[xml(ty = "attr")]
pub name: CalendarObjectType, pub name: CalendarObjectType,
} }
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq)] #[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, Eq)]
pub struct SupportedCalendarComponentSet { pub struct SupportedCalendarComponentSet {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)] #[xml(ns = "rustical_dav::namespace::NS_CALDAV", flatten)]
pub comp: Vec<SupportedCalendarComponent>, pub comp: Vec<SupportedCalendarComponent>,
@@ -36,7 +38,29 @@ impl From<SupportedCalendarComponentSet> for Vec<CalendarObjectType> {
} }
} }
#[derive(Debug, Clone, XmlSerialize, PartialEq)] #[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, Eq, From, Into)]
pub struct SupportedCollation(#[xml(ty = "text")] pub TextCollation);
#[derive(Debug, Clone, XmlSerialize, XmlDeserialize, PartialEq, Eq)]
pub struct SupportedCollationSet(
#[xml(
ns = "rustical_dav::namespace::NS_CALDAV",
flatten,
rename = "supported-collation"
)]
pub Vec<SupportedCollation>,
);
impl Default for SupportedCollationSet {
fn default() -> Self {
Self(vec![
SupportedCollation(TextCollation::AsciiCasemap),
SupportedCollation(TextCollation::Octet),
])
}
}
#[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
pub struct CalendarData { pub struct CalendarData {
#[xml(ty = "attr")] #[xml(ty = "attr")]
content_type: String, content_type: String,
@@ -53,13 +77,13 @@ impl Default for CalendarData {
} }
} }
#[derive(Debug, Clone, XmlSerialize, Default, PartialEq)] #[derive(Debug, Clone, XmlSerialize, Default, PartialEq, Eq)]
pub struct SupportedCalendarData { pub struct SupportedCalendarData {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")] #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
calendar_data: CalendarData, calendar_data: CalendarData,
} }
#[derive(Debug, Clone, XmlSerialize, PartialEq, VariantArray)] #[derive(Debug, Clone, XmlSerialize, PartialEq, Eq, VariantArray)]
pub enum ReportMethod { pub enum ReportMethod {
#[xml(ns = "rustical_dav::namespace::NS_CALDAV")] #[xml(ns = "rustical_dav::namespace::NS_CALDAV")]
CalendarQuery, CalendarQuery,

View File

@@ -1,6 +1,6 @@
use super::prop::{SupportedCalendarComponentSet, SupportedCalendarData}; use super::prop::{SupportedCalendarComponentSet, SupportedCalendarData};
use crate::Error; use crate::Error;
use crate::calendar::prop::ReportMethod; use crate::calendar::prop::{ReportMethod, SupportedCollationSet};
use chrono::{DateTime, Utc}; use chrono::{DateTime, Utc};
use derive_more::derive::{From, Into}; use derive_more::derive::{From, Into};
use ical::IcalParser; use ical::IcalParser;
@@ -18,7 +18,7 @@ use rustical_xml::{EnumVariants, PropName};
use rustical_xml::{XmlDeserialize, XmlSerialize}; use rustical_xml::{XmlDeserialize, XmlSerialize};
use serde::Deserialize; use serde::Deserialize;
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "CalendarPropName")] #[xml(unit_variants_ident = "CalendarPropName")]
pub enum CalendarProp { pub enum CalendarProp {
// CalDAV (RFC 4791) // CalDAV (RFC 4791)
@@ -39,6 +39,8 @@ pub enum CalendarProp {
SupportedCalendarComponentSet(SupportedCalendarComponentSet), SupportedCalendarComponentSet(SupportedCalendarComponentSet),
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", skip_deserializing)] #[xml(ns = "rustical_dav::namespace::NS_CALDAV", skip_deserializing)]
SupportedCalendarData(SupportedCalendarData), SupportedCalendarData(SupportedCalendarData),
#[xml(ns = "rustical_dav::namespace::NS_CALDAV", skip_deserializing)]
SupportedCollationSet(SupportedCollationSet),
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
MaxResourceSize(i64), MaxResourceSize(i64),
#[xml(skip_deserializing)] #[xml(skip_deserializing)]
@@ -54,7 +56,7 @@ pub enum CalendarProp {
MaxDateTime(String), MaxDateTime(String),
} }
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "CalendarPropWrapperName", untagged)] #[xml(unit_variants_ident = "CalendarPropWrapperName", untagged)]
pub enum CalendarPropWrapper { pub enum CalendarPropWrapper {
Calendar(CalendarProp), Calendar(CalendarProp),
@@ -71,7 +73,7 @@ pub struct CalendarResource {
impl ResourceName for CalendarResource { impl ResourceName for CalendarResource {
fn get_name(&self) -> String { fn get_name(&self) -> String {
self.cal.id.to_owned() self.cal.id.clone()
} }
} }
@@ -89,7 +91,7 @@ impl SyncTokenExtension for CalendarResource {
impl DavPushExtension for CalendarResource { impl DavPushExtension for CalendarResource {
fn get_topic(&self) -> String { fn get_topic(&self) -> String {
self.cal.push_topic.to_owned() self.cal.push_topic.clone()
} }
} }
@@ -135,7 +137,9 @@ impl Resource for CalendarResource {
} }
CalendarPropName::CalendarTimezone => { CalendarPropName::CalendarTimezone => {
CalendarProp::CalendarTimezone(self.cal.timezone_id.as_ref().and_then(|tzid| { CalendarProp::CalendarTimezone(self.cal.timezone_id.as_ref().and_then(|tzid| {
vtimezones_rs::VTIMEZONES.get(tzid).map(|tz| tz.to_string()) vtimezones_rs::VTIMEZONES
.get(tzid)
.map(|tz| (*tz).to_string())
})) }))
} }
// chrono_tz uses the IANA database // chrono_tz uses the IANA database
@@ -154,13 +158,16 @@ impl Resource for CalendarResource {
CalendarPropName::SupportedCalendarData => { CalendarPropName::SupportedCalendarData => {
CalendarProp::SupportedCalendarData(SupportedCalendarData::default()) CalendarProp::SupportedCalendarData(SupportedCalendarData::default())
} }
CalendarPropName::MaxResourceSize => CalendarProp::MaxResourceSize(10000000), CalendarPropName::SupportedCollationSet => {
CalendarProp::SupportedCollationSet(SupportedCollationSet::default())
}
CalendarPropName::MaxResourceSize => CalendarProp::MaxResourceSize(10_000_000),
CalendarPropName::SupportedReportSet => { CalendarPropName::SupportedReportSet => {
CalendarProp::SupportedReportSet(SupportedReportSet::all()) CalendarProp::SupportedReportSet(SupportedReportSet::all())
} }
CalendarPropName::Source => CalendarProp::Source( CalendarPropName::Source => {
self.cal.subscription_url.to_owned().map(HrefElement::from), CalendarProp::Source(self.cal.subscription_url.clone().map(HrefElement::from))
), }
CalendarPropName::MinDateTime => { CalendarPropName::MinDateTime => {
CalendarProp::MinDateTime(CalDateTime::from(DateTime::<Utc>::MIN_UTC).format()) CalendarProp::MinDateTime(CalDateTime::from(DateTime::<Utc>::MIN_UTC).format())
} }
@@ -181,9 +188,6 @@ impl Resource for CalendarResource {
} }
fn set_prop(&mut self, prop: Self::Prop) -> Result<(), rustical_dav::Error> { fn set_prop(&mut self, prop: Self::Prop) -> Result<(), rustical_dav::Error> {
if self.read_only {
return Err(rustical_dav::Error::PropReadOnly);
}
match prop { match prop {
CalendarPropWrapper::Calendar(prop) => match prop { CalendarPropWrapper::Calendar(prop) => match prop {
CalendarProp::CalendarColor(color) => { CalendarProp::CalendarColor(color) => {
@@ -199,22 +203,20 @@ impl Resource for CalendarResource {
// TODO: Proper error (calendar-timezone precondition) // TODO: Proper error (calendar-timezone precondition)
let calendar = IcalParser::new(tz.as_bytes()) let calendar = IcalParser::new(tz.as_bytes())
.next() .next()
.ok_or(rustical_dav::Error::BadRequest( .ok_or_else(|| {
"No timezone data provided".to_owned(), rustical_dav::Error::BadRequest(
))? "No timezone data provided".to_owned(),
)
})?
.map_err(|_| { .map_err(|_| {
rustical_dav::Error::BadRequest( rustical_dav::Error::BadRequest(
"No timezone data provided".to_owned(), "No timezone data provided".to_owned(),
) )
})?; })?;
let timezone = let timezone = calendar.timezones.first().ok_or_else(|| {
calendar rustical_dav::Error::BadRequest("No timezone data provided".to_owned())
.timezones })?;
.first()
.ok_or(rustical_dav::Error::BadRequest(
"No timezone data provided".to_owned(),
))?;
let timezone: chrono_tz::Tz = timezone.try_into().map_err(|_| { let timezone: chrono_tz::Tz = timezone.try_into().map_err(|_| {
rustical_dav::Error::BadRequest("No timezone data provided".to_owned()) rustical_dav::Error::BadRequest("No timezone data provided".to_owned())
})?; })?;
@@ -223,7 +225,6 @@ impl Resource for CalendarResource {
} }
Ok(()) Ok(())
} }
CalendarProp::TimezoneServiceSet(_) => Err(rustical_dav::Error::PropReadOnly),
CalendarProp::CalendarTimezoneId(timezone_id) => { CalendarProp::CalendarTimezoneId(timezone_id) => {
if let Some(tzid) = &timezone_id if let Some(tzid) = &timezone_id
&& !vtimezones_rs::VTIMEZONES.contains_key(tzid) && !vtimezones_rs::VTIMEZONES.contains_key(tzid)
@@ -243,13 +244,14 @@ impl Resource for CalendarResource {
self.cal.components = comp_set.into(); self.cal.components = comp_set.into();
Ok(()) Ok(())
} }
CalendarProp::SupportedCalendarData(_) => Err(rustical_dav::Error::PropReadOnly), CalendarProp::TimezoneServiceSet(_)
CalendarProp::MaxResourceSize(_) => Err(rustical_dav::Error::PropReadOnly), | CalendarProp::SupportedCalendarData(_)
CalendarProp::SupportedReportSet(_) => Err(rustical_dav::Error::PropReadOnly), | CalendarProp::SupportedCollationSet(_)
// Converting between a calendar subscription calendar and a normal one would be weird | CalendarProp::MaxResourceSize(_)
CalendarProp::Source(_) => Err(rustical_dav::Error::PropReadOnly), | CalendarProp::SupportedReportSet(_)
CalendarProp::MinDateTime(_) => Err(rustical_dav::Error::PropReadOnly), | CalendarProp::Source(_)
CalendarProp::MaxDateTime(_) => Err(rustical_dav::Error::PropReadOnly), | CalendarProp::MinDateTime(_)
| CalendarProp::MaxDateTime(_) => Err(rustical_dav::Error::PropReadOnly),
}, },
CalendarPropWrapper::SyncToken(prop) => SyncTokenExtension::set_prop(self, prop), CalendarPropWrapper::SyncToken(prop) => SyncTokenExtension::set_prop(self, prop),
CalendarPropWrapper::DavPush(prop) => DavPushExtension::set_prop(self, prop), CalendarPropWrapper::DavPush(prop) => DavPushExtension::set_prop(self, prop),
@@ -258,9 +260,6 @@ impl Resource for CalendarResource {
} }
fn remove_prop(&mut self, prop: &CalendarPropWrapperName) -> Result<(), rustical_dav::Error> { fn remove_prop(&mut self, prop: &CalendarPropWrapperName) -> Result<(), rustical_dav::Error> {
if self.read_only {
return Err(rustical_dav::Error::PropReadOnly);
}
match prop { match prop {
CalendarPropWrapperName::Calendar(prop) => match prop { CalendarPropWrapperName::Calendar(prop) => match prop {
CalendarPropName::CalendarColor => { CalendarPropName::CalendarColor => {
@@ -275,7 +274,6 @@ impl Resource for CalendarResource {
self.cal.timezone_id = None; self.cal.timezone_id = None;
Ok(()) Ok(())
} }
CalendarPropName::TimezoneServiceSet => Err(rustical_dav::Error::PropReadOnly),
CalendarPropName::CalendarOrder => { CalendarPropName::CalendarOrder => {
self.cal.meta.order = 0; self.cal.meta.order = 0;
Ok(()) Ok(())
@@ -283,13 +281,14 @@ impl Resource for CalendarResource {
CalendarPropName::SupportedCalendarComponentSet => { CalendarPropName::SupportedCalendarComponentSet => {
Err(rustical_dav::Error::PropReadOnly) Err(rustical_dav::Error::PropReadOnly)
} }
CalendarPropName::SupportedCalendarData => Err(rustical_dav::Error::PropReadOnly), CalendarPropName::TimezoneServiceSet
CalendarPropName::MaxResourceSize => Err(rustical_dav::Error::PropReadOnly), | CalendarPropName::SupportedCalendarData
CalendarPropName::SupportedReportSet => Err(rustical_dav::Error::PropReadOnly), | CalendarPropName::SupportedCollationSet
// Converting a calendar subscription calendar into a normal one would be weird | CalendarPropName::MaxResourceSize
CalendarPropName::Source => Err(rustical_dav::Error::PropReadOnly), | CalendarPropName::SupportedReportSet
CalendarPropName::MinDateTime => Err(rustical_dav::Error::PropReadOnly), | CalendarPropName::Source
CalendarPropName::MaxDateTime => Err(rustical_dav::Error::PropReadOnly), | CalendarPropName::MinDateTime
| CalendarPropName::MaxDateTime => Err(rustical_dav::Error::PropReadOnly),
}, },
CalendarPropWrapperName::SyncToken(prop) => SyncTokenExtension::remove_prop(self, prop), CalendarPropWrapperName::SyncToken(prop) => SyncTokenExtension::remove_prop(self, prop),
CalendarPropWrapperName::DavPush(prop) => DavPushExtension::remove_prop(self, prop), CalendarPropWrapperName::DavPush(prop) => DavPushExtension::remove_prop(self, prop),
@@ -312,16 +311,11 @@ impl Resource for CalendarResource {
} }
fn get_user_privileges(&self, user: &Principal) -> Result<UserPrivilegeSet, Self::Error> { fn get_user_privileges(&self, user: &Principal) -> Result<UserPrivilegeSet, Self::Error> {
if self.cal.subscription_url.is_some() { if self.cal.subscription_url.is_some() || self.read_only {
return Ok(UserPrivilegeSet::owner_write_properties( return Ok(UserPrivilegeSet::owner_write_properties(
user.is_principal(&self.cal.principal), user.is_principal(&self.cal.principal),
)); ));
} }
if self.read_only {
return Ok(UserPrivilegeSet::owner_read(
user.is_principal(&self.cal.principal),
));
}
Ok(UserPrivilegeSet::owner_only( Ok(UserPrivilegeSet::owner_only(
user.is_principal(&self.cal.principal), user.is_principal(&self.cal.principal),

View File

@@ -35,7 +35,7 @@ impl<C: CalendarStore, S: SubscriptionStore> Clone for CalendarResourceService<C
} }
impl<C: CalendarStore, S: SubscriptionStore> CalendarResourceService<C, S> { impl<C: CalendarStore, S: SubscriptionStore> CalendarResourceService<C, S> {
pub fn new(cal_store: Arc<C>, sub_store: Arc<S>) -> Self { pub const fn new(cal_store: Arc<C>, sub_store: Arc<S>) -> Self {
Self { Self {
cal_store, cal_store,
sub_store, sub_store,

View File

@@ -1,5 +1,5 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<response xmlns:CS="http://calendarserver.org/ns/" xmlns:CARD="urn:ietf:params:xml:ns:carddav" xmlns:CAL="urn:ietf:params:xml:ns:caldav" xmlns="DAV:" xmlns:PUSH="https://bitfire.at/webdav-push"> <response xmlns="DAV:" xmlns:CAL="urn:ietf:params:xml:ns:caldav" xmlns:CARD="urn:ietf:params:xml:ns:carddav" xmlns:CS="http://calendarserver.org/ns/" xmlns:PUSH="https://bitfire.at/webdav-push">
<href>/caldav/principal/user/calendar/</href> <href>/caldav/principal/user/calendar/</href>
<propstat> <propstat>
<prop> <prop>
@@ -11,6 +11,7 @@
<calendar-order xmlns="http://apple.com/ns/ical/"/> <calendar-order xmlns="http://apple.com/ns/ical/"/>
<supported-calendar-component-set xmlns="urn:ietf:params:xml:ns:caldav"/> <supported-calendar-component-set xmlns="urn:ietf:params:xml:ns:caldav"/>
<supported-calendar-data xmlns="urn:ietf:params:xml:ns:caldav"/> <supported-calendar-data xmlns="urn:ietf:params:xml:ns:caldav"/>
<supported-collation-set xmlns="urn:ietf:params:xml:ns:caldav"/>
<max-resource-size xmlns="DAV:"/> <max-resource-size xmlns="DAV:"/>
<supported-report-set xmlns="DAV:"/> <supported-report-set xmlns="DAV:"/>
<source xmlns="http://calendarserver.org/ns/"/> <source xmlns="http://calendarserver.org/ns/"/>
@@ -33,7 +34,7 @@
<?xml version="1.0" encoding="utf-8"?> <?xml version="1.0" encoding="utf-8"?>
<response xmlns:CS="http://calendarserver.org/ns/" xmlns:CARD="urn:ietf:params:xml:ns:carddav" xmlns:CAL="urn:ietf:params:xml:ns:caldav" xmlns="DAV:" xmlns:PUSH="https://bitfire.at/webdav-push"> <response xmlns="DAV:" xmlns:CAL="urn:ietf:params:xml:ns:caldav" xmlns:CARD="urn:ietf:params:xml:ns:carddav" xmlns:CS="http://calendarserver.org/ns/" xmlns:PUSH="https://bitfire.at/webdav-push">
<href>/caldav/principal/user/calendar/</href> <href>/caldav/principal/user/calendar/</href>
<propstat> <propstat>
<prop> <prop>
@@ -160,6 +161,10 @@ END:VCALENDAR
<CAL:supported-calendar-data> <CAL:supported-calendar-data>
<CAL:calendar-data content-type="text/calendar" version="2.0"/> <CAL:calendar-data content-type="text/calendar" version="2.0"/>
</CAL:supported-calendar-data> </CAL:supported-calendar-data>
<CAL:supported-collation-set>
<CAL:supported-collation>i;ascii-casemap</CAL:supported-collation>
<CAL:supported-collation>i;octet</CAL:supported-collation>
</CAL:supported-collation-set>
<max-resource-size>10000000</max-resource-size> <max-resource-size>10000000</max-resource-size>
<supported-report-set> <supported-report-set>
<supported-report> <supported-report>
@@ -206,6 +211,9 @@ END:VCALENDAR
<privilege> <privilege>
<read/> <read/>
</privilege> </privilege>
<privilege>
<write-properties/>
</privilege>
<privilege> <privilege>
<read-acl/> <read-acl/>
</privilege> </privilege>

View File

@@ -4,7 +4,7 @@ use rustical_store::auth::Principal;
use rustical_xml::XmlSerializeRoot; use rustical_xml::XmlSerializeRoot;
use serde_json::from_str; use serde_json::from_str;
// #[tokio::test] #[tokio::test]
async fn test_propfind() { async fn test_propfind() {
let requests: Vec<_> = include_str!("./test_files/propfind.requests") let requests: Vec<_> = include_str!("./test_files/propfind.requests")
.trim() .trim()
@@ -39,9 +39,7 @@ async fn test_propfind() {
.unwrap() .unwrap()
.trim() .trim()
.replace("\r\n", "\n"); .replace("\r\n", "\n");
println!("{output}"); similar_asserts::assert_eq!(expected_output, output);
println!("{}, {} \n\n\n", output.len(), expected_output.len());
assert_eq!(output, expected_output);
} }
} }
} }

View File

@@ -78,14 +78,10 @@ pub async fn put_event<C: CalendarStore>(
true true
}; };
let object = match CalendarObject::from_ics(body.clone()) { let Ok(object) = CalendarObject::from_ics(body.clone(), Some(object_id)) else {
Ok(obj) => obj, debug!("invalid calendar data:\n{body}");
Err(_) => { return Err(Error::PreconditionFailed(Precondition::ValidCalendarData));
debug!("invalid calendar data:\n{body}");
return Err(Error::PreconditionFailed(Precondition::ValidCalendarData));
}
}; };
assert_eq!(object.get_id(), object_id);
cal_store cal_store
.put_object(principal, calendar_id, object, overwrite) .put_object(principal, calendar_id, object, overwrite)
.await?; .await?;

View File

@@ -2,7 +2,7 @@ use rustical_dav::extensions::CommonPropertiesProp;
use rustical_ical::UtcDateTime; use rustical_ical::UtcDateTime;
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize}; use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "CalendarObjectPropName")] #[xml(unit_variants_ident = "CalendarObjectPropName")]
pub enum CalendarObjectProp { pub enum CalendarObjectProp {
// WebDAV (RFC 2518) // WebDAV (RFC 2518)
@@ -17,7 +17,7 @@ pub enum CalendarObjectProp {
CalendarData(String), CalendarData(String),
} }
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "CalendarObjectPropWrapperName", untagged)] #[xml(unit_variants_ident = "CalendarObjectPropWrapperName", untagged)]
pub enum CalendarObjectPropWrapper { pub enum CalendarObjectPropWrapper {
CalendarObject(CalendarObjectProp), CalendarObject(CalendarObjectProp),
@@ -25,7 +25,7 @@ pub enum CalendarObjectPropWrapper {
} }
#[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq, Hash)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct ExpandElement { pub struct ExpandElement {
#[xml(ty = "attr")] #[xml(ty = "attr")]
pub(crate) start: UtcDateTime, pub(crate) start: UtcDateTime,
#[xml(ty = "attr")] #[xml(ty = "attr")]

View File

@@ -1,4 +1,7 @@
use super::prop::*; use super::prop::{
CalendarData, CalendarObjectProp, CalendarObjectPropName, CalendarObjectPropWrapper,
CalendarObjectPropWrapperName,
};
use crate::Error; use crate::Error;
use derive_more::derive::{From, Into}; use derive_more::derive::{From, Into};
use rustical_dav::{ use rustical_dav::{

View File

@@ -35,7 +35,7 @@ impl<C: CalendarStore> Clone for CalendarObjectResourceService<C> {
} }
impl<C: CalendarStore> CalendarObjectResourceService<C> { impl<C: CalendarStore> CalendarObjectResourceService<C> {
pub fn new(cal_store: Arc<C>) -> Self { pub const fn new(cal_store: Arc<C>) -> Self {
Self { cal_store } Self { cal_store }
} }
} }
@@ -106,9 +106,8 @@ where
D: Deserializer<'de>, D: Deserializer<'de>,
{ {
let name: String = Deserialize::deserialize(deserializer)?; let name: String = Deserialize::deserialize(deserializer)?;
if let Some(object_id) = name.strip_suffix(".ics") { name.strip_suffix(".ics").map_or_else(
Ok(object_id.to_owned()) || Err(serde::de::Error::custom("Missing .ics extension")),
} else { |object_id| Ok(object_id.to_owned()),
Err(serde::de::Error::custom("Missing .ics extension")) )
}
} }

View File

@@ -60,29 +60,35 @@ pub enum Error {
} }
impl Error { impl Error {
#[must_use]
pub fn status_code(&self) -> StatusCode { pub fn status_code(&self) -> StatusCode {
match self { match self {
Error::StoreError(err) => match err { Self::StoreError(err) => match err {
rustical_store::Error::NotFound => StatusCode::NOT_FOUND, rustical_store::Error::NotFound => StatusCode::NOT_FOUND,
rustical_store::Error::AlreadyExists => StatusCode::CONFLICT, rustical_store::Error::AlreadyExists => StatusCode::CONFLICT,
rustical_store::Error::ReadOnly => StatusCode::FORBIDDEN, rustical_store::Error::ReadOnly => StatusCode::FORBIDDEN,
_ => StatusCode::INTERNAL_SERVER_ERROR, _ => StatusCode::INTERNAL_SERVER_ERROR,
}, },
Error::ChronoParseError(_) => StatusCode::INTERNAL_SERVER_ERROR, Self::DavError(err) => StatusCode::try_from(err.status_code().as_u16())
Error::DavError(err) => StatusCode::try_from(err.status_code().as_u16())
.expect("Just converting between versions"), .expect("Just converting between versions"),
Error::Unauthorized => StatusCode::UNAUTHORIZED, Self::Unauthorized => StatusCode::UNAUTHORIZED,
Error::XmlDecodeError(_) => StatusCode::BAD_REQUEST, Self::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
Error::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR, Self::ChronoParseError(_) | Self::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
Error::NotFound => StatusCode::NOT_FOUND, Self::NotFound => StatusCode::NOT_FOUND,
Error::IcalError(err) => err.status_code(), Self::IcalError(err) => err.status_code(),
Error::PreconditionFailed(_err) => StatusCode::PRECONDITION_FAILED, Self::PreconditionFailed(_err) => StatusCode::PRECONDITION_FAILED,
} }
} }
} }
impl IntoResponse for Error { impl IntoResponse for Error {
fn into_response(self) -> axum::response::Response { fn into_response(self) -> axum::response::Response {
if matches!(
self.status_code(),
StatusCode::INTERNAL_SERVER_ERROR | StatusCode::PRECONDITION_FAILED
) {
error!("{self}");
}
(self.status_code(), self.to_string()).into_response() (self.status_code(), self.to_string()).into_response()
} }
} }

View File

@@ -1,3 +1,5 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
use axum::{Extension, Router}; use axum::{Extension, Router};
use derive_more::Constructor; use derive_more::Constructor;
use principal::PrincipalResourceService; use principal::PrincipalResourceService;
@@ -37,8 +39,8 @@ pub fn caldav_router<AP: AuthenticationProvider, C: CalendarStore, S: Subscripti
prefix, prefix,
RootResourceService::<_, Principal, CalDavPrincipalUri>::new(PrincipalResourceService { RootResourceService::<_, Principal, CalDavPrincipalUri>::new(PrincipalResourceService {
auth_provider: auth_provider.clone(), auth_provider: auth_provider.clone(),
sub_store: subscription_store.clone(), sub_store: subscription_store,
cal_store: store.clone(), cal_store: store,
simplified_home_set, simplified_home_set,
}) })
.axum_router() .axum_router()

View File

@@ -24,7 +24,7 @@ pub struct PrincipalResource {
impl ResourceName for PrincipalResource { impl ResourceName for PrincipalResource {
fn get_name(&self) -> String { fn get_name(&self) -> String {
self.principal.id.to_owned() self.principal.id.clone()
} }
} }
@@ -56,7 +56,7 @@ impl Resource for PrincipalResource {
PrincipalPropWrapperName::Principal(prop) => { PrincipalPropWrapperName::Principal(prop) => {
PrincipalPropWrapper::Principal(match prop { PrincipalPropWrapper::Principal(match prop {
PrincipalPropName::CalendarUserType => { PrincipalPropName::CalendarUserType => {
PrincipalProp::CalendarUserType(self.principal.principal_type.to_owned()) PrincipalProp::CalendarUserType(self.principal.principal_type.clone())
} }
PrincipalPropName::PrincipalUrl => { PrincipalPropName::PrincipalUrl => {
PrincipalProp::PrincipalUrl(principal_url.into()) PrincipalProp::PrincipalUrl(principal_url.into())

View File

@@ -6,7 +6,7 @@ use rustical_store::auth::PrincipalType;
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize}; use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
use strum_macros::VariantArray; use strum_macros::VariantArray;
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "PrincipalPropName")] #[xml(unit_variants_ident = "PrincipalPropName")]
pub enum PrincipalProp { pub enum PrincipalProp {
// Scheduling Extensions to CalDAV (RFC 6638) // Scheduling Extensions to CalDAV (RFC 6638)
@@ -34,17 +34,17 @@ pub enum PrincipalProp {
CalendarHomeSet(CalendarHomeSet), CalendarHomeSet(CalendarHomeSet),
} }
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
pub struct CalendarHomeSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>); pub struct CalendarHomeSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "PrincipalPropWrapperName", untagged)] #[xml(unit_variants_ident = "PrincipalPropWrapperName", untagged)]
pub enum PrincipalPropWrapper { pub enum PrincipalPropWrapper {
Principal(PrincipalProp), Principal(PrincipalProp),
Common(CommonPropertiesProp), Common(CommonPropertiesProp),
} }
#[derive(XmlSerialize, PartialEq, Clone, VariantArray)] #[derive(XmlSerialize, PartialEq, Eq, Clone, VariantArray)]
pub enum ReportMethod { pub enum ReportMethod {
// We don't actually support principal-match // We don't actually support principal-match
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]

View File

@@ -35,6 +35,15 @@ async fn test_principal_resource(
simplified_home_set: false, simplified_home_set: false,
}; };
// We don't have any calendars here
assert!(
service
.get_members(&("user".to_owned(),))
.await
.unwrap()
.is_empty()
);
assert!(matches!( assert!(matches!(
service service
.get_resource(&("invalid-user".to_owned(),), true) .get_resource(&("invalid-user".to_owned(),), true)

View File

@@ -1,6 +1,7 @@
[package] [package]
name = "rustical_carddav" name = "rustical_carddav"
version.workspace = true version.workspace = true
rust-version.workspace = true
edition.workspace = true edition.workspace = true
description.workspace = true description.workspace = true
repository.workspace = true repository.workspace = true
@@ -11,19 +12,19 @@ publish = false
axum.workspace = true axum.workspace = true
axum-extra.workspace = true axum-extra.workspace = true
tower.workspace = true tower.workspace = true
async-trait = { workspace = true } async-trait.workspace = true
thiserror = { workspace = true } thiserror.workspace = true
quick-xml = { workspace = true } quick-xml.workspace = true
tracing = { workspace = true } tracing.workspace = true
futures-util = { workspace = true } futures-util.workspace = true
derive_more = { workspace = true } derive_more.workspace = true
base64 = { workspace = true } base64.workspace = true
serde = { workspace = true } serde.workspace = true
tokio = { workspace = true } tokio.workspace = true
url = { workspace = true } url.workspace = true
rustical_dav = { workspace = true } rustical_dav.workspace = true
rustical_store = { workspace = true } rustical_store.workspace = true
chrono = { workspace = true } chrono.workspace = true
rustical_xml.workspace = true rustical_xml.workspace = true
uuid.workspace = true uuid.workspace = true
rustical_dav_push.workspace = true rustical_dav_push.workspace = true

View File

@@ -1,7 +1,7 @@
use rustical_dav::extensions::CommonPropertiesProp; use rustical_dav::extensions::CommonPropertiesProp;
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize}; use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "AddressObjectPropName")] #[xml(unit_variants_ident = "AddressObjectPropName")]
pub enum AddressObjectProp { pub enum AddressObjectProp {
// WebDAV (RFC 2518) // WebDAV (RFC 2518)
@@ -15,7 +15,7 @@ pub enum AddressObjectProp {
AddressData(String), AddressData(String),
} }
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "AddressObjectPropWrapperName", untagged)] #[xml(unit_variants_ident = "AddressObjectPropWrapperName", untagged)]
pub enum AddressObjectPropWrapper { pub enum AddressObjectPropWrapper {
AddressObject(AddressObjectProp), AddressObject(AddressObjectProp),

View File

@@ -98,9 +98,8 @@ where
D: Deserializer<'de>, D: Deserializer<'de>,
{ {
let name: String = Deserialize::deserialize(deserializer)?; let name: String = Deserialize::deserialize(deserializer)?;
if let Some(object_id) = name.strip_suffix(".vcf") { name.strip_suffix(".vcf").map_or_else(
Ok(object_id.to_owned()) || Err(serde::de::Error::custom("Missing .vcf extension")),
} else { |object_id| Ok(object_id.to_owned()),
Err(serde::de::Error::custom("Missing .vcf extension")) )
}
} }

View File

@@ -8,7 +8,7 @@ use rustical_store::{Addressbook, AddressbookStore, SubscriptionStore, auth::Pri
use rustical_xml::{XmlDeserialize, XmlDocument, XmlRootTag}; use rustical_xml::{XmlDeserialize, XmlDocument, XmlRootTag};
use tracing::instrument; use tracing::instrument;
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
pub struct Resourcetype { pub struct Resourcetype {
#[xml(ns = "rustical_dav::namespace::NS_CARDDAV")] #[xml(ns = "rustical_dav::namespace::NS_CARDDAV")]
addressbook: Option<()>, addressbook: Option<()>,
@@ -16,7 +16,7 @@ pub struct Resourcetype {
collection: Option<()>, collection: Option<()>,
} }
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
pub struct MkcolAddressbookProp { pub struct MkcolAddressbookProp {
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
resourcetype: Option<Resourcetype>, resourcetype: Option<Resourcetype>,
@@ -27,7 +27,7 @@ pub struct MkcolAddressbookProp {
description: Option<String>, description: Option<String>,
} }
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
pub struct PropElement<T: XmlDeserialize> { pub struct PropElement<T: XmlDeserialize> {
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
prop: T, prop: T,
@@ -53,13 +53,13 @@ pub async fn route_mkcol<AS: AddressbookStore, S: SubscriptionStore>(
} }
let mut request = MkcolRequest::parse_str(&body)?.set.prop; let mut request = MkcolRequest::parse_str(&body)?.set.prop;
if let Some("") = request.displayname.as_deref() { if request.displayname.as_deref() == Some("") {
request.displayname = None request.displayname = None;
} }
let addressbook = Addressbook { let addressbook = Addressbook {
id: addressbook_id.to_owned(), id: addressbook_id.clone(),
principal: principal.to_owned(), principal: principal.clone(),
displayname: request.displayname, displayname: request.displayname,
description: request.description, description: request.description,
deleted_at: None, deleted_at: None,
@@ -127,6 +127,6 @@ mod tests {
} }
} }
} }
) );
} }
} }

View File

@@ -45,12 +45,12 @@ pub async fn route_post<AS: AddressbookStore, S: SubscriptionStore>(
}; };
let subscription = Subscription { let subscription = Subscription {
id: sub_id.to_owned(), id: sub_id.clone(),
push_resource: request push_resource: request
.subscription .subscription
.web_push_subscription .web_push_subscription
.push_resource .push_resource
.to_owned(), .clone(),
topic: addressbook_resource.0.push_topic, topic: addressbook_resource.0.push_topic,
expiration: expires.naive_local(), expiration: expires.naive_local(),
public_key: request public_key: request

View File

@@ -13,7 +13,7 @@ use rustical_ical::AddressObject;
use rustical_store::{AddressbookStore, auth::Principal}; use rustical_store::{AddressbookStore, auth::Principal};
use rustical_xml::XmlDeserialize; use rustical_xml::XmlDeserialize;
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[allow(dead_code)] #[allow(dead_code)]
#[xml(ns = "rustical_dav::namespace::NS_DAV")] #[xml(ns = "rustical_dav::namespace::NS_DAV")]
pub struct AddressbookMultigetRequest { pub struct AddressbookMultigetRequest {
@@ -34,24 +34,24 @@ pub async fn get_objects_addressbook_multiget<AS: AddressbookStore>(
let mut not_found = vec![]; let mut not_found = vec![];
for href in &addressbook_multiget.href { for href in &addressbook_multiget.href {
if let Some(filename) = href.strip_prefix(path) { if let Ok(href) = percent_encoding::percent_decode_str(href).decode_utf8()
let filename = filename.trim_start_matches("/"); && let Some(filename) = href.strip_prefix(path)
{
let filename = filename.trim_start_matches('/');
if let Some(object_id) = filename.strip_suffix(".vcf") { if let Some(object_id) = filename.strip_suffix(".vcf") {
match store match store
.get_object(principal, addressbook_id, object_id, false) .get_object(principal, addressbook_id, object_id, false)
.await .await
{ {
Ok(object) => result.push(object), Ok(object) => result.push(object),
Err(rustical_store::Error::NotFound) => not_found.push(href.to_owned()), Err(rustical_store::Error::NotFound) => not_found.push(href.to_string()),
Err(err) => return Err(err.into()), Err(err) => return Err(err.into()),
}; }
} else { } else {
not_found.push(href.to_owned()); not_found.push(href.to_string());
continue;
} }
} else { } else {
not_found.push(href.to_owned()); not_found.push(href.to_owned());
continue;
} }
} }

View File

@@ -26,10 +26,10 @@ pub(crate) enum ReportRequest {
} }
impl ReportRequest { impl ReportRequest {
fn props(&self) -> &PropfindType<AddressObjectPropWrapperName> { const fn props(&self) -> &PropfindType<AddressObjectPropWrapperName> {
match self { match self {
ReportRequest::AddressbookMultiget(AddressbookMultigetRequest { prop, .. }) => prop, Self::AddressbookMultiget(AddressbookMultigetRequest { prop, .. })
ReportRequest::SyncCollection(SyncCollectionRequest { prop, .. }) => prop, | Self::SyncCollection(SyncCollectionRequest { prop, .. }) => prop,
} }
} }
} }
@@ -101,7 +101,7 @@ mod tests {
assert_eq!( assert_eq!(
report_request, report_request,
ReportRequest::SyncCollection(SyncCollectionRequest { ReportRequest::SyncCollection(SyncCollectionRequest {
sync_token: "".to_owned(), sync_token: String::new(),
sync_level: SyncLevel::One, sync_level: SyncLevel::One,
prop: rustical_dav::xml::PropfindType::Prop(PropElement( prop: rustical_dav::xml::PropfindType::Prop(PropElement(
vec![AddressObjectPropWrapperName::AddressObject( vec![AddressObjectPropWrapperName::AddressObject(
@@ -111,7 +111,7 @@ mod tests {
)), )),
limit: None limit: None
}) })
) );
} }
#[test] #[test]
@@ -142,6 +142,6 @@ mod tests {
"/carddav/user/user/6f787542-5256-401a-8db97003260da/ae7a998fdfd1d84a20391168962c62b".to_owned() "/carddav/user/user/6f787542-5256-401a-8db97003260da/ae7a998fdfd1d84a20391168962c62b".to_owned()
] ]
}) })
) );
} }
} }

View File

@@ -6,7 +6,7 @@ use rustical_dav_push::DavPushExtensionProp;
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize}; use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
use strum_macros::VariantArray; use strum_macros::VariantArray;
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "AddressbookPropName")] #[xml(unit_variants_ident = "AddressbookPropName")]
pub enum AddressbookProp { pub enum AddressbookProp {
// CardDAV (RFC 6352) // CardDAV (RFC 6352)
@@ -20,7 +20,7 @@ pub enum AddressbookProp {
MaxResourceSize(i64), MaxResourceSize(i64),
} }
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "AddressbookPropWrapperName", untagged)] #[xml(unit_variants_ident = "AddressbookPropWrapperName", untagged)]
pub enum AddressbookPropWrapper { pub enum AddressbookPropWrapper {
Addressbook(AddressbookProp), Addressbook(AddressbookProp),
@@ -29,7 +29,7 @@ pub enum AddressbookPropWrapper {
Common(CommonPropertiesProp), Common(CommonPropertiesProp),
} }
#[derive(Debug, Clone, XmlSerialize, PartialEq)] #[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
pub struct AddressDataType { pub struct AddressDataType {
#[xml(ty = "attr")] #[xml(ty = "attr")]
pub content_type: &'static str, pub content_type: &'static str,
@@ -37,7 +37,7 @@ pub struct AddressDataType {
pub version: &'static str, pub version: &'static str,
} }
#[derive(Debug, Clone, XmlSerialize, PartialEq)] #[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
pub struct SupportedAddressData { pub struct SupportedAddressData {
#[xml(ns = "rustical_dav::namespace::NS_CARDDAV", flatten)] #[xml(ns = "rustical_dav::namespace::NS_CARDDAV", flatten)]
address_data_type: &'static [AddressDataType], address_data_type: &'static [AddressDataType],
@@ -60,7 +60,7 @@ impl Default for SupportedAddressData {
} }
} }
#[derive(Debug, Clone, XmlSerialize, PartialEq, VariantArray)] #[derive(Debug, Clone, XmlSerialize, PartialEq, Eq, VariantArray)]
pub enum ReportMethod { pub enum ReportMethod {
#[xml(ns = "rustical_dav::namespace::NS_CARDDAV")] #[xml(ns = "rustical_dav::namespace::NS_CARDDAV")]
AddressbookMultiget, AddressbookMultiget,

View File

@@ -17,7 +17,7 @@ pub struct AddressbookResource(pub(crate) Addressbook);
impl ResourceName for AddressbookResource { impl ResourceName for AddressbookResource {
fn get_name(&self) -> String { fn get_name(&self) -> String {
self.0.id.to_owned() self.0.id.clone()
} }
} }
@@ -29,7 +29,7 @@ impl SyncTokenExtension for AddressbookResource {
impl DavPushExtension for AddressbookResource { impl DavPushExtension for AddressbookResource {
fn get_topic(&self) -> String { fn get_topic(&self) -> String {
self.0.push_topic.to_owned() self.0.push_topic.clone()
} }
} }
@@ -59,13 +59,13 @@ impl Resource for AddressbookResource {
AddressbookPropWrapperName::Addressbook(prop) => { AddressbookPropWrapperName::Addressbook(prop) => {
AddressbookPropWrapper::Addressbook(match prop { AddressbookPropWrapper::Addressbook(match prop {
AddressbookPropName::MaxResourceSize => { AddressbookPropName::MaxResourceSize => {
AddressbookProp::MaxResourceSize(10000000) AddressbookProp::MaxResourceSize(10_000_000)
} }
AddressbookPropName::SupportedReportSet => { AddressbookPropName::SupportedReportSet => {
AddressbookProp::SupportedReportSet(SupportedReportSet::all()) AddressbookProp::SupportedReportSet(SupportedReportSet::all())
} }
AddressbookPropName::AddressbookDescription => { AddressbookPropName::AddressbookDescription => {
AddressbookProp::AddressbookDescription(self.0.description.to_owned()) AddressbookProp::AddressbookDescription(self.0.description.clone())
} }
AddressbookPropName::SupportedAddressData => { AddressbookPropName::SupportedAddressData => {
AddressbookProp::SupportedAddressData(SupportedAddressData::default()) AddressbookProp::SupportedAddressData(SupportedAddressData::default())
@@ -92,9 +92,11 @@ impl Resource for AddressbookResource {
self.0.description = description; self.0.description = description;
Ok(()) Ok(())
} }
AddressbookProp::MaxResourceSize(_) => Err(rustical_dav::Error::PropReadOnly), AddressbookProp::MaxResourceSize(_)
AddressbookProp::SupportedReportSet(_) => Err(rustical_dav::Error::PropReadOnly), | AddressbookProp::SupportedReportSet(_)
AddressbookProp::SupportedAddressData(_) => Err(rustical_dav::Error::PropReadOnly), | AddressbookProp::SupportedAddressData(_) => {
Err(rustical_dav::Error::PropReadOnly)
}
}, },
AddressbookPropWrapper::SyncToken(prop) => SyncTokenExtension::set_prop(self, prop), AddressbookPropWrapper::SyncToken(prop) => SyncTokenExtension::set_prop(self, prop),
AddressbookPropWrapper::DavPush(prop) => DavPushExtension::set_prop(self, prop), AddressbookPropWrapper::DavPush(prop) => DavPushExtension::set_prop(self, prop),
@@ -112,9 +114,11 @@ impl Resource for AddressbookResource {
self.0.description = None; self.0.description = None;
Ok(()) Ok(())
} }
AddressbookPropName::MaxResourceSize => Err(rustical_dav::Error::PropReadOnly), AddressbookPropName::MaxResourceSize
AddressbookPropName::SupportedReportSet => Err(rustical_dav::Error::PropReadOnly), | AddressbookPropName::SupportedReportSet
AddressbookPropName::SupportedAddressData => Err(rustical_dav::Error::PropReadOnly), | AddressbookPropName::SupportedAddressData => {
Err(rustical_dav::Error::PropReadOnly)
}
}, },
AddressbookPropWrapperName::SyncToken(prop) => { AddressbookPropWrapperName::SyncToken(prop) => {
SyncTokenExtension::remove_prop(self, prop) SyncTokenExtension::remove_prop(self, prop)

View File

@@ -26,7 +26,7 @@ pub struct AddressbookResourceService<AS: AddressbookStore, S: SubscriptionStore
} }
impl<A: AddressbookStore, S: SubscriptionStore> AddressbookResourceService<A, S> { impl<A: AddressbookStore, S: SubscriptionStore> AddressbookResourceService<A, S> {
pub fn new(addr_store: Arc<A>, sub_store: Arc<S>) -> Self { pub const fn new(addr_store: Arc<A>, sub_store: Arc<S>) -> Self {
Self { Self {
addr_store, addr_store,
sub_store, sub_store,

View File

@@ -30,20 +30,20 @@ pub enum Error {
} }
impl Error { impl Error {
pub fn status_code(&self) -> StatusCode { #[must_use]
pub const fn status_code(&self) -> StatusCode {
match self { match self {
Error::StoreError(err) => match err { Self::StoreError(err) => match err {
rustical_store::Error::NotFound => StatusCode::NOT_FOUND, rustical_store::Error::NotFound => StatusCode::NOT_FOUND,
rustical_store::Error::AlreadyExists => StatusCode::CONFLICT, rustical_store::Error::AlreadyExists => StatusCode::CONFLICT,
rustical_store::Error::ReadOnly => StatusCode::FORBIDDEN, rustical_store::Error::ReadOnly => StatusCode::FORBIDDEN,
_ => StatusCode::INTERNAL_SERVER_ERROR, _ => StatusCode::INTERNAL_SERVER_ERROR,
}, },
Error::ChronoParseError(_) => StatusCode::INTERNAL_SERVER_ERROR, Self::DavError(err) => err.status_code(),
Error::DavError(err) => err.status_code(), Self::Unauthorized => StatusCode::UNAUTHORIZED,
Error::Unauthorized => StatusCode::UNAUTHORIZED, Self::XmlDecodeError(_) => StatusCode::BAD_REQUEST,
Error::XmlDecodeError(_) => StatusCode::BAD_REQUEST, Self::ChronoParseError(_) | Self::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR,
Error::NotImplemented => StatusCode::INTERNAL_SERVER_ERROR, Self::NotFound => StatusCode::NOT_FOUND,
Error::NotFound => StatusCode::NOT_FOUND,
Self::IcalError(err) => err.status_code(), Self::IcalError(err) => err.status_code(),
} }
} }

View File

@@ -1,3 +1,5 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
use axum::response::Redirect; use axum::response::Redirect;
use axum::routing::any; use axum::routing::any;
use axum::{Extension, Router}; use axum::{Extension, Router};
@@ -36,20 +38,15 @@ pub fn carddav_router<AP: AuthenticationProvider, A: AddressbookStore, S: Subscr
store: Arc<A>, store: Arc<A>,
subscription_store: Arc<S>, subscription_store: Arc<S>,
) -> Router { ) -> Router {
let principal_service = PrincipalResourceService::new( let principal_service =
store.clone(), PrincipalResourceService::new(store, auth_provider.clone(), subscription_store);
auth_provider.clone(),
subscription_store.clone(),
);
Router::new() Router::new()
.nest( .nest(
prefix, prefix,
RootResourceService::<_, Principal, CardDavPrincipalUri>::new( RootResourceService::<_, Principal, CardDavPrincipalUri>::new(principal_service)
principal_service.clone(), .axum_router()
) .layer(AuthenticationLayer::new(auth_provider))
.axum_router() .layer(Extension(CardDavPrincipalUri(prefix))),
.layer(AuthenticationLayer::new(auth_provider))
.layer(Extension(CardDavPrincipalUri(prefix))),
) )
.route( .route(
"/.well-known/carddav", "/.well-known/carddav",

View File

@@ -20,7 +20,7 @@ pub struct PrincipalResource {
impl ResourceName for PrincipalResource { impl ResourceName for PrincipalResource {
fn get_name(&self) -> String { fn get_name(&self) -> String {
self.principal.id.to_owned() self.principal.id.clone()
} }
} }

View File

@@ -4,7 +4,7 @@ use rustical_dav::{
}; };
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize}; use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "PrincipalPropName")] #[xml(unit_variants_ident = "PrincipalPropName")]
pub enum PrincipalProp { pub enum PrincipalProp {
// WebDAV Access Control (RFC 3744) // WebDAV Access Control (RFC 3744)
@@ -27,10 +27,10 @@ pub enum PrincipalProp {
PrincipalAddress(Option<HrefElement>), PrincipalAddress(Option<HrefElement>),
} }
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
pub struct AddressbookHomeSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>); pub struct AddressbookHomeSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, EnumVariants, PropName)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, EnumVariants, PropName)]
#[xml(unit_variants_ident = "PrincipalPropWrapperName", untagged)] #[xml(unit_variants_ident = "PrincipalPropWrapperName", untagged)]
pub enum PrincipalPropWrapper { pub enum PrincipalPropWrapper {
Principal(PrincipalProp), Principal(PrincipalProp),

View File

@@ -34,7 +34,7 @@ impl<A: AddressbookStore, AP: AuthenticationProvider, S: SubscriptionStore> Clon
impl<A: AddressbookStore, AP: AuthenticationProvider, S: SubscriptionStore> impl<A: AddressbookStore, AP: AuthenticationProvider, S: SubscriptionStore>
PrincipalResourceService<A, AP, S> PrincipalResourceService<A, AP, S>
{ {
pub fn new(addr_store: Arc<A>, auth_provider: Arc<AP>, sub_store: Arc<S>) -> Self { pub const fn new(addr_store: Arc<A>, auth_provider: Arc<AP>, sub_store: Arc<S>) -> Self {
Self { Self {
addr_store, addr_store,
auth_provider, auth_provider,

View File

@@ -1,6 +1,7 @@
[package] [package]
name = "rustical_dav" name = "rustical_dav"
version.workspace = true version.workspace = true
rust-version.workspace = true
edition.workspace = true edition.workspace = true
description.workspace = true description.workspace = true
repository.workspace = true repository.workspace = true
@@ -11,7 +12,6 @@ publish = false
axum.workspace = true axum.workspace = true
tower.workspace = true tower.workspace = true
axum-extra.workspace = true axum-extra.workspace = true
rustical_xml.workspace = true rustical_xml.workspace = true
async-trait.workspace = true async-trait.workspace = true
futures-util.workspace = true futures-util.workspace = true

View File

@@ -1,3 +1,4 @@
use axum::body::Body;
use http::StatusCode; use http::StatusCode;
use rustical_xml::XmlError; use rustical_xml::XmlError;
use thiserror::Error; use thiserror::Error;
@@ -34,9 +35,9 @@ pub enum Error {
} }
impl Error { impl Error {
pub fn status_code(&self) -> StatusCode { #[must_use]
pub const fn status_code(&self) -> StatusCode {
match self { match self {
Self::InternalError => StatusCode::INTERNAL_SERVER_ERROR,
Self::NotFound => StatusCode::NOT_FOUND, Self::NotFound => StatusCode::NOT_FOUND,
Self::BadRequest(_) => StatusCode::BAD_REQUEST, Self::BadRequest(_) => StatusCode::BAD_REQUEST,
Self::Unauthorized => StatusCode::UNAUTHORIZED, Self::Unauthorized => StatusCode::UNAUTHORIZED,
@@ -49,9 +50,9 @@ impl Error {
| XmlError::InvalidValue(_) => StatusCode::UNPROCESSABLE_ENTITY, | XmlError::InvalidValue(_) => StatusCode::UNPROCESSABLE_ENTITY,
_ => StatusCode::BAD_REQUEST, _ => StatusCode::BAD_REQUEST,
}, },
Error::PropReadOnly => StatusCode::CONFLICT, Self::PropReadOnly => StatusCode::CONFLICT,
Error::PreconditionFailed => StatusCode::PRECONDITION_FAILED, Self::PreconditionFailed => StatusCode::PRECONDITION_FAILED,
Self::IOError(_) => StatusCode::INTERNAL_SERVER_ERROR, Self::InternalError | Self::IOError(_) => StatusCode::INTERNAL_SERVER_ERROR,
Self::Forbidden => StatusCode::FORBIDDEN, Self::Forbidden => StatusCode::FORBIDDEN,
} }
} }
@@ -59,10 +60,15 @@ impl Error {
impl axum::response::IntoResponse for Error { impl axum::response::IntoResponse for Error {
fn into_response(self) -> axum::response::Response { fn into_response(self) -> axum::response::Response {
use axum::body::Body; if matches!(
self.status_code(),
StatusCode::INTERNAL_SERVER_ERROR | StatusCode::PRECONDITION_FAILED
) {
error!("{self}");
}
let mut resp = axum::response::Response::builder().status(self.status_code()); let mut resp = axum::response::Response::builder().status(self.status_code());
if matches!(&self, &Error::Unauthorized) { if matches!(&self, &Self::Unauthorized) {
resp.headers_mut() resp.headers_mut()
.expect("This must always work") .expect("This must always work")
.insert("WWW-Authenticate", "Basic".parse().unwrap()); .insert("WWW-Authenticate", "Basic".parse().unwrap());

View File

@@ -6,7 +6,7 @@ use crate::{
}; };
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize}; use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, PropName, EnumVariants)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, PropName, EnumVariants)]
#[xml(unit_variants_ident = "CommonPropertiesPropName")] #[xml(unit_variants_ident = "CommonPropertiesPropName")]
pub enum CommonPropertiesProp { pub enum CommonPropertiesProp {
// WebDAV (RFC 2518) // WebDAV (RFC 2518)
@@ -39,9 +39,9 @@ pub trait CommonPropertiesExtension: Resource {
CommonPropertiesPropName::Resourcetype => { CommonPropertiesPropName::Resourcetype => {
CommonPropertiesProp::Resourcetype(self.get_resourcetype()) CommonPropertiesProp::Resourcetype(self.get_resourcetype())
} }
CommonPropertiesPropName::Displayname => { CommonPropertiesPropName::Displayname => CommonPropertiesProp::Displayname(
CommonPropertiesProp::Displayname(self.get_displayname().map(|s| s.to_string())) self.get_displayname().map(std::string::ToString::to_string),
} ),
CommonPropertiesPropName::CurrentUserPrincipal => { CommonPropertiesPropName::CurrentUserPrincipal => {
CommonPropertiesProp::CurrentUserPrincipal( CommonPropertiesProp::CurrentUserPrincipal(
principal_uri.principal_uri(principal.get_id()).into(), principal_uri.principal_uri(principal.get_id()).into(),

View File

@@ -1,6 +1,6 @@
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize}; use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, PropName, EnumVariants)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, PropName, EnumVariants)]
#[xml(unit_variants_ident = "SyncTokenExtensionPropName")] #[xml(unit_variants_ident = "SyncTokenExtensionPropName")]
pub enum SyncTokenExtensionProp { pub enum SyncTokenExtensionProp {
// Collection Synchronization (RFC 6578) // Collection Synchronization (RFC 6578)

View File

@@ -19,7 +19,7 @@ impl IntoResponse for InvalidDepthHeader {
} }
} }
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub enum Depth { pub enum Depth {
Zero, Zero,
One, One,
@@ -29,9 +29,9 @@ pub enum Depth {
impl ValueSerialize for Depth { impl ValueSerialize for Depth {
fn serialize(&self) -> String { fn serialize(&self) -> String {
match self { match self {
Depth::Zero => "0", Self::Zero => "0",
Depth::One => "1", Self::One => "1",
Depth::Infinity => "infinity", Self::Infinity => "infinity",
} }
.to_owned() .to_owned()
} }
@@ -55,9 +55,9 @@ impl TryFrom<&[u8]> for Depth {
fn try_from(value: &[u8]) -> Result<Self, Self::Error> { fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
match value { match value {
b"0" => Ok(Depth::Zero), b"0" => Ok(Self::Zero),
b"1" => Ok(Depth::One), b"1" => Ok(Self::One),
b"Infinity" | b"infinity" => Ok(Depth::Infinity), b"Infinity" | b"infinity" => Ok(Self::Infinity),
_ => Err(InvalidDepthHeader), _ => Err(InvalidDepthHeader),
} }
} }
@@ -85,10 +85,11 @@ impl<S: Send + Sync> FromRequestParts<S> for Depth {
parts: &mut axum::http::request::Parts, parts: &mut axum::http::request::Parts,
_state: &S, _state: &S,
) -> Result<Self, Self::Rejection> { ) -> Result<Self, Self::Rejection> {
if let Some(depth_header) = parts.headers.get("Depth") { parts
depth_header.as_bytes().try_into() .headers
} else { .get("Depth")
Ok(Self::Zero) .map_or(Ok(Self::Zero), |depth_header| {
} depth_header.as_bytes().try_into()
})
} }
} }

View File

@@ -14,16 +14,12 @@ impl IntoResponse for InvalidOverwriteHeader {
} }
} }
#[derive(Debug, PartialEq, Default)] #[derive(Debug, PartialEq, Eq)]
pub enum Overwrite { pub struct Overwrite(pub bool);
#[default]
T,
F,
}
impl Overwrite { impl Default for Overwrite {
pub fn is_true(&self) -> bool { fn default() -> Self {
matches!(self, Self::T) Self(true)
} }
} }
@@ -34,11 +30,10 @@ impl<S: Send + Sync> FromRequestParts<S> for Overwrite {
parts: &mut axum::http::request::Parts, parts: &mut axum::http::request::Parts,
_state: &S, _state: &S,
) -> Result<Self, Self::Rejection> { ) -> Result<Self, Self::Rejection> {
if let Some(overwrite_header) = parts.headers.get("Overwrite") { parts.headers.get("Overwrite").map_or_else(
overwrite_header.as_bytes().try_into() || Ok(Self::default()),
} else { |overwrite_header| overwrite_header.as_bytes().try_into(),
Ok(Self::default()) )
}
} }
} }
@@ -47,9 +42,48 @@ impl TryFrom<&[u8]> for Overwrite {
fn try_from(value: &[u8]) -> Result<Self, Self::Error> { fn try_from(value: &[u8]) -> Result<Self, Self::Error> {
match value { match value {
b"T" => Ok(Overwrite::T), b"T" => Ok(Self(true)),
b"F" => Ok(Overwrite::F), b"F" => Ok(Self(false)),
_ => Err(InvalidOverwriteHeader), _ => Err(InvalidOverwriteHeader),
} }
} }
} }
#[cfg(test)]
mod tests {
use axum::{extract::FromRequestParts, response::IntoResponse};
use http::Request;
use crate::header::Overwrite;
#[tokio::test]
async fn test_overwrite_default() {
let request = Request::put("asd").body(()).unwrap();
let (mut parts, ()) = request.into_parts();
let overwrite = Overwrite::from_request_parts(&mut parts, &())
.await
.unwrap();
assert_eq!(
Overwrite(true),
overwrite,
"By default we want to overwrite!"
);
}
#[test]
fn test_overwrite() {
assert_eq!(
Overwrite(true),
Overwrite::try_from(b"T".as_slice()).unwrap()
);
assert_eq!(
Overwrite(false),
Overwrite::try_from(b"F".as_slice()).unwrap()
);
if let Err(err) = Overwrite::try_from(b"aslkdjlad".as_slice()) {
let _ = err.into_response();
} else {
unreachable!("should return error")
}
}
}

View File

@@ -1,3 +1,5 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc)]
pub mod error; pub mod error;
pub mod extensions; pub mod extensions;
pub mod header; pub mod header;

View File

@@ -41,12 +41,13 @@ impl XmlSerialize for UserPrivilegeSet {
} }
} }
#[derive(Debug, Clone, Default, PartialEq)] #[derive(Debug, Clone, Default, PartialEq, Eq)]
pub struct UserPrivilegeSet { pub struct UserPrivilegeSet {
privileges: HashSet<UserPrivilege>, privileges: HashSet<UserPrivilege>,
} }
impl UserPrivilegeSet { impl UserPrivilegeSet {
#[must_use]
pub fn has(&self, privilege: &UserPrivilege) -> bool { pub fn has(&self, privilege: &UserPrivilege) -> bool {
if (privilege == &UserPrivilege::WriteProperties if (privilege == &UserPrivilege::WriteProperties
|| privilege == &UserPrivilege::WriteContent) || privilege == &UserPrivilege::WriteContent)
@@ -57,12 +58,14 @@ impl UserPrivilegeSet {
self.privileges.contains(privilege) || self.privileges.contains(&UserPrivilege::All) self.privileges.contains(privilege) || self.privileges.contains(&UserPrivilege::All)
} }
#[must_use]
pub fn all() -> Self { pub fn all() -> Self {
Self { Self {
privileges: HashSet::from([UserPrivilege::All]), privileges: HashSet::from([UserPrivilege::All]),
} }
} }
#[must_use]
pub fn owner_only(is_owner: bool) -> Self { pub fn owner_only(is_owner: bool) -> Self {
if is_owner { if is_owner {
Self::all() Self::all()
@@ -71,6 +74,7 @@ impl UserPrivilegeSet {
} }
} }
#[must_use]
pub fn owner_read(is_owner: bool) -> Self { pub fn owner_read(is_owner: bool) -> Self {
if is_owner { if is_owner {
Self::read_only() Self::read_only()
@@ -79,6 +83,7 @@ impl UserPrivilegeSet {
} }
} }
#[must_use]
pub fn owner_write_properties(is_owner: bool) -> Self { pub fn owner_write_properties(is_owner: bool) -> Self {
// Content is read-only but we can write properties // Content is read-only but we can write properties
if is_owner { if is_owner {
@@ -88,6 +93,7 @@ impl UserPrivilegeSet {
} }
} }
#[must_use]
pub fn read_only() -> Self { pub fn read_only() -> Self {
Self { Self {
privileges: HashSet::from([ privileges: HashSet::from([
@@ -98,6 +104,7 @@ impl UserPrivilegeSet {
} }
} }
#[must_use]
pub fn write_properties() -> Self { pub fn write_properties() -> Self {
Self { Self {
privileges: HashSet::from([ privileges: HashSet::from([

View File

@@ -9,41 +9,49 @@ pub type MethodFunction<State> =
pub trait AxumMethods: Sized + Send + Sync + 'static { pub trait AxumMethods: Sized + Send + Sync + 'static {
#[inline] #[inline]
#[must_use]
fn report() -> Option<MethodFunction<Self>> { fn report() -> Option<MethodFunction<Self>> {
None None
} }
#[inline] #[inline]
#[must_use]
fn get() -> Option<MethodFunction<Self>> { fn get() -> Option<MethodFunction<Self>> {
None None
} }
#[inline] #[inline]
#[must_use]
fn post() -> Option<MethodFunction<Self>> { fn post() -> Option<MethodFunction<Self>> {
None None
} }
#[inline] #[inline]
#[must_use]
fn mkcol() -> Option<MethodFunction<Self>> { fn mkcol() -> Option<MethodFunction<Self>> {
None None
} }
#[inline] #[inline]
#[must_use]
fn mkcalendar() -> Option<MethodFunction<Self>> { fn mkcalendar() -> Option<MethodFunction<Self>> {
None None
} }
#[inline] #[inline]
#[must_use]
fn put() -> Option<MethodFunction<Self>> { fn put() -> Option<MethodFunction<Self>> {
None None
} }
#[inline] #[inline]
#[must_use]
fn import() -> Option<MethodFunction<Self>> { fn import() -> Option<MethodFunction<Self>> {
None None
} }
#[inline] #[inline]
#[must_use]
fn allow_header() -> Allow { fn allow_header() -> Allow {
let mut allow = vec![ let mut allow = vec![
Method::from_str("PROPFIND").unwrap(), Method::from_str("PROPFIND").unwrap(),

View File

@@ -23,7 +23,7 @@ pub struct AxumService<RS: ResourceService + AxumMethods> {
} }
impl<RS: ResourceService + AxumMethods> AxumService<RS> { impl<RS: ResourceService + AxumMethods> AxumService<RS> {
pub fn new(resource_service: RS) -> Self { pub const fn new(resource_service: RS) -> Self {
Self { resource_service } Self { resource_service }
} }
} }
@@ -103,7 +103,7 @@ where
} }
} }
_ => {} _ => {}
}; }
Box::pin(async move { Box::pin(async move {
Ok(Response::builder() Ok(Response::builder()
.status(StatusCode::METHOD_NOT_ALLOWED) .status(StatusCode::METHOD_NOT_ALLOWED)

View File

@@ -12,12 +12,12 @@ use serde::Deserialize;
use tracing::instrument; use tracing::instrument;
#[instrument(skip(path, resource_service,))] #[instrument(skip(path, resource_service,))]
pub(crate) async fn axum_route_copy<R: ResourceService>( pub async fn axum_route_copy<R: ResourceService>(
Path(path): Path<R::PathComponents>, Path(path): Path<R::PathComponents>,
State(resource_service): State<R>, State(resource_service): State<R>,
depth: Option<Depth>, depth: Option<Depth>,
principal: R::Principal, principal: R::Principal,
overwrite: Overwrite, Overwrite(overwrite): Overwrite,
matched_path: MatchedPath, matched_path: MatchedPath,
header_map: HeaderMap, header_map: HeaderMap,
) -> Result<Response, R::Error> { ) -> Result<Response, R::Error> {
@@ -39,7 +39,7 @@ pub(crate) async fn axum_route_copy<R: ResourceService>(
.map_err(|_| crate::Error::Forbidden)?; .map_err(|_| crate::Error::Forbidden)?;
if resource_service if resource_service
.copy_resource(&path, &dest_path, &principal, overwrite.is_true()) .copy_resource(&path, &dest_path, &principal, overwrite)
.await? .await?
{ {
// Overwritten // Overwritten

View File

@@ -7,7 +7,7 @@ use axum_extra::TypedHeader;
use headers::{IfMatch, IfNoneMatch}; use headers::{IfMatch, IfNoneMatch};
use http::HeaderMap; use http::HeaderMap;
pub(crate) async fn axum_route_delete<R: ResourceService>( pub async fn axum_route_delete<R: ResourceService>(
Path(path): Path<R::PathComponents>, Path(path): Path<R::PathComponents>,
State(resource_service): State<R>, State(resource_service): State<R>,
principal: R::Principal, principal: R::Principal,
@@ -24,8 +24,7 @@ pub(crate) async fn axum_route_delete<R: ResourceService>(
} }
let no_trash = header_map let no_trash = header_map
.get("X-No-Trashbin") .get("X-No-Trashbin")
.map(|val| matches!(val.to_str(), Ok("1"))) .is_some_and(|val| matches!(val.to_str(), Ok("1")));
.unwrap_or(false);
route_delete( route_delete(
&path, &path,
&principal, &principal,

View File

@@ -4,8 +4,8 @@ mod mv;
mod propfind; mod propfind;
mod proppatch; mod proppatch;
pub(crate) use copy::axum_route_copy; pub use copy::axum_route_copy;
pub(crate) use delete::axum_route_delete; pub use delete::axum_route_delete;
pub(crate) use mv::axum_route_move; pub use mv::axum_route_move;
pub(crate) use propfind::axum_route_propfind; pub use propfind::axum_route_propfind;
pub(crate) use proppatch::axum_route_proppatch; pub use proppatch::axum_route_proppatch;

View File

@@ -12,12 +12,12 @@ use serde::Deserialize;
use tracing::instrument; use tracing::instrument;
#[instrument(skip(path, resource_service,))] #[instrument(skip(path, resource_service,))]
pub(crate) async fn axum_route_move<R: ResourceService>( pub async fn axum_route_move<R: ResourceService>(
Path(path): Path<R::PathComponents>, Path(path): Path<R::PathComponents>,
State(resource_service): State<R>, State(resource_service): State<R>,
depth: Option<Depth>, depth: Option<Depth>,
principal: R::Principal, principal: R::Principal,
overwrite: Overwrite, Overwrite(overwrite): Overwrite,
matched_path: MatchedPath, matched_path: MatchedPath,
header_map: HeaderMap, header_map: HeaderMap,
) -> Result<Response, R::Error> { ) -> Result<Response, R::Error> {
@@ -39,7 +39,7 @@ pub(crate) async fn axum_route_move<R: ResourceService>(
.map_err(|_| crate::Error::Forbidden)?; .map_err(|_| crate::Error::Forbidden)?;
if resource_service if resource_service
.copy_resource(&path, &dest_path, &principal, overwrite.is_true()) .copy_resource(&path, &dest_path, &principal, overwrite)
.await? .await?
{ {
// Overwritten // Overwritten

View File

@@ -15,7 +15,7 @@ type RSMultistatus<R> = MultistatusElement<
>; >;
#[instrument(skip(path, resource_service, puri))] #[instrument(skip(path, resource_service, puri))]
pub(crate) async fn axum_route_propfind<R: ResourceService>( pub async fn axum_route_propfind<R: ResourceService>(
Path(path): Path<R::PathComponents>, Path(path): Path<R::PathComponents>,
State(resource_service): State<R>, State(resource_service): State<R>,
depth: Depth, depth: Depth,
@@ -36,7 +36,7 @@ pub(crate) async fn axum_route_propfind<R: ResourceService>(
.await .await
} }
pub(crate) async fn route_propfind<R: ResourceService>( pub async fn route_propfind<R: ResourceService>(
path_components: &R::PathComponents, path_components: &R::PathComponents,
path: &str, path: &str,
body: &str, body: &str,

View File

@@ -61,7 +61,7 @@ enum Operation<T: XmlDeserialize> {
#[xml(ns = "crate::namespace::NS_DAV")] #[xml(ns = "crate::namespace::NS_DAV")]
struct PropertyupdateElement<T: XmlDeserialize>(#[xml(ty = "untagged", flatten)] Vec<Operation<T>>); struct PropertyupdateElement<T: XmlDeserialize>(#[xml(ty = "untagged", flatten)] Vec<Operation<T>>);
pub(crate) async fn axum_route_proppatch<R: ResourceService>( pub async fn axum_route_proppatch<R: ResourceService>(
Path(path): Path<R::PathComponents>, Path(path): Path<R::PathComponents>,
State(resource_service): State<R>, State(resource_service): State<R>,
principal: R::Principal, principal: R::Principal,
@@ -71,7 +71,7 @@ pub(crate) async fn axum_route_proppatch<R: ResourceService>(
route_proppatch(&path, uri.path(), &body, &principal, &resource_service).await route_proppatch(&path, uri.path(), &body, &principal, &resource_service).await
} }
pub(crate) async fn route_proppatch<R: ResourceService>( pub async fn route_proppatch<R: ResourceService>(
path_components: &R::PathComponents, path_components: &R::PathComponents,
path: &str, path: &str,
body: &str, body: &str,
@@ -88,7 +88,7 @@ pub(crate) async fn route_proppatch<R: ResourceService>(
.get_resource(path_components, false) .get_resource(path_components, false)
.await?; .await?;
let privileges = resource.get_user_privileges(principal)?; let privileges = resource.get_user_privileges(principal)?;
if !privileges.has(&UserPrivilege::Write) { if !privileges.has(&UserPrivilege::WriteProperties) {
return Err(Error::Unauthorized.into()); return Err(Error::Unauthorized.into());
} }
@@ -96,7 +96,7 @@ pub(crate) async fn route_proppatch<R: ResourceService>(
let mut props_conflict = Vec::new(); let mut props_conflict = Vec::new();
let mut props_not_found = Vec::new(); let mut props_not_found = Vec::new();
for operation in operations.into_iter() { for operation in operations {
match operation { match operation {
Operation::Set(SetPropertyElement { Operation::Set(SetPropertyElement {
prop: SetPropertyPropWrapperWrapper(properties), prop: SetPropertyPropWrapperWrapper(properties),
@@ -113,7 +113,7 @@ pub(crate) async fn route_proppatch<R: ResourceService>(
Err(Error::PropReadOnly) => props_conflict Err(Error::PropReadOnly) => props_conflict
.push((ns.map(NamespaceOwned::from), propname.to_owned())), .push((ns.map(NamespaceOwned::from), propname.to_owned())),
Err(err) => return Err(err.into()), Err(err) => return Err(err.into()),
}; }
} }
SetPropertyPropWrapper::Invalid(invalid) => { SetPropertyPropWrapper::Invalid(invalid) => {
let propname = invalid.tag_name(); let propname = invalid.tag_name();
@@ -131,7 +131,7 @@ pub(crate) async fn route_proppatch<R: ResourceService>(
// This happens in following cases: // This happens in following cases:
// - read-only properties with #[serde(skip_deserializing)] // - read-only properties with #[serde(skip_deserializing)]
// - internal properties // - internal properties
props_conflict.push(full_propname) props_conflict.push(full_propname);
} else { } else {
props_not_found.push((None, propname)); props_not_found.push((None, propname));
} }
@@ -154,7 +154,7 @@ pub(crate) async fn route_proppatch<R: ResourceService>(
}, },
// I guess removing a nonexisting property should be successful :) // I guess removing a nonexisting property should be successful :)
Err(_) => props_ok.push((None, propname)), Err(_) => props_ok.push((None, propname)),
}; }
} }
} }
} }

View File

@@ -42,6 +42,7 @@ pub trait Resource: Clone + Send + 'static {
fn get_resourcetype(&self) -> Resourcetype; fn get_resourcetype(&self) -> Resourcetype;
#[must_use]
fn list_props() -> Vec<(Option<Namespace<'static>>, &'static str)> { fn list_props() -> Vec<(Option<Namespace<'static>>, &'static str)> {
Self::Prop::variant_names() Self::Prop::variant_names()
} }
@@ -75,27 +76,27 @@ pub trait Resource: Clone + Send + 'static {
} }
fn satisfies_if_match(&self, if_match: &IfMatch) -> bool { fn satisfies_if_match(&self, if_match: &IfMatch) -> bool {
if let Some(etag) = self.get_etag() { self.get_etag().map_or_else(
if let Ok(etag) = ETag::from_str(&etag) { || if_match.is_any(),
if_match.precondition_passes(&etag) |etag| {
} else { ETag::from_str(&etag).map_or_else(
if_match.is_any() |_| if_match.is_any(),
} |etag| if_match.precondition_passes(&etag),
} else { )
if_match.is_any() },
} )
} }
fn satisfies_if_none_match(&self, if_none_match: &IfNoneMatch) -> bool { fn satisfies_if_none_match(&self, if_none_match: &IfNoneMatch) -> bool {
if let Some(etag) = self.get_etag() { self.get_etag().map_or_else(
if let Ok(etag) = ETag::from_str(&etag) { || if_none_match != &IfNoneMatch::any(),
if_none_match.precondition_passes(&etag) |etag| {
} else { ETag::from_str(&etag).map_or_else(
if_none_match != &IfNoneMatch::any() |_| if_none_match != &IfNoneMatch::any(),
} |etag| if_none_match.precondition_passes(&etag),
} else { )
if_none_match != &IfNoneMatch::any() },
} )
} }
fn get_user_privileges( fn get_user_privileges(
@@ -106,13 +107,13 @@ pub trait Resource: Clone + Send + 'static {
fn parse_propfind( fn parse_propfind(
body: &str, body: &str,
) -> Result<PropfindElement<<Self::Prop as PropName>::Names>, rustical_xml::XmlError> { ) -> Result<PropfindElement<<Self::Prop as PropName>::Names>, rustical_xml::XmlError> {
if !body.is_empty() { if body.is_empty() {
PropfindElement::parse_str(body)
} else {
Ok(PropfindElement { Ok(PropfindElement {
prop: PropfindType::Allprop, prop: PropfindType::Allprop,
include: None, include: None,
}) })
} else {
PropfindElement::parse_str(body)
} }
} }
@@ -139,7 +140,7 @@ pub trait Resource: Clone + Send + 'static {
.collect_vec(); .collect_vec();
return Ok(ResponseElement { return Ok(ResponseElement {
href: path.to_owned(), href: path.clone(),
propstat: vec![PropstatWrapper::TagList(PropstatElement { propstat: vec![PropstatWrapper::TagList(PropstatElement {
prop: TagList::from(props), prop: TagList::from(props),
status: StatusCode::OK, status: StatusCode::OK,
@@ -181,7 +182,7 @@ pub trait Resource: Clone + Send + 'static {
})); }));
} }
Ok(ResponseElement { Ok(ResponseElement {
href: path.to_owned(), href: path.clone(),
propstat: propstats, propstat: propstats,
..Default::default() ..Default::default()
}) })

View File

@@ -76,10 +76,7 @@ pub trait ResourceService: Clone + Sized + Send + Sync + AxumMethods + 'static {
Err(crate::Error::Forbidden.into()) Err(crate::Error::Forbidden.into())
} }
fn axum_service(self) -> AxumService<Self> fn axum_service(self) -> AxumService<Self> {
where
Self: AxumMethods,
{
AxumService::new(self) AxumService::new(self)
} }

View File

@@ -1,3 +1,72 @@
pub mod root; pub mod root;
pub use root::{RootResource, RootResourceService}; pub use root::{RootResource, RootResourceService};
#[cfg(test)]
pub mod test {
use crate::{
Error, Principal,
extensions::{CommonPropertiesExtension, CommonPropertiesProp},
namespace::NS_DAV,
privileges::UserPrivilegeSet,
resource::{PrincipalUri, Resource},
xml::{Resourcetype, ResourcetypeInner},
};
#[derive(Debug, Clone)]
pub struct TestPrincipal(pub String);
impl Principal for TestPrincipal {
fn get_id(&self) -> &str {
&self.0
}
}
impl Resource for TestPrincipal {
type Prop = CommonPropertiesProp;
type Error = Error;
type Principal = Self;
fn is_collection(&self) -> bool {
true
}
fn get_resourcetype(&self) -> crate::xml::Resourcetype {
Resourcetype(&[ResourcetypeInner(Some(NS_DAV), "collection")])
}
fn get_prop(
&self,
principal_uri: &impl crate::resource::PrincipalUri,
principal: &Self::Principal,
prop: &<Self::Prop as rustical_xml::PropName>::Names,
) -> Result<Self::Prop, Self::Error> {
<Self as CommonPropertiesExtension>::get_prop(self, principal_uri, principal, prop)
}
fn get_displayname(&self) -> Option<&str> {
Some(&self.0)
}
fn get_user_privileges(
&self,
principal: &Self::Principal,
) -> Result<UserPrivilegeSet, Self::Error> {
Ok(UserPrivilegeSet::owner_only(
principal.get_id() == self.get_id(),
))
}
}
#[derive(Debug, Clone)]
pub struct TestPrincipalUri;
impl PrincipalUri for TestPrincipalUri {
fn principal_collection(&self) -> String {
"/".to_owned()
}
fn principal_uri(&self, principal: &str) -> String {
format!("/{principal}/")
}
}
}

View File

@@ -63,7 +63,7 @@ pub struct RootResourceService<PRS: ResourceService + Clone, P: Principal, PURI:
impl<PRS: ResourceService + Clone, P: Principal, PURI: PrincipalUri> impl<PRS: ResourceService + Clone, P: Principal, PURI: PrincipalUri>
RootResourceService<PRS, P, PURI> RootResourceService<PRS, P, PURI>
{ {
pub fn new(principal_resource_service: PRS) -> Self { pub const fn new(principal_resource_service: PRS) -> Self {
Self(principal_resource_service, PhantomData, PhantomData) Self(principal_resource_service, PhantomData, PhantomData)
} }
} }
@@ -88,7 +88,7 @@ where
async fn get_resource( async fn get_resource(
&self, &self,
_: &(), (): &(),
_show_deleted: bool, _show_deleted: bool,
) -> Result<Self::Resource, Self::Error> { ) -> Result<Self::Resource, Self::Error> {
Ok(RootResource::<PRS::Resource, P>::default()) Ok(RootResource::<PRS::Resource, P>::default())
@@ -105,3 +105,33 @@ impl<PRS: ResourceService<Principal = P> + Clone, P: Principal, PURI: PrincipalU
for RootResourceService<PRS, P, PURI> for RootResourceService<PRS, P, PURI>
{ {
} }
#[cfg(test)]
mod test {
use crate::{
resource::Resource,
resources::{
RootResource,
test::{TestPrincipal, TestPrincipalUri},
},
};
#[test]
fn test_root_resource() {
let resource = RootResource::<TestPrincipal, TestPrincipal>::default();
let propfind = RootResource::<TestPrincipal, TestPrincipal>::parse_propfind(
r#"<?xml version="1.0" encoding="UTF-8"?><propfind xmlns="DAV:"><allprop/></propfind>"#,
)
.unwrap();
let _response = resource
.propfind(
"/",
&propfind.prop,
propfind.include.as_ref(),
&TestPrincipalUri,
&TestPrincipal("user".to_owned()),
)
.unwrap();
}
}

View File

@@ -1,8 +1,8 @@
use crate::xml::HrefElement; use crate::xml::HrefElement;
use rustical_xml::{XmlDeserialize, XmlSerialize}; use rustical_xml::{XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
pub struct GroupMembership(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>); pub struct GroupMembership(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone)]
pub struct GroupMemberSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>); pub struct GroupMemberSet(#[xml(ty = "untagged", flatten)] pub Vec<HrefElement>);

View File

@@ -1,14 +1,15 @@
use derive_more::From; use derive_more::From;
use rustical_xml::{XmlDeserialize, XmlSerialize}; use rustical_xml::{XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, Debug, Clone, From, PartialEq)] #[derive(XmlDeserialize, XmlSerialize, Debug, Clone, From, PartialEq, Eq)]
pub struct HrefElement { pub struct HrefElement {
#[xml(ns = "crate::namespace::NS_DAV")] #[xml(ns = "crate::namespace::NS_DAV")]
pub href: String, pub href: String,
} }
impl HrefElement { impl HrefElement {
pub fn new(href: String) -> Self { #[must_use]
pub const fn new(href: String) -> Self {
Self { href } Self { href }
} }
} }

View File

@@ -19,6 +19,7 @@ pub struct PropstatElement<PropType: XmlSerialize> {
pub status: StatusCode, pub status: StatusCode,
} }
#[allow(clippy::trivially_copy_pass_by_ref)]
fn xml_serialize_status( fn xml_serialize_status(
status: &StatusCode, status: &StatusCode,
ns: Option<Namespace>, ns: Option<Namespace>,
@@ -26,7 +27,7 @@ fn xml_serialize_status(
namespaces: &HashMap<Namespace, &str>, namespaces: &HashMap<Namespace, &str>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
XmlSerialize::serialize(&format!("HTTP/1.1 {}", status), ns, tag, namespaces, writer) XmlSerialize::serialize(&format!("HTTP/1.1 {status}"), ns, tag, namespaces, writer)
} }
#[derive(XmlSerialize)] #[derive(XmlSerialize)]
@@ -56,6 +57,7 @@ pub struct ResponseElement<PropstatType: XmlSerialize> {
pub propstat: Vec<PropstatWrapper<PropstatType>>, pub propstat: Vec<PropstatWrapper<PropstatType>>,
} }
#[allow(clippy::trivially_copy_pass_by_ref, clippy::ref_option)]
fn xml_serialize_optional_status( fn xml_serialize_optional_status(
val: &Option<StatusCode>, val: &Option<StatusCode>,
ns: Option<Namespace>, ns: Option<Namespace>,
@@ -64,7 +66,7 @@ fn xml_serialize_optional_status(
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
XmlSerialize::serialize( XmlSerialize::serialize(
&val.map(|status| format!("HTTP/1.1 {}", status)), &val.map(|status| format!("HTTP/1.1 {status}")),
ns, ns,
tag, tag,
namespaces, namespaces,

View File

@@ -6,7 +6,7 @@ use rustical_xml::XmlDeserialize;
use rustical_xml::XmlError; use rustical_xml::XmlError;
use rustical_xml::XmlRootTag; use rustical_xml::XmlRootTag;
#[derive(Debug, Clone, XmlDeserialize, XmlRootTag, PartialEq)] #[derive(Debug, Clone, XmlDeserialize, XmlRootTag, PartialEq, Eq)]
#[xml(root = "propfind", ns = "crate::namespace::NS_DAV")] #[xml(root = "propfind", ns = "crate::namespace::NS_DAV")]
pub struct PropfindElement<PN: XmlDeserialize> { pub struct PropfindElement<PN: XmlDeserialize> {
#[xml(ty = "untagged")] #[xml(ty = "untagged")]
@@ -15,7 +15,7 @@ pub struct PropfindElement<PN: XmlDeserialize> {
pub include: Option<PropElement<PN>>, pub include: Option<PropElement<PN>>,
} }
#[derive(Debug, Clone, PartialEq)] #[derive(Debug, Clone, PartialEq, Eq)]
pub struct PropElement<PN: XmlDeserialize>( pub struct PropElement<PN: XmlDeserialize>(
// valid // valid
pub Vec<PN>, pub Vec<PN>,
@@ -82,7 +82,7 @@ impl<PN: XmlDeserialize> XmlDeserialize for PropElement<PN> {
} }
} }
#[derive(Debug, Clone, XmlDeserialize, PartialEq)] #[derive(Debug, Clone, XmlDeserialize, PartialEq, Eq)]
pub enum PropfindType<PN: XmlDeserialize> { pub enum PropfindType<PN: XmlDeserialize> {
#[xml(ns = "crate::namespace::NS_DAV")] #[xml(ns = "crate::namespace::NS_DAV")]
Propname, Propname,

View File

@@ -2,7 +2,7 @@ use rustical_xml::XmlSerialize;
use strum::VariantArray; use strum::VariantArray;
// RFC 3253 section-3.1.5 // RFC 3253 section-3.1.5
#[derive(Debug, Clone, XmlSerialize, PartialEq)] #[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
pub struct SupportedReportSet<T: XmlSerialize + 'static> { pub struct SupportedReportSet<T: XmlSerialize + 'static> {
#[xml(flatten)] #[xml(flatten)]
#[xml(ns = "crate::namespace::NS_DAV")] #[xml(ns = "crate::namespace::NS_DAV")]
@@ -10,6 +10,7 @@ pub struct SupportedReportSet<T: XmlSerialize + 'static> {
} }
impl<T: XmlSerialize + Clone + 'static> SupportedReportSet<T> { impl<T: XmlSerialize + Clone + 'static> SupportedReportSet<T> {
#[must_use]
pub fn new(methods: Vec<T>) -> Self { pub fn new(methods: Vec<T>) -> Self {
Self { Self {
supported_report: methods supported_report: methods
@@ -27,7 +28,7 @@ impl<T: XmlSerialize + Clone + 'static> SupportedReportSet<T> {
} }
} }
#[derive(Debug, Clone, XmlSerialize, PartialEq)] #[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
pub struct ReportWrapper<T: XmlSerialize> { pub struct ReportWrapper<T: XmlSerialize> {
#[xml(ns = "crate::namespace::NS_DAV")] #[xml(ns = "crate::namespace::NS_DAV")]
report: T, report: T,

View File

@@ -1,9 +1,9 @@
use rustical_xml::XmlSerialize; use rustical_xml::XmlSerialize;
#[derive(Debug, Clone, PartialEq, XmlSerialize)] #[derive(Debug, Clone, PartialEq, Eq, XmlSerialize)]
pub struct Resourcetype(#[xml(flatten, ty = "untagged")] pub &'static [ResourcetypeInner]); pub struct Resourcetype(#[xml(flatten, ty = "untagged")] pub &'static [ResourcetypeInner]);
#[derive(Debug, Clone, PartialEq, XmlSerialize)] #[derive(Debug, Clone, PartialEq, Eq, XmlSerialize)]
pub struct ResourcetypeInner( pub struct ResourcetypeInner(
#[xml(ty = "namespace")] pub Option<quick_xml::name::Namespace<'static>>, #[xml(ty = "namespace")] pub Option<quick_xml::name::Namespace<'static>>,
#[xml(ty = "tag_name")] pub &'static str, #[xml(ty = "tag_name")] pub &'static str,
@@ -40,6 +40,6 @@ mod tests {
<calendar-color xmlns="http://calendarserver.org/ns/"/> <calendar-color xmlns="http://calendarserver.org/ns/"/>
</resourcetype> </resourcetype>
</document>"# </document>"#
) );
} }
} }

View File

@@ -2,7 +2,7 @@ use rustical_xml::{ValueDeserialize, ValueSerialize, XmlDeserialize, XmlRootTag}
use super::PropfindType; use super::PropfindType;
#[derive(Clone, Debug, PartialEq)] #[derive(Clone, Debug, PartialEq, Eq)]
pub enum SyncLevel { pub enum SyncLevel {
One, One,
Infinity, Infinity,
@@ -25,15 +25,15 @@ impl ValueDeserialize for SyncLevel {
impl ValueSerialize for SyncLevel { impl ValueSerialize for SyncLevel {
fn serialize(&self) -> String { fn serialize(&self) -> String {
match self { match self {
SyncLevel::One => "1", Self::One => "1",
SyncLevel::Infinity => "Infinity", Self::Infinity => "Infinity",
} }
.to_owned() .to_owned()
} }
} }
// https://datatracker.ietf.org/doc/html/rfc5323#section-5.17 // https://datatracker.ietf.org/doc/html/rfc5323#section-5.17
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
pub struct LimitElement { pub struct LimitElement {
#[xml(ns = "crate::namespace::NS_DAV")] #[xml(ns = "crate::namespace::NS_DAV")]
pub nresults: NresultsElement, pub nresults: NresultsElement,
@@ -53,10 +53,10 @@ impl From<LimitElement> for u64 {
} }
} }
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
pub struct NresultsElement(#[xml(ty = "text")] u64); pub struct NresultsElement(#[xml(ty = "text")] u64);
#[derive(XmlDeserialize, Clone, Debug, PartialEq, XmlRootTag)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq, XmlRootTag)]
// <!ELEMENT sync-collection (sync-token, sync-level, limit?, prop)> // <!ELEMENT sync-collection (sync-token, sync-level, limit?, prop)>
// <!-- DAV:limit defined in RFC 5323, Section 5.17 --> // <!-- DAV:limit defined in RFC 5323, Section 5.17 -->
// <!-- DAV:prop defined in RFC 4918, Section 14.18 --> // <!-- DAV:prop defined in RFC 4918, Section 14.18 -->
@@ -106,11 +106,11 @@ mod tests {
assert_eq!( assert_eq!(
request, request,
SyncCollectionRequest { SyncCollectionRequest {
sync_token: "".to_owned(), sync_token: String::new(),
sync_level: SyncLevel::One, sync_level: SyncLevel::One,
prop: PropfindType::Prop(PropElement(vec![TestPropName::Getetag], vec![])), prop: PropfindType::Prop(PropElement(vec![TestPropName::Getetag], vec![])),
limit: Some(100.into()) limit: Some(100.into())
} }
) );
} }
} }

View File

@@ -6,7 +6,7 @@ use quick_xml::{
use rustical_xml::{NamespaceOwned, XmlSerialize}; use rustical_xml::{NamespaceOwned, XmlSerialize};
use std::collections::HashMap; use std::collections::HashMap;
#[derive(Clone, Debug, PartialEq, From)] #[derive(Clone, Debug, PartialEq, Eq, From)]
pub struct TagList(Vec<(Option<NamespaceOwned>, String)>); pub struct TagList(Vec<(Option<NamespaceOwned>, String)>);
impl XmlSerialize for TagList { impl XmlSerialize for TagList {
@@ -17,16 +17,13 @@ impl XmlSerialize for TagList {
namespaces: &HashMap<Namespace, &str>, namespaces: &HashMap<Namespace, &str>,
writer: &mut quick_xml::Writer<&mut Vec<u8>>, writer: &mut quick_xml::Writer<&mut Vec<u8>>,
) -> std::io::Result<()> { ) -> std::io::Result<()> {
let prefix = ns let prefix = ns.and_then(|ns| namespaces.get(&ns)).map(|prefix| {
.map(|ns| namespaces.get(&ns)) if prefix.is_empty() {
.unwrap_or(None) String::new()
.map(|prefix| { } else {
if !prefix.is_empty() { format!("{prefix}:")
format!("{prefix}:") }
} else { });
String::new()
}
});
let has_prefix = prefix.is_some(); let has_prefix = prefix.is_some();
let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat()); let tagname = tag.map(|tag| [&prefix.unwrap_or_default(), tag].concat());

View File

@@ -1,6 +1,7 @@
[package] [package]
name = "rustical_dav_push" name = "rustical_dav_push"
version.workspace = true version.workspace = true
rust-version.workspace = true
edition.workspace = true edition.workspace = true
description.workspace = true description.workspace = true
repository.workspace = true repository.workspace = true
@@ -9,15 +10,15 @@ publish = false
[dependencies] [dependencies]
rustical_xml.workspace = true rustical_xml.workspace = true
async-trait = { workspace = true } async-trait.workspace = true
futures-util = { workspace = true } futures-util.workspace = true
quick-xml = { workspace = true } quick-xml.workspace = true
serde = { workspace = true } serde.workspace = true
thiserror = { workspace = true } thiserror.workspace = true
itertools = { workspace = true } itertools.workspace = true
log = { workspace = true } log.workspace = true
derive_more = { workspace = true } derive_more.workspace = true
tracing = { workspace = true } tracing.workspace = true
reqwest.workspace = true reqwest.workspace = true
tokio.workspace = true tokio.workspace = true
rustical_dav.workspace = true rustical_dav.workspace = true

View File

@@ -2,7 +2,7 @@ use crate::{ContentUpdate, PropertyUpdate, SupportedTriggers, Transports, Trigge
use rustical_dav::header::Depth; use rustical_dav::header::Depth;
use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize}; use rustical_xml::{EnumVariants, PropName, XmlDeserialize, XmlSerialize};
#[derive(XmlDeserialize, XmlSerialize, PartialEq, Clone, PropName, EnumVariants)] #[derive(XmlDeserialize, XmlSerialize, PartialEq, Eq, Clone, PropName, EnumVariants)]
#[xml(unit_variants_ident = "DavPushExtensionPropName")] #[xml(unit_variants_ident = "DavPushExtensionPropName")]
pub enum DavPushExtensionProp { pub enum DavPushExtensionProp {
// WebDav Push // WebDav Push
@@ -32,7 +32,7 @@ pub trait DavPushExtension {
) -> Result<DavPushExtensionProp, rustical_dav::Error> { ) -> Result<DavPushExtensionProp, rustical_dav::Error> {
Ok(match &prop { Ok(match &prop {
DavPushExtensionPropName::Transports => { DavPushExtensionPropName::Transports => {
DavPushExtensionProp::Transports(Default::default()) DavPushExtensionProp::Transports(Transports::default())
} }
DavPushExtensionPropName::Topic => DavPushExtensionProp::Topic(self.get_topic()), DavPushExtensionPropName::Topic => DavPushExtensionProp::Topic(self.get_topic()),
DavPushExtensionPropName::SupportedTriggers => { DavPushExtensionPropName::SupportedTriggers => {

View File

@@ -1,3 +1,5 @@
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
#![allow(clippy::missing_errors_doc)]
mod extension; mod extension;
mod prop; mod prop;
pub mod register; pub mod register;
@@ -57,7 +59,7 @@ impl<S: SubscriptionStore> DavPushController<S> {
let mut latest_messages = HashMap::new(); let mut latest_messages = HashMap::new();
for message in messages { for message in messages {
if matches!(message.data, CollectionOperationInfo::Content { .. }) { if matches!(message.data, CollectionOperationInfo::Content { .. }) {
latest_messages.insert(message.topic.to_string(), message); latest_messages.insert(message.topic.clone(), message);
} }
} }
let messages = latest_messages.into_values(); let messages = latest_messages.into_values();
@@ -68,6 +70,7 @@ impl<S: SubscriptionStore> DavPushController<S> {
} }
} }
#[allow(clippy::cognitive_complexity)]
async fn send_message(&self, message: CollectionOperation) { async fn send_message(&self, message: CollectionOperation) {
let subscriptions = match self.sub_store.get_subscriptions(&message.topic).await { let subscriptions = match self.sub_store.get_subscriptions(&message.topic).await {
Ok(subs) => subs, Ok(subs) => subs,
@@ -124,7 +127,7 @@ impl<S: SubscriptionStore> DavPushController<S> {
subsciption.id, subsciption.topic subsciption.id, subsciption.topic
); );
self.try_delete_subscription(&subsciption.id).await; self.try_delete_subscription(&subsciption.id).await;
}; }
} }
if let Err(err) = self.send_payload(&payload, &subsciption).await { if let Err(err) = self.send_payload(&payload, &subsciption).await {
@@ -153,12 +156,13 @@ impl<S: SubscriptionStore> DavPushController<S> {
) -> Result<(), NotifierError> { ) -> Result<(), NotifierError> {
if subsciption.public_key_type != "p256dh" { if subsciption.public_key_type != "p256dh" {
return Err(NotifierError::InvalidPublicKeyType( return Err(NotifierError::InvalidPublicKeyType(
subsciption.public_key_type.to_string(), subsciption.public_key_type.clone(),
)); ));
} }
let endpoint = subsciption.push_resource.parse().map_err(|_| { let endpoint = subsciption
NotifierError::InvalidEndpointUrl(subsciption.push_resource.to_string()) .push_resource
})?; .parse()
.map_err(|_| NotifierError::InvalidEndpointUrl(subsciption.push_resource.clone()))?;
let ua_public = base64::engine::general_purpose::URL_SAFE_NO_PAD let ua_public = base64::engine::general_purpose::URL_SAFE_NO_PAD
.decode(&subsciption.public_key) .decode(&subsciption.public_key)
.map_err(|_| NotifierError::InvalidKeyEncoding)?; .map_err(|_| NotifierError::InvalidKeyEncoding)?;
@@ -206,7 +210,7 @@ enum NotifierError {
impl NotifierError { impl NotifierError {
// Decide whether the error should cause the subscription to be removed // Decide whether the error should cause the subscription to be removed
pub fn is_permament_error(&self) -> bool { pub const fn is_permament_error(&self) -> bool {
match self { match self {
Self::InvalidPublicKeyType(_) Self::InvalidPublicKeyType(_)
| Self::InvalidEndpointUrl(_) | Self::InvalidEndpointUrl(_)

View File

@@ -1,13 +1,13 @@
use rustical_dav::header::Depth; use rustical_dav::header::Depth;
use rustical_xml::{Unparsed, XmlDeserialize, XmlSerialize}; use rustical_xml::{Unparsed, XmlDeserialize, XmlSerialize};
#[derive(Debug, Clone, XmlSerialize, PartialEq)] #[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
pub enum Transport { pub enum Transport {
#[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")] #[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")]
WebPush, WebPush,
} }
#[derive(Debug, Clone, XmlSerialize, PartialEq)] #[derive(Debug, Clone, XmlSerialize, PartialEq, Eq)]
pub struct Transports { pub struct Transports {
#[xml(flatten, ty = "untagged")] #[xml(flatten, ty = "untagged")]
#[xml(ns = "crate::namespace::NS_DAVPUSH")] #[xml(ns = "crate::namespace::NS_DAVPUSH")]
@@ -22,10 +22,10 @@ impl Default for Transports {
} }
} }
#[derive(XmlSerialize, XmlDeserialize, PartialEq, Clone)] #[derive(XmlSerialize, XmlDeserialize, PartialEq, Eq, Clone)]
pub struct SupportedTriggers(#[xml(flatten, ty = "untagged")] pub Vec<Trigger>); pub struct SupportedTriggers(#[xml(flatten, ty = "untagged")] pub Vec<Trigger>);
#[derive(XmlSerialize, XmlDeserialize, PartialEq, Debug, Clone)] #[derive(XmlSerialize, XmlDeserialize, PartialEq, Eq, Debug, Clone)]
pub enum Trigger { pub enum Trigger {
#[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")] #[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")]
ContentUpdate(ContentUpdate), ContentUpdate(ContentUpdate),
@@ -33,12 +33,12 @@ pub enum Trigger {
PropertyUpdate(PropertyUpdate), PropertyUpdate(PropertyUpdate),
} }
#[derive(XmlSerialize, XmlDeserialize, PartialEq, Clone, Debug)] #[derive(XmlSerialize, XmlDeserialize, PartialEq, Eq, Clone, Debug)]
pub struct ContentUpdate( pub struct ContentUpdate(
#[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth, #[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth,
); );
#[derive(XmlSerialize, PartialEq, Clone, Debug)] #[derive(XmlSerialize, PartialEq, Eq, Clone, Debug)]
pub struct PropertyUpdate( pub struct PropertyUpdate(
#[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth, #[xml(rename = "depth", ns = "rustical_dav::namespace::NS_DAV")] pub Depth,
); );

View File

@@ -1,7 +1,7 @@
use crate::Trigger; use crate::Trigger;
use rustical_xml::{XmlDeserialize, XmlRootTag, XmlSerialize}; use rustical_xml::{XmlDeserialize, XmlRootTag, XmlSerialize};
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
#[xml(ns = "crate::namespace::NS_DAVPUSH")] #[xml(ns = "crate::namespace::NS_DAVPUSH")]
pub struct WebPushSubscription { pub struct WebPushSubscription {
#[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")] #[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")]
@@ -15,7 +15,7 @@ pub struct WebPushSubscription {
pub auth_secret: String, pub auth_secret: String,
} }
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
pub struct SubscriptionPublicKey { pub struct SubscriptionPublicKey {
#[xml(ty = "attr", rename = "type")] #[xml(ty = "attr", rename = "type")]
pub ty: String, pub ty: String,
@@ -23,16 +23,16 @@ pub struct SubscriptionPublicKey {
pub key: String, pub key: String,
} }
#[derive(XmlDeserialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, Clone, Debug, PartialEq, Eq)]
pub struct SubscriptionElement { pub struct SubscriptionElement {
#[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")] #[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")]
pub web_push_subscription: WebPushSubscription, pub web_push_subscription: WebPushSubscription,
} }
#[derive(XmlDeserialize, XmlSerialize, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, XmlSerialize, Clone, Debug, PartialEq, Eq)]
pub struct TriggerElement(#[xml(ty = "untagged", flatten)] Vec<Trigger>); pub struct TriggerElement(#[xml(ty = "untagged", flatten)] Vec<Trigger>);
#[derive(XmlDeserialize, XmlRootTag, Clone, Debug, PartialEq)] #[derive(XmlDeserialize, XmlRootTag, Clone, Debug, PartialEq, Eq)]
#[xml(root = "push-register")] #[xml(root = "push-register")]
#[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")] #[xml(ns = "rustical_dav::namespace::NS_DAVPUSH")]
pub struct PushRegister { pub struct PushRegister {
@@ -100,6 +100,6 @@ mod tests {
Trigger::PropertyUpdate(PropertyUpdate(Depth::Zero)), Trigger::PropertyUpdate(PropertyUpdate(Depth::Zero)),
])) ]))
} }
) );
} }
} }

View File

@@ -1,6 +1,7 @@
[package] [package]
name = "rustical_frontend" name = "rustical_frontend"
version.workspace = true version.workspace = true
rust-version.workspace = true
edition.workspace = true edition.workspace = true
description.workspace = true description.workspace = true
repository.workspace = true repository.workspace = true
@@ -39,3 +40,6 @@ headers.workspace = true
tower-sessions.workspace = true tower-sessions.workspace = true
percent-encoding.workspace = true percent-encoding.workspace = true
tower-http = { workspace = true, optional = true } tower-http = { workspace = true, optional = true }
vtimezones-rs.workspace = true
serde_json.workspace = true
itertools.workspace = true

Some files were not shown because too many files have changed in this diff Show More