Compare commits

...

15 Commits

Author SHA1 Message Date
Lennart K
dde05d2f45 Workflow: Publish container images for feature branches too 2026-01-16 16:29:38 +01:00
Lennart K
f503bf2bf7 Update quick-xml 2026-01-16 15:47:10 +01:00
Lennart K
d84158e8ad version 0.11.17 2026-01-16 12:26:43 +01:00
Lennart K
7ef566040a Disable a test that will be fixed in 0.12 2026-01-16 12:16:02 +01:00
Lennart K
1c1f0c6da2 Update ical-rs@dev to fix cargo vendor 2026-01-16 12:10:10 +01:00
Lennart
3fafbd22f4 version 0.11.16 2026-01-15 23:43:00 +01:00
Lennart
e68dc921e6 Now actually fix builds 2026-01-15 23:19:35 +01:00
Lennart
60b45e70ad fix docker builds 2026-01-15 22:31:40 +01:00
Lennart
a0c33c82dd version 0.11.14 2026-01-15 13:32:45 +01:00
Lennart
8ae5e46abf Automatic repair for calendar objects with invalid VERSION:4.0 2026-01-15 13:30:14 +01:00
Lennart
48b2e614a8 Suppress ical invalid version error 2026-01-15 11:15:20 +01:00
Lennart K
f26214abb9 build Docker images for dev branch 2026-01-12 11:15:08 +01:00
Lennart
276e65d41a version 0.11.11 2026-01-10 13:37:24 +01:00
Lennart
7c3e9ecbc1 update ical-rs dev to remove panics 2026-01-10 13:35:14 +01:00
Lennart
53f81a9433 Add a startup test to check whether existing data will be compatible with v0.12 2026-01-10 13:22:49 +01:00
17 changed files with 542 additions and 232 deletions

View File

@@ -2,7 +2,10 @@ name: Docker
on:
push:
branches: ["main"]
branches:
- main
- dev
- feat/*
release:
types: ["published"]
@@ -45,7 +48,8 @@ jobs:
with:
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
tags: |
type=ref,event=branch
${{ github.ref_name == 'main' && 'type=ref,event=branch' || '' }}
type=ref,event=branch,prefix=br-
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "DELETE FROM calendarobjectchangelog WHERE (principal, cal_id, object_id) = (?, ?, ?);",
"describe": {
"columns": [],
"parameters": {
"Right": 3
},
"nullable": []
},
"hash": "146e23ae4e0eaae4d65ac7563c67d4f295ccc2534dcc4b3bd710de773ed137f9"
}

View File

@@ -0,0 +1,12 @@
{
"db_name": "SQLite",
"query": "UPDATE calendarobjects SET ics = ? WHERE (principal, cal_id, id) = (?, ?, ?);",
"describe": {
"columns": [],
"parameters": {
"Right": 4
},
"nullable": []
},
"hash": "354decac84758c88280f60fbf0f93dddc6c7ff92ac7b8ba44049d31df3c680e3"
}

View File

@@ -0,0 +1,38 @@
{
"db_name": "SQLite",
"query": "SELECT principal, cal_id, id, ics FROM calendarobjects WHERE ics LIKE '%VERSION:4.0%';",
"describe": {
"columns": [
{
"name": "principal",
"ordinal": 0,
"type_info": "Text"
},
{
"name": "cal_id",
"ordinal": 1,
"type_info": "Text"
},
{
"name": "id",
"ordinal": 2,
"type_info": "Text"
},
{
"name": "ics",
"ordinal": 3,
"type_info": "Text"
}
],
"parameters": {
"Right": 0
},
"nullable": [
false,
false,
false,
false
]
},
"hash": "bdaa4bee8b01d0e3773e34672ed4805d1e71d24888f2227045afd90bf080fc23"
}

426
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -2,7 +2,7 @@
members = ["crates/*"]
[workspace.package]
version = "0.11.10"
version = "0.11.17"
rust-version = "1.92"
edition = "2024"
description = "A CalDAV server"
@@ -73,7 +73,7 @@ tokio = { version = "1.48", features = [
url = "2.5"
base64 = "0.22"
thiserror = "2.0"
quick-xml = { version = "0.38" }
quick-xml = { version = "0.39" }
rust-embed = "8.9"
tower-sessions = "0.14"
futures-core = "0.3"
@@ -107,9 +107,7 @@ strum = "0.27"
strum_macros = "0.27"
serde_json = { version = "1.0", features = ["raw_value"] }
sqlx-sqlite = { version = "0.8", features = ["bundled"] }
ical = { git = "https://github.com/lennart-k/ical-rs", features = [
"chrono-tz",
] }
ical = { git = "https://github.com/lennart-k/ical-rs", rev = "7c2ab1f3" }
toml = "0.9"
tower = "0.5"
tower-http = { version = "0.6", features = [
@@ -201,3 +199,7 @@ tower-http.workspace = true
axum-extra.workspace = true
headers.workspace = true
http.workspace = true
# TODO: Remove in next major release
ical_dev = { package = "ical", git = "https://github.com/lennart-k/ical-rs", branch = "dev", features = [
"chrono-tz",
] }

View File

@@ -36,7 +36,7 @@ COPY --from=planner /rustical/recipe.json recipe.json
RUN cargo chef cook --release --target "$(cat /tmp/rust_target)"
COPY . .
RUN cargo install --target "$(cat /tmp/rust_target)" --path .
RUN cargo install --locked --target "$(cat /tmp/rust_target)" --path .
FROM scratch
COPY --from=builder /usr/local/cargo/bin/rustical /usr/local/bin/rustical

View File

@@ -45,7 +45,7 @@ impl<PN: XmlDeserialize> XmlDeserialize for PropElement<PN> {
// start of a child element
Event::Start(start) | Event::Empty(start) => {
let empty = matches!(event, Event::Empty(_));
let (ns, name) = reader.resolve_element(start.name());
let (ns, name) = reader.resolver().resolve_element(start.name());
let ns = match ns {
ResolveResult::Bound(ns) => Some(NamespaceOwned::from(ns)),
ResolveResult::Unknown(_ns) => todo!("handle error"),

View File

@@ -349,37 +349,38 @@ UID:abcd3
X-ABC-GUID:E1CX5Dr-0007ym-Hz@example.com
END:VEVENT"];
#[rstest]
#[case(ICS_1, EXPANDED_1, None, None)]
// from https://datatracker.ietf.org/doc/html/rfc4791#section-7.8.3
#[case(ICS_2, EXPANDED_2,
Some(CalDateTime::parse("20060103T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc()),
Some(CalDateTime::parse("20060105T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc())
)]
#[case(ICS_3, EXPANDED_3,
Some(CalDateTime::parse("20060103T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc()),
Some(CalDateTime::parse("20060105T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc())
)]
fn test_expand_recurrence(
#[case] ics: &'static str,
#[case] expanded: &[&str],
#[case] from: Option<DateTime<Utc>>,
#[case] to: Option<DateTime<Utc>>,
) {
let event = CalendarObject::from_ics(ics.to_string(), None).unwrap();
let crate::CalendarObjectComponent::Event(event, overrides) = event.get_data() else {
panic!()
};
let events: Vec<String> = event
.expand_recurrence(from, to, overrides)
.unwrap()
.into_iter()
.map(|event| Emitter::generate(&event))
.collect();
assert_eq!(events.len(), expanded.len());
for (output, reference) in events.iter().zip(expanded) {
similar_asserts::assert_eq!(output, reference);
}
}
// The implementation never was entirely correct but will be fixed in v0.12
// #[rstest]
// #[case(ICS_1, EXPANDED_1, None, None)]
// // from https://datatracker.ietf.org/doc/html/rfc4791#section-7.8.3
// #[case(ICS_2, EXPANDED_2,
// Some(CalDateTime::parse("20060103T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc()),
// Some(CalDateTime::parse("20060105T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc())
// )]
// #[case(ICS_3, EXPANDED_3,
// Some(CalDateTime::parse("20060103T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc()),
// Some(CalDateTime::parse("20060105T000000Z", Some(chrono_tz::US::Eastern)).unwrap().utc())
// )]
// fn test_expand_recurrence(
// #[case] ics: &'static str,
// #[case] expanded: &[&str],
// #[case] from: Option<DateTime<Utc>>,
// #[case] to: Option<DateTime<Utc>>,
// ) {
// let event = CalendarObject::from_ics(ics.to_string(), None).unwrap();
// let crate::CalendarObjectComponent::Event(event, overrides) = event.get_data() else {
// panic!()
// };
//
// let events: Vec<String> = event
// .expand_recurrence(from, to, overrides)
// .unwrap()
// .into_iter()
// .map(|event| Emitter::generate(&event))
// .collect();
// assert_eq!(events.len(), expanded.len());
// for (output, reference) in events.iter().zip(expanded) {
// similar_asserts::assert_eq!(output, reference);
// }
// }
}

View File

@@ -36,3 +36,4 @@ pbkdf2.workspace = true
rustical_ical.workspace = true
rstest = { workspace = true, optional = true }
sha2.workspace = true
regex.workspace = true

View File

@@ -3,6 +3,7 @@ use crate::BEGIN_IMMEDIATE;
use async_trait::async_trait;
use chrono::TimeDelta;
use derive_more::derive::Constructor;
use regex::Regex;
use rustical_ical::{CalDateTime, CalendarObject, CalendarObjectType};
use rustical_store::calendar_store::CalendarQuery;
use rustical_store::synctoken::format_synctoken;
@@ -145,6 +146,83 @@ impl SqliteCalendarStore {
}
}
/// In the past exports generated objects with invalid VERSION:4.0
/// This repair sets them to VERSION:2.0
#[allow(clippy::missing_panics_doc)]
pub async fn repair_invalid_version_4_0(&self) -> Result<(), Error> {
struct Row {
principal: String,
cal_id: String,
id: String,
ics: String,
}
let mut tx = self
.db
.begin_with(BEGIN_IMMEDIATE)
.await
.map_err(crate::Error::from)?;
#[allow(clippy::missing_panics_doc)]
let version_pattern = Regex::new(r"(?mi)^VERSION:4.0").unwrap();
let repairs: Vec<Row> = sqlx::query_as!(
Row,
r#"SELECT principal, cal_id, id, ics FROM calendarobjects WHERE ics LIKE '%VERSION:4.0%';"#
)
.fetch_all(&mut *tx)
.await
.map_err(crate::Error::from)?
.into_iter()
.filter_map(|mut row| {
version_pattern.find(&row.ics)?;
let new_ics = version_pattern.replace(&row.ics, "VERSION:2.0");
// Safeguard that we really only changed the version
assert_eq!(row.ics.len(), new_ics.len());
row.ics = new_ics.to_string();
Some(row)
})
.collect();
if repairs.is_empty() {
return Ok(());
}
warn!(
"Found {} calendar objects with invalid VERSION:4.0. Repairing by setting to VERSION:2.0",
repairs.len()
);
for repair in &repairs {
// calendarobjectchangelog is used by sync-collection to fetch changes
// By deleting entries we will later regenerate new entries such that clients will notice
// the objects have changed
warn!(
"Repairing VERSION for {}/{}/{}.ics",
repair.principal, repair.cal_id, repair.id
);
sqlx::query!(
"DELETE FROM calendarobjectchangelog WHERE (principal, cal_id, object_id) = (?, ?, ?);",
repair.principal, repair.cal_id, repair.id
).execute(&mut *tx).await
.map_err(crate::Error::from)?;
sqlx::query!(
"UPDATE calendarobjects SET ics = ? WHERE (principal, cal_id, id) = (?, ?, ?);",
repair.ics,
repair.principal,
repair.cal_id,
repair.id
)
.execute(&mut *tx)
.await
.map_err(crate::Error::from)?;
}
tx.commit().await.map_err(crate::Error::from)?;
Ok(())
}
// Commit "orphaned" objects to the changelog table
pub async fn repair_orphans(&self) -> Result<(), Error> {
struct Row {

View File

@@ -136,7 +136,7 @@ impl NamedStruct {
#(#builder_field_inits),*
};
let (ns, name) = reader.resolve_element(start.name());
let (ns, name) = reader.resolver().resolve_element(start.name());
#(#tagname_field_branches);*
#(#namespace_field_branches);*
@@ -161,7 +161,7 @@ impl NamedStruct {
// start of a child element
Event::Start(start) | Event::Empty(start) => {
let empty = matches!(event, Event::Empty(_));
let (ns, name) = reader.resolve_element(start.name());
let (ns, name) = reader.resolver().resolve_element(start.name());
match (ns, name.as_ref()) {
#(#named_field_branches),*
#(#untagged_field_branches),*

View File

@@ -42,7 +42,7 @@ impl<T: XmlRootTag + XmlDeserialize> XmlDocument for T {
match event {
Event::Decl(_) | Event::Comment(_) => { /* ignore this */ }
Event::Start(start) | Event::Empty(start) => {
let (ns, name) = reader.resolve_element(start.name());
let (ns, name) = reader.resolver().resolve_element(start.name());
let matches = match (Self::root_ns(), &ns, name) {
// Wrong tag
(_, _, name) if name.as_ref() != Self::root_tag().as_bytes() => false,

View File

@@ -17,6 +17,7 @@ pub fn cmd_gen_config(_args: GenConfigArgs) -> anyhow::Result<()> {
http: HttpConfig::default(),
data_store: DataStoreConfig::Sqlite(SqliteDataStoreConfig {
db_url: "/var/lib/rustical/db.sqlite3".to_owned(),
run_repairs: true,
}),
tracing: TracingConfig::default(),
frontend: FrontendConfig {

View File

@@ -26,6 +26,8 @@ impl Default for HttpConfig {
#[serde(deny_unknown_fields)]
pub struct SqliteDataStoreConfig {
pub db_url: String,
#[serde(default = "default_true")]
pub run_repairs: bool,
}
#[derive(Debug, Deserialize, Serialize)]

View File

@@ -25,7 +25,7 @@ use std::sync::Arc;
use tokio::sync::mpsc::Receiver;
use tower::Layer;
use tower_http::normalize_path::NormalizePathLayer;
use tracing::info;
use tracing::{info, warn};
mod app;
mod commands;
@@ -34,6 +34,9 @@ mod config;
pub mod integration_tests;
mod setup_tracing;
mod migration_0_12;
use migration_0_12::{validate_address_objects_0_12, validate_calendar_objects_0_12};
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
struct Args {
@@ -67,15 +70,22 @@ async fn get_data_stores(
Receiver<CollectionOperation>,
)> {
Ok(match &config {
DataStoreConfig::Sqlite(SqliteDataStoreConfig { db_url }) => {
DataStoreConfig::Sqlite(SqliteDataStoreConfig {
db_url,
run_repairs,
}) => {
let db = create_db_pool(db_url, migrate).await?;
// Channel to watch for changes (for DAV Push)
let (send, recv) = tokio::sync::mpsc::channel(1000);
let addressbook_store = Arc::new(SqliteAddressbookStore::new(db.clone(), send.clone()));
addressbook_store.repair_orphans().await?;
let cal_store = Arc::new(SqliteCalendarStore::new(db.clone(), send));
cal_store.repair_orphans().await?;
if *run_repairs {
info!("Running repair tasks");
addressbook_store.repair_orphans().await?;
cal_store.repair_invalid_version_4_0().await?;
cal_store.repair_orphans().await?;
}
let subscription_store = Arc::new(SqliteStore::new(db.clone()));
let principal_store = Arc::new(SqlitePrincipalStore::new(db));
(
@@ -115,6 +125,14 @@ async fn main() -> Result<()> {
let (addr_store, cal_store, subscription_store, principal_store, update_recv) =
get_data_stores(!args.no_migrations, &config.data_store).await?;
warn!(
"Validating calendar data against the next-version ical parser.
In the next major release these will be rejected and cause errors.
If any errors occur, please open an issue so they can be fixed before the next major release."
);
validate_calendar_objects_0_12(principal_store.as_ref(), cal_store.as_ref()).await?;
validate_address_objects_0_12(principal_store.as_ref(), addr_store.as_ref()).await?;
let mut tasks = vec![];
if config.dav_push.enabled {

81
src/migration_0_12.rs Normal file
View File

@@ -0,0 +1,81 @@
use rustical_store::{AddressbookStore, CalendarStore, auth::AuthenticationProvider};
use tracing::{error, info};
pub async fn validate_calendar_objects_0_12(
principal_store: &impl AuthenticationProvider,
cal_store: &impl CalendarStore,
) -> Result<(), rustical_store::Error> {
let mut success = true;
for principal in principal_store.get_principals().await? {
for calendar in cal_store.get_calendars(&principal.id).await? {
for object in cal_store
.get_objects(&calendar.principal, &calendar.id)
.await?
{
if let Err(err) = ical_dev::parser::ical::IcalObjectParser::from_slice(
object.get_ics().as_bytes(),
)
.expect_one()
{
success = false;
error!(
"An error occured parsing a calendar object: principal={principal}, calendar={calendar}, object_id={object_id}: {err}",
principal = principal.id,
calendar = calendar.id,
object_id = object.get_id()
);
println!("{}", object.get_ics());
}
}
}
}
if success {
info!("Your calendar data seems to be valid in the next major version.");
} else {
error!(
"Not all calendar objects will be successfully parsed in the next major version (v0.12).
This will not cause issues in this version, but please comment under the tracking issue on GitHub:
https://github.com/lennart-k/rustical/issues/165"
);
}
Ok(())
}
pub async fn validate_address_objects_0_12(
principal_store: &impl AuthenticationProvider,
addr_store: &impl AddressbookStore,
) -> Result<(), rustical_store::Error> {
let mut success = true;
for principal in principal_store.get_principals().await? {
for addressbook in addr_store.get_addressbooks(&principal.id).await? {
for object in addr_store
.get_objects(&addressbook.principal, &addressbook.id)
.await?
{
if let Err(err) =
ical_dev::parser::vcard::VcardParser::from_slice(object.get_vcf().as_bytes())
.expect_one()
{
success = false;
error!(
"An error occured parsing an address object: principal={principal}, addressbook={addressbook}, object_id={object_id}: {err}",
principal = principal.id,
addressbook = addressbook.id,
object_id = object.get_id()
);
println!("{}", object.get_vcf());
}
}
}
}
if success {
info!("Your addressbook data seems to be valid in the next major version.");
} else {
error!(
"Not all address objects will be successfully parsed in the next major version (v0.12).
This will not cause issues in this version, but please comment under the tracking issue on GitHub:
https://github.com/lennart-k/rustical/issues/165"
);
}
Ok(())
}