Mirror of https://github.com/lennart-k/rustical.git (synced 2026-01-30 08:08:23 +00:00)

Compare commits: v0.12.3...c763a682ed (4 commits)

- c763a682ed
- 8ab9c61b0f
- 8b2bb1b0d6
- da72aa26cb
.gitignore (vendored, 2 changes)
```diff
@@ -3,7 +3,7 @@ crates/*/target
 # For libraries ignore Cargo.lock
 crates/*/Cargo.lock
 
-db.sqlite3*
+**/*.sqlite3*
 config.toml
 principals.toml
 
```
```diff
@@ -35,6 +35,12 @@ opentelemetry = [
 [profile.dev]
 debug = 0
 
+[lib]
+doc = true
+name = "rustical"
+path = "src/lib.rs"
+test = true
+
 [workspace.dependencies]
 rustical_dav = { path = "./crates/dav/", features = ["ical"] }
 rustical_dav_push = { path = "./crates/dav_push/" }
```
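The new `[lib]` target turns the crate into a library named `rustical` rooted at `src/lib.rs`, which is what lets the binary shrink to a thin wrapper further below. A minimal sketch of a consumer built on the re-exported API (`Args`, `cmd_default`, `config::Config`); the config loading here is simplified and the environment-variable merge used by the real binary is omitted:

```rust
// Hypothetical downstream binary built against the new `rustical` library
// target; it mirrors the slimmed-down src/main.rs shown further below.
use anyhow::Result;
use clap::Parser;
use figment::{
    Figment,
    providers::{Format, Toml},
};
use rustical::config::Config;
use rustical::{Args, cmd_default};

#[tokio::main]
async fn main() -> Result<()> {
    let args = Args::parse();
    // Simplified: the real main.rs also merges environment variables and
    // dispatches subcommands; this sketch only covers the default server path.
    let config: Config = Figment::new()
        .merge(Toml::file(&args.config_file))
        .extract()?;
    cmd_default(args, config).await
}
```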
```diff
@@ -24,6 +24,7 @@ a CalDAV/CardDAV server
 - Apple configuration profiles (skip copy-pasting passwords and instead generate the configuration in the frontend)
 - **OpenID Connect** support (with option to disable password login)
 - Group-based **sharing**
+- Partial [RFC 7809](https://datatracker.ietf.org/doc/html/rfc7809) support. RustiCal will accept timezones by reference and handle omitted timezones in objects.
 
 ## Getting Started
 
```
````diff
@@ -48,3 +48,26 @@ Since the app tokens are random they use the faster `pbkdf2` algorithm.
 ```sh
 cargo install --locked --git https://github.com/lennart-k/rustical
 ```
+
+## NixOS (community-maintained by [@PopeRigby](https://github.com/PopeRigby))
+
+!!! warning
+    The NixOS package is not maintained by myself but since I appreciate [@PopeRigby](https://github.com/PopeRigby)'s work on it I want to mention it.
+    Since rustical's development is still quite active I **strongly** recommend installing from the `nixpkgs-unstable` branch.
+
+In the `nixpkgs-unstable` you'll find a `rustical` package you can install.
+
+There's also a service that has not been merged yet. If you know how to add modules from PRs in Nix
+you can already install it <https://github.com/NixOS/nixpkgs/pull/424188>
+and then setup rustical as a service:
+
+```nix title="In your configuration.nix"
+services.rustical = {
+  enable = true;
+  package = inputs.rustical.legacyPackages.${pkgs.stdenv.hostPlatform.system}.rustical;
+  settings = {
+    # Settings the same as in config.toml but in Nix syntax
+    # http.port = 3002;
+  };
+};
+```
````
```diff
@@ -32,7 +32,8 @@ use tracing::field::display;
 #[allow(
     clippy::too_many_arguments,
     clippy::too_many_lines,
-    clippy::cognitive_complexity
+    clippy::cognitive_complexity,
+    clippy::missing_panics_doc
 )]
 pub fn make_app<
     AS: AddressbookStore + PrefixedCalendarStore,
```
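For context on the lint added to the allow list here: `clippy::missing_panics_doc` is a pedantic lint that fires on public functions which may panic but whose documentation lacks a `# Panics` section. A standalone sketch (not RustiCal code) of the two ways to satisfy it, documenting the panic or allowing the lint as `make_app` now does:

```rust
// Standalone illustration of clippy::missing_panics_doc; the functions and
// values are made up for the example.
#![warn(clippy::pedantic)]

/// Parses a port number.
///
/// # Panics
/// Panics if `s` is not a valid `u16`.
pub fn port_documented(s: &str) -> u16 {
    s.parse().expect("not a valid port")
}

/// Parses a port number (panic deliberately left undocumented).
#[allow(clippy::missing_panics_doc)]
pub fn port_allowed(s: &str) -> u16 {
    s.parse().expect("not a valid port")
}

fn main() {
    assert_eq!(port_documented("3002"), 3002);
    assert_eq!(port_allowed("8080"), 8080);
}
```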
```diff
@@ -109,9 +110,9 @@ pub fn make_app<
         options(async || {
             let mut resp = Response::builder().status(StatusCode::OK);
             resp.headers_mut()
-                .unwrap()
+                .expect("this always works")
                 .insert("DAV", HeaderValue::from_static("1"));
-            resp.body(Body::empty()).unwrap()
+            resp.body(Body::empty()).expect("empty body always works")
         }),
     );
 
```
```diff
@@ -7,6 +7,7 @@ pub struct HealthArgs {}
 
 /// Healthcheck for running rustical instance
 /// Currently just pings to see if it's reachable via HTTP
+#[allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
 pub async fn cmd_health(http_config: HttpConfig, _health_args: HealthArgs) -> anyhow::Result<()> {
     let client = reqwest::ClientBuilder::new().build().unwrap();
 
```
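A hedged sketch of what such a ping boils down to; the exact path and success criteria used by `cmd_health` are not visible in this diff, so the URL and port below are assumptions built only from the configured host and port:

```rust
// Illustrative only: issue a single HTTP request against the configured
// address and fail if the server is unreachable or returns an error status.
async fn ping(host: &str, port: u16) -> anyhow::Result<()> {
    let client = reqwest::ClientBuilder::new().build()?;
    let url = format!("http://{host}:{port}/"); // assumed path, "/" for illustration
    let resp = client.get(&url).send().await?;
    anyhow::ensure!(
        resp.status().is_success(),
        "health endpoint returned {}",
        resp.status()
    );
    Ok(())
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    // Example values; the real command takes them from HttpConfig.
    ping("127.0.0.1", 3002).await
}
```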
```diff
@@ -33,7 +33,8 @@ pub struct MembershipArgs {
     command: MembershipCommand,
 }
 
-pub async fn handle_membership_command(
+#[allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
+pub async fn cmd_membership(
     user_store: &impl AuthenticationProvider,
     MembershipArgs { command }: MembershipArgs,
 ) -> anyhow::Result<()> {
```
```diff
@@ -6,13 +6,17 @@ use clap::Parser;
 use rustical_caldav::CalDavConfig;
 use rustical_frontend::FrontendConfig;
 
-pub mod health;
+mod health;
 pub mod membership;
-pub mod principals;
+mod principals;
 
+pub use health::{HealthArgs, cmd_health};
+pub use principals::{PrincipalsArgs, cmd_principals};
+
 #[derive(Debug, Parser)]
 pub struct GenConfigArgs {}
 
+#[allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
 pub fn cmd_gen_config(_args: GenConfigArgs) -> anyhow::Result<()> {
     let config = Config {
         http: HttpConfig::default(),
```
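The visibility change above follows a facade pattern: the submodules become private and only their entry points are re-exported, so callers (including the new `pub use commands::*;` in src/lib.rs) reach them as `commands::cmd_health` rather than through the module path. A self-contained toy example of the same pattern, with names that are illustrative rather than RustiCal's:

```rust
// Facade pattern in miniature: a private submodule whose public items are
// re-exported by the parent module.
mod commands {
    mod health {
        pub fn cmd_health() -> &'static str {
            "ok"
        }
    }
    // The only public surface is the re-export, not the module itself.
    pub use health::cmd_health;
}

fn main() {
    // `commands::health::cmd_health()` would no longer compile from here,
    // because the `health` module is private to `commands`.
    assert_eq!(commands::cmd_health(), "ok");
}
```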
```diff
@@ -1,5 +1,5 @@
-use super::membership::{MembershipArgs, handle_membership_command};
-use crate::{config::Config, get_data_stores};
+use super::membership::MembershipArgs;
+use crate::{config::Config, get_data_stores, membership::cmd_membership};
 use clap::{Parser, Subcommand};
 use figment::{
     Figment,
```
```diff
@@ -58,6 +58,7 @@ enum Command {
     Membership(MembershipArgs),
 }
 
+#[allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
 pub async fn cmd_principals(args: PrincipalsArgs) -> anyhow::Result<()> {
     let config: Config = Figment::new()
         .merge(Toml::file(&args.config_file))
```
```diff
@@ -152,7 +153,7 @@ pub async fn cmd_principals(args: PrincipalsArgs) -> anyhow::Result<()> {
             println!("Principal {id} updated");
         }
         Command::Membership(args) => {
-            handle_membership_command(principal_store.as_ref(), args).await?;
+            cmd_membership(principal_store.as_ref(), args).await?;
         }
     }
     Ok(())
```
src/lib.rs (new file, 155 lines)
@@ -0,0 +1,155 @@

```rust
#![warn(clippy::all, clippy::pedantic, clippy::nursery)]
use crate::config::Config;
use anyhow::Result;
use app::make_app;
use axum::ServiceExt;
use axum::extract::Request;
use clap::{Parser, Subcommand};
use config::{DataStoreConfig, SqliteDataStoreConfig};
use rustical_dav_push::DavPushController;
use rustical_store::auth::AuthenticationProvider;
use rustical_store::{
    AddressbookStore, CalendarStore, CollectionOperation, PrefixedCalendarStore, SubscriptionStore,
};
use rustical_store_sqlite::addressbook_store::SqliteAddressbookStore;
use rustical_store_sqlite::calendar_store::SqliteCalendarStore;
use rustical_store_sqlite::principal_store::SqlitePrincipalStore;
use rustical_store_sqlite::{SqliteStore, create_db_pool};
use setup_tracing::setup_tracing;
use std::sync::Arc;
use tokio::sync::mpsc::Receiver;
use tower::Layer;
use tower_http::normalize_path::NormalizePathLayer;
use tracing::{info, warn};

pub mod app;
mod commands;
pub use commands::*;
pub mod config;
#[cfg(test)]
pub mod integration_tests;
mod setup_tracing;

#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Args {
    #[arg(short, long, env, default_value = "/etc/rustical/config.toml")]
    pub config_file: String,
    #[arg(long, env, help = "Do no run database migrations (only for sql store)")]
    pub no_migrations: bool,

    #[command(subcommand)]
    pub command: Option<Command>,
}

#[derive(Debug, Subcommand)]
pub enum Command {
    GenConfig(commands::GenConfigArgs),
    Principals(PrincipalsArgs),
    #[command(
        about = "Healthcheck for running instance (Used for HEALTHCHECK in Docker container)"
    )]
    Health(HealthArgs),
}

#[allow(clippy::missing_errors_doc)]
pub async fn get_data_stores(
    migrate: bool,
    config: &DataStoreConfig,
) -> Result<(
    Arc<impl AddressbookStore + PrefixedCalendarStore>,
    Arc<impl CalendarStore>,
    Arc<impl SubscriptionStore>,
    Arc<impl AuthenticationProvider>,
    Receiver<CollectionOperation>,
)> {
    Ok(match &config {
        DataStoreConfig::Sqlite(SqliteDataStoreConfig {
            db_url,
            run_repairs,
            skip_broken,
        }) => {
            let db = create_db_pool(db_url, migrate).await?;
            // Channel to watch for changes (for DAV Push)
            let (send, recv) = tokio::sync::mpsc::channel(1000);

            let addressbook_store = Arc::new(SqliteAddressbookStore::new(
                db.clone(),
                send.clone(),
                *skip_broken,
            ));
            let cal_store = Arc::new(SqliteCalendarStore::new(db.clone(), send, *skip_broken));
            if *run_repairs {
                info!("Running repair tasks");
                addressbook_store.repair_orphans().await?;
                cal_store.repair_invalid_version_4_0().await?;
                cal_store.repair_orphans().await?;
            }
            let subscription_store = Arc::new(SqliteStore::new(db.clone()));
            let principal_store = Arc::new(SqlitePrincipalStore::new(db));

            // Validate all calendar objects
            for principal in principal_store.get_principals().await? {
                cal_store.validate_objects(&principal.id).await?;
                addressbook_store.validate_objects(&principal.id).await?;
            }

            (
                addressbook_store,
                cal_store,
                subscription_store,
                principal_store,
                recv,
            )
        }
    })
}

#[allow(clippy::missing_errors_doc, clippy::missing_panics_doc)]
pub async fn cmd_default(args: Args, config: Config) -> Result<()> {
    setup_tracing(&config.tracing);

    let (addr_store, cal_store, subscription_store, principal_store, update_recv) =
        get_data_stores(!args.no_migrations, &config.data_store).await?;

    let mut tasks = vec![];

    if config.dav_push.enabled {
        let dav_push_controller = DavPushController::new(
            config.dav_push.allowed_push_servers,
            subscription_store.clone(),
        );
        tasks.push(tokio::spawn(async move {
            dav_push_controller.notifier(update_recv).await;
        }));
    }

    let app = make_app(
        addr_store.clone(),
        cal_store.clone(),
        subscription_store.clone(),
        principal_store.clone(),
        config.frontend.clone(),
        config.oidc.clone(),
        config.caldav,
        &config.nextcloud_login,
        config.dav_push.enabled,
        config.http.session_cookie_samesite_strict,
        config.http.payload_limit_mb,
    );
    let app = ServiceExt::<Request>::into_make_service(
        NormalizePathLayer::trim_trailing_slash().layer(app),
    );

    let address = format!("{}:{}", config.http.host, config.http.port);
    let listener = tokio::net::TcpListener::bind(&address).await?;
    tasks.push(tokio::spawn(async move {
        info!("RustiCal serving on http://{address}");
        axum::serve(listener, app).await.unwrap();
    }));

    for task in tasks {
        task.await?;
    }
    Ok(())
}
```
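The `tokio::sync::mpsc::channel(1000)` wiring in `get_data_stores` is the DAV Push change feed: the stores keep the `Sender`, and `DavPushController::notifier` consumes the `Receiver` in a background task. A standalone sketch of that producer/consumer shape with made-up types (the real payload is `CollectionOperation`):

```rust
// Toy model of the change-notification channel; types and messages are
// illustrative, not RustiCal's.
use tokio::sync::mpsc;

#[derive(Debug)]
struct ChangeEvent {
    collection: String,
}

#[tokio::main]
async fn main() {
    // Bounded channel, mirroring tokio::sync::mpsc::channel(1000) above
    let (send, mut recv) = mpsc::channel::<ChangeEvent>(16);

    // Background notifier task, comparable to DavPushController::notifier(update_recv)
    let notifier = tokio::spawn(async move {
        while let Some(event) = recv.recv().await {
            println!("would notify push subscribers about {}", event.collection);
        }
    });

    // A store would hold a clone of `send` and fire an event on every write
    send.send(ChangeEvent {
        collection: "user/calendar".into(),
    })
    .await
    .expect("receiver still alive");

    drop(send); // closing the last sender lets the notifier loop end
    notifier.await.expect("notifier task panicked");
}
```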
src/main.rs (163 changes)
```diff
@@ -1,112 +1,12 @@
 #![warn(clippy::all, clippy::pedantic, clippy::nursery)]
-use crate::commands::health::{HealthArgs, cmd_health};
-use crate::config::Config;
 use anyhow::Result;
-use app::make_app;
-use axum::ServiceExt;
-use axum::extract::Request;
-use clap::{Parser, Subcommand};
-use commands::cmd_gen_config;
-use commands::principals::{PrincipalsArgs, cmd_principals};
-use config::{DataStoreConfig, SqliteDataStoreConfig};
+use clap::Parser;
 use figment::Figment;
 use figment::providers::{Env, Format, Toml};
-use rustical_dav_push::DavPushController;
-use rustical_store::auth::AuthenticationProvider;
-use rustical_store::{
-    AddressbookStore, CalendarStore, CollectionOperation, PrefixedCalendarStore, SubscriptionStore,
-};
-use rustical_store_sqlite::addressbook_store::SqliteAddressbookStore;
-use rustical_store_sqlite::calendar_store::SqliteCalendarStore;
-use rustical_store_sqlite::principal_store::SqlitePrincipalStore;
-use rustical_store_sqlite::{SqliteStore, create_db_pool};
-use setup_tracing::setup_tracing;
-use std::sync::Arc;
-use tokio::sync::mpsc::Receiver;
-use tower::Layer;
-use tower_http::normalize_path::NormalizePathLayer;
-use tracing::{info, warn};
-
-mod app;
-mod commands;
-mod config;
-#[cfg(test)]
-pub mod integration_tests;
-mod setup_tracing;
-
-#[derive(Parser, Debug)]
-#[command(author, version, about, long_about = None)]
-struct Args {
-    #[arg(short, long, env, default_value = "/etc/rustical/config.toml")]
-    config_file: String,
-    #[arg(long, env, help = "Do no run database migrations (only for sql store)")]
-    no_migrations: bool,
-
-    #[command(subcommand)]
-    command: Option<Command>,
-}
-
-#[derive(Debug, Subcommand)]
-enum Command {
-    GenConfig(commands::GenConfigArgs),
-    Principals(PrincipalsArgs),
-    #[command(
-        about = "Healthcheck for running instance (Used for HEALTHCHECK in Docker container)"
-    )]
-    Health(HealthArgs),
-}
-
-async fn get_data_stores(
-    migrate: bool,
-    config: &DataStoreConfig,
-) -> Result<(
-    Arc<impl AddressbookStore + PrefixedCalendarStore>,
-    Arc<impl CalendarStore>,
-    Arc<impl SubscriptionStore>,
-    Arc<impl AuthenticationProvider>,
-    Receiver<CollectionOperation>,
-)> {
-    Ok(match &config {
-        DataStoreConfig::Sqlite(SqliteDataStoreConfig {
-            db_url,
-            run_repairs,
-            skip_broken,
-        }) => {
-            let db = create_db_pool(db_url, migrate).await?;
-            // Channel to watch for changes (for DAV Push)
-            let (send, recv) = tokio::sync::mpsc::channel(1000);
-
-            let addressbook_store = Arc::new(SqliteAddressbookStore::new(
-                db.clone(),
-                send.clone(),
-                *skip_broken,
-            ));
-            let cal_store = Arc::new(SqliteCalendarStore::new(db.clone(), send, *skip_broken));
-            if *run_repairs {
-                info!("Running repair tasks");
-                addressbook_store.repair_orphans().await?;
-                cal_store.repair_invalid_version_4_0().await?;
-                cal_store.repair_orphans().await?;
-            }
-            let subscription_store = Arc::new(SqliteStore::new(db.clone()));
-            let principal_store = Arc::new(SqlitePrincipalStore::new(db));
-
-            // Validate all calendar objects
-            for principal in principal_store.get_principals().await? {
-                cal_store.validate_objects(&principal.id).await?;
-                addressbook_store.validate_objects(&principal.id).await?;
-            }
-
-            (
-                addressbook_store,
-                cal_store,
-                subscription_store,
-                principal_store,
-                recv,
-            )
-        }
-    })
-}
+use rustical::config::Config;
+use rustical::{Args, Command};
+use rustical::{cmd_default, cmd_gen_config, cmd_health, cmd_principals};
+use tracing::warn;
 
 #[tokio::main(flavor = "multi_thread")]
 async fn main() -> Result<()> {
```
```diff
@@ -120,60 +20,15 @@ async fn main() -> Result<()> {
     };
 
     match args.command {
-        Some(Command::GenConfig(gen_config_args)) => cmd_gen_config(gen_config_args)?,
-        Some(Command::Principals(principals_args)) => cmd_principals(principals_args).await?,
+        Some(Command::GenConfig(gen_config_args)) => cmd_gen_config(gen_config_args),
+        Some(Command::Principals(principals_args)) => cmd_principals(principals_args).await,
         Some(Command::Health(health_args)) => {
             let config: Config = parse_config()?;
-            cmd_health(config.http, health_args).await?;
+            cmd_health(config.http, health_args).await
         }
         None => {
             let config: Config = parse_config()?;
-
-            setup_tracing(&config.tracing);
-
-            let (addr_store, cal_store, subscription_store, principal_store, update_recv) =
-                get_data_stores(!args.no_migrations, &config.data_store).await?;
-
-            let mut tasks = vec![];
-
-            if config.dav_push.enabled {
-                let dav_push_controller = DavPushController::new(
-                    config.dav_push.allowed_push_servers,
-                    subscription_store.clone(),
-                );
-                tasks.push(tokio::spawn(async move {
-                    dav_push_controller.notifier(update_recv).await;
-                }));
-            }
-
-            let app = make_app(
-                addr_store.clone(),
-                cal_store.clone(),
-                subscription_store.clone(),
-                principal_store.clone(),
-                config.frontend.clone(),
-                config.oidc.clone(),
-                config.caldav,
-                &config.nextcloud_login,
-                config.dav_push.enabled,
-                config.http.session_cookie_samesite_strict,
-                config.http.payload_limit_mb,
-            );
-            let app = ServiceExt::<Request>::into_make_service(
-                NormalizePathLayer::trim_trailing_slash().layer(app),
-            );
-
-            let address = format!("{}:{}", config.http.host, config.http.port);
-            let listener = tokio::net::TcpListener::bind(&address).await?;
-            tasks.push(tokio::spawn(async move {
-                info!("RustiCal serving on http://{address}");
-                axum::serve(listener, app).await.unwrap();
-            }));
-
-            for task in tasks {
-                task.await?;
-            }
+            cmd_default(args, config).await
         }
     }
-    Ok(())
 }
```
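The shape of this refactor, separate from RustiCal's actual commands: instead of executing each arm with `?` and falling through to a trailing `Ok(())`, `main` now returns the `match` expression itself, so every arm hands back its `Result` directly (and `cmd_default` carries the code that used to live in the `None` arm). A minimal standalone illustration with toy functions standing in for the `cmd_*` handlers:

```rust
use anyhow::Result;

// Hypothetical stand-ins for the cmd_* handlers; not RustiCal code.
fn step_a() -> Result<()> { Ok(()) }
fn step_b() -> Result<()> { Ok(()) }

// Old shape: run each arm with `?`, then return Ok(()) at the end.
fn dispatch_before(flag: bool) -> Result<()> {
    match flag {
        true => step_a()?,
        false => step_b()?,
    }
    Ok(())
}

// New shape: the match is the return value, each arm returns its Result.
fn dispatch_after(flag: bool) -> Result<()> {
    match flag {
        true => step_a(),
        false => step_b(),
    }
}

fn main() -> Result<()> {
    dispatch_before(true)?;
    dispatch_after(false)
}
```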