diff --git a/build.rs b/build.rs index 0277d21e..bce425e8 100644 --- a/build.rs +++ b/build.rs @@ -1,7 +1,7 @@ -use std::process::Command; use std::env; +use std::process::Command; -fn main() { +fn main() { // This allow using #[cfg(sqlite)] instead of #[cfg(feature = "sqlite")], which helps when trying to add them through macros #[cfg(feature = "sqlite")] println!("cargo:rustc-cfg=sqlite"); @@ -11,8 +11,10 @@ fn main() { println!("cargo:rustc-cfg=postgresql"); #[cfg(not(any(feature = "sqlite", feature = "mysql", feature = "postgresql")))] - compile_error!("You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite"); - + compile_error!( + "You need to enable one DB backend. To build with previous defaults do: cargo build --features sqlite" + ); + if let Ok(version) = env::var("BWRS_VERSION") { println!("cargo:rustc-env=BWRS_VERSION={}", version); println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version); @@ -61,7 +63,7 @@ fn read_git_info() -> Result<(), std::io::Error> { } else { format!("{}-{}", last_tag, rev_short) }; - + println!("cargo:rustc-env=BWRS_VERSION={}", version); println!("cargo:rustc-env=CARGO_PKG_VERSION={}", version); diff --git a/src/api/admin.rs b/src/api/admin.rs index d484407a..7ed3d270 100644 --- a/src/api/admin.rs +++ b/src/api/admin.rs @@ -142,7 +142,8 @@ fn admin_url(referer: Referer) -> String { fn admin_login(flash: Option) -> ApiResult> { // If there is an error, show it let msg = flash.map(|msg| format!("{}: {}", msg.name(), msg.msg())); - let json = json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()}); + let json = + json!({"page_content": "admin/login", "version": VERSION, "error": msg, "urlpath": CONFIG.domain_path()}); // Return the page let text = CONFIG.render_template(BASE_TEMPLATE, &json)?; @@ -329,7 +330,8 @@ fn get_users_json(_token: AdminToken, conn: DbConn) -> Json { fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult> { let users = User::get_all(&conn); let dt_fmt = "%Y-%m-%d %H:%M:%S %Z"; - let users_json: Vec = users.iter() + let users_json: Vec = users + .iter() .map(|u| { let mut usr = u.to_json(&conn); usr["cipher_count"] = json!(Cipher::count_owned_by_user(&u.uuid, &conn)); @@ -339,7 +341,7 @@ fn users_overview(_token: AdminToken, conn: DbConn) -> ApiResult> { usr["created_at"] = json!(format_naive_datetime_local(&u.created_at, dt_fmt)); usr["last_active"] = match u.last_active(&conn) { Some(dt) => json!(format_naive_datetime_local(&dt, dt_fmt)), - None => json!("Never") + None => json!("Never"), }; usr }) @@ -424,7 +426,6 @@ fn update_user_org_type(data: Json, _token: AdminToken, conn: D user_to_edit.save(&conn) } - #[post("/users/update_revision")] fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult { User::update_all_revisions(&conn) @@ -433,7 +434,8 @@ fn update_revision_users(_token: AdminToken, conn: DbConn) -> EmptyResult { #[get("/organizations/overview")] fn organizations_overview(_token: AdminToken, conn: DbConn) -> ApiResult> { let organizations = Organization::get_all(&conn); - let organizations_json: Vec = organizations.iter() + let organizations_json: Vec = organizations + .iter() .map(|o| { let mut org = o.to_json(); org["user_count"] = json!(UserOrganization::count_by_org(&o.uuid, &conn)); @@ -524,7 +526,8 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu // TODO: Maybe we need to cache this using a LazyStatic or something. 
Github only allows 60 requests per hour, and we use 3 here already. let (latest_release, latest_commit, latest_web_build) = if has_http_access { ( - match get_github_api::("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") { + match get_github_api::("https://api.github.com/repos/dani-garcia/bitwarden_rs/releases/latest") + { Ok(r) => r.tag_name, _ => "-".to_string(), }, @@ -540,7 +543,9 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu if running_within_docker { "-".to_string() } else { - match get_github_api::("https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest") { + match get_github_api::( + "https://api.github.com/repos/dani-garcia/bw_web_builds/releases/latest", + ) { Ok(r) => r.tag_name.trim_start_matches('v').to_string(), _ => "-".to_string(), } @@ -552,7 +557,7 @@ fn diagnostics(_token: AdminToken, ip_header: IpHeader, conn: DbConn) -> ApiResu let ip_header_name = match &ip_header.0 { Some(h) => h, - _ => "" + _ => "", }; let diagnostics_json = json!({ diff --git a/src/api/core/ciphers.rs b/src/api/core/ciphers.rs index 7b0de205..d566b0b9 100644 --- a/src/api/core/ciphers.rs +++ b/src/api/core/ciphers.rs @@ -91,7 +91,8 @@ fn sync(data: Form, headers: Headers, conn: DbConn) -> Json { let folders_json: Vec = folders.iter().map(Folder::to_json).collect(); let collections = Collection::find_by_user_uuid(&headers.user.uuid, &conn); - let collections_json: Vec = collections.iter() + let collections_json: Vec = collections + .iter() .map(|c| c.to_json_details(&headers.user.uuid, &conn)) .collect(); @@ -105,10 +106,7 @@ fn sync(data: Form, headers: Headers, conn: DbConn) -> Json { .collect(); let sends = Send::find_by_user(&headers.user.uuid, &conn); - let sends_json: Vec = sends - .iter() - .map(|s| s.to_json()) - .collect(); + let sends_json: Vec = sends.iter().map(|s| s.to_json()).collect(); let domains_json = if data.exclude_domains { Value::Null @@ -236,7 +234,7 @@ fn post_ciphers_create(data: JsonUpcase, headers: Headers, conn // Check if there are one more more collections selected when this cipher is part of an organization. // err if this is not the case before creating an empty cipher. - if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() { + if data.Cipher.OrganizationId.is_some() && data.CollectionIds.is_empty() { err!("You must select at least one collection."); } @@ -278,17 +276,15 @@ fn post_ciphers(data: JsonUpcase, headers: Headers, conn: DbConn, nt /// allowed to delete or share such ciphers to an org, however. /// /// Ref: https://bitwarden.com/help/article/policies/#personal-ownership -fn enforce_personal_ownership_policy( - data: &CipherData, - headers: &Headers, - conn: &DbConn -) -> EmptyResult { +fn enforce_personal_ownership_policy(data: &CipherData, headers: &Headers, conn: &DbConn) -> EmptyResult { if data.OrganizationId.is_none() { let user_uuid = &headers.user.uuid; let policy_type = OrgPolicyType::PersonalOwnership; if OrgPolicy::is_applicable_to_user(user_uuid, policy_type, conn) { - err!("Due to an Enterprise Policy, you are restricted from \ - saving items to your personal vault.") + err!( + "Due to an Enterprise Policy, you are restricted from \ + saving items to your personal vault." + ) } } Ok(()) @@ -307,11 +303,12 @@ pub fn update_cipher_from_data( // Check that the client isn't updating an existing cipher with stale data. 
if let Some(dt) = data.LastKnownRevisionDate { - match NaiveDateTime::parse_from_str(&dt, "%+") { // ISO 8601 format - Err(err) => - warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err), - Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => - err!("The client copy of this cipher is out of date. Resync the client and try again."), + match NaiveDateTime::parse_from_str(&dt, "%+") { + // ISO 8601 format + Err(err) => warn!("Error parsing LastKnownRevisionDate '{}': {}", dt, err), + Ok(dt) if cipher.updated_at.signed_duration_since(dt).num_seconds() > 1 => { + err!("The client copy of this cipher is out of date. Resync the client and try again.") + } Ok(_) => (), } } @@ -384,12 +381,9 @@ pub fn update_cipher_from_data( // But, we at least know we do not need to store and return this specific key. fn _clean_cipher_data(mut json_data: Value) -> Value { if json_data.is_array() { - json_data.as_array_mut() - .unwrap() - .iter_mut() - .for_each(|ref mut f| { - f.as_object_mut().unwrap().remove("Response"); - }); + json_data.as_array_mut().unwrap().iter_mut().for_each(|ref mut f| { + f.as_object_mut().unwrap().remove("Response"); + }); }; json_data } @@ -411,13 +405,13 @@ pub fn update_cipher_from_data( data["Uris"] = _clean_cipher_data(data["Uris"].clone()); } data - }, + } None => err!("Data missing"), }; cipher.name = data.Name; cipher.notes = data.Notes; - cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string() ); + cipher.fields = data.Fields.map(|f| _clean_cipher_data(f).to_string()); cipher.data = type_data.to_string(); cipher.password_history = data.PasswordHistory.map(|f| f.to_string()); @@ -832,7 +826,13 @@ fn post_attachment( let file_name = HEXLOWER.encode(&crypto::get_random(vec![0; 10])); let path = base_path.join(&file_name); - let size = match field.data.save().memory_threshold(0).size_limit(size_limit).with_path(path.clone()) { + let size = match field + .data + .save() + .memory_threshold(0) + .size_limit(size_limit) + .with_path(path.clone()) + { SaveResult::Full(SavedData::File(_, size)) => size as i32, SaveResult::Full(other) => { std::fs::remove_file(path).ok(); @@ -881,7 +881,11 @@ fn post_attachment_admin( post_attachment(uuid, data, content_type, headers, conn, nt) } -#[post("/ciphers//attachment//share", format = "multipart/form-data", data = "")] +#[post( + "/ciphers//attachment//share", + format = "multipart/form-data", + data = "" +)] fn post_attachment_share( uuid: String, attachment_id: String, @@ -984,12 +988,22 @@ fn delete_cipher_selected_admin(data: JsonUpcase, headers: Headers, conn: } #[post("/ciphers/delete-admin", data = "")] -fn delete_cipher_selected_post_admin(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { +fn delete_cipher_selected_post_admin( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { delete_cipher_selected_post(data, headers, conn, nt) } #[put("/ciphers/delete-admin", data = "")] -fn delete_cipher_selected_put_admin(data: JsonUpcase, headers: Headers, conn: DbConn, nt: Notify) -> EmptyResult { +fn delete_cipher_selected_put_admin( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + nt: Notify, +) -> EmptyResult { delete_cipher_selected_put(data, headers, conn, nt) } @@ -1140,7 +1154,13 @@ fn _delete_cipher_by_uuid(uuid: &str, headers: &Headers, conn: &DbConn, soft_del Ok(()) } -fn _delete_multiple_ciphers(data: JsonUpcase, headers: Headers, conn: DbConn, soft_delete: bool, nt: Notify) -> EmptyResult { +fn 
_delete_multiple_ciphers( + data: JsonUpcase, + headers: Headers, + conn: DbConn, + soft_delete: bool, + nt: Notify, +) -> EmptyResult { let data: Value = data.into_inner().data; let uuids = match data.get("Ids") { @@ -1192,7 +1212,7 @@ fn _restore_multiple_ciphers(data: JsonUpcase, headers: &Headers, conn: & for uuid in uuids { match _restore_cipher_by_uuid(uuid, headers, conn, nt) { Ok(json) => ciphers.push(json.into_inner()), - err => return err + err => return err, } } diff --git a/src/api/core/mod.rs b/src/api/core/mod.rs index 36e83f0e..70e1866e 100644 --- a/src/api/core/mod.rs +++ b/src/api/core/mod.rs @@ -2,8 +2,8 @@ mod accounts; mod ciphers; mod folders; mod organizations; -pub mod two_factor; mod sends; +pub mod two_factor; pub use sends::start_send_deletion_scheduler; @@ -32,9 +32,9 @@ pub fn routes() -> Vec { // // Move this somewhere else // +use rocket::response::Response; use rocket::Route; use rocket_contrib::json::Json; -use rocket::response::Response; use serde_json::Value; use crate::{ diff --git a/src/api/core/organizations.rs b/src/api/core/organizations.rs index 5698c187..ce3c46ea 100644 --- a/src/api/core/organizations.rs +++ b/src/api/core/organizations.rs @@ -5,7 +5,7 @@ use serde_json::Value; use crate::{ api::{EmptyResult, JsonResult, JsonUpcase, JsonUpcaseVec, Notify, NumberOrString, PasswordData, UpdateType}, - auth::{decode_invite, AdminHeaders, Headers, OwnerHeaders, ManagerHeaders, ManagerHeadersLoose}, + auth::{decode_invite, AdminHeaders, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders}, db::{models::*, DbConn}, mail, CONFIG, }; @@ -333,7 +333,12 @@ fn post_organization_collection_delete_user( } #[delete("/organizations//collections/")] -fn delete_organization_collection(org_id: String, col_id: String, _headers: ManagerHeaders, conn: DbConn) -> EmptyResult { +fn delete_organization_collection( + org_id: String, + col_id: String, + _headers: ManagerHeaders, + conn: DbConn, +) -> EmptyResult { match Collection::find_by_uuid(&col_id, &conn) { None => err!("Collection not found"), Some(collection) => { @@ -426,9 +431,7 @@ fn put_collection_users( continue; } - CollectionUser::save(&user.user_uuid, &coll_id, - d.ReadOnly, d.HidePasswords, - &conn)?; + CollectionUser::save(&user.user_uuid, &coll_id, d.ReadOnly, d.HidePasswords, &conn)?; } Ok(()) @@ -544,9 +547,7 @@ fn send_invite(org_id: String, data: JsonUpcase, headers: AdminHeade match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) { None => err!("Collection not found in Organization"), Some(collection) => { - CollectionUser::save(&user.uuid, &collection.uuid, - col.ReadOnly, col.HidePasswords, - &conn)?; + CollectionUser::save(&user.uuid, &collection.uuid, col.ReadOnly, col.HidePasswords, &conn)?; } } } @@ -801,9 +802,13 @@ fn edit_user( match Collection::find_by_uuid_and_org(&col.Id, &org_id, &conn) { None => err!("Collection not found in Organization"), Some(collection) => { - CollectionUser::save(&user_to_edit.user_uuid, &collection.uuid, - col.ReadOnly, col.HidePasswords, - &conn)?; + CollectionUser::save( + &user_to_edit.user_uuid, + &collection.uuid, + col.ReadOnly, + col.HidePasswords, + &conn, + )?; } } } @@ -989,7 +994,13 @@ struct PolicyData { } #[put("/organizations//policies/", data = "")] -fn put_policy(org_id: String, pol_type: i32, data: Json, _headers: AdminHeaders, conn: DbConn) -> JsonResult { +fn put_policy( + org_id: String, + pol_type: i32, + data: Json, + _headers: AdminHeaders, + conn: DbConn, +) -> JsonResult { let data: PolicyData = data.into_inner(); let 
pol_type_enum = match OrgPolicyType::from_i32(pol_type) { @@ -1127,8 +1138,7 @@ fn import(org_id: String, data: JsonUpcase, headers: Headers, con // If user is not part of the organization, but it exists } else if UserOrganization::find_by_email_and_org(&user_data.Email, &org_id, &conn).is_none() { - if let Some (user) = User::find_by_mail(&user_data.Email, &conn) { - + if let Some(user) = User::find_by_mail(&user_data.Email, &conn) { let user_org_status = if CONFIG.mail_enabled() { UserOrgStatus::Invited as i32 } else { @@ -1164,7 +1174,7 @@ fn import(org_id: String, data: JsonUpcase, headers: Headers, con // If this flag is enabled, any user that isn't provided in the Users list will be removed (by default they will be kept unless they have Deleted == true) if data.OverwriteExisting { for user_org in UserOrganization::find_by_org_and_type(&org_id, UserOrgType::User as i32, &conn) { - if let Some (user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) { + if let Some(user_email) = User::find_by_uuid(&user_org.user_uuid, &conn).map(|u| u.email) { if !data.Users.iter().any(|u| u.Email == user_email) { user_org.delete(&conn)?; } diff --git a/src/api/core/two_factor/authenticator.rs b/src/api/core/two_factor/authenticator.rs index f4bd5df5..5f6cb452 100644 --- a/src/api/core/two_factor/authenticator.rs +++ b/src/api/core/two_factor/authenticator.rs @@ -141,7 +141,11 @@ pub fn validate_totp_code(user_uuid: &str, totp_code: u64, secret: &str, ip: &Cl // The amount of steps back and forward in time // Also check if we need to disable time drifted TOTP codes. // If that is the case, we set the steps to 0 so only the current TOTP is valid. - let steps: i64 = if CONFIG.authenticator_disable_time_drift() { 0 } else { 1 }; + let steps: i64 = if CONFIG.authenticator_disable_time_drift() { + 0 + } else { + 1 + }; for step in -steps..=steps { let time_step = current_timestamp / 30i64 + step; diff --git a/src/api/core/two_factor/email.rs b/src/api/core/two_factor/email.rs index 6aa6e013..a12d47ec 100644 --- a/src/api/core/two_factor/email.rs +++ b/src/api/core/two_factor/email.rs @@ -65,7 +65,10 @@ pub fn send_token(user_uuid: &str, conn: &DbConn) -> EmptyResult { twofactor.data = twofactor_data.to_json(); twofactor.save(&conn)?; - mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; + mail::send_token( + &twofactor_data.email, + &twofactor_data.last_token.map_res("Token is empty")?, + )?; Ok(()) } @@ -132,7 +135,10 @@ fn send_email(data: JsonUpcase, headers: Headers, conn: DbConn) - ); twofactor.save(&conn)?; - mail::send_token(&twofactor_data.email, &twofactor_data.last_token.map_res("Token is empty")?)?; + mail::send_token( + &twofactor_data.email, + &twofactor_data.last_token.map_res("Token is empty")?, + )?; Ok(()) } @@ -186,7 +192,8 @@ fn email(data: JsonUpcase, headers: Headers, conn: DbConn) -> JsonRes /// Validate the email code when used as TwoFactor token mechanism pub fn validate_email_code_str(user_uuid: &str, token: &str, data: &str, conn: &DbConn) -> EmptyResult { let mut email_data = EmailTokenData::from_json(&data)?; - let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn).map_res("Two factor not found")?; + let mut twofactor = TwoFactor::find_by_user_and_type(&user_uuid, TwoFactorType::Email as i32, &conn) + .map_res("Two factor not found")?; let issued_token = match &email_data.last_token { Some(t) => t, _ => err!("No token available"), diff --git a/src/api/icons.rs 
b/src/api/icons.rs index 6da3af0b..ae3187cb 100644 --- a/src/api/icons.rs +++ b/src/api/icons.rs @@ -22,10 +22,18 @@ static CLIENT: Lazy = Lazy::new(|| { // Generate the default headers let mut default_headers = header::HeaderMap::new(); default_headers.insert(header::USER_AGENT, header::HeaderValue::from_static("Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_4) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/13.1.1 Safari/605.1.15")); - default_headers.insert(header::ACCEPT_LANGUAGE, header::HeaderValue::from_static("en-US,en;q=0.8")); + default_headers.insert( + header::ACCEPT_LANGUAGE, + header::HeaderValue::from_static("en-US,en;q=0.8"), + ); default_headers.insert(header::CACHE_CONTROL, header::HeaderValue::from_static("no-cache")); default_headers.insert(header::PRAGMA, header::HeaderValue::from_static("no-cache")); - default_headers.insert(header::ACCEPT, header::HeaderValue::from_static("text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8")); + default_headers.insert( + header::ACCEPT, + header::HeaderValue::from_static( + "text/html,application/xhtml+xml,application/xml; q=0.9,image/webp,image/apng,*/*;q=0.8", + ), + ); // Reuse the client between requests Client::builder() @@ -48,12 +56,18 @@ fn icon(domain: String) -> Cached>> { if !is_valid_domain(&domain) { warn!("Invalid domain: {}", domain); - return Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()); + return Cached::ttl( + Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), + CONFIG.icon_cache_negttl(), + ); } match get_icon(&domain) { Some(i) => Cached::ttl(Content(ContentType::new("image", "x-icon"), i), CONFIG.icon_cache_ttl()), - _ => Cached::ttl(Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), CONFIG.icon_cache_negttl()), + _ => Cached::ttl( + Content(ContentType::new("image", "png"), FALLBACK_ICON.to_vec()), + CONFIG.icon_cache_negttl(), + ), } } @@ -74,7 +88,10 @@ fn is_valid_domain(domain: &str) -> bool { || domain.starts_with('-') || domain.ends_with('-') { - debug!("Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'", domain); + debug!( + "Domain validation error: '{}' is either empty, contains '..', starts with an '.', starts or ends with a '-'", + domain + ); return false; } else if domain.len() > 255 { debug!("Domain validation error: '{}' exceeds 255 characters", domain); @@ -83,7 +100,10 @@ fn is_valid_domain(domain: &str) -> bool { for c in domain.chars() { if !c.is_alphanumeric() && !ALLOWED_CHARS.contains(c) { - debug!("Domain validation error: '{}' contains an invalid character '{}'", domain, c); + debug!( + "Domain validation error: '{}' contains an invalid character '{}'", + domain, c + ); return false; } } diff --git a/src/api/identity.rs b/src/api/identity.rs index dcfe607a..22a7a11e 100644 --- a/src/api/identity.rs +++ b/src/api/identity.rs @@ -114,7 +114,10 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult if user.verified_at.is_none() && CONFIG.mail_enabled() && CONFIG.signups_verify() { let now = now.naive_utc(); - if user.last_verifying_at.is_none() || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() > CONFIG.signups_verify_resend_time() as i64 { + if user.last_verifying_at.is_none() + || now.signed_duration_since(user.last_verifying_at.unwrap()).num_seconds() + > CONFIG.signups_verify_resend_time() as i64 + { let resend_limit = 
CONFIG.signups_verify_resend_limit() as i32; if resend_limit == 0 || user.login_verify_count < resend_limit { // We want to send another email verification if we require signups to verify @@ -168,7 +171,7 @@ fn _password_login(data: ConnectData, conn: DbConn, ip: &ClientIp) -> JsonResult "Key": user.akey, "PrivateKey": user.private_key, //"TwoFactorToken": "11122233333444555666777888999" - + "Kdf": user.client_kdf_type, "KdfIterations": user.client_kdf_iter, "ResetMasterPassword": false,// TODO: Same as above @@ -231,12 +234,13 @@ fn twofactor_auth( let twofactor_code = match data.two_factor_token { Some(ref code) => code, - None => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA token not provided"), + None => err_json!( + _json_err_twofactor(&twofactor_ids, user_uuid, conn)?, + "2FA token not provided" + ), }; - let selected_twofactor = twofactors - .into_iter() - .find(|tf| tf.atype == selected_id && tf.enabled); + let selected_twofactor = twofactors.into_iter().find(|tf| tf.atype == selected_id && tf.enabled); use crate::api::core::two_factor as _tf; use crate::crypto::ct_eq; @@ -245,18 +249,27 @@ fn twofactor_auth( let mut remember = data.two_factor_remember.unwrap_or(0); match TwoFactorType::from_i32(selected_id) { - Some(TwoFactorType::Authenticator) => _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)?, + Some(TwoFactorType::Authenticator) => { + _tf::authenticator::validate_totp_code_str(user_uuid, twofactor_code, &selected_data?, ip, conn)? + } Some(TwoFactorType::U2f) => _tf::u2f::validate_u2f_login(user_uuid, twofactor_code, conn)?, Some(TwoFactorType::YubiKey) => _tf::yubikey::validate_yubikey_login(twofactor_code, &selected_data?)?, - Some(TwoFactorType::Duo) => _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)?, - Some(TwoFactorType::Email) => _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)?, + Some(TwoFactorType::Duo) => { + _tf::duo::validate_duo_login(data.username.as_ref().unwrap(), twofactor_code, conn)? + } + Some(TwoFactorType::Email) => { + _tf::email::validate_email_code_str(user_uuid, twofactor_code, &selected_data?, conn)? 
+ } Some(TwoFactorType::Remember) => { match device.twofactor_remember { Some(ref code) if !CONFIG.disable_2fa_remember() && ct_eq(code, twofactor_code) => { remember = 1; // Make sure we also return the token here, otherwise it will only remember the first time } - _ => err_json!(_json_err_twofactor(&twofactor_ids, user_uuid, conn)?, "2FA Remember token not provided"), + _ => err_json!( + _json_err_twofactor(&twofactor_ids, user_uuid, conn)?, + "2FA Remember token not provided" + ), } } _ => err!("Invalid two factor provider"), diff --git a/src/api/notifications.rs b/src/api/notifications.rs index 8876937d..c1b9c316 100644 --- a/src/api/notifications.rs +++ b/src/api/notifications.rs @@ -4,12 +4,7 @@ use rocket::Route; use rocket_contrib::json::Json; use serde_json::Value as JsonValue; -use crate::{ - api::EmptyResult, - auth::Headers, - db::DbConn, - Error, CONFIG, -}; +use crate::{api::EmptyResult, auth::Headers, db::DbConn, Error, CONFIG}; pub fn routes() -> Vec { routes![negotiate, websockets_err] @@ -19,12 +14,18 @@ static SHOW_WEBSOCKETS_MSG: AtomicBool = AtomicBool::new(true); #[get("/hub")] fn websockets_err() -> EmptyResult { - if CONFIG.websocket_enabled() && SHOW_WEBSOCKETS_MSG.compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed).is_ok() { - err!(" + if CONFIG.websocket_enabled() + && SHOW_WEBSOCKETS_MSG + .compare_exchange(true, false, Ordering::Relaxed, Ordering::Relaxed) + .is_ok() + { + err!( + " ########################################################### '/notifications/hub' should be proxied to the websocket server or notifications won't work. Go to the Wiki for more info, or disable WebSockets setting WEBSOCKET_ENABLED=false. - ###########################################################################################\n") + ###########################################################################################\n" + ) } else { Err(Error::empty()) } diff --git a/src/api/web.rs b/src/api/web.rs index 90e572a8..5d2048ea 100644 --- a/src/api/web.rs +++ b/src/api/web.rs @@ -76,18 +76,48 @@ fn alive() -> Json { #[get("/bwrs_static/")] fn static_files(filename: String) -> Result, Error> { match filename.as_ref() { - "mail-github.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/mail-github.png"))), - "logo-gray.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/logo-gray.png"))), - "shield-white.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/shield-white.png"))), - "error-x.svg" => Ok(Content(ContentType::SVG, include_bytes!("../static/images/error-x.svg"))), + "mail-github.png" => Ok(Content( + ContentType::PNG, + include_bytes!("../static/images/mail-github.png"), + )), + "logo-gray.png" => Ok(Content( + ContentType::PNG, + include_bytes!("../static/images/logo-gray.png"), + )), + "shield-white.png" => Ok(Content( + ContentType::PNG, + include_bytes!("../static/images/shield-white.png"), + )), + "error-x.svg" => Ok(Content( + ContentType::SVG, + include_bytes!("../static/images/error-x.svg"), + )), "hibp.png" => Ok(Content(ContentType::PNG, include_bytes!("../static/images/hibp.png"))), - "bootstrap.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/bootstrap.css"))), - "bootstrap-native.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/bootstrap-native.js"))), - "identicon.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/identicon.js"))), - "datatables.js" => Ok(Content(ContentType::JavaScript, 
include_bytes!("../static/scripts/datatables.js"))), - "datatables.css" => Ok(Content(ContentType::CSS, include_bytes!("../static/scripts/datatables.css"))), - "jquery-3.5.1.slim.js" => Ok(Content(ContentType::JavaScript, include_bytes!("../static/scripts/jquery-3.5.1.slim.js"))), + "bootstrap.css" => Ok(Content( + ContentType::CSS, + include_bytes!("../static/scripts/bootstrap.css"), + )), + "bootstrap-native.js" => Ok(Content( + ContentType::JavaScript, + include_bytes!("../static/scripts/bootstrap-native.js"), + )), + "identicon.js" => Ok(Content( + ContentType::JavaScript, + include_bytes!("../static/scripts/identicon.js"), + )), + "datatables.js" => Ok(Content( + ContentType::JavaScript, + include_bytes!("../static/scripts/datatables.js"), + )), + "datatables.css" => Ok(Content( + ContentType::CSS, + include_bytes!("../static/scripts/datatables.css"), + )), + "jquery-3.5.1.slim.js" => Ok(Content( + ContentType::JavaScript, + include_bytes!("../static/scripts/jquery-3.5.1.slim.js"), + )), _ => err!(format!("Static file not found: {}", filename)), } } diff --git a/src/auth.rs b/src/auth.rs index 59d1370d..4fe9a7b5 100644 --- a/src/auth.rs +++ b/src/auth.rs @@ -223,10 +223,9 @@ use crate::db::{ }; pub struct Host { - pub host: String + pub host: String, } - impl<'a, 'r> FromRequest<'a, 'r> for Host { type Error = &'static str; @@ -506,7 +505,11 @@ impl<'a, 'r> FromRequest<'a, 'r> for ManagerHeaders { }; if !headers.org_user.has_full_access() { - match CollectionUser::find_by_collection_and_user(&col_id, &headers.org_user.user_uuid, &conn) { + match CollectionUser::find_by_collection_and_user( + &col_id, + &headers.org_user.user_uuid, + &conn, + ) { Some(_) => (), None => err_handler!("The current user isn't a manager for this collection"), } diff --git a/src/config.rs b/src/config.rs index 6c41c975..df4e90b4 100644 --- a/src/config.rs +++ b/src/config.rs @@ -878,9 +878,7 @@ fn js_escape_helper<'reg, 'rc>( .param(0) .ok_or_else(|| RenderError::new("Param not found for helper \"js_escape\""))?; - let no_quote = h - .param(1) - .is_some(); + let no_quote = h.param(1).is_some(); let value = param .value() @@ -888,7 +886,7 @@ fn js_escape_helper<'reg, 'rc>( .ok_or_else(|| RenderError::new("Param for helper \"js_escape\" is not a String"))?; let mut escaped_value = value.replace('\\', "").replace('\'', "\\x22").replace('\"', "\\x27"); - if ! no_quote { + if !no_quote { escaped_value = format!(""{}"", escaped_value); } diff --git a/src/db/mod.rs b/src/db/mod.rs index 2472caa6..85926162 100644 --- a/src/db/mod.rs +++ b/src/db/mod.rs @@ -25,7 +25,6 @@ pub mod __mysql_schema; #[path = "schemas/postgresql/schema.rs"] pub mod __postgresql_schema; - // This is used to generate the main DbConn and DbPool enums, which contain one variant for each database supported macro_rules! generate_connections { ( $( $name:ident: $ty:ty ),+ ) => { @@ -110,7 +109,6 @@ impl DbConnType { } } - #[macro_export] macro_rules! db_run { // Same for all dbs @@ -155,7 +153,6 @@ macro_rules! 
db_run { }; } - pub trait FromDb { type Output; #[allow(clippy::wrong_self_convention)] @@ -240,7 +237,6 @@ pub fn backup_database() -> Result<(), Error> { Ok(()) } - /// Get the SQL Server version pub fn get_sql_server_version(conn: &DbConn) -> String { use diesel::sql_types::Text; @@ -308,8 +304,7 @@ mod sqlite_migrations { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::sqlite::SqliteConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration // Scoped to a connection. @@ -337,8 +332,7 @@ mod mysql_migrations { pub fn run_migrations() -> Result<(), super::Error> { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::mysql::MysqlConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration // Scoped to a connection/session. @@ -359,8 +353,7 @@ mod postgresql_migrations { pub fn run_migrations() -> Result<(), super::Error> { use diesel::{Connection, RunQueryDsl}; // Make sure the database is up to date (create if it doesn't exist, or run the migrations) - let connection = - diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?; + let connection = diesel::pg::PgConnection::establish(&crate::CONFIG.database_url())?; // Disable Foreign Key Checks during migration // FIXME: Per https://www.postgresql.org/docs/12/sql-set-constraints.html, diff --git a/src/db/models/attachment.rs b/src/db/models/attachment.rs index 6a8fc5b0..76bc474e 100644 --- a/src/db/models/attachment.rs +++ b/src/db/models/attachment.rs @@ -59,7 +59,6 @@ use crate::error::MapResult; /// Database methods impl Attachment { - pub fn save(&self, conn: &DbConn) -> EmptyResult { db_run! 
{ conn: sqlite, mysql { diff --git a/src/db/models/cipher.rs b/src/db/models/cipher.rs index 365865f8..6de8afbd 100644 --- a/src/db/models/cipher.rs +++ b/src/db/models/cipher.rs @@ -2,14 +2,7 @@ use chrono::{NaiveDateTime, Utc}; use serde_json::Value; use super::{ - Attachment, - CollectionCipher, - Favorite, - FolderCipher, - Organization, - User, - UserOrgStatus, - UserOrgType, + Attachment, CollectionCipher, Favorite, FolderCipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization, }; @@ -90,17 +83,24 @@ impl Cipher { attachments.iter().map(|c| c.to_json(host)).collect() }; - let fields_json = self.fields.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); - let password_history_json = self.password_history.as_ref().and_then(|s| serde_json::from_str(s).ok()).unwrap_or(Value::Null); + let fields_json = self + .fields + .as_ref() + .and_then(|s| serde_json::from_str(s).ok()) + .unwrap_or(Value::Null); + let password_history_json = self + .password_history + .as_ref() + .and_then(|s| serde_json::from_str(s).ok()) + .unwrap_or(Value::Null); - let (read_only, hide_passwords) = - match self.get_access_restrictions(&user_uuid, conn) { - Some((ro, hp)) => (ro, hp), - None => { - error!("Cipher ownership assertion failure"); - (true, true) - }, - }; + let (read_only, hide_passwords) = match self.get_access_restrictions(&user_uuid, conn) { + Some((ro, hp)) => (ro, hp), + None => { + error!("Cipher ownership assertion failure"); + (true, true) + } + }; // Get the type_data or a default to an empty json object '{}'. // If not passing an empty object, mobile clients will crash. diff --git a/src/db/models/collection.rs b/src/db/models/collection.rs index 5fbc3128..06a2d671 100644 --- a/src/db/models/collection.rs +++ b/src/db/models/collection.rs @@ -1,6 +1,6 @@ use serde_json::Value; -use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization, User, Cipher}; +use super::{Cipher, Organization, User, UserOrgStatus, UserOrgType, UserOrganization}; db_object! { #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] @@ -284,7 +284,13 @@ impl CollectionUser { }} } - pub fn save(user_uuid: &str, collection_uuid: &str, read_only: bool, hide_passwords: bool, conn: &DbConn) -> EmptyResult { + pub fn save( + user_uuid: &str, + collection_uuid: &str, + read_only: bool, + hide_passwords: bool, + conn: &DbConn, + ) -> EmptyResult { User::update_uuid_revision(&user_uuid, conn); db_run! 
{ conn: diff --git a/src/db/models/device.rs b/src/db/models/device.rs index 77837fca..b2297fe2 100644 --- a/src/db/models/device.rs +++ b/src/db/models/device.rs @@ -74,10 +74,26 @@ impl Device { let time_now = Utc::now().naive_utc(); self.updated_at = time_now; - let orgowner: Vec<_> = orgs.iter().filter(|o| o.atype == 0).map(|o| o.org_uuid.clone()).collect(); - let orgadmin: Vec<_> = orgs.iter().filter(|o| o.atype == 1).map(|o| o.org_uuid.clone()).collect(); - let orguser: Vec<_> = orgs.iter().filter(|o| o.atype == 2).map(|o| o.org_uuid.clone()).collect(); - let orgmanager: Vec<_> = orgs.iter().filter(|o| o.atype == 3).map(|o| o.org_uuid.clone()).collect(); + let orgowner: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 0) + .map(|o| o.org_uuid.clone()) + .collect(); + let orgadmin: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 1) + .map(|o| o.org_uuid.clone()) + .collect(); + let orguser: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 2) + .map(|o| o.org_uuid.clone()) + .collect(); + let orgmanager: Vec<_> = orgs + .iter() + .filter(|o| o.atype == 3) + .map(|o| o.org_uuid.clone()) + .collect(); // Create the JWT claims struct, to send to the client use crate::auth::{encode_jwt, LoginJwtClaims, DEFAULT_VALIDITY, JWT_LOGIN_ISSUER}; diff --git a/src/db/models/favorite.rs b/src/db/models/favorite.rs index 4f610f21..2fc20380 100644 --- a/src/db/models/favorite.rs +++ b/src/db/models/favorite.rs @@ -20,7 +20,7 @@ use crate::error::MapResult; impl Favorite { // Returns whether the specified cipher is a favorite of the specified user. pub fn is_favorite(cipher_uuid: &str, user_uuid: &str, conn: &DbConn) -> bool { - db_run!{ conn: { + db_run! { conn: { let query = favorites::table .filter(favorites::cipher_uuid.eq(cipher_uuid)) .filter(favorites::user_uuid.eq(user_uuid)) @@ -36,19 +36,19 @@ impl Favorite { match (old, new) { (false, true) => { User::update_uuid_revision(user_uuid, &conn); - db_run!{ conn: { - diesel::insert_into(favorites::table) - .values(( - favorites::user_uuid.eq(user_uuid), - favorites::cipher_uuid.eq(cipher_uuid), - )) - .execute(conn) - .map_res("Error adding favorite") - }} + db_run! { conn: { + diesel::insert_into(favorites::table) + .values(( + favorites::user_uuid.eq(user_uuid), + favorites::cipher_uuid.eq(cipher_uuid), + )) + .execute(conn) + .map_res("Error adding favorite") + }} } (true, false) => { User::update_uuid_revision(user_uuid, &conn); - db_run!{ conn: { + db_run! { conn: { diesel::delete( favorites::table .filter(favorites::user_uuid.eq(user_uuid)) @@ -59,7 +59,7 @@ impl Favorite { }} } // Otherwise, the favorite status is already what it should be. - _ => Ok(()) + _ => Ok(()), } } diff --git a/src/db/models/folder.rs b/src/db/models/folder.rs index b5bbcc79..ec83d117 100644 --- a/src/db/models/folder.rs +++ b/src/db/models/folder.rs @@ -109,7 +109,6 @@ impl Folder { User::update_uuid_revision(&self.user_uuid, conn); FolderCipher::delete_all_by_folder(&self.uuid, &conn)?; - db_run! 
{ conn: { diesel::delete(folders::table.filter(folders::uuid.eq(&self.uuid))) .execute(conn) diff --git a/src/db/models/mod.rs b/src/db/models/mod.rs index a4fb635b..2179b7f0 100644 --- a/src/db/models/mod.rs +++ b/src/db/models/mod.rs @@ -6,9 +6,9 @@ mod favorite; mod folder; mod org_policy; mod organization; +mod send; mod two_factor; mod user; -mod send; pub use self::attachment::Attachment; pub use self::cipher::Cipher; @@ -18,6 +18,6 @@ pub use self::favorite::Favorite; pub use self::folder::{Folder, FolderCipher}; pub use self::org_policy::{OrgPolicy, OrgPolicyType}; pub use self::organization::{Organization, UserOrgStatus, UserOrgType, UserOrganization}; +pub use self::send::{Send, SendType}; pub use self::two_factor::{TwoFactor, TwoFactorType}; pub use self::user::{Invitation, User, UserStampException}; -pub use self::send::{Send, SendType}; \ No newline at end of file diff --git a/src/db/models/org_policy.rs b/src/db/models/org_policy.rs index 0707eccc..810a9c3f 100644 --- a/src/db/models/org_policy.rs +++ b/src/db/models/org_policy.rs @@ -4,7 +4,7 @@ use crate::api::EmptyResult; use crate::db::DbConn; use crate::error::MapResult; -use super::{Organization, UserOrganization, UserOrgStatus, UserOrgType}; +use super::{Organization, UserOrgStatus, UserOrgType, UserOrganization}; db_object! { #[derive(Identifiable, Queryable, Insertable, Associations, AsChangeset)] @@ -20,8 +20,7 @@ db_object! { } } -#[derive(Copy, Clone)] -#[derive(num_derive::FromPrimitive)] +#[derive(Copy, Clone, num_derive::FromPrimitive)] pub enum OrgPolicyType { TwoFactorAuthentication = 0, MasterPassword = 1, @@ -175,7 +174,8 @@ impl OrgPolicy { /// and the user is not an owner or admin of that org. This is only useful for checking /// applicability of policy types that have these particular semantics. pub fn is_applicable_to_user(user_uuid: &str, policy_type: OrgPolicyType, conn: &DbConn) -> bool { - for policy in OrgPolicy::find_by_user(user_uuid, conn) { // Returns confirmed users only. + for policy in OrgPolicy::find_by_user(user_uuid, conn) { + // Returns confirmed users only. if policy.enabled && policy.has_type(policy_type) { let org_uuid = &policy.org_uuid; if let Some(user) = UserOrganization::find_by_user_and_org(user_uuid, org_uuid, conn) { diff --git a/src/db/models/organization.rs b/src/db/models/organization.rs index 1eeb04d2..9931ed5b 100644 --- a/src/db/models/organization.rs +++ b/src/db/models/organization.rs @@ -1,8 +1,8 @@ +use num_traits::FromPrimitive; use serde_json::Value; use std::cmp::Ordering; -use num_traits::FromPrimitive; -use super::{CollectionUser, User, OrgPolicy}; +use super::{CollectionUser, OrgPolicy, User}; db_object! { #[derive(Identifiable, Queryable, Insertable, AsChangeset)] @@ -35,8 +35,7 @@ pub enum UserOrgStatus { Confirmed = 2, } -#[derive(Copy, Clone, PartialEq, Eq)] -#[derive(num_derive::FromPrimitive)] +#[derive(Copy, Clone, PartialEq, Eq, num_derive::FromPrimitive)] pub enum UserOrgType { Owner = 0, Admin = 1, @@ -117,7 +116,10 @@ impl PartialOrd for i32 { } fn le(&self, other: &UserOrgType) -> bool { - matches!(self.partial_cmp(other), Some(Ordering::Less) | Some(Ordering::Equal) | None) + matches!( + self.partial_cmp(other), + Some(Ordering::Less) | Some(Ordering::Equal) | None + ) } } @@ -236,7 +238,6 @@ impl Organization { UserOrganization::delete_all_by_organization(&self.uuid, &conn)?; OrgPolicy::delete_all_by_organization(&self.uuid, &conn)?; - db_run! 
{ conn: { diesel::delete(organizations::table.filter(organizations::uuid.eq(self.uuid))) .execute(conn) @@ -347,11 +348,13 @@ impl UserOrganization { let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn); collections .iter() - .map(|c| json!({ - "Id": c.collection_uuid, - "ReadOnly": c.read_only, - "HidePasswords": c.hide_passwords, - })) + .map(|c| { + json!({ + "Id": c.collection_uuid, + "ReadOnly": c.read_only, + "HidePasswords": c.hide_passwords, + }) + }) .collect() }; @@ -446,8 +449,7 @@ impl UserOrganization { } pub fn has_full_access(&self) -> bool { - (self.access_all || self.atype >= UserOrgType::Admin) && - self.has_status(UserOrgStatus::Confirmed) + (self.access_all || self.atype >= UserOrgType::Admin) && self.has_status(UserOrgStatus::Confirmed) } pub fn find_by_uuid(uuid: &str, conn: &DbConn) -> Option { diff --git a/src/db/models/user.rs b/src/db/models/user.rs index fdd2dcae..6f61dfeb 100644 --- a/src/db/models/user.rs +++ b/src/db/models/user.rs @@ -63,8 +63,8 @@ enum UserStatus { #[derive(Serialize, Deserialize)] pub struct UserStampException { - pub route: String, - pub security_stamp: String + pub route: String, + pub security_stamp: String, } /// Local methods @@ -162,7 +162,7 @@ impl User { pub fn set_stamp_exception(&mut self, route_exception: &str) { let stamp_exception = UserStampException { route: route_exception.to_string(), - security_stamp: self.security_stamp.to_string() + security_stamp: self.security_stamp.to_string(), }; self.stamp_exception = Some(serde_json::to_string(&stamp_exception).unwrap_or_default()); } @@ -341,7 +341,7 @@ impl User { pub fn last_active(&self, conn: &DbConn) -> Option { match Device::find_latest_active_by_user(&self.uuid, conn) { Some(device) => Some(device.updated_at), - None => None + None => None, } } } diff --git a/src/error.rs b/src/error.rs index a0b28a4b..9c597a8b 100644 --- a/src/error.rs +++ b/src/error.rs @@ -33,10 +33,10 @@ macro_rules! 
make_error { }; } +use diesel::r2d2::PoolError as R2d2Err; use diesel::result::Error as DieselErr; use diesel::ConnectionError as DieselConErr; use diesel_migrations::RunMigrationsError as DieselMigErr; -use diesel::r2d2::PoolError as R2d2Err; use handlebars::RenderError as HbErr; use jsonwebtoken::errors::Error as JwtErr; use regex::Error as RegexErr; @@ -191,7 +191,7 @@ use rocket::response::{self, Responder, Response}; impl<'r> Responder<'r> for Error { fn respond_to(self, _: &Request) -> response::Result<'r> { match self.error { - ErrorKind::EmptyError(_) => {} // Don't print the error in this situation + ErrorKind::EmptyError(_) => {} // Don't print the error in this situation ErrorKind::SimpleError(_) => {} // Don't print the error in this situation _ => error!(target: "error", "{:#?}", self), }; diff --git a/src/mail.rs b/src/mail.rs index cd9edd9e..eb7c84c5 100644 --- a/src/mail.rs +++ b/src/mail.rs @@ -1,4 +1,4 @@ -use std::{str::FromStr}; +use std::str::FromStr; use chrono::{DateTime, Local}; use percent_encoding::{percent_encode, NON_ALPHANUMERIC}; @@ -58,21 +58,32 @@ fn mailer() -> SmtpTransport { let smtp_client = match CONFIG.smtp_auth_mechanism() { Some(mechanism) => { - let allowed_mechanisms = [SmtpAuthMechanism::Plain, SmtpAuthMechanism::Login, SmtpAuthMechanism::Xoauth2]; + let allowed_mechanisms = [ + SmtpAuthMechanism::Plain, + SmtpAuthMechanism::Login, + SmtpAuthMechanism::Xoauth2, + ]; let mut selected_mechanisms = vec![]; for wanted_mechanism in mechanism.split(',') { for m in &allowed_mechanisms { - if m.to_string().to_lowercase() == wanted_mechanism.trim_matches(|c| c == '"' || c == '\'' || c == ' ').to_lowercase() { + if m.to_string().to_lowercase() + == wanted_mechanism + .trim_matches(|c| c == '"' || c == '\'' || c == ' ') + .to_lowercase() + { selected_mechanisms.push(*m); } } - }; + } if !selected_mechanisms.is_empty() { smtp_client.authentication(selected_mechanisms) } else { // Only show a warning, and return without setting an actual authentication mechanism - warn!("No valid SMTP Auth mechanism found for '{}', using default values", mechanism); + warn!( + "No valid SMTP Auth mechanism found for '{}', using default values", + mechanism + ); smtp_client } } @@ -316,31 +327,30 @@ fn send_email(address: &str, subject: &str, body_html: String, body_text: String let smtp_from = &CONFIG.smtp_from(); let email = Message::builder() - .message_id(Some(format!("<{}@{}>", crate::util::get_uuid(), smtp_from.split('@').collect::>()[1] ))) + .message_id(Some(format!( + "<{}@{}>", + crate::util::get_uuid(), + smtp_from.split('@').collect::>()[1] + ))) .to(Mailbox::new(None, Address::from_str(&address)?)) .from(Mailbox::new( Some(CONFIG.smtp_from_name()), Address::from_str(smtp_from)?, )) .subject(subject) - .multipart( - MultiPart::alternative() - .singlepart(text) - .singlepart(html) - )?; + .multipart(MultiPart::alternative().singlepart(text).singlepart(html))?; match mailer().send(&email) { Ok(_) => Ok(()), // Match some common errors and make them more user friendly Err(e) => { - if e.is_client() { - err!(format!("SMTP Client error: {}", e)); + err!(format!("SMTP Client error: {}", e)); } else if e.is_transient() { err!(format!("SMTP 4xx error: {:?}", e)); } else if e.is_permanent() { err!(format!("SMTP 5xx error: {:?}", e)); - } else if e.is_timeout() { + } else if e.is_timeout() { err!(format!("SMTP timeout error: {:?}", e)); } else { Err(e.into()) diff --git a/src/main.rs b/src/main.rs index 50975c66..1908d967 100644 --- a/src/main.rs +++ b/src/main.rs @@ -123,7 
+123,9 @@ fn init_logging(level: log::LevelFilter) -> Result<(), fern::InitError> { // Enable smtp debug logging only specifically for smtp when need. // This can contain sensitive information we do not want in the default debug/trace logging. if CONFIG.smtp_debug() { - println!("[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!"); + println!( + "[WARNING] SMTP Debugging is enabled (SMTP_DEBUG=true). Sensitive information could be disclosed via logs!" + ); println!("[WARNING] Only enable SMTP_DEBUG during troubleshooting!\n"); logger = logger.level_for("lettre::transport::smtp", log::LevelFilter::Debug) } else { @@ -294,7 +296,10 @@ fn check_web_vault() { let index_path = Path::new(&CONFIG.web_vault_folder()).join("index.html"); if !index_path.exists() { - error!("Web vault is not found at '{}'. To install it, please follow the steps in: ", CONFIG.web_vault_folder()); + error!( + "Web vault is not found at '{}'. To install it, please follow the steps in: ", + CONFIG.web_vault_folder() + ); error!("https://github.com/dani-garcia/bitwarden_rs/wiki/Building-binary#install-the-web-vault"); error!("You can also set the environment variable 'WEB_VAULT_ENABLED=false' to disable it"); exit(1); diff --git a/src/util.rs b/src/util.rs index feafa467..1858d4a3 100644 --- a/src/util.rs +++ b/src/util.rs @@ -28,7 +28,10 @@ impl Fairing for AppHeaders { res.set_raw_header("X-Frame-Options", "SAMEORIGIN"); res.set_raw_header("X-Content-Type-Options", "nosniff"); res.set_raw_header("X-XSS-Protection", "1; mode=block"); - let csp = format!("frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", CONFIG.allowed_iframe_ancestors()); + let csp = format!( + "frame-ancestors 'self' chrome-extension://nngceckbapebfimnlniiiahkandclblb moz-extension://* {};", + CONFIG.allowed_iframe_ancestors() + ); res.set_raw_header("Content-Security-Policy", csp); // Disable cache unless otherwise specified @@ -293,8 +296,7 @@ where use std::env; -pub fn get_env_str_value(key: &str) -> Option -{ +pub fn get_env_str_value(key: &str) -> Option { let key_file = format!("{}_FILE", key); let value_from_env = env::var(key); let value_file = env::var(&key_file); @@ -304,9 +306,9 @@ pub fn get_env_str_value(key: &str) -> Option (Ok(v_env), Err(_)) => Some(v_env), (Err(_), Ok(v_file)) => match fs::read_to_string(v_file) { Ok(content) => Some(content.trim().to_string()), - Err(e) => panic!("Failed to load {}: {:?}", key, e) + Err(e) => panic!("Failed to load {}: {:?}", key, e), }, - _ => None + _ => None, } }
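Note on the `authenticator.rs` hunk above: the change only reflows the `steps` expression, but the time-drift window it feeds is easy to miss in diff form. Below is a minimal, standalone sketch of that windowing logic under stated assumptions — the helper name `totp_time_steps` and the `main` demo are illustrative only and are not part of the crate; the real code reads the flag from `CONFIG.authenticator_disable_time_drift()` and validates a code per step.

```rust
// Sketch of the TOTP time-step window used in validate_totp_code (assumed shape,
// not the crate's actual helper): with drift checking disabled the window collapses
// to the current 30-second step only; otherwise the previous, current and next
// steps are all candidates.
fn totp_time_steps(current_timestamp: i64, disable_time_drift: bool) -> Vec<i64> {
    let steps: i64 = if disable_time_drift { 0 } else { 1 };
    (-steps..=steps)
        .map(|step| current_timestamp / 30 + step)
        .collect()
}

fn main() {
    // With drift allowed, a timestamp of 90s yields steps 2, 3 and 4.
    assert_eq!(totp_time_steps(90, false), vec![2, 3, 4]);
    // With drift disabled, only the current step 3 is accepted.
    assert_eq!(totp_time_steps(90, true), vec![3]);
}
```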