Fix failing large note imports
When importing to Vaultwarden (or Bitwarden), notes larger than 10,000 encrypted characters are invalid. For one, such notes are not compatible with Bitwarden, and some clients tend to break on very large notes.
We already added a check for this limit when adding a single cipher, but that check caused issues during import and could leave a partially imported vault. Bitwarden runs some validations before actually starting the import process and generates a special error message that helps the user identify which items are invalid.
This PR adds that validation check and returns the same kind of error.
Fixes #3048
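
For illustration, with a single oversized note at index 0 the pre-validation would respond with a body shaped roughly like this (a sketch derived from the json! payload in the diff below; the exact serialization and HTTP status depend on how err_json! renders it):

{
  "message": "The model state is invalid.",
  "validationErrors": {
    "Ciphers[0].Notes": [
      "The field Notes exceeds the maximum encrypted value length of 10000 characters."
    ]
  },
  "object": "error"
}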
BlackDex
2 years ago
4 changed files with 42 additions and 3 deletions
src/api/core/ciphers.rs
src/api/core/mod.rs
src/api/core/organizations.rs
src/db/models/cipher.rs
@@ -205,7 +205,7 @@ pub struct CipherData {
     */
     pub Type: i32,
     pub Name: String,
-    Notes: Option<String>,
+    pub Notes: Option<String>,
     Fields: Option<Value>,

     // Only one of these should exist, depending on type
@@ -542,6 +542,12 @@ async fn post_ciphers_import(
     let data: ImportData = data.into_inner().data;

+    // Validate the import before continuing
+    // Bitwarden does not process the import if there is one item invalid.
+    // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
+    // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
+    Cipher::validate_notes(&data.Ciphers)?;
+
     // Read and create the folders
     let mut folders: Vec<_> = Vec::new();
     for folder in data.Folders.into_iter() {
@@ -7,7 +7,7 @@ mod organizations;
 mod sends;
 pub mod two_factor;

-pub use ciphers::{purge_trashed_ciphers, CipherSyncData, CipherSyncType};
+pub use ciphers::{purge_trashed_ciphers, CipherData, CipherSyncData, CipherSyncType};
 pub use emergency_access::{emergency_notification_reminder_job, emergency_request_timeout_job};
 pub use events::{event_cleanup_job, log_event, log_user_event};
 pub use sends::purge_sends;
@@ -1378,6 +1378,12 @@ async fn post_org_import(
     let data: ImportData = data.into_inner().data;
     let org_id = query.organization_id;

+    // Validate the import before continuing
+    // Bitwarden does not process the import if there is one item invalid.
+    // Since we check for the size of the encrypted note length, we need to do that here to pre-validate it.
+    // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
+    Cipher::validate_notes(&data.Ciphers)?;
+
     let mut collections = Vec::new();
     for coll in data.Collections {
         let collection = Collection::new(org_id.clone(), coll.Name);
@@ -6,7 +6,7 @@ use super::{
     Attachment, CollectionCipher, Favorite, FolderCipher, Group, User, UserOrgStatus, UserOrgType, UserOrganization,
 };

-use crate::api::core::CipherSyncData;
+use crate::api::core::{CipherData, CipherSyncData};

 use std::borrow::Cow;
@@ -73,6 +73,33 @@ impl Cipher {
             reprompt: None,
         }
     }

+    pub fn validate_notes(cipher_data: &[CipherData]) -> EmptyResult {
+        let mut validation_errors = serde_json::Map::new();
+        for (index, cipher) in cipher_data.iter().enumerate() {
+            if let Some(note) = &cipher.Notes {
+                if note.len() > 10_000 {
+                    validation_errors.insert(
+                        format!("Ciphers[{index}].Notes"),
+                        serde_json::to_value([
+                            "The field Notes exceeds the maximum encrypted value length of 10000 characters.",
+                        ])
+                        .unwrap(),
+                    );
+                }
+            }
+        }
+        if !validation_errors.is_empty() {
+            let err_json = json!({
+                "message": "The model state is invalid.",
+                "validationErrors": validation_errors,
+                "object": "error"
+            });
+            err_json!(err_json, "Import validation errors")
+        } else {
+            Ok(())
+        }
+    }
 }
 use crate::db::DbConn;
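
As a usage sketch of the new check (not part of the commit): note.len() > 10_000 is a strict byte-length comparison on the encrypted string, so a 10,000-character note passes while 10,001 fails. Assuming a CipherData value can be constructed directly for testing, the boundary behavior looks like this (notes_limit_sketch is a hypothetical helper):

// Hypothetical boundary test; `cipher` is assumed constructible with
// all fields other than Notes left at sensible defaults.
fn notes_limit_sketch(mut cipher: CipherData) {
    cipher.Notes = Some("a".repeat(10_000)); // exactly at the limit: accepted
    assert!(Cipher::validate_notes(std::slice::from_ref(&cipher)).is_ok());

    cipher.Notes = Some("a".repeat(10_001)); // one byte over: the whole import is rejected
    assert!(Cipher::validate_notes(std::slice::from_ref(&cipher)).is_err());
}

Because both post_ciphers_import and post_org_import call the validator before creating any folders or collections, a failing item now aborts the import up front instead of leaving a partially imported vault.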