
Fix Org Import duplicate collections

This fixes an issue where collections were duplicated, the same issue we previously had with folders.
Also made some optimizations by using a HashSet where possible and setting the Vec/HashMap capacity up front.
And instead of passing whole objects around, we now only use the UUID, which was the only value we needed.
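As a rough sketch of that lookup pattern (illustrative only; IncomingItem and resolve_ids are made-up names, not the actual Vaultwarden types, which build and save real Folder/Collection records instead):

    use std::collections::HashSet;

    // Hypothetical import record: an optional client-supplied id plus a name,
    // similar in shape to the folder/collection import payloads.
    struct IncomingItem {
        id: Option<String>,
        name: String,
    }

    // Existing UUIDs go into a HashSet for O(1) lookups, the result Vec is
    // pre-sized with with_capacity, and only the UUID string is kept per item.
    fn resolve_ids(existing: Vec<String>, incoming: Vec<IncomingItem>) -> Vec<String> {
        let existing_ids: HashSet<Option<String>> = existing.into_iter().map(Some).collect();
        let mut resolved = Vec::with_capacity(incoming.len());
        for item in incoming {
            let uuid = if existing_ids.contains(&item.id) {
                // The id already exists, so re-use it instead of creating a duplicate.
                item.id.unwrap()
            } else {
                // Stand-in for creating and saving a new record.
                format!("new-uuid-for-{}", item.name)
            };
            resolved.push(uuid);
        }
        resolved
    }

    fn main() {
        let existing = vec!["11111111-aaaa".to_string()];
        let incoming = vec![
            IncomingItem { id: Some("11111111-aaaa".to_string()), name: "Work".to_string() },
            IncomingItem { id: None, name: "Private".to_string() },
        ];
        // The first item re-uses its existing UUID, the second gets a new one.
        println!("{:?}", resolve_ids(existing, incoming));
    }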

Also found an issue with importing a personal export via the Org import when folders are used.
Since Orgs do not use folders, we need to clear those out, the same as Bitwarden does.
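A minimal sketch of that idea (simplified struct and function names, not the real handler, which also validates and saves the ciphers):

    // Illustrative only; the real CipherData has many more fields.
    struct CipherData {
        folder_id: Option<String>,
    }

    fn clear_folders_for_org_import(ciphers: &mut [CipherData]) {
        for cipher_data in ciphers.iter_mut() {
            // Organization imports have no folders, so any folder_id carried over
            // from a personal export is cleared, matching upstream Bitwarden.
            cipher_data.folder_id = None;
        }
    }

    fn main() {
        let mut ciphers = vec![CipherData { folder_id: Some("folder-uuid".to_string()) }];
        clear_folders_for_org_import(&mut ciphers);
        assert!(ciphers.iter().all(|c| c.folder_id.is_none()));
    }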

Fixes #5193

Signed-off-by: BlackDex <black.dex@gmail.com>
Branch: pull/5200/head
Author: BlackDex, 5 months ago
Commit: 787a645575 (GPG Key ID: 58C80A2AA6C765E1; no known key found for this signature in database)
Changed files:
  1. src/api/core/ciphers.rs (10 changes)
  2. src/api/core/organizations.rs (39 changes)
  3. src/api/core/two_factor/duo_oidc.rs (5 changes)

src/api/core/ciphers.rs (10 changes)

@@ -222,7 +222,7 @@ pub struct CipherData {
     // Id is optional as it is included only in bulk share
     pub id: Option<String>,
     // Folder id is not included in import
-    folder_id: Option<String>,
+    pub folder_id: Option<String>,
     // TODO: Some of these might appear all the time, no need for Option
     #[serde(alias = "organizationID")]
     pub organization_id: Option<String>,
@@ -585,11 +585,11 @@ async fn post_ciphers_import(
     Cipher::validate_cipher_data(&data.ciphers)?;
 
     // Read and create the folders
-    let existing_folders: Vec<String> =
-        Folder::find_by_user(&headers.user.uuid, &mut conn).await.into_iter().map(|f| f.uuid).collect();
+    let existing_folders: HashSet<Option<String>> =
+        Folder::find_by_user(&headers.user.uuid, &mut conn).await.into_iter().map(|f| Some(f.uuid)).collect();
     let mut folders: Vec<String> = Vec::with_capacity(data.folders.len());
     for folder in data.folders.into_iter() {
-        let folder_uuid = if folder.id.is_some() && existing_folders.contains(folder.id.as_ref().unwrap()) {
+        let folder_uuid = if existing_folders.contains(&folder.id) {
             folder.id.unwrap()
         } else {
             let mut new_folder = Folder::new(headers.user.uuid.clone(), folder.name);
@@ -601,8 +601,8 @@ async fn post_ciphers_import(
     }
 
     // Read the relations between folders and ciphers
+    // Ciphers can only be in one folder at the same time
     let mut relations_map = HashMap::with_capacity(data.folder_relationships.len());
     for relation in data.folder_relationships {
         relations_map.insert(relation.key, relation.value);
     }

src/api/core/organizations.rs (39 changes)

@@ -11,7 +11,6 @@ use crate::{
     },
     auth::{decode_invite, AdminHeaders, ClientVersion, Headers, ManagerHeaders, ManagerHeadersLoose, OwnerHeaders},
     db::{models::*, DbConn},
-    error::Error,
     mail,
     util::{convert_json_key_lcase_first, NumberOrString},
     CONFIG,
@@ -127,6 +126,7 @@ struct NewCollectionData {
     name: String,
     groups: Vec<NewCollectionObjectData>,
     users: Vec<NewCollectionObjectData>,
+    id: Option<String>,
     external_id: Option<String>,
 }
@@ -1598,40 +1598,43 @@ async fn post_org_import(
     // TODO: See if we can optimize the whole cipher adding/importing and prevent duplicate code and checks.
     Cipher::validate_cipher_data(&data.ciphers)?;
 
-    let mut collections = Vec::new();
+    let existing_collections: HashSet<Option<String>> =
+        Collection::find_by_organization(&org_id, &mut conn).await.into_iter().map(|c| (Some(c.uuid))).collect();
+    let mut collections: Vec<String> = Vec::with_capacity(data.collections.len());
     for coll in data.collections {
-        let collection = Collection::new(org_id.clone(), coll.name, coll.external_id);
-        if collection.save(&mut conn).await.is_err() {
-            collections.push(Err(Error::new("Failed to create Collection", "Failed to create Collection")));
+        let collection_uuid = if existing_collections.contains(&coll.id) {
+            coll.id.unwrap()
         } else {
-            collections.push(Ok(collection));
-        }
+            let new_collection = Collection::new(org_id.clone(), coll.name, coll.external_id);
+            new_collection.save(&mut conn).await?;
+            new_collection.uuid
+        };
+        collections.push(collection_uuid);
     }
 
     // Read the relations between collections and ciphers
-    let mut relations = Vec::new();
+    // Ciphers can be in multiple collections at the same time
+    let mut relations = Vec::with_capacity(data.collection_relationships.len());
     for relation in data.collection_relationships {
         relations.push((relation.key, relation.value));
     }
 
     let headers: Headers = headers.into();
 
-    let mut ciphers = Vec::new();
-    for cipher_data in data.ciphers {
+    let mut ciphers: Vec<String> = Vec::with_capacity(data.ciphers.len());
+    for mut cipher_data in data.ciphers {
+        // Always clear folder_id's via an organization import
+        cipher_data.folder_id = None;
         let mut cipher = Cipher::new(cipher_data.r#type, cipher_data.name.clone());
         update_cipher_from_data(&mut cipher, cipher_data, &headers, None, &mut conn, &nt, UpdateType::None).await.ok();
-        ciphers.push(cipher);
+        ciphers.push(cipher.uuid);
     }
 
     // Assign the collections
     for (cipher_index, coll_index) in relations {
-        let cipher_id = &ciphers[cipher_index].uuid;
-        let coll = &collections[coll_index];
-        let coll_id = match coll {
-            Ok(coll) => coll.uuid.as_str(),
-            Err(_) => err!("Failed to assign to collection"),
-        };
+        let cipher_id = &ciphers[cipher_index];
+        let coll_id = &collections[coll_index];
         CollectionCipher::save(cipher_id, coll_id, &mut conn).await?;
     }

src/api/core/two_factor/duo_oidc.rs (5 changes)

@@ -211,10 +211,7 @@ impl DuoClient {
             nonce,
         };
 
-        let token = match self.encode_duo_jwt(jwt_payload) {
-            Ok(token) => token,
-            Err(e) => return Err(e),
-        };
+        let token = self.encode_duo_jwt(jwt_payload)?;
 
         let authz_endpoint = format!("https://{}/oauth/v1/authorize", self.api_host);
         let mut auth_url = match Url::parse(authz_endpoint.as_str()) {
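For reference, the duo_oidc.rs change above is the usual Rust shorthand: a match whose Err arm only returns the error can be replaced by the ? operator. A small standalone illustration (made-up encode() helper, not the Duo client itself):

    // A fallible stand-in for encode_duo_jwt(); illustrative only.
    fn encode(payload: &str) -> Result<String, String> {
        if payload.is_empty() {
            Err("empty payload".to_string())
        } else {
            Ok(format!("jwt({})", payload))
        }
    }

    // Before: a match whose Err arm only returns the error.
    fn token_with_match(payload: &str) -> Result<String, String> {
        let token = match encode(payload) {
            Ok(token) => token,
            Err(e) => return Err(e),
        };
        Ok(token)
    }

    // After: the ? operator performs the same early-return propagation.
    fn token_with_question_mark(payload: &str) -> Result<String, String> {
        let token = encode(payload)?;
        Ok(token)
    }

    fn main() {
        assert_eq!(token_with_match("abc"), token_with_question_mark("abc"));
        assert!(token_with_question_mark("").is_err());
    }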
