
Merging upstream to master

pull/156/head
Kumar Ankur, 7 years ago
parent commit 3767de9cfe
1. .travis.yml (7 changes)
2. BUILD.md (33 changes)
3. Cargo.lock (681 changes)
4. Cargo.toml (24 changes)
5. Dockerfile (28 changes)
6. README.md (59 changes)
7. docker/set-vault-baseurl.patch (23 changes)
8. docker/settings.Production.json (9 changes)
9. rust-toolchain (2 changes)
10. src/api/core/accounts.rs (33 changes)
11. src/api/core/ciphers.rs (91 changes)
12. src/api/core/mod.rs (18 changes)
13. src/api/core/organizations.rs (74 changes)
14. src/api/core/two_factor.rs (23 changes)
15. src/api/identity.rs (23 changes)
16. src/api/mod.rs (2 changes)
17. src/api/notifications.rs (31 changes)
18. src/db/models/attachment.rs (6 changes)
19. src/db/models/cipher.rs (23 changes)
20. src/db/models/collection.rs (12 changes)
21. src/db/models/folder.rs (2 changes)
22. src/db/models/organization.rs (37 changes)
23. src/db/models/user.rs (19 changes)
24. src/main.rs (2 changes)
25. src/util.rs (19 changes)

.travis.yml (7 changes)

@@ -0,0 +1,7 @@
+# Copied from Rocket's .travis.yml
+language: rust
+sudo: required # so we get a VM with higher specs
+dist: trusty # so we get a VM with higher specs
+cache: cargo
+rust:
+- nightly

BUILD.md (33 changes)

@@ -17,28 +17,29 @@ cargo build --release
 When run, the server is accessible in [http://localhost:80](http://localhost:80).

 ### Install the web-vault
-Download the latest official release from the [releases page](https://github.com/bitwarden/web/releases) and extract it.
-
-Modify `web-vault/settings.Production.json` to look like this:
-```json
-{
-  "appSettings": {
-    "apiUri": "/api",
-    "identityUri": "/identity",
-    "iconsUri": "/icons",
-    "stripeKey": "",
-    "braintreeKey": ""
-  }
-}
-```
+Clone the git repository at [bitwarden/web](https://github.com/bitwarden/web) and checkout the latest release tag (e.g. v2.1.1):
+```sh
+# clone the repository
+git clone https://github.com/bitwarden/web.git web-vault
+cd web-vault
+# switch to the latest tag
+git checkout "$(git tag | tail -n1)"
+```
+
+Apply the patch file from `docker/set-vault-baseurl.patch`:
+```sh
+# In the Vault repository directory
+git apply /path/to/bitwarden_rs/docker/set-vault-baseurl.patch
+```

-Then, run the following from the `web-vault` directory:
+Then, build the Vault:
 ```sh
+npm run sub:init
 npm install
-npx gulp dist:selfHosted
+npm run dist
 ```

-Finally copy the contents of the `web-vault/dist` folder into the `bitwarden_rs/web-vault` folder.
+Finally copy the contents of the `build` folder into the `bitwarden_rs/web-vault` folder.

 # Configuration
 The available configuration options are documented in the default `.env` file, and they can be modified by uncommenting the desired options in that file or by setting their respective environment variables. Look at the README file for the main configuration options available.
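The `.env` mechanism described in that configuration paragraph follows the usual uncomment-to-override pattern; a minimal sketch, using variable names that appear elsewhere in this change (the values shown and the binary path are illustrative assumptions):

```sh
# Illustrative .env excerpt: options ship commented out with their defaults.
## Disable hosting the web vault entirely
# WEB_VAULT_ENABLED=false
## Serve the web vault from a different directory
# WEB_VAULT_FOLDER=web-vault/

# The same options can be set as environment variables instead, e.g.:
WEB_VAULT_ENABLED=false ./target/release/bitwarden_rs
```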

Cargo.lock (681 changes)

File diff suppressed because it is too large

Cargo.toml (24 changes)

@@ -1,31 +1,31 @@
 [package]
 name = "bitwarden_rs"
-version = "0.12.0"
+version = "1.0.0"
 authors = ["Daniel García <dani-garcia@users.noreply.github.com>"]

 [dependencies]
 # Web framework for nightly with a focus on ease-of-use, expressibility, and speed.
-rocket = { version = "0.3.14", features = ["tls"] }
-rocket_codegen = "0.3.14"
-rocket_contrib = "0.3.14"
+rocket = { version = "0.3.16", features = ["tls"] }
+rocket_codegen = "0.3.16"
+rocket_contrib = "0.3.16"

 # HTTP client
-reqwest = "0.8.6"
+reqwest = "0.8.8"

 # multipart/form-data support
-multipart = "0.14.2"
+multipart = "0.15.2"

 # A generic serialization/deserialization framework
-serde = "1.0.70"
-serde_derive = "1.0.70"
-serde_json = "1.0.22"
+serde = "1.0.74"
+serde_derive = "1.0.74"
+serde_json = "1.0.26"

 # A safe, extensible ORM and Query builder
 diesel = { version = "1.3.2", features = ["sqlite", "chrono", "r2d2"] }
 diesel_migrations = { version = "1.3.0", features = ["sqlite"] }

 # Bundled SQLite
-libsqlite3-sys = { version = "0.9.1", features = ["bundled"] }
+libsqlite3-sys = { version = "0.9.3", features = ["bundled"] }

 # Crypto library
 ring = { version = "= 0.11.0", features = ["rsa_signing"] }
@@ -34,7 +34,7 @@ ring = { version = "= 0.11.0", features = ["rsa_signing"] }
 uuid = { version = "0.6.5", features = ["v4"] }

 # Date and time library for Rust
-chrono = "0.4.4"
+chrono = "0.4.5"

 # TOTP library
 oath = "0.10.2"
@@ -52,7 +52,7 @@ u2f = "0.1.2"
 dotenv = { version = "0.13.0", default-features = false }

 # Lazy static macro
-lazy_static = "1.0.1"
+lazy_static = "1.1.0"

 # Numerical libraries
 num-traits = "0.2.5"

Dockerfile (28 changes)

@@ -2,31 +2,27 @@
 # https://docs.docker.com/develop/develop-images/multistage-build/
 # https://whitfin.io/speeding-up-rust-docker-builds/
 ####################### VAULT BUILD IMAGE #######################
-FROM node:9-alpine as vault
+FROM node:8-alpine as vault

-ENV VAULT_VERSION "1.27.0"
-ENV URL "https://github.com/bitwarden/web/archive/v${VAULT_VERSION}.tar.gz"
+ENV VAULT_VERSION "v2.1.1"
+ENV URL "https://github.com/bitwarden/web.git"

 RUN apk add --update-cache --upgrade \
     curl \
     git \
-    tar \
-    && npm install -g \
-        gulp-cli \
-        gulp
+    tar

-RUN mkdir /web-build \
-    && cd /web-build \
-    && curl -L "${URL}" | tar -xvz --strip-components=1
+RUN git clone -b $VAULT_VERSION --depth 1 $URL web-build

 WORKDIR /web-build
-COPY /docker/settings.Production.json /web-build/
+COPY /docker/set-vault-baseurl.patch /web-build/
+RUN git apply set-vault-baseurl.patch

-RUN git config --global url."https://github.com/".insteadOf ssh://git@github.com/ \
-    && npm install \
-    && gulp dist:selfHosted \
-    && mv dist /web-vault
+RUN npm run sub:init && npm install
+
+RUN npm run dist \
+    && mv build /web-vault

 ########################## BUILD IMAGE ##########################
 # We need to use the Rust build image, because
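The multi-stage Dockerfile above now clones and patches the Vault at the pinned `v2.1.1` tag in the node stage before the Rust stage builds the server. A typical local build, assuming the repository root as the build context (the tag name is illustrative), would be something like:

```sh
# Build the combined server + vault image from the repository root.
docker build -t bitwarden_rs .
```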

README.md (59 changes)

@@ -1,9 +1,19 @@
-This is Bitwarden server API implementation written in rust compatible with [upstream Bitwarden clients](https://bitwarden.com/#download)*, ideal for self-hosted deployment where running official resource-heavy service might not be ideal.
+### This is a Bitwarden server API implementation written in Rust compatible with [upstream Bitwarden clients](https://bitwarden.com/#download)*, perfect for self-hosted deployment where running the official resource-heavy service might not be ideal.
+
+---
+
+[![Travis Build Status](https://travis-ci.org/dani-garcia/bitwarden_rs.svg?branch=master)](https://travis-ci.org/dani-garcia/bitwarden_rs)
+[![Dependency Status](https://deps.rs/repo/github/dani-garcia/bitwarden_rs/status.svg)](https://deps.rs/repo/github/dani-garcia/bitwarden_rs)
+[![GitHub Release](https://img.shields.io/github/release/dani-garcia/bitwarden_rs.svg)](https://github.com/dani-garcia/bitwarden_rs/releases/latest)
+[![GPL-3.0 Licensed](https://img.shields.io/github/license/dani-garcia/bitwarden_rs.svg)](https://github.com/dani-garcia/bitwarden_rs/blob/master/LICENSE.txt)
+[![Matrix Chat](https://matrix.to/img/matrix-badge.svg)](https://matrix.to/#/#bitwarden_rs:matrix.org)

 Image is based on [Rust implementation of Bitwarden API](https://github.com/dani-garcia/bitwarden_rs).

 _*Note, that this project is not associated with the [Bitwarden](https://bitwarden.com/) project nor 8bit Solutions LLC._
+
+---

 **Table of contents**

 - [Features](#features)
@@ -22,6 +32,7 @@ _*Note, that this project is not associated with the [Bitwarden](https://bitward
   - [icons cache](#icons-cache)
   - [Changing the API request size limit](#changing-the-api-request-size-limit)
   - [Changing the number of workers](#changing-the-number-of-workers)
+  - [Disabling or overriding the Vault interface hosting](#disabling-or-overriding-the-vault-interface-hosting)
   - [Other configuration](#other-configuration)
 - [Building your own image](#building-your-own-image)
 - [Building binary](#building-binary)
@@ -33,6 +44,10 @@ _*Note, that this project is not associated with the [Bitwarden](https://bitward
   - [3. the key files](#3-the-key-files)
   - [4. Icon Cache](#4-icon-cache)
 - [Running the server with non-root user](#running-the-server-with-non-root-user)
+- [Differences from upstream API implementation](#differences-from-upstream-api-implementation)
+  - [Changing user email](#changing-user-email)
+  - [Creating organization](#creating-organization)
+  - [Inviting users into organization](#inviting-users-into-organization)
 - [Get in touch](#get-in-touch)

 ## Features
@@ -134,10 +149,9 @@ Where:
 ```sh
 docker run -d --name bitwarden \
-  -e ROCKET_TLS={certs='"/ssl/certs.pem",key="/ssl/key.pem"}' \
+  -e ROCKET_TLS='{certs="/ssl/certs.pem",key="/ssl/key.pem"}' \
   -v /ssl/keys/:/ssl/ \
   -v /bw-data/:/data/ \
-  -v /icon_cache/ \
   -p 443:80 \
   mprasil/bitwarden:latest
 ```
@@ -248,6 +262,30 @@ docker run -d --name bitwarden \
   mprasil/bitwarden:latest
 ```

+### Disabling or overriding the Vault interface hosting
+
+As a convenience bitwarden_rs image will also host static files for Vault web interface. You can disable this static file hosting completely by setting the WEB_VAULT_ENABLED variable.
+
+```sh
+docker run -d --name bitwarden \
+  -e WEB_VAULT_ENABLED=false \
+  -v /bw-data/:/data/ \
+  -p 80:80 \
+  mprasil/bitwarden:latest
+```
+
+Alternatively you can override the Vault files and provide your own static files to host. You can do that by mounting a path with your files over the `/web-vault` directory in the container. Just make sure the directory contains at least `index.html` file.
+
+```sh
+docker run -d --name bitwarden \
+  -v /path/to/static/files_directory:/web-vault \
+  -v /bw-data/:/data/ \
+  -p 80:80 \
+  mprasil/bitwarden:latest
+```
+
+Note that you can also change the path where bitwarden_rs looks for static files by providing the `WEB_VAULT_FOLDER` environment variable with the path.
+
 ### Other configuration

 Though this is unlikely to be required in small deployment, you can fine-tune some other settings like number of workers using environment variables that are processed by [Rocket](https://rocket.rs), please see details in [documentation](https://rocket.rs/guide/configuration/#environment-variables).
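For the `WEB_VAULT_FOLDER` option added in the hunk above, a plausible combination with a volume mount might look like the following sketch (the `/custom-vault` container path is an assumption for illustration):

```sh
docker run -d --name bitwarden \
  -e WEB_VAULT_FOLDER=/custom-vault \
  -v /path/to/static/files_directory:/custom-vault \
  -v /bw-data/:/data/ \
  -p 80:80 \
  mprasil/bitwarden:latest
```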
@@ -313,6 +351,21 @@ docker run -d --name bitwarden \
   -p 80:8080 \
   mprasil/bitwarden:latest
 ```

+## Differences from upstream API implementation
+
+### Changing user email
+
+Because we don't have any SMTP functionality at the moment, there's no way to deliver the verification token when you try to change the email. User just needs to enter any random token to continue and the change will be applied.
+
+### Creating organization
+
+We use upstream Vault interface directly without any (significant) changes, this is why user is presented with paid options when creating organization. To create an organization, just use the free option, none of the limits apply when using bitwarden_rs as back-end API and after the organization is created it should behave like Enterprise organization.
+
+### Inviting users into organization
+
+The users must already be registered on your server to invite them, because we can't send the invitation via email. The invited users won't get the invitation email, instead they will appear in the interface as if they already accepted the invitation. Organization admin then just needs to confirm them to be proper Organization members and to give them access to the shared secrets.
+
 ## Get in touch

 To ask an question, [raising an issue](https://github.com/dani-garcia/bitwarden_rs/issues/new) is fine, also please report any bugs spotted here.

docker/set-vault-baseurl.patch (23 changes)

@@ -0,0 +1,23 @@
+--- a/src/app/services/services.module.ts
++++ b/src/app/services/services.module.ts
+@@ -116,17 +116,15 @@ const exportService = new ExportService(folderService, cipherService, apiService
+ const importService = new ImportService(cipherService, folderService, apiService, i18nService, collectionService);
+ const auditService = new AuditService(cryptoFunctionService, apiService);
+
+-const analytics = new Analytics(window, () => platformUtilsService.isDev() || platformUtilsService.isSelfHost(),
++const analytics = new Analytics(window, () => platformUtilsService.isDev() || platformUtilsService.isSelfHost() || true,
+     platformUtilsService, storageService, appIdService);
+ containerService.attachToWindow(window);
+
+ export function initFactory(): Function {
+     return async () => {
+         await (storageService as HtmlStorageService).init();
+-        const isDev = platformUtilsService.isDev();
+-        if (!isDev && platformUtilsService.isSelfHost()) {
+-            environmentService.baseUrl = window.location.origin;
+-        }
++        const isDev = false;
++        environmentService.baseUrl = window.location.origin;
+
+         await apiService.setUrls({
+             base: isDev ? null : window.location.origin,
+             api: isDev ? 'http://localhost:4000' : null,

docker/settings.Production.json (9 changes)

@@ -1,9 +0,0 @@
-{
-  "appSettings": {
-    "apiUri": "/api",
-    "identityUri": "/identity",
-    "iconsUri": "/icons",
-    "stripeKey": "",
-    "braintreeKey": ""
-  }
-}

rust-toolchain (2 changes)

@@ -1 +1 @@
-nightly-2018-06-26
+nightly-2018-08-24

src/api/core/accounts.rs (33 changes)

@@ -73,6 +73,11 @@ struct ProfileData {
     Name: String,
 }

+#[put("/accounts/profile", data = "<data>")]
+fn put_profile(data: JsonUpcase<ProfileData>, headers: Headers, conn: DbConn) -> JsonResult {
+    post_profile(data, headers, conn)
+}
+
 #[post("/accounts/profile", data = "<data>")]
 fn post_profile(data: JsonUpcase<ProfileData>, headers: Headers, conn: DbConn) -> JsonResult {
     let data: ProfileData = data.into_inner().data;
@@ -275,3 +280,31 @@ fn password_hint(data: JsonUpcase<PasswordHintData>, conn: DbConn) -> EmptyResul
         None => Ok(()),
     }
 }
+
+#[derive(Deserialize)]
+#[allow(non_snake_case)]
+struct PreloginData {
+    Email: String,
+}
+
+#[post("/accounts/prelogin", data = "<data>")]
+fn prelogin(data: JsonUpcase<PreloginData>, conn: DbConn) -> JsonResult {
+    let data: PreloginData = data.into_inner().data;
+
+    match User::find_by_mail(&data.Email, &conn) {
+        Some(user) => {
+            let kdf_type = 0; // PBKDF2: 0
+            let _server_iter = user.password_iterations;
+            let client_iter = 5000; // TODO: Make iterations user configurable
+
+            Ok(Json(json!({
+                "Kdf": kdf_type,
+                "KdfIterations": client_iter
+            })))
+        },
+        None => err!("Invalid user"),
+    }
+}
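The new `prelogin` handler always reports PBKDF2 (`Kdf: 0`) and a fixed 5000 client-side iterations regardless of the stored server-side value (the `_server_iter` binding is read but unused). A hypothetical exchange against a local instance, where the host and email are assumptions:

```sh
# core::routes are mounted under /api (see src/main.rs below).
curl -s -X POST http://localhost:80/api/accounts/prelogin \
  -H 'Content-Type: application/json' \
  -d '{"Email": "user@example.com"}'
# For a known user this returns: {"Kdf":0,"KdfIterations":5000}
```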

src/api/core/ciphers.rs (91 changes)

@@ -87,6 +87,8 @@ fn get_cipher_details(uuid: String, headers: Headers, conn: DbConn) -> JsonResul
 #[derive(Deserialize, Debug)]
 #[allow(non_snake_case)]
 struct CipherData {
+    // Id is optional as it is included only in bulk share
+    Id: Option<String>,
     // Folder id is not included in import
     FolderId: Option<String>,
     // TODO: Some of these might appear all the time, no need for Option
@@ -242,12 +244,21 @@ fn post_ciphers_import(data: JsonUpcase<ImportData>, headers: Headers, conn: DbC
         cipher.move_to_folder(folder_uuid, &headers.user.uuid.clone(), &conn).ok();
     }

-    Ok(())
+    let mut user = headers.user;
+    match user.update_revision(&conn) {
+        Ok(()) => Ok(()),
+        Err(_) => err!("Failed to update the revision, please log out and log back in to finish import.")
+    }
+}
+
+#[put("/ciphers/<uuid>/admin", data = "<data>")]
+fn put_cipher_admin(uuid: String, data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn) -> JsonResult {
+    put_cipher(uuid, data, headers, conn)
 }

 #[post("/ciphers/<uuid>/admin", data = "<data>")]
 fn post_cipher_admin(uuid: String, data: JsonUpcase<CipherData>, headers: Headers, conn: DbConn) -> JsonResult {
+    // TODO: Implement this correctly
     post_cipher(uuid, data, headers, conn)
 }
@@ -285,6 +296,11 @@ fn post_collections_update(uuid: String, data: JsonUpcase<CollectionsAdminData>,
     post_collections_admin(uuid, data, headers, conn)
 }

+#[put("/ciphers/<uuid>/collections-admin", data = "<data>")]
+fn put_collections_admin(uuid: String, data: JsonUpcase<CollectionsAdminData>, headers: Headers, conn: DbConn) -> EmptyResult {
+    post_collections_admin(uuid, data, headers, conn)
+}
+
 #[post("/ciphers/<uuid>/collections-admin", data = "<data>")]
 fn post_collections_admin(uuid: String, data: JsonUpcase<CollectionsAdminData>, headers: Headers, conn: DbConn) -> EmptyResult {
     let data: CollectionsAdminData = data.into_inner().data;
@@ -332,6 +348,65 @@ struct ShareCipherData {
 fn post_cipher_share(uuid: String, data: JsonUpcase<ShareCipherData>, headers: Headers, conn: DbConn) -> JsonResult {
     let data: ShareCipherData = data.into_inner().data;

+    share_cipher_by_uuid(&uuid, data, &headers, &conn)
+}
+
+#[put("/ciphers/<uuid>/share", data = "<data>")]
+fn put_cipher_share(uuid: String, data: JsonUpcase<ShareCipherData>, headers: Headers, conn: DbConn) -> JsonResult {
+    let data: ShareCipherData = data.into_inner().data;
+
+    share_cipher_by_uuid(&uuid, data, &headers, &conn)
+}
+
+#[derive(Deserialize)]
+#[allow(non_snake_case)]
+struct ShareSelectedCipherData {
+    Ciphers: Vec<CipherData>,
+    CollectionIds: Vec<String>
+}
+
+#[put("/ciphers/share", data = "<data>")]
+fn put_cipher_share_seleted(data: JsonUpcase<ShareSelectedCipherData>, headers: Headers, conn: DbConn) -> EmptyResult {
+    let mut data: ShareSelectedCipherData = data.into_inner().data;
+    let mut cipher_ids: Vec<String> = Vec::new();
+
+    if data.Ciphers.len() == 0 {
+        err!("You must select at least one cipher.")
+    }
+
+    if data.CollectionIds.len() == 0 {
+        err!("You must select at least one collection.")
+    }
+
+    for cipher in data.Ciphers.iter() {
+        match cipher.Id {
+            Some(ref id) => cipher_ids.push(id.to_string()),
+            None => err!("Request missing ids field")
+        };
+    }
+
+    let attachments = Attachment::find_by_ciphers(cipher_ids, &conn);
+
+    if attachments.len() > 0 {
+        err!("Ciphers should not have any attachments.")
+    }
+
+    while let Some(cipher) = data.Ciphers.pop() {
+        let mut shared_cipher_data = ShareCipherData {
+            Cipher: cipher,
+            CollectionIds: data.CollectionIds.clone()
+        };
+
+        match shared_cipher_data.Cipher.Id.take() {
+            Some(id) => share_cipher_by_uuid(&id, shared_cipher_data , &headers, &conn)?,
+            None => err!("Request missing ids field")
+        };
+    }
+
+    Ok(())
+}
+
+fn share_cipher_by_uuid(uuid: &str, data: ShareCipherData, headers: &Headers, conn: &DbConn) -> JsonResult {
     let mut cipher = match Cipher::find_by_uuid(&uuid, &conn) {
         Some(cipher) => {
             if cipher.is_write_accessible_to_user(&headers.user.uuid, &conn) {
@@ -456,7 +531,7 @@ fn delete_cipher(uuid: String, headers: Headers, conn: DbConn) -> EmptyResult {
     _delete_cipher_by_uuid(&uuid, &headers, &conn)
 }

-#[post("/ciphers/delete", data = "<data>")]
+#[delete("/ciphers", data = "<data>")]
 fn delete_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn) -> EmptyResult {
     let data: Value = data.into_inner().data;
@@ -477,6 +552,11 @@ fn delete_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbCon
     Ok(())
 }

+#[post("/ciphers/delete", data = "<data>")]
+fn delete_cipher_selected_post(data: JsonUpcase<Value>, headers: Headers, conn: DbConn) -> EmptyResult {
+    delete_cipher_selected(data, headers, conn)
+}
+
 #[post("/ciphers/move", data = "<data>")]
 fn move_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn) -> EmptyResult {
     let data = data.into_inner().data;
@@ -529,6 +609,11 @@ fn move_cipher_selected(data: JsonUpcase<Value>, headers: Headers, conn: DbConn)
     Ok(())
 }

+#[put("/ciphers/move", data = "<data>")]
+fn move_cipher_selected_put(data: JsonUpcase<Value>, headers: Headers, conn: DbConn) -> EmptyResult {
+    move_cipher_selected(data, headers, conn)
+}
+
 #[post("/ciphers/purge", data = "<data>")]
 fn delete_all(data: JsonUpcase<PasswordData>, headers: Headers, conn: DbConn) -> EmptyResult {
     let data: PasswordData = data.into_inner().data;

src/api/core/mod.rs (18 changes)

@@ -14,6 +14,7 @@ pub fn routes() -> Vec<Route> {
     routes![
         register,
         profile,
+        put_profile,
         post_profile,
         get_public_keys,
         post_keys,
@@ -24,6 +25,7 @@ pub fn routes() -> Vec<Route> {
         delete_account,
         revision_date,
         password_hint,
+        prelogin,

         sync,
@@ -32,6 +34,7 @@ pub fn routes() -> Vec<Route> {
         get_cipher_admin,
         get_cipher_details,
         post_ciphers,
+        put_cipher_admin,
         post_ciphers_admin,
         post_ciphers_import,
         post_attachment,
@@ -42,14 +45,18 @@ pub fn routes() -> Vec<Route> {
         delete_attachment,
         post_cipher_admin,
         post_cipher_share,
+        put_cipher_share,
+        put_cipher_share_seleted,
         post_cipher,
         put_cipher,
         delete_cipher_post,
         delete_cipher_post_admin,
         delete_cipher,
         delete_cipher_selected,
+        delete_cipher_selected_post,
         delete_all,
         move_cipher_selected,
+        move_cipher_selected_put,

         get_folders,
         get_folder,
@@ -63,33 +70,44 @@ pub fn routes() -> Vec<Route> {
         get_recover,
         recover,
         disable_twofactor,
+        disable_twofactor_put,
         generate_authenticator,
         activate_authenticator,
+        activate_authenticator_put,
         generate_u2f,
         activate_u2f,
+        activate_u2f_put,

         get_organization,
         create_organization,
         delete_organization,
+        post_delete_organization,
         leave_organization,
         get_user_collections,
         get_org_collections,
         get_org_collection_detail,
         get_collection_users,
+        put_organization,
         post_organization,
         post_organization_collections,
+        delete_organization_collection_user,
         post_organization_collection_delete_user,
         post_organization_collection_update,
+        put_organization_collection_update,
+        delete_organization_collection,
         post_organization_collection_delete,
         post_collections_update,
         post_collections_admin,
+        put_collections_admin,
         get_org_details,
         get_org_users,
         send_invite,
         confirm_invite,
         get_user,
         edit_user,
+        put_organization_user,
         delete_user,
+        post_delete_user,

         clear_device_token,
         put_device_token,

src/api/core/organizations.rs (74 changes)

@@ -8,6 +8,8 @@ use db::models::*;
 use api::{PasswordData, JsonResult, EmptyResult, NumberOrString, JsonUpcase};
 use auth::{Headers, AdminHeaders, OwnerHeaders};

+use serde::{Deserialize, Deserializer};
+
 #[derive(Deserialize)]
 #[allow(non_snake_case)]
@@ -17,7 +19,7 @@ struct OrgData {
     Key: String,
     Name: String,
     #[serde(rename = "PlanType")]
-    _PlanType: String, // Ignored, always use the same plan
+    _PlanType: NumberOrString, // Ignored, always use the same plan
 }

 #[derive(Deserialize, Debug)]
@@ -55,7 +57,7 @@ fn create_organization(headers: Headers, data: JsonUpcase<OrgData>, conn: DbConn
     Ok(Json(org.to_json()))
 }

-#[post("/organizations/<org_id>/delete", data = "<data>")]
+#[delete("/organizations/<org_id>", data = "<data>")]
 fn delete_organization(org_id: String, data: JsonUpcase<PasswordData>, headers: OwnerHeaders, conn: DbConn) -> EmptyResult {
     let data: PasswordData = data.into_inner().data;
     let password_hash = data.MasterPasswordHash;
@@ -73,6 +75,11 @@ fn delete_organization(org_id: String, data: JsonUpcase<PasswordData>, headers:
     }
 }

+#[post("/organizations/<org_id>/delete", data = "<data>")]
+fn post_delete_organization(org_id: String, data: JsonUpcase<PasswordData>, headers: OwnerHeaders, conn: DbConn) -> EmptyResult {
+    delete_organization(org_id, data, headers, conn)
+}
+
 #[post("/organizations/<org_id>/leave")]
 fn leave_organization(org_id: String, headers: Headers, conn: DbConn) -> EmptyResult {
     match UserOrganization::find_by_user_and_org(&headers.user.uuid, &org_id, &conn) {
@@ -104,6 +111,11 @@ fn get_organization(org_id: String, _headers: OwnerHeaders, conn: DbConn) -> Jso
     }
 }

+#[put("/organizations/<org_id>", data = "<data>")]
+fn put_organization(org_id: String, headers: OwnerHeaders, data: JsonUpcase<OrganizationUpdateData>, conn: DbConn) -> JsonResult {
+    post_organization(org_id, headers, data, conn)
+}
+
 #[post("/organizations/<org_id>", data = "<data>")]
 fn post_organization(org_id: String, _headers: OwnerHeaders, data: JsonUpcase<OrganizationUpdateData>, conn: DbConn) -> JsonResult {
     let data: OrganizationUpdateData = data.into_inner().data;
@@ -164,6 +176,11 @@ fn post_organization_collections(org_id: String, _headers: AdminHeaders, data: J
     Ok(Json(collection.to_json()))
 }

+#[put("/organizations/<org_id>/collections/<col_id>", data = "<data>")]
+fn put_organization_collection_update(org_id: String, col_id: String, headers: AdminHeaders, data: JsonUpcase<NewCollectionData>, conn: DbConn) -> JsonResult {
+    post_organization_collection_update(org_id, col_id, headers, data, conn)
+}
+
 #[post("/organizations/<org_id>/collections/<col_id>", data = "<data>")]
 fn post_organization_collection_update(org_id: String, col_id: String, _headers: AdminHeaders, data: JsonUpcase<NewCollectionData>, conn: DbConn) -> JsonResult {
     let data: NewCollectionData = data.into_inner().data;
@@ -188,8 +205,9 @@ fn post_organization_collection_update(org_id: String, col_id: String, _headers:
     Ok(Json(collection.to_json()))
 }

-#[post("/organizations/<org_id>/collections/<col_id>/delete-user/<org_user_id>")]
-fn post_organization_collection_delete_user(org_id: String, col_id: String, org_user_id: String, _headers: AdminHeaders, conn: DbConn) -> EmptyResult {
+#[delete("/organizations/<org_id>/collections/<col_id>/user/<org_user_id>")]
+fn delete_organization_collection_user(org_id: String, col_id: String, org_user_id: String, _headers: AdminHeaders, conn: DbConn) -> EmptyResult {
     let collection = match Collection::find_by_uuid(&col_id, &conn) {
         None => err!("Collection not found"),
         Some(collection) => if collection.org_uuid == org_id {
@@ -215,17 +233,13 @@ fn post_organization_collection_delete_user(org_id: String, col_id: String, org_
     }
 }

-#[derive(Deserialize, Debug)]
-#[allow(non_snake_case)]
-struct DeleteCollectionData {
-    Id: String,
-    OrgId: String,
+#[post("/organizations/<org_id>/collections/<col_id>/delete-user/<org_user_id>")]
+fn post_organization_collection_delete_user(org_id: String, col_id: String, org_user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult {
+    delete_organization_collection_user(org_id, col_id, org_user_id, headers, conn)
 }

-#[post("/organizations/<org_id>/collections/<col_id>/delete", data = "<data>")]
-fn post_organization_collection_delete(org_id: String, col_id: String, _headers: AdminHeaders, data: JsonUpcase<DeleteCollectionData>, conn: DbConn) -> EmptyResult {
-    let _data: DeleteCollectionData = data.into_inner().data;
+#[delete("/organizations/<org_id>/collections/<col_id>")]
+fn delete_organization_collection(org_id: String, col_id: String, _headers: AdminHeaders, conn: DbConn) -> EmptyResult {
     match Collection::find_by_uuid(&col_id, &conn) {
         None => err!("Collection not found"),
         Some(collection) => if collection.org_uuid == org_id {
@@ -239,6 +253,18 @@ fn post_organization_collection_delete(org_id: String, col_id: String, _headers:
     }
 }

+#[derive(Deserialize, Debug)]
+#[allow(non_snake_case)]
+struct DeleteCollectionData {
+    Id: String,
+    OrgId: String,
+}
+
+#[post("/organizations/<org_id>/collections/<col_id>/delete", data = "<_data>")]
+fn post_organization_collection_delete(org_id: String, col_id: String, headers: AdminHeaders, _data: JsonUpcase<DeleteCollectionData>, conn: DbConn) -> EmptyResult {
+    delete_organization_collection(org_id, col_id, headers, conn)
+}
+
 #[get("/organizations/<org_id>/collections/<coll_id>/details")]
 fn get_org_collection_detail(org_id: String, coll_id: String, headers: AdminHeaders, conn: DbConn) -> JsonResult {
     match Collection::find_by_uuid_and_user(&coll_id, &headers.user.uuid, &conn) {
@@ -308,6 +334,14 @@ fn get_org_users(org_id: String, headers: AdminHeaders, conn: DbConn) -> JsonRes
     })))
 }

+fn deserialize_collections<'de, D>(deserializer: D) -> Result<Vec<CollectionData>, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    // Deserialize null to empty Vec
+    Deserialize::deserialize(deserializer).or(Ok(vec![]))
+}
+
 #[derive(Deserialize)]
 #[allow(non_snake_case)]
 struct CollectionData {
@@ -320,6 +354,7 @@ struct CollectionData {
 struct InviteData {
     Emails: Vec<String>,
     Type: NumberOrString,
+    #[serde(deserialize_with = "deserialize_collections")]
     Collections: Vec<CollectionData>,
     AccessAll: Option<bool>,
 }
@@ -424,10 +459,16 @@ fn get_user(org_id: String, user_id: String, _headers: AdminHeaders, conn: DbCon
 #[allow(non_snake_case)]
 struct EditUserData {
     Type: NumberOrString,
+    #[serde(deserialize_with = "deserialize_collections")]
     Collections: Vec<CollectionData>,
     AccessAll: bool,
 }

+#[put("/organizations/<org_id>/users/<user_id>", data = "<data>", rank = 1)]
+fn put_organization_user(org_id: String, user_id: String, data: JsonUpcase<EditUserData>, headers: AdminHeaders, conn: DbConn) -> EmptyResult {
+    edit_user(org_id, user_id, data, headers, conn)
+}
+
 #[post("/organizations/<org_id>/users/<user_id>", data = "<data>", rank = 1)]
 fn edit_user(org_id: String, user_id: String, data: JsonUpcase<EditUserData>, headers: AdminHeaders, conn: DbConn) -> EmptyResult {
     let data: EditUserData = data.into_inner().data;
@@ -494,7 +535,7 @@ fn edit_user(org_id: String, user_id: String, data: JsonUpcase<EditUserData>, he
     Ok(())
 }

-#[post("/organizations/<org_id>/users/<user_id>/delete")]
+#[delete("/organizations/<org_id>/users/<user_id>")]
 fn delete_user(org_id: String, user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult {
     let user_to_delete = match UserOrganization::find_by_uuid(&user_id, &conn) {
         Some(user) => user,
@@ -522,3 +563,8 @@ fn delete_user(org_id: String, user_id: String, headers: AdminHeaders, conn: DbC
         Err(_) => err!("Failed deleting user from organization")
     }
 }
+
+#[post("/organizations/<org_id>/users/<user_id>/delete")]
+fn post_delete_user(org_id: String, user_id: String, headers: AdminHeaders, conn: DbConn) -> EmptyResult {
+    delete_user(org_id, user_id, headers, conn)
+}

src/api/core/two_factor.rs (23 changes)

@@ -112,6 +112,15 @@ fn disable_twofactor(
     })))
 }

+#[put("/two-factor/disable", data = "<data>")]
+fn disable_twofactor_put(
+    data: JsonUpcase<DisableTwoFactorData>,
+    headers: Headers,
+    conn: DbConn,
+) -> JsonResult {
+    disable_twofactor(data, headers, conn)
+}
+
 #[post("/two-factor/get-authenticator", data = "<data>")]
 fn generate_authenticator(
     data: JsonUpcase<PasswordData>,
@@ -194,6 +203,15 @@ fn activate_authenticator(
     })))
 }

+#[put("/two-factor/authenticator", data = "<data>")]
+fn activate_authenticator_put(
+    data: JsonUpcase<EnableAuthenticatorData>,
+    headers: Headers,
+    conn: DbConn,
+) -> JsonResult {
+    activate_authenticator(data, headers, conn)
+}
+
 fn _generate_recover_code(user: &mut User, conn: &DbConn) {
     if user.totp_recover.is_none() {
         let totp_recover = BASE32.encode(&crypto::get_random(vec![0u8; 20]));
@@ -356,6 +374,11 @@ fn activate_u2f(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn)
     }
 }

+#[put("/two-factor/u2f", data = "<data>")]
+fn activate_u2f_put(data: JsonUpcase<EnableU2FData>, headers: Headers, conn: DbConn) -> JsonResult {
+    activate_u2f(data,headers, conn)
+}
+
 fn _create_u2f_challenge(user_uuid: &str, type_: TwoFactorType, conn: &DbConn) -> Challenge {
     let challenge = U2F.generate_challenge().unwrap();

src/api/identity.rs (23 changes)

@@ -1,4 +1,5 @@
 use std::collections::HashMap;
+use std::net::{IpAddr, Ipv4Addr, SocketAddr};

 use rocket::request::{self, Form, FormItems, FromForm, FromRequest, Request};
 use rocket::{Outcome, Route};
@@ -21,12 +22,12 @@ pub fn routes() -> Vec<Route> {
 }

 #[post("/connect/token", data = "<connect_data>")]
-fn login(connect_data: Form<ConnectData>, device_type: DeviceType, conn: DbConn) -> JsonResult {
+fn login(connect_data: Form<ConnectData>, device_type: DeviceType, conn: DbConn, socket: Option<SocketAddr>) -> JsonResult {
     let data = connect_data.get();

     match data.grant_type {
         GrantType::RefreshToken => _refresh_login(data, device_type, conn),
-        GrantType::Password => _password_login(data, device_type, conn),
+        GrantType::Password => _password_login(data, device_type, conn, socket),
     }
 }

@@ -57,7 +58,13 @@ fn _refresh_login(data: &ConnectData, _device_type: DeviceType, conn: DbConn) ->
     })))
 }

-fn _password_login(data: &ConnectData, device_type: DeviceType, conn: DbConn) -> JsonResult {
+fn _password_login(data: &ConnectData, device_type: DeviceType, conn: DbConn, remote: Option<SocketAddr>) -> JsonResult {
+    // Get the ip for error reporting
+    let ip = match remote {
+        Some(ip) => ip.ip(),
+        None => IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)),
+    };
+
     // Validate scope
     let scope = data.get("scope");
     if scope != "api offline_access" {
@@ -68,13 +75,19 @@ fn _password_login(data: &ConnectData, device_type: DeviceType, conn: DbConn) ->
     let username = data.get("username");
     let user = match User::find_by_mail(username, &conn) {
         Some(user) => user,
-        None => err!("Username or password is incorrect. Try again."),
+        None => err!(format!(
+            "Username or password is incorrect. Try again. IP: {}. Username: {}.",
+            ip, username
+        )),
     };

     // Check password
     let password = data.get("password");
     if !user.check_valid_password(password) {
-        err!("Username or password is incorrect. Try again.")
+        err!(format!(
+            "Username or password is incorrect. Try again. IP: {}. Username: {}.",
+            ip, username
+        ))
     }

     // Let's only use the header and ignore the 'devicetype' parameter
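With this change a failed login reports the remote IP and username through the reworked `err!` macro (see `src/util.rs` at the end of this diff). A hypothetical failed attempt, where the host, the credentials, and the exact set of form fields a real client sends are all assumptions:

```sh
# identity routes are mounted under /identity (see src/main.rs below).
curl -s -X POST http://localhost:80/identity/connect/token \
  -d 'grant_type=password' \
  -d 'scope=api offline_access' \
  -d 'username=user@example.com' \
  -d 'password=wrong-hash'
# Expected error shape per the single-argument err! macro:
# {"Message": "The model state is invalid",
#  "ValidationErrors": {"": ["Username or password is incorrect. Try again. IP: 203.0.113.5. Username: user@example.com."]},
#  "ExceptionMessage": null, "ExceptionStackTrace": null,
#  "InnerExceptionMessage": null, "Object": "error"}
```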

src/api/mod.rs (2 changes)

@@ -2,11 +2,13 @@ pub(crate) mod core;
 mod icons;
 mod identity;
 mod web;
+mod notifications;

 pub use self::core::routes as core_routes;
 pub use self::icons::routes as icons_routes;
 pub use self::identity::routes as identity_routes;
 pub use self::web::routes as web_routes;
+pub use self::notifications::routes as notifications_routes;

 use rocket::response::status::BadRequest;
 use rocket_contrib::Json;

src/api/notifications.rs (31 changes)

@@ -0,0 +1,31 @@
+use rocket::Route;
+use rocket_contrib::Json;
+
+use db::DbConn;
+use api::JsonResult;
+use auth::Headers;
+
+pub fn routes() -> Vec<Route> {
+    routes![negotiate]
+}
+
+#[post("/hub/negotiate")]
+fn negotiate(_headers: Headers, _conn: DbConn) -> JsonResult {
+    use data_encoding::BASE64URL;
+    use crypto;
+
+    // Store this in db?
+    let conn_id = BASE64URL.encode(&crypto::get_random(vec![0u8; 16]));
+
+    // TODO: Implement transports
+    // Rocket WS support: https://github.com/SergioBenitez/Rocket/issues/90
+    // Rocket SSE support: https://github.com/SergioBenitez/Rocket/issues/33
+    Ok(Json(json!({
+        "connectionId": conn_id,
+        "availableTransports":[
+            // {"transport":"WebSockets", "transferFormats":["Text","Binary"]},
+            // {"transport":"ServerSentEvents", "transferFormats":["Text"]},
+            // {"transport":"LongPolling", "transferFormats":["Text","Binary"]}
+        ]
+    })))
+}
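Since no transports are implemented yet, `negotiate` only hands back a random connection id and an empty transport list. A hypothetical call, where the `/notifications` prefix comes from the new mount in `src/main.rs` below and the auth token is a placeholder (the `Headers` guard implies an authenticated request):

```sh
curl -s -X POST http://localhost:80/notifications/hub/negotiate \
  -H 'Authorization: Bearer <access_token>'
# => {"connectionId":"<random id>","availableTransports":[]}
```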

src/db/models/attachment.rs (6 changes)

@@ -111,4 +111,10 @@ impl Attachment {
             .filter(attachments::cipher_uuid.eq(cipher_uuid))
             .load::<Self>(&**conn).expect("Error loading attachments")
     }
+
+    pub fn find_by_ciphers(cipher_uuids: Vec<String>, conn: &DbConn) -> Vec<Self> {
+        attachments::table
+            .filter(attachments::cipher_uuid.eq_any(cipher_uuids))
+            .load::<Self>(&**conn).expect("Error loading attachments")
+    }
 }

src/db/models/cipher.rs (23 changes)

@@ -3,7 +3,7 @@ use serde_json::Value as JsonValue;

 use uuid::Uuid;

-use super::{User, Organization, Attachment, FolderCipher, CollectionCipher, UserOrgType, UserOrgStatus};
+use super::{User, Organization, Attachment, FolderCipher, CollectionCipher, UserOrganization, UserOrgType, UserOrgStatus};

 #[derive(Debug, Identifiable, Queryable, Insertable, Associations)]
 #[table_name = "ciphers"]
@@ -122,7 +122,23 @@ impl Cipher {
         json_object
     }

+    pub fn update_users_revision(&self, conn: &DbConn) {
+        match self.user_uuid {
+            Some(ref user_uuid) => User::update_uuid_revision(&user_uuid, conn),
+            None => { // Belongs to Organization, need to update affected users
+                if let Some(ref org_uuid) = self.organization_uuid {
+                    UserOrganization::find_by_cipher_and_org(&self.uuid, &org_uuid, conn)
+                        .iter()
+                        .for_each(|user_org| {
+                            User::update_uuid_revision(&user_org.user_uuid, conn)
+                        });
+                }
+            }
+        };
+    }
+
     pub fn save(&mut self, conn: &DbConn) -> bool {
+        self.update_users_revision(conn);
         self.updated_at = Utc::now().naive_utc();

         match diesel::replace_into(ciphers::table)
@@ -134,6 +150,8 @@ impl Cipher {
     }

     pub fn delete(self, conn: &DbConn) -> QueryResult<()> {
+        self.update_users_revision(conn);
+
         FolderCipher::delete_all_by_cipher(&self.uuid, &conn)?;
         CollectionCipher::delete_all_by_cipher(&self.uuid, &conn)?;
         Attachment::delete_all_by_cipher(&self.uuid, &conn)?;
@@ -157,6 +175,7 @@ impl Cipher {
             None => {
                 match folder_uuid {
                     Some(new_folder) => {
+                        self.update_users_revision(conn);
                         let folder_cipher = FolderCipher::new(&new_folder, &self.uuid);
                         folder_cipher.save(&conn).or(Err("Couldn't save folder setting"))
                     },
@@ -169,6 +188,7 @@ impl Cipher {
                     if current_folder == new_folder {
                         Ok(()) //nothing to do
                     } else {
+                        self.update_users_revision(conn);
                         match FolderCipher::find_by_folder_and_cipher(&current_folder, &self.uuid, &conn) {
                             Some(current_folder) => {
                                 current_folder.delete(&conn).or(Err("Failed removing old folder mapping"))
@@ -181,6 +201,7 @@ impl Cipher {
                     }
                 },
                 None => {
+                    self.update_users_revision(conn);
                     match FolderCipher::find_by_folder_and_cipher(&current_folder, &self.uuid, &conn) {
                         Some(current_folder) => {
                             current_folder.delete(&conn).or(Err("Failed removing old folder mapping"))

src/db/models/collection.rs (12 changes)

@@ -185,6 +185,8 @@ impl CollectionUser {
     }

     pub fn save(user_uuid: &str, collection_uuid: &str, read_only:bool, conn: &DbConn) -> QueryResult<()> {
+        User::update_uuid_revision(&user_uuid, conn);
+
         diesel::replace_into(users_collections::table)
             .values((
                 users_collections::user_uuid.eq(user_uuid),
@@ -194,6 +196,8 @@ impl CollectionUser {
     }

     pub fn delete(self, conn: &DbConn) -> QueryResult<()> {
+        User::update_uuid_revision(&self.user_uuid, conn);
+
         diesel::delete(users_collections::table
             .filter(users_collections::user_uuid.eq(&self.user_uuid))
             .filter(users_collections::collection_uuid.eq(&self.collection_uuid)))
@@ -216,12 +220,20 @@ impl CollectionUser {
     }

     pub fn delete_all_by_collection(collection_uuid: &str, conn: &DbConn) -> QueryResult<()> {
+        CollectionUser::find_by_collection(&collection_uuid, conn)
+            .iter()
+            .for_each(|collection| {
+                User::update_uuid_revision(&collection.user_uuid, conn)
+            });
+
         diesel::delete(users_collections::table
             .filter(users_collections::collection_uuid.eq(collection_uuid))
         ).execute(&**conn).and(Ok(()))
     }

     pub fn delete_all_by_user(user_uuid: &str, conn: &DbConn) -> QueryResult<()> {
+        User::update_uuid_revision(&user_uuid, conn);
+
         diesel::delete(users_collections::table
             .filter(users_collections::user_uuid.eq(user_uuid))
         ).execute(&**conn).and(Ok(()))

src/db/models/folder.rs (2 changes)

@@ -71,6 +71,7 @@ use db::schema::{folders, folders_ciphers};
 /// Database methods
 impl Folder {
     pub fn save(&mut self, conn: &DbConn) -> bool {
+        User::update_uuid_revision(&self.user_uuid, conn);
         self.updated_at = Utc::now().naive_utc();

         match diesel::replace_into(folders::table)
@@ -82,6 +83,7 @@ impl Folder {
     }

     pub fn delete(self, conn: &DbConn) -> QueryResult<()> {
+        User::update_uuid_revision(&self.user_uuid, conn);
         FolderCipher::delete_all_by_folder(&self.uuid, &conn)?;

         diesel::delete(

src/db/models/organization.rs (37 changes)

@@ -1,6 +1,7 @@
 use serde_json::Value as JsonValue;

 use uuid::Uuid;
+use super::{User, CollectionUser};

 #[derive(Debug, Identifiable, Queryable, Insertable)]
 #[table_name = "organizations"]
@@ -108,12 +109,17 @@ impl UserOrganization {
 use diesel;
 use diesel::prelude::*;
 use db::DbConn;
-use db::schema::organizations;
-use db::schema::users_organizations;
+use db::schema::{organizations, users_organizations, users_collections, ciphers_collections};

 /// Database methods
 impl Organization {
     pub fn save(&mut self, conn: &DbConn) -> bool {
+        UserOrganization::find_by_org(&self.uuid, conn)
+            .iter()
+            .for_each(|user_org| {
+                User::update_uuid_revision(&user_org.user_uuid, conn);
+            });
+
         match diesel::replace_into(organizations::table)
             .values(&*self)
             .execute(&**conn) {
@@ -172,7 +178,6 @@ impl UserOrganization {
     }

     pub fn to_json_user_details(&self, conn: &DbConn) -> JsonValue {
-        use super::User;
         let user = User::find_by_uuid(&self.user_uuid, conn).unwrap();

         json!({
@@ -190,7 +195,6 @@ impl UserOrganization {
     }

     pub fn to_json_collection_user_details(&self, read_only: &bool, conn: &DbConn) -> JsonValue {
-        use super::User;
         let user = User::find_by_uuid(&self.user_uuid, conn).unwrap();

         json!({
@@ -209,7 +213,6 @@ impl UserOrganization {
         let coll_uuids = if self.access_all {
             vec![] // If we have complete access, no need to fill the array
         } else {
-            use super::CollectionUser;
             let collections = CollectionUser::find_by_organization_and_user_uuid(&self.org_uuid, &self.user_uuid, conn);
             collections.iter().map(|c| json!({"Id": c.collection_uuid, "ReadOnly": c.read_only})).collect()
         };
@@ -228,6 +231,8 @@ impl UserOrganization {
     }

     pub fn save(&mut self, conn: &DbConn) -> bool {
+        User::update_uuid_revision(&self.user_uuid, conn);
+
         match diesel::replace_into(users_organizations::table)
             .values(&*self)
             .execute(&**conn) {
@@ -237,7 +242,7 @@ impl UserOrganization {
     }

     pub fn delete(self, conn: &DbConn) -> QueryResult<()> {
-        use super::CollectionUser;
+        User::update_uuid_revision(&self.user_uuid, conn);

         CollectionUser::delete_all_by_user(&self.user_uuid, &conn)?;
@@ -291,6 +296,26 @@ impl UserOrganization {
             .filter(users_organizations::org_uuid.eq(org_uuid))
             .first::<Self>(&**conn).ok()
     }
+
+    pub fn find_by_cipher_and_org(cipher_uuid: &str, org_uuid: &str, conn: &DbConn) -> Vec<Self> {
+        users_organizations::table
+            .filter(users_organizations::org_uuid.eq(org_uuid))
+            .left_join(users_collections::table.on(
+                users_collections::user_uuid.eq(users_organizations::user_uuid)
+            ))
+            .left_join(ciphers_collections::table.on(
+                ciphers_collections::collection_uuid.eq(users_collections::collection_uuid).and(
+                    ciphers_collections::cipher_uuid.eq(&cipher_uuid)
+                )
+            ))
+            .filter(
+                users_organizations::access_all.eq(true).or( // AccessAll..
+                    ciphers_collections::cipher_uuid.eq(&cipher_uuid) // ..or access to collection with cipher
+                )
+            )
+            .select(users_organizations::all_columns)
+            .load::<Self>(&**conn).expect("Error loading user organizations")
+    }
 }

src/db/models/user.rs (19 changes)

@@ -154,6 +154,25 @@ impl User {
         }
     }

+    pub fn update_uuid_revision(uuid: &str, conn: &DbConn) {
+        if let Some(mut user) = User::find_by_uuid(&uuid, conn) {
+            if user.update_revision(conn).is_err(){
+                println!("Warning: Failed to update revision for {}", user.email);
+            };
+        };
+    }
+
+    pub fn update_revision(&mut self, conn: &DbConn) -> QueryResult<()> {
+        self.updated_at = Utc::now().naive_utc();
+
+        diesel::update(
+            users::table.filter(
+                users::uuid.eq(&self.uuid)
+            )
+        )
+        .set(users::updated_at.eq(&self.updated_at))
+        .execute(&**conn).and(Ok(()))
+    }
+
     pub fn find_by_mail(mail: &str, conn: &DbConn) -> Option<Self> {
         let lower_mail = mail.to_lowercase();
         users::table

src/main.rs (2 changes)

@@ -1,5 +1,6 @@
 #![feature(plugin, custom_derive)]
 #![plugin(rocket_codegen)]
+#![allow(proc_macro_derive_resolution_fallback)] // TODO: Remove this when diesel update fixes warnings

 extern crate rocket;
 extern crate rocket_contrib;
 extern crate reqwest;
@@ -44,6 +45,7 @@ fn init_rocket() -> Rocket {
         .mount("/api", api::core_routes())
         .mount("/identity", api::identity_routes())
         .mount("/icons", api::icons_routes())
+        .mount("/notifications", api::notifications_routes())
         .manage(db::init_pool())
 }

src/util.rs (19 changes)

@@ -3,19 +3,20 @@
 ///
 #[macro_export]
 macro_rules! err {
-    ($err:expr, $err_desc:expr, $msg:expr) => {{
+    ($err:expr, $msg:expr) => {{
         println!("ERROR: {}", $msg);
         err_json!(json!({
-            "error": $err,
-            "error_description": $err_desc,
-            "ErrorModel": {
-                "Message": $msg,
-                "ValidationErrors": null,
-                "Object": "error"
-            }
+            "Message": $err,
+            "ValidationErrors": {
+                "": [$msg,],
+            },
+            "ExceptionMessage": null,
+            "ExceptionStackTrace": null,
+            "InnerExceptionMessage": null,
+            "Object": "error",
         }))
     }};
-    ($msg:expr) => { err!("default_error", "default_error_description", $msg) }
+    ($msg:expr) => { err!("The model state is invalid", $msg) }
 }

 #[macro_export]
