From f5237c3c624c5ccaae0df5de3b55fea3bd59a71b Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Sun, 2 Feb 2020 20:51:05 +0100 Subject: [PATCH 01/70] Fixing errors while moving to the latest libraries and Rust 2018 standards. --- .gitignore | 2 + Cargo.toml | 14 ++++--- src/client.rs | 92 +++++++++++++++++++++++++------------------ src/database.rs | 58 +++++++++++++-------------- src/document.rs | 5 ++- src/error.rs | 67 +++++++++++++++++++++++++++++-- src/lib.rs | 38 ++++++++---------- src/model.rs | 2 +- src/types/document.rs | 2 + src/types/find.rs | 5 ++- src/types/index.rs | 3 ++ src/types/mod.rs | 8 ++-- src/types/system.rs | 2 + 13 files changed, 192 insertions(+), 106 deletions(-) diff --git a/.gitignore b/.gitignore index 058002c..e25035f 100644 --- a/.gitignore +++ b/.gitignore @@ -12,5 +12,7 @@ Cargo.lock # These are backup files generated by rustfmt **/*.rs.bk +# Exclude IntelliJ files +.idea # End of https://www.gitignore.io/api/rust \ No newline at end of file diff --git a/Cargo.toml b/Cargo.toml index 82a20fa..4317cf5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "sofa" version = "0.6.0" -authors = ["Mathieu Amiot "] +authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" readme = "README.md" @@ -10,6 +10,7 @@ homepage = "https://github.com/YellowInnovation/sofa" repository = "https://github.com/YellowInnovation/sofa" keywords = ["couchdb", "orm", "database", "nosql"] categories = ["database"] +edition = "2018" include = [ "**/*.rs", "Cargo.toml" @@ -17,10 +18,13 @@ include = [ [dependencies] failure = "0.1" -serde = "1.0" -serde_derive = "1.0" -serde_json = "1.0" -reqwest = "0.8" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0.45" +url = "2.1.1" + +[dependencies.reqwest] +version = "0.10.1" +features = ["json", "gzip", "cookies", "blocking"] [dev-dependencies] pretty_assertions = "0.5" diff --git a/src/client.rs b/src/client.rs index 7c21680..2089130 
100644 --- a/src/client.rs +++ b/src/client.rs @@ -1,19 +1,32 @@ use std::collections::HashMap; use std::time::Duration; -use failure::Error; use serde_json::from_reader; +use serde::{Serialize, Deserialize}; -use reqwest::{self, Url, Method, RequestBuilder, StatusCode}; +use reqwest::blocking::RequestBuilder; +use reqwest::{self, Url, Method, StatusCode}; +use reqwest::header::{HeaderMap, HeaderValue, USER_AGENT, CONTENT_TYPE, REFERER}; +use crate::database::Database; +use crate::error::CouchError; +use crate::types::system::{CouchResponse, CouchStatus}; -use ::database::*; -use ::types::*; -use ::error::SofaError; +fn construct_json_headers(uri: Option<&str>) -> HeaderMap { + let mut headers = HeaderMap::new(); + headers.insert(USER_AGENT, HeaderValue::from_static("reqwest")); + headers.insert(CONTENT_TYPE, HeaderValue::from_static("application/json")); + + if let Some(u) = uri { + headers.insert(REFERER, HeaderValue::from_str(u).unwrap()); + } + + headers +} /// Client handles the URI manipulation logic and the HTTP calls to the CouchDB REST API. /// It is also responsible for the creation/access/destruction of databases. 
#[derive(Debug, Clone)] pub struct Client { - _client: reqwest::Client, + _client: reqwest::blocking::Client, dbs: Vec<&'static str>, _gzip: bool, _timeout: u8, @@ -22,15 +35,15 @@ pub struct Client { } impl Client { - pub fn new(uri: String) -> Result { - let client = reqwest::Client::builder() + pub fn new(uri: &str) -> Result { + let client = reqwest::blocking::Client::builder() .gzip(true) .timeout(Duration::new(4, 0)) .build()?; Ok(Client { _client: client, - uri: uri, + uri: uri.to_string(), _gzip: true, _timeout: 4, dbs: Vec::new(), @@ -38,8 +51,8 @@ impl Client { }) } - fn create_client(&self) -> Result { - let client = reqwest::Client::builder() + fn create_client(&self) -> Result { + let client = reqwest::blocking::Client::builder() .gzip(self._gzip) .timeout(Duration::new(self._timeout as u64, 0)) .build()?; @@ -61,23 +74,23 @@ impl Client { self } - pub fn gzip(&mut self, enabled: bool) -> Result<&Self, Error> { + pub fn gzip(&mut self, enabled: bool) -> Result<&Self, CouchError> { self._gzip = enabled; self._client = self.create_client()?; Ok(self) } - pub fn timeout(&mut self, to: u8) -> Result<&Self, Error> { + pub fn timeout(&mut self, to: u8) -> Result<&Self, CouchError> { self._timeout = to; self._client = self.create_client()?; Ok(self) } - pub fn list_dbs(&self) -> Result, Error> { + pub fn list_dbs(&self) -> Result, CouchError> { let mut response = self.get(String::from("/_all_dbs"), None)?.send()?; - let data = response.json::>()?; + let data = response.json()?; Ok(data) } @@ -86,7 +99,7 @@ impl Client { self.db_prefix.clone() + dbname } - pub fn db(&self, dbname: &'static str) -> Result { + pub fn db(&self, dbname: &'static str) -> Result { let name = self.build_dbname(dbname); let db = Database::new(name.clone(), self.clone()); @@ -94,16 +107,16 @@ impl Client { let path = self.create_path(name, None)?; let head_response = self._client.head(&path) - .header(reqwest::header::ContentType::json()) + .headers(construct_json_headers(None)) 
.send()?; match head_response.status() { - StatusCode::Ok => Ok(db), + StatusCode::OK => Ok(db), _ => self.make_db(dbname), } } - pub fn make_db(&self, dbname: &'static str) -> Result { + pub fn make_db(&self, dbname: &'static str) -> Result { let name = self.build_dbname(dbname); let db = Database::new(name.clone(), self.clone()); @@ -111,7 +124,7 @@ impl Client { let path = self.create_path(name, None)?; let put_response = self._client.put(&path) - .header(reqwest::header::ContentType::json()) + .headers(construct_json_headers(None)) .send()?; let s: CouchResponse = from_reader(put_response)?; @@ -120,15 +133,16 @@ impl Client { Some(true) => Ok(db), Some(false) | _ => { let err = s.error.unwrap_or(s!("unspecified error")); - Err(SofaError(err).into()) + let status = put_response.status(); + Err(CouchError::new(err, status)) }, } } - pub fn destroy_db(&self, dbname: &'static str) -> Result { + pub fn destroy_db(&self, dbname: &'static str) -> Result { let path = self.create_path(self.build_dbname(dbname), None)?; let response = self._client.delete(&path) - .header(reqwest::header::ContentType::json()) + .headers(construct_json_headers(None)) .send()?; let s: CouchResponse = from_reader(response)?; @@ -136,9 +150,9 @@ impl Client { Ok(s.ok.unwrap_or(false)) } - pub fn check_status(&self) -> Result { + pub fn check_status(&self) -> Result { let response = self._client.get(&self.uri) - .header(reqwest::header::ContentType::json()) + .headers(construct_json_headers(None)) .send()?; let status = from_reader(response)?; @@ -149,7 +163,7 @@ impl Client { fn create_path(&self, path: String, args: Option> - ) -> Result { + ) -> Result { let mut uri = Url::parse(&self.uri)?.join(&path)?; if let Some(ref map) = args { @@ -166,36 +180,36 @@ impl Client { method: Method, path: String, opts: Option> - ) -> Result { + ) -> Result { let uri = self.create_path(path, opts)?; let mut req = self._client.request(method, &uri); - req.header(reqwest::header::Referer::new(uri.clone())); 
- req.header(reqwest::header::ContentType::json()); + // req.header(reqwest::header::Referer::new(uri.clone())); + req.headers(construct_json_headers(Some(&uri))); Ok(req) } - pub fn get(&self, path: String, args: Option>) -> Result { - Ok(self.req(Method::Get, path, args)?) + pub fn get(&self, path: String, args: Option>) -> Result { + Ok(self.req(Method::GET, path, args)?) } - pub fn post(&self, path: String, body: String) -> Result { - let mut req = self.req(Method::Post, path, None)?; + pub fn post(&self, path: String, body: String) -> Result { + let mut req = self.req(Method::POST, path, None)?; req.body(body); Ok(req) } - pub fn put(&self, path: String, body: String) -> Result { - let mut req = self.req(Method::Put, path, None)?; + pub fn put(&self, path: String, body: String) -> Result { + let mut req = self.req(Method::PUT, path, None)?; req.body(body); Ok(req) } - pub fn head(&self, path: String, args: Option>) -> Result { - Ok(self.req(Method::Head, path, args)?) + pub fn head(&self, path: String, args: Option>) -> Result { + Ok(self.req(Method::HEAD, path, args)?) } - pub fn delete(&self, path: String, args: Option>) -> Result { - Ok(self.req(Method::Delete, path, args)?) + pub fn delete(&self, path: String, args: Option>) -> Result { + Ok(self.req(Method::DELETE, path, args)?) 
} } diff --git a/src/database.rs b/src/database.rs index bb482b0..1a312ae 100644 --- a/src/database.rs +++ b/src/database.rs @@ -2,14 +2,14 @@ use std::collections::HashMap; use reqwest::StatusCode; -use failure::Error; use serde_json; -use serde_json::{from_reader, to_string, Value}; - -use client::*; -use document::*; -use error::SofaError; -use types::*; +use serde_json::{from_reader, to_string, Value, json}; +use crate::document::{Document, DocumentCollection}; +use crate::error::CouchError; +use crate::client::Client; +use crate::types::document::{DocumentId, DocumentCreatedResult}; +use crate::types::find::FindResult; +use crate::types::index::{IndexFields, IndexCreated, DatabaseIndexList}; /// Database holds the logic of making operations on a CouchDB Database /// (sometimes called Collection in other NoSQL flavors such as MongoDB). @@ -59,7 +59,7 @@ impl Database { request .and_then(|mut req| { Ok(req.send() - .and_then(|res| Ok(res.status() == StatusCode::Accepted)) + .and_then(|res| Ok(res.status() == StatusCode::ACCEPTED)) .unwrap_or(false)) }) .unwrap_or(false) @@ -75,7 +75,7 @@ impl Database { request .and_then(|mut req| { Ok(req.send() - .and_then(|res| Ok(res.status() == StatusCode::Accepted)) + .and_then(|res| Ok(res.status() == StatusCode::ACCEPTED)) .unwrap_or(false)) }) .unwrap_or(false) @@ -88,7 +88,7 @@ impl Database { request .and_then(|mut req| { Ok(req.send() - .and_then(|res| Ok(res.status() == StatusCode::Accepted)) + .and_then(|res| Ok(res.status() == StatusCode::ACCEPTED)) .unwrap_or(false)) }) .unwrap_or(false) @@ -103,7 +103,7 @@ impl Database { Ok(req.send() .and_then(|res| { Ok(match res.status() { - StatusCode::Ok | StatusCode::NotModified => true, + StatusCode::OK | StatusCode::NOT_MODIFIED => true, _ => false, }) }) @@ -113,14 +113,14 @@ impl Database { } /// Gets one document - pub fn get(&self, id: DocumentId) -> Result { + pub fn get(&self, id: DocumentId) -> Result { let response = 
self._client.get(self.create_document_path(id), None)?.send()?; Ok(Document::new(from_reader(response)?)) } /// Gets documents in bulk with provided IDs list - pub fn get_bulk(&self, ids: Vec) -> Result { + pub fn get_bulk(&self, ids: Vec) -> Result { self.get_bulk_params(ids, None) } @@ -129,7 +129,7 @@ impl Database { &self, ids: Vec, params: Option>, - ) -> Result { + ) -> Result { let mut options; if let Some(opts) = params { options = opts; @@ -151,12 +151,12 @@ impl Database { } /// Gets all the documents in database - pub fn get_all(&self) -> Result { + pub fn get_all(&self) -> Result { self.get_all_params(None) } /// Gets all the documents in database, with applied parameters. Parameters description can be found here: http://docs.couchdb.org/en/latest/api/ddoc/views.html#api-ddoc-view - pub fn get_all_params(&self, params: Option>) -> Result { + pub fn get_all_params(&self, params: Option>) -> Result { let mut options; if let Some(opts) = params { options = opts; @@ -174,7 +174,7 @@ impl Database { } /// Finds a document in the database through a Mango query. 
Parameters here http://docs.couchdb.org/en/latest/api/database/find.html - pub fn find(&self, params: Value) -> Result { + pub fn find(&self, params: Value) -> Result { let path = self.create_document_path("_find".into()); let response = self._client.post(path, js!(¶ms))?.send()?; @@ -192,14 +192,14 @@ impl Database { Ok(DocumentCollection::new_from_documents(documents)) } else if let Some(err) = data.error { - Err(SofaError(err).into()) + Err(CouchError::new(err, response.status()).into()) } else { Ok(DocumentCollection::default()) } } /// Updates a document - pub fn save(&self, doc: Document) -> Result { + pub fn save(&self, doc: Document) -> Result { let id = doc._id.to_owned(); let raw = doc.get_data(); @@ -218,13 +218,13 @@ impl Database { } Some(false) | _ => { let err = data.error.unwrap_or(s!("unspecified error")); - return Err(SofaError(err).into()); + return Err(CouchError::new(err, response.status()).into()); } } } /// Creates a document from a raw JSON document Value. - pub fn create(&self, raw_doc: Value) -> Result { + pub fn create(&self, raw_doc: Value) -> Result { let response = self._client.post(self.name.clone(), to_string(&raw_doc)?)?.send()?; let data: DocumentCreatedResult = from_reader(response)?; @@ -233,12 +233,12 @@ impl Database { Some(true) => { let data_id = match data.id { Some(id) => id, - _ => return Err(SofaError(s!("invalid id")).into()), + _ => return Err(CouchError::new(s!("invalid id"), response.status()).into()), }; let data_rev = match data.rev { Some(rev) => rev, - _ => return Err(SofaError(s!("invalid rev")).into()), + _ => return Err(CouchError::new(s!("invalid rev"), response.status()).into()), }; let mut val = raw_doc.clone(); @@ -249,7 +249,7 @@ impl Database { } Some(false) | _ => { let err = data.error.unwrap_or(s!("unspecified error")); - return Err(SofaError(err).into()); + return Err(CouchError::new(err, response.status()).into()); } } } @@ -270,7 +270,7 @@ impl Database { Ok(req.send() .and_then(|res| { Ok(match 
res.status() { - StatusCode::Ok | StatusCode::Accepted => true, + StatusCode::OK | StatusCode::ACCEPTED => true, _ => false, }) }) @@ -281,7 +281,7 @@ impl Database { /// Inserts an index in a naive way, if it already exists, will throw an /// `Err` - pub fn insert_index(&self, name: String, spec: IndexFields) -> Result { + pub fn insert_index(&self, name: String, spec: IndexFields) -> Result { let response = self._client .post( self.create_document_path("_index".into()), @@ -296,14 +296,14 @@ impl Database { if data.error.is_some() { let err = data.error.unwrap_or(s!("unspecified error")); - Err(SofaError(err).into()) + Err(CouchError::new(err, response.status()).into()) } else { Ok(data) } } /// Reads the database's indexes and returns them - pub fn read_indexes(&self) -> Result { + pub fn read_indexes(&self) -> Result { let response = self._client .get(self.create_document_path("_index".into()), None)? .send()?; @@ -314,7 +314,7 @@ impl Database { /// Method to ensure an index is created on the database with the following /// spec. Returns `true` when we created a new one, or `false` when the /// index was already existing. 
- pub fn ensure_index(&self, name: String, spec: IndexFields) -> Result { + pub fn ensure_index(&self, name: String, spec: IndexFields) -> Result { let db_indexes = self.read_indexes()?; // We look for our index diff --git a/src/document.rs b/src/document.rs index ca33f78..56e201b 100644 --- a/src/document.rs +++ b/src/document.rs @@ -1,8 +1,9 @@ -use database::*; use serde_json; use serde_json::Value; use std::ops::{Index, IndexMut}; -use types::*; +use serde::{Serialize, Deserialize}; +use crate::types::document::{DocumentId}; +use crate::database::Database; /// Document abstracts the handling of JSON values and provides direct access /// and casting to the fields of your documents You can get access to the diff --git a/src/error.rs b/src/error.rs index bc57522..34d1cce 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,3 +1,64 @@ -#[derive(Fail, Debug)] -#[fail(display = "Custom error: {}", _0)] -pub struct SofaError(pub String); +use std::error; +use std::fmt; +use reqwest::Error; +use url::ParseError; + +// Define our error types. These may be customized for our error handling cases. +// Now we will be able to write our own errors, defer to an underlying error +// implementation, or do something in between. +#[derive(Debug, Clone)] +pub struct CouchError { + status: reqwest::StatusCode, + message: String, +} + +impl CouchError { + pub fn new(message: String, status: reqwest::StatusCode) -> CouchError { + CouchError { + message, + status, + } + } +} + +impl fmt::Display for CouchError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "{}: {}", self.status, self.message) + } +} + +// This is important for other errors to wrap this one. +impl error::Error for CouchError { + fn source(&self) -> Option<&(dyn error::Error + 'static)> { + // Generic error, underlying cause isn't tracked. 
+ None + } +} + +impl std::convert::From for CouchError { + fn from(err: reqwest::Error) -> Self { + CouchError { + status: err.status().unwrap_or(reqwest::StatusCode::NOT_IMPLEMENTED), + message: err.to_string(), + } + } +} + +impl std::convert::From for CouchError { + fn from(err: serde_json::Error) -> Self { + CouchError { + status: reqwest::StatusCode::NOT_IMPLEMENTED, + message: err.to_string(), + } + } +} + +impl std::convert::From for CouchError { + fn from(err: url::ParseError) -> Self { + CouchError { + status: reqwest::StatusCode::NOT_IMPLEMENTED, + message: err.to_string(), + } + } +} + diff --git a/src/lib.rs b/src/lib.rs index ad38104..850e5f9 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -70,19 +70,6 @@ //! //! [Yellow Innovation's website and works](http://yellowinnovation.fr/en/) -#[macro_use] -extern crate failure; -extern crate reqwest; -extern crate serde; -#[macro_use] -extern crate serde_json; -#[macro_use] -extern crate serde_derive; - -#[cfg(test)] -#[macro_use] -extern crate pretty_assertions; - /// Macros that the crate exports to facilitate most of the /// doc-to-json-to-string-related tasks #[allow(unused_macros)] @@ -141,18 +128,21 @@ mod macros { } } -mod_use!(client); -mod_use!(database); -mod_use!(document); -mod_use!(error); pub mod types; -mod_use!(model); +mod document; +mod database; +mod client; +mod error; +mod model; + +use client::{Client}; #[allow(unused_mut, unused_variables)] #[cfg(test)] mod sofa_tests { mod a_sys { - use *; + use serde_json::{json}; + use crate::client::Client; #[test] fn a_should_check_couchdbs_status() { @@ -199,7 +189,11 @@ mod sofa_tests { } mod b_db { - use *; + use serde_json::{json}; + use crate::client::Client; + use crate::database::Database; + use crate::document::Document; + use crate::types; fn setup(dbname: &'static str) -> (Client, Database, Document) { let client = Client::new("http://localhost:5984".into()).unwrap(); @@ -255,7 +249,7 @@ mod sofa_tests { fn setup_create_indexes(dbname: 
&'static str) -> (Client, Database, Document) { let (client, db, doc) = setup(dbname); - let spec = types::IndexFields::new(vec![types::SortSpec::Simple(s!("thing"))]); + let spec = types::index::IndexFields::new(vec![types::find::SortSpec::Simple(s!("thing"))]); let res = db.insert_index("thing-index".into(), spec); @@ -287,7 +281,7 @@ mod sofa_tests { fn f_should_ensure_index_in_db() { let (client, db, _) = setup("f_should_ensure_index_in_db"); - let spec = types::IndexFields::new(vec![types::SortSpec::Simple(s!("thing"))]); + let spec = types::index::IndexFields::new(vec![types::find::SortSpec::Simple(s!("thing"))]); let res = db.ensure_index("thing-index".into(), spec); assert!(res.is_ok()); diff --git a/src/model.rs b/src/model.rs index ddf9d5b..f1cd680 100644 --- a/src/model.rs +++ b/src/model.rs @@ -1,8 +1,8 @@ use serde::de::DeserializeOwned; use serde::ser::Serialize; use serde_json::{from_value, to_value}; -use ::document::*; use std::marker::Sized; +use crate::document::Document; /// Trait that provides methods that can be used to switch between abstract `Document` and concrete `Model` implementors (such as your custom data models) pub trait Model { diff --git a/src/types/document.rs b/src/types/document.rs index 706bbdc..6d210cd 100644 --- a/src/types/document.rs +++ b/src/types/document.rs @@ -1,3 +1,5 @@ +use serde::{Serialize, Deserialize}; + /// String that represents a Document ID in CouchDB pub type DocumentId = String; diff --git a/src/types/find.rs b/src/types/find.rs index 7cccd0a..c0f661f 100644 --- a/src/types/find.rs +++ b/src/types/find.rs @@ -1,5 +1,6 @@ use serde_json::{Value}; use std::collections::HashMap; +use serde::{Serialize, Deserialize}; /// Sort direction abstraction #[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] @@ -54,9 +55,11 @@ pub struct FindResult { pub docs: Option>, pub warning: Option, pub error: Option, - pub reason: Option + pub reason: Option, } +//todo: include status on structs + /// Explain result 
abstraction #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct ExplainResult { diff --git a/src/types/index.rs b/src/types/index.rs index dc9e85e..12f2292 100644 --- a/src/types/index.rs +++ b/src/types/index.rs @@ -1,4 +1,7 @@ use super::*; +use serde::{Serialize, Deserialize}; +use find::{SortSpec}; +use document::{DocumentId}; /// Index fields abstraction #[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] diff --git a/src/types/mod.rs b/src/types/mod.rs index fd7be42..2eeb6d9 100644 --- a/src/types/mod.rs +++ b/src/types/mod.rs @@ -1,4 +1,4 @@ -mod_use!(system); -mod_use!(document); -mod_use!(find); -mod_use!(index); +pub mod system; +pub mod document; +pub mod find; +pub mod index; diff --git a/src/types/system.rs b/src/types/system.rs index 69e1667..414d842 100644 --- a/src/types/system.rs +++ b/src/types/system.rs @@ -1,3 +1,5 @@ +use serde::{Serialize, Deserialize}; + /// Couch vendor abstraction #[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] pub struct CouchVendor { From f260c41ef2ff987ea9bd7bae914d2ba460b0636c Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Sun, 2 Feb 2020 20:57:40 +0100 Subject: [PATCH 02/70] All tests pass! 
:-) --- src/client.rs | 13 ++++++------- src/database.rs | 16 ++++++++++------ src/lib.rs | 14 +++++++++----- 3 files changed, 25 insertions(+), 18 deletions(-) diff --git a/src/client.rs b/src/client.rs index 2089130..7147f71 100644 --- a/src/client.rs +++ b/src/client.rs @@ -127,13 +127,13 @@ impl Client { .headers(construct_json_headers(None)) .send()?; + let status = put_response.status(); let s: CouchResponse = from_reader(put_response)?; match s.ok { Some(true) => Ok(db), Some(false) | _ => { let err = s.error.unwrap_or(s!("unspecified error")); - let status = put_response.status(); Err(CouchError::new(err, status)) }, } @@ -182,9 +182,10 @@ impl Client { opts: Option> ) -> Result { let uri = self.create_path(path, opts)?; - let mut req = self._client.request(method, &uri); + let mut req = self._client.request(method, &uri). + headers(construct_json_headers(Some(&uri))); + // req.header(reqwest::header::Referer::new(uri.clone())); - req.headers(construct_json_headers(Some(&uri))); Ok(req) } @@ -194,14 +195,12 @@ impl Client { } pub fn post(&self, path: String, body: String) -> Result { - let mut req = self.req(Method::POST, path, None)?; - req.body(body); + let mut req = self.req(Method::POST, path, None)?.body(body); Ok(req) } pub fn put(&self, path: String, body: String) -> Result { - let mut req = self.req(Method::PUT, path, None)?; - req.body(body); + let mut req = self.req(Method::PUT, path, None)?.body(body); Ok(req) } diff --git a/src/database.rs b/src/database.rs index 1a312ae..0ce40e2 100644 --- a/src/database.rs +++ b/src/database.rs @@ -177,6 +177,7 @@ impl Database { pub fn find(&self, params: Value) -> Result { let path = self.create_document_path("_find".into()); let response = self._client.post(path, js!(¶ms))?.send()?; + let status = response.status(); let data: FindResult = from_reader(response)?; if let Some(doc_val) = data.docs { @@ -192,7 +193,7 @@ impl Database { Ok(DocumentCollection::new_from_documents(documents)) } else if let 
Some(err) = data.error { - Err(CouchError::new(err, response.status()).into()) + Err(CouchError::new(err, status).into()) } else { Ok(DocumentCollection::default()) } @@ -207,6 +208,7 @@ impl Database { .put(self.create_document_path(id), to_string(&raw)?)? .send()?; + let status = response.status(); let data: DocumentCreatedResult = from_reader(response)?; match data.ok { @@ -218,7 +220,7 @@ impl Database { } Some(false) | _ => { let err = data.error.unwrap_or(s!("unspecified error")); - return Err(CouchError::new(err, response.status()).into()); + return Err(CouchError::new(err, status).into()); } } } @@ -226,6 +228,7 @@ impl Database { /// Creates a document from a raw JSON document Value. pub fn create(&self, raw_doc: Value) -> Result { let response = self._client.post(self.name.clone(), to_string(&raw_doc)?)?.send()?; + let status = response.status(); let data: DocumentCreatedResult = from_reader(response)?; @@ -233,12 +236,12 @@ impl Database { Some(true) => { let data_id = match data.id { Some(id) => id, - _ => return Err(CouchError::new(s!("invalid id"), response.status()).into()), + _ => return Err(CouchError::new(s!("invalid id"), status).into()), }; let data_rev = match data.rev { Some(rev) => rev, - _ => return Err(CouchError::new(s!("invalid rev"), response.status()).into()), + _ => return Err(CouchError::new(s!("invalid rev"), status).into()), }; let mut val = raw_doc.clone(); @@ -249,7 +252,7 @@ impl Database { } Some(false) | _ => { let err = data.error.unwrap_or(s!("unspecified error")); - return Err(CouchError::new(err, response.status()).into()); + return Err(CouchError::new(err, status).into()); } } } @@ -292,11 +295,12 @@ impl Database { )? 
.send()?; + let status = response.status(); let data: IndexCreated = from_reader(response)?; if data.error.is_some() { let err = data.error.unwrap_or(s!("unspecified error")); - Err(CouchError::new(err, response.status()).into()) + Err(CouchError::new(err, status).into()) } else { Ok(data) } diff --git a/src/lib.rs b/src/lib.rs index 850e5f9..475e8a6 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -141,19 +141,21 @@ use client::{Client}; #[cfg(test)] mod sofa_tests { mod a_sys { + const DB_HOST: &'static str = "http://192.168.64.5:5984"; + use serde_json::{json}; use crate::client::Client; #[test] fn a_should_check_couchdbs_status() { - let client = Client::new("http://localhost:5984".into()).unwrap(); + let client = Client::new(DB_HOST.into()).unwrap(); let status = client.check_status(); assert!(status.is_ok()); } #[test] fn b_should_create_sofa_test_db() { - let client = Client::new("http://localhost:5984".into()).unwrap(); + let client = Client::new(DB_HOST.into()).unwrap(); let dbw = client.db("b_should_create_sofa_test_db"); assert!(dbw.is_ok()); @@ -162,7 +164,7 @@ mod sofa_tests { #[test] fn c_should_create_a_document() { - let client = Client::new("http://localhost:5984".into()).unwrap(); + let client = Client::new(DB_HOST.into()).unwrap(); let dbw = client.db("c_should_create_a_document"); assert!(dbw.is_ok()); let db = dbw.unwrap(); @@ -181,7 +183,7 @@ mod sofa_tests { #[test] fn d_should_destroy_the_db() { - let client = Client::new("http://localhost:5984".into()).unwrap(); + let client = Client::new(DB_HOST.into()).unwrap(); let _ = client.db("d_should_destroy_the_db"); assert!(client.destroy_db("d_should_destroy_the_db").unwrap()); @@ -195,8 +197,10 @@ mod sofa_tests { use crate::document::Document; use crate::types; + const DB_HOST: &'static str = "http://192.168.64.5:5984"; + fn setup(dbname: &'static str) -> (Client, Database, Document) { - let client = Client::new("http://localhost:5984".into()).unwrap(); + let client = 
Client::new(DB_HOST.into()).unwrap(); let dbw = client.db(dbname); assert!(dbw.is_ok()); let db = dbw.unwrap(); From 6527b318a17c283cf091efe190b1f7a87ca2e5ff Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Mon, 3 Feb 2020 07:35:58 +0100 Subject: [PATCH 03/70] Compiles without warnings. --- src/client.rs | 9 ++++----- src/database.rs | 10 +++++----- src/error.rs | 2 -- src/lib.rs | 10 +++++----- 4 files changed, 14 insertions(+), 17 deletions(-) diff --git a/src/client.rs b/src/client.rs index 7147f71..11221ff 100644 --- a/src/client.rs +++ b/src/client.rs @@ -1,7 +1,6 @@ use std::collections::HashMap; use std::time::Duration; use serde_json::from_reader; -use serde::{Serialize, Deserialize}; use reqwest::blocking::RequestBuilder; use reqwest::{self, Url, Method, StatusCode}; @@ -89,7 +88,7 @@ impl Client { } pub fn list_dbs(&self) -> Result, CouchError> { - let mut response = self.get(String::from("/_all_dbs"), None)?.send()?; + let response = self.get(String::from("/_all_dbs"), None)?.send()?; let data = response.json()?; Ok(data) @@ -182,7 +181,7 @@ impl Client { opts: Option> ) -> Result { let uri = self.create_path(path, opts)?; - let mut req = self._client.request(method, &uri). + let req = self._client.request(method, &uri). 
headers(construct_json_headers(Some(&uri))); // req.header(reqwest::header::Referer::new(uri.clone())); @@ -195,12 +194,12 @@ impl Client { } pub fn post(&self, path: String, body: String) -> Result { - let mut req = self.req(Method::POST, path, None)?.body(body); + let req = self.req(Method::POST, path, None)?.body(body); Ok(req) } pub fn put(&self, path: String, body: String) -> Result { - let mut req = self.req(Method::PUT, path, None)?.body(body); + let req = self.req(Method::PUT, path, None)?.body(body); Ok(req) } diff --git a/src/database.rs b/src/database.rs index 0ce40e2..757d6e2 100644 --- a/src/database.rs +++ b/src/database.rs @@ -57,7 +57,7 @@ impl Database { let request = self._client.post(path, "".into()); request - .and_then(|mut req| { + .and_then(|req| { Ok(req.send() .and_then(|res| Ok(res.status() == StatusCode::ACCEPTED)) .unwrap_or(false)) @@ -73,7 +73,7 @@ impl Database { let request = self._client.post(path, "".into()); request - .and_then(|mut req| { + .and_then(|req| { Ok(req.send() .and_then(|res| Ok(res.status() == StatusCode::ACCEPTED)) .unwrap_or(false)) @@ -86,7 +86,7 @@ impl Database { let request = self._client.post(self.create_compact_path(index), "".into()); request - .and_then(|mut req| { + .and_then(|req| { Ok(req.send() .and_then(|res| Ok(res.status() == StatusCode::ACCEPTED)) .unwrap_or(false)) @@ -99,7 +99,7 @@ impl Database { let request = self._client.head(self.create_document_path(id), None); request - .and_then(|mut req| { + .and_then(|req| { Ok(req.send() .and_then(|res| { Ok(match res.status() { @@ -269,7 +269,7 @@ impl Database { ); request - .and_then(|mut req| { + .and_then(|req| { Ok(req.send() .and_then(|res| { Ok(match res.status() { diff --git a/src/error.rs b/src/error.rs index 34d1cce..65ead1d 100644 --- a/src/error.rs +++ b/src/error.rs @@ -1,7 +1,5 @@ use std::error; use std::fmt; -use reqwest::Error; -use url::ParseError; // Define our error types. These may be customized for our error handling cases. 
// Now we will be able to write our own errors, defer to an underlying error diff --git a/src/lib.rs b/src/lib.rs index 475e8a6..f9b658e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -129,13 +129,13 @@ mod macros { } pub mod types; -mod document; -mod database; +pub mod document; +pub mod database; mod client; -mod error; -mod model; +pub mod error; +pub mod model; -use client::{Client}; +pub use client::{Client}; #[allow(unused_mut, unused_variables)] #[cfg(test)] From 0bffd99a5254e06e625fed4b1081dadb4b7c125b Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Mon, 3 Feb 2020 07:48:41 +0100 Subject: [PATCH 04/70] Updated Readme's. --- CHANGELOG.md | 6 ++- Cargo.toml | 5 +- README.md | 130 ++++++++++++++++++++++++++------------------------ src/client.rs | 2 + 4 files changed, 75 insertions(+), 68 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a7b3538..1d0647f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,17 +7,18 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] -## [0.6.0] - 2018-08-23 +## [0.7.0] - 2020-02-03 ### Added -- Added `failure` dependency - Added `Client::make_db` - Added `docker-compose.yml` - Added `.rustfmt.toml` ### Changed +- Updated to the Rust 2018 edition standards +- Compiles against the latest reqwest and serde libraries - Optimized memory consumption by moving `iter()` calls to `into_iter()` where needed - Changed `SofaError` to derive `failure` - Changed `Client::check_status` signature to remove potentially panicking `unwrap()` calls @@ -45,3 +46,4 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ### Removed - Removed env files that were necessary for single-threaded test run. Added section in README to reflect that. 
+- Removed the `failure` dependency diff --git a/Cargo.toml b/Cargo.toml index 4317cf5..a95e988 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "sofa" -version = "0.6.0" -authors = ["Mathieu Amiot ", "mibes "] +version = "0.7.0" +authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" readme = "README.md" @@ -17,7 +17,6 @@ include = [ ] [dependencies] -failure = "0.1" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0.45" url = "2.1.1" diff --git a/README.md b/README.md index e438772..d60532f 100644 --- a/README.md +++ b/README.md @@ -1,67 +1,71 @@ -# Sofa - CouchDB for Rust - -[![Crates.io](https://img.shields.io/crates/v/sofa.svg)](https://crates.io/crates/sofa) [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa?ref=badge_shield) - -[![docs.rs](https://docs.rs/sofa/badge.svg)](https://docs.rs/sofa) - -![sofa-logo](https://raw.githubusercontent.com/YellowInnovation/sofa/master/docs/logo-sofa.png "Logo Sofa") - -## Documentation - -Here: [http://docs.rs/sofa](http://docs.rs/sofa) - -## Installation - -```toml -[dependencies] -sofa = "0.6" -``` - -## Description - -This crate is an interface to CouchDB HTTP REST API. Works with stable Rust. - -Does not support `#![no_std]` - -After trying most crates for CouchDB in Rust (`chill`, `couchdb` in particular), none of them fit our needs hence the need to create our own. - -No async I/O (yet), uses a mix of Reqwest and Serde under the hood, with a few nice abstractions out there. - -**NOT 1.0 YET, so expect changes** - -**Supports CouchDB 2.0 and up.** - -Be sure to check [CouchDB's Documentation](http://docs.couchdb.org/en/latest/index.html) in detail to see what's possible. 
- -## Running tests - -Make sure that you have an instance of CouchDB 2.0+ running, either via the supplied `docker-compose.yml` file or by yourself. It must be listening on the default port. - -And then -`cargo test -- --test-threads=1` - -Single-threading the tests is very important because we need to make sure that the basic features are working before actually testing features on dbs/documents. - -## Why the name "Sofa" - -CouchDB has a nice name, and I wanted to reflect that. - -## License - -Licensed under either of these: - -* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or - [https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0) -* MIT license ([LICENSE-MIT](LICENSE-MIT) or - [https://opensource.org/licenses/MIT](https://opensource.org/licenses/MIT)) - +# Sofa - CouchDB for Rust + +[![Crates.io](https://img.shields.io/crates/v/sofa.svg)](https://crates.io/crates/sofa) +[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa?ref=badge_shield) + +[![docs.rs](https://docs.rs/sofa/badge.svg)](https://docs.rs/sofa) + +![sofa-logo](https://raw.githubusercontent.com/mibes/sofa/master/docs/logo-sofa.png "Logo Sofa") + +## Documentation + +Here: [http://docs.rs/sofa](http://docs.rs/sofa) + +## Installation + +```toml +[dependencies] +sofa = "0.7" +``` + +## Description + +This crate is an interface to CouchDB HTTP REST API. Works with stable Rust. + +Does not support `#![no_std]` + +After trying most crates for CouchDB in Rust (`chill`, `couchdb` in particular), none of them fit our needs hence the need to create our own. + +No async I/O (yet), uses a mix of Reqwest and Serde under the hood, with a few nice abstractions out there. 
+ +**NOT 1.0 YET, so expect changes** + +**Supports CouchDB 2.0 and up.** + +Be sure to check [CouchDB's Documentation](http://docs.couchdb.org/en/latest/index.html) in detail to see what's possible. + +The 0.7 version is based on the 0.6 release from https://github.com/YellowInnovation/sofa. +It has been updated to the Rust 2018 edition standards and compiles against the latest serde and reqwest libraries. + +## Running tests + +Make sure that you have an instance of CouchDB 2.0+ running, either via the supplied `docker-compose.yml` file or by yourself. It must be listening on the default port. + +And then +`cargo test -- --test-threads=1` + +Single-threading the tests is very important because we need to make sure that the basic features are working before actually testing features on dbs/documents. + +## Why the name "Sofa" + +CouchDB has a nice name, and I wanted to reflect that. + +## License + +Licensed under either of these: + +* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or + [https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0) +* MIT license ([LICENSE-MIT](LICENSE-MIT) or + [https://opensource.org/licenses/MIT](https://opensource.org/licenses/MIT)) + [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa?ref=badge_large) -## Yellow Innovation - -Yellow Innovation is the innovation laboratory of the French postal service: La Poste. - -We create innovative user experiences and journeys through services with a focus on IoT lately. - +## Yellow Innovation + +Yellow Innovation is the innovation laboratory of the French postal service: La Poste. + +We create innovative user experiences and journeys through services with a focus on IoT lately. 
+ [Yellow Innovation's website and works](http://yellowinnovation.fr/en/) \ No newline at end of file diff --git a/src/client.rs b/src/client.rs index 11221ff..da3fc5b 100644 --- a/src/client.rs +++ b/src/client.rs @@ -34,6 +34,8 @@ pub struct Client { } impl Client { + /// new creates a new Couch client. The URI has to be in this format: http://hostname:5984, + /// for example: http://192.168.64.5:5984 pub fn new(uri: &str) -> Result { let client = reqwest::blocking::Client::builder() .gzip(true) From dcf9e6ff9cf2afb208ad0bfac93e0a0f8c06cb67 Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Mon, 3 Feb 2020 07:59:20 +0100 Subject: [PATCH 05/70] Updated installation instructions. --- README.md | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index d60532f..caf4d31 100644 --- a/README.md +++ b/README.md @@ -13,9 +13,21 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) ## Installation +If you want to use this particular fork, clone the project locally: + +```bash +git clone https://github.com/mibes/sofa.git +``` +and include it in your Cargo.toml file: +```toml +[dependencies] +sofa = { path = "../sofa" } +``` + +If you want to continue to use the "old" 0.6 version use this dependency instead: ```toml [dependencies] -sofa = "0.7" +sofa = "0.6" ``` ## Description From 5d533dd9a253d512157a54ce4c52326be24137a3 Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Mon, 3 Feb 2020 08:33:37 +0100 Subject: [PATCH 06/70] Allow for bulk operations. --- src/database.rs | 21 +++++++++++++++++++++ 1 file changed, 21 insertions(+) diff --git a/src/database.rs b/src/database.rs index 757d6e2..027e41a 100644 --- a/src/database.rs +++ b/src/database.rs @@ -124,6 +124,27 @@ impl Database { self.get_bulk_params(ids, None) } + /// Each time a document is stored or updated in CouchDB, the internal B-tree is updated. 
+ /// Bulk insertion provides efficiency gains in both storage space, and time, + /// by consolidating many of the updates to intermediate B-tree nodes. + /// + /// See the documentation on how to use bulk_docs here: https://docs.couchdb.org/en/stable/api/database/bulk-api.html#db-bulk-docs + /// + /// raw_docs is a vector of documents with or without an ID + /// + /// This endpoint can also be used to delete a set of documents by including "_deleted": true, in the document to be deleted. + /// When deleting or updating, both _id and _rev are mandatory. + pub fn bulk_docs(&self, raw_docs: Vec) -> Result, CouchError> { + let mut body = HashMap::new(); + body.insert(s!("docs"), raw_docs); + + let response = self._client.post(self.create_document_path("_bulk_docs".into()), to_string(&body)?)?.send()?; + let status = response.status(); + + let data: Vec = from_reader(response)?; + Ok(data) + } + /// Gets documents in bulk with provided IDs list, with added params. Params description can be found here: Parameters description can be found here: http://docs.couchdb.org/en/latest/api/ddoc/views.html#api-ddoc-view pub fn get_bulk_params( &self, From b549fdb9d7c07841c4961104f4f375b7cf7398eb Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Mon, 3 Feb 2020 08:39:33 +0100 Subject: [PATCH 07/70] Removed unneeded code. 
--- src/database.rs | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/database.rs b/src/database.rs index 027e41a..93191f3 100644 --- a/src/database.rs +++ b/src/database.rs @@ -139,9 +139,8 @@ impl Database { body.insert(s!("docs"), raw_docs); let response = self._client.post(self.create_document_path("_bulk_docs".into()), to_string(&body)?)?.send()?; - let status = response.status(); - let data: Vec = from_reader(response)?; + Ok(data) } From f630f29adb1deb31ef5c4b4aa2cf92b594f5d8ab Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Mon, 3 Feb 2020 09:23:26 +0100 Subject: [PATCH 08/70] Elaborated example --- README.md | 7 +++ examples/basic_operations/main.rs | 91 +++++++++++++++++++++++++++++++ 2 files changed, 98 insertions(+) create mode 100644 examples/basic_operations/main.rs diff --git a/README.md b/README.md index caf4d31..63a51ea 100644 --- a/README.md +++ b/README.md @@ -49,6 +49,13 @@ Be sure to check [CouchDB's Documentation](http://docs.couchdb.org/en/latest/ind The 0.7 version is based on the 0.6 release from https://github.com/YellowInnovation/sofa. It has been updated to the Rust 2018 edition standards and compiles against the latest serde and reqwest libraries. +## Example code + +You can launch the included example with: +```shell script +cargo run --example basic_operations +``` + ## Running tests Make sure that you have an instance of CouchDB 2.0+ running, either via the supplied `docker-compose.yml` file or by yourself. It must be listening on the default port. diff --git a/examples/basic_operations/main.rs b/examples/basic_operations/main.rs new file mode 100644 index 0000000..ba26f0a --- /dev/null +++ b/examples/basic_operations/main.rs @@ -0,0 +1,91 @@ +/// This example demonstrates some basic Couch operations: connecting, listing databases and +/// inserting some documents in bulk. 
+/// +/// The easiest way to get this example to work, is to connect it to a running CouchDB Docker +/// container: +/// +/// ``` +/// docker run --rm -p5984:5984 couchdb:2.3.1 +/// ``` +/// +/// Depending on the Docker framework you are using it may listen to "localhost" or to some other +/// automatically assigned IP address. Minikube for example generates a unique IP on start-up. You +/// can obtain it with: `minikube ip` + +extern crate sofa; + +use serde_json::{json, Value}; + +/// Update DB_HOST to point to your running Couch instance +const DB_HOST: &'static str = "http://192.168.64.5:5984"; +const TEST_DB: &'static str = "test_db"; + +/// test_docs generates a bunch of documents that can be used in the _bulk_docs operation. +fn test_docs(amount: i32) -> Vec { + let mut result: Vec = vec![]; + + for _i in 0..amount { + result.push(json!({"name": "Marcel"})) + } + + result +} + +fn main() { + println!("Connecting..."); + + // Prepare the Sofa client + let client = sofa::Client::new(DB_HOST).unwrap(); + let mut db_initialized = false; + + // This command gets a reference to an existing database, or it creates a new one when it does + // not yet exist. + let db = client.db(TEST_DB).unwrap(); + + // List the existing databases. The db_initialized is superfluous, since we just created it in + // the previous step. It is for educational purposes only. 
+ match client.list_dbs() { + Ok(dbs) => { + println!("Existing databases:"); + for db in dbs { + println!("Couch DB {}", db); + + if db == TEST_DB { + db_initialized = true; + } + } + } + Err(err) => panic!("Oops: {:?}", err), + } + + let mut first_doc_id = String::from(""); + + if db_initialized { + // let's add some docs + match db.bulk_docs(test_docs(100)) { + Ok(resp) => { + println!("Bulk docs completed"); + + let first_doc = resp.first().unwrap().clone(); + first_doc_id = first_doc.id.unwrap_or(String::from("")); + + for r in resp { + println!("Id: {}, OK?: {}", r.id.unwrap_or("--".to_string()), r.ok.unwrap_or(false)) + } + } + Err(err) => println!("Oops: {:?}", err), + } + } + + println!("---"); + + if first_doc_id != "" { + // we have an id of the first document we've just inserted + match db.get(first_doc_id) { + Ok(doc) => { println!("First document: {}", doc.get_data().to_string()) } + Err(err) => println!("Oops: {:?}", err), + } + } + + println!("All operations are done") +} From 8d89d2094a74abd6de9d792295ae5579ebcbbcaa Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Mon, 3 Feb 2020 09:29:58 +0100 Subject: [PATCH 09/70] Simplified the example a bit. 
--- examples/basic_operations/main.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/examples/basic_operations/main.rs b/examples/basic_operations/main.rs index ba26f0a..1908bdd 100644 --- a/examples/basic_operations/main.rs +++ b/examples/basic_operations/main.rs @@ -58,7 +58,7 @@ fn main() { Err(err) => panic!("Oops: {:?}", err), } - let mut first_doc_id = String::from(""); + let mut first_doc_id: Option = None; if db_initialized { // let's add some docs @@ -66,8 +66,7 @@ fn main() { Ok(resp) => { println!("Bulk docs completed"); - let first_doc = resp.first().unwrap().clone(); - first_doc_id = first_doc.id.unwrap_or(String::from("")); + first_doc_id = resp.first().unwrap().clone().id; for r in resp { println!("Id: {}, OK?: {}", r.id.unwrap_or("--".to_string()), r.ok.unwrap_or(false)) @@ -79,9 +78,9 @@ fn main() { println!("---"); - if first_doc_id != "" { + if first_doc_id.is_some() { // we have an id of the first document we've just inserted - match db.get(first_doc_id) { + match db.get(first_doc_id.unwrap()) { Ok(doc) => { println!("First document: {}", doc.get_data().to_string()) } Err(err) => println!("Oops: {:?}", err), } From 19f16042ee29f274e05b9db2ee3a71ba76a83751 Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Tue, 4 Feb 2020 07:48:00 +0100 Subject: [PATCH 10/70] Return an error in case a document can not be found (404). 
--- src/database.rs | 6 +++++- src/error.rs | 4 ++-- 2 files changed, 7 insertions(+), 3 deletions(-) diff --git a/src/database.rs b/src/database.rs index 93191f3..dd7a45c 100644 --- a/src/database.rs +++ b/src/database.rs @@ -116,7 +116,11 @@ impl Database { pub fn get(&self, id: DocumentId) -> Result { let response = self._client.get(self.create_document_path(id), None)?.send()?; - Ok(Document::new(from_reader(response)?)) + match response.status() { + StatusCode::OK => Ok(Document::new(from_reader(response)?)), + StatusCode::NOT_FOUND => Err(CouchError::new("Document was not found".to_string(), StatusCode::NOT_FOUND)), + _ => Err(CouchError::new("Internal error".to_string(), response.status())), + } } /// Gets documents in bulk with provided IDs list diff --git a/src/error.rs b/src/error.rs index 65ead1d..9a72c5a 100644 --- a/src/error.rs +++ b/src/error.rs @@ -6,8 +6,8 @@ use std::fmt; // implementation, or do something in between. #[derive(Debug, Clone)] pub struct CouchError { - status: reqwest::StatusCode, - message: String, + pub status: reqwest::StatusCode, + pub message: String, } impl CouchError { From 8460189e6159fb70c5fbfd45460a37a702930655 Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Tue, 4 Feb 2020 07:48:21 +0100 Subject: [PATCH 11/70] Example on how to work with typed documents. 
--- examples/typed_documents/main.rs | 69 ++++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 examples/typed_documents/main.rs diff --git a/examples/typed_documents/main.rs b/examples/typed_documents/main.rs new file mode 100644 index 0000000..7d09ba1 --- /dev/null +++ b/examples/typed_documents/main.rs @@ -0,0 +1,69 @@ +extern crate sofa; + +use sofa::types::document::DocumentId; +use serde::{Serialize, Deserialize}; +use reqwest::StatusCode; + +/// Update DB_HOST to point to your running Couch instance +const DB_HOST: &'static str = "http://192.168.64.5:5984"; +const TEST_DB: &'static str = "test_db"; + +#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] +pub struct TestDoc { + /// _ids are are the only unique enforced value within CouchDB so you might as well make use of this. + /// CouchDB stores its documents in a B+ tree. Each additional or updated document is stored as + /// a leaf node, and may require re-writing intermediary and parent nodes. You may be able to take + /// advantage of sequencing your own ids more effectively than the automatically generated ids if + /// you can arrange them to be sequential yourself. + pub _id: DocumentId, + + /// Document Revision, provided by CouchDB, helps negotiating conflicts + #[serde(skip_serializing)] + pub _rev: String, + + pub first_name: String, + pub last_name: String, +} + +fn main() { + println!("Connecting..."); + + // Prepare the Sofa client + let client = sofa::Client::new(DB_HOST).unwrap(); + + // This command gets a reference to an existing database, or it creates a new one when it does + // not yet exist. 
+ let db = client.db(TEST_DB).unwrap(); + + let td = TestDoc { + _id: "1234".to_string(), + _rev: "".to_string(), + first_name: "John".to_string(), + last_name: "Doe".to_string(), + }; + + // check if the document already exists + match db.get("1234".to_string()) { + Ok(existing) => { + println!("Document has been previously created with Rev: {}", existing._rev); + let e: TestDoc = serde_json::from_value(existing.get_data()).unwrap(); + println!("Name: {} {}", e.first_name, e.last_name); + }, + Err(e) => { + match e.status { + StatusCode::NOT_FOUND => { + // create the document + match db.create(serde_json::to_value(td).unwrap()) { + Ok(r) => println!("Document was created with ID: {} and Rev: {}", r._id, r._rev), + Err(err) => println!("Oops: {:?}", err), + } + } + _ => { + println!("Unexpected error: {:?}", e); + } + } + } + } + + println!("All operations are done") +} From 710fbef68bd1be459b5904bd5cd0519280f92971 Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Tue, 4 Feb 2020 07:52:08 +0100 Subject: [PATCH 12/70] Updated comment --- examples/typed_documents/main.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/typed_documents/main.rs b/examples/typed_documents/main.rs index 7d09ba1..9c06c88 100644 --- a/examples/typed_documents/main.rs +++ b/examples/typed_documents/main.rs @@ -14,7 +14,7 @@ pub struct TestDoc { /// CouchDB stores its documents in a B+ tree. Each additional or updated document is stored as /// a leaf node, and may require re-writing intermediary and parent nodes. You may be able to take /// advantage of sequencing your own ids more effectively than the automatically generated ids if - /// you can arrange them to be sequential yourself. + /// you can arrange them to be sequential yourself. 
(https://docs.couchdb.org/en/stable/best-practices/documents.html) pub _id: DocumentId, /// Document Revision, provided by CouchDB, helps negotiating conflicts From ab5e1840eb585d88bbf47f4a1f2b4d2c16757725 Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Tue, 4 Feb 2020 15:36:16 +0100 Subject: [PATCH 13/70] Create a client with a custom timeout. Default set to 10 seconds. --- src/client.rs | 24 +++++++++++++++--------- 1 file changed, 15 insertions(+), 9 deletions(-) diff --git a/src/client.rs b/src/client.rs index da3fc5b..07ebe1f 100644 --- a/src/client.rs +++ b/src/client.rs @@ -28,34 +28,40 @@ pub struct Client { _client: reqwest::blocking::Client, dbs: Vec<&'static str>, _gzip: bool, - _timeout: u8, + _timeout: u64, pub uri: String, - pub db_prefix: String + pub db_prefix: String, } impl Client { - /// new creates a new Couch client. The URI has to be in this format: http://hostname:5984, - /// for example: http://192.168.64.5:5984 + /// new creates a new Couch client with a default timeout of 5 seconds. + /// The URI has to be in this format: http://hostname:5984, for example: http://192.168.64.5:5984 pub fn new(uri: &str) -> Result { + Client::new_with_timeout(uri, 10) + } + + /// new_with_timeout creates a new Couch client. The URI has to be in this format: http://hostname:5984, + /// timeout is in seconds. 
+ pub fn new_with_timeout(uri: &str, timeout: u64) -> Result { let client = reqwest::blocking::Client::builder() .gzip(true) - .timeout(Duration::new(4, 0)) + .timeout(Duration::new(timeout, 0)) .build()?; Ok(Client { _client: client, uri: uri.to_string(), _gzip: true, - _timeout: 4, + _timeout: timeout, dbs: Vec::new(), - db_prefix: String::new() + db_prefix: String::new(), }) } fn create_client(&self) -> Result { let client = reqwest::blocking::Client::builder() .gzip(self._gzip) - .timeout(Duration::new(self._timeout as u64, 0)) + .timeout(Duration::new(self._timeout, 0)) .build()?; Ok(client) @@ -82,7 +88,7 @@ impl Client { Ok(self) } - pub fn timeout(&mut self, to: u8) -> Result<&Self, CouchError> { + pub fn timeout(&mut self, to: u64) -> Result<&Self, CouchError> { self._timeout = to; self._client = self.create_client()?; From c62c576b4b2140a5d62e1d1b4e1b43188475ebaa Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Tue, 4 Feb 2020 16:34:09 +0100 Subject: [PATCH 14/70] Find with an optional bookmark. 
--- src/database.rs | 3 ++- src/document.rs | 5 ++++- src/types/find.rs | 48 +++++++++++++++++++++++++++++++++++++++++------ 3 files changed, 48 insertions(+), 8 deletions(-) diff --git a/src/database.rs b/src/database.rs index dd7a45c..a7d0ac1 100644 --- a/src/database.rs +++ b/src/database.rs @@ -204,6 +204,7 @@ impl Database { let status = response.status(); let data: FindResult = from_reader(response)?; + if let Some(doc_val) = data.docs { let documents: Vec = doc_val .into_iter() @@ -215,7 +216,7 @@ impl Database { .map(|v| Document::new(v.clone())) .collect(); - Ok(DocumentCollection::new_from_documents(documents)) + Ok(DocumentCollection::new_from_documents(documents, data.bookmark.unwrap_or("".to_string()))) } else if let Some(err) = data.error { Err(CouchError::new(err, status).into()) } else { diff --git a/src/document.rs b/src/document.rs index 56e201b..a17b3df 100644 --- a/src/document.rs +++ b/src/document.rs @@ -152,6 +152,7 @@ pub struct DocumentCollection { pub offset: u32, pub rows: Vec, pub total_rows: u32, + pub bookmark: Option, } impl DocumentCollection { @@ -173,16 +174,18 @@ impl DocumentCollection { offset: json_extr!(doc["offset"]), total_rows: items.len() as u32, rows: items, + bookmark: Option::None, } } - pub fn new_from_documents(docs: Vec) -> DocumentCollection { + pub fn new_from_documents(docs: Vec, bookmark: String) -> DocumentCollection { let len = docs.len() as u32; DocumentCollection { offset: 0, total_rows: len, rows: docs.into_iter().map(|d| DocumentCollectionItem::new(d)).collect(), + bookmark: Option::from(bookmark), } } diff --git a/src/types/find.rs b/src/types/find.rs index c0f661f..929be8f 100644 --- a/src/types/find.rs +++ b/src/types/find.rs @@ -1,13 +1,13 @@ -use serde_json::{Value}; use std::collections::HashMap; use serde::{Serialize, Deserialize}; +use serde_json::{Value, json}; /// Sort direction abstraction #[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] #[serde(untagged)] pub enum SortDirection { 
Desc, - Asc + Asc, } impl From for SortDirection { @@ -27,7 +27,7 @@ pub type SortSpecContent = HashMap; #[serde(untagged)] pub enum SortSpec { Simple(String), - Complex(SortSpecContent) + Complex(SortSpecContent), } /// Index spec abstraction @@ -35,18 +35,31 @@ pub enum SortSpec { #[serde(untagged)] pub enum IndexSpec { DesignDocument(String), - IndexName((String, String)) + IndexName((String, String)), } /// Find query abstraction #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct FindQuery { pub selector: Value, + + #[serde(skip_serializing_if = "Option::is_none")] pub limit: Option, + + #[serde(skip_serializing_if = "Option::is_none")] pub skip: Option, + + #[serde(skip_serializing_if = "Option::is_none")] pub sort: Option, + + #[serde(skip_serializing_if = "Option::is_none")] pub fields: Option>, - pub use_index: Option + + #[serde(skip_serializing_if = "Option::is_none")] + pub use_index: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub bookmark: Option, } /// Find result abstraction @@ -56,6 +69,7 @@ pub struct FindResult { pub warning: Option, pub error: Option, pub reason: Option, + pub bookmark: Option, } //todo: include status on structs @@ -70,5 +84,27 @@ pub struct ExplainResult { pub limit: u32, pub skip: u64, pub fields: Vec, - pub range: Value + pub range: Value, } + +/// Returns all documents +#[macro_export] +macro_rules! 
find_all_selector { + () => { + json!({"selector" : { "_id" : {"$ne": "null"}}}) + } + } + +impl FindQuery { + pub fn find_all() -> FindQuery { + FindQuery { + selector: json!({ "_id" : {"$ne": null}}), + limit: None, + skip: None, + sort: None, + fields: None, + use_index: None, + bookmark: None, + } + } +} \ No newline at end of file From a078ae2958f1a2dcaffcb39117fa137ef3d2a2b0 Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Tue, 4 Feb 2020 16:36:24 +0100 Subject: [PATCH 15/70] Tested with CouchDB 2.3 --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 63a51ea..f90c2db 100644 --- a/README.md +++ b/README.md @@ -42,7 +42,7 @@ No async I/O (yet), uses a mix of Reqwest and Serde under the hood, with a few n **NOT 1.0 YET, so expect changes** -**Supports CouchDB 2.0 and up.** +**Supports CouchDB 2.3.0 and up.** Be sure to check [CouchDB's Documentation](http://docs.couchdb.org/en/latest/index.html) in detail to see what's possible. 
From 093521ca32edc4457ca3348ef898f764f00b9c7c Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Tue, 4 Feb 2020 21:13:04 +0100 Subject: [PATCH 16/70] Fix optional bookmark --- src/database.rs | 10 +++++++++- src/document.rs | 4 ++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/database.rs b/src/database.rs index a7d0ac1..18251de 100644 --- a/src/database.rs +++ b/src/database.rs @@ -216,7 +216,15 @@ impl Database { .map(|v| Document::new(v.clone())) .collect(); - Ok(DocumentCollection::new_from_documents(documents, data.bookmark.unwrap_or("".to_string()))) + let mut bookmark = Option::None; + let returned_bookmark = data.bookmark.unwrap_or_default(); + + if returned_bookmark != "nil" && returned_bookmark != "" { + // a valid bookmark has been returned + bookmark = Option::from(returned_bookmark); + } + + Ok(DocumentCollection::new_from_documents(documents, bookmark)) } else if let Some(err) = data.error { Err(CouchError::new(err, status).into()) } else { diff --git a/src/document.rs b/src/document.rs index a17b3df..7ba1107 100644 --- a/src/document.rs +++ b/src/document.rs @@ -178,14 +178,14 @@ impl DocumentCollection { } } - pub fn new_from_documents(docs: Vec, bookmark: String) -> DocumentCollection { + pub fn new_from_documents(docs: Vec, bookmark: Option) -> DocumentCollection { let len = docs.len() as u32; DocumentCollection { offset: 0, total_rows: len, rows: docs.into_iter().map(|d| DocumentCollectionItem::new(d)).collect(), - bookmark: Option::from(bookmark), + bookmark, } } From 3d8baa91744fddc8749a975bfae75e36b98bde6b Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Tue, 4 Feb 2020 21:20:19 +0100 Subject: [PATCH 17/70] Use localhost in the example --- examples/basic_operations/main.rs | 2 +- examples/typed_documents/main.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/basic_operations/main.rs b/examples/basic_operations/main.rs index 1908bdd..6cd7a77 100644 --- a/examples/basic_operations/main.rs +++ 
b/examples/basic_operations/main.rs @@ -17,7 +17,7 @@ extern crate sofa; use serde_json::{json, Value}; /// Update DB_HOST to point to your running Couch instance -const DB_HOST: &'static str = "http://192.168.64.5:5984"; +const DB_HOST: &'static str = "http://localhost:5984"; const TEST_DB: &'static str = "test_db"; /// test_docs generates a bunch of documents that can be used in the _bulk_docs operation. diff --git a/examples/typed_documents/main.rs b/examples/typed_documents/main.rs index 9c06c88..baffc9a 100644 --- a/examples/typed_documents/main.rs +++ b/examples/typed_documents/main.rs @@ -5,7 +5,7 @@ use serde::{Serialize, Deserialize}; use reqwest::StatusCode; /// Update DB_HOST to point to your running Couch instance -const DB_HOST: &'static str = "http://192.168.64.5:5984"; +const DB_HOST: &'static str = "http://localhost:5984"; const TEST_DB: &'static str = "test_db"; #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] From c95649907c6f25a6ad00914a3c1b22150fac77d7 Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Wed, 5 Feb 2020 07:30:09 +0100 Subject: [PATCH 18/70] Use replace --- src/database.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/database.rs b/src/database.rs index 18251de..40ca143 100644 --- a/src/database.rs +++ b/src/database.rs @@ -221,7 +221,7 @@ impl Database { if returned_bookmark != "nil" && returned_bookmark != "" { // a valid bookmark has been returned - bookmark = Option::from(returned_bookmark); + bookmark.replace(returned_bookmark); } Ok(DocumentCollection::new_from_documents(documents, bookmark)) From 21b0986f5edb8e898762b84e972cff7e073aaedf Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Wed, 5 Feb 2020 10:07:01 +0100 Subject: [PATCH 19/70] Support for batched reads. 
--- src/database.rs | 51 ++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 50 insertions(+), 1 deletion(-) diff --git a/src/database.rs b/src/database.rs index 40ca143..7013499 100644 --- a/src/database.rs +++ b/src/database.rs @@ -8,8 +8,9 @@ use crate::document::{Document, DocumentCollection}; use crate::error::CouchError; use crate::client::Client; use crate::types::document::{DocumentId, DocumentCreatedResult}; -use crate::types::find::FindResult; +use crate::types::find::{FindResult, FindQuery}; use crate::types::index::{IndexFields, IndexCreated, DatabaseIndexList}; +use std::sync::mpsc::{Sender}; /// Database holds the logic of making operations on a CouchDB Database /// (sometimes called Collection in other NoSQL flavors such as MongoDB). @@ -179,6 +180,54 @@ impl Database { self.get_all_params(None) } + /// Gets all documents in the database, using bookmarks to iterate through all the documents. + /// Results are returned through an mpcs channel for async processing. Use this for very large + /// databases only. Batch size can be requested. A value of 0, means the default batch_size of + /// 1000 is used. max_results of 0 means all documents will be returned. A given max_results is + /// always rounded *up* to the nearest multiplication of batch_size. 
+ pub fn get_all_batched(&self, tx: Sender, batch_size: u64, max_results: u64) -> u64 { + let mut bookmark = Option::None; + let limit = if batch_size > 0 { + batch_size + } else { + 1000 + }; + + let mut results: u64 = 0; + + loop { + let mut query = FindQuery::find_all(); + + query.limit = Option::Some(limit); + query.bookmark = bookmark.clone(); + + let all_docs = self.find( + serde_json::to_value(query).unwrap()).unwrap(); + + if all_docs.total_rows == 0 { + // no more rows + break; + } + + if all_docs.bookmark.is_some() && all_docs.bookmark != bookmark { + bookmark.replace(all_docs.bookmark.clone().unwrap_or_default()); + } else { + // no bookmark, break the query loop + break; + } + + results += all_docs.total_rows.clone() as u64; + + tx.send(all_docs).unwrap(); + + if max_results > 0 && results >= max_results { + break; + } + } + + results + } + /// Gets all the documents in database, with applied parameters. Parameters description can be found here: http://docs.couchdb.org/en/latest/api/ddoc/views.html#api-ddoc-view pub fn get_all_params(&self, params: Option>) -> Result { let mut options; From b1399edb340d791cd77ced2a67d84556d87e6ca9 Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Wed, 5 Feb 2020 11:21:34 +0100 Subject: [PATCH 20/70] Use json unmarshalling from reqwest. 
--- src/database.rs | 21 ++++++++++----------- 1 file changed, 10 insertions(+), 11 deletions(-) diff --git a/src/database.rs b/src/database.rs index 7013499..edbbf2e 100644 --- a/src/database.rs +++ b/src/database.rs @@ -3,7 +3,7 @@ use std::collections::HashMap; use reqwest::StatusCode; use serde_json; -use serde_json::{from_reader, to_string, Value, json}; +use serde_json::{to_string, Value, json}; use crate::document::{Document, DocumentCollection}; use crate::error::CouchError; use crate::client::Client; @@ -118,7 +118,7 @@ impl Database { let response = self._client.get(self.create_document_path(id), None)?.send()?; match response.status() { - StatusCode::OK => Ok(Document::new(from_reader(response)?)), + StatusCode::OK => Ok(Document::new(response.json()?)), StatusCode::NOT_FOUND => Err(CouchError::new("Document was not found".to_string(), StatusCode::NOT_FOUND)), _ => Err(CouchError::new("Internal error".to_string(), response.status())), } @@ -144,7 +144,7 @@ impl Database { body.insert(s!("docs"), raw_docs); let response = self._client.post(self.create_document_path("_bulk_docs".into()), to_string(&body)?)?.send()?; - let data: Vec = from_reader(response)?; + let data: Vec = response.json()?; Ok(data) } @@ -172,7 +172,7 @@ impl Database { .body(to_string(&body)?) .send()?; - Ok(DocumentCollection::new(from_reader(response)?)) + Ok(DocumentCollection::new(response.json()?)) } /// Gets all the documents in database @@ -243,7 +243,7 @@ impl Database { .get(self.create_document_path("_all_docs".into()), Some(options))? .send()?; - Ok(DocumentCollection::new(from_reader(response)?)) + Ok(DocumentCollection::new(response.json()?)) } /// Finds a document in the database through a Mango query. 
Parameters here http://docs.couchdb.org/en/latest/api/database/find.html @@ -251,8 +251,7 @@ impl Database { let path = self.create_document_path("_find".into()); let response = self._client.post(path, js!(¶ms))?.send()?; let status = response.status(); - - let data: FindResult = from_reader(response)?; + let data: FindResult = response.json().unwrap(); if let Some(doc_val) = data.docs { let documents: Vec = doc_val @@ -291,7 +290,7 @@ impl Database { .send()?; let status = response.status(); - let data: DocumentCreatedResult = from_reader(response)?; + let data: DocumentCreatedResult = response.json()?; match data.ok { Some(true) => { @@ -312,7 +311,7 @@ impl Database { let response = self._client.post(self.name.clone(), to_string(&raw_doc)?)?.send()?; let status = response.status(); - let data: DocumentCreatedResult = from_reader(response)?; + let data: DocumentCreatedResult = response.json()?; match data.ok { Some(true) => { @@ -378,7 +377,7 @@ impl Database { .send()?; let status = response.status(); - let data: IndexCreated = from_reader(response)?; + let data: IndexCreated = response.json()?; if data.error.is_some() { let err = data.error.unwrap_or(s!("unspecified error")); @@ -394,7 +393,7 @@ impl Database { .get(self.create_document_path("_index".into()), None)? .send()?; - Ok(from_reader(response)?) + Ok(response.json()?) } /// Method to ensure an index is created on the database with the following From 548ec03f9de925a9ffc399e271fa9773b673c70a Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Wed, 5 Feb 2020 11:29:57 +0100 Subject: [PATCH 21/70] Asynchronous batch read example. 
--- Cargo.toml | 1 + examples/async_batch_read/main.rs | 58 +++++++++++++++++++++++++++++++ 2 files changed, 59 insertions(+) create mode 100644 examples/async_batch_read/main.rs diff --git a/Cargo.toml b/Cargo.toml index a95e988..0a151a5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -27,3 +27,4 @@ features = ["json", "gzip", "cookies", "blocking"] [dev-dependencies] pretty_assertions = "0.5" +tokio = { version = "0.2.11", features = ["full"] } diff --git a/examples/async_batch_read/main.rs b/examples/async_batch_read/main.rs new file mode 100644 index 0000000..1ed1120 --- /dev/null +++ b/examples/async_batch_read/main.rs @@ -0,0 +1,58 @@ +extern crate sofa; + +use std::time::SystemTime; +use std::sync::mpsc::{Sender, Receiver}; +use std::sync::mpsc; +use sofa::document::DocumentCollection; +use std::fs::File; +use std::io::prelude::*; + +const DB_HOST: &'static str = "http://127.0.0.1:5984"; +const TEST_DB: &'static str = "test_db"; + +#[tokio::main] +async fn main() { + println!("Connecting..."); + let now = SystemTime::now(); + + // Create a sender and receiver channel pair + let (tx, rx): (Sender, Receiver) = mpsc::channel(); + + // Spawn a separate thread to retrieve the batches from Couch + let t = tokio::task::spawn_blocking(move || { + let client = sofa::Client::new_with_timeout(DB_HOST, 120).unwrap(); + let db = client.db(TEST_DB).unwrap(); + + db.get_all_batched(tx, 0, 0); + }); + + // Open a file for writing + let mut file = File::create("test_db.json").unwrap(); + + // Loop until the receiving channel is closed. + loop { + match rx.recv() { + Ok(all_docs) => { + println!("Received {} docs", all_docs.total_rows); + + // unmarshal the documents and write them to a file. + // (there is probably a more efficient way of doing this...) 
+ for row in all_docs.rows { + file.write_all(serde_json::to_string(&row.doc).unwrap().as_bytes()).unwrap(); + } + } + Err(_e) => { + break; + } + } + } + + // Make sure the file is written before exiting + file.sync_all().unwrap(); + + let elapsed = now.elapsed().unwrap_or_default(); + println!("{} ms", elapsed.as_millis()); + + // Wait for the spawned task to finish (should be done by now). + t.await.unwrap(); +} From 36a38923a0170bdd7c7425b01be507e597e52fcd Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Wed, 5 Feb 2020 12:52:24 +0100 Subject: [PATCH 22/70] Updated dependency in README --- README.md | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/README.md b/README.md index f90c2db..fbb4d7a 100644 --- a/README.md +++ b/README.md @@ -13,15 +13,10 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) ## Installation -If you want to use this particular fork, clone the project locally: - -```bash -git clone https://github.com/mibes/sofa.git -``` -and include it in your Cargo.toml file: +If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml -[dependencies] -sofa = { path = "../sofa" } +[dependencies.sofa] +git = "https://github.com/mibes/sofa.git" ``` If you want to continue to use the "old" 0.6 version use this dependency instead: From faae7073422789d00ee45ff6bdbe0e3001873245 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 8 Feb 2020 14:06:02 +0100 Subject: [PATCH 23/70] Async operations --- Cargo.toml | 2 + examples/async_batch_read/main.rs | 6 +- examples/basic_operations/main.rs | 11 +- examples/typed_documents/main.rs | 9 +- src/client.rs | 42 ++++---- src/database.rs | 168 +++++++++++++++--------------- src/document.rs | 4 +- src/lib.rs | 110 +++++++++---------- 8 files changed, 175 insertions(+), 177 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 0a151a5..c6d1ace 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -20,6 +20,8 @@ include = [ serde = { version = "1.0", features = ["derive"] 
} serde_json = "1.0.45" url = "2.1.1" +tokio = { version = "0.2.11", features = ["full"] } +async-trait = "0.1.24" [dependencies.reqwest] version = "0.10.1" diff --git a/examples/async_batch_read/main.rs b/examples/async_batch_read/main.rs index 1ed1120..8010b6e 100644 --- a/examples/async_batch_read/main.rs +++ b/examples/async_batch_read/main.rs @@ -19,11 +19,11 @@ async fn main() { let (tx, rx): (Sender, Receiver) = mpsc::channel(); // Spawn a separate thread to retrieve the batches from Couch - let t = tokio::task::spawn_blocking(move || { + let t = tokio::spawn(async move { let client = sofa::Client::new_with_timeout(DB_HOST, 120).unwrap(); - let db = client.db(TEST_DB).unwrap(); + let db = client.db(TEST_DB).await.unwrap(); - db.get_all_batched(tx, 0, 0); + db.get_all_batched(tx, 0, 0).await; }); // Open a file for writing diff --git a/examples/basic_operations/main.rs b/examples/basic_operations/main.rs index 6cd7a77..57dadaf 100644 --- a/examples/basic_operations/main.rs +++ b/examples/basic_operations/main.rs @@ -31,7 +31,8 @@ fn test_docs(amount: i32) -> Vec { result } -fn main() { +#[tokio::main] +async fn main() { println!("Connecting..."); // Prepare the Sofa client @@ -40,11 +41,11 @@ fn main() { // This command gets a reference to an existing database, or it creates a new one when it does // not yet exist. - let db = client.db(TEST_DB).unwrap(); + let db = client.db(TEST_DB).await.unwrap(); // List the existing databases. The db_initialized is superfluous, since we just created it in // the previous step. It is for educational purposes only. 
- match client.list_dbs() { + match client.list_dbs().await { Ok(dbs) => { println!("Existing databases:"); for db in dbs { @@ -62,7 +63,7 @@ fn main() { if db_initialized { // let's add some docs - match db.bulk_docs(test_docs(100)) { + match db.bulk_docs(test_docs(100)).await { Ok(resp) => { println!("Bulk docs completed"); @@ -80,7 +81,7 @@ fn main() { if first_doc_id.is_some() { // we have an id of the first document we've just inserted - match db.get(first_doc_id.unwrap()) { + match db.get(first_doc_id.unwrap()).await { Ok(doc) => { println!("First document: {}", doc.get_data().to_string()) } Err(err) => println!("Oops: {:?}", err), } diff --git a/examples/typed_documents/main.rs b/examples/typed_documents/main.rs index baffc9a..1706a55 100644 --- a/examples/typed_documents/main.rs +++ b/examples/typed_documents/main.rs @@ -25,7 +25,8 @@ pub struct TestDoc { pub last_name: String, } -fn main() { +#[tokio::main] +async fn main() { println!("Connecting..."); // Prepare the Sofa client @@ -33,7 +34,7 @@ fn main() { // This command gets a reference to an existing database, or it creates a new one when it does // not yet exist. 
- let db = client.db(TEST_DB).unwrap(); + let db = client.db(TEST_DB).await.unwrap(); let td = TestDoc { _id: "1234".to_string(), @@ -43,7 +44,7 @@ fn main() { }; // check if the document already exists - match db.get("1234".to_string()) { + match db.get("1234".to_string()).await { Ok(existing) => { println!("Document has been previously created with Rev: {}", existing._rev); let e: TestDoc = serde_json::from_value(existing.get_data()).unwrap(); @@ -53,7 +54,7 @@ fn main() { match e.status { StatusCode::NOT_FOUND => { // create the document - match db.create(serde_json::to_value(td).unwrap()) { + match db.create(serde_json::to_value(td).unwrap()).await { Ok(r) => println!("Document was created with ID: {} and Rev: {}", r._id, r._rev), Err(err) => println!("Oops: {:?}", err), } diff --git a/src/client.rs b/src/client.rs index 07ebe1f..d7dde44 100644 --- a/src/client.rs +++ b/src/client.rs @@ -1,8 +1,6 @@ use std::collections::HashMap; use std::time::Duration; -use serde_json::from_reader; - -use reqwest::blocking::RequestBuilder; +use reqwest::RequestBuilder; use reqwest::{self, Url, Method, StatusCode}; use reqwest::header::{HeaderMap, HeaderValue, USER_AGENT, CONTENT_TYPE, REFERER}; use crate::database::Database; @@ -25,7 +23,7 @@ fn construct_json_headers(uri: Option<&str>) -> HeaderMap { /// It is also responsible for the creation/access/destruction of databases. #[derive(Debug, Clone)] pub struct Client { - _client: reqwest::blocking::Client, + _client: reqwest::Client, dbs: Vec<&'static str>, _gzip: bool, _timeout: u64, @@ -43,7 +41,7 @@ impl Client { /// new_with_timeout creates a new Couch client. The URI has to be in this format: http://hostname:5984, /// timeout is in seconds. 
pub fn new_with_timeout(uri: &str, timeout: u64) -> Result { - let client = reqwest::blocking::Client::builder() + let client = reqwest::Client::builder() .gzip(true) .timeout(Duration::new(timeout, 0)) .build()?; @@ -58,8 +56,8 @@ impl Client { }) } - fn create_client(&self) -> Result { - let client = reqwest::blocking::Client::builder() + fn create_client(&self) -> Result { + let client = reqwest::Client::builder() .gzip(self._gzip) .timeout(Duration::new(self._timeout, 0)) .build()?; @@ -95,9 +93,9 @@ impl Client { Ok(self) } - pub fn list_dbs(&self) -> Result, CouchError> { - let response = self.get(String::from("/_all_dbs"), None)?.send()?; - let data = response.json()?; + pub async fn list_dbs(&self) -> Result, CouchError> { + let response = self.get(String::from("/_all_dbs"), None)?.send().await?; + let data = response.json().await?; Ok(data) } @@ -106,7 +104,7 @@ impl Client { self.db_prefix.clone() + dbname } - pub fn db(&self, dbname: &'static str) -> Result { + pub async fn db(&self, dbname: &'static str) -> Result { let name = self.build_dbname(dbname); let db = Database::new(name.clone(), self.clone()); @@ -115,15 +113,15 @@ impl Client { let head_response = self._client.head(&path) .headers(construct_json_headers(None)) - .send()?; + .send().await?; match head_response.status() { StatusCode::OK => Ok(db), - _ => self.make_db(dbname), + _ => self.make_db(dbname).await, } } - pub fn make_db(&self, dbname: &'static str) -> Result { + pub async fn make_db(&self, dbname: &'static str) -> Result { let name = self.build_dbname(dbname); let db = Database::new(name.clone(), self.clone()); @@ -132,10 +130,10 @@ impl Client { let put_response = self._client.put(&path) .headers(construct_json_headers(None)) - .send()?; + .send().await?; let status = put_response.status(); - let s: CouchResponse = from_reader(put_response)?; + let s: CouchResponse = put_response.json().await?; match s.ok { Some(true) => Ok(db), @@ -146,23 +144,23 @@ impl Client { } } - pub fn 
destroy_db(&self, dbname: &'static str) -> Result { + pub async fn destroy_db(&self, dbname: &'static str) -> Result { let path = self.create_path(self.build_dbname(dbname), None)?; let response = self._client.delete(&path) .headers(construct_json_headers(None)) - .send()?; + .send().await?; - let s: CouchResponse = from_reader(response)?; + let s: CouchResponse = response.json().await?; Ok(s.ok.unwrap_or(false)) } - pub fn check_status(&self) -> Result { + pub async fn check_status(&self) -> Result { let response = self._client.get(&self.uri) .headers(construct_json_headers(None)) - .send()?; + .send().await?; - let status = from_reader(response)?; + let status = response.json().await?; Ok(status) } diff --git a/src/database.rs b/src/database.rs index edbbf2e..de37580 100644 --- a/src/database.rs +++ b/src/database.rs @@ -51,82 +51,80 @@ impl Database { } /// Launches the compact process - pub fn compact(&self) -> bool { + pub async fn compact(&self) -> bool { let mut path: String = self.name.clone(); path.push_str("/_compact"); let request = self._client.post(path, "".into()); - request - .and_then(|req| { - Ok(req.send() - .and_then(|res| Ok(res.status() == StatusCode::ACCEPTED)) - .unwrap_or(false)) - }) - .unwrap_or(false) + if let Ok(req) = request { + if let Ok(res) = req.send().await { + return res.status() == StatusCode::ACCEPTED; + } + } + + return false; } /// Starts the compaction of all views - pub fn compact_views(&self) -> bool { + pub async fn compact_views(&self) -> bool { let mut path: String = self.name.clone(); path.push_str("/_view_cleanup"); let request = self._client.post(path, "".into()); - request - .and_then(|req| { - Ok(req.send() - .and_then(|res| Ok(res.status() == StatusCode::ACCEPTED)) - .unwrap_or(false)) - }) - .unwrap_or(false) + if let Ok(req) = request { + if let Ok(res) = req.send().await { + return res.status() == StatusCode::ACCEPTED; + } + } + + return false; } /// Starts the compaction of a given index - pub fn 
compact_index(&self, index: &'static str) -> bool { + pub async fn compact_index(&self, index: &'static str) -> bool { let request = self._client.post(self.create_compact_path(index), "".into()); - request - .and_then(|req| { - Ok(req.send() - .and_then(|res| Ok(res.status() == StatusCode::ACCEPTED)) - .unwrap_or(false)) - }) - .unwrap_or(false) + if let Ok(req) = request { + if let Ok(res) = req.send().await { + return res.status() == StatusCode::ACCEPTED; + } + } + + return false; } /// Checks if a document ID exists - pub fn exists(&self, id: DocumentId) -> bool { + pub async fn exists(&self, id: DocumentId) -> bool { let request = self._client.head(self.create_document_path(id), None); - request - .and_then(|req| { - Ok(req.send() - .and_then(|res| { - Ok(match res.status() { - StatusCode::OK | StatusCode::NOT_MODIFIED => true, - _ => false, - }) - }) - .unwrap_or(false)) - }) - .unwrap_or(false) + if let Ok(req) = request { + if let Ok(res) = req.send().await { + return match res.status() { + StatusCode::OK | StatusCode::NOT_MODIFIED => true, + _ => false, + } + } + } + + return false; } /// Gets one document - pub fn get(&self, id: DocumentId) -> Result { - let response = self._client.get(self.create_document_path(id), None)?.send()?; + pub async fn get(&self, id: DocumentId) -> Result { + let response = self._client.get(self.create_document_path(id), None)?.send().await?; match response.status() { - StatusCode::OK => Ok(Document::new(response.json()?)), + StatusCode::OK => Ok(Document::new(response.json().await?)), StatusCode::NOT_FOUND => Err(CouchError::new("Document was not found".to_string(), StatusCode::NOT_FOUND)), _ => Err(CouchError::new("Internal error".to_string(), response.status())), } } /// Gets documents in bulk with provided IDs list - pub fn get_bulk(&self, ids: Vec) -> Result { - self.get_bulk_params(ids, None) + pub async fn get_bulk(&self, ids: Vec) -> Result { + self.get_bulk_params(ids, None).await } /// Each time a document is stored or 
updated in CouchDB, the internal B-tree is updated. @@ -139,18 +137,18 @@ impl Database { /// /// This endpoint can also be used to delete a set of documents by including "_deleted": true, in the document to be deleted. /// When deleting or updating, both _id and _rev are mandatory. - pub fn bulk_docs(&self, raw_docs: Vec) -> Result, CouchError> { + pub async fn bulk_docs(&self, raw_docs: Vec) -> Result, CouchError> { let mut body = HashMap::new(); body.insert(s!("docs"), raw_docs); - let response = self._client.post(self.create_document_path("_bulk_docs".into()), to_string(&body)?)?.send()?; - let data: Vec = response.json()?; + let response = self._client.post(self.create_document_path("_bulk_docs".into()), to_string(&body)?)?.send().await?; + let data: Vec = response.json().await?; Ok(data) } /// Gets documents in bulk with provided IDs list, with added params. Params description can be found here: Parameters description can be found here: http://docs.couchdb.org/en/latest/api/ddoc/views.html#api-ddoc-view - pub fn get_bulk_params( + pub async fn get_bulk_params( &self, ids: Vec, params: Option>, @@ -170,14 +168,14 @@ impl Database { let response = self._client .get(self.create_document_path("_all_docs".into()), Some(options))? .body(to_string(&body)?) - .send()?; + .send().await?; - Ok(DocumentCollection::new(response.json()?)) + Ok(DocumentCollection::new(response.json().await?)) } /// Gets all the documents in database - pub fn get_all(&self) -> Result { - self.get_all_params(None) + pub async fn get_all(&self) -> Result { + self.get_all_params(None).await } /// Gets all documents in the database, using bookmarks to iterate through all the documents. @@ -185,7 +183,7 @@ impl Database { /// databases only. Batch size can be requested. A value of 0, means the default batch_size of /// 1000 is used. max_results of 0 means all documents will be returned. A given max_results is /// always rounded *up* to the nearest multiplication of batch_size. 
- pub fn get_all_batched(&self, tx: Sender, batch_size: u64, max_results: u64) -> u64 { + pub async fn get_all_batched(&self, tx: Sender, batch_size: u64, max_results: u64) -> u64 { let mut bookmark = Option::None; let limit = if batch_size > 0 { batch_size @@ -202,7 +200,7 @@ impl Database { query.bookmark = bookmark.clone(); let all_docs = self.find( - serde_json::to_value(query).unwrap()).unwrap(); + serde_json::to_value(query).unwrap()).await.unwrap(); if all_docs.total_rows == 0 { // no more rows @@ -229,7 +227,7 @@ impl Database { } /// Gets all the documents in database, with applied parameters. Parameters description can be found here: http://docs.couchdb.org/en/latest/api/ddoc/views.html#api-ddoc-view - pub fn get_all_params(&self, params: Option>) -> Result { + pub async fn get_all_params(&self, params: Option>) -> Result { let mut options; if let Some(opts) = params { options = opts; @@ -241,17 +239,17 @@ impl Database { let response = self._client .get(self.create_document_path("_all_docs".into()), Some(options))? - .send()?; + .send().await?; - Ok(DocumentCollection::new(response.json()?)) + Ok(DocumentCollection::new(response.json().await?)) } /// Finds a document in the database through a Mango query. 
Parameters here http://docs.couchdb.org/en/latest/api/database/find.html - pub fn find(&self, params: Value) -> Result { + pub async fn find(&self, params: Value) -> Result { let path = self.create_document_path("_find".into()); - let response = self._client.post(path, js!(¶ms))?.send()?; + let response = self._client.post(path, js!(¶ms))?.send().await?; let status = response.status(); - let data: FindResult = response.json().unwrap(); + let data: FindResult = response.json().await.unwrap(); if let Some(doc_val) = data.docs { let documents: Vec = doc_val @@ -281,16 +279,16 @@ impl Database { } /// Updates a document - pub fn save(&self, doc: Document) -> Result { + pub async fn save(&self, doc: Document) -> Result { let id = doc._id.to_owned(); let raw = doc.get_data(); let response = self._client .put(self.create_document_path(id), to_string(&raw)?)? - .send()?; + .send().await?; let status = response.status(); - let data: DocumentCreatedResult = response.json()?; + let data: DocumentCreatedResult = response.json().await?; match data.ok { Some(true) => { @@ -307,11 +305,11 @@ impl Database { } /// Creates a document from a raw JSON document Value. - pub fn create(&self, raw_doc: Value) -> Result { - let response = self._client.post(self.name.clone(), to_string(&raw_doc)?)?.send()?; + pub async fn create(&self, raw_doc: Value) -> Result { + let response = self._client.post(self.name.clone(), to_string(&raw_doc)?)?.send().await?; let status = response.status(); - let data: DocumentCreatedResult = response.json()?; + let data: DocumentCreatedResult = response.json().await?; match data.ok { Some(true) => { @@ -339,7 +337,7 @@ impl Database { } /// Removes a document from the database. 
Returns success in a `bool` - pub fn remove(&self, doc: Document) -> bool { + pub async fn remove(&self, doc: Document) -> bool { let request = self._client.delete( self.create_document_path(doc._id.clone()), Some({ @@ -349,23 +347,21 @@ impl Database { }), ); - request - .and_then(|req| { - Ok(req.send() - .and_then(|res| { - Ok(match res.status() { - StatusCode::OK | StatusCode::ACCEPTED => true, - _ => false, - }) - }) - .unwrap_or(false)) - }) - .unwrap_or(false) + if let Ok(req) = request { + if let Ok(res) = req.send().await { + return match res.status() { + StatusCode::OK | StatusCode::ACCEPTED => true, + _ => false, + } + } + } + + return false; } /// Inserts an index in a naive way, if it already exists, will throw an /// `Err` - pub fn insert_index(&self, name: String, spec: IndexFields) -> Result { + pub async fn insert_index(&self, name: String, spec: IndexFields) -> Result { let response = self._client .post( self.create_document_path("_index".into()), @@ -374,10 +370,10 @@ impl Database { "index": spec })), )? - .send()?; + .send().await?; let status = response.status(); - let data: IndexCreated = response.json()?; + let data: IndexCreated = response.json().await?; if data.error.is_some() { let err = data.error.unwrap_or(s!("unspecified error")); @@ -388,19 +384,19 @@ impl Database { } /// Reads the database's indexes and returns them - pub fn read_indexes(&self) -> Result { + pub async fn read_indexes(&self) -> Result { let response = self._client .get(self.create_document_path("_index".into()), None)? - .send()?; + .send().await?; - Ok(response.json()?) + Ok(response.json().await?) } /// Method to ensure an index is created on the database with the following /// spec. Returns `true` when we created a new one, or `false` when the /// index was already existing. 
- pub fn ensure_index(&self, name: String, spec: IndexFields) -> Result { - let db_indexes = self.read_indexes()?; + pub async fn ensure_index(&self, name: String, spec: IndexFields) -> Result { + let db_indexes = self.read_indexes().await?; // We look for our index for i in db_indexes.indexes.into_iter() { @@ -411,7 +407,7 @@ impl Database { } // Let's create it then - let _ = self.insert_index(name, spec)?; + let _ = self.insert_index(name, spec).await?; // Created and alright Ok(true) diff --git a/src/document.rs b/src/document.rs index 7ba1107..7d465fd 100644 --- a/src/document.rs +++ b/src/document.rs @@ -69,7 +69,7 @@ impl Document { /// Recursively populates field (must be an array of IDs from another /// database) with provided database documents - pub fn populate(&mut self, field: &String, db: Database) -> &Self { + pub async fn populate(&mut self, field: &String, db: Database) -> &Self { let ref val = self[field].clone(); if *val == Value::Null { return self; @@ -81,7 +81,7 @@ impl Document { .map(|v| s!(v.as_str().unwrap_or(""))) .collect(); - let data = db.get_bulk(ids).and_then(|docs| Ok(docs.get_data())); + let data = db.get_bulk(ids).await.and_then(|docs| Ok(docs.get_data())); match data { Ok(data) => { diff --git a/src/lib.rs b/src/lib.rs index f9b658e..cb96367 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -146,32 +146,32 @@ mod sofa_tests { use serde_json::{json}; use crate::client::Client; - #[test] - fn a_should_check_couchdbs_status() { + #[tokio::test] + async fn a_should_check_couchdbs_status() { let client = Client::new(DB_HOST.into()).unwrap(); - let status = client.check_status(); + let status = client.check_status().await; assert!(status.is_ok()); } - #[test] - fn b_should_create_sofa_test_db() { + #[tokio::test] + async fn b_should_create_sofa_test_db() { let client = Client::new(DB_HOST.into()).unwrap(); - let dbw = client.db("b_should_create_sofa_test_db"); + let dbw = client.db("b_should_create_sofa_test_db").await; assert!(dbw.is_ok()); 
let _ = client.destroy_db("b_should_create_sofa_test_db"); } - #[test] - fn c_should_create_a_document() { + #[tokio::test] + async fn c_should_create_a_document() { let client = Client::new(DB_HOST.into()).unwrap(); - let dbw = client.db("c_should_create_a_document"); + let dbw = client.db("c_should_create_a_document").await; assert!(dbw.is_ok()); let db = dbw.unwrap(); let ndoc_result = db.create(json!({ "thing": true - })); + })).await; assert!(ndoc_result.is_ok()); @@ -181,12 +181,12 @@ mod sofa_tests { let _ = client.destroy_db("c_should_create_a_document"); } - #[test] - fn d_should_destroy_the_db() { + #[tokio::test] + async fn d_should_destroy_the_db() { let client = Client::new(DB_HOST.into()).unwrap(); - let _ = client.db("d_should_destroy_the_db"); + let _ = client.db("d_should_destroy_the_db").await; - assert!(client.destroy_db("d_should_destroy_the_db").unwrap()); + assert!(client.destroy_db("d_should_destroy_the_db").await.unwrap()); } } @@ -199,15 +199,15 @@ mod sofa_tests { const DB_HOST: &'static str = "http://192.168.64.5:5984"; - fn setup(dbname: &'static str) -> (Client, Database, Document) { + async fn setup(dbname: &'static str) -> (Client, Database, Document) { let client = Client::new(DB_HOST.into()).unwrap(); - let dbw = client.db(dbname); + let dbw = client.db(dbname).await; assert!(dbw.is_ok()); let db = dbw.unwrap(); let ndoc_result = db.create(json!({ "thing": true - })); + })).await; assert!(ndoc_result.is_ok()); @@ -217,85 +217,85 @@ mod sofa_tests { (client, db, doc) } - fn teardown(client: Client, dbname: &'static str) { - assert!(client.destroy_db(dbname).unwrap()) + async fn teardown(client: Client, dbname: &'static str) { + assert!(client.destroy_db(dbname).await.unwrap()) } - #[test] - fn a_should_update_a_document() { - let (client, db, mut doc) = setup("a_should_update_a_document"); + #[tokio::test] + async fn a_should_update_a_document() { + let (client, db, mut doc) = setup("a_should_update_a_document").await; doc["thing"] = 
json!(false); - let save_result = db.save(doc); + let save_result = db.save(doc).await; assert!(save_result.is_ok()); let new_doc = save_result.unwrap(); assert_eq!(new_doc["thing"], json!(false)); - teardown(client, "a_should_update_a_document"); + teardown(client, "a_should_update_a_document").await; } - #[test] - fn b_should_remove_a_document() { - let (client, db, doc) = setup("b_should_remove_a_document"); - assert!(db.remove(doc)); + #[tokio::test] + async fn b_should_remove_a_document() { + let (client, db, doc) = setup("b_should_remove_a_document").await; + assert!(db.remove(doc).await); - teardown(client, "b_should_remove_a_document"); + teardown(client, "b_should_remove_a_document").await; } - #[test] - fn c_should_get_a_single_document() { - let (client, ..) = setup("c_should_get_a_single_document"); + #[tokio::test] + async fn c_should_get_a_single_document() { + let (client, ..) = setup("c_should_get_a_single_document").await; assert!(true); - teardown(client, "c_should_get_a_single_document"); + teardown(client, "c_should_get_a_single_document").await; } - fn setup_create_indexes(dbname: &'static str) -> (Client, Database, Document) { - let (client, db, doc) = setup(dbname); + async fn setup_create_indexes(dbname: &'static str) -> (Client, Database, Document) { + let (client, db, doc) = setup(dbname).await; let spec = types::index::IndexFields::new(vec![types::find::SortSpec::Simple(s!("thing"))]); - let res = db.insert_index("thing-index".into(), spec); + let res = db.insert_index("thing-index".into(), spec).await; assert!(res.is_ok()); (client, db, doc) } - #[test] - fn d_should_create_index_in_db() { - let (client, db, _) = setup_create_indexes("d_should_create_index_in_db"); + #[tokio::test] + async fn d_should_create_index_in_db() { + let (client, db, _) = setup_create_indexes("d_should_create_index_in_db").await; assert!(true); - teardown(client, "d_should_create_index_in_db"); + teardown(client, "d_should_create_index_in_db").await; } - #[test] 
- fn e_should_list_indexes_in_db() { - let (client, db, _) = setup_create_indexes("e_should_list_indexes_in_db"); + #[tokio::test] + async fn e_should_list_indexes_in_db() { + let (client, db, _) = setup_create_indexes("e_should_list_indexes_in_db").await; - let index_list = db.read_indexes().unwrap(); + let index_list = db.read_indexes().await.unwrap(); assert!(index_list.indexes.len() > 1); let ref findex = index_list.indexes[1]; assert_eq!(findex.name.as_str(), "thing-index"); - teardown(client, "e_should_list_indexes_in_db"); + teardown(client, "e_should_list_indexes_in_db").await; } - #[test] - fn f_should_ensure_index_in_db() { - let (client, db, _) = setup("f_should_ensure_index_in_db"); + #[tokio::test] + async fn f_should_ensure_index_in_db() { + let (client, db, _) = setup("f_should_ensure_index_in_db").await; let spec = types::index::IndexFields::new(vec![types::find::SortSpec::Simple(s!("thing"))]); - let res = db.ensure_index("thing-index".into(), spec); + let res = db.ensure_index("thing-index".into(), spec).await; assert!(res.is_ok()); - teardown(client, "f_should_ensure_index_in_db"); + teardown(client, "f_should_ensure_index_in_db").await; } - #[test] - fn g_should_find_documents_in_db() { - let (client, db, doc) = setup_create_indexes("g_should_find_documents_in_db"); + #[tokio::test] + async fn g_should_find_documents_in_db() { + let (client, db, doc) = setup_create_indexes("g_should_find_documents_in_db").await; let documents_res = db.find(json!({ "selector": { @@ -305,13 +305,13 @@ mod sofa_tests { "sort": [{ "thing": "desc" }] - })); + })).await; assert!(documents_res.is_ok()); let documents = documents_res.unwrap(); assert_eq!(documents.rows.len(), 1); - teardown(client, "g_should_find_documents_in_db"); + teardown(client, "g_should_find_documents_in_db").await; } } } From bfe6f8a35f5364828241255f16ea4e860ae1863b Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 8 Feb 2020 14:08:32 +0100 Subject: [PATCH 24/70] Updated README --- README.md | 2 
+- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index fbb4d7a..9d0b376 100644 --- a/README.md +++ b/README.md @@ -33,7 +33,7 @@ Does not support `#![no_std]` After trying most crates for CouchDB in Rust (`chill`, `couchdb` in particular), none of them fit our needs hence the need to create our own. -No async I/O (yet), uses a mix of Reqwest and Serde under the hood, with a few nice abstractions out there. +Uses async I/O, with a mix of Reqwest and Serde under the hood, and a few nice abstractions out there. **NOT 1.0 YET, so expect changes** From 5786023da009272485ec394516f7e11e01688d24 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 8 Feb 2020 14:17:21 +0100 Subject: [PATCH 25/70] Make the tests point to localhost again --- src/lib.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index cb96367..e1ae3df 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -141,7 +141,7 @@ pub use client::{Client}; #[cfg(test)] mod sofa_tests { mod a_sys { - const DB_HOST: &'static str = "http://192.168.64.5:5984"; + const DB_HOST: &'static str = "http://127.0.0.1:5984"; use serde_json::{json}; use crate::client::Client; @@ -197,7 +197,7 @@ mod sofa_tests { use crate::document::Document; use crate::types; - const DB_HOST: &'static str = "http://192.168.64.5:5984"; + const DB_HOST: &'static str = "http://127.0.0.1:5984"; async fn setup(dbname: &'static str) -> (Client, Database, Document) { let client = Client::new(DB_HOST.into()).unwrap(); From 553fe8e5cf6d0485ed10380090e9760f243961ea Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 8 Feb 2020 14:26:30 +0100 Subject: [PATCH 26/70] Use tokio channels --- examples/async_batch_read/main.rs | 12 ++++++------ src/database.rs | 6 +++--- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/examples/async_batch_read/main.rs b/examples/async_batch_read/main.rs index 8010b6e..62f529e 100644 --- a/examples/async_batch_read/main.rs +++ 
b/examples/async_batch_read/main.rs @@ -1,8 +1,8 @@ extern crate sofa; use std::time::SystemTime; -use std::sync::mpsc::{Sender, Receiver}; -use std::sync::mpsc; +use tokio::sync::mpsc::{Sender, Receiver}; +use tokio::sync::mpsc; use sofa::document::DocumentCollection; use std::fs::File; use std::io::prelude::*; @@ -16,7 +16,7 @@ async fn main() { let now = SystemTime::now(); // Create a sender and receiver channel pair - let (tx, rx): (Sender, Receiver) = mpsc::channel(); + let (tx, mut rx): (Sender, Receiver) = mpsc::channel(100); // Spawn a separate thread to retrieve the batches from Couch let t = tokio::spawn(async move { @@ -31,8 +31,8 @@ async fn main() { // Loop until the receiving channel is closed. loop { - match rx.recv() { - Ok(all_docs) => { + match rx.recv().await { + Some(all_docs) => { println!("Received {} docs", all_docs.total_rows); // unmarshal the documents and write them to a file. @@ -41,7 +41,7 @@ async fn main() { file.write_all(serde_json::to_string(&row.doc).unwrap().as_bytes()).unwrap(); } } - Err(_e) => { + None => { break; } } diff --git a/src/database.rs b/src/database.rs index de37580..9d231e5 100644 --- a/src/database.rs +++ b/src/database.rs @@ -10,7 +10,7 @@ use crate::client::Client; use crate::types::document::{DocumentId, DocumentCreatedResult}; use crate::types::find::{FindResult, FindQuery}; use crate::types::index::{IndexFields, IndexCreated, DatabaseIndexList}; -use std::sync::mpsc::{Sender}; +use tokio::sync::mpsc::{Sender}; /// Database holds the logic of making operations on a CouchDB Database /// (sometimes called Collection in other NoSQL flavors such as MongoDB). @@ -183,7 +183,7 @@ impl Database { /// databases only. Batch size can be requested. A value of 0, means the default batch_size of /// 1000 is used. max_results of 0 means all documents will be returned. A given max_results is /// always rounded *up* to the nearest multiplication of batch_size. 
- pub async fn get_all_batched(&self, tx: Sender, batch_size: u64, max_results: u64) -> u64 { + pub async fn get_all_batched(&self, mut tx: Sender, batch_size: u64, max_results: u64) -> u64 { let mut bookmark = Option::None; let limit = if batch_size > 0 { batch_size @@ -216,7 +216,7 @@ impl Database { results += all_docs.total_rows.clone() as u64; - tx.send(all_docs).unwrap(); + tx.send(all_docs).await.unwrap(); if max_results > 0 && results >= max_results { break; From cd9ed89180b139ab8454f59c3937ac66dfb63bd8 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 8 Feb 2020 14:28:15 +0100 Subject: [PATCH 27/70] no async-traits (yet). --- Cargo.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index c6d1ace..66e7b79 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -21,7 +21,6 @@ serde = { version = "1.0", features = ["derive"] } serde_json = "1.0.45" url = "2.1.1" tokio = { version = "0.2.11", features = ["full"] } -async-trait = "0.1.24" [dependencies.reqwest] version = "0.10.1" From 152e9f6d6667f2e075d1660aeaf6f3d33cbc7274 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 8 Feb 2020 14:36:32 +0100 Subject: [PATCH 28/70] Some de-duplication --- src/database.rs | 75 ++++++++++++++++++------------------------------- 1 file changed, 28 insertions(+), 47 deletions(-) diff --git a/src/database.rs b/src/database.rs index 9d231e5..4a3b787 100644 --- a/src/database.rs +++ b/src/database.rs @@ -1,7 +1,5 @@ use std::collections::HashMap; - -use reqwest::StatusCode; - +use reqwest::{StatusCode, RequestBuilder}; use serde_json; use serde_json::{to_string, Value, json}; use crate::document::{Document, DocumentCollection}; @@ -10,7 +8,7 @@ use crate::client::Client; use crate::types::document::{DocumentId, DocumentCreatedResult}; use crate::types::find::{FindResult, FindQuery}; use crate::types::index::{IndexFields, IndexCreated, DatabaseIndexList}; -use tokio::sync::mpsc::{Sender}; +use tokio::sync::mpsc::Sender; /// Database holds the logic of making operations 
on a CouchDB Database /// (sometimes called Collection in other NoSQL flavors such as MongoDB). @@ -50,65 +48,57 @@ impl Database { result } - /// Launches the compact process - pub async fn compact(&self) -> bool { - let mut path: String = self.name.clone(); - path.push_str("/_compact"); + async fn is_accepted(&self, request: Result) -> bool { + if let Ok(req) = request { + if let Ok(res) = req.send().await { + return res.status() == StatusCode::ACCEPTED; + } + } - let request = self._client.post(path, "".into()); + return false; + } + async fn is_ok(&self, request: Result) -> bool { if let Ok(req) = request { if let Ok(res) = req.send().await { - return res.status() == StatusCode::ACCEPTED; + return match res.status() { + StatusCode::OK | StatusCode::NOT_MODIFIED => true, + _ => false, + }; } } return false; } + /// Launches the compact process + pub async fn compact(&self) -> bool { + let mut path: String = self.name.clone(); + path.push_str("/_compact"); + + let request = self._client.post(path, "".into()); + self.is_accepted(request).await + } + /// Starts the compaction of all views pub async fn compact_views(&self) -> bool { let mut path: String = self.name.clone(); path.push_str("/_view_cleanup"); let request = self._client.post(path, "".into()); - - if let Ok(req) = request { - if let Ok(res) = req.send().await { - return res.status() == StatusCode::ACCEPTED; - } - } - - return false; + self.is_accepted(request).await } /// Starts the compaction of a given index pub async fn compact_index(&self, index: &'static str) -> bool { let request = self._client.post(self.create_compact_path(index), "".into()); - - if let Ok(req) = request { - if let Ok(res) = req.send().await { - return res.status() == StatusCode::ACCEPTED; - } - } - - return false; + self.is_accepted(request).await } /// Checks if a document ID exists pub async fn exists(&self, id: DocumentId) -> bool { let request = self._client.head(self.create_document_path(id), None); - - if let Ok(req) = 
request { - if let Ok(res) = req.send().await { - return match res.status() { - StatusCode::OK | StatusCode::NOT_MODIFIED => true, - _ => false, - } - } - } - - return false; + self.is_ok(request).await } /// Gets one document @@ -347,16 +337,7 @@ impl Database { }), ); - if let Ok(req) = request { - if let Ok(res) = req.send().await { - return match res.status() { - StatusCode::OK | StatusCode::NOT_MODIFIED => true, - _ => false, - } - } - } - - return false; + self.is_ok(request).await } /// Inserts an index in a naive way, if it already exists, will throw an From 48670f892bd1550ece9e16918230dff45fa06e07 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 8 Feb 2020 14:38:18 +0100 Subject: [PATCH 29/70] Updated README --- README.md | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 9d0b376..4caedd2 100644 --- a/README.md +++ b/README.md @@ -29,8 +29,6 @@ sofa = "0.6" This crate is an interface to CouchDB HTTP REST API. Works with stable Rust. -Does not support `#![no_std]` - After trying most crates for CouchDB in Rust (`chill`, `couchdb` in particular), none of them fit our needs hence the need to create our own. Uses async I/O, with a mix of Reqwest and Serde under the hood, and a few nice abstractions out there. @@ -42,7 +40,8 @@ Uses async I/O, with a mix of Reqwest and Serde under the hood, and a few nice a Be sure to check [CouchDB's Documentation](http://docs.couchdb.org/en/latest/index.html) in detail to see what's possible. The 0.7 version is based on the 0.6 release from https://github.com/YellowInnovation/sofa. -It has been updated to the Rust 2018 edition standards and compiles against the latest serde and reqwest libraries. +It has been updated to the Rust 2018 edition standards, uses async I/O, and compiles against the latest serde and +reqwest libraries. 
## Example code From d0f05c0932dccd1d9dd787aa04eba7932976e246 Mon Sep 17 00:00:00 2001 From: mibes Date: Sun, 9 Feb 2020 11:57:48 +0100 Subject: [PATCH 30/70] Updated README --- src/lib.rs | 101 ++++++++++++++++++++++++++++++----------------------- 1 file changed, 57 insertions(+), 44 deletions(-) diff --git a/src/lib.rs b/src/lib.rs index e1ae3df..229eaa3 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,73 +1,86 @@ //! # Sofa - CouchDB for Rust -//! +//! //! [![Crates.io](https://img.shields.io/crates/v/sofa.svg)](https://crates.io/crates/sofa) +//! [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa?ref=badge_shield) +//! //! [![docs.rs](https://docs.rs/sofa/badge.svg)](https://docs.rs/sofa) -//! -//! ![sofa-logo](https://raw.githubusercontent.com/YellowInnovation/sofa/master/docs/logo-sofa.png "Logo Sofa") -//! +//! +//! ![sofa-logo](https://raw.githubusercontent.com/mibes/sofa/master/docs/logo-sofa.png "Logo Sofa") +//! //! ## Documentation -//! +//! //! Here: [http://docs.rs/sofa](http://docs.rs/sofa) -//! +//! //! ## Installation -//! +//! +//! If you want to use this particular fork, include this dependency in the Cargo.toml file: +//! ```toml +//! [dependencies.sofa] +//! git = "https://github.com/mibes/sofa.git" +//! ``` +//! +//! If you want to continue to use the "old" 0.6 version use this dependency instead: //! ```toml //! [dependencies] //! sofa = "0.6" //! ``` -//! +//! //! ## Description -//! +//! //! This crate is an interface to CouchDB HTTP REST API. Works with stable Rust. -//! -//! Does not support `#![no_std]` -//! -//! After trying most crates for CouchDB in Rust (`chill`, `couchdb` in particular), none of them fit our needs hence -//! the need to create our own. -//! -//! No async I/O (yet), uses a mix of Reqwest and Serde under the hood, with a -//! few nice abstractions out there. -//! +//! +//! 
After trying most crates for CouchDB in Rust (`chill`, `couchdb` in particular), none of them fit our needs hence the need to create our own. +//! +//! Uses async I/O, with a mix of Reqwest and Serde under the hood, and a few nice abstractions out there. +//! //! **NOT 1.0 YET, so expect changes** -//! -//! **Supports CouchDB 2.0 and up.** -//! +//! +//! **Supports CouchDB 2.3.0 and up.** +//! //! Be sure to check [CouchDB's Documentation](http://docs.couchdb.org/en/latest/index.html) in detail to see what's possible. -//! +//! +//! The 0.7 version is based on the 0.6 release from https://github.com/YellowInnovation/sofa. +//! It has been updated to the Rust 2018 edition standards, uses async I/O, and compiles against the latest serde and +//! reqwest libraries. +//! +//! ## Example code +//! +//! You can launch the included example with: +//! ```shell script +//! cargo run --example basic_operations +//! ``` +//! //! ## Running tests -//! -//! Make sure that you have an instance of CouchDB 2.0+ running, either via the -//! supplied `docker-compose.yml` file or by yourself. It must be listening on -//! the default port. -//! +//! +//! Make sure that you have an instance of CouchDB 2.0+ running, either via the supplied `docker-compose.yml` file or by yourself. It must be listening on the default port. +//! //! And then //! `cargo test -- --test-threads=1` -//! -//! Single-threading the tests is very important because we need to make sure -//! that the basic features are working before actually testing features on -//! dbs/documents. -//! +//! +//! Single-threading the tests is very important because we need to make sure that the basic features are working before actually testing features on dbs/documents. +//! //! ## Why the name "Sofa" -//! +//! //! CouchDB has a nice name, and I wanted to reflect that. -//! +//! //! ## License -//! +//! //! Licensed under either of these: -//! +//! //! * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or //! 
[https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0) //! * MIT license ([LICENSE-MIT](LICENSE-MIT) or //! [https://opensource.org/licenses/MIT](https://opensource.org/licenses/MIT)) -//! +//! +//! +//! [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa?ref=badge_large) +//! //! ## Yellow Innovation -//! -//! Yellow Innovation is the innovation laboratory of the French postal -//! service: La Poste. -//! -//! We create innovative user experiences and journeys through services with a -//! focus on IoT lately. -//! +//! +//! Yellow Innovation is the innovation laboratory of the French postal service: La Poste. +//! +//! We create innovative user experiences and journeys through services with a focus on IoT lately. +//! //! [Yellow Innovation's website and works](http://yellowinnovation.fr/en/) /// Macros that the crate exports to facilitate most of the From 8954291dbe8ddc30b5935bc761bb08e6891b8324 Mon Sep 17 00:00:00 2001 From: mibes Date: Mon, 10 Feb 2020 16:33:12 +0100 Subject: [PATCH 31/70] Some clean-up. 
--- Cargo.toml | 2 +- examples/async_batch_read/main.rs | 25 +++++++++---------------- examples/basic_operations/main.rs | 6 +++--- examples/typed_documents/main.rs | 4 ++-- src/client.rs | 2 +- src/database.rs | 28 ++++++++++++++-------------- src/document.rs | 12 ++++++------ src/lib.rs | 18 ++++++++---------- src/types/index.rs | 2 +- 9 files changed, 45 insertions(+), 54 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 66e7b79..27ad32c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -24,7 +24,7 @@ tokio = { version = "0.2.11", features = ["full"] } [dependencies.reqwest] version = "0.10.1" -features = ["json", "gzip", "cookies", "blocking"] +features = ["json", "gzip", "cookies"] [dev-dependencies] pretty_assertions = "0.5" diff --git a/examples/async_batch_read/main.rs b/examples/async_batch_read/main.rs index 62f529e..7dd90a4 100644 --- a/examples/async_batch_read/main.rs +++ b/examples/async_batch_read/main.rs @@ -7,8 +7,8 @@ use sofa::document::DocumentCollection; use std::fs::File; use std::io::prelude::*; -const DB_HOST: &'static str = "http://127.0.0.1:5984"; -const TEST_DB: &'static str = "test_db"; +const DB_HOST: &str = "http://127.0.0.1:5984"; +const TEST_DB: &str = "test_db"; #[tokio::main] async fn main() { @@ -30,20 +30,13 @@ async fn main() { let mut file = File::create("test_db.json").unwrap(); // Loop until the receiving channel is closed. - loop { - match rx.recv().await { - Some(all_docs) => { - println!("Received {} docs", all_docs.total_rows); - - // unmarshal the documents and write them to a file. - // (there is probably a more efficient way of doing this...) - for row in all_docs.rows { - file.write_all(serde_json::to_string(&row.doc).unwrap().as_bytes()).unwrap(); - } - } - None => { - break; - } + while let Some(all_docs) = rx.recv().await { + println!("Received {} docs", all_docs.total_rows); + + // unmarshal the documents and write them to a file. + // (there is probably a more efficient way of doing this...) 
+ for row in all_docs.rows { + file.write_all(serde_json::to_string(&row.doc).unwrap().as_bytes()).unwrap(); } } diff --git a/examples/basic_operations/main.rs b/examples/basic_operations/main.rs index 57dadaf..4a7cd84 100644 --- a/examples/basic_operations/main.rs +++ b/examples/basic_operations/main.rs @@ -17,8 +17,8 @@ extern crate sofa; use serde_json::{json, Value}; /// Update DB_HOST to point to your running Couch instance -const DB_HOST: &'static str = "http://localhost:5984"; -const TEST_DB: &'static str = "test_db"; +const DB_HOST: &str = "http://localhost:5984"; +const TEST_DB: &str = "test_db"; /// test_docs generates a bunch of documents that can be used in the _bulk_docs operation. fn test_docs(amount: i32) -> Vec { @@ -70,7 +70,7 @@ async fn main() { first_doc_id = resp.first().unwrap().clone().id; for r in resp { - println!("Id: {}, OK?: {}", r.id.unwrap_or("--".to_string()), r.ok.unwrap_or(false)) + println!("Id: {}, OK?: {}", r.id.unwrap_or_else(|| "--".to_string()), r.ok.unwrap_or(false)) } } Err(err) => println!("Oops: {:?}", err), diff --git a/examples/typed_documents/main.rs b/examples/typed_documents/main.rs index 1706a55..de4ed82 100644 --- a/examples/typed_documents/main.rs +++ b/examples/typed_documents/main.rs @@ -5,8 +5,8 @@ use serde::{Serialize, Deserialize}; use reqwest::StatusCode; /// Update DB_HOST to point to your running Couch instance -const DB_HOST: &'static str = "http://localhost:5984"; -const TEST_DB: &'static str = "test_db"; +const DB_HOST: &str = "http://localhost:5984"; +const TEST_DB: &str = "test_db"; #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct TestDoc { diff --git a/src/client.rs b/src/client.rs index d7dde44..6f62cca 100644 --- a/src/client.rs +++ b/src/client.rs @@ -138,7 +138,7 @@ impl Client { match s.ok { Some(true) => Ok(db), Some(false) | _ => { - let err = s.error.unwrap_or(s!("unspecified error")); + let err = s.error.unwrap_or_else(|| s!("unspecified error")); 
Err(CouchError::new(err, status)) }, } diff --git a/src/database.rs b/src/database.rs index 4a3b787..8de0a8f 100644 --- a/src/database.rs +++ b/src/database.rs @@ -22,7 +22,7 @@ impl Database { pub fn new(name: String, client: Client) -> Database { Database { _client: client, - name: name, + name, } } @@ -55,7 +55,7 @@ impl Database { } } - return false; + false } async fn is_ok(&self, request: Result) -> bool { @@ -68,7 +68,7 @@ impl Database { } } - return false; + false } /// Launches the compact process @@ -204,7 +204,7 @@ impl Database { break; } - results += all_docs.total_rows.clone() as u64; + results += all_docs.total_rows as u64; tx.send(all_docs).await.unwrap(); @@ -249,7 +249,7 @@ impl Database { let id: String = json_extr!(d["_id"]); !id.starts_with('_') }) - .map(|v| Document::new(v.clone())) + .map(Document::new) .collect(); let mut bookmark = Option::None; @@ -262,7 +262,7 @@ impl Database { Ok(DocumentCollection::new_from_documents(documents, bookmark)) } else if let Some(err) = data.error { - Err(CouchError::new(err, status).into()) + Err(CouchError::new(err, status)) } else { Ok(DocumentCollection::default()) } @@ -288,8 +288,8 @@ impl Database { Ok(Document::new(val)) } Some(false) | _ => { - let err = data.error.unwrap_or(s!("unspecified error")); - return Err(CouchError::new(err, status).into()); + let err = data.error.unwrap_or_else(|| s!("unspecified error")); + Err(CouchError::new(err, status)) } } } @@ -305,12 +305,12 @@ impl Database { Some(true) => { let data_id = match data.id { Some(id) => id, - _ => return Err(CouchError::new(s!("invalid id"), status).into()), + _ => return Err(CouchError::new(s!("invalid id"), status)), }; let data_rev = match data.rev { Some(rev) => rev, - _ => return Err(CouchError::new(s!("invalid rev"), status).into()), + _ => return Err(CouchError::new(s!("invalid rev"), status)), }; let mut val = raw_doc.clone(); @@ -320,8 +320,8 @@ impl Database { Ok(Document::new(val)) } Some(false) | _ => { - let err = 
data.error.unwrap_or(s!("unspecified error")); - return Err(CouchError::new(err, status).into()); + let err = data.error.unwrap_or_else(|| s!("unspecified error")); + Err(CouchError::new(err, status)) } } } @@ -357,8 +357,8 @@ impl Database { let data: IndexCreated = response.json().await?; if data.error.is_some() { - let err = data.error.unwrap_or(s!("unspecified error")); - Err(CouchError::new(err, status).into()) + let err = data.error.unwrap_or_else(|| s!("unspecified error")); + Err(CouchError::new(err, status)) } else { Ok(data) } diff --git a/src/document.rs b/src/document.rs index 7d465fd..66ca995 100644 --- a/src/document.rs +++ b/src/document.rs @@ -26,7 +26,7 @@ impl Document { Document { _id: json_extr!(doc["_id"]), _rev: json_extr!(doc["_rev"]), - doc: doc, + doc, } } @@ -69,15 +69,15 @@ impl Document { /// Recursively populates field (must be an array of IDs from another /// database) with provided database documents - pub async fn populate(&mut self, field: &String, db: Database) -> &Self { - let ref val = self[field].clone(); + pub async fn populate(&mut self, field: &str, db: Database) -> &Self { + let val = &self[field].clone(); if *val == Value::Null { return self; } let ids = val.as_array() .unwrap_or(&Vec::new()) - .into_iter() + .iter() .map(|v| s!(v.as_str().unwrap_or(""))) .collect(); @@ -140,7 +140,7 @@ pub struct DocumentCollectionItem { impl DocumentCollectionItem { pub fn new(doc: Document) -> DocumentCollectionItem { let id = doc._id.clone(); - DocumentCollectionItem { doc: doc, id: id } + DocumentCollectionItem { doc, id } } } @@ -184,7 +184,7 @@ impl DocumentCollection { DocumentCollection { offset: 0, total_rows: len, - rows: docs.into_iter().map(|d| DocumentCollectionItem::new(d)).collect(), + rows: docs.into_iter().map(DocumentCollectionItem::new).collect(), bookmark, } } diff --git a/src/lib.rs b/src/lib.rs index 229eaa3..17cbaf5 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -154,21 +154,21 @@ pub use client::{Client}; #[cfg(test)] 
mod sofa_tests { mod a_sys { - const DB_HOST: &'static str = "http://127.0.0.1:5984"; + const DB_HOST: &str = "http://127.0.0.1:5984"; use serde_json::{json}; use crate::client::Client; #[tokio::test] async fn a_should_check_couchdbs_status() { - let client = Client::new(DB_HOST.into()).unwrap(); + let client = Client::new(DB_HOST).unwrap(); let status = client.check_status().await; assert!(status.is_ok()); } #[tokio::test] async fn b_should_create_sofa_test_db() { - let client = Client::new(DB_HOST.into()).unwrap(); + let client = Client::new(DB_HOST).unwrap(); let dbw = client.db("b_should_create_sofa_test_db").await; assert!(dbw.is_ok()); @@ -177,7 +177,7 @@ mod sofa_tests { #[tokio::test] async fn c_should_create_a_document() { - let client = Client::new(DB_HOST.into()).unwrap(); + let client = Client::new(DB_HOST).unwrap(); let dbw = client.db("c_should_create_a_document").await; assert!(dbw.is_ok()); let db = dbw.unwrap(); @@ -196,7 +196,7 @@ mod sofa_tests { #[tokio::test] async fn d_should_destroy_the_db() { - let client = Client::new(DB_HOST.into()).unwrap(); + let client = Client::new(DB_HOST).unwrap(); let _ = client.db("d_should_destroy_the_db").await; assert!(client.destroy_db("d_should_destroy_the_db").await.unwrap()); @@ -210,10 +210,10 @@ mod sofa_tests { use crate::document::Document; use crate::types; - const DB_HOST: &'static str = "http://127.0.0.1:5984"; + const DB_HOST: &str = "http://127.0.0.1:5984"; async fn setup(dbname: &'static str) -> (Client, Database, Document) { - let client = Client::new(DB_HOST.into()).unwrap(); + let client = Client::new(DB_HOST).unwrap(); let dbw = client.db(dbname).await; assert!(dbw.is_ok()); let db = dbw.unwrap(); @@ -259,7 +259,6 @@ mod sofa_tests { #[tokio::test] async fn c_should_get_a_single_document() { let (client, ..) 
= setup("c_should_get_a_single_document").await; - assert!(true); teardown(client, "c_should_get_a_single_document").await; } @@ -278,7 +277,6 @@ mod sofa_tests { #[tokio::test] async fn d_should_create_index_in_db() { let (client, db, _) = setup_create_indexes("d_should_create_index_in_db").await; - assert!(true); teardown(client, "d_should_create_index_in_db").await; } @@ -288,7 +286,7 @@ mod sofa_tests { let index_list = db.read_indexes().await.unwrap(); assert!(index_list.indexes.len() > 1); - let ref findex = index_list.indexes[1]; + let findex = &index_list.indexes[1]; assert_eq!(findex.name.as_str(), "thing-index"); teardown(client, "e_should_list_indexes_in_db").await; diff --git a/src/types/index.rs b/src/types/index.rs index 12f2292..18a021f 100644 --- a/src/types/index.rs +++ b/src/types/index.rs @@ -12,7 +12,7 @@ pub struct IndexFields { impl IndexFields { pub fn new(fields: Vec) -> IndexFields { IndexFields { - fields: fields + fields } } } From ad16a33c9618923e185b38ff716d50ab22f66ac2 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 29 Feb 2020 19:03:16 +0100 Subject: [PATCH 32/70] Updated for Couch 3.0 --- README.md | 2 +- examples/async_batch_read/main.rs | 11 ++++++----- examples/basic_operations/main.rs | 11 +++++++---- examples/typed_documents/main.rs | 8 ++++---- 4 files changed, 18 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 4caedd2..01634dd 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ Uses async I/O, with a mix of Reqwest and Serde under the hood, and a few nice a **NOT 1.0 YET, so expect changes** -**Supports CouchDB 2.3.0 and up.** +**Supports CouchDB 2.3.0 and up, including the newly released 3.0 version.** Be sure to check [CouchDB's Documentation](http://docs.couchdb.org/en/latest/index.html) in detail to see what's possible. 
diff --git a/examples/async_batch_read/main.rs b/examples/async_batch_read/main.rs index 7dd90a4..e2bf2d6 100644 --- a/examples/async_batch_read/main.rs +++ b/examples/async_batch_read/main.rs @@ -1,13 +1,13 @@ extern crate sofa; -use std::time::SystemTime; -use tokio::sync::mpsc::{Sender, Receiver}; -use tokio::sync::mpsc; use sofa::document::DocumentCollection; use std::fs::File; use std::io::prelude::*; +use std::time::SystemTime; +use tokio::sync::mpsc; +use tokio::sync::mpsc::{Receiver, Sender}; -const DB_HOST: &str = "http://127.0.0.1:5984"; +const DB_HOST: &str = "http://admin:password@localhost:5984"; const TEST_DB: &str = "test_db"; #[tokio::main] @@ -36,7 +36,8 @@ async fn main() { // unmarshal the documents and write them to a file. // (there is probably a more efficient way of doing this...) for row in all_docs.rows { - file.write_all(serde_json::to_string(&row.doc).unwrap().as_bytes()).unwrap(); + file.write_all(serde_json::to_string(&row.doc).unwrap().as_bytes()) + .unwrap(); } } diff --git a/examples/basic_operations/main.rs b/examples/basic_operations/main.rs index 4a7cd84..7d186a1 100644 --- a/examples/basic_operations/main.rs +++ b/examples/basic_operations/main.rs @@ -11,13 +11,12 @@ /// Depending on the Docker framework you are using it may listen to "localhost" or to some other /// automatically assigned IP address. Minikube for example generates a unique IP on start-up. You /// can obtain it with: `minikube ip` - extern crate sofa; use serde_json::{json, Value}; /// Update DB_HOST to point to your running Couch instance -const DB_HOST: &str = "http://localhost:5984"; +const DB_HOST: &str = "http://admin:password@localhost:5984"; const TEST_DB: &str = "test_db"; /// test_docs generates a bunch of documents that can be used in the _bulk_docs operation. 
@@ -70,7 +69,11 @@ async fn main() { first_doc_id = resp.first().unwrap().clone().id; for r in resp { - println!("Id: {}, OK?: {}", r.id.unwrap_or_else(|| "--".to_string()), r.ok.unwrap_or(false)) + println!( + "Id: {}, OK?: {}", + r.id.unwrap_or_else(|| "--".to_string()), + r.ok.unwrap_or(false) + ) } } Err(err) => println!("Oops: {:?}", err), @@ -82,7 +85,7 @@ async fn main() { if first_doc_id.is_some() { // we have an id of the first document we've just inserted match db.get(first_doc_id.unwrap()).await { - Ok(doc) => { println!("First document: {}", doc.get_data().to_string()) } + Ok(doc) => println!("First document: {}", doc.get_data().to_string()), Err(err) => println!("Oops: {:?}", err), } } diff --git a/examples/typed_documents/main.rs b/examples/typed_documents/main.rs index de4ed82..3585178 100644 --- a/examples/typed_documents/main.rs +++ b/examples/typed_documents/main.rs @@ -1,11 +1,11 @@ extern crate sofa; -use sofa::types::document::DocumentId; -use serde::{Serialize, Deserialize}; use reqwest::StatusCode; +use serde::{Deserialize, Serialize}; +use sofa::types::document::DocumentId; /// Update DB_HOST to point to your running Couch instance -const DB_HOST: &str = "http://localhost:5984"; +const DB_HOST: &str = "http://admin:password@localhost:5984"; const TEST_DB: &str = "test_db"; #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] @@ -49,7 +49,7 @@ async fn main() { println!("Document has been previously created with Rev: {}", existing._rev); let e: TestDoc = serde_json::from_value(existing.get_data()).unwrap(); println!("Name: {} {}", e.first_name, e.last_name); - }, + } Err(e) => { match e.status { StatusCode::NOT_FOUND => { From 2ca5ef328e5e33e52c5045aa33f26b19459e1b2a Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 29 Feb 2020 19:13:36 +0100 Subject: [PATCH 33/70] Updated the tests for use with CouchDB 3.0 --- README.md | 6 +++ src/lib.rs | 120 ++++++++++++++++++++++++++++------------------------- 2 files changed, 69 insertions(+), 57 
deletions(-) diff --git a/README.md b/README.md index 01634dd..e1d5054 100644 --- a/README.md +++ b/README.md @@ -53,6 +53,12 @@ cargo run --example basic_operations ## Running tests Make sure that you have an instance of CouchDB 2.0+ running, either via the supplied `docker-compose.yml` file or by yourself. It must be listening on the default port. +Since Couch 3.0 the "Admin Party" mode is no longer supported. This means you need to provide a username and password during launch. +The tests and examples assume an "admin" CouchDB user with a "password" CouchDB password. Docker run command: + +```shell script +docker run --rm -p 5984:5984 -e COUCHDB_USER=admin -e COUCHDB_PASSWORD=password couchdb:3.0 +``` And then `cargo test -- --test-threads=1` diff --git a/src/lib.rs b/src/lib.rs index 17cbaf5..2188af4 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -1,86 +1,86 @@ //! # Sofa - CouchDB for Rust -//! +//! //! [![Crates.io](https://img.shields.io/crates/v/sofa.svg)](https://crates.io/crates/sofa) //! [![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa.svg?type=shield)](https://app.fossa.io/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa?ref=badge_shield) -//! +//! //! [![docs.rs](https://docs.rs/sofa/badge.svg)](https://docs.rs/sofa) -//! +//! //! ![sofa-logo](https://raw.githubusercontent.com/mibes/sofa/master/docs/logo-sofa.png "Logo Sofa") -//! +//! //! ## Documentation -//! +//! //! Here: [http://docs.rs/sofa](http://docs.rs/sofa) -//! +//! //! ## Installation -//! +//! //! If you want to use this particular fork, include this dependency in the Cargo.toml file: //! ```toml //! [dependencies.sofa] //! git = "https://github.com/mibes/sofa.git" //! ``` -//! +//! //! If you want to continue to use the "old" 0.6 version use this dependency instead: //! ```toml //! [dependencies] //! sofa = "0.6" //! ``` -//! +//! //! ## Description -//! +//! //! This crate is an interface to CouchDB HTTP REST API. Works with stable Rust. -//! 
+//! //! After trying most crates for CouchDB in Rust (`chill`, `couchdb` in particular), none of them fit our needs hence the need to create our own. -//! +//! //! Uses async I/O, with a mix of Reqwest and Serde under the hood, and a few nice abstractions out there. -//! +//! //! **NOT 1.0 YET, so expect changes** -//! +//! //! **Supports CouchDB 2.3.0 and up.** -//! +//! //! Be sure to check [CouchDB's Documentation](http://docs.couchdb.org/en/latest/index.html) in detail to see what's possible. -//! +//! //! The 0.7 version is based on the 0.6 release from https://github.com/YellowInnovation/sofa. -//! It has been updated to the Rust 2018 edition standards, uses async I/O, and compiles against the latest serde and +//! It has been updated to the Rust 2018 edition standards, uses async I/O, and compiles against the latest serde and //! reqwest libraries. -//! +//! //! ## Example code -//! +//! //! You can launch the included example with: //! ```shell script //! cargo run --example basic_operations //! ``` -//! +//! //! ## Running tests -//! +//! //! Make sure that you have an instance of CouchDB 2.0+ running, either via the supplied `docker-compose.yml` file or by yourself. It must be listening on the default port. -//! +//! //! And then //! `cargo test -- --test-threads=1` -//! +//! //! Single-threading the tests is very important because we need to make sure that the basic features are working before actually testing features on dbs/documents. -//! +//! //! ## Why the name "Sofa" -//! +//! //! CouchDB has a nice name, and I wanted to reflect that. -//! +//! //! ## License -//! +//! //! Licensed under either of these: -//! +//! //! * Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or //! [https://www.apache.org/licenses/LICENSE-2.0](https://www.apache.org/licenses/LICENSE-2.0) //! * MIT license ([LICENSE-MIT](LICENSE-MIT) or //! [https://opensource.org/licenses/MIT](https://opensource.org/licenses/MIT)) -//! -//! +//! +//! //! 
[![FOSSA Status](https://app.fossa.io/api/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa.svg?type=large)](https://app.fossa.io/projects/git%2Bgithub.com%2FYellowInnovation%2Fsofa?ref=badge_large) -//! +//! //! ## Yellow Innovation -//! +//! //! Yellow Innovation is the innovation laboratory of the French postal service: La Poste. -//! +//! //! We create innovative user experiences and journeys through services with a focus on IoT lately. -//! +//! //! [Yellow Innovation's website and works](http://yellowinnovation.fr/en/) /// Macros that the crate exports to facilitate most of the @@ -141,23 +141,23 @@ mod macros { } } -pub mod types; -pub mod document; -pub mod database; mod client; +pub mod database; +pub mod document; pub mod error; pub mod model; +pub mod types; -pub use client::{Client}; +pub use client::Client; #[allow(unused_mut, unused_variables)] #[cfg(test)] mod sofa_tests { mod a_sys { - const DB_HOST: &str = "http://127.0.0.1:5984"; + const DB_HOST: &str = "http://admin:password@localhost:5984"; - use serde_json::{json}; use crate::client::Client; + use serde_json::json; #[tokio::test] async fn a_should_check_couchdbs_status() { @@ -182,9 +182,11 @@ mod sofa_tests { assert!(dbw.is_ok()); let db = dbw.unwrap(); - let ndoc_result = db.create(json!({ - "thing": true - })).await; + let ndoc_result = db + .create(json!({ + "thing": true + })) + .await; assert!(ndoc_result.is_ok()); @@ -204,13 +206,13 @@ mod sofa_tests { } mod b_db { - use serde_json::{json}; use crate::client::Client; use crate::database::Database; use crate::document::Document; use crate::types; + use serde_json::json; - const DB_HOST: &str = "http://127.0.0.1:5984"; + const DB_HOST: &str = "http://admin:password@localhost:5984"; async fn setup(dbname: &'static str) -> (Client, Database, Document) { let client = Client::new(DB_HOST).unwrap(); @@ -218,9 +220,11 @@ mod sofa_tests { assert!(dbw.is_ok()); let db = dbw.unwrap(); - let ndoc_result = db.create(json!({ - "thing": true - 
})).await; + let ndoc_result = db + .create(json!({ + "thing": true + })) + .await; assert!(ndoc_result.is_ok()); @@ -308,15 +312,17 @@ mod sofa_tests { async fn g_should_find_documents_in_db() { let (client, db, doc) = setup_create_indexes("g_should_find_documents_in_db").await; - let documents_res = db.find(json!({ - "selector": { - "thing": true - }, - "limit": 1, - "sort": [{ - "thing": "desc" - }] - })).await; + let documents_res = db + .find(json!({ + "selector": { + "thing": true + }, + "limit": 1, + "sort": [{ + "thing": "desc" + }] + })) + .await; assert!(documents_res.is_ok()); let documents = documents_res.unwrap(); From 2d9d4c41f7561a61973904f87902684bbd4de185 Mon Sep 17 00:00:00 2001 From: mibes Date: Fri, 10 Apr 2020 15:25:13 +0200 Subject: [PATCH 34/70] Updated tokio and reqwest versions. --- Cargo.toml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 27ad32c..15f7e6d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -17,15 +17,15 @@ include = [ ] [dependencies] -serde = { version = "1.0", features = ["derive"] } -serde_json = "1.0.45" +serde = { version = "1.0.106", features = ["derive"] } +serde_json = "1.0.51" url = "2.1.1" -tokio = { version = "0.2.11", features = ["full"] } +tokio = { version = "0.2.17", features = ["full"] } [dependencies.reqwest] -version = "0.10.1" +version = "0.10.4" features = ["json", "gzip", "cookies"] [dev-dependencies] pretty_assertions = "0.5" -tokio = { version = "0.2.11", features = ["full"] } +tokio = { version = "0.2.17", features = ["full"] } From 5484a681d76a6c37836a3fb8043247a02d999657 Mon Sep 17 00:00:00 2001 From: mibes Date: Fri, 10 Apr 2020 15:25:35 +0200 Subject: [PATCH 35/70] Include version in dependency --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index e1d5054..6baae7d 100644 --- a/README.md +++ b/README.md @@ -15,8 +15,8 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use 
this particular fork, include this dependency in the Cargo.toml file: ```toml -[dependencies.sofa] -git = "https://github.com/mibes/sofa.git" +[dependencies] +sofa = { git = "https://github.com/mibes/sofa.git", tag = "v0.7" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: From 6867964c64fb352f25672edba03e4df4d3b94122 Mon Sep 17 00:00:00 2001 From: mibes Date: Tue, 19 May 2020 09:54:21 +0200 Subject: [PATCH 36/70] bulk_get will only return the requested documents. --- src/database.rs | 4 ++-- src/document.rs | 4 ++-- src/lib.rs | 13 +++++++++++++ 3 files changed, 17 insertions(+), 4 deletions(-) diff --git a/src/database.rs b/src/database.rs index 8de0a8f..a56a88f 100644 --- a/src/database.rs +++ b/src/database.rs @@ -156,8 +156,8 @@ impl Database { body.insert(s!("keys"), ids); let response = self._client - .get(self.create_document_path("_all_docs".into()), Some(options))? - .body(to_string(&body)?) + .post(self.create_document_path("_all_docs".into()), to_string(&body)?)? 
+ .query(&options) .send().await?; Ok(DocumentCollection::new(response.json().await?)) diff --git a/src/document.rs b/src/document.rs index 66ca995..94d2ab6 100644 --- a/src/document.rs +++ b/src/document.rs @@ -149,7 +149,7 @@ impl DocumentCollectionItem { /// implementation of `Index` and `IndexMut` #[derive(Default, Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct DocumentCollection { - pub offset: u32, + pub offset: Option, pub rows: Vec, pub total_rows: u32, pub bookmark: Option, @@ -182,7 +182,7 @@ impl DocumentCollection { let len = docs.len() as u32; DocumentCollection { - offset: 0, + offset: Some(0), total_rows: len, rows: docs.into_iter().map(DocumentCollectionItem::new).collect(), bookmark, diff --git a/src/lib.rs b/src/lib.rs index 2188af4..b24b04e 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -330,5 +330,18 @@ mod sofa_tests { teardown(client, "g_should_find_documents_in_db").await; } + + + #[tokio::test] + async fn h_should_bulk_get_a_document() { + let (client, db, doc) = setup("h_should_bulk_get_a_document").await; + let id = doc._id.clone(); + + let collection = db.get_bulk(vec![id]).await.unwrap(); + assert_eq!(collection.rows.len(), 1); + assert!(db.remove(doc).await); + + teardown(client, "h_should_bulk_get_a_document").await; + } } } From 6d38df2403c08ac8a52ee883331100c84024732a Mon Sep 17 00:00:00 2001 From: Marcel Ibes Date: Tue, 19 May 2020 10:01:55 +0200 Subject: [PATCH 37/70] Aligned versions --- Cargo.toml | 2 +- README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 15f7e6d..8ec8679 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.0" +version = "0.7.2" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 6baae7d..7495ab9 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you 
want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", tag = "v0.7" } +sofa = { git = "https://github.com/mibes/sofa.git", tag = "v0.7.2" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: From 9a1a76b0e91ed556e3efb978c2ed30af6bda1e79 Mon Sep 17 00:00:00 2001 From: mibes Date: Tue, 19 May 2020 10:28:45 +0200 Subject: [PATCH 38/70] Do not panic on missing documents in bulk_get --- Cargo.toml | 2 +- src/document.rs | 12 +++++++++--- src/lib.rs | 14 ++++++++++++++ 3 files changed, 24 insertions(+), 4 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 8ec8679..fe429db 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.2" +version = "0.7.3" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/src/document.rs b/src/document.rs index 94d2ab6..8a0fd9e 100644 --- a/src/document.rs +++ b/src/document.rs @@ -160,9 +160,15 @@ impl DocumentCollection { let rows: Vec = json_extr!(doc["rows"]); let items: Vec = rows.into_iter() .filter(|d| { - // Remove _design documents - let id: String = json_extr!(d["doc"]["_id"]); - !id.starts_with('_') + let maybe_err: Option = json_extr!(d["error"]); + if let Some(_) = maybe_err { + // remove errors + false + } else { + // Remove _design documents + let id: String = json_extr!(d["doc"]["_id"]); + !id.starts_with('_') + } }) .map(|d| { let document: Value = json_extr!(d["doc"]); diff --git a/src/lib.rs b/src/lib.rs index b24b04e..477577d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -343,5 +343,19 @@ mod sofa_tests { teardown(client, "h_should_bulk_get_a_document").await; } + + + #[tokio::test] + async fn i_should_bulk_get_invalid_documents() { + let (client, db, doc) = setup("i_should_bulk_get_invalid_documents").await; + let id = doc._id.clone(); + let invalid_id = 
"does_not_exist".to_string(); + + let collection = db.get_bulk(vec![id, invalid_id]).await.unwrap(); + assert_eq!(collection.rows.len(), 1); + assert!(db.remove(doc).await); + + teardown(client, "i_should_bulk_get_invalid_documents").await; + } } } From c702b2ac7e133c25ed88c4802d1d774470a19dfe Mon Sep 17 00:00:00 2001 From: mibes Date: Tue, 19 May 2020 10:29:38 +0200 Subject: [PATCH 39/70] Aligned versions --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 7495ab9..3da03a0 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", tag = "v0.7.2" } +sofa = { git = "https://github.com/mibes/sofa.git", tag = "v0.7.3" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: From d61c30aa957b9fed928894f9734e1626108b04ba Mon Sep 17 00:00:00 2001 From: mibes Date: Mon, 25 May 2020 09:32:09 +0200 Subject: [PATCH 40/70] Use version in favour of tag. 
--- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 3da03a0..e34aaff 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", tag = "v0.7.3" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.3" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: From 7b761931cab8ec2bab44a2ef0c2150982a151103 Mon Sep 17 00:00:00 2001 From: Horaci Macias Date: Tue, 7 Jul 2020 22:33:43 +0200 Subject: [PATCH 41/70] Added create and execute view --- src/database.rs | 35 +++++++++++++++++++++++++++++++---- src/types/design.rs | 14 ++++++++++++++ src/types/index.rs | 10 ---------- src/types/mod.rs | 2 ++ src/types/view.rs | 16 ++++++++++++++++ 5 files changed, 63 insertions(+), 14 deletions(-) create mode 100644 src/types/design.rs create mode 100644 src/types/view.rs diff --git a/src/database.rs b/src/database.rs index a56a88f..6dcb382 100644 --- a/src/database.rs +++ b/src/database.rs @@ -7,8 +7,10 @@ use crate::error::CouchError; use crate::client::Client; use crate::types::document::{DocumentId, DocumentCreatedResult}; use crate::types::find::{FindResult, FindQuery}; -use crate::types::index::{IndexFields, IndexCreated, DatabaseIndexList}; +use crate::types::index::{IndexFields, DatabaseIndexList}; use tokio::sync::mpsc::Sender; +use crate::types::design::DesignCreated; +use crate::types::view::ViewCollection; /// Database holds the logic of making operations on a CouchDB Database /// (sometimes called Collection in other NoSQL flavors such as MongoDB). 
@@ -33,7 +35,6 @@ impl Database { result } - #[allow(dead_code)] fn create_design_path(&self, id: DocumentId) -> String { let mut result: String = self.name.clone(); result.push_str("/_design/"); @@ -41,6 +42,15 @@ impl Database { result } + fn create_execute_view_path(&self, id: DocumentId) -> String { + let mut result: String = self.name.clone(); + result.push_str("/_design/"); + result.push_str(&id); + result.push_str("/_view/"); + result.push_str(&id); + result + } + fn create_compact_path(&self, design_name: &'static str) -> String { let mut result: String = self.name.clone(); result.push_str("/_compact/"); @@ -326,6 +336,23 @@ impl Database { } } + /// Creates a view document. + pub async fn create_view(&self, view_name: String, doc: Value) -> Result { + let response = self._client.put(self.create_design_path(view_name), to_string(&doc)?)?.send().await?; + + Ok(response.json().await?) + + } + + /// Executes a view. + pub async fn execute_view(&self, view_name: String, options: Option>) -> Result { + let response = self._client.get(self.create_execute_view_path(view_name), options)?.send().await?; + + dbg!(&response); + Ok(response.json().await?) + + } + /// Removes a document from the database. 
Returns success in a `bool` pub async fn remove(&self, doc: Document) -> bool { let request = self._client.delete( @@ -342,7 +369,7 @@ impl Database { /// Inserts an index in a naive way, if it already exists, will throw an /// `Err` - pub async fn insert_index(&self, name: String, spec: IndexFields) -> Result { + pub async fn insert_index(&self, name: String, spec: IndexFields) -> Result { let response = self._client .post( self.create_document_path("_index".into()), @@ -354,7 +381,7 @@ impl Database { .send().await?; let status = response.status(); - let data: IndexCreated = response.json().await?; + let data: DesignCreated = response.json().await?; if data.error.is_some() { let err = data.error.unwrap_or_else(|| s!("unspecified error")); diff --git a/src/types/design.rs b/src/types/design.rs new file mode 100644 index 0000000..721723d --- /dev/null +++ b/src/types/design.rs @@ -0,0 +1,14 @@ +use super::*; +use serde::{Serialize, Deserialize}; +use find::{SortSpec}; +use document::{DocumentId}; + +/// Design document created abstraction +#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] +pub struct DesignCreated { + pub result: Option, + pub id: Option, + pub name: Option, + pub error: Option, + pub reason: Option +} \ No newline at end of file diff --git a/src/types/index.rs b/src/types/index.rs index 18a021f..e67ba4a 100644 --- a/src/types/index.rs +++ b/src/types/index.rs @@ -27,16 +27,6 @@ pub struct Index { pub def: IndexFields } -/// Index created abstraction -#[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] -pub struct IndexCreated { - pub result: Option, - pub id: Option, - pub name: Option, - pub error: Option, - pub reason: Option -} - /// Database index list abstraction #[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct DatabaseIndexList { diff --git a/src/types/mod.rs b/src/types/mod.rs index 2eeb6d9..1a78f50 100644 --- a/src/types/mod.rs +++ b/src/types/mod.rs @@ -1,3 +1,5 @@ +pub mod view; +pub mod 
design; pub mod system; pub mod document; pub mod find; diff --git a/src/types/view.rs b/src/types/view.rs new file mode 100644 index 0000000..b1477e0 --- /dev/null +++ b/src/types/view.rs @@ -0,0 +1,16 @@ +use serde_json::Value; +use serde::{Serialize, Deserialize}; + +#[derive(Default, Serialize, Deserialize, PartialEq, Debug, Clone)] +pub struct ViewCollection { + pub offset: Option, + pub rows: Vec, + pub total_rows: u32, +} + +#[derive(Default, Serialize, Deserialize, PartialEq, Debug, Clone)] +pub struct ViewItem { + pub id: String, + pub key: String, + pub value: String, +} \ No newline at end of file From 88e193979eff094ef53ad22c422013517217a72c Mon Sep 17 00:00:00 2001 From: Horaci Macias Date: Wed, 8 Jul 2020 11:22:53 +0200 Subject: [PATCH 42/70] Check design creation result for errors --- src/database.rs | 135 ++++++++++++++++++++++++++++++------------------ 1 file changed, 86 insertions(+), 49 deletions(-) diff --git a/src/database.rs b/src/database.rs index 6dcb382..815a30b 100644 --- a/src/database.rs +++ b/src/database.rs @@ -1,16 +1,16 @@ -use std::collections::HashMap; -use reqwest::{StatusCode, RequestBuilder}; -use serde_json; -use serde_json::{to_string, Value, json}; +use crate::client::Client; use crate::document::{Document, DocumentCollection}; use crate::error::CouchError; -use crate::client::Client; -use crate::types::document::{DocumentId, DocumentCreatedResult}; -use crate::types::find::{FindResult, FindQuery}; -use crate::types::index::{IndexFields, DatabaseIndexList}; -use tokio::sync::mpsc::Sender; use crate::types::design::DesignCreated; +use crate::types::document::{DocumentCreatedResult, DocumentId}; +use crate::types::find::{FindQuery, FindResult}; +use crate::types::index::{DatabaseIndexList, IndexFields}; use crate::types::view::ViewCollection; +use reqwest::{RequestBuilder, StatusCode}; +use serde_json; +use serde_json::{json, to_string, Value}; +use std::collections::HashMap; +use tokio::sync::mpsc::Sender; /// Database holds 
the logic of making operations on a CouchDB Database /// (sometimes called Collection in other NoSQL flavors such as MongoDB). @@ -22,10 +22,7 @@ pub struct Database { impl Database { pub fn new(name: String, client: Client) -> Database { - Database { - _client: client, - name, - } + Database { _client: client, name } } fn create_document_path(&self, id: DocumentId) -> String { @@ -117,7 +114,10 @@ impl Database { match response.status() { StatusCode::OK => Ok(Document::new(response.json().await?)), - StatusCode::NOT_FOUND => Err(CouchError::new("Document was not found".to_string(), StatusCode::NOT_FOUND)), + StatusCode::NOT_FOUND => Err(CouchError::new( + "Document was not found".to_string(), + StatusCode::NOT_FOUND, + )), _ => Err(CouchError::new("Internal error".to_string(), response.status())), } } @@ -141,7 +141,11 @@ impl Database { let mut body = HashMap::new(); body.insert(s!("docs"), raw_docs); - let response = self._client.post(self.create_document_path("_bulk_docs".into()), to_string(&body)?)?.send().await?; + let response = self + ._client + .post(self.create_document_path("_bulk_docs".into()), to_string(&body)?)? + .send() + .await?; let data: Vec = response.json().await?; Ok(data) @@ -165,10 +169,12 @@ impl Database { let mut body = HashMap::new(); body.insert(s!("keys"), ids); - let response = self._client + let response = self + ._client .post(self.create_document_path("_all_docs".into()), to_string(&body)?)? .query(&options) - .send().await?; + .send() + .await?; Ok(DocumentCollection::new(response.json().await?)) } @@ -185,11 +191,7 @@ impl Database { /// always rounded *up* to the nearest multiplication of batch_size. 
pub async fn get_all_batched(&self, mut tx: Sender, batch_size: u64, max_results: u64) -> u64 { let mut bookmark = Option::None; - let limit = if batch_size > 0 { - batch_size - } else { - 1000 - }; + let limit = if batch_size > 0 { batch_size } else { 1000 }; let mut results: u64 = 0; @@ -199,8 +201,7 @@ impl Database { query.limit = Option::Some(limit); query.bookmark = bookmark.clone(); - let all_docs = self.find( - serde_json::to_value(query).unwrap()).await.unwrap(); + let all_docs = self.find(serde_json::to_value(query).unwrap()).await.unwrap(); if all_docs.total_rows == 0 { // no more rows @@ -227,7 +228,10 @@ impl Database { } /// Gets all the documents in database, with applied parameters. Parameters description can be found here: http://docs.couchdb.org/en/latest/api/ddoc/views.html#api-ddoc-view - pub async fn get_all_params(&self, params: Option>) -> Result { + pub async fn get_all_params( + &self, + params: Option>, + ) -> Result { let mut options; if let Some(opts) = params { options = opts; @@ -237,9 +241,11 @@ impl Database { options.insert(s!("include_docs"), s!("true")); - let response = self._client + let response = self + ._client .get(self.create_document_path("_all_docs".into()), Some(options))? - .send().await?; + .send() + .await?; Ok(DocumentCollection::new(response.json().await?)) } @@ -283,9 +289,11 @@ impl Database { let id = doc._id.to_owned(); let raw = doc.get_data(); - let response = self._client + let response = self + ._client .put(self.create_document_path(id), to_string(&raw)?)? - .send().await?; + .send() + .await?; let status = response.status(); let data: DocumentCreatedResult = response.json().await?; @@ -306,7 +314,11 @@ impl Database { /// Creates a document from a raw JSON document Value. pub async fn create(&self, raw_doc: Value) -> Result { - let response = self._client.post(self.name.clone(), to_string(&raw_doc)?)?.send().await?; + let response = self + ._client + .post(self.name.clone(), to_string(&raw_doc)?)? 
+ .send() + .await?; let status = response.status(); let data: DocumentCreatedResult = response.json().await?; @@ -338,19 +350,35 @@ impl Database { /// Creates a view document. pub async fn create_view(&self, view_name: String, doc: Value) -> Result { - let response = self._client.put(self.create_design_path(view_name), to_string(&doc)?)?.send().await?; - - Ok(response.json().await?) - + let response = self + ._client + .put(self.create_design_path(view_name), to_string(&doc)?)? + .send() + .await?; + + let result: DesignCreated = response.json().await?; + match result.error { + Some(e) => Err(CouchError { + status: reqwest::StatusCode::INTERNAL_SERVER_ERROR, + message: e, + }), + None => Ok(result), + } } /// Executes a view. - pub async fn execute_view(&self, view_name: String, options: Option>) -> Result { - let response = self._client.get(self.create_execute_view_path(view_name), options)?.send().await?; + pub async fn execute_view( + &self, + view_name: String, + options: Option>, + ) -> Result { + let response = self + ._client + .get(self.create_execute_view_path(view_name), options)? + .send() + .await?; - dbg!(&response); Ok(response.json().await?) - } /// Removes a document from the database. Returns success in a `bool` @@ -370,15 +398,17 @@ impl Database { /// Inserts an index in a naive way, if it already exists, will throw an /// `Err` pub async fn insert_index(&self, name: String, spec: IndexFields) -> Result { - let response = self._client + let response = self + ._client .post( self.create_document_path("_index".into()), js!(json!({ - "name": name, - "index": spec - })), + "name": name, + "index": spec + })), )? 
- .send().await?; + .send() + .await?; let status = response.status(); let data: DesignCreated = response.json().await?; @@ -393,9 +423,11 @@ impl Database { /// Reads the database's indexes and returns them pub async fn read_indexes(&self) -> Result { - let response = self._client + let response = self + ._client .get(self.create_document_path("_index".into()), None)? - .send().await?; + .send() + .await?; Ok(response.json().await?) } @@ -415,9 +447,14 @@ impl Database { } // Let's create it then - let _ = self.insert_index(name, spec).await?; - - // Created and alright - Ok(true) + let result: DesignCreated = self.insert_index(name, spec).await?; + match result.error { + Some(e) => Err(CouchError { + status: reqwest::StatusCode::INTERNAL_SERVER_ERROR, + message: e, + }), + // Created and alright + None => Ok(true), + } } } From ec9cb37ea44a15910236bd6d0e8f191737720ee3 Mon Sep 17 00:00:00 2001 From: Alexander von Gluck IV Date: Tue, 14 Jul 2020 08:49:08 -0500 Subject: [PATCH 43/70] statics: Remove unneeded (and kinda odd) static str's * These make non-static str database names ackward to use --- src/client.rs | 8 ++++---- src/database.rs | 4 ++-- src/lib.rs | 6 +++--- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/src/client.rs b/src/client.rs index 6f62cca..efa4e08 100644 --- a/src/client.rs +++ b/src/client.rs @@ -100,11 +100,11 @@ impl Client { Ok(data) } - fn build_dbname(&self, dbname: &'static str) -> String { + fn build_dbname(&self, dbname: &str) -> String { self.db_prefix.clone() + dbname } - pub async fn db(&self, dbname: &'static str) -> Result { + pub async fn db(&self, dbname: &str) -> Result { let name = self.build_dbname(dbname); let db = Database::new(name.clone(), self.clone()); @@ -121,7 +121,7 @@ impl Client { } } - pub async fn make_db(&self, dbname: &'static str) -> Result { + pub async fn make_db(&self, dbname: &str) -> Result { let name = self.build_dbname(dbname); let db = Database::new(name.clone(), self.clone()); @@ -144,7 
+144,7 @@ impl Client { } } - pub async fn destroy_db(&self, dbname: &'static str) -> Result { + pub async fn destroy_db(&self, dbname: &str) -> Result { let path = self.create_path(self.build_dbname(dbname), None)?; let response = self._client.delete(&path) .headers(construct_json_headers(None)) diff --git a/src/database.rs b/src/database.rs index 815a30b..536f7dc 100644 --- a/src/database.rs +++ b/src/database.rs @@ -48,7 +48,7 @@ impl Database { result } - fn create_compact_path(&self, design_name: &'static str) -> String { + fn create_compact_path(&self, design_name: &str) -> String { let mut result: String = self.name.clone(); result.push_str("/_compact/"); result.push_str(design_name); @@ -97,7 +97,7 @@ impl Database { } /// Starts the compaction of a given index - pub async fn compact_index(&self, index: &'static str) -> bool { + pub async fn compact_index(&self, index: &str) -> bool { let request = self._client.post(self.create_compact_path(index), "".into()); self.is_accepted(request).await } diff --git a/src/lib.rs b/src/lib.rs index 477577d..4734c7d 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -214,7 +214,7 @@ mod sofa_tests { const DB_HOST: &str = "http://admin:password@localhost:5984"; - async fn setup(dbname: &'static str) -> (Client, Database, Document) { + async fn setup(dbname: &str) -> (Client, Database, Document) { let client = Client::new(DB_HOST).unwrap(); let dbw = client.db(dbname).await; assert!(dbw.is_ok()); @@ -234,7 +234,7 @@ mod sofa_tests { (client, db, doc) } - async fn teardown(client: Client, dbname: &'static str) { + async fn teardown(client: Client, dbname: &str) { assert!(client.destroy_db(dbname).await.unwrap()) } @@ -266,7 +266,7 @@ mod sofa_tests { teardown(client, "c_should_get_a_single_document").await; } - async fn setup_create_indexes(dbname: &'static str) -> (Client, Database, Document) { + async fn setup_create_indexes(dbname: &str) -> (Client, Database, Document) { let (client, db, doc) = setup(dbname).await; let spec = 
types::index::IndexFields::new(vec![types::find::SortSpec::Simple(s!("thing"))]); From 3aada27be28816db8a48db9b8c18188965716e65 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 25 Jul 2020 13:13:44 +0200 Subject: [PATCH 44/70] Updated dependencies --- Cargo.toml | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index fe429db..a8f8df8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.3" +version = "0.7.4" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" @@ -17,15 +17,15 @@ include = [ ] [dependencies] -serde = { version = "1.0.106", features = ["derive"] } -serde_json = "1.0.51" -url = "2.1.1" -tokio = { version = "0.2.17", features = ["full"] } +serde = { version = "^1.0.114", features = ["derive"] } +serde_json = "^1.0.56" +url = "^2.1.1" +tokio = { version = "^0.2.22", features = ["full"] } [dependencies.reqwest] -version = "0.10.4" +version = "^0.10.7" features = ["json", "gzip", "cookies"] [dev-dependencies] -pretty_assertions = "0.5" -tokio = { version = "0.2.17", features = ["full"] } +pretty_assertions = "^0.6.1" +tokio = { version = "^0.2.22", features = ["full"] } From 8de24978d63a3b92a5ffa9159aafd787680c2cd8 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 25 Jul 2020 13:13:52 +0200 Subject: [PATCH 45/70] Updated version --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e34aaff..5145ccf 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.3" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.4" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: From 
695fd0cc1f93fbc909e2cffe693b78a7303fbd71 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 25 Jul 2020 13:14:02 +0200 Subject: [PATCH 46/70] Addressed some Clippy warnings --- src/client.rs | 2 +- src/database.rs | 5 ++--- src/document.rs | 5 ++--- src/types/design.rs | 3 --- src/types/find.rs | 2 +- src/types/view.rs | 1 - 6 files changed, 6 insertions(+), 12 deletions(-) diff --git a/src/client.rs b/src/client.rs index 6f62cca..016bcd4 100644 --- a/src/client.rs +++ b/src/client.rs @@ -137,7 +137,7 @@ impl Client { match s.ok { Some(true) => Ok(db), - Some(false) | _ => { + _ => { let err = s.error.unwrap_or_else(|| s!("unspecified error")); Err(CouchError::new(err, status)) }, diff --git a/src/database.rs b/src/database.rs index 815a30b..11acdab 100644 --- a/src/database.rs +++ b/src/database.rs @@ -7,7 +7,6 @@ use crate::types::find::{FindQuery, FindResult}; use crate::types::index::{DatabaseIndexList, IndexFields}; use crate::types::view::ViewCollection; use reqwest::{RequestBuilder, StatusCode}; -use serde_json; use serde_json::{json, to_string, Value}; use std::collections::HashMap; use tokio::sync::mpsc::Sender; @@ -305,7 +304,7 @@ impl Database { Ok(Document::new(val)) } - Some(false) | _ => { + _ => { let err = data.error.unwrap_or_else(|| s!("unspecified error")); Err(CouchError::new(err, status)) } @@ -341,7 +340,7 @@ impl Database { Ok(Document::new(val)) } - Some(false) | _ => { + _ => { let err = data.error.unwrap_or_else(|| s!("unspecified error")); Err(CouchError::new(err, status)) } diff --git a/src/document.rs b/src/document.rs index 8a0fd9e..0e381d5 100644 --- a/src/document.rs +++ b/src/document.rs @@ -1,4 +1,3 @@ -use serde_json; use serde_json::Value; use std::ops::{Index, IndexMut}; use serde::{Serialize, Deserialize}; @@ -81,7 +80,7 @@ impl Document { .map(|v| s!(v.as_str().unwrap_or(""))) .collect(); - let data = db.get_bulk(ids).await.and_then(|docs| Ok(docs.get_data())); + let data = db.get_bulk(ids).await.map(|docs| docs.get_data()); 
match data { Ok(data) => { @@ -161,7 +160,7 @@ impl DocumentCollection { let items: Vec = rows.into_iter() .filter(|d| { let maybe_err: Option = json_extr!(d["error"]); - if let Some(_) = maybe_err { + if maybe_err.is_some() { // remove errors false } else { diff --git a/src/types/design.rs b/src/types/design.rs index 721723d..8826cee 100644 --- a/src/types/design.rs +++ b/src/types/design.rs @@ -1,7 +1,4 @@ -use super::*; use serde::{Serialize, Deserialize}; -use find::{SortSpec}; -use document::{DocumentId}; /// Design document created abstraction #[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] diff --git a/src/types/find.rs b/src/types/find.rs index 929be8f..cfdb887 100644 --- a/src/types/find.rs +++ b/src/types/find.rs @@ -14,7 +14,7 @@ impl From for SortDirection { fn from(original: String) -> SortDirection { match original.as_ref() { "desc" => SortDirection::Desc, - "asc" | _ => SortDirection::Asc + _ => SortDirection::Asc } } } diff --git a/src/types/view.rs b/src/types/view.rs index b1477e0..b9bba31 100644 --- a/src/types/view.rs +++ b/src/types/view.rs @@ -1,4 +1,3 @@ -use serde_json::Value; use serde::{Serialize, Deserialize}; #[derive(Default, Serialize, Deserialize, PartialEq, Debug, Clone)] From e508c82fcc55f9c9f0644a84e77a726ee5f86e74 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 5 Sep 2020 07:35:34 +0200 Subject: [PATCH 47/70] Convenience function to retrieve the database name. 
--- Cargo.toml | 2 +- README.md | 2 +- src/database.rs | 5 +++++ 3 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index a8f8df8..a093f57 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.4" +version = "0.7.5" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 5145ccf..6a1bfbd 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.4" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.5" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/src/database.rs b/src/database.rs index 11acdab..c1e4bcd 100644 --- a/src/database.rs +++ b/src/database.rs @@ -24,6 +24,11 @@ impl Database { Database { _client: client, name } } + // convenience function to retrieve the name of the database + pub fn name(&self) -> &str { + &self.name + } + fn create_document_path(&self, id: DocumentId) -> String { let mut result: String = self.name.clone(); result.push_str("/"); From 953c75f8d99f4c66880ccc46b7cdde00a57c929c Mon Sep 17 00:00:00 2001 From: mibes Date: Wed, 9 Sep 2020 09:53:52 +0200 Subject: [PATCH 48/70] Included the find_batched operation. 
--- .gitignore | 2 ++ CHANGELOG.md | 3 +++ Cargo.toml | 2 +- README.md | 4 ++-- src/database.rs | 23 ++++++++++++++++------- 5 files changed, 24 insertions(+), 10 deletions(-) diff --git a/.gitignore b/.gitignore index e25035f..7608143 100644 --- a/.gitignore +++ b/.gitignore @@ -15,4 +15,6 @@ Cargo.lock # Exclude IntelliJ files .idea +test_db.json + # End of https://www.gitignore.io/api/rust \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md index 1d0647f..21530f9 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.7.6] - 2020-09-09 +- Added `find_batched` to allow asynchronous customized searches + ## [0.7.0] - 2020-02-03 ### Added diff --git a/Cargo.toml b/Cargo.toml index a093f57..b2ae46d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.5" +version = "0.7.6" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 6a1bfbd..72ecaf2 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.5" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.6" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: @@ -57,7 +57,7 @@ Since Couch 3.0 the "Admin Party" mode is no longer supported. This means you ne The tests and examples assume an "admin" CouchDB user with a "password" CouchDB password. 
Docker run command: ```shell script -docker run --rm -p 5984:5984 -e COUCHDB_USER=admin -e COUCHDB_PASSWORD=password couchdb:3.0 +docker run --rm -p 5984:5984 -e COUCHDB_USER=admin -e COUCHDB_PASSWORD=password couchdb:3 ``` And then diff --git a/src/database.rs b/src/database.rs index c1e4bcd..31efb04 100644 --- a/src/database.rs +++ b/src/database.rs @@ -193,19 +193,28 @@ impl Database { /// databases only. Batch size can be requested. A value of 0, means the default batch_size of /// 1000 is used. max_results of 0 means all documents will be returned. A given max_results is /// always rounded *up* to the nearest multiplication of batch_size. - pub async fn get_all_batched(&self, mut tx: Sender, batch_size: u64, max_results: u64) -> u64 { + /// This operation is identical to find_batched(FindQuery::find_all(), tx, batch_size, max_results) + pub async fn get_all_batched(&self, tx: Sender, batch_size: u64, max_results: u64) -> u64 { + let query = FindQuery::find_all(); + self.find_batched(query, tx, batch_size, max_results).await + } + + /// Finds documents in the database, using bookmarks to iterate through all the documents. + /// Results are returned through an mpcs channel for async processing. Use this for very large + /// databases only. Batch size can be requested. A value of 0, means the default batch_size of + /// 1000 is used. max_results of 0 means all documents will be returned. A given max_results is + /// always rounded *up* to the nearest multiplication of batch_size. 
+ pub async fn find_batched(&self, mut query: FindQuery, mut tx: Sender, batch_size: u64, max_results: u64) -> u64 { let mut bookmark = Option::None; let limit = if batch_size > 0 { batch_size } else { 1000 }; let mut results: u64 = 0; + query.limit = Option::Some(limit); loop { - let mut query = FindQuery::find_all(); - - query.limit = Option::Some(limit); - query.bookmark = bookmark.clone(); - - let all_docs = self.find(serde_json::to_value(query).unwrap()).await.unwrap(); + let mut segment_query = query.clone(); + segment_query.bookmark = bookmark.clone(); + let all_docs = self.find(serde_json::to_value(segment_query).unwrap()).await.unwrap(); if all_docs.total_rows == 0 { // no more rows From 50190ab428cc0831e98f852d47c67e325974b347 Mon Sep 17 00:00:00 2001 From: mibes Date: Wed, 9 Sep 2020 11:38:11 +0200 Subject: [PATCH 49/70] Allow FindQuery to be converted to Value --- src/types/find.rs | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/src/types/find.rs b/src/types/find.rs index cfdb887..80e6def 100644 --- a/src/types/find.rs +++ b/src/types/find.rs @@ -107,4 +107,28 @@ impl FindQuery { bookmark: None, } } +} + +impl Into for FindQuery { + fn into(self) -> Value { + serde_json::to_value(&self).expect("can not convert into json") + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_convert_to_value() { + let mut sort = HashMap::new(); + sort.insert("first_name".to_string(), "desc".to_string()); + + let mut query = FindQuery::find_all(); + query.limit = Some(10); + query.skip = Some(20); + query.sort = Some(SortSpec::Complex(sort)); + let json: Value = query.into(); + assert_eq!(r#"{"limit":10,"selector":{"_id":{"$ne":null}},"skip":20,"sort":{"first_name":"desc"}}"#, json.to_string()) + } } \ No newline at end of file From b27ac5848b5ddf47e487894589e357e8901fea80 Mon Sep 17 00:00:00 2001 From: mibes Date: Wed, 9 Sep 2020 11:40:08 +0200 Subject: [PATCH 50/70] Allow FindQuery to be converted to Value --- 
CHANGELOG.md | 3 +++ Cargo.toml | 2 +- README.md | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 21530f9..0373637 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.7.7] - 2020-09-09 +- Allow FindQuery to be converted to Value + ## [0.7.6] - 2020-09-09 - Added `find_batched` to allow asynchronous customized searches diff --git a/Cargo.toml b/Cargo.toml index b2ae46d..0d705df 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.6" +version = "0.7.7" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 72ecaf2..e52c842 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.6" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.7" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: From db5d5efb9c005c6012e024233097e13708f6507f Mon Sep 17 00:00:00 2001 From: mibes Date: Wed, 9 Sep 2020 11:47:37 +0200 Subject: [PATCH 51/70] Implemented Display for FindQuery --- CHANGELOG.md | 3 +++ Cargo.toml | 2 +- README.md | 2 +- src/types/find.rs | 19 +++++++++++++++++-- 4 files changed, 22 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 0373637..46debba 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
## [Unreleased] +## [0.7.8] - 2020-09-09 +- Implemented Display for FindQuery + ## [0.7.7] - 2020-09-09 - Allow FindQuery to be converted to Value diff --git a/Cargo.toml b/Cargo.toml index 0d705df..6ce46cf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.7" +version = "0.7.8" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index e52c842..1f88a39 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.7" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.8" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/src/types/find.rs b/src/types/find.rs index 80e6def..6cabc4b 100644 --- a/src/types/find.rs +++ b/src/types/find.rs @@ -1,6 +1,8 @@ use std::collections::HashMap; use serde::{Serialize, Deserialize}; use serde_json::{Value, json}; +use std::fmt::Display; +use serde::export::Formatter; /// Sort direction abstraction #[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] @@ -115,6 +117,19 @@ impl Into for FindQuery { } } +impl Into for &FindQuery { + fn into(self) -> Value { + serde_json::to_value(&self).expect("can not convert into json") + } +} + +impl Display for FindQuery { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + let json: Value = self.into(); + f.write_str(&json.to_string()) + } +} + #[cfg(test)] mod tests { use super::*; @@ -128,7 +143,7 @@ mod tests { query.limit = Some(10); query.skip = Some(20); query.sort = Some(SortSpec::Complex(sort)); - let json: Value = query.into(); - assert_eq!(r#"{"limit":10,"selector":{"_id":{"$ne":null}},"skip":20,"sort":{"first_name":"desc"}}"#, json.to_string()) + 
let json = query.to_string(); + assert_eq!(r#"{"limit":10,"selector":{"_id":{"$ne":null}},"skip":20,"sort":{"first_name":"desc"}}"#, json) } } \ No newline at end of file From 7b7aacce10ded9477d72733543f8447fac94ba3b Mon Sep 17 00:00:00 2001 From: mibes Date: Wed, 9 Sep 2020 14:30:08 +0200 Subject: [PATCH 52/70] `json_extr!` does not panic when called on a non-existent field. Like in find for _id, when the find result does not include an _id. --- CHANGELOG.md | 4 ++++ Cargo.toml | 2 +- README.md | 2 +- src/lib.rs | 5 +++-- 4 files changed, 9 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 46debba..6dbcdca 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.7.9] - 2020-09-09 +- `json_extr!` does not panic when called on a non-existent field. Like in find for _id, + when the find result does not include an _id. + ## [0.7.8] - 2020-09-09 - Implemented Display for FindQuery diff --git a/Cargo.toml b/Cargo.toml index 6ce46cf..2ea6aa2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.8" +version = "0.7.9" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 1f88a39..1d0b581 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.8" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.9" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/src/lib.rs b/src/lib.rs index 477577d..fb42833 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -96,10 +96,11 @@ mod macros { }; } - /// Extracts a JSON Value to a defined 
Struct + /// Extracts a JSON Value to a defined Struct; Returns the default value when the field can not be found + /// or converted macro_rules! json_extr { ($e:expr) => { - serde_json::from_value($e.to_owned()).unwrap() + serde_json::from_value($e.to_owned()).unwrap_or_default() }; } From 05b094dfa0e3bf39fd141547ab32133c10e06149 Mon Sep 17 00:00:00 2001 From: mibes Date: Wed, 9 Sep 2020 15:26:04 +0200 Subject: [PATCH 53/70] get_all_params now takes a typed QueryParams as input, and uses POST, instead of GET, for greater flexibility. --- CHANGELOG.md | 4 ++ Cargo.toml | 2 +- README.md | 2 +- src/database.rs | 11 ++-- src/lib.rs | 18 ++++++- src/types/mod.rs | 1 + src/types/query.rs | 128 +++++++++++++++++++++++++++++++++++++++++++++ 7 files changed, 159 insertions(+), 7 deletions(-) create mode 100644 src/types/query.rs diff --git a/CHANGELOG.md b/CHANGELOG.md index 6dbcdca..467c55d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.7.10] - 2020-09-09 +- BREAKING CHANGE: get_all_params now takes a typed QueryParams as input. +- get_all_params uses POST, instead of GET, for greater flexibility. + ## [0.7.9] - 2020-09-09 - `json_extr!` does not panic when called on a non-existent field. Like in find for _id, when the find result does not include an _id. 
diff --git a/Cargo.toml b/Cargo.toml index 2ea6aa2..af9563e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.9" +version = "0.7.10" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 1d0b581..3dde7d9 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.9" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.10" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/src/database.rs b/src/database.rs index 31efb04..6b33cef 100644 --- a/src/database.rs +++ b/src/database.rs @@ -10,6 +10,7 @@ use reqwest::{RequestBuilder, StatusCode}; use serde_json::{json, to_string, Value}; use std::collections::HashMap; use tokio::sync::mpsc::Sender; +use crate::types::query::QueryParams; /// Database holds the logic of making operations on a CouchDB Database /// (sometimes called Collection in other NoSQL flavors such as MongoDB). @@ -243,20 +244,22 @@ impl Database { /// Gets all the documents in database, with applied parameters. Parameters description can be found here: http://docs.couchdb.org/en/latest/api/ddoc/views.html#api-ddoc-view pub async fn get_all_params( &self, - params: Option>, + params: Option, ) -> Result { let mut options; if let Some(opts) = params { options = opts; } else { - options = HashMap::new(); + options = QueryParams::default(); } - options.insert(s!("include_docs"), s!("true")); + options.include_docs = Some(true); + // we use POST here, because this allows for a larger set of keys to be provided, compared + // to a GET call. 
It provides the same functionality let response = self ._client - .get(self.create_document_path("_all_docs".into()), Some(options))? + .post(self.create_document_path("_all_docs".into()), js!(&options))? .send() .await?; diff --git a/src/lib.rs b/src/lib.rs index fb42833..baef459 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -211,7 +211,8 @@ mod sofa_tests { use crate::database::Database; use crate::document::Document; use crate::types; - use serde_json::json; + use serde_json::{json}; + use crate::types::query::QueryParams; const DB_HOST: &str = "http://admin:password@localhost:5984"; @@ -358,5 +359,20 @@ mod sofa_tests { teardown(client, "i_should_bulk_get_invalid_documents").await; } + + + #[tokio::test] + async fn j_should_get_all_documents_with_keys() { + let (client, db, doc) = setup("j_should_get_all_documents_with_keys").await; + let id = doc._id.clone(); + + let params = QueryParams::from_keys(vec![id]); + + let collection = db.get_all_params(Some(params)).await.unwrap(); + assert_eq!(collection.rows.len(), 1); + assert!(db.remove(doc).await); + + teardown(client, "j_should_get_all_documents_with_keys").await; + } } } diff --git a/src/types/mod.rs b/src/types/mod.rs index 1a78f50..aaf852b 100644 --- a/src/types/mod.rs +++ b/src/types/mod.rs @@ -4,3 +4,4 @@ pub mod system; pub mod document; pub mod find; pub mod index; +pub mod query; diff --git a/src/types/query.rs b/src/types/query.rs new file mode 100644 index 0000000..ce119fe --- /dev/null +++ b/src/types/query.rs @@ -0,0 +1,128 @@ +use serde::{Serialize, Deserialize}; + +#[derive(Serialize, Deserialize, PartialEq, Debug, Clone)] +pub struct QueryParams { + #[serde(skip_serializing_if = "Option::is_none")] + pub conflicts: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub descending: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub end_key: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub end_key_doc_id: Option, + + #[serde(skip_serializing_if = 
"Option::is_none")] + pub group: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub group_level: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub include_docs: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub attachments: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub att_encoding_info: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub inclusive_end: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub key: Option, + + #[serde(skip_serializing_if = "Vec::is_empty")] + pub keys: Vec, + + #[serde(skip_serializing_if = "Option::is_none")] + pub limit: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub reduce: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub skip: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub sorted: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub stable: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub stale: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub start_key: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub start_key_doc_id: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub update: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub update_seq: Option, +} + +impl Default for QueryParams { + fn default() -> Self { + QueryParams { + conflicts: None, + descending: None, + end_key: None, + end_key_doc_id: None, + group: None, + group_level: None, + include_docs: None, + attachments: None, + att_encoding_info: None, + inclusive_end: None, + key: None, + keys: vec![], + limit: None, + reduce: None, + skip: None, + sorted: None, + stable: None, + stale: None, + start_key: None, + start_key_doc_id: None, + update: None, + update_seq: None, + } + } +} + +impl QueryParams { + pub fn from_keys(keys: Vec) -> Self { + QueryParams { + conflicts: None, + descending: None, + end_key: 
None, + end_key_doc_id: None, + group: None, + group_level: None, + include_docs: None, + attachments: None, + att_encoding_info: None, + inclusive_end: None, + key: None, + keys, + limit: None, + reduce: None, + skip: None, + sorted: None, + stable: None, + stale: None, + start_key: None, + start_key_doc_id: None, + update: None, + update_seq: None, + } + } +} From ce78ef87ddcf7e54078ccfef0b553c20d6f2b6a7 Mon Sep 17 00:00:00 2001 From: mibes Date: Wed, 9 Sep 2020 19:37:08 +0200 Subject: [PATCH 54/70] Allow to query a view with a different design name --- CHANGELOG.md | 3 +++ Cargo.toml | 2 +- README.md | 2 +- src/database.rs | 30 +++++++++++++++++++++++------- 4 files changed, 28 insertions(+), 9 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 467c55d..7c5c9cc 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.7.11] - 2020-09-09 +- Allow to query a view with a different design name + ## [0.7.10] - 2020-09-09 - BREAKING CHANGE: get_all_params now takes a typed QueryParams as input. - get_all_params uses POST, instead of GET, for greater flexibility. 
diff --git a/Cargo.toml b/Cargo.toml index af9563e..c43d010 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.10" +version = "0.7.11" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 3dde7d9..046fd14 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.10" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.11" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/src/database.rs b/src/database.rs index 6b33cef..8577a1d 100644 --- a/src/database.rs +++ b/src/database.rs @@ -44,12 +44,12 @@ impl Database { result } - fn create_execute_view_path(&self, id: DocumentId) -> String { + fn create_query_view_path(&self, design_id: DocumentId, view_id: DocumentId) -> String { let mut result: String = self.name.clone(); result.push_str("/_design/"); - result.push_str(&id); + result.push_str(&design_id); result.push_str("/_view/"); - result.push_str(&id); + result.push_str(&view_id); result } @@ -365,10 +365,10 @@ impl Database { } /// Creates a view document. - pub async fn create_view(&self, view_name: String, doc: Value) -> Result { + pub async fn create_view(&self, design_name: String, doc: Value) -> Result { let response = self ._client - .put(self.create_design_path(view_name), to_string(&doc)?)? + .put(self.create_design_path(design_name), to_string(&doc)?)? .send() .await?; @@ -382,7 +382,23 @@ impl Database { } } - /// Executes a view. + /// Executes a query against a view. 
+ pub async fn query( + &self, + design_name: String, + view_name: String, + options: Option>, + ) -> Result { + let response = self + ._client + .get(self.create_query_view_path(design_name, view_name), options)? + .send() + .await?; + + Ok(response.json().await?) + } + + /// Convenience function to executes a view name matches design name. pub async fn execute_view( &self, view_name: String, @@ -390,7 +406,7 @@ impl Database { ) -> Result { let response = self ._client - .get(self.create_execute_view_path(view_name), options)? + .get(self.create_query_view_path(view_name.clone(), view_name), options)? .send() .await?; From 24935e1b86a5046373f9d0aef04d40fa07025f14 Mon Sep 17 00:00:00 2001 From: mibes Date: Thu, 10 Sep 2020 10:29:27 +0200 Subject: [PATCH 55/70] Check response success for create_view --- CHANGELOG.md | 3 +++ Cargo.toml | 2 +- README.md | 2 +- src/database.rs | 18 ++++++++++++------ 4 files changed, 17 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7c5c9cc..095eaed 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
## [Unreleased] +## [0.7.12] - 2020-09-10 +- Check response success for create_view() + ## [0.7.11] - 2020-09-09 - Allow to query a view with a different design name diff --git a/Cargo.toml b/Cargo.toml index c43d010..dd309ed 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.11" +version = "0.7.12" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 046fd14..082a1b9 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.11" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.12" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/src/database.rs b/src/database.rs index 8577a1d..d6c28cd 100644 --- a/src/database.rs +++ b/src/database.rs @@ -372,13 +372,19 @@ impl Database { .send() .await?; + let response_status = response.status(); let result: DesignCreated = response.json().await?; - match result.error { - Some(e) => Err(CouchError { - status: reqwest::StatusCode::INTERNAL_SERVER_ERROR, - message: e, - }), - None => Ok(result), + + if response_status.is_success() { + Ok(result) + } else { + match result.error { + Some(e) => Err(CouchError { + status: response_status, + message: e, + }), + None => Ok(result), + } } } From d114fc70bf4403f9725611d45702f00980746452 Mon Sep 17 00:00:00 2001 From: Horaci Macias Date: Thu, 10 Sep 2020 13:52:49 +0200 Subject: [PATCH 56/70] Allow executing update functions --- src/database.rs | 40 ++++++++++++++++++++++++++++++++++------ 1 file changed, 34 insertions(+), 6 deletions(-) diff --git a/src/database.rs b/src/database.rs index d6c28cd..a6a6af6 100644 --- a/src/database.rs +++ b/src/database.rs @@ 
-5,12 +5,12 @@ use crate::types::design::DesignCreated; use crate::types::document::{DocumentCreatedResult, DocumentId}; use crate::types::find::{FindQuery, FindResult}; use crate::types::index::{DatabaseIndexList, IndexFields}; +use crate::types::query::QueryParams; use crate::types::view::ViewCollection; use reqwest::{RequestBuilder, StatusCode}; use serde_json::{json, to_string, Value}; use std::collections::HashMap; use tokio::sync::mpsc::Sender; -use crate::types::query::QueryParams; /// Database holds the logic of making operations on a CouchDB Database /// (sometimes called Collection in other NoSQL flavors such as MongoDB). @@ -53,6 +53,15 @@ impl Database { result } + fn create_update_function_path(&self, design_id: DocumentId, view_id: DocumentId) -> String { + let mut result: String = self.name.clone(); + result.push_str("/_design/"); + result.push_str(&design_id); + result.push_str("/_update/"); + result.push_str(&view_id); + result + } + fn create_compact_path(&self, design_name: &'static str) -> String { let mut result: String = self.name.clone(); result.push_str("/_compact/"); @@ -205,7 +214,13 @@ impl Database { /// databases only. Batch size can be requested. A value of 0, means the default batch_size of /// 1000 is used. max_results of 0 means all documents will be returned. A given max_results is /// always rounded *up* to the nearest multiplication of batch_size. - pub async fn find_batched(&self, mut query: FindQuery, mut tx: Sender, batch_size: u64, max_results: u64) -> u64 { + pub async fn find_batched( + &self, + mut query: FindQuery, + mut tx: Sender, + batch_size: u64, + max_results: u64, + ) -> u64 { let mut bookmark = Option::None; let limit = if batch_size > 0 { batch_size } else { 1000 }; @@ -242,10 +257,7 @@ impl Database { } /// Gets all the documents in database, with applied parameters. 
Parameters description can be found here: http://docs.couchdb.org/en/latest/api/ddoc/views.html#api-ddoc-view - pub async fn get_all_params( - &self, - params: Option, - ) -> Result { + pub async fn get_all_params(&self, params: Option) -> Result { let mut options; if let Some(opts) = params { options = opts; @@ -419,6 +431,22 @@ impl Database { Ok(response.json().await?) } + /// Convenience function to execute an update function whose name matches design name. + pub async fn execute_update(&self, name: String, body: Option) -> Result { + let body = match body { + Some(v) => to_string(&v)?, + None => "".to_string(), + }; + + let response = self + ._client + .post(self.create_update_function_path(name.clone(), name), body)? + .send() + .await?; + + Ok(response.text().await?) + } + /// Removes a document from the database. Returns success in a `bool` pub async fn remove(&self, doc: Document) -> bool { let request = self._client.delete( From 46ce1a9b7e36a1cb4ca40660bc87bff862269f29 Mon Sep 17 00:00:00 2001 From: Horaci Macias Date: Thu, 10 Sep 2020 16:32:56 +0200 Subject: [PATCH 57/70] Pass document_id to execute update --- src/database.rs | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/src/database.rs b/src/database.rs index a6a6af6..51ff37a 100644 --- a/src/database.rs +++ b/src/database.rs @@ -53,12 +53,19 @@ impl Database { result } - fn create_update_function_path(&self, design_id: DocumentId, view_id: DocumentId) -> String { + fn create_update_function_path( + &self, + design_id: DocumentId, + update_id: DocumentId, + document_id: DocumentId, + ) -> String { let mut result: String = self.name.clone(); result.push_str("/_design/"); result.push_str(&design_id); result.push_str("/_update/"); - result.push_str(&view_id); + result.push_str(&update_id); + result.push_str("/"); + result.push_str(&document_id); result } @@ -432,7 +439,13 @@ impl Database { } /// Convenience function to execute an update function whose name 
matches design name. - pub async fn execute_update(&self, name: String, body: Option) -> Result { + pub async fn execute_update( + &self, + design_id: String, + name: String, + document_id: String, + body: Option, + ) -> Result { let body = match body { Some(v) => to_string(&v)?, None => "".to_string(), @@ -440,9 +453,10 @@ impl Database { let response = self ._client - .post(self.create_update_function_path(name.clone(), name), body)? + .put(self.create_update_function_path(design_id, name, document_id), body)? .send() - .await?; + .await? + .error_for_status()?; Ok(response.text().await?) } From cc59cfcbc908c151eb134f0924ec00b5e9803c3b Mon Sep 17 00:00:00 2001 From: Horaci Macias Date: Thu, 10 Sep 2020 17:39:12 +0200 Subject: [PATCH 58/70] Renamed --- src/database.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/database.rs b/src/database.rs index 51ff37a..d632ec7 100644 --- a/src/database.rs +++ b/src/database.rs @@ -53,7 +53,7 @@ impl Database { result } - fn create_update_function_path( + fn create_execute_update_path( &self, design_id: DocumentId, update_id: DocumentId, @@ -453,7 +453,7 @@ impl Database { let response = self ._client - .put(self.create_update_function_path(design_id, name, document_id), body)? + .put(self.create_execute_update_path(design_id, name, document_id), body)? .send() .await? .error_for_status()?; From d6bbe54ff03ce7dbeb27c4f5d466ae25256a1a79 Mon Sep 17 00:00:00 2001 From: mibes Date: Fri, 11 Sep 2020 10:15:24 +0200 Subject: [PATCH 59/70] Use reqwest's `error_for_status()` on responses & return an Error when one occurs during batch reading. 
--- CHANGELOG.md | 6 ++++ Cargo.toml | 2 +- README.md | 2 +- examples/async_batch_read/main.rs | 4 ++- src/database.rs | 58 ++++++++++++++++++------------- 5 files changed, 44 insertions(+), 28 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 095eaed..77e0203 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,12 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.7.13] - 2020-09-11 +- Use reqwest's `error_for_status()` on responses, where we are not actively checking the result. +- Return an Error when one occurs during batch reading. +- Removed the `'static` lifetime on some of the `str` parameters; contribution from kallisti5 +- Included `execute_update()` operation; contribution from horacimacias + ## [0.7.12] - 2020-09-10 - Check response success for create_view() diff --git a/Cargo.toml b/Cargo.toml index dd309ed..e976c62 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.12" +version = "0.7.13" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 082a1b9..083f55b 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.12" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.13" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/examples/async_batch_read/main.rs b/examples/async_batch_read/main.rs index e2bf2d6..59c2768 100644 --- a/examples/async_batch_read/main.rs +++ b/examples/async_batch_read/main.rs @@ -23,7 +23,9 @@ async fn main() { let client = sofa::Client::new_with_timeout(DB_HOST, 120).unwrap(); let db = client.db(TEST_DB).await.unwrap(); - 
db.get_all_batched(tx, 0, 0).await; + if let Err(err) = db.get_all_batched(tx, 0, 0).await { + println!("error during batch read: {:?}", err); + } }); // Open a file for writing diff --git a/src/database.rs b/src/database.rs index 3e3d7f2..2d33b59 100644 --- a/src/database.rs +++ b/src/database.rs @@ -131,16 +131,8 @@ impl Database { /// Gets one document pub async fn get(&self, id: DocumentId) -> Result { - let response = self._client.get(self.create_document_path(id), None)?.send().await?; - - match response.status() { - StatusCode::OK => Ok(Document::new(response.json().await?)), - StatusCode::NOT_FOUND => Err(CouchError::new( - "Document was not found".to_string(), - StatusCode::NOT_FOUND, - )), - _ => Err(CouchError::new("Internal error".to_string(), response.status())), - } + let response = self._client.get(self.create_document_path(id), None)?.send().await?.error_for_status()?; + Ok(Document::new(response.json().await?)) } /// Gets documents in bulk with provided IDs list @@ -167,6 +159,7 @@ impl Database { .post(self.create_document_path("_bulk_docs".into()), to_string(&body)?)? .send() .await?; + let data: Vec = response.json().await?; Ok(data) @@ -195,7 +188,8 @@ impl Database { .post(self.create_document_path("_all_docs".into()), to_string(&body)?)? .query(&options) .send() - .await?; + .await? + .error_for_status()?; Ok(DocumentCollection::new(response.json().await?)) } @@ -211,7 +205,7 @@ impl Database { /// 1000 is used. max_results of 0 means all documents will be returned. A given max_results is /// always rounded *up* to the nearest multiplication of batch_size. 
/// This operation is identical to find_batched(FindQuery::find_all(), tx, batch_size, max_results) - pub async fn get_all_batched(&self, tx: Sender, batch_size: u64, max_results: u64) -> u64 { + pub async fn get_all_batched(&self, tx: Sender, batch_size: u64, max_results: u64) -> Result { let query = FindQuery::find_all(); self.find_batched(query, tx, batch_size, max_results).await } @@ -227,28 +221,33 @@ impl Database { mut tx: Sender, batch_size: u64, max_results: u64, - ) -> u64 { + ) -> Result { let mut bookmark = Option::None; let limit = if batch_size > 0 { batch_size } else { 1000 }; let mut results: u64 = 0; query.limit = Option::Some(limit); - loop { + let maybe_err = loop { let mut segment_query = query.clone(); segment_query.bookmark = bookmark.clone(); - let all_docs = self.find(serde_json::to_value(segment_query).unwrap()).await.unwrap(); + let all_docs = match self.find(serde_json::to_value(segment_query).unwrap()).await { + Ok(docs) => docs, + Err(err) => { + break Some(err) + }, + }; if all_docs.total_rows == 0 { // no more rows - break; + break None; } if all_docs.bookmark.is_some() && all_docs.bookmark != bookmark { bookmark.replace(all_docs.bookmark.clone().unwrap_or_default()); } else { // no bookmark, break the query loop - break; + break None; } results += all_docs.total_rows as u64; @@ -256,11 +255,15 @@ impl Database { tx.send(all_docs).await.unwrap(); if max_results > 0 && results >= max_results { - break; + break None; } - } + }; - results + if let Some(err) = maybe_err { + Err(err) + } else { + Ok(results) + } } /// Gets all the documents in database, with applied parameters. Parameters description can be found here: http://docs.couchdb.org/en/latest/api/ddoc/views.html#api-ddoc-view @@ -280,7 +283,7 @@ impl Database { ._client .post(self.create_document_path("_all_docs".into()), js!(&options))? 
.send() - .await?; + .await?.error_for_status()?; Ok(DocumentCollection::new(response.json().await?)) } @@ -354,8 +357,8 @@ impl Database { .post(self.name.clone(), to_string(&raw_doc)?)? .send() .await?; - let status = response.status(); + let status = response.status(); let data: DocumentCreatedResult = response.json().await?; match data.ok { @@ -402,7 +405,10 @@ impl Database { status: response_status, message: e, }), - None => Ok(result), + None => Err(CouchError { + status: response_status, + message: s!("unspecified error"), + }), } } } @@ -418,7 +424,8 @@ impl Database { ._client .get(self.create_query_view_path(design_name, view_name), options)? .send() - .await?; + .await? + .error_for_status()?; Ok(response.json().await?) } @@ -433,7 +440,8 @@ impl Database { ._client .get(self.create_query_view_path(view_name.clone(), view_name), options)? .send() - .await?; + .await? + .error_for_status()?; Ok(response.json().await?) } From 465c54ea48d8f17f09917d81fc83d76cadce9e51 Mon Sep 17 00:00:00 2001 From: mibes Date: Mon, 14 Sep 2020 08:22:59 +0200 Subject: [PATCH 60/70] Return value in ViewItem as Value. --- CHANGELOG.md | 3 +++ Cargo.toml | 2 +- README.md | 2 +- src/types/view.rs | 3 ++- 4 files changed, 7 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 77e0203..ea2fd61 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.7.14] - 2020-09-14 +- Return value in ViewItem as a Value, not String + ## [0.7.13] - 2020-09-11 - Use reqwest's `error_for_status()` on responses, where we are not actively checking the result. - Return an Error when one occurs during batch reading. 
diff --git a/Cargo.toml b/Cargo.toml index e976c62..1dd0f41 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.13" +version = "0.7.14" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 083f55b..02cd15d 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.13" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.14" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/src/types/view.rs b/src/types/view.rs index b9bba31..464f5ee 100644 --- a/src/types/view.rs +++ b/src/types/view.rs @@ -1,4 +1,5 @@ use serde::{Serialize, Deserialize}; +use serde_json::Value; #[derive(Default, Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct ViewCollection { @@ -11,5 +12,5 @@ pub struct ViewCollection { pub struct ViewItem { pub id: String, pub key: String, - pub value: String, + pub value: Value, } \ No newline at end of file From ea58106da5b03cf363fc85116fd029b401b3b6bc Mon Sep 17 00:00:00 2001 From: mibes Date: Mon, 14 Sep 2020 08:32:35 +0200 Subject: [PATCH 61/70] Make id in ViewItem optional. --- CHANGELOG.md | 3 +++ README.md | 2 +- src/types/view.rs | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index ea2fd61..61ba8fd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.7.15] - 2020-09-14 +- Make id in ViewItem optional. 
+ ## [0.7.14] - 2020-09-14 - Return value in ViewItem as a Value, not String diff --git a/README.md b/README.md index 02cd15d..017d132 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.14" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.15" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/src/types/view.rs b/src/types/view.rs index 464f5ee..69d7ab6 100644 --- a/src/types/view.rs +++ b/src/types/view.rs @@ -10,7 +10,7 @@ pub struct ViewCollection { #[derive(Default, Serialize, Deserialize, PartialEq, Debug, Clone)] pub struct ViewItem { - pub id: String, pub key: String, pub value: Value, + pub id: Option, } \ No newline at end of file From b51db4dab7257ecc888c1a3861fdc50c9b7b6489 Mon Sep 17 00:00:00 2001 From: mibes Date: Mon, 14 Sep 2020 08:35:26 +0200 Subject: [PATCH 62/70] Updated version to 0.7.15 --- Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Cargo.toml b/Cargo.toml index 1dd0f41..4973908 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.14" +version = "0.7.15" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" From 0fc7baf3429d3c82c912e7317e0e1491b83e3e70 Mon Sep 17 00:00:00 2001 From: mibes Date: Mon, 14 Sep 2020 08:41:45 +0200 Subject: [PATCH 63/70] Make total_rows in ViewCollection optional --- CHANGELOG.md | 3 +++ Cargo.toml | 2 +- README.md | 2 +- src/types/view.rs | 2 +- 4 files changed, 6 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 61ba8fd..f52a973 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic 
Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.7.16] - 2020-09-14 +- Make total_rows in ViewCollection optional. + ## [0.7.15] - 2020-09-14 - Make id in ViewItem optional. diff --git a/Cargo.toml b/Cargo.toml index 4973908..10ee5f7 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.15" +version = "0.7.16" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 017d132..563fb44 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.15" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.16" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/src/types/view.rs b/src/types/view.rs index 69d7ab6..dfb7926 100644 --- a/src/types/view.rs +++ b/src/types/view.rs @@ -5,7 +5,7 @@ use serde_json::Value; pub struct ViewCollection { pub offset: Option, pub rows: Vec, - pub total_rows: u32, + pub total_rows: Option, } #[derive(Default, Serialize, Deserialize, PartialEq, Debug, Clone)] From 7e239476bb432fa2ac79312a6f1e07de6c870427 Mon Sep 17 00:00:00 2001 From: mibes Date: Mon, 14 Sep 2020 18:39:56 +0200 Subject: [PATCH 64/70] Sort takes an array of key/value pairs, like: [{"first_name":"desc"}] --- CHANGELOG.md | 3 +++ Cargo.toml | 2 +- README.md | 2 +- src/types/find.rs | 10 +++++----- 4 files changed, 10 insertions(+), 7 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index f52a973..3702866 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. 
## [Unreleased] +## [0.7.17] - 2020-09-14 +- Sort takes an array of key/value pairs, like: [{"first_name":"desc"}] + ## [0.7.16] - 2020-09-14 - Make total_rows in ViewCollection optional. diff --git a/Cargo.toml b/Cargo.toml index 10ee5f7..a7f20cd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.16" +version = "0.7.17" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 563fb44..6a4b1b7 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.16" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.17" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/src/types/find.rs b/src/types/find.rs index 6cabc4b..30ffef7 100644 --- a/src/types/find.rs +++ b/src/types/find.rs @@ -51,8 +51,8 @@ pub struct FindQuery { #[serde(skip_serializing_if = "Option::is_none")] pub skip: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub sort: Option, + #[serde(skip_serializing_if = "Vec::is_empty")] + pub sort: Vec, #[serde(skip_serializing_if = "Option::is_none")] pub fields: Option>, @@ -103,7 +103,7 @@ impl FindQuery { selector: json!({ "_id" : {"$ne": null}}), limit: None, skip: None, - sort: None, + sort: vec![], fields: None, use_index: None, bookmark: None, @@ -142,8 +142,8 @@ mod tests { let mut query = FindQuery::find_all(); query.limit = Some(10); query.skip = Some(20); - query.sort = Some(SortSpec::Complex(sort)); + query.sort = vec![SortSpec::Complex(sort)]; let json = query.to_string(); - assert_eq!(r#"{"limit":10,"selector":{"_id":{"$ne":null}},"skip":20,"sort":{"first_name":"desc"}}"#, json) + 
assert_eq!(r#"{"limit":10,"selector":{"_id":{"$ne":null}},"skip":20,"sort":[{"first_name":"desc"}]}"#, json) } } \ No newline at end of file From 34db8bb8e2fc44a1a26269fe952108bab911af46 Mon Sep 17 00:00:00 2001 From: Horaci Macias Date: Thu, 24 Sep 2020 18:14:26 +0200 Subject: [PATCH 65/70] Delete execute_view as it's doing 99% the same as query --- src/database.rs | 37 +++++++++++++++---------------------- 1 file changed, 15 insertions(+), 22 deletions(-) diff --git a/src/database.rs b/src/database.rs index 2d33b59..b8a560f 100644 --- a/src/database.rs +++ b/src/database.rs @@ -131,7 +131,12 @@ impl Database { /// Gets one document pub async fn get(&self, id: DocumentId) -> Result { - let response = self._client.get(self.create_document_path(id), None)?.send().await?.error_for_status()?; + let response = self + ._client + .get(self.create_document_path(id), None)? + .send() + .await? + .error_for_status()?; Ok(Document::new(response.json().await?)) } @@ -205,7 +210,12 @@ impl Database { /// 1000 is used. max_results of 0 means all documents will be returned. A given max_results is /// always rounded *up* to the nearest multiplication of batch_size. /// This operation is identical to find_batched(FindQuery::find_all(), tx, batch_size, max_results) - pub async fn get_all_batched(&self, tx: Sender, batch_size: u64, max_results: u64) -> Result { + pub async fn get_all_batched( + &self, + tx: Sender, + batch_size: u64, + max_results: u64, + ) -> Result { let query = FindQuery::find_all(); self.find_batched(query, tx, batch_size, max_results).await } @@ -233,9 +243,7 @@ impl Database { segment_query.bookmark = bookmark.clone(); let all_docs = match self.find(serde_json::to_value(segment_query).unwrap()).await { Ok(docs) => docs, - Err(err) => { - break Some(err) - }, + Err(err) => break Some(err), }; if all_docs.total_rows == 0 { @@ -283,7 +291,8 @@ impl Database { ._client .post(self.create_document_path("_all_docs".into()), js!(&options))? 
.send() - .await?.error_for_status()?; + .await? + .error_for_status()?; Ok(DocumentCollection::new(response.json().await?)) } @@ -430,22 +439,6 @@ impl Database { Ok(response.json().await?) } - /// Convenience function to executes a view name matches design name. - pub async fn execute_view( - &self, - view_name: String, - options: Option>, - ) -> Result { - let response = self - ._client - .get(self.create_query_view_path(view_name.clone(), view_name), options)? - .send() - .await? - .error_for_status()?; - - Ok(response.json().await?) - } - /// Convenience function to execute an update function whose name matches design name. pub async fn execute_update( &self, From a36f6165a67a99c275ff221894b55258bbb32a49 Mon Sep 17 00:00:00 2001 From: Horaci Macias Date: Fri, 25 Sep 2020 10:13:39 +0200 Subject: [PATCH 66/70] ViewItem can now contain a doc for 'include_docs=true' view queries --- src/types/view.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/types/view.rs b/src/types/view.rs index dfb7926..6a1c6e3 100644 --- a/src/types/view.rs +++ b/src/types/view.rs @@ -1,4 +1,4 @@ -use serde::{Serialize, Deserialize}; +use serde::{Deserialize, Serialize}; use serde_json::Value; #[derive(Default, Serialize, Deserialize, PartialEq, Debug, Clone)] @@ -13,4 +13,6 @@ pub struct ViewItem { pub key: String, pub value: Value, pub id: Option, -} \ No newline at end of file + // docs field, populated if query was ran with 'include_docs' + pub doc: Option, +} From 9d46c15e1ebb1d792bb119e1177b3514464c69f6 Mon Sep 17 00:00:00 2001 From: Horaci Macias Date: Fri, 25 Sep 2020 10:14:29 +0200 Subject: [PATCH 67/70] Use QueryParams when executing views --- src/database.rs | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/database.rs b/src/database.rs index b8a560f..70fba1c 100644 --- a/src/database.rs +++ b/src/database.rs @@ -427,11 +427,11 @@ impl Database { &self, design_name: String, view_name: String, - options: Option>, + options: 
Option, ) -> Result { let response = self ._client - .get(self.create_query_view_path(design_name, view_name), options)? + .post(self.create_query_view_path(design_name, view_name), js!(&options))? .send() .await? .error_for_status()?; From 75a946f3f0aeb6512fb75c978240a40c5413c117 Mon Sep 17 00:00:00 2001 From: mibes Date: Fri, 25 Sep 2020 10:22:57 +0200 Subject: [PATCH 68/70] Updated the CHANGELOG and versions. --- CHANGELOG.md | 9 +++++++++ Cargo.toml | 2 +- README.md | 2 +- 3 files changed, 11 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 3702866..b6ee932 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,13 +7,22 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.7.18] - 2020-09-25 + +- Views now use QueryParams instead of an untyped map. +- Views can now return the optional "doc" item. +- BREAKING CHANGE: `execute_view` has been removed. Use `query` instead. + ## [0.7.17] - 2020-09-14 + - Sort takes an array of key/value pairs, like: [{"first_name":"desc"}] ## [0.7.16] - 2020-09-14 + - Make total_rows in ViewCollection optional. ## [0.7.15] - 2020-09-14 + - Make id in ViewItem optional. 
## [0.7.14] - 2020-09-14 diff --git a/Cargo.toml b/Cargo.toml index a7f20cd..9bdce53 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.17" +version = "0.7.18" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index 6a4b1b7..a73895e 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.17" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.18" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: From 97968b6b3fd1313f5a3f1fe2463ad277217fd125 Mon Sep 17 00:00:00 2001 From: mibes Date: Sat, 26 Sep 2020 11:33:18 +0200 Subject: [PATCH 69/70] the `find()` operations takes a FindQuery --- CHANGELOG.md | 4 + Cargo.toml | 2 +- README.md | 2 +- docker-compose.yml | 2 +- examples/basic_operations/main.rs | 70 +++++++------- src/database.rs | 8 +- src/lib.rs | 27 +++--- src/types/find.rs | 151 +++++++++++++++++++++++++++--- 8 files changed, 192 insertions(+), 74 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index b6ee932..fa0babd 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,10 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] +## [0.8.0] - 2020-09-26 + +- the `find()` operations takes a FindQuery + ## [0.7.18] - 2020-09-25 - Views now use QueryParams instead of an untyped map. 
diff --git a/Cargo.toml b/Cargo.toml index 9bdce53..3153cb3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "sofa" -version = "0.7.18" +version = "0.8.0" authors = ["Mathieu Amiot ", "mibes "] license = "MIT/Apache-2.0" description = "Sofa - CouchDB for Rust" diff --git a/README.md b/README.md index a73895e..d7c1c69 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Here: [http://docs.rs/sofa](http://docs.rs/sofa) If you want to use this particular fork, include this dependency in the Cargo.toml file: ```toml [dependencies] -sofa = { git = "https://github.com/mibes/sofa.git", version = "0.7.18" } +sofa = { git = "https://github.com/mibes/sofa.git", version = "0.8.0" } ``` If you want to continue to use the "old" 0.6 version use this dependency instead: diff --git a/docker-compose.yml b/docker-compose.yml index fe740b5..1a280d7 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -3,7 +3,7 @@ version: '3' services: couchdb: - image: couchdb:2.2 + image: couchdb:3 restart: always ports: - '5984:5984' diff --git a/examples/basic_operations/main.rs b/examples/basic_operations/main.rs index 7d186a1..a36b7d8 100644 --- a/examples/basic_operations/main.rs +++ b/examples/basic_operations/main.rs @@ -14,6 +14,8 @@ extern crate sofa; use serde_json::{json, Value}; +use sofa::types::find::FindQuery; +use std::error::Error; /// Update DB_HOST to point to your running Couch instance const DB_HOST: &str = "http://admin:password@localhost:5984"; @@ -31,64 +33,58 @@ fn test_docs(amount: i32) -> Vec { } #[tokio::main] -async fn main() { +async fn main() -> Result<(), Box> { println!("Connecting..."); // Prepare the Sofa client let client = sofa::Client::new(DB_HOST).unwrap(); - let mut db_initialized = false; // This command gets a reference to an existing database, or it creates a new one when it does // not yet exist. let db = client.db(TEST_DB).await.unwrap(); - // List the existing databases. 
The db_initialized is superfluous, since we just created it in // the previous step. It is for educational purposes only. - match client.list_dbs().await { - Ok(dbs) => { - println!("Existing databases:"); - for db in dbs { - println!("Couch DB {}", db); - - if db == TEST_DB { - db_initialized = true; - } - } + let dbs = client.list_dbs().await?; + let mut db_initialized: bool = false; + println!("Existing databases:"); + for db in dbs { + println!("Couch DB {}", db); + if db == TEST_DB { + db_initialized = true; } - Err(err) => panic!("Oops: {:?}", err), } - let mut first_doc_id: Option = None; + if !db_initialized { + println!("{} not found", TEST_DB); + return Ok(()); + } - if db_initialized { - // let's add some docs - match db.bulk_docs(test_docs(100)).await { - Ok(resp) => { - println!("Bulk docs completed"); + println!("--- Creating ---"); - first_doc_id = resp.first().unwrap().clone().id; + // let's add some docs + match db.bulk_docs(test_docs(100)).await { + Ok(resp) => { + println!("Bulk docs completed"); - for r in resp { - println!( - "Id: {}, OK?: {}", - r.id.unwrap_or_else(|| "--".to_string()), - r.ok.unwrap_or(false) - ) - } + for r in resp { + println!( + "Id: {}, OK?: {}", + r.id.unwrap_or_else(|| "--".to_string()), + r.ok.unwrap_or(false) + ) } - Err(err) => println!("Oops: {:?}", err), } + Err(err) => println!("Oops: {:?}", err), } - println!("---"); + println!("--- Finding ---"); - if first_doc_id.is_some() { - // we have an id of the first document we've just inserted - match db.get(first_doc_id.unwrap()).await { - Ok(doc) => println!("First document: {}", doc.get_data().to_string()), - Err(err) => println!("Oops: {:?}", err), - } + let find_all = FindQuery::find_all(); + let docs = db.find(&find_all).await?; + if let Some(row) = docs.rows.iter().next() { + println!("First document: {}", row.doc.get_data().to_string()) } - println!("All operations are done") + println!("All operations are done"); + Ok(()) } diff --git a/src/database.rs 
b/src/database.rs index 70fba1c..b04c8f5 100644 --- a/src/database.rs +++ b/src/database.rs @@ -241,7 +241,7 @@ impl Database { let maybe_err = loop { let mut segment_query = query.clone(); segment_query.bookmark = bookmark.clone(); - let all_docs = match self.find(serde_json::to_value(segment_query).unwrap()).await { + let all_docs = match self.find(&query).await { Ok(docs) => docs, Err(err) => break Some(err), }; @@ -297,10 +297,10 @@ impl Database { Ok(DocumentCollection::new(response.json().await?)) } - /// Finds a document in the database through a Mango query. Parameters here http://docs.couchdb.org/en/latest/api/database/find.html - pub async fn find(&self, params: Value) -> Result { + /// Finds a document in the database through a Mango query. + pub async fn find(&self, query: &FindQuery) -> Result { let path = self.create_document_path("_find".into()); - let response = self._client.post(path, js!(¶ms))?.send().await?; + let response = self._client.post(path, js!(query))?.send().await?; let status = response.status(); let data: FindResult = response.json().await.unwrap(); diff --git a/src/lib.rs b/src/lib.rs index c18239a..4c89f28 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -211,8 +211,9 @@ mod sofa_tests { use crate::database::Database; use crate::document::Document; use crate::types; - use serde_json::{json}; + use crate::types::find::FindQuery; use crate::types::query::QueryParams; + use serde_json::json; const DB_HOST: &str = "http://admin:password@localhost:5984"; @@ -313,18 +314,17 @@ mod sofa_tests { #[tokio::test] async fn g_should_find_documents_in_db() { let (client, db, doc) = setup_create_indexes("g_should_find_documents_in_db").await; + let query = FindQuery::new_from_value(json!({ + "selector": { + "thing": true + }, + "limit": 1, + "sort": [{ + "thing": "desc" + }] + })); - let documents_res = db - .find(json!({ - "selector": { - "thing": true - }, - "limit": 1, - "sort": [{ - "thing": "desc" - }] - })) - .await; + let documents_res = 
db.find(&query).await; assert!(documents_res.is_ok()); let documents = documents_res.unwrap(); @@ -333,7 +333,6 @@ mod sofa_tests { teardown(client, "g_should_find_documents_in_db").await; } - #[tokio::test] async fn h_should_bulk_get_a_document() { let (client, db, doc) = setup("h_should_bulk_get_a_document").await; @@ -346,7 +345,6 @@ mod sofa_tests { teardown(client, "h_should_bulk_get_a_document").await; } - #[tokio::test] async fn i_should_bulk_get_invalid_documents() { let (client, db, doc) = setup("i_should_bulk_get_invalid_documents").await; @@ -360,7 +358,6 @@ mod sofa_tests { teardown(client, "i_should_bulk_get_invalid_documents").await; } - #[tokio::test] async fn j_should_get_all_documents_with_keys() { let (client, db, doc) = setup("j_should_get_all_documents_with_keys").await; diff --git a/src/types/find.rs b/src/types/find.rs index 30ffef7..e88b8f8 100644 --- a/src/types/find.rs +++ b/src/types/find.rs @@ -1,14 +1,15 @@ +use serde::export::Formatter; +use serde::{Deserialize, Serialize}; +use serde_json::Value; use std::collections::HashMap; -use serde::{Serialize, Deserialize}; -use serde_json::{Value, json}; use std::fmt::Display; -use serde::export::Formatter; /// Sort direction abstraction #[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] -#[serde(untagged)] pub enum SortDirection { + #[serde(rename = "desc")] Desc, + #[serde(rename = "asc")] Asc, } @@ -16,13 +17,13 @@ impl From for SortDirection { fn from(original: String) -> SortDirection { match original.as_ref() { "desc" => SortDirection::Desc, - _ => SortDirection::Asc + _ => SortDirection::Asc, } } } /// Sort spec content abstraction -pub type SortSpecContent = HashMap; +pub type SortSpecContent = HashMap; /// Sort spec abstraction #[derive(Serialize, Deserialize, Eq, PartialEq, Debug, Clone)] @@ -41,6 +42,7 @@ pub enum IndexSpec { } /// Find query abstraction +/// Parameters here http://docs.couchdb.org/en/latest/api/database/find.html #[derive(Serialize, Deserialize, 
PartialEq, Debug, Clone)] pub struct FindQuery { pub selector: Value, @@ -60,8 +62,23 @@ pub struct FindQuery { #[serde(skip_serializing_if = "Option::is_none")] pub use_index: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub r: Option, + #[serde(skip_serializing_if = "Option::is_none")] pub bookmark: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub update: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub stable: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub stale: Option, + + #[serde(skip_serializing_if = "Option::is_none")] + pub execution_stats: Option, } /// Find result abstraction @@ -89,26 +106,88 @@ pub struct ExplainResult { pub range: Value, } +pub type RegEx = HashMap; +pub type FieldFilter = HashMap; + +#[derive(Serialize, Deserialize)] +pub struct NotEqual { + #[serde(rename = "$ne")] + pub ne: Option, +} + +#[derive(Serialize, Deserialize)] +pub struct SelectAll { + #[serde(rename = "_id")] + #[serde(skip_serializing_if = "Option::is_none")] + pub id: Option, +} + +// Little helper to create a select all query. +impl Default for SelectAll { + fn default() -> Self { + SelectAll { + id: Some(NotEqual { ne: None }), + } + } +} + +impl SelectAll { + pub fn as_value(&self) -> Value { + self.into() + } +} + +impl Into for &SelectAll { + fn into(self) -> Value { + serde_json::to_value(&self).expect("can not convert into json") + } +} + +impl From for SelectAll { + fn from(value: Value) -> Self { + serde_json::from_value(value).expect("json Value is not a valid Selector") + } +} + /// Returns all documents #[macro_export] macro_rules! find_all_selector { - () => { - json!({"selector" : { "_id" : {"$ne": "null"}}}) - } - } + () => { + FindQuery::find_all().as_value() + }; +} impl FindQuery { - pub fn find_all() -> FindQuery { + pub fn new_from_value(query: Value) -> Self { + query.into() + } + + // Create a new FindQuery from a valid selector. 
The selector syntax is documented here: + // https://docs.couchdb.org/en/latest/api/database/find.html#find-selectors + pub fn new(selector: Value) -> Self { FindQuery { - selector: json!({ "_id" : {"$ne": null}}), + selector, limit: None, skip: None, sort: vec![], fields: None, use_index: None, + r: None, bookmark: None, + update: None, + stable: None, + stale: None, + execution_stats: None, } } + + pub fn find_all() -> Self { + Self::new(SelectAll::default().as_value()) + } + + pub fn as_value(&self) -> Value { + self.into() + } } impl Into for FindQuery { @@ -123,6 +202,12 @@ impl Into for &FindQuery { } } +impl From for FindQuery { + fn from(value: Value) -> Self { + serde_json::from_value(value).expect("json Value is not a valid FindQuery") + } +} + impl Display for FindQuery { fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { let json: Value = self.into(); @@ -133,17 +218,53 @@ impl Display for FindQuery { #[cfg(test)] mod tests { use super::*; + use serde_json::json; #[test] fn test_convert_to_value() { let mut sort = HashMap::new(); - sort.insert("first_name".to_string(), "desc".to_string()); + sort.insert("first_name".to_string(), SortDirection::Desc); let mut query = FindQuery::find_all(); query.limit = Some(10); query.skip = Some(20); query.sort = vec![SortSpec::Complex(sort)]; let json = query.to_string(); - assert_eq!(r#"{"limit":10,"selector":{"_id":{"$ne":null}},"skip":20,"sort":[{"first_name":"desc"}]}"#, json) + assert_eq!( + r#"{"limit":10,"selector":{"_id":{"$ne":null}},"skip":20,"sort":[{"first_name":"desc"}]}"#, + json + ) + } + + #[test] + fn test_default_select_all() { + let selector = FindQuery::find_all().as_value().to_string(); + assert_eq!(selector, r#"{"selector":{"_id":{"$ne":null}}}"#) } -} \ No newline at end of file + + #[test] + fn test_from_json() { + let query = FindQuery::new_from_value(json!({ + "selector": { + "thing": true + }, + "limit": 1, + "sort": [{ + "thing": "desc" + }] + })); + + let selector = 
query.selector.to_string(); + assert_eq!(selector, r#"{"thing":true}"#); + assert_eq!(query.limit, Some(1)); + assert_eq!(query.sort.len(), 1); + let first_sort = query.sort.get(0).unwrap(); + if let SortSpec::Complex(spec) = first_sort { + assert!(spec.contains_key("thing")); + let direction = spec.get("thing").unwrap(); + assert_eq!(direction, &SortDirection::Desc); + } else { + panic!("unexpected sort spec"); + } + } +} From aee6ddbf9e603d28442ff663c7a0d17e05fbc3ae Mon Sep 17 00:00:00 2001 From: mibes Date: Mon, 28 Sep 2020 07:39:27 +0200 Subject: [PATCH 70/70] Spin-off notice --- README.md | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/README.md b/README.md index d7c1c69..d2c6f8a 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,11 @@ ![sofa-logo](https://raw.githubusercontent.com/mibes/sofa/master/docs/logo-sofa.png "Logo Sofa") +## Migration Notice + +Please note that we are no longer maintaining this fork, but have spun off a new project: +[couch-rs](https://github.com/mibes/couch-rs) + ## Documentation Here: [http://docs.rs/sofa](http://docs.rs/sofa)