From c04cdb0813cc161bd8b5ff0197a3a18b31f5e170 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Tue, 1 Apr 2014 20:30:49 +0200 Subject: [PATCH 01/92] Improve mapping of nested arrays. --- lib/mapping-generator.js | 18 ++++++++++++------ test/boost-field-test.js | 2 +- test/mapping-generator-test.js | 33 +++++++++++++++++++++++++++++++++ 3 files changed, 46 insertions(+), 7 deletions(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 1c456f4b..eb667b02 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -5,7 +5,6 @@ Generator.prototype.generateMapping = function(schema, cb){ var cleanTree = getCleanTree(schema.tree, schema.paths, ''); delete cleanTree[schema.get('versionKey')]; var mapping = getMapping(cleanTree, ''); - cb(null, { properties: mapping }); }; @@ -113,16 +112,23 @@ function getCleanTree(tree, paths, prefix) { // Field has some kind of type if (type) { - // If it is an nestec schema + // If it is a nested array if (value[0]) { - //A nested schema can be just a blank object with no defined paths - if(value[0].tree && value[0].paths){ - cleanTree[field] = getCleanTree(value[0].tree, value[0].paths, ''); + // A nested array can contain complex objects + if (paths[field].schema && paths[field].schema.tree && paths[field].schema.paths) { + cleanTree[field] = getCleanTree(paths[field].schema.tree, paths[field].schema.paths, ''); } // Check for single type arrays (which elasticsearch will treat as the core type i.e. [String] = string) else if ( paths[field].caster && paths[field].caster.instance ) { - cleanTree[field] = {type: paths[field].caster.instance.toLowerCase()}; + // Even for simple types the value can be an object if there is other attributes than type + if(typeof value[0] === 'object'){ + cleanTree[field] = value[0]; + } else { + cleanTree[field] = {}; + } + cleanTree[field].type = paths[field].caster.instance.toLowerCase(); } + //A nested schema can be just a blank object with no defined paths else{ cleanTree[field] = { type:'object' diff --git a/test/boost-field-test.js b/test/boost-field-test.js index 60f10fdf..e9b94108 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -30,7 +30,7 @@ describe('Add Boost Option Per Field', function(){ it('should create a mapping with boost field added', function(done){ BlogPost.createMapping(function(err, mapping){ esClient.getMapping('blogposts', 'blogpost', function(err, mapping){ - var props = mapping.blogpost.properties; + var props = mapping.blogposts.mappings.blogpost.properties; props.title.type.should.eql('string'); props.title.boost.should.eql(2.0); done(); diff --git a/test/mapping-generator-test.js b/test/mapping-generator-test.js index 4773ffcc..4562cfc2 100644 --- a/test/mapping-generator-test.js +++ b/test/mapping-generator-test.js @@ -75,6 +75,7 @@ describe('MappingGenerator', function(){ done(); }); }); + it('recognizes an multi_field and maps it as one', function(done){ generator.generateMapping(new Schema({ test: { @@ -137,6 +138,38 @@ describe('MappingGenerator', function(){ done(); }); }); + it('recognizes a nested array with a simple type and maps it as a simple attribute', function(done){ + generator.generateMapping(new Schema({ + contacts: [String] + }), function(err, mapping){ + mapping.properties.contacts.type.should.eql('string'); + done(); + }); + }); + it('recognizes a nested array with a simple type and additional attributes and maps it as a simple attribute', function(done){ + generator.generateMapping(new Schema({ + contacts: [{ type: String, 
es_index: 'not_analyzed' }] + }), function(err, mapping){ + mapping.properties.contacts.type.should.eql('string'); + mapping.properties.contacts.index.should.eql('not_analyzed'); + done(); + }); + }); + it('recognizes a nested array with a complex object and maps it', function(done){ + generator.generateMapping(new Schema({ + name: String, + contacts: [{ + email: {type: String, es_index: 'not_analyzed' }, + telephone: String + }] + }), function(err, mapping){ + mapping.properties.name.type.should.eql('string'); + mapping.properties.contacts.properties.email.type.should.eql('string'); + mapping.properties.contacts.properties.email.index.should.eql('not_analyzed'); + mapping.properties.contacts.properties.telephone.type.should.eql('string'); + done(); + }); + }); it('excludes a virtual property from mapping', function(done){ var PersonSchema = new Schema({ first_name: {type: String}, From 2a6e07b64bccad895e130aa6e0666f60305a6d96 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Thu, 24 Apr 2014 21:08:10 +0200 Subject: [PATCH 02/92] added possibility to specify any river options --- lib/mongoosastic.js | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index e9298999..f3205f9b 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -173,21 +173,22 @@ module.exports = function elasticSearchPlugin(schema, options){ var model = this; setIndexNameIfUnset(model.modelName); if (!this.db.name) throw "ERROR: "+ model.modelName +".river() call before mongoose.connect" - esClient.putRiver( - 'mongodb', - indexName, - { - type: 'mongodb', - mongodb: { - db: this.db.name, - collection: indexName, - gridfs: (useRiver && useRiver.gridfs) ? useRiver.gridfs : false - }, - index: { - name: indexName, - type: typeName - } - }, cb ); + + // the river definition can come from the options of mongoosasic, + // but some attributes will be overwritten anyway + // see https://github.com/richardwilly98/elasticsearch-river-mongodb/wiki + var riverDefinition = useRiver.definition || {}; + + riverDefinition.type = 'mongodb'; + riverDefinition.mongodb = riverDefinition.mongodb || {}; + riverDefinition.mongodb.db = this.db.name; + riverDefinition.mongodb.collection = indexName; + riverDefinition.gridfs = (useRiver && useRiver.gridfs) ? useRiver.gridfs : false; + riverDefinition.index = riverDefinition.index || {}; + riverDefinition.index.name = indexName; + riverDefinition.index.type = typeName; + + esClient.putRiver('mongodb', indexName, riverDefinition, cb); } } }; From b11adb9ec88504966b293fb8e9872f0921de4f65 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Fri, 25 Apr 2014 21:21:34 +0200 Subject: [PATCH 03/92] support changing indexName with river without loosing the correct collection name --- lib/mongoosastic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f3205f9b..0430d804 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -182,7 +182,7 @@ module.exports = function elasticSearchPlugin(schema, options){ riverDefinition.type = 'mongodb'; riverDefinition.mongodb = riverDefinition.mongodb || {}; riverDefinition.mongodb.db = this.db.name; - riverDefinition.mongodb.collection = indexName; + riverDefinition.mongodb.collection = this.modelName.toLowerCase() + 's'; riverDefinition.gridfs = (useRiver && useRiver.gridfs) ? 
useRiver.gridfs : false; riverDefinition.index = riverDefinition.index || {}; riverDefinition.index.name = indexName; From 95368d0653902c783f29743b95d33d532aac655b Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Fri, 9 May 2014 20:41:32 +0200 Subject: [PATCH 04/92] make serialize more robust for deep models with empty parts --- lib/serialize.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/serialize.js b/lib/serialize.js index 4e763005..381aeaab 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -5,9 +5,11 @@ function serialize(model, mapping) { var serializedForm = {}; for (var field in mapping.properties) { - var val = serialize(model[field], mapping.properties[field]); - if (val !== undefined) { - serializedForm[field] = val; + if (model) { + var val = serialize(model[field], mapping.properties[field]); + if (val !== undefined) { + serializedForm[field] = val; + } } } From c7db5a723d0d1e02ebb86663f80e12e55ee97b3e Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Fri, 9 May 2014 22:13:36 +0200 Subject: [PATCH 05/92] make serialize compatible with nested arrays and their mappings --- lib/serialize.js | 31 ++++++++++++++++++------------- test/serialize-test.js | 30 +++++++++++++++++++++++------- 2 files changed, 41 insertions(+), 20 deletions(-) diff --git a/lib/serialize.js b/lib/serialize.js index 381aeaab..55b108e8 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -1,20 +1,25 @@ module.exports = serialize; -function serialize(model, mapping) { - if (mapping.properties) { - var serializedForm = {}; - - for (var field in mapping.properties) { - if (model) { - var val = serialize(model[field], mapping.properties[field]); - if (val !== undefined) { - serializedForm[field] = val; - } - } +function _serializeObject(object, mapping) { + var serialized = {}; + for (var field in mapping.properties) { + var val = serialize(object[field], mapping.properties[field]); + if (val !== undefined) { + serialized[field] = val; } + } + return serialized; +} - return serializedForm; - +function serialize(model, mapping) { + if (mapping.properties && model) { + if (Array.isArray(model)) { + return model.map(function(object) { + return _serializeObject(object, mapping); + }); + } else { + return _serializeObject(model, mapping); + } } else if (typeof value === 'object' && value !== null) { var name = value.constructor.name; if (name === 'ObjectID') { diff --git a/test/serialize-test.js b/test/serialize-test.js index ed0da45a..f824ab17 100644 --- a/test/serialize-test.js +++ b/test/serialize-test.js @@ -14,7 +14,8 @@ var PersonSchema22 = new Schema({ , last: String }, dob: Date, - bowlingBall: {type:Schema.ObjectId, ref:'BowlingBall'} + bowlingBall: {type:Schema.ObjectId, ref:'BowlingBall'}, + games: [{score: Number, date: Date}] }); var Person = mongoose.model('Person22', PersonSchema22); @@ -26,24 +27,39 @@ generator.generateMapping(PersonSchema22, function(err, tmp) { mapping = tmp; }); -describe('serialize', function(){ +describe.only('serialize', function(){ var dude = new Person({ - name: {first:'Jeffery', last:'Lebowski'}, + name: {first:'Jeffrey', last:'Lebowski'}, dob: new Date(Date.parse('05/17/1962')), - bowlingBall: new BowlingBall() + bowlingBall: new BowlingBall(), + games: [{score: 80, date: new Date(Date.parse('05/17/1962'))}, {score: 80, date: new Date(Date.parse('06/17/1962'))}] }); + + // another person with missing parts to test robustness + var millionnaire = new Person({ + name: {first:'Jeffrey', last:'Lebowski'}, + }); + + it('should serialize 
a document with missing bits', function(){ + var serialized = serialize(millionnaire, mapping); + serialized.should.have.property('games', []); + }); + describe('with no indexed fields', function(){ var serialized = serialize(dude, mapping); it('should serialize model fields', function(){ - serialized.name.first.should.eql('Jeffery'); + serialized.name.first.should.eql('Jeffrey'); serialized.name.last.should.eql('Lebowski'); }); it('should serialize object ids as strings', function(){ serialized.bowlingBall.should.eql(dude.bowlingBall); }); - it('should serialize dates in ISO 8601 format', function(){ - serialized.dob.should.eql(dude.dob) + serialized.dob.should.eql(dude.dob); + }); + it('should serialize nested arrays', function(){ + serialized.games.should.have.lengthOf(2); + serialized.games[0].should.have.property('score', 80); }); }); From 57b5ece119fba1b285933d605d34cb5da8cf3ff3 Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Mon, 9 Jun 2014 21:58:18 +0200 Subject: [PATCH 06/92] first support of bulk indexing mode --- lib/mongoosastic.js | 54 +++++++++++++++++++++++++++---- test/bulk-test.js | 73 ++++++++++++++++++++++++++++++++++++++++++ test/serialize-test.js | 2 +- 3 files changed, 122 insertions(+), 7 deletions(-) create mode 100644 test/bulk-test.js diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 0430d804..3c8b4bd3 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -13,7 +13,8 @@ module.exports = function elasticSearchPlugin(schema, options){ , host = options && options.host ? options.host : 'localhost' , port = options && options.port ? options.port : 9200 , esClient = new elastical.Client(host, options) - , useRiver = options && options.useRiver; + , useRiver = options && options.useRiver + , bulk = options && options.bulk; if (useRiver) setUpRiver(schema); @@ -44,8 +45,12 @@ module.exports = function elasticSearchPlugin(schema, options){ } var model = this; setIndexNameIfUnset(model.constructor.modelName); - esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb); - } + if(bulk) { + bulkIndex(index || indexName, type || typeName, this); + } else { + esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb); + } + }; /** * Unset elastic search index @@ -53,7 +58,11 @@ module.exports = function elasticSearchPlugin(schema, options){ schema.methods.unIndex = function(){ var model = this; setIndexNameIfUnset(model.constructor.modelName); - deleteByMongoId(esClient, model, indexName, typeName, 3); + if(bulk) { + bulkDelete(index || indexName, type || typeName, this); + } else { + deleteByMongoId(esClient, model, indexName, typeName, 3); + } } /** * Synchronize an existing collection @@ -127,6 +136,35 @@ module.exports = function elasticSearchPlugin(schema, options){ }); }; + var bulkBuffer = []; + + function bulkDelete(indexName, typeName, model) { + bulkAdd({delete: {index: indexName, type: typeName, id: model._id.toString()}}); + } + + function bulkIndex(indexName, typeName, model) { + bulkAdd({index: {index: indexName, type: typeName, id: model._id.toString(), data: model}}); + } + + var bulkTimeout; + + function bulkAdd(instruction) { + bulkBuffer.push(instruction); + clearTimeout(bulkTimeout); + if(bulkBuffer.length >= (bulk.size || 1000)) { + schema.statics.flush(); + } else { + bulkTimeout = setTimeout(function(){ + schema.statics.flush(); + }, bulk.delay || 1000); + } + } + + schema.statics.flush = function(){ + esClient.bulk(bulkBuffer); + 
bulkBuffer = []; + }; + function setIndexNameIfUnset(model){ var modelName = model.toLowerCase(); if(!indexName){ @@ -146,7 +184,11 @@ module.exports = function elasticSearchPlugin(schema, options){ schema.post('remove', function(){ var model = this; setIndexNameIfUnset(model.constructor.modelName); - deleteByMongoId(esClient, model, indexName, typeName, 3); + if(bulk) { + bulkDelete(indexName, typeName, this); + } else { + deleteByMongoId(esClient, model, indexName, typeName, 3); + } }); /** @@ -257,4 +299,4 @@ function deleteByMongoId(client, model,indexName, typeName, tries){ model.emit('es-removed', err, res); } }); -} +} \ No newline at end of file diff --git a/test/bulk-test.js b/test/bulk-test.js new file mode 100644 index 00000000..658f3883 --- /dev/null +++ b/test/bulk-test.js @@ -0,0 +1,73 @@ +var mongoose = require('mongoose'), + elastical = require('elastical'), + esClient = new(require('elastical').Client)(), + should = require('should'), + config = require('./config'), + Schema = mongoose.Schema, + ObjectId = Schema.ObjectId, + async = require('async'), + mongoosastic = require('../lib/mongoosastic'); + +var BookSchema = new Schema({ + title: String +}); +BookSchema.plugin(mongoosastic, { + bulk: { + size: 10, + delay: 100 + } +}); + +var Book = mongoose.model('Book2', BookSchema); + +describe('Bulk mode', function() { + var books = null; + + before(function(done) { + config.deleteIndexIfExists(['book2s'], function() { + mongoose.connect(config.mongoUrl, function() { + var client = mongoose.connections[0].db; + client.collection('book2s', function(err, _books) { + books = _books; + Book.remove(done); + }); + }); + }); + }); + before(function(done) { + async.forEach(bookTitles(), function(title, cb) { + new Book({ + title: title + }).save(cb); + }, function() { + setTimeout(done, 1200); + }); + }); + before(function(done) { + Book.findOne({ + title: 'American Gods' + }, function(err, book) { + book.remove(function() { + setTimeout(done, 1200); + }); + }); + }); + it('should index all objects and support deletions too', function(done) { + Book.search({}, function(err, results) { + results.should.have.property('hits').with.property('total', 52); + done(); + }); + }); +}); + +function bookTitles() { + var books = [ + 'American Gods', + 'Gods of the Old World', + 'American Gothic' + ]; + for (var i = 0; i < 50; i++) { + books.push('ABABABA' + i); + } + return books; +} \ No newline at end of file diff --git a/test/serialize-test.js b/test/serialize-test.js index f824ab17..a482b205 100644 --- a/test/serialize-test.js +++ b/test/serialize-test.js @@ -27,7 +27,7 @@ generator.generateMapping(PersonSchema22, function(err, tmp) { mapping = tmp; }); -describe.only('serialize', function(){ +describe('serialize', function(){ var dude = new Person({ name: {first:'Jeffrey', last:'Lebowski'}, dob: new Date(Date.parse('05/17/1962')), From 122f0ed16ce205b5bf480554cf38213c0f49ed3b Mon Sep 17 00:00:00 2001 From: Alban Mouton Date: Mon, 9 Jun 2014 22:20:50 +0200 Subject: [PATCH 07/92] forgotten callback --- lib/mongoosastic.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 3c8b4bd3..dd5f2c6e 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -47,6 +47,7 @@ module.exports = function elasticSearchPlugin(schema, options){ setIndexNameIfUnset(model.constructor.modelName); if(bulk) { bulkIndex(index || indexName, type || typeName, this); + cb(); } else { esClient.index(index || indexName, type || typeName, serialize(model, mapping), 
{id:model._id.toString()}, cb); } From a3a2a2adaa69a36b4bc421c5648fa6f39b43488b Mon Sep 17 00:00:00 2001 From: taterbase Date: Fri, 24 Oct 2014 12:23:28 -0600 Subject: [PATCH 08/92] Update repo info in package.json --- .travis.yml | 4 ---- package.json | 4 ++-- 2 files changed, 2 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 690e2dd2..d2b4c927 100644 --- a/.travis.yml +++ b/.travis.yml @@ -8,7 +8,3 @@ node_js: services: - mongodb - elasticsearch - -notifications: - email: - - james.r.carr@gmail.com diff --git a/package.json b/package.json index c90295c1..06a36277 100644 --- a/package.json +++ b/package.json @@ -2,11 +2,11 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "0.6.0", + "version": "0.6.1", "tags":["mongodb", "elastic search", "mongoose", "full text search"], "repository": { "type": "git", - "url": "git://github.com/jamescarr/mongoosastic" + "url": "git://github.com/mongoosastic/mongoosastic" }, "main":"lib/mongoosastic.js", "dependencies": { From 75efeee03918c179d30066305a83c48b2bc10794 Mon Sep 17 00:00:00 2001 From: taterbase Date: Fri, 24 Oct 2014 12:23:44 -0600 Subject: [PATCH 09/92] Update badge --- readme.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/readme.md b/readme.md index 22fb8b1a..d5bd3cf9 100644 --- a/readme.md +++ b/readme.md @@ -1,6 +1,6 @@ # Mongoosastic [![Build -Status](https://secure.travis-ci.org/jamescarr/mongoosastic.png?branch=master)](http://travis-ci.org/jamescarr/mongoosastic) +Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) A [mongoose](http://mongoosejs.com/) plugin that indexes models into [elasticsearch](http://www.elasticsearch.org/). I kept running into cases where I needed full text search capabilities in my From 4ad5c37f53c65e87fc2115836e6e574ba032d74e Mon Sep 17 00:00:00 2001 From: George Shank Date: Fri, 24 Oct 2014 12:36:34 -0600 Subject: [PATCH 10/92] Update readme.md More dynamic version info --- readme.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/readme.md b/readme.md index d5bd3cf9..061d4cca 100644 --- a/readme.md +++ b/readme.md @@ -1,6 +1,7 @@ # Mongoosastic [![Build Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) +[![NPM version](https://badge.fury.io/js/mongoosastic.svg)](http://badge.fury.io/js/mongoosastic) A [mongoose](http://mongoosejs.com/) plugin that indexes models into [elasticsearch](http://www.elasticsearch.org/). I kept running into cases where I needed full text search capabilities in my @@ -9,8 +10,6 @@ full text search, I also needed the ability to filter ranges of data points in the searches and even highlight matches. For these reasons, elastic search was a perfect fit and hence this project. -## Current Version -The current version is ``0.6.0`` ## Installation From 822ee1e21057a9f5c718a780b5ef4890ce62ac2f Mon Sep 17 00:00:00 2001 From: nlko Date: Mon, 17 Feb 2014 16:36:23 +0100 Subject: [PATCH 11/92] Keep geo_* types in the mapping Prior, only geo_point were kept in the mapping. 
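[Editor's note] A minimal sketch of the kind of schema this change targets — the field layout mirrors the geo test added later in this series (test/geo-test.js); the `es_*` values are illustrative, not prescribed by the patch:

```javascript
// Illustrative only — mirrors the schema used in test/geo-test.js later in this series.
var mongoose = require('mongoose');
var mongoosastic = require('mongoosastic');
var Schema = mongoose.Schema;

var GeoSchema = new Schema({
  frame: {
    coordinates: [],
    type: { type: String },
    geo_shape: {
      type: String,
      es_type: 'geo_shape',   // kept in the mapping because the key matches /^geo_/
      es_tree: 'quadtree',
      es_precision: '1km'
    }
  }
});
GeoSchema.plugin(mongoosastic);
```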
--- lib/mapping-generator.js | 17 +++++++++++++---- 1 file changed, 13 insertions(+), 4 deletions(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 1bbf1df4..9cb3f05c 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -146,10 +146,19 @@ function getCleanTree(tree, paths, prefix) { // It has no type for some reason } else { - // Because it is an geo_point object!! - if (typeof value === 'object' && value.geo_point) { - cleanTree[field] = value.geo_point; - continue; + // Because it is an geo_* object!! + if (typeof value === 'object') + { + var key; + var geoFound = false; + for (key in value) { + if (value.hasOwnProperty(key) && /^geo_/.test(key)) { + cleanTree[field] = value[key]; + geoFound = true; + //break; + } + } + if(geoFound) continue } // If it's a virtual type, don't map it From 9f8fa2f067e7b91cac796268dbabacdce5a3fc6c Mon Sep 17 00:00:00 2001 From: nlko Date: Mon, 17 Feb 2014 18:41:31 +0100 Subject: [PATCH 12/92] Correct boost test field (support ES 0.9 and 1.0). In my tests, the mapping format returned by the getMapping function is not the same between 0.90.11 and 1.0 --- test/boost-field-test.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/boost-field-test.js b/test/boost-field-test.js index e9b94108..740d63e4 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -30,7 +30,11 @@ describe('Add Boost Option Per Field', function(){ it('should create a mapping with boost field added', function(done){ BlogPost.createMapping(function(err, mapping){ esClient.getMapping('blogposts', 'blogpost', function(err, mapping){ - var props = mapping.blogposts.mappings.blogpost.properties; + /* elasticsearch 1.0 & 0.9 support */ + var props = + mapping.blogpost != undefined ? + mapping.blogpost.properties: /* ES 0.9.11 */ + mapping.blogposts.mappings.blogpost.properties; /* ES 1.0.0 */ props.title.type.should.eql('string'); props.title.boost.should.eql(2.0); done(); From 02b01384801621c2e8e8b5f87844799ea4e10e02 Mon Sep 17 00:00:00 2001 From: nlko Date: Thu, 20 Feb 2014 10:15:44 +0100 Subject: [PATCH 13/92] Add test for undefined object field in the path prior of its use --- lib/mapping-generator.js | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 9cb3f05c..48c21c95 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -117,21 +117,20 @@ function getCleanTree(tree, paths, prefix) { } // Field has some kind of type if (type) { - // If it is an nestec schema + // If it is an nested schema if (value[0]) { //A nested schema can be just a blank object with no defined paths if(value[0].tree && value[0].paths){ cleanTree[field] = getCleanTree(value[0].tree, value[0].paths, ''); + } else if ( paths[field] && paths[field].caster && paths[field].caster.instance ) { + cleanTree[field] = {type: paths[field].caster.instance.toLowerCase()}; } // Check for single type arrays (which elasticsearch will treat as the core type i.e. 
[String] = string) else if (!paths[field] && prefix) { if(paths[prefix + field] && paths[prefix + field].caster && paths[prefix + field].caster.instance) { cleanTree[field] = {type: paths[prefix + field].caster.instance.toLowerCase()}; } - } else if( paths[field].caster && paths[field].caster.instance ) { - cleanTree[field] = {type: paths[field].caster.instance.toLowerCase()}; - } - else{ + } else { cleanTree[field] = { type:'object' }; From 2214a6f337d9dd8b718256915bca1b1598c24056 Mon Sep 17 00:00:00 2001 From: nlko Date: Thu, 20 Feb 2014 15:55:08 +0100 Subject: [PATCH 14/92] Added testfor geo_shape and updated manual --- readme.md | 78 ++++++++++++++++++++ test/geo-test.js | 180 +++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 258 insertions(+) create mode 100644 test/geo-test.js diff --git a/readme.md b/readme.md index d5bd3cf9..4f8a7289 100644 --- a/readme.md +++ b/readme.md @@ -242,6 +242,17 @@ var ExampleSchema = new Schema({ lon: { type: Number } } + geo_shape: { + coordinates : [], + type: {type: String}, + geo_shape: { + type:String, + es_type: "geo_shape", + es_tree: "quadtree", + es_precision: "1km" + } + } + // Special feature : specify a cast method to pre-process the field before indexing it someFieldToCast : { type: String, @@ -249,6 +260,7 @@ var ExampleSchema = new Schema({ return value + ' something added'; } } + }); // Used as nested schema above. @@ -258,6 +270,56 @@ var SubSchema = new Schema({ }); ``` +## Geo mapping +Prior to index any geo mapped data (or calling the synchronize), +the mapping must be manualy created with the createMapping (see above). + +Notice that the name of the field containing the ES geo data must start by +'geo_' to be recognize as such. + +# Indexing a geo point + +```javascript + var geo = new GeoModel({ + … + geo_with_lat_lon: { lat: 1, lon: 2} + … + }); +``` + +# Indexing a geo shape + +```javascript + var geo = new GeoModel({ + … + geo_shape:{ + type:'envelope', + coordinates: [[3,4],[1,2] /* Arrays of coord : [[lon,lat],[lon,lat]] */ + } + … + }); +``` + +Mapping, indexing and searching example for geo shape can be found in test/geo-test.js + +For example, one can retrieve the list of document where the shape contain a specific +point (or polygon...) + +```javascript + var geoQuery = { + "query": {"match_all": {}}, + "filter": {"geo_shape": { + "geo_shape": { + "shape": { + "type": "point", + "coordinates": [3,1] + }, + "relation": "intersects" + } + }} + } +``` + ### Advanced Queries The full query DSL of elasticsearch is exposed through the search method. For example, if you wanted to find all people between ages 21 @@ -358,6 +420,7 @@ The index method takes 3 arguments: Note that indexing a model does not mean it will be persisted to mongodb. Use save for that. +<<<<<<< HEAD ### Truncating an index The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. @@ -366,6 +429,21 @@ The static method truncate will deleted all documents from the associated index. ```javascript GarbageModel.truncate(function(err){...}); +======= +### Saving a document +The indexing takes place after saving inside the mongodb and is a defered process. +One can check the end of the indexion catching es-indexed event. 
+ +```javascript +doc.save(function(err){ + if (err) throw err; + /* Document indexation on going */ + doc.on('es-indexed', function(err, res){ + if (err) throw err; + /* Document is indexed */ + }); + }); +>>>>>>> Added testfor geo_shape and updated manual ``` ### Model.plugin(mongoosastic, options) diff --git a/test/geo-test.js b/test/geo-test.js new file mode 100644 index 00000000..c798b6df --- /dev/null +++ b/test/geo-test.js @@ -0,0 +1,180 @@ +var mongoose = require('mongoose') + , elastical = require('elastical') + , esClient = new(require('elastical').Client) + , should = require('should') + , config = require('./config') + , Schema = mongoose.Schema + , ObjectId = Schema.ObjectId + , mongoosastic = require('../lib/mongoosastic'); + + +var GeoSchema; + + +var GeoModel; + +describe('GeoTest', function(){ + before(function(done){ + mongoose.connect(config.mongoUrl, function(){ + config.deleteIndexIfExists(['geodocs'], function(){ + + GeoSchema = new Schema({ + myId: Number, + frame: { + coordinates : [], + type: {type: String}, + geo_shape: { + type:String, + es_type: "geo_shape", + es_tree: "quadtree", + es_precision: "1km" + } + } + }); + + GeoSchema.plugin(mongoosastic); + GeoModel = mongoose.model('geodoc', GeoSchema); + + GeoModel.createMapping(function(err, mapping){ + GeoModel.remove(function(){ + + esClient.getMapping('geodocs', 'geodoc', function(err, mapping){ + mapping.geodoc.properties.frame.type.should.eql('geo_shape'); + done(); + }); + }); + }); + + }); + }); + }); + + it('should be able to create and store geo coordinates', function(done){ + + var geo = new GeoModel({ + myId : 1, + frame:{ + type:'envelope', + coordinates: [[3,4],[1,2]] + } + }); + + geo2 = new GeoModel({ + myId : 2, + frame:{ + type:'envelope', + coordinates: [[2,3],[4,0]] + } + }); + + + var saveAndWait = function (doc,cb) { + doc.save(function(err) { + if (err) cb(err); + else doc.on('es-indexed', cb ); + }); + }; + + saveAndWait(geo,function(err){ + if (err) throw err; + saveAndWait(geo2,function(err){ + if (err) throw err; + // Mongodb request + GeoModel.find({},function(err, res){ + if (err) throw err; + res.length.should.eql(2); + res[0].frame.type.should.eql('envelope'); + res[0].frame.coordinates[0].should.eql([3,4]); + res[0].frame.coordinates[1].should.eql([1,2]); + done(); + })})})}) + + var getDocOrderedQuery = {"query": {"match_all": {}},"sort":{"myId":{"order":"asc"}}}; + + it('should be able to find geo coordinates in the indexes', function(done){ + setTimeout(function(){ + // ES request + GeoModel.search(getDocOrderedQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(2); + res.hits.hits[0]._source.frame.type.should.eql('envelope'); + res.hits.hits[0]._source.frame.coordinates.should.eql([[3,4],[1,2]]); + done(); + }); + }, 1100); + }); + + it('should be able to resync geo coordinates from the database', function(done){ + config.deleteIndexIfExists(['geodocs'], function(){ + GeoModel.createMapping(function(err, mapping){ + var stream = GeoModel.synchronize() + , count = 0; + + stream.on('data', function(err, doc){ + count++; + }); + + stream.on('close', function(){ + count.should.eql(2); + + setTimeout(function(){ + GeoModel.search(getDocOrderedQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(2); + res.hits.hits[0]._source.frame.type.should.eql('envelope'); + res.hits.hits[0]._source.frame.coordinates.should.eql([[3,4],[1,2]]); + done(); + }); + }, 1000); + }); + }); + }); + }); + + + + it('should be able to search points inside 
frames', function(done){ + var geoQuery = { + "query": {"match_all": {}}, + "filter": {"geo_shape": { + "frame": { + "shape": { + "type": "point", + "coordinates": [3,1] + }, + "relation": "intersects" + } + }} + } + + setTimeout(function(){ + GeoModel.search(geoQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(1); + res.hits.hits[0]._source.myId.should.eql(2); + geoQuery.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; + GeoModel.search(geoQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(1); + res.hits.hits[0]._source.myId.should.eql(1); + + geoQuery.filter.geo_shape.frame.shape.coordinates = [3,2]; + GeoModel.search(geoQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(2); + + geoQuery.filter.geo_shape.frame.shape.coordinates = [0,0]; + GeoModel.search(geoQuery,function(err, res){ + if (err) throw err; + res.hits.total.should.eql(0); + done(); + }); + }); + }); + + }); + }, 1000); + }); + + +}); From d0e2be27d171742688e5e12406a403b6259c38ab Mon Sep 17 00:00:00 2001 From: nlko Date: Thu, 20 Feb 2014 16:00:29 +0100 Subject: [PATCH 15/92] Add ES 1.0 support for geo shape tests --- test/geo-test.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/test/geo-test.js b/test/geo-test.js index c798b6df..a07d1234 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -39,7 +39,10 @@ describe('GeoTest', function(){ GeoModel.remove(function(){ esClient.getMapping('geodocs', 'geodoc', function(err, mapping){ - mapping.geodoc.properties.frame.type.should.eql('geo_shape'); + (mapping.geodoc != undefined ? + mapping.geodoc: /* ES 0.9.11 */ + mapping.geodocs.mappings.geodoc /* ES 1.0.0 */ + ).properties.frame.type.should.eql('geo_shape'); done(); }); }); From 82fb1b2e51eda7ec0e58b2cd29180f752b8d83dc Mon Sep 17 00:00:00 2001 From: nlko Date: Tue, 25 Feb 2014 10:32:51 +0100 Subject: [PATCH 16/92] Correct enveloppe test Enveloppe corners were in wrong order resulting in a bad test. 
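[Editor's note] For readers unfamiliar with the shape format: Elasticsearch expects an `envelope` as two `[lon, lat]` pairs, upper-left corner first and lower-right corner second, which is what the corrected coordinates in this patch encode (values taken from the test itself):

```javascript
// Corrected envelope from the test: upper-left [minLon, maxLat], then lower-right [maxLon, minLat].
var geo = new GeoModel({
  myId: 1,
  frame: {
    type: 'envelope',
    coordinates: [[1, 4], [3, 2]]   // was [[3, 4], [1, 2]], i.e. corners listed in the wrong order
  }
});
```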
--- test/geo-test.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/geo-test.js b/test/geo-test.js index a07d1234..f530c98d 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -58,7 +58,7 @@ describe('GeoTest', function(){ myId : 1, frame:{ type:'envelope', - coordinates: [[3,4],[1,2]] + coordinates: [[1,4],[3,2]] } }); @@ -87,8 +87,8 @@ describe('GeoTest', function(){ if (err) throw err; res.length.should.eql(2); res[0].frame.type.should.eql('envelope'); - res[0].frame.coordinates[0].should.eql([3,4]); - res[0].frame.coordinates[1].should.eql([1,2]); + res[0].frame.coordinates[0].should.eql([1,4]); + res[0].frame.coordinates[1].should.eql([3,2]); done(); })})})}) @@ -101,7 +101,7 @@ describe('GeoTest', function(){ if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); - res.hits.hits[0]._source.frame.coordinates.should.eql([[3,4],[1,2]]); + res.hits.hits[0]._source.frame.coordinates.should.eql([[1,4],[3,2]]); done(); }); }, 1100); @@ -125,7 +125,7 @@ describe('GeoTest', function(){ if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); - res.hits.hits[0]._source.frame.coordinates.should.eql([[3,4],[1,2]]); + res.hits.hits[0]._source.frame.coordinates.should.eql([[1,4],[3,2]]); done(); }); }, 1000); @@ -166,7 +166,7 @@ describe('GeoTest', function(){ if (err) throw err; res.hits.total.should.eql(2); - geoQuery.filter.geo_shape.frame.shape.coordinates = [0,0]; + geoQuery.filter.geo_shape.frame.shape.coordinates = [0,3]; GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(0); From cd84784549f18ce02cea9aeffd7c01f36bc3ceff Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?H=C3=BCseyin=20BABAL?= Date: Mon, 17 Mar 2014 21:35:02 +0200 Subject: [PATCH 17/92] Get first level of hits field Provided fix for etting first level `hits` field of search results when used hydrate --- lib/mongoosastic.js | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f0c64751..32f5f6e3 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -152,7 +152,7 @@ module.exports = function elasticSearchPlugin(schema, options){ cb(err); }else{ if (alwaysHydrate || options.hydrate) { - hydrate(results, model, options.hydrateOptions || defaultHydrateOptions || {}, cb); + hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb); }else{ cb(null, res); } @@ -251,7 +251,8 @@ function createMappingIfNotPresent(client, indexName, typeName, schema, settings }); } -function hydrate(results, model, options, cb){ +function hydrate(res, model, options, cb){ + var results = res.hits; var resultsMap = {} var ids = results.hits.map(function(a, i){ resultsMap[a._id] = i @@ -276,7 +277,8 @@ function hydrate(results, model, options, cb){ hits[i] = doc }) results.hits = hits; - cb(null, results); + res.hits = results; + cb(null, res); } }); } From a6f6c3e69fe41ea43b1e4bea0040c63a7c1ec772 Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 11:43:26 -0600 Subject: [PATCH 18/92] Update hydrated tests to conform to api --- test/index-test.js | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/test/index-test.js b/test/index-test.js index 711bb5f3..f0366e21 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -223,10 +223,10 @@ describe('indexing', function(){ it('when gathering search results while respecting 
default hydrate options', function(done){ Person.search({query:'James'}, function(err, res) { - res.hits[0].address.should.eql('Exampleville, MO'); - res.hits[0].name.should.eql('James Carr'); - res.hits[0].should.not.have.property('phone'); - res.hits[0].should.not.be.an.instanceof(Person); + res.hits.hits[0].address.should.eql('Exampleville, MO'); + res.hits.hits[0].name.should.eql('James Carr'); + res.hits.hits[0].should.not.have.property('phone'); + res.hits.hits[0].should.not.be.an.instanceof(Person); done(); }); }); @@ -258,9 +258,9 @@ describe('indexing', function(){ it('should hydrate returned documents if desired', function(done){ Talk.search({query:'cool'}, {hydrate:true}, function(err, res) { - res.total.should.eql(1) + res.hits.total.should.eql(1) - var talk = res.hits[0] + var talk = res.hits.hits[0] talk.should.have.property('title') talk.should.have.property('year'); talk.should.have.property('abstract') @@ -283,14 +283,14 @@ describe('indexing', function(){ it('should only return indexed fields and have indexed sub-objects', function(done){ Person.search({query:'Bob'}, function(err, res) { - res.hits[0].address.should.eql('Exampleville, MO'); - res.hits[0].name.should.eql('Bob Carr'); - res.hits[0].should.have.property('life'); - res.hits[0].life.born.should.eql(1950); - res.hits[0].life.should.not.have.property('died'); - res.hits[0].life.should.not.have.property('other'); - res.hits[0].should.not.have.property('phone'); - res.hits[0].should.not.be.an.instanceof(Person); + res.hits.hits[0].address.should.eql('Exampleville, MO'); + res.hits.hits[0].name.should.eql('Bob Carr'); + res.hits.hits[0].should.have.property('life'); + res.hits.hits[0].life.born.should.eql(1950); + res.hits.hits[0].life.should.not.have.property('died'); + res.hits.hits[0].life.should.not.have.property('other'); + res.hits.hits[0].should.not.have.property('phone'); + res.hits.hits[0].should.not.be.an.instanceof(Person); done(); }); }); @@ -298,9 +298,9 @@ describe('indexing', function(){ it('should allow extra query options when hydrating', function(done){ Talk.search({query:'cool'}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { - res.total.should.eql(1) + res.hits.total.should.eql(1) - var talk = res.hits[0] + var talk = res.hits.hits[0] talk.should.have.property('title') talk.should.have.property('year'); talk.should.have.property('abstract') From 241dfde0241b51117c4a80ce60091e0e93db2012 Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 11:48:29 -0600 Subject: [PATCH 19/92] remove river code --- lib/mongoosastic.js | 36 +----------------------------------- 1 file changed, 1 insertion(+), 35 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f0c64751..8d3fa7c9 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -13,12 +13,8 @@ module.exports = function elasticSearchPlugin(schema, options){ , host = options && options.host ? options.host : 'localhost' , port = options && options.port ? 
options.port : 9200 , esClient = new elastical.Client(host, options) - , useRiver = options && options.useRiver; - if (useRiver) - setUpRiver(schema); - else - setUpMiddlewareHooks(schema); + setUpMiddlewareHooks(schema); /** * ElasticSearch Client @@ -200,36 +196,6 @@ module.exports = function elasticSearchPlugin(schema, options){ }); } - /* - * Experimental MongoDB River functionality - * NOTICE: Only tested with: - * MongoDB V2.4.1 - * Elasticsearch V0.20.6 - * elasticsearch-river-mongodb V1.6.5 - * - https://github.com/richardwilly98/elasticsearch-river-mongodb/ - */ - function setUpRiver(schema) { - schema.statics.river = function(cb) { - var model = this; - setIndexNameIfUnset(model.modelName); - if (!this.db.name) throw "ERROR: "+ model.modelName +".river() call before mongoose.connect" - esClient.putRiver( - 'mongodb', - indexName, - { - type: 'mongodb', - mongodb: { - db: this.db.name, - collection: indexName, - gridfs: (useRiver && useRiver.gridfs) ? useRiver.gridfs : false - }, - index: { - name: indexName, - type: typeName - } - }, cb ); - } - } }; From 9a79e38b8d9e7ed6fea7e94e63ebb2e2643dd905 Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 12:48:10 -0600 Subject: [PATCH 20/92] use containEql instead of include --- test/search-features-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/search-features-test.js b/test/search-features-test.js index 0810cbae..764a2336 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -53,7 +53,7 @@ describe('Query DSL', function(){ }, function(err, res){ res.hits.total.should.eql(2); res.hits.hits.forEach(function(bond){ - ['Legal', 'Construction'].should.include(bond._source.name); + ['Legal', 'Construction'].should.containEql(bond._source.name); }); done(); }); From a5711a0c29a11c7164082f111f46d8950b7a8158 Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 16:43:41 -0600 Subject: [PATCH 21/92] big api changes, big version bump --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 06a36277..6e8d8500 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "0.6.1", + "version": "1.0.0", "tags":["mongodb", "elastic search", "mongoose", "full text search"], "repository": { "type": "git", From b5e5cc0c5467d50868b060697473a13d7438171d Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 17:40:46 -0600 Subject: [PATCH 22/92] Add documentation about bulk api --- package.json | 2 +- readme.md | 15 +++++++++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/package.json b/package.json index 6e8d8500..a1c706f7 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "1.0.0", + "version": "1.0.1", "tags":["mongodb", "elastic search", "mongoose", "full text search"], "repository": { "type": "git", diff --git a/readme.md b/readme.md index 2a19f3fa..daca1ad3 100644 --- a/readme.md +++ b/readme.md @@ -130,6 +130,21 @@ var stream = Book.synchronize({author: 'Arthur C. Clarke'}) One caveat... synchronization is kinda slow for now. Use with care. 
+### Bulk Indexing + +You can also specify `bulk` options with mongoose which will utilize elasticsearch's bulk indexing api. This will cause the `synchronize` function to use bulk indexing as well. + +Mongoosastic will wait 1 second (or specified delay) until it has 1000 docs (or specified size) and then perform bulk indexing. + +```javascript +BookSchema.plugin(mongoosastic, { + bulk: { + size: 10, // preferred number of docs to bulk index + delay: 100 //milliseconds to wait for enough docs to meet size constraint + } +}); +``` + ### Per Field Options Schemas can be configured to have special options per field. These match with the existing [field mapping configurations](http://www.elasticsearch.org/guide/reference/mapping/core-types.html) defined by elasticsearch with the only difference being they are all prefixed by "es_". From 178df7b74a19842054f658f334be40aee3686a81 Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 27 Oct 2014 17:54:34 -0600 Subject: [PATCH 23/92] Document geo_shape --- package.json | 2 +- readme.md | 27 +++++++++++++-------------- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/package.json b/package.json index a1c706f7..994c90cd 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "1.0.1", + "version": "1.0.2", "tags":["mongodb", "elastic search", "mongoose", "full text search"], "repository": { "type": "git", diff --git a/readme.md b/readme.md index daca1ad3..40a54ae1 100644 --- a/readme.md +++ b/readme.md @@ -284,14 +284,14 @@ var SubSchema = new Schema({ }); ``` -## Geo mapping +### Geo mapping Prior to index any geo mapped data (or calling the synchronize), the mapping must be manualy created with the createMapping (see above). Notice that the name of the field containing the ES geo data must start by 'geo_' to be recognize as such. -# Indexing a geo point +#### Indexing a geo point ```javascript var geo = new GeoModel({ @@ -301,7 +301,7 @@ Notice that the name of the field containing the ES geo data must start by }); ``` -# Indexing a geo shape +#### Indexing a geo shape ```javascript var geo = new GeoModel({ @@ -434,16 +434,6 @@ The index method takes 3 arguments: Note that indexing a model does not mean it will be persisted to mongodb. Use save for that. -<<<<<<< HEAD -### Truncating an index - -The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. - -#### Usage - -```javascript -GarbageModel.truncate(function(err){...}); -======= ### Saving a document The indexing takes place after saving inside the mongodb and is a defered process. One can check the end of the indexion catching es-indexed event. @@ -457,7 +447,16 @@ doc.save(function(err){ /* Document is indexed */ }); }); ->>>>>>> Added testfor geo_shape and updated manual +``` + +### Truncating an index + +The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. 
+ +#### Usage + +```javascript +GarbageModel.truncate(function(err){...}); ``` ### Model.plugin(mongoosastic, options) From b48f7a4a6b3fee603a94097f6205a6df074f64ef Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 28 Oct 2014 13:35:28 -0600 Subject: [PATCH 24/92] remove semicolons from mongoosastic.js --- lib/mongoosastic.js | 235 +++++++++++++++++++++++--------------------- package.json | 26 +++-- 2 files changed, 137 insertions(+), 124 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 013cc461..41157651 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -1,9 +1,10 @@ -var elastical = require('elastical') - , generator = new(require('./mapping-generator')) - , serialize = require('./serialize') - , events = require('events'); +var elasticsearch = require('elasticsearch') + , elastical = require('elastical') + , generator = new(require('./mapping-generator')) + , serialize = require('./serialize') + , events = require('events') -module.exports = function elasticSearchPlugin(schema, options){ +module.exports = function Mongoosastic(schema, options){ var mapping = getMapping(schema) , indexName = options && options.index , typeName = options && options.type @@ -12,15 +13,15 @@ module.exports = function elasticSearchPlugin(schema, options){ , _mapping = null , host = options && options.host ? options.host : 'localhost' , port = options && options.port ? options.port : 9200 - , esClient = new elastical.Client(host, options) - , bulk = options && options.bulk; + , esClient = new elasticsearch.Client({host: {host: host, port: port}}) + , bulk = options && options.bulk - setUpMiddlewareHooks(schema); + setUpMiddlewareHooks(schema) /** * ElasticSearch Client */ - schema.statics.esClient = esClient; + schema.statics.esClient = esClient /** * Create the mapping. 
Takes an optionnal settings parameter and a callback that will be called once @@ -30,13 +31,16 @@ module.exports = function elasticSearchPlugin(schema, options){ * @param callback Function */ schema.statics.createMapping = function(settings, cb) { - if (!cb) { - cb = settings; - settings = undefined; + if (arguments.length === 0) { + throw new Error("Must provide a callback to createMapping function") + } else if(arguments.length === 1) { + cb = arguments[0] + settings = undefined } - setIndexNameIfUnset(this.modelName); - createMappingIfNotPresent(esClient, indexName, typeName, schema, settings, cb); - }; + + setIndexNameIfUnset(this.modelName) + createMappingIfNotPresent(esClient, indexName, typeName, schema, settings, cb) + } /** * @param indexName String (optional) @@ -45,32 +49,35 @@ module.exports = function elasticSearchPlugin(schema, options){ */ schema.methods.index = function(index, type, cb){ if(cb == null && typeof index == 'function'){ - cb = index; - index = null; + cb = index + index = null }else if (cb == null && typeof type == 'function'){ - cb = type; + cb = type type = null } - var model = this; - setIndexNameIfUnset(model.constructor.modelName); + var model = this + setIndexNameIfUnset(model.constructor.modelName) if(bulk) { - bulkIndex(index || indexName, type || typeName, this); - cb(); + bulkIndex(index || indexName, type || typeName, this) + cb() } else { - esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb); + //esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb) + esClient.index({ + index: + }, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb) } - }; + } /** * Unset elastic search index */ schema.methods.unIndex = function(){ - var model = this; - setIndexNameIfUnset(model.constructor.modelName); + var model = this + setIndexNameIfUnset(model.constructor.modelName) if(bulk) { - bulkDelete(index || indexName, type || typeName, this); + bulkDelete(index || indexName, type || typeName, this) } else { - deleteByMongoId(esClient, model, indexName, typeName, 3); + deleteByMongoId(esClient, model, indexName, typeName, 3) } } @@ -86,8 +93,8 @@ module.exports = function elasticSearchPlugin(schema, options){ } } }, function(err, res) { - cb(err); - }); + cb(err) + }) } /** @@ -102,41 +109,41 @@ module.exports = function elasticSearchPlugin(schema, options){ , closeValues = [] , counter = 0 , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} - ; + - setIndexNameIfUnset(model.modelName); - var stream = model.find(query).stream(); + setIndexNameIfUnset(model.modelName) + var stream = model.find(query).stream() stream.on('data', function(doc){ - counter++; + counter++ doc.save(function(err){ if (err) { - em.emit('error', err); - return; + em.emit('error', err) + return } doc.on('es-indexed', function(err, doc){ - counter--; + counter-- if(err){ - em.emit('error', err); + em.emit('error', err) }else{ - em.emit('data', null, doc); + em.emit('data', null, doc) } if (readyToClose && counter === 0) close() - }); - }); - }); + }) + }) + }) stream.on('close', function(a, b){ - readyToClose = true; - closeValues = [a, b]; + readyToClose = true + closeValues = [a, b] if (counter === 0) close() - }); + }) stream.on('error', function(err){ - em.emit('error', err); - }); - return em; - }; + em.emit('error', err) + }) + return em + } /** * ElasticSearch search function * @@ -145,70 +152,70 @@ module.exports = function 
elasticSearchPlugin(schema, options){ * @param callback - callback called with search results */ schema.statics.search = function(query, options, cb){ - var model = this; - setIndexNameIfUnset(model.modelName); + var model = this + setIndexNameIfUnset(model.modelName) if(typeof options != 'object'){ - cb = options; - options = {}; + cb = options + options = {} } - query.index = indexName; + query.index = indexName esClient.search(query, function(err, results, res){ if(err){ - cb(err); + cb(err) }else{ if (alwaysHydrate || options.hydrate) { - hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb); + hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb) }else{ - cb(null, res); + cb(null, res) } } - }); - }; + }) + } - var bulkBuffer = []; + var bulkBuffer = [] function bulkDelete(indexName, typeName, model) { - bulkAdd({delete: {index: indexName, type: typeName, id: model._id.toString()}}); + bulkAdd({delete: {index: indexName, type: typeName, id: model._id.toString()}}) } function bulkIndex(indexName, typeName, model) { - bulkAdd({index: {index: indexName, type: typeName, id: model._id.toString(), data: model}}); + bulkAdd({index: {index: indexName, type: typeName, id: model._id.toString(), data: model}}) } - var bulkTimeout; + var bulkTimeout function bulkAdd(instruction) { - bulkBuffer.push(instruction); - clearTimeout(bulkTimeout); + bulkBuffer.push(instruction) + clearTimeout(bulkTimeout) if(bulkBuffer.length >= (bulk.size || 1000)) { - schema.statics.flush(); + schema.statics.flush() } else { bulkTimeout = setTimeout(function(){ - schema.statics.flush(); - }, bulk.delay || 1000); + schema.statics.flush() + }, bulk.delay || 1000) } } schema.statics.flush = function(){ - esClient.bulk(bulkBuffer); - bulkBuffer = []; - }; + esClient.bulk(bulkBuffer) + bulkBuffer = [] + } schema.statics.refresh = function(cb){ - var model = this; - setIndexNameIfUnset(model.modelName); + var model = this + setIndexNameIfUnset(model.modelName) - esClient.refresh(indexName, cb); - }; + esClient.refresh(indexName, cb) + } function setIndexNameIfUnset(model){ - var modelName = model.toLowerCase(); + var modelName = model.toLowerCase() if(!indexName){ - indexName = modelName + "s"; + indexName = modelName + "s" } if(!typeName){ - typeName = modelName; + typeName = modelName } } @@ -219,84 +226,84 @@ module.exports = function elasticSearchPlugin(schema, options){ */ function setUpMiddlewareHooks(schema) { schema.post('remove', function(){ - var model = this; - setIndexNameIfUnset(model.constructor.modelName); + var model = this + setIndexNameIfUnset(model.constructor.modelName) if(bulk) { - bulkDelete(indexName, typeName, this); + bulkDelete(indexName, typeName, this) } else { - deleteByMongoId(esClient, model, indexName, typeName, 3); + deleteByMongoId(esClient, model, indexName, typeName, 3) } - }); + }) /** * Save in elastic search on save. 
*/ schema.post('save', function(){ - var model = this; + var model = this model.index(function(err, res){ - model.emit('es-indexed', err, res); - }); - }); + model.emit('es-indexed', err, res) + }) + }) } -}; +} function createMappingIfNotPresent(client, indexName, typeName, schema, settings, cb) { generator.generateMapping(schema, function(err, mapping) { - var completeMapping = {}; - completeMapping[typeName] = mapping; + var completeMapping = {} + completeMapping[typeName] = mapping client.indexExists(indexName, function(err, exists) { if (exists) { - client.putMapping(indexName, typeName, completeMapping, cb); + client.putMapping(indexName, typeName, completeMapping, cb) } else { client.createIndex(indexName, { settings: settings, mappings: completeMapping - }, cb); + }, cb) } - }); - }); + }) + }) } function hydrate(res, model, options, cb){ - var results = res.hits; + var results = res.hits var resultsMap = {} var ids = results.hits.map(function(a, i){ resultsMap[a._id] = i - return a._id; - }); - var query = model.find({_id:{$in:ids}}); + return a._id + }) + var query = model.find({_id:{$in:ids}}) // Build Mongoose query based on hydrate options // Example: {lean: true, sort: '-name', select: 'address name'} Object.keys(options).forEach(function(option){ - query[option](options[option]); - }); + query[option](options[option]) + }) query.exec(function(err, docs){ if(err){ - return cb(err); + return cb(err) }else{ - var hits = []; + var hits = [] docs.forEach(function(doc) { var i = resultsMap[doc._id] hits[i] = doc }) - results.hits = hits; - res.hits = results; - cb(null, res); + results.hits = hits + res.hits = results + cb(null, res) } - }); + }) } function getMapping(schema){ - var retMapping = {}; + var retMapping = {} generator.generateMapping(schema, function(err, mapping){ - retMapping = mapping; - }); - return retMapping; + retMapping = mapping + }) + return retMapping } function deleteByMongoId(client, model,indexName, typeName, tries){ client.delete(indexName, typeName, model._id.toString(), function(err, res){ @@ -305,11 +312,11 @@ function deleteByMongoId(client, model,indexName, typeName, tries){ if(tries <= 0){ // future issue.. what do we do!? 
}else{ - deleteByMongoId(client, model, indexName, typeName, --tries); + deleteByMongoId(client, model, indexName, typeName, --tries) } - }, 500); + }, 500) }else{ - model.emit('es-removed', err, res); + model.emit('es-removed', err, res) } - }); + }) } diff --git a/package.json b/package.json index 994c90cd..479113fb 100644 --- a/package.json +++ b/package.json @@ -3,28 +3,34 @@ "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", "version": "1.0.2", - "tags":["mongodb", "elastic search", "mongoose", "full text search"], + "tags": [ + "mongodb", + "elastic search", + "mongoose", + "full text search" + ], "repository": { "type": "git", "url": "git://github.com/mongoosastic/mongoosastic" }, - "main":"lib/mongoosastic.js", + "main": "lib/mongoosastic.js", "dependencies": { - "elastical":"0.0.12" + "elastical": "0.0.13", + "elasticsearch": "^2.4.3" }, "peerDependencies": { - "mongoose":"3.8.x" + "mongoose": "3.8.x" }, "devDependencies": { - "mocha":"*" - , "should":"*" - , "async":"*" - , "mongoose":"3.8.x" + "mocha": "*", + "should": "*", + "async": "*", + "mongoose": "3.8.x" }, "engines": { "node": ">= 0.8.0" }, - "scripts":{ - "test":"mocha -R spec -t 20000 -b" + "scripts": { + "test": "mocha -R spec -t 20000 -b" } } From 080f7f6fb95709bce44d8a971ca95d0525f4eea7 Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 28 Oct 2014 15:56:06 -0600 Subject: [PATCH 25/92] first pass at integrating elasticsearch driver --- lib/mongoosastic.js | 290 +++++++++++++++++--------- package.json | 3 +- test/alternative-index-method-test.js | 8 +- test/bulk-test.js | 4 +- 4 files changed, 194 insertions(+), 111 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 41157651..05b159e8 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -3,6 +3,7 @@ var elasticsearch = require('elasticsearch') , generator = new(require('./mapping-generator')) , serialize = require('./serialize') , events = require('events') + , nop = require('nop') module.exports = function Mongoosastic(schema, options){ var mapping = getMapping(schema) @@ -15,6 +16,8 @@ module.exports = function Mongoosastic(schema, options){ , port = options && options.port ? options.port : 9200 , esClient = new elasticsearch.Client({host: {host: host, port: port}}) , bulk = options && options.bulk + , bulkBuffer = [] + , bulkTimeout setUpMiddlewareHooks(schema) @@ -27,100 +30,130 @@ module.exports = function Mongoosastic(schema, options){ * Create the mapping. 
Takes an optionnal settings parameter and a callback that will be called once * the mapping is created - * @param settings String (optional) + * @param settings Object (optional) * @param callback Function */ schema.statics.createMapping = function(settings, cb) { - if (arguments.length === 0) { - throw new Error("Must provide a callback to createMapping function") - } else if(arguments.length === 1) { - cb = arguments[0] + if(arguments.length < 2) { + cb = arguments[0] || nop settings = undefined } setIndexNameIfUnset(this.modelName) - createMappingIfNotPresent(esClient, indexName, typeName, schema, settings, cb) + + createMappingIfNotPresent({ + client: esClient, + indexName: indexName, + typeName: typeName, + schema: schema, + settings: settings + }, cb) } /** - * @param indexName String (optional) - * @param typeName String (optional) + * @param options Object (optional) * @param callback Function */ - schema.methods.index = function(index, type, cb){ - if(cb == null && typeof index == 'function'){ - cb = index - index = null - }else if (cb == null && typeof type == 'function'){ - cb = type - type = null + schema.methods.index = function(options, cb){ + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} } - var model = this - setIndexNameIfUnset(model.constructor.modelName) + + setIndexNameIfUnset(this.constructor.modelName) + + var index = options.index || indexName + , type = options.type || typeName + if(bulk) { - bulkIndex(index || indexName, type || typeName, this) + bulkIndex(index, type, this) cb() } else { - //esClient.index(index || indexName, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb) esClient.index({ - index: - }, type || typeName, serialize(model, mapping), {id:model._id.toString()}, cb) + index: index, + type: type, + id: this._id.toString(), + body: serialize(this, mapping) + }, cb) } } /** * Unset elastic search index + * @param options - (optional) options for unIndex + * @param callback - callback when unIndex is complete */ - schema.methods.unIndex = function(){ - var model = this - setIndexNameIfUnset(model.constructor.modelName) - if(bulk) { - bulkDelete(index || indexName, type || typeName, this) - } else { - deleteByMongoId(esClient, model, indexName, typeName, 3) + schema.methods.unIndex = function(options, cb){ + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} } + + setIndexNameIfUnset(this.constructor.modelName) + + options.index = options.index || indexName + options.type = options.type || typeName + options.model = this + options.client = esClient + options.tries = 3 + + if(bulk) + bulkDelete(options, cb) + else + deleteByMongoId(options, cb) } /** * Delete all documents from a type/index + * @param options - (optional) specify index/type * @param callback - callback when truncation is complete */ - schema.statics.esTruncate = function(cb) { - esClient.delete(indexName, typeName, '', { - query: { + schema.statics.esTruncate = function(options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + var index = options.index || indexName + , type = options.type || typeName + + esClient.deleteByQuery({ + index: index, + type: type, + body: { query: { - "match_all": {} + query: { + "match_all": {} + } } } - }, function(err, res) { - cb(err) - }) + }, cb) } /** * Synchronize an existing collection * - * @param callback - callback when synchronization is complete + * @param query - query for documents you want to synchronize */ schema.statics.synchronize = 
function(query){ - var model = this - , em = new events.EventEmitter() + var em = new events.EventEmitter() , readyToClose , closeValues = [] , counter = 0 , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} + query = query || {} - setIndexNameIfUnset(model.modelName) - var stream = model.find(query).stream() + setIndexNameIfUnset(this.modelName) + + var stream = this.find(query).stream() stream.on('data', function(doc){ counter++ doc.save(function(err){ - if (err) { - em.emit('error', err) - return - } + if (err) + return em.emit('error', err) + doc.on('es-indexed', function(err, doc){ counter-- if(err){ @@ -133,15 +166,18 @@ module.exports = function Mongoosastic(schema, options){ }) }) }) + stream.on('close', function(a, b){ readyToClose = true closeValues = [a, b] if (counter === 0) close() }) + stream.on('error', function(err){ em.emit('error', err) }) + return em } /** @@ -152,30 +188,31 @@ module.exports = function Mongoosastic(schema, options){ * @param callback - callback called with search results */ schema.statics.search = function(query, options, cb){ + if (arguments.length === 2) { + cb = arguments[1] + options = {} + } + var model = this + setIndexNameIfUnset(model.modelName) - if(typeof options != 'object'){ - cb = options - options = {} - } - query.index = indexName - esClient.search(query, function(err, results, res){ + query.index = options.index || indexName + query.type = options.type || typeName + + esClient.search(query, function(err, res){ if(err){ cb(err) - }else{ - if (alwaysHydrate || options.hydrate) { + } else { + if (alwaysHydrate || options.hydrate) hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb) - }else{ + else cb(null, res) - } } }) } - var bulkBuffer = [] - - function bulkDelete(indexName, typeName, model) { + function bulkDelete(indexName, typeName, model, cb) { bulkAdd({delete: {index: indexName, type: typeName, id: model._id.toString()}}) } @@ -183,11 +220,10 @@ module.exports = function Mongoosastic(schema, options){ bulkAdd({index: {index: indexName, type: typeName, id: model._id.toString(), data: model}}) } - var bulkTimeout - function bulkAdd(instruction) { bulkBuffer.push(instruction) clearTimeout(bulkTimeout) + if(bulkBuffer.length >= (bulk.size || 1000)) { schema.statics.flush() } else { @@ -197,16 +233,27 @@ module.exports = function Mongoosastic(schema, options){ } } - schema.statics.flush = function(){ - esClient.bulk(bulkBuffer) + schema.statics.flush = function(cb){ + cb = cb || function(err) { if (err) console.log(err) } + + esClient.bulk({ + body: bulkBuffer + }, function(err) { + cb(err) + }) bulkBuffer = [] } - schema.statics.refresh = function(cb){ - var model = this - setIndexNameIfUnset(model.modelName) + schema.statics.refresh = function(options, cb){ + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } - esClient.refresh(indexName, cb) + setIndexNameIfUnset(this.modelName) + esClient.indices.refresh({ + index: options.index || indexName + }, cb) } function setIndexNameIfUnset(model){ @@ -225,43 +272,68 @@ module.exports = function Mongoosastic(schema, options){ * to persist to Elasticsearch */ function setUpMiddlewareHooks(schema) { - schema.post('remove', function(){ - var model = this - setIndexNameIfUnset(model.constructor.modelName) + schema.post('remove', function(done){ + setIndexNameIfUnset(this.constructor.modelName) + + var options = { + index: indexName, + type: typeName, + tries: 3, + model: this, + client: esClient + } + if(bulk) { - 
bulkDelete(indexName, typeName, this) + bulkDelete(options, done) } else { - deleteByMongoId(esClient, model, indexName, typeName, 3) + deleteByMongoId(options, done) } }) /** * Save in elastic search on save. */ - schema.post('save', function(){ - var model = this - model.index(function(err, res){ - model.emit('es-indexed', err, res) + schema.post('save', function(done){ + this.index(function(err, res){ + this.emit('es-indexed', err, res) + done(err) }) }) } } +function createMappingIfNotPresent(options, cb) { + var client = options.client + , indexName = options.indexName + , typeName = options.typeName + , schema = options.schema + , settings = options.settings - -function createMappingIfNotPresent(client, indexName, typeName, schema, settings, cb) { generator.generateMapping(schema, function(err, mapping) { var completeMapping = {} completeMapping[typeName] = mapping - client.indexExists(indexName, function(err, exists) { + client.indices.exists({index: indexName}, function(err, exists) { + if (err) + return cb(err) + if (exists) { - client.putMapping(indexName, typeName, completeMapping, cb) - } else { - client.createIndex(indexName, { - settings: settings, - mappings: completeMapping + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping }, cb) + } else { + client.indices.create({index: indexName}, function(err) { + if (err) + return cb(err) + + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb) + }) } }) }) @@ -269,12 +341,12 @@ function createMappingIfNotPresent(client, indexName, typeName, schema, settings function hydrate(res, model, options, cb){ var results = res.hits - var resultsMap = {} - var ids = results.hits.map(function(a, i){ - resultsMap[a._id] = i - return a._id - }) - var query = model.find({_id:{$in:ids}}) + , resultsMap = {} + , ids = results.hits.map(function(a, i){ + resultsMap[a._id] = i + return a._id + }) + , query = model.find({_id:{$in:ids}}) // Build Mongoose query based on hydrate options // Example: {lean: true, sort: '-name', select: 'address name'} @@ -283,9 +355,9 @@ function hydrate(res, model, options, cb){ }) query.exec(function(err, docs){ - if(err){ + if(err) { return cb(err) - }else{ + } else { var hits = [] docs.forEach(function(doc) { @@ -298,6 +370,7 @@ function hydrate(res, model, options, cb){ } }) } + function getMapping(schema){ var retMapping = {} generator.generateMapping(schema, function(err, mapping){ @@ -305,18 +378,27 @@ function getMapping(schema){ }) return retMapping } -function deleteByMongoId(client, model,indexName, typeName, tries){ - client.delete(indexName, typeName, model._id.toString(), function(err, res){ - if(err && err.message.indexOf('404') > -1){ - setTimeout(function(){ - if(tries <= 0){ - // future issue.. what do we do!? 
- }else{ - deleteByMongoId(client, model, indexName, typeName, --tries) - } - }, 500) - }else{ - model.emit('es-removed', err, res) - } - }) + +function deleteByMongoId(options, cb){ + var index = options.index + , type = options.type + , client = options.esClient + , model = options.model + , tries = options.tries + + client.delete(index, type, model._id.toString(), function(err, res){ + if(err && err.message.indexOf('404') > -1){ + setTimeout(function(){ + if(tries <= 0) { + return cb(err) + } else { + options.tries = --tries + deleteByMongoId(options, cb) + } + }, 500) + }else{ + model.emit('es-removed', err, res) + cb(err) + } + }) } diff --git a/package.json b/package.json index 479113fb..688ebc46 100644 --- a/package.json +++ b/package.json @@ -16,7 +16,8 @@ "main": "lib/mongoosastic.js", "dependencies": { "elastical": "0.0.13", - "elasticsearch": "^2.4.3" + "elasticsearch": "^2.4.3", + "nop": "^1.0.0" }, "peerDependencies": { "mongoose": "3.8.x" diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index 1d43eb01..34e2b8c5 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -43,10 +43,10 @@ describe('Index Method', function(){ it('should be able to index to alternative index', function(done){ Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ doc.message = 'I know taebo!'; - doc.index('public_tweets', function(){ + doc.index({index: 'public_tweets'}, function(){ setTimeout(function(){ - esClient.search({index: 'public_tweets', query:'know'}, function(err, results, res){ - res.hits.hits[0]._source.message.should.eql('I know taebo!'); + esClient.search({index: 'public_tweets', query:'know'}, function(err, res){ + res.hits[0]._source.message.should.eql('I know taebo!'); done(); }); }, config.indexingTimeout); @@ -56,7 +56,7 @@ describe('Index Method', function(){ it('should be able to index to alternative index and type', function(done){ Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ doc.message = 'I know taebo!'; - doc.index('public_tweets', 'utterings', function(){ + doc.index({index: 'public_tweets', type: 'utterings'}, function(){ setTimeout(function(){ esClient.search({index: 'public_tweets', type: 'utterings', query:'know'}, function(err, results, res){ res.hits.hits[0]._source.message.should.eql('I know taebo!'); diff --git a/test/bulk-test.js b/test/bulk-test.js index 658f3883..9e06a098 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -20,7 +20,7 @@ BookSchema.plugin(mongoosastic, { var Book = mongoose.model('Book2', BookSchema); -describe('Bulk mode', function() { +describe.only('Bulk mode', function() { var books = null; before(function(done) { @@ -70,4 +70,4 @@ function bookTitles() { books.push('ABABABA' + i); } return books; -} \ No newline at end of file +} From 7af9fffefdd397ce853990d2a323b2dc35514dc7 Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 28 Oct 2014 17:10:01 -0600 Subject: [PATCH 26/92] Close to fixing geo test --- lib/mongoosastic.js | 42 ++++++++++++++++++++++++++++++------------ test/bulk-test.js | 2 +- test/geo-test.js | 25 ++++++++++++++----------- 3 files changed, 45 insertions(+), 24 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 05b159e8..207da221 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -66,7 +66,11 @@ module.exports = function Mongoosastic(schema, options){ , type = options.type || typeName if(bulk) { - bulkIndex(index, type, this) + bulkIndex({ + index: index, + type: type, + 
model: this + }) cb() } else { esClient.index({ @@ -212,12 +216,25 @@ module.exports = function Mongoosastic(schema, options){ }) } - function bulkDelete(indexName, typeName, model, cb) { - bulkAdd({delete: {index: indexName, type: typeName, id: model._id.toString()}}) + function bulkDelete(options) { + bulkAdd({ + delete: { + _index: options.index || indexName, + _type: options.type || typeName, + _id: options.model._id.toString() + } + }) } - function bulkIndex(indexName, typeName, model) { - bulkAdd({index: {index: indexName, type: typeName, id: model._id.toString(), data: model}}) + function bulkIndex(options) { + bulkAdd({ + index: { + _index: options.index || indexName, + _type: options.type || typeName, + _id: options.model._id.toString() + } + }) + bulkAdd({doc: options.model}) } function bulkAdd(instruction) { @@ -272,7 +289,7 @@ module.exports = function Mongoosastic(schema, options){ * to persist to Elasticsearch */ function setUpMiddlewareHooks(schema) { - schema.post('remove', function(done){ + schema.post('remove', function(){ setIndexNameIfUnset(this.constructor.modelName) var options = { @@ -284,19 +301,20 @@ module.exports = function Mongoosastic(schema, options){ } if(bulk) { - bulkDelete(options, done) + bulkDelete(options, nop) } else { - deleteByMongoId(options, done) + deleteByMongoId(options, nop) } }) /** * Save in elastic search on save. */ - schema.post('save', function(done){ - this.index(function(err, res){ - this.emit('es-indexed', err, res) - done(err) + schema.post('save', function(){ + var model = this + + model.index(function(err, res){ + model.emit('es-indexed', err, res) }) }) } diff --git a/test/bulk-test.js b/test/bulk-test.js index 9e06a098..88d36d79 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -20,7 +20,7 @@ BookSchema.plugin(mongoosastic, { var Book = mongoose.model('Book2', BookSchema); -describe.only('Bulk mode', function() { +describe('Bulk mode', function() { var books = null; before(function(done) { diff --git a/test/geo-test.js b/test/geo-test.js index f530c98d..052bc71d 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -13,7 +13,7 @@ var GeoSchema; var GeoModel; -describe('GeoTest', function(){ +describe.only('GeoTest', function(){ before(function(done){ mongoose.connect(config.mongoUrl, function(){ config.deleteIndexIfExists(['geodocs'], function(){ @@ -92,7 +92,7 @@ describe('GeoTest', function(){ done(); })})})}) - var getDocOrderedQuery = {"query": {"match_all": {}},"sort":{"myId":{"order":"asc"}}}; + var getDocOrderedQuery = {"query": {"match_all": {}},"sort":"myId:asc"}; it('should be able to find geo coordinates in the indexes', function(done){ setTimeout(function(){ @@ -138,16 +138,19 @@ describe('GeoTest', function(){ it('should be able to search points inside frames', function(done){ var geoQuery = { - "query": {"match_all": {}}, - "filter": {"geo_shape": { - "frame": { - "shape": { - "type": "point", - "coordinates": [3,1] - }, - "relation": "intersects" + filtered: { + "query": {"match_all": {}}, + "filter": { + "geo_shape": { + "frame": { + "shape": { + "type": "point", + "coordinates": [3,1] + } + } + } } - }} + } } setTimeout(function(){ From c78cf0145b0d886cefd46d2c40cf976d45e2d55e Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 28 Oct 2014 18:00:51 -0600 Subject: [PATCH 27/92] had to scale back abstraction on search --- lib/mongoosastic.js | 4 ++-- test/geo-test.js | 41 ++++++++++++++++++++++++----------------- 2 files changed, 26 insertions(+), 19 deletions(-) diff --git a/lib/mongoosastic.js 
b/lib/mongoosastic.js index 207da221..c5e6a2d1 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -201,8 +201,8 @@ module.exports = function Mongoosastic(schema, options){ setIndexNameIfUnset(model.modelName) - query.index = options.index || indexName - query.type = options.type || typeName + query.index = options.index || query.index || indexName + query.type = options.type || query.type || typeName esClient.search(query, function(err, res){ if(err){ diff --git a/test/geo-test.js b/test/geo-test.js index 052bc71d..f0b1dbc8 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -13,7 +13,7 @@ var GeoSchema; var GeoModel; -describe.only('GeoTest', function(){ +describe('GeoTest', function(){ before(function(done){ mongoose.connect(config.mongoUrl, function(){ config.deleteIndexIfExists(['geodocs'], function(){ @@ -92,7 +92,12 @@ describe.only('GeoTest', function(){ done(); })})})}) - var getDocOrderedQuery = {"query": {"match_all": {}},"sort":"myId:asc"}; + var getDocOrderedQuery = { + "query": { + "match_all": {} + }, + "sort":"myId:asc" + }; it('should be able to find geo coordinates in the indexes', function(done){ setTimeout(function(){ @@ -138,14 +143,16 @@ describe.only('GeoTest', function(){ it('should be able to search points inside frames', function(done){ var geoQuery = { - filtered: { - "query": {"match_all": {}}, - "filter": { - "geo_shape": { - "frame": { - "shape": { - "type": "point", - "coordinates": [3,1] + query: { + filtered: { + "query": {"match_all": {}}, + "filter": { + "geo_shape": { + "frame": { + "shape": { + "type": "point", + "coordinates": [3,1] + } } } } @@ -154,23 +161,23 @@ describe.only('GeoTest', function(){ } setTimeout(function(){ - GeoModel.search(geoQuery,function(err, res){ + GeoModel.search({body: geoQuery},function(err, res){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(2); - geoQuery.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; - GeoModel.search(geoQuery,function(err, res){ + geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; + GeoModel.search({body: geoQuery},function(err, res){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(1); - geoQuery.filter.geo_shape.frame.shape.coordinates = [3,2]; - GeoModel.search(geoQuery,function(err, res){ + geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [3,2]; + GeoModel.search({body: geoQuery},function(err, res){ if (err) throw err; res.hits.total.should.eql(2); - geoQuery.filter.geo_shape.frame.shape.coordinates = [0,3]; - GeoModel.search(geoQuery,function(err, res){ + geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [0,3]; + GeoModel.search({body: geoQuery},function(err, res){ if (err) throw err; res.hits.total.should.eql(0); done(); From 6af3339c5c8a4e49440dc563df3c3ca4f3436f74 Mon Sep 17 00:00:00 2001 From: taterbase Date: Wed, 29 Oct 2014 12:26:52 -0600 Subject: [PATCH 28/92] All tests passing --- lib/mongoosastic.js | 27 ++++++++++----- test/alternative-index-method-test.js | 23 ++++++------ test/geo-test.js | 27 ++++++++------- test/index-test.js | 50 ++++++++++++++++++++------- test/synchronize-test.js | 2 +- test/truncate-test.js | 8 +++-- 6 files changed, 89 insertions(+), 48 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index c5e6a2d1..f43c2eb8 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -126,9 +126,7 @@ module.exports = function Mongoosastic(schema, options){ type: type, body: { query: { - 
query: { - "match_all": {} - } + match_all: {} } } }, cb) @@ -198,13 +196,20 @@ module.exports = function Mongoosastic(schema, options){ } var model = this + , esQuery = { + body: query, + index: options.index || indexName, + type: options.type || typeName + } - setIndexNameIfUnset(model.modelName) + Object.keys(options).forEach(function(opt) { + if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) + esQuery[opt] = options[opt] + }) - query.index = options.index || query.index || indexName - query.type = options.type || query.type || typeName + setIndexNameIfUnset(model.modelName) - esClient.search(query, function(err, res){ + esClient.search(esQuery, function(err, res){ if(err){ cb(err) } else { @@ -400,11 +405,15 @@ function getMapping(schema){ function deleteByMongoId(options, cb){ var index = options.index , type = options.type - , client = options.esClient + , client = options.client , model = options.model , tries = options.tries - client.delete(index, type, model._id.toString(), function(err, res){ + client.delete({ + index: index, + type: type, + id: model._id.toString() + }, function(err, res){ if(err && err.message.indexOf('404') > -1){ setTimeout(function(){ if(tries <= 0) { diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index 34e2b8c5..bea6a820 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -1,10 +1,8 @@ var mongoose = require('mongoose') - , elastical = require('elastical') , should = require('should') , config = require('./config') , Schema = mongoose.Schema , ObjectId = Schema.ObjectId - , esClient = new(require('elastical').Client) , mongoosastic = require('../lib/mongoosastic') , Tweet = require('./models/tweet'); @@ -12,11 +10,13 @@ describe('Index Method', function(){ before(function(done){ mongoose.connect(config.mongoUrl, function(){ config.deleteIndexIfExists(['tweets', 'public_tweets'], function(){ - config.createModelAndEnsureIndex(Tweet, { - user: 'jamescarr' - , message: "I know kung-fu!" - , post_date: new Date() - }, done); + Tweet.remove(function() { + config.createModelAndEnsureIndex(Tweet, { + user: 'jamescarr' + , message: "I know kung-fu!" 
+ , post_date: new Date() + }, done); + }) }); }); }); @@ -27,12 +27,13 @@ describe('Index Method', function(){ done(); }); }); + it('should be able to index it directly without saving', function(done){ Tweet.findOne({message:'I know kung-fu!'}, function(err, doc){ doc.message = 'I know nodejitsu!'; doc.index(function(){ setTimeout(function(){ - Tweet.search({query:'know'}, function(err, res){ + Tweet.search({query: {query_string: {query: 'know'}}}, function(err, res){ res.hits.hits[0]._source.message.should.eql('I know nodejitsu!'); done(); }); @@ -45,8 +46,8 @@ describe('Index Method', function(){ doc.message = 'I know taebo!'; doc.index({index: 'public_tweets'}, function(){ setTimeout(function(){ - esClient.search({index: 'public_tweets', query:'know'}, function(err, res){ - res.hits[0]._source.message.should.eql('I know taebo!'); + Tweet.search({query: {query_string: {query: 'know'}}}, {index: 'public_tweets'}, function(err, res){ + res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); }, config.indexingTimeout); @@ -58,7 +59,7 @@ describe('Index Method', function(){ doc.message = 'I know taebo!'; doc.index({index: 'public_tweets', type: 'utterings'}, function(){ setTimeout(function(){ - esClient.search({index: 'public_tweets', type: 'utterings', query:'know'}, function(err, results, res){ + Tweet.search({query: {query_string: {query: 'know'}}}, {index: 'public_tweets', type: 'utterings'}, function(err, res){ res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); diff --git a/test/geo-test.js b/test/geo-test.js index f0b1dbc8..5706cc4b 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -91,18 +91,15 @@ describe('GeoTest', function(){ res[0].frame.coordinates[1].should.eql([3,2]); done(); })})})}) - - var getDocOrderedQuery = { - "query": { - "match_all": {} - }, - "sort":"myId:asc" - }; it('should be able to find geo coordinates in the indexes', function(done){ setTimeout(function(){ // ES request - GeoModel.search(getDocOrderedQuery,function(err, res){ + GeoModel.search({ + query: { + match_all: {} + } + }, {sort: "myId:asc"}, function(err, res){ if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); @@ -126,7 +123,11 @@ describe('GeoTest', function(){ count.should.eql(2); setTimeout(function(){ - GeoModel.search(getDocOrderedQuery,function(err, res){ + GeoModel.search({ + query: { + match_all: {} + } + }, {sort: "myId:asc"}, function(err, res){ if (err) throw err; res.hits.total.should.eql(2); res.hits.hits[0]._source.frame.type.should.eql('envelope'); @@ -161,23 +162,23 @@ describe('GeoTest', function(){ } setTimeout(function(){ - GeoModel.search({body: geoQuery},function(err, res){ + GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(2); geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; - GeoModel.search({body: geoQuery},function(err, res){ + GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(1); geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [3,2]; - GeoModel.search({body: geoQuery},function(err, res){ + GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(2); geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [0,3]; - GeoModel.search({body: geoQuery},function(err, res){ + GeoModel.search(geoQuery,function(err, res){ 
if (err) throw err; res.hits.total.should.eql(0); done(); diff --git a/test/index-test.js b/test/index-test.js index f0366e21..dd898fde 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -107,19 +107,33 @@ describe('indexing', function(){ }); it('should be able to execute a simple query', function(done){ - Tweet.search({query:'Riak'}, function(err, results) { + Tweet.search({ + query: { + query_string: { + query: 'Riak' + } + } + }, function(err, results) { results.hits.total.should.eql(1) results.hits.hits[0]._source.message.should.eql('I like Riak better') done(); }); }); + it('should be able to execute a simple query', function(done){ - Tweet.search({query:'jamescarr'}, function(err, results) { + Tweet.search({ + query: { + query_string: { + query: 'jamescarr' + } + } + }, function(err, results) { results.hits.total.should.eql(1) results.hits.hits[0]._source.message.should.eql('I like Riak better') done() }); }); + it('should report errors', function(done){ Tweet.search({queriez:'jamescarr'}, function(err, results) { err.message.should.match(/SearchPhaseExecutionException/); @@ -140,7 +154,13 @@ describe('indexing', function(){ it('should remove from index when model is removed', function(done){ tweet.remove(function(){ setTimeout(function(){ - Tweet.search({query:'shouldnt'}, function(err, res){ + Tweet.search({ + query: { + query_string: { + query: 'shouldnt' + } + } + }, function(err, res){ res.hits.total.should.eql(0); done(); }); @@ -150,7 +170,13 @@ describe('indexing', function(){ it('should remove only index', function(done){ tweet.on('es-removed', function(err, res){ setTimeout(function(){ - Tweet.search({query:'shouldnt'}, function(err, res){ + Tweet.search({ + query: { + query_string: { + query: 'shouldnt' + } + } + }, function(err, res){ res.hits.total.should.eql(0); done(); }); @@ -197,14 +223,14 @@ describe('indexing', function(){ }); it('should only find models of type Tweet', function(done){ - Tweet.search({query:'Dude'}, function(err, res){ + Tweet.search({query: {query_string: {query: 'Dude'}}}, function(err, res){ res.hits.total.should.eql(1); res.hits.hits[0]._source.user.should.eql('Dude'); done(); }); }); it('should only find models of type Talk', function(done){ - Talk.search({query:'Dude'}, function(err, res){ + Talk.search({query: {query_string: {query: 'Dude'}}}, function(err, res){ res.hits.total.should.eql(1); res.hits.hits[0]._source.title.should.eql('Dude'); done(); @@ -222,7 +248,7 @@ describe('indexing', function(){ }); it('when gathering search results while respecting default hydrate options', function(done){ - Person.search({query:'James'}, function(err, res) { + Person.search({query: {query_string: {query: 'James'}}}, function(err, res) { res.hits.hits[0].address.should.eql('Exampleville, MO'); res.hits.hits[0].name.should.eql('James Carr'); res.hits.hits[0].should.not.have.property('phone'); @@ -243,7 +269,7 @@ describe('indexing', function(){ }); it('should only return indexed fields', function(done){ - Talk.search({query:'cool'}, function(err, res) { + Talk.search({query: {query_string: {query: 'cool'}}}, function(err, res) { res.hits.total.should.eql(1); var talk = res.hits.hits[0]._source; @@ -257,7 +283,7 @@ describe('indexing', function(){ }); it('should hydrate returned documents if desired', function(done){ - Talk.search({query:'cool'}, {hydrate:true}, function(err, res) { + Talk.search({query: {query_string: {query: 'cool'}}}, {hydrate:true}, function(err, res) { res.hits.total.should.eql(1) var talk = res.hits.hits[0] @@ -282,7 
+308,7 @@ describe('indexing', function(){ }); it('should only return indexed fields and have indexed sub-objects', function(done){ - Person.search({query:'Bob'}, function(err, res) { + Person.search({query: {query_string: {query: 'Bob'}}}, function(err, res) { res.hits.hits[0].address.should.eql('Exampleville, MO'); res.hits.hits[0].name.should.eql('Bob Carr'); res.hits.hits[0].should.have.property('life'); @@ -297,7 +323,7 @@ describe('indexing', function(){ }); it('should allow extra query options when hydrating', function(done){ - Talk.search({query:'cool'}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { + Talk.search({query: {query_string: {query: 'cool'}}}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { res.hits.total.should.eql(1) var talk = res.hits.hits[0] @@ -336,7 +362,7 @@ describe('indexing', function(){ }); var Bum = mongoose.model('bum', BumSchema); config.createModelAndEnsureIndex(Bum, {name:'Roger Wilson'}, function(){ - Bum.search({query:'Wilson'}, function(err, results){ + Bum.search({query: {query_string: {query: 'Wilson'}}}, function(err, results){ results.hits.total.should.eql(1); done(); }); diff --git a/test/synchronize-test.js b/test/synchronize-test.js index f7b42237..d3d32096 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -47,7 +47,7 @@ describe('Synchronize', function(){ stream.on('close', function(){ count.should.eql(53); setTimeout(function(){ - Book.search({query:'American'}, function(err, results){ + Book.search({query: {query_string: {query: 'American'}}}, function(err, results){ results.hits.total.should.eql(2); done(); }); diff --git a/test/truncate-test.js b/test/truncate-test.js index cafcfc20..d1409e48 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -44,7 +44,11 @@ describe('Truncate', function() { it('should be able to truncate all documents', function(done) { Dummy.esTruncate(function(err) { Dummy.search({ - query: 'Text1' + query: { + query_string: { + query: 'Text1' + } + } }, function(err, results) { results.hits.total.should.eql(0); done(err); @@ -52,4 +56,4 @@ describe('Truncate', function() { }); }); }); -}); \ No newline at end of file +}); From 755fd8ec6924c17ef24572add67bb32efb57ae67 Mon Sep 17 00:00:00 2001 From: taterbase Date: Wed, 29 Oct 2014 12:45:48 -0600 Subject: [PATCH 29/92] remove elastical dependency --- lib/mongoosastic.js | 1 - package.json | 1 - test/boost-field-test.js | 8 +++++--- test/bulk-test.js | 2 -- test/config.js | 10 +++++++--- test/geo-test.js | 8 +++++--- test/index-test.js | 26 +++++++++++++++++--------- test/search-features-test.js | 2 -- test/synchronize-test.js | 2 -- test/truncate-test.js | 2 -- 10 files changed, 34 insertions(+), 28 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f43c2eb8..2b8a45c5 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -1,5 +1,4 @@ var elasticsearch = require('elasticsearch') - , elastical = require('elastical') , generator = new(require('./mapping-generator')) , serialize = require('./serialize') , events = require('events') diff --git a/package.json b/package.json index 688ebc46..44b8cfd9 100644 --- a/package.json +++ b/package.json @@ -15,7 +15,6 @@ }, "main": "lib/mongoosastic.js", "dependencies": { - "elastical": "0.0.13", "elasticsearch": "^2.4.3", "nop": "^1.0.0" }, diff --git a/test/boost-field-test.js b/test/boost-field-test.js index 5346330b..26b8290c 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -1,6 +1,5 @@ var mongoose = 
require('mongoose') - , elastical = require('elastical') - , esClient = new(require('elastical').Client) + , esClient = new(require('elasticsearch').Client) , should = require('should') , config = require('./config') , Schema = mongoose.Schema @@ -29,7 +28,10 @@ describe('Add Boost Option Per Field', function(){ it('should create a mapping with boost field added', function(done){ BlogPost.createMapping(function(err, mapping){ - esClient.getMapping('blogposts', 'blogpost', function(err, mapping){ + esClient.indices.getMapping({ + index: 'blogposts', + type: 'blogpost' + }, function(err, mapping){ /* elasticsearch 1.0 & 0.9 support */ var props = mapping.blogpost != undefined ? diff --git a/test/bulk-test.js b/test/bulk-test.js index 88d36d79..f7dc243d 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -1,6 +1,4 @@ var mongoose = require('mongoose'), - elastical = require('elastical'), - esClient = new(require('elastical').Client)(), should = require('should'), config = require('./config'), Schema = mongoose.Schema, diff --git a/test/config.js b/test/config.js index 2fad571a..9943b81a 100644 --- a/test/config.js +++ b/test/config.js @@ -1,4 +1,4 @@ -var esClient = new(require('elastical').Client) +var esClient = new(require('elasticsearch').Client) , async = require('async'); const INDEXING_TIMEOUT = 1100; @@ -8,9 +8,13 @@ module.exports = { , indexingTimeout: INDEXING_TIMEOUT , deleteIndexIfExists: function(indexes, done){ async.forEach(indexes, function(index, cb){ - esClient.indexExists(index, function(err, exists){ + esClient.indices.exists({ + index: index + }, function(err, exists){ if(exists){ - esClient.deleteIndex(index, cb); + esClient.indices.delete({ + index: index + }, cb); }else{ cb(); } diff --git a/test/geo-test.js b/test/geo-test.js index 5706cc4b..7b09f26d 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -1,6 +1,5 @@ var mongoose = require('mongoose') - , elastical = require('elastical') - , esClient = new(require('elastical').Client) + , esClient = new(require('elasticsearch').Client) , should = require('should') , config = require('./config') , Schema = mongoose.Schema @@ -38,7 +37,10 @@ describe('GeoTest', function(){ GeoModel.createMapping(function(err, mapping){ GeoModel.remove(function(){ - esClient.getMapping('geodocs', 'geodoc', function(err, mapping){ + esClient.indices.getMapping({ + index: 'geodocs', + type: 'geodoc' + }, function(err, mapping){ (mapping.geodoc != undefined ? 
mapping.geodoc: /* ES 0.9.11 */ mapping.geodocs.mappings.geodoc /* ES 1.0.0 */ diff --git a/test/index-test.js b/test/index-test.js index dd898fde..9c786f74 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -1,10 +1,9 @@ var mongoose = require('mongoose') - , elastical = require('elastical') , should = require('should') , config = require('./config') , Schema = mongoose.Schema , ObjectId = Schema.ObjectId - , esClient = new(require('elastical').Client) + , esClient = new(require('elasticsearch').Client) , mongoosastic = require('../lib/mongoosastic') , Tweet = require('./models/tweet'); @@ -99,8 +98,12 @@ describe('indexing', function(){ }); it("should use the model's id as ES id", function(done){ Tweet.findOne({message:"I like Riak better"}, function(err, doc){ - esClient.get('tweets', doc._id.toString(), function(err, res){ - res.message.should.eql(doc.message); + esClient.get({ + index: 'tweets', + type: 'tweet', + id: doc._id.toString() + }, function(err, res){ + res._source.message.should.eql(doc.message); done() }); }); @@ -342,13 +345,18 @@ describe('indexing', function(){ describe('Existing Index', function(){ before(function(done){ config.deleteIndexIfExists(['ms_sample'], function(){ - esClient.createIndex('ms_sample', {mappings:{ - bum:{ - properties: { - name: {type:'string'} + esClient.indices.create({ + index: 'ms_sample', + body: { + mappings:{ + bum:{ + properties: { + name: {type:'string'} + } + } } } - }}, done); + }, done); }); }); diff --git a/test/search-features-test.js b/test/search-features-test.js index 6954b07f..93ad2baa 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -1,5 +1,4 @@ var mongoose = require('mongoose') - , elastical = require('elastical') , should = require('should') , config = require('./config') , Schema = mongoose.Schema @@ -7,7 +6,6 @@ var mongoose = require('mongoose') , async = require('async') , mongoosastic = require('../lib/mongoosastic'); -var esClient = new elastical.Client(); var BondSchema = new Schema({ name: String , type: {type:String, default:'Other Bond'} diff --git a/test/synchronize-test.js b/test/synchronize-test.js index d3d32096..0a9f8251 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -1,6 +1,4 @@ var mongoose = require('mongoose') - , elastical = require('elastical') - , esClient = new(require('elastical').Client) , should = require('should') , config = require('./config') , Schema = mongoose.Schema diff --git a/test/truncate-test.js b/test/truncate-test.js index d1409e48..93cd106d 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -1,6 +1,4 @@ var mongoose = require('mongoose'), - elastical = require('elastical'), - esClient = new(require('elastical').Client), should = require('should'), config = require('./config'), Schema = mongoose.Schema, From 441617138d3da3d8009fde46d6f4c77fe65d0c6a Mon Sep 17 00:00:00 2001 From: taterbase Date: Wed, 29 Oct 2014 12:56:43 -0600 Subject: [PATCH 30/92] Break out docs --- CONTRIBUTING.md | 53 ++++++++++++++++++++++++++++++++++++++ LICENSE.md | 9 +++++++ readme.md | 67 ------------------------------------------------- 3 files changed, 62 insertions(+), 67 deletions(-) create mode 100644 CONTRIBUTING.md create mode 100644 LICENSE.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..7d8e095c --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,53 @@ +# Contributing +Pull requests are always welcome as long as an accompanying test case is +associated. 
+ +This project is configured to use [git +flow](https://github.com/nvie/gitflow/) and the following conventions +are used: + +* ``develop`` - represents current active development and can possibly be + unstable. +* ``master`` - pristine copy of repository, represents the currently + stable release found in the npm index. +* ``feature/**`` - represents a new feature being worked on + +If you wish to contribute, the only requirement is to: + +- branch a new feature branch from develop (if you're working on an + issue, prefix it with the issue number) +- make the changes, with accompanying test cases +- issue a pull request against develop branch + +Although I use git flow and prefix feature branches with "feature/" I +don't require this for pull requests... all I care is that the feature +branch name makes sense. + +Pulls requests against master or pull requests branched from master will +be rejected. + +## Examples +Someone picks up issue #39 on selective indexing. + +Good branch names: +* 39-selective-indexing +* feature/39-selective-indexing + +Someone submits a new feature that allows shard configuration: + +Good branch names: +* feature/shard-configuration +* shard-configuration +* or file an issue, then create a feature branch + +Feel free to ping me if you need help! :) + +## Running Tests +In order to run the tests you will need: + +* An elasticsearch server running on port 9200 +* A mongodb server +* [mocha](http://visionmedia.github.com/mocha/) + +With those installed, running ''npm test'' will run the tests with the +preferred timeout (which is extended for integration tests. diff --git a/LICENSE.md b/LICENSE.md new file mode 100644 index 00000000..61673abd --- /dev/null +++ b/LICENSE.md @@ -0,0 +1,9 @@ +[The MIT License](https://tldrlegal.com/l/mit) + +Copyright (c) 2012 James R. Carr + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/readme.md b/readme.md index 40a54ae1..a3bc3af0 100644 --- a/readme.md +++ b/readme.md @@ -495,70 +495,3 @@ SupervisorSchema.plugin(mongoosastic, {index: 'employees', type:'manager'}); var Supervisor = mongoose.model('supervisor', SupervisorSchema); ``` - -## Contributing -Pull requests are always welcome as long as an accompanying test case is -associated. - -This project is configured to use [git -flow](https://github.com/nvie/gitflow/) and the following conventions -are used: - -* ``develop`` - represents current active development and can possibly be - unstable. -* ``master`` - pristine copy of repository, represents the currently - stable release found in the npm index. 
-* ``feature/**`` - represents a new feature being worked on - -If you wish to contribute, the only requirement is to: - -- branch a new feature branch from develop (if you're working on an - issue, prefix it with the issue number) -- make the changes, with accompanying test cases -- issue a pull request against develop branch - -Although I use git flow and prefix feature branches with "feature/" I -don't require this for pull requests... all I care is that the feature -branch name makes sense. - -Pulls requests against master or pull requests branched from master will -be rejected. - -#### Examples -Someone picks up issue #39 on selective indexing. - -Good branch names: -* 39-selective-indexing -* feature/39-selective-indexing - -Someone submits a new feature that allows shard configuration: - -Good branch names: -* feature/shard-configuration -* shard-configuration -* or file an issue, then create a feature branch - -Feel free to ping me if you need help! :) - -### Running Tests -In order to run the tests you will need: - -* An elasticsearch server running on port 9200 -* A mongodb server -* [mocha](http://visionmedia.github.com/mocha/) - -With those installed, running ''npm test'' will run the tests with the -preferred timeout (which is extended for integration tests. - - -## License -[The MIT License](https://tldrlegal.com/l/mit) - -Copyright (c) 2012 James R. Carr - -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
- From 1c58fbe9d3d9fde6f541a92e84c1baa307934dc9 Mon Sep 17 00:00:00 2001 From: George Shank Date: Wed, 29 Oct 2014 12:57:20 -0600 Subject: [PATCH 31/92] uppercase README --- readme.md => README.md | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename readme.md => README.md (100%) diff --git a/readme.md b/README.md similarity index 100% rename from readme.md rename to README.md From 953a3b354a259fcc400c037e3e17a9003966e86c Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 11:32:23 -0600 Subject: [PATCH 32/92] refactor bulk api --- lib/mongoosastic.js | 41 ++++++++++++++++++++++++++++++----------- 1 file changed, 30 insertions(+), 11 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 2b8a45c5..cf5fe927 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -70,7 +70,7 @@ module.exports = function Mongoosastic(schema, options){ type: type, model: this }) - cb() + setImmediate(cb) } else { esClient.index({ index: index, @@ -142,7 +142,13 @@ module.exports = function Mongoosastic(schema, options){ , closeValues = [] , counter = 0 , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} - + + //Set indexing to be bulk when synchronizing to make synchronizing faster + bulk = { + delay: 1000, + size: 1000 + } + query = query || {} setIndexNameIfUnset(this.modelName) @@ -162,17 +168,18 @@ module.exports = function Mongoosastic(schema, options){ }else{ em.emit('data', null, doc) } - if (readyToClose && counter === 0) - close() }) }) }) stream.on('close', function(a, b){ - readyToClose = true closeValues = [a, b] - if (counter === 0) - close() + var closeInterval = setInterval(function() { + if (counter === 0 && bulkBuffer.length === 0) { + clearInterval(closeInterval) + close() + } + }, 1000) }) stream.on('error', function(err){ @@ -220,7 +227,7 @@ module.exports = function Mongoosastic(schema, options){ }) } - function bulkDelete(options) { + function bulkDelete(options, cb) { bulkAdd({ delete: { _index: options.index || indexName, @@ -228,6 +235,7 @@ module.exports = function Mongoosastic(schema, options){ _id: options.model._id.toString() } }) + cb() } function bulkIndex(options) { @@ -238,18 +246,29 @@ module.exports = function Mongoosastic(schema, options){ _id: options.model._id.toString() } }) - bulkAdd({doc: options.model}) + bulkAdd(options.model) + } + + function clearBulkTimeout() { + clearTimeout(bulkTimeout) + bulkTimeout = undefined } function bulkAdd(instruction) { bulkBuffer.push(instruction) - clearTimeout(bulkTimeout) + + //Return because we need the doc being indexed + //Before we start inserting + if (instruction.index && instruction.index._index) + return if(bulkBuffer.length >= (bulk.size || 1000)) { schema.statics.flush() - } else { + clearBulkTimeout() + } else if (bulkTimeout === undefined){ bulkTimeout = setTimeout(function(){ schema.statics.flush() + clearBulkTimeout() }, bulk.delay || 1000) } } From 7c84d8ee8b0b041c01c7ab329be8c58b30611a87 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 11:34:39 -0600 Subject: [PATCH 33/92] don't stop bulk options with synchronize --- lib/mongoosastic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index cf5fe927..afd8d4d4 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -144,7 +144,7 @@ module.exports = function Mongoosastic(schema, options){ , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} //Set indexing to be bulk when synchronizing to make synchronizing faster 
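      // fall back to these bulk defaults only when no bulk options were passed to the plugin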
- bulk = { + bulk = bulk || { delay: 1000, size: 1000 } From 0ac8a968cefcdd2b4ac93169e4fc10aeb7ac9907 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 12:23:13 -0600 Subject: [PATCH 34/92] refresh readme --- README.md | 374 ++++++++++++++++++++++++++---------------------------- 1 file changed, 181 insertions(+), 193 deletions(-) diff --git a/README.md b/README.md index a3bc3af0..9ffb6624 100644 --- a/README.md +++ b/README.md @@ -3,28 +3,51 @@ Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) [![NPM version](https://badge.fury.io/js/mongoosastic.svg)](http://badge.fury.io/js/mongoosastic) -A [mongoose](http://mongoosejs.com/) plugin that indexes models into [elasticsearch](http://www.elasticsearch.org/). I kept -running into cases where I needed full text search capabilities in my -mongodb based models only to discover mongodb has none. In addition to -full text search, I also needed the ability to filter ranges of data -points in the searches and even highlight matches. For these reasons, -elastic search was a perfect fit and hence this project. - - +Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatically index your models into [elasticsearch](http://www.elasticsearch.org/). + +- [Installation](#installation) +- [Setup](#setup) +- [Indexing](#indexing) + - [Saving a document](#saving-a-document) + - [Indexing nested models](#indexing-nested-models) + - [Indexing an existing collection](#indexing-an-existing-collection) + - [Bulk indexing](#bulk-indexing) + - [Indexing on demand](#indexing-on-demand) + - [Truncating an index](#truncating-an-index) +- [Mapping](#mapping) + - [Geo mapping](#geo-mapping) + - [Indexing a geo point](#indexing-a-geo-point) + - [Indexing a geo shape](#indexing-a-geo-shape) + - [Creating mappings on-demand](#creating-mappings-on-demand) +- [Queries](#queries) + - [Hydration](#hydration) ## Installation ```bash -npm install mongoosastic - +npm install -S mongoosastic ``` -Or add it to your package.json +## Setup + +### Model.plugin(mongoosastic, options) -## Usage +Options are: -To make a model indexed into elastic search simply add the plugin. +* `index` - the index in elastic search to use. Defaults to the + pluralization of the model name. +* `type` - the type this model represents in elastic search. Defaults + to the model name. +* `host` - the host elastic search is running on +* `port` - the port elastic search is running on +* `auth` - the authentication needed to reach elastic search server. In the standard format of 'username:password' +* `protocol` - the protocol the elastic search server uses. Defaults to http +* `hydrate` - whether or not to lookup results in mongodb before +* `hydrateOptions` - options to pass into hydrate function +* `bulk` - size and delay options for bulk indexing +To have a model indexed into elastic search simply add the plugin. + ```javascript var mongoose = require('mongoose') , mongoosastic = require('mongoosastic') @@ -64,7 +87,40 @@ User.plugin(mongoosastic) In this case only the name field will be indexed for searching. -####Indexing Nested Models +Now, by adding the plugin, the model will have a new method called +`search` which can be used to make simple to complex searches. 
The `search` +method accepts [standard elasticsearch query DSL](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-queries.html) + +```javascript +User.search({ + query_string: { + query: "john" + } +}, function(err, results) { + // results here +}); + +``` + +## Indexing + +### Saving a document +The indexing takes place after saving inside the mongodb and is a defered process. +One can check the end of the indexion catching es-indexed event. + +```javascript +doc.save(function(err){ + if (err) throw err; + /* Document indexation on going */ + doc.on('es-indexed', function(err, res){ + if (err) throw err; + /* Document is indexed */ + }); + }); +``` + + +###Indexing Nested Models In order to index nested models you can refer following example. ```javascript @@ -85,16 +141,6 @@ var User = new Schema({ User.plugin(mongoosastic) ``` -Finally, adding the plugin will add a new method to the model called -search which can be used to make simple to complex searches. - -```javascript - -User.search({query:"john"}, function(err, results) { - // results here -}); - -``` ### Indexing An Existing Collection Already have a mongodb collection that you'd like to index using this @@ -128,8 +174,6 @@ You can also synchronize a subset of documents based on a query! var stream = Book.synchronize({author: 'Arthur C. Clarke'}) ``` -One caveat... synchronization is kinda slow for now. Use with care. - ### Bulk Indexing You can also specify `bulk` options with mongoose which will utilize elasticsearch's bulk indexing api. This will cause the `synchronize` function to use bulk indexing as well. @@ -145,7 +189,38 @@ BookSchema.plugin(mongoosastic, { }); ``` -### Per Field Options +### Indexing On Demand +You can do on-demand indexes using the `index` function + +```javascript +Dude.findOne({name:'Jeffery Lebowski', function(err, dude){ + dude.awesome = true; + dude.index(function(err, res){ + console.log("egads! I've been indexed!"); + }); +}); +``` + +The index method takes 2 arguments: + +* `options` (optional) - {index, type} - the index and type to publish to. Defaults to the standard index and type. + the model was setup with. +* `callback` - callback function to be invoked when model has been + indexed. + +Note that indexing a model does not mean it will be persisted to +mongodb. Use save for that. + +### Truncating an index + +The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. + +```javascript +GarbageModel.truncate(function(err){...}); +``` + +## Mapping + Schemas can be configured to have special options per field. These match with the existing [field mapping configurations](http://www.elasticsearch.org/guide/reference/mapping/core-types.html) defined by elasticsearch with the only difference being they are all prefixed by "es_". @@ -165,46 +240,7 @@ This example uses a few other mapping fields... such as null_value and type (which overrides whatever value the schema type is, useful if you want stronger typing such as float). -#### Creating Mappings for These Features -The way this can be mapped in elastic search is by creating a mapping -for the index the model belongs to. Currently to the best of my -knowledge mappings are create once when creating an index and can only -be modified by destroying the index. 
The optionnal first parameter is -the settings option for the index (for defining analysers for example or whatever is [there](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/indices-update-settings.html). - -As such, creating the mapping is a one time operation and can be done as -follows (using the BookSchema as an example): - -```javascript -var BookSchema = new Schema({ - title: {type:String, es_boost:2.0} - , author: {type:String, es_null_value:"Unknown Author"} - , publicationDate: {type:Date, es_type:'date'} - -BookSchema.plugin(mongoosastic); -var Book = mongoose.model('Book', BookSchema); -Book.createMapping({ - "analysis" : { - "analyzer":{ - "content":{ - "type":"custom", - "tokenizer":"whitespace" - } - } - } -},function(err, mapping){ - // do neat things here -}); - -``` -This feature is still a work in progress. As of this writing you'll have -to manage whether or not you need to create the mapping, mongoosastic -will make no assumptions and simply attempt to create the mapping. If -the mapping already exists, an Exception detailing such will be -populated in the `err` argument. - -#### Mapping options -There are various types that can be defined in elasticsearch. Check out http://www.elasticsearch.org/guide/reference/mapping/ for more information. Here are examples to the currently possible definitions in mongoosastic: +There are various mapping options that can be defined in elasticsearch. Check out [http://www.elasticsearch.org/guide/reference/mapping/](http://www.elasticsearch.org/guide/reference/mapping/) for more information. Here are examples to the currently possible definitions in mongoosastic: ```javascript var ExampleSchema = new Schema({ @@ -294,24 +330,24 @@ Notice that the name of the field containing the ES geo data must start by #### Indexing a geo point ```javascript - var geo = new GeoModel({ - … - geo_with_lat_lon: { lat: 1, lon: 2} - … - }); +var geo = new GeoModel({ + /* … */ + geo_with_lat_lon: { lat: 1, lon: 2} + /* … */ +}); ``` #### Indexing a geo shape ```javascript - var geo = new GeoModel({ - … - geo_shape:{ - type:'envelope', - coordinates: [[3,4],[1,2] /* Arrays of coord : [[lon,lat],[lon,lat]] */ - } - … - }); +var geo = new GeoModel({ + … + geo_shape:{ + type:'envelope', + coordinates: [[3,4],[1,2] /* Arrays of coord : [[lon,lat],[lon,lat]] */ + } + … +}); ``` Mapping, indexing and searching example for geo shape can be found in test/geo-test.js @@ -320,33 +356,68 @@ For example, one can retrieve the list of document where the shape contain a spe point (or polygon...) ```javascript - var geoQuery = { - "query": {"match_all": {}}, - "filter": {"geo_shape": { - "geo_shape": { - "shape": { - "type": "point", - "coordinates": [3,1] - }, - "relation": "intersects" +var geoQuery = { + "match_all": {} + } + +var geoFilter = { + geo_shape: { + geo_shape": { + shape: { + type: "point", + coordinates: [3,1] + } } - }} + } } + +GeoModel.search(geoQuery, {filter: geoFilter}, function(err, res) { /* ... 
*/ }) ``` -### Advanced Queries +### Creating Mappings On Demand +Creating the mapping is a one time operation and can be done as +follows (using the BookSchema as an example): + +```javascript +var BookSchema = new Schema({ + title: {type:String, es_boost:2.0} + , author: {type:String, es_null_value:"Unknown Author"} + , publicationDate: {type:Date, es_type:'date'} + +BookSchema.plugin(mongoosastic); +var Book = mongoose.model('Book', BookSchema); +Book.createMapping({ + "analysis" : { + "analyzer":{ + "content":{ + "type":"custom", + "tokenizer":"whitespace" + } + } + } +},function(err, mapping){ + // do neat things here +}); + +``` +This feature is still a work in progress. As of this writing you'll have +to manage whether or not you need to create the mapping, mongoosastic +will make no assumptions and simply attempt to create the mapping. If +the mapping already exists, an Exception detailing such will be +populated in the `err` argument. + + +## Queries The full query DSL of elasticsearch is exposed through the search method. For example, if you wanted to find all people between ages 21 and 30: ```javascript Person.search({ - query:{ - range: { - age:{ - from:21 - , to: 30 - } + range: { + age:{ + from:21 + , to: 30 } } }, function(err, people){ @@ -354,9 +425,19 @@ Person.search({ }); ``` - See the elasticsearch [Query DSL](http://www.elasticsearch.org/guide/reference/query-dsl/) docs for more information. +You can also specify query options like [sorts](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/search-request-sort.html#search-request-sort) + +```javascript +Person.search({/* ... */}, {sort: "age:asc"}, function(err, people){ + //sorted results +}); +``` + +Options for queries must adhere to the [javascript elasticsearch driver specs](http://www.elasticsearch.org/guide/en/elasticsearch/client/javascript-api/current/api-reference.html#api-search). + + ### Hydration By default objects returned from performing a search will be the objects as is in elastic search. This is useful in cases where only what was @@ -368,7 +449,7 @@ provide {hydrate:true} as the second argument to a search call. ```javascript -User.search({query:"john"}, {hydrate:true}, function(err, results) { +User.search({query_string: {query: "john"}}, {hydrate:true}, function(err, results) { // results here }); @@ -379,7 +460,7 @@ how to query for the mongoose object. ```javascript -User.search({query:"john"}, {hydrate:true, hydrateOptions: {select: 'name age'}}, function(err, results) { +User.search({query_string: {query: "john"}}, {hydrate:true, hydrateOptions: {select: 'name age'}}, function(err, results) { // results here }); @@ -402,96 +483,3 @@ var User = new Schema({ User.plugin(mongoosastic, {hydrate:true, hydrateOptions: {lean: true}}) ``` - - -### Indexing On Demand -While developing mongoose I came across a scenario where we needed to be -able to save models (and search them) but a single action would -"publish" those models to be searched from a public site. To address -this I create a new method: `index`. - -#### Usage -Usage is as simple as calling index on an existing model. - -```javascript -Dude.findOne({name:'Jeffery Lebowski', function(err, dude){ - dude.awesome = true; - dude.index(function(err, res){ - console.log("egads! I've been indexed!"); - }); -}); -``` - -The index method takes 3 arguments: - -* `index` (optional) - the index to publish to. Defaults to the index - the model was setup with. -* `type` (optional) - the type to publish as. 
Defaults to the type the - model was setup with. -* `callback` - callback function to be invoked when model has been - indexed. - -Note that indexing a model does not mean it will be persisted to -mongodb. Use save for that. - -### Saving a document -The indexing takes place after saving inside the mongodb and is a defered process. -One can check the end of the indexion catching es-indexed event. - -```javascript -doc.save(function(err){ - if (err) throw err; - /* Document indexation on going */ - doc.on('es-indexed', function(err, res){ - if (err) throw err; - /* Document is indexed */ - }); - }); -``` - -### Truncating an index - -The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. - -#### Usage - -```javascript -GarbageModel.truncate(function(err){...}); -``` - -### Model.plugin(mongoosastic, options) - -Options are: - -* `index` - the index in elastic search to use. Defaults to the - pluralization of the model name. -* `type` - the type this model represents in elastic search. Defaults - to the model name. -* `host` - the host elastic search is running on -* `port` - the port elastic search is running on -* `auth` - the authentication needed to reach elastic search server. In the standard format of 'username:password' -* `protocol` - the protocol the elastic search server uses. Defaults to http -* `hydrate` - whether or not to lookup results in mongodb before - returning results from a search. Defaults to false. -* `curlDebug` - elastical debugging. Defaults to false. - -Here are all other avaible options invloved in connection to elastic search server: -https://ramv.github.io/node-elastical/docs/classes/Client.html - -Experimental Options: - -#### Specifying Different Index and Type -Perhaps you have an existing index and you want to specify the index and -type used to index your document? No problem!! 
- -```javascript -var SupervisorSchema = new Schema({ - name: String -, department: String -}); - -SupervisorSchema.plugin(mongoosastic, {index: 'employees', type:'manager'}); - -var Supervisor = mongoose.model('supervisor', SupervisorSchema); - -``` From 848fd23ee9d1de9beb3919e656b9da3ecdf7f062 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 12:23:19 -0600 Subject: [PATCH 35/92] Update query interface --- lib/mongoosastic.js | 2 +- test/alternative-index-method-test.js | 6 ++-- test/bulk-test.js | 18 ++++++------ test/geo-test.js | 32 +++++++++------------ test/index-test.js | 40 +++++++++++---------------- test/search-features-test.js | 10 +++---- test/synchronize-test.js | 2 +- test/truncate-test.js | 6 ++-- 8 files changed, 48 insertions(+), 68 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index afd8d4d4..627ce097 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -203,7 +203,7 @@ module.exports = function Mongoosastic(schema, options){ var model = this , esQuery = { - body: query, + body: {query: query}, index: options.index || indexName, type: options.type || typeName } diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index bea6a820..5c4e9895 100644 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -33,7 +33,7 @@ describe('Index Method', function(){ doc.message = 'I know nodejitsu!'; doc.index(function(){ setTimeout(function(){ - Tweet.search({query: {query_string: {query: 'know'}}}, function(err, res){ + Tweet.search({query_string: {query: 'know'}}, function(err, res){ res.hits.hits[0]._source.message.should.eql('I know nodejitsu!'); done(); }); @@ -46,7 +46,7 @@ describe('Index Method', function(){ doc.message = 'I know taebo!'; doc.index({index: 'public_tweets'}, function(){ setTimeout(function(){ - Tweet.search({query: {query_string: {query: 'know'}}}, {index: 'public_tweets'}, function(err, res){ + Tweet.search({query_string: {query: 'know'}}, {index: 'public_tweets'}, function(err, res){ res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); @@ -59,7 +59,7 @@ describe('Index Method', function(){ doc.message = 'I know taebo!'; doc.index({index: 'public_tweets', type: 'utterings'}, function(){ setTimeout(function(){ - Tweet.search({query: {query_string: {query: 'know'}}}, {index: 'public_tweets', type: 'utterings'}, function(err, res){ + Tweet.search({query_string: {query: 'know'}}, {index: 'public_tweets', type: 'utterings'}, function(err, res){ res.hits.hits[0]._source.message.should.eql('I know taebo!'); done(); }); diff --git a/test/bulk-test.js b/test/bulk-test.js index f7dc243d..d03be08e 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -37,24 +37,22 @@ describe('Bulk mode', function() { new Book({ title: title }).save(cb); - }, function() { - setTimeout(done, 1200); - }); + }, done) }); before(function(done) { Book.findOne({ title: 'American Gods' }, function(err, book) { - book.remove(function() { - setTimeout(done, 1200); - }); + book.remove(done) }); }); it('should index all objects and support deletions too', function(done) { - Book.search({}, function(err, results) { - results.should.have.property('hits').with.property('total', 52); - done(); - }); + setTimeout(function() { + Book.search({match_all: {}}, function(err, results) { + results.should.have.property('hits').with.property('total', 52); + done(); + }); + }, 1500) }); }); diff --git a/test/geo-test.js b/test/geo-test.js index 7b09f26d..567ff413 100644 --- 
a/test/geo-test.js +++ b/test/geo-test.js @@ -98,9 +98,7 @@ describe('GeoTest', function(){ setTimeout(function(){ // ES request GeoModel.search({ - query: { - match_all: {} - } + match_all: {} }, {sort: "myId:asc"}, function(err, res){ if (err) throw err; res.hits.total.should.eql(2); @@ -126,9 +124,7 @@ describe('GeoTest', function(){ setTimeout(function(){ GeoModel.search({ - query: { - match_all: {} - } + match_all: {} }, {sort: "myId:asc"}, function(err, res){ if (err) throw err; res.hits.total.should.eql(2); @@ -146,16 +142,14 @@ describe('GeoTest', function(){ it('should be able to search points inside frames', function(done){ var geoQuery = { - query: { - filtered: { - "query": {"match_all": {}}, - "filter": { - "geo_shape": { - "frame": { - "shape": { - "type": "point", - "coordinates": [3,1] - } + filtered: { + "query": {"match_all": {}}, + "filter": { + "geo_shape": { + "frame": { + "shape": { + "type": "point", + "coordinates": [3,1] } } } @@ -168,18 +162,18 @@ describe('GeoTest', function(){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(2); - geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [1.5,2.5]; GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(1); res.hits.hits[0]._source.myId.should.eql(1); - geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [3,2]; + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [3,2]; GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(2); - geoQuery.query.filtered.filter.geo_shape.frame.shape.coordinates = [0,3]; + geoQuery.filtered.filter.geo_shape.frame.shape.coordinates = [0,3]; GeoModel.search(geoQuery,function(err, res){ if (err) throw err; res.hits.total.should.eql(0); diff --git a/test/index-test.js b/test/index-test.js index 9c786f74..fd9c6308 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -111,10 +111,8 @@ describe('indexing', function(){ it('should be able to execute a simple query', function(done){ Tweet.search({ - query: { - query_string: { - query: 'Riak' - } + query_string: { + query: 'Riak' } }, function(err, results) { results.hits.total.should.eql(1) @@ -125,10 +123,8 @@ describe('indexing', function(){ it('should be able to execute a simple query', function(done){ Tweet.search({ - query: { - query_string: { - query: 'jamescarr' - } + query_string: { + query: 'jamescarr' } }, function(err, results) { results.hits.total.should.eql(1) @@ -158,10 +154,8 @@ describe('indexing', function(){ tweet.remove(function(){ setTimeout(function(){ Tweet.search({ - query: { - query_string: { - query: 'shouldnt' - } + query_string: { + query: 'shouldnt' } }, function(err, res){ res.hits.total.should.eql(0); @@ -174,10 +168,8 @@ describe('indexing', function(){ tweet.on('es-removed', function(err, res){ setTimeout(function(){ Tweet.search({ - query: { - query_string: { - query: 'shouldnt' - } + query_string: { + query: 'shouldnt' } }, function(err, res){ res.hits.total.should.eql(0); @@ -226,14 +218,14 @@ describe('indexing', function(){ }); it('should only find models of type Tweet', function(done){ - Tweet.search({query: {query_string: {query: 'Dude'}}}, function(err, res){ + Tweet.search({query_string: {query: 'Dude'}}, function(err, res){ res.hits.total.should.eql(1); res.hits.hits[0]._source.user.should.eql('Dude'); done(); }); }); it('should only find models of type Talk', function(done){ 
- Talk.search({query: {query_string: {query: 'Dude'}}}, function(err, res){ + Talk.search({query_string: {query: 'Dude'}}, function(err, res){ res.hits.total.should.eql(1); res.hits.hits[0]._source.title.should.eql('Dude'); done(); @@ -251,7 +243,7 @@ describe('indexing', function(){ }); it('when gathering search results while respecting default hydrate options', function(done){ - Person.search({query: {query_string: {query: 'James'}}}, function(err, res) { + Person.search({query_string: {query: 'James'}}, function(err, res) { res.hits.hits[0].address.should.eql('Exampleville, MO'); res.hits.hits[0].name.should.eql('James Carr'); res.hits.hits[0].should.not.have.property('phone'); @@ -272,7 +264,7 @@ describe('indexing', function(){ }); it('should only return indexed fields', function(done){ - Talk.search({query: {query_string: {query: 'cool'}}}, function(err, res) { + Talk.search({query_string: {query: 'cool'}}, function(err, res) { res.hits.total.should.eql(1); var talk = res.hits.hits[0]._source; @@ -286,7 +278,7 @@ describe('indexing', function(){ }); it('should hydrate returned documents if desired', function(done){ - Talk.search({query: {query_string: {query: 'cool'}}}, {hydrate:true}, function(err, res) { + Talk.search({query_string: {query: 'cool'}}, {hydrate:true}, function(err, res) { res.hits.total.should.eql(1) var talk = res.hits.hits[0] @@ -311,7 +303,7 @@ describe('indexing', function(){ }); it('should only return indexed fields and have indexed sub-objects', function(done){ - Person.search({query: {query_string: {query: 'Bob'}}}, function(err, res) { + Person.search({query_string: {query: 'Bob'}}, function(err, res) { res.hits.hits[0].address.should.eql('Exampleville, MO'); res.hits.hits[0].name.should.eql('Bob Carr'); res.hits.hits[0].should.have.property('life'); @@ -326,7 +318,7 @@ describe('indexing', function(){ }); it('should allow extra query options when hydrating', function(done){ - Talk.search({query: {query_string: {query: 'cool'}}}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { + Talk.search({query_string: {query: 'cool'}}, {hydrate:true, hydrateOptions: {lean: true}}, function(err, res) { res.hits.total.should.eql(1) var talk = res.hits.hits[0] @@ -370,7 +362,7 @@ describe('indexing', function(){ }); var Bum = mongoose.model('bum', BumSchema); config.createModelAndEnsureIndex(Bum, {name:'Roger Wilson'}, function(){ - Bum.search({query: {query_string: {query: 'Wilson'}}}, function(err, results){ + Bum.search({query_string: {query: 'Wilson'}}, function(err, results){ results.hits.total.should.eql(1); done(); }); diff --git a/test/search-features-test.js b/test/search-features-test.js index 93ad2baa..993de91e 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -40,12 +40,10 @@ describe('Query DSL', function(){ describe('range', function(){ it('should be able to find within range', function(done){ Bond.search({ - query:{ - range: { - price:{ - from:20000 - , to: 30000 - } + range: { + price:{ + from:20000 + , to: 30000 } } }, function(err, res){ diff --git a/test/synchronize-test.js b/test/synchronize-test.js index 0a9f8251..45eeb25b 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -45,7 +45,7 @@ describe('Synchronize', function(){ stream.on('close', function(){ count.should.eql(53); setTimeout(function(){ - Book.search({query: {query_string: {query: 'American'}}}, function(err, results){ + Book.search({query_string: {query: 'American'}}, function(err, results){ 
results.hits.total.should.eql(2); done(); }); diff --git a/test/truncate-test.js b/test/truncate-test.js index 93cd106d..cc2dbc7f 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -42,10 +42,8 @@ describe('Truncate', function() { it('should be able to truncate all documents', function(done) { Dummy.esTruncate(function(err) { Dummy.search({ - query: { - query_string: { - query: 'Text1' - } + query_string: { + query: 'Text1' } }, function(err, results) { results.hits.total.should.eql(0); From 59b1ef61f872ad8a80af55fb82c236792b7f0b47 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 12:25:10 -0600 Subject: [PATCH 36/92] Add gitter badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 9ffb6624..bae5993c 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ [![Build Status](https://secure.travis-ci.org/mongoosastic/mongoosastic.png?branch=master)](http://travis-ci.org/mongoosastic/mongoosastic) [![NPM version](https://badge.fury.io/js/mongoosastic.svg)](http://badge.fury.io/js/mongoosastic) +[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/mongoosastic/mongoosastic?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatically index your models into [elasticsearch](http://www.elasticsearch.org/). From e9f1a87ce2be4235042f52f27938032b6fc2113e Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 12:27:17 -0600 Subject: [PATCH 37/92] formatting --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index bae5993c..628901f1 100644 --- a/README.md +++ b/README.md @@ -22,6 +22,7 @@ Mongoosastic is a [mongoose](http://mongoosejs.com/) plugin that can automatical - [Creating mappings on-demand](#creating-mappings-on-demand) - [Queries](#queries) - [Hydration](#hydration) + ## Installation ```bash From a465b9658afc8a35b863ea3bb3e891fc547b1228 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 12:28:31 -0600 Subject: [PATCH 38/92] significant version bump --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 44b8cfd9..483224e3 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. 
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "1.0.2", + "version": "2.0.0", "tags": [ "mongodb", "elastic search", From 33537341c29bf4e43a72355a49e399ea57455a2f Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 13:33:08 -0600 Subject: [PATCH 39/92] longer delay for bulk test --- test/bulk-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/bulk-test.js b/test/bulk-test.js index d03be08e..d742b6f0 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -52,7 +52,7 @@ describe('Bulk mode', function() { results.should.have.property('hits').with.property('total', 52); done(); }); - }, 1500) + }, 2000) }); }); From 516438f8c8dfca9f055d19f54bf94f9010bbadc3 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 13:51:15 -0600 Subject: [PATCH 40/92] updates for travis --- .travis.yml | 1 - test/bulk-test.js | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index d2b4c927..377e5bc7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,7 +3,6 @@ language: node_js node_js: - 0.11 - 0.10 - - 0.8 services: - mongodb diff --git a/test/bulk-test.js b/test/bulk-test.js index d742b6f0..031fd3a3 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -52,7 +52,7 @@ describe('Bulk mode', function() { results.should.have.property('hits').with.property('total', 52); done(); }); - }, 2000) + }, 3000) }); }); From fcc6a46aa152bf97737b34a55ad55d30ef5aa14f Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 30 Oct 2014 13:58:59 -0600 Subject: [PATCH 41/92] Add changelog --- CHANGELOG.md | 12 ++++++++++++ 1 file changed, 12 insertions(+) create mode 100644 CHANGELOG.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 00000000..6785832b --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,12 @@ +## 2.0.0 (2014-10-10) + +Features: + +- Moved to [official elasticsearch driver](https://github.com/elasticsearch/elasticsearch-js) + - Caused `search` api to conform closer to official driver + - Added options to searching +- Refactored bulk api +- Refreshed README.md +- Added CHANGELOG.md +- Added CONTRIBUTING.md +- Added LICENSE.md From 3a6ad9b9b0ae8a3e708d94daea3f209451a31ee9 Mon Sep 17 00:00:00 2001 From: Sascha Schwabbauer Date: Sun, 2 Nov 2014 00:14:01 +0100 Subject: [PATCH 42/92] 'protocol' and 'auth' options are ignored This fixes an issue, where the 'protocol' and 'auth' options were ignored. --- lib/mongoosastic.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 627ce097..61ef1beb 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -13,13 +13,15 @@ module.exports = function Mongoosastic(schema, options){ , _mapping = null , host = options && options.host ? options.host : 'localhost' , port = options && options.port ? options.port : 9200 - , esClient = new elasticsearch.Client({host: {host: host, port: port}}) + , protocol = options && options.protocol ? options.protocol : 'http' + , auth = options && options.auth ? 
options.auth : null + , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth}}) , bulk = options && options.bulk , bulkBuffer = [] , bulkTimeout setUpMiddlewareHooks(schema) - + /** * ElasticSearch Client */ From 8ef97d50f7e73da84b874f6beac647eac0be8037 Mon Sep 17 00:00:00 2001 From: taterbase Date: Sat, 1 Nov 2014 21:46:32 -0600 Subject: [PATCH 43/92] bump package.json --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 483224e3..16958c7d 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.0", + "version": "2.0.1", "tags": [ "mongodb", "elastic search", From 1618f4b62e9b6571fc9adbf5a4a7fc99f84cfb09 Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 4 Nov 2014 17:20:16 -0700 Subject: [PATCH 44/92] Treat null query like undefined --- lib/mongoosastic.js | 3 +++ package.json | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 61ef1beb..ce33d458 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -203,6 +203,9 @@ module.exports = function Mongoosastic(schema, options){ options = {} } + if (query === null) + query = undefined + var model = this , esQuery = { body: {query: query}, diff --git a/package.json b/package.json index 16958c7d..9adce9bc 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.1", + "version": "2.0.2", "tags": [ "mongodb", "elastic search", From a1ee33ad334b333076a987837fdcc7e31e139685 Mon Sep 17 00:00:00 2001 From: taterbase Date: Tue, 4 Nov 2014 17:39:53 -0700 Subject: [PATCH 45/92] remove unstable node testing, broken for now --- .travis.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 377e5bc7..07b5d92c 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,6 @@ language: node_js node_js: - - 0.11 - 0.10 services: From feb2060dfcceb7fcb2e5cdf7798328cb7a8a0a36 Mon Sep 17 00:00:00 2001 From: b96705008 Date: Sun, 9 Nov 2014 15:08:56 +0800 Subject: [PATCH 46/92] get rid of "continue" when encounter objectid --- lib/mapping-generator.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index 73b68568..ed384126 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -52,9 +52,9 @@ function getMapping(cleanTree, prefix) { } // If it is a objectid make it a string. - if(value.type === 'objectid'){ + if (value.type === 'objectid') { mapping[field].type = 'string'; - continue; + // do not continue here so we can handle other es_ options } //If indexing a number, and no es_type specified, default to double From 622239d13c340d69a47dc6ebdc0d6e4c2caf6373 Mon Sep 17 00:00:00 2001 From: taterbase Date: Sun, 9 Nov 2014 16:30:19 -0700 Subject: [PATCH 47/92] Update semver --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 9adce9bc..2b3b0062 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. 
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.2", + "version": "2.0.3", "tags": [ "mongodb", "elastic search", From eee48de8cd657c3dbe492fe005247fcb1bc0cdcd Mon Sep 17 00:00:00 2001 From: Ignacio Lago Date: Mon, 10 Nov 2014 12:09:59 +0100 Subject: [PATCH 48/92] Serialize on bulk calls. Serialize: this = full model. --- lib/mongoosastic.js | 8 +++++++- lib/serialize.js | 4 ++-- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index ce33d458..f92b887c 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -67,10 +67,16 @@ module.exports = function Mongoosastic(schema, options){ , type = options.type || typeName if(bulk) { + /** + * To serialize in bulk it needs the _id + */ + var serialModel = serialize(this, mapping); + serialModel._id = this._id; + bulkIndex({ index: index, type: type, - model: this + model: serialModel }) setImmediate(cb) } else { diff --git a/lib/serialize.js b/lib/serialize.js index e0987354..55300db6 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -3,7 +3,7 @@ module.exports = serialize; function _serializeObject(object, mapping) { var serialized = {}; for (var field in mapping.properties) { - var val = serialize(object[field], mapping.properties[field]); + var val = serialize.call(object, object[field], mapping.properties[field]); if (val !== undefined) { serialized[field] = val; } @@ -30,7 +30,7 @@ function serialize(model, mapping) { } else { if (mapping.cast && typeof(mapping.cast) !== 'function') throw new Error('es_cast must be a function'); - model = mapping.cast ? mapping.cast(model) : model; + model = mapping.cast ? mapping.cast.call(this, model) : model; if (typeof model === 'object' && model !== null) { var name = model.constructor.name; if (name === 'ObjectID') { From c065446b4ea0f3d03cdd634fc7bd63399f96a6bc Mon Sep 17 00:00:00 2001 From: taterbase Date: Mon, 10 Nov 2014 11:06:36 -0700 Subject: [PATCH 49/92] Update semver --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 2b3b0062..cd940f0b 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.3", + "version": "2.0.4", "tags": [ "mongodb", "elastic search", From 93d00fb9260b694ee0163679c5baffa76e1e31c7 Mon Sep 17 00:00:00 2001 From: Nicolas McCurdy Date: Thu, 20 Nov 2014 19:39:26 -0500 Subject: [PATCH 50/92] In documentation files, rename "truncate" to "esTruncate" --- README.md | 4 ++-- test/truncate-test.js | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 628901f1..f86fdb04 100644 --- a/README.md +++ b/README.md @@ -215,10 +215,10 @@ mongodb. Use save for that. ### Truncating an index -The static method truncate will deleted all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. +The static method `esTruncate` will delete all documents from the associated index. This method combined with synchronise can be usefull in case of integration tests for example when each test case needs a cleaned up index in ElasticSearch. 
```javascript -GarbageModel.truncate(function(err){...}); +GarbageModel.esTruncate(function(err){...}); ``` ## Mapping diff --git a/test/truncate-test.js b/test/truncate-test.js index cc2dbc7f..da236202 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -38,7 +38,7 @@ describe('Truncate', function() { after(function(done) { Dummy.remove(done); }); - describe('truncate', function() { + describe('esTruncate', function() { it('should be able to truncate all documents', function(done) { Dummy.esTruncate(function(err) { Dummy.search({ From d4636e3ba66b1c4054c5620543703ad705bec349 Mon Sep 17 00:00:00 2001 From: taterbase Date: Thu, 20 Nov 2014 19:29:40 -0700 Subject: [PATCH 51/92] Update semver --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index cd940f0b..46ce90ef 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.4", + "version": "2.0.5", "tags": [ "mongodb", "elastic search", From 098d5c0ae3e4ee5604ab260a44b0bea46af22f4a Mon Sep 17 00:00:00 2001 From: Christophe Wagner Date: Wed, 10 Dec 2014 18:24:35 +0100 Subject: [PATCH 52/92] add settings when index is created --- lib/mongoosastic.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index f92b887c..ef7ae42d 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -376,7 +376,7 @@ function createMappingIfNotPresent(options, cb) { body: completeMapping }, cb) } else { - client.indices.create({index: indexName}, function(err) { + client.indices.create({index: indexName, body: settings}, function(err) { if (err) return cb(err) From a1c25990cca9717497c0e706d7ac64b5ed204819 Mon Sep 17 00:00:00 2001 From: taterbase Date: Wed, 10 Dec 2014 18:06:40 -0700 Subject: [PATCH 53/92] Update semver --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 46ce90ef..ed595417 100644 --- a/package.json +++ b/package.json @@ -2,7 +2,7 @@ "author": "James R. Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", - "version": "2.0.5", + "version": "2.0.6", "tags": [ "mongodb", "elastic search", From f2a5e2ad66e2b16989849440763c60189a117457 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 20:46:24 +0800 Subject: [PATCH 54/92] API changed Method of Register the plugin that has changed,and you can make global search. --- lib/mongoosastic.js | 1035 ++++++++++++++++++++++++------------------- 1 file changed, 586 insertions(+), 449 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index ef7ae42d..91c7a2af 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -1,460 +1,597 @@ var elasticsearch = require('elasticsearch') - , generator = new(require('./mapping-generator')) - , serialize = require('./serialize') - , events = require('events') - , nop = require('nop') - -module.exports = function Mongoosastic(schema, options){ - var mapping = getMapping(schema) - , indexName = options && options.index - , typeName = options && options.type - , alwaysHydrate = options && options.hydrate - , defaultHydrateOptions = options && options.hydrateOptions - , _mapping = null - , host = options && options.host ? 
options.host : 'localhost' - , port = options && options.port ? options.port : 9200 - , protocol = options && options.protocol ? options.protocol : 'http' - , auth = options && options.auth ? options.auth : null - , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth}}) - , bulk = options && options.bulk - , bulkBuffer = [] - , bulkTimeout - - setUpMiddlewareHooks(schema) - - /** - * ElasticSearch Client - */ - schema.statics.esClient = esClient - - /** - * Create the mapping. Takes an optionnal settings parameter and a callback that will be called once - * the mapping is created - - * @param settings Object (optional) - * @param callback Function - */ - schema.statics.createMapping = function(settings, cb) { - if(arguments.length < 2) { - cb = arguments[0] || nop - settings = undefined - } - - setIndexNameIfUnset(this.modelName) - - createMappingIfNotPresent({ - client: esClient, - indexName: indexName, - typeName: typeName, - schema: schema, - settings: settings - }, cb) - } - - /** - * @param options Object (optional) - * @param callback Function - */ - schema.methods.index = function(options, cb){ - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - setIndexNameIfUnset(this.constructor.modelName) - - var index = options.index || indexName - , type = options.type || typeName - - if(bulk) { - /** - * To serialize in bulk it needs the _id - */ - var serialModel = serialize(this, mapping); - serialModel._id = this._id; - - bulkIndex({ - index: index, - type: type, - model: serialModel - }) - setImmediate(cb) - } else { - esClient.index({ - index: index, - type: type, - id: this._id.toString(), - body: serialize(this, mapping) - }, cb) - } - } - - /** - * Unset elastic search index - * @param options - (optional) options for unIndex - * @param callback - callback when unIndex is complete - */ - schema.methods.unIndex = function(options, cb){ - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - setIndexNameIfUnset(this.constructor.modelName) - - options.index = options.index || indexName - options.type = options.type || typeName - options.model = this - options.client = esClient - options.tries = 3 - - if(bulk) - bulkDelete(options, cb) - else - deleteByMongoId(options, cb) - } - - /** - * Delete all documents from a type/index - * @param options - (optional) specify index/type - * @param callback - callback when truncation is complete - */ - schema.statics.esTruncate = function(options, cb) { - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - var index = options.index || indexName - , type = options.type || typeName - - esClient.deleteByQuery({ - index: index, - type: type, - body: { - query: { - match_all: {} - } - } - }, cb) - } - - /** - * Synchronize an existing collection - * - * @param query - query for documents you want to synchronize - */ - schema.statics.synchronize = function(query){ - var em = new events.EventEmitter() - , readyToClose - , closeValues = [] - , counter = 0 - , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} - - //Set indexing to be bulk when synchronizing to make synchronizing faster - bulk = bulk || { - delay: 1000, - size: 1000 - } - - query = query || {} - - setIndexNameIfUnset(this.modelName) - - var stream = this.find(query).stream() - - stream.on('data', function(doc){ - counter++ - doc.save(function(err){ - if (err) - return em.emit('error', err) - - doc.on('es-indexed', function(err, doc){ - counter-- - 
if(err){ - em.emit('error', err) - }else{ - em.emit('data', null, doc) - } - }) - }) - }) - - stream.on('close', function(a, b){ - closeValues = [a, b] - var closeInterval = setInterval(function() { - if (counter === 0 && bulkBuffer.length === 0) { - clearInterval(closeInterval) - close() - } - }, 1000) - }) - - stream.on('error', function(err){ - em.emit('error', err) - }) - - return em - } - /** - * ElasticSearch search function - * - * @param query - query object to perform search with - * @param options - (optional) special search options, such as hydrate - * @param callback - callback called with search results - */ - schema.statics.search = function(query, options, cb){ - if (arguments.length === 2) { - cb = arguments[1] - options = {} - } - - if (query === null) - query = undefined - - var model = this - , esQuery = { - body: {query: query}, - index: options.index || indexName, - type: options.type || typeName - } - - Object.keys(options).forEach(function(opt) { - if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) - esQuery[opt] = options[opt] - }) - - setIndexNameIfUnset(model.modelName) - - esClient.search(esQuery, function(err, res){ - if(err){ - cb(err) - } else { - if (alwaysHydrate || options.hydrate) - hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb) - else - cb(null, res) - } - }) - } - - function bulkDelete(options, cb) { - bulkAdd({ - delete: { - _index: options.index || indexName, - _type: options.type || typeName, - _id: options.model._id.toString() - } - }) - cb() - } - - function bulkIndex(options) { - bulkAdd({ - index: { - _index: options.index || indexName, - _type: options.type || typeName, - _id: options.model._id.toString() - } - }) - bulkAdd(options.model) - } - - function clearBulkTimeout() { - clearTimeout(bulkTimeout) - bulkTimeout = undefined - } - - function bulkAdd(instruction) { - bulkBuffer.push(instruction) - - //Return because we need the doc being indexed - //Before we start inserting - if (instruction.index && instruction.index._index) - return - - if(bulkBuffer.length >= (bulk.size || 1000)) { - schema.statics.flush() - clearBulkTimeout() - } else if (bulkTimeout === undefined){ - bulkTimeout = setTimeout(function(){ - schema.statics.flush() - clearBulkTimeout() - }, bulk.delay || 1000) - } - } - - schema.statics.flush = function(cb){ - cb = cb || function(err) { if (err) console.log(err) } - - esClient.bulk({ - body: bulkBuffer - }, function(err) { - cb(err) - }) - bulkBuffer = [] - } - - schema.statics.refresh = function(options, cb){ - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - setIndexNameIfUnset(this.modelName) - esClient.indices.refresh({ - index: options.index || indexName - }, cb) - } - - function setIndexNameIfUnset(model){ - var modelName = model.toLowerCase() - if(!indexName){ - indexName = modelName + "s" - } - if(!typeName){ - typeName = modelName - } - } - - - /** - * Use standard Mongoose Middleware hooks - * to persist to Elasticsearch - */ - function setUpMiddlewareHooks(schema) { - schema.post('remove', function(){ - setIndexNameIfUnset(this.constructor.modelName) - - var options = { - index: indexName, - type: typeName, - tries: 3, - model: this, - client: esClient - } - - if(bulk) { - bulkDelete(options, nop) - } else { - deleteByMongoId(options, nop) - } - }) - - /** - * Save in elastic search on save. 
- */ - schema.post('save', function(){ - var model = this - - model.index(function(err, res){ - model.emit('es-indexed', err, res) - }) - }) - } + , generator = new (require('./mapping-generator')) + , serialize = require('./serialize') + , events = require('events') + , mongoose = require('mongoose') + , async = require('async') + , nop = require('nop') + +function Mongoosastic(schema, options) { + var mapping = getMapping(schema) + , indexName = options && options.index + , typeName = options && options.type + , alwaysHydrate = options && options.hydrate + , defaultHydrateOptions = options && options.hydrateOptions + , _mapping = null + , host = options.host + , port = options.port + , protocol = options.protocol + , auth = options.auth + , bulk = options.bulk + , modelFieldName = options.modelFieldName + , bulkBuffer = [] + , bulkTimeout + + mapping.properties,es_name = { + type:"string" + }; + + this.esClient = this.esClient || new elasticsearch.Client({ + host: { + host: host, + port: port, + protocol: protocol, + auth: auth + } + }); + var esClient = this.esClient; + setUpMiddlewareHooks(schema) + + /** + * ElasticSearch Client + */ + schema.statics.esClient = esClient + + /** + * Create the mapping. Takes an optionnal settings parameter and a callback that will be called once + * the mapping is created + + * @param settings Object (optional) + * @param callback Function + */ + schema.statics.createMapping = function (settings, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + settings = undefined + } + + setIndexNameIfUnset(this.modelName) + + createMappingIfNotPresent({ + client: esClient, + indexName: indexName, + typeName: typeName, + schema: schema, + settings: settings + }, cb) + } + + /** + * @param options Object (optional) + * @param callback Function + */ + schema.methods.index = function (options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + //自动匹配index与type + setIndexNameIfUnset(this.constructor.modelName) + + var index = options.index || indexName + , type = options.type || typeName + ,serialModel = serialize(this, mapping); + if (bulk) { + /** + * To serialize in bulk it needs the _id + */ + serialModel._id = this._id; + bulkIndex({ + index: index, + type: type, + model: serialModel + }) + setImmediate(cb) + } else { + serialModel[modelFieldName] = this.constructor.modelName; + esClient.index({ + index: index, + type: type, + id: this._id.toString(), + body: serialModel + }, cb) + } + } + + /** + * Unset elastic search index + * @param options - (optional) options for unIndex + * @param callback - callback when unIndex is complete + */ + schema.methods.unIndex = function (options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + setIndexNameIfUnset(this.constructor.modelName) + + options.index = options.index || indexName + options.type = options.type || typeName + options.model = this + options.client = esClient + options.tries = 3 + + if (bulk) + bulkDelete(options, cb) + else + deleteByMongoId(options, cb) + } + + /** + * Delete all documents from a type/index + * @param options - (optional) specify index/type + * @param callback - callback when truncation is complete + */ + schema.statics.esTruncate = function (options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + var index = options.index || indexName + , type = options.type || typeName + + esClient.deleteByQuery({ + index: index, + type: type, + body: { + query: { + match_all: {} + } 
+ } + }, cb) + } + + /** + * Synchronize an existing collection + * + * @param query - query for documents you want to synchronize + */ + schema.statics.synchronize = function (query) { + var em = new events.EventEmitter() + , readyToClose + , closeValues = [] + , counter = 0 + , close = function () { + em.emit.apply(em, ['close'].concat(closeValues)) + } + + //Set indexing to be bulk when synchronizing to make synchronizing faster + bulk = bulk || { + delay: 1000, + size: 1000 + } + + query = query || {} + + setIndexNameIfUnset(this.modelName) + + var stream = this.find(query).stream() + + stream.on('data', function (doc) { + counter++ + doc.save(function (err) { + if (err) + return em.emit('error', err) + + doc.on('es-indexed', function (err, doc) { + counter-- + if (err) { + em.emit('error', err) + } else { + em.emit('data', null, doc) + } + }) + }) + }) + + stream.on('close', function (a, b) { + closeValues = [a, b] + var closeInterval = setInterval(function () { + if (counter === 0 && bulkBuffer.length === 0) { + clearInterval(closeInterval) + close() + } + }, 1000) + }) + + stream.on('error', function (err) { + em.emit('error', err) + }) + + return em + } + /** + * ElasticSearch search function + * + * @param query - query object to perform search with + * @param options - (optional) special search options, such as hydrate + * @param callback - callback called with search results + */ + schema.statics.search = function (query, options, cb) { + if (arguments.length === 2) { + cb = arguments[1] + options = {} + } + + if (query === null) + query = undefined + + setIndexNameIfUnset(this.modelName) + + var model = this + , esQuery = { + body: {query: query}, + index: options.index || indexName, + type: options.type || typeName + } + + + Object.keys(options).forEach(function (opt) { + if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) + esQuery[opt] = options[opt] + }) + + esClient.search(esQuery, function (err, res) { + if (err) { + cb(err) + } else { + if (alwaysHydrate || options.hydrate) { + options.hydrateOptions = options.hydrateOptions || defaultHydrateOptions || {} + options.modelFieldName = modelFieldName + hydrate(res, model, options, cb) + } else { + cb(null, res) + } + } + }) + } + + function bulkDelete(options, cb) { + bulkAdd({ + delete: { + _index: options.index || indexName, + _type: options.type || typeName, + _id: options.model._id.toString() + } + }) + cb() + } + + function bulkIndex(options) { + bulkAdd({ + index: { + _index: options.index || indexName, + _type: options.type || typeName, + _id: options.model._id.toString() + } + }) + bulkAdd(options.model) + } + + function clearBulkTimeout() { + clearTimeout(bulkTimeout) + bulkTimeout = undefined + } + + function bulkAdd(instruction) { + bulkBuffer.push(instruction) + + //Return because we need the doc being indexed + //Before we start inserting + if (instruction.index && instruction.index._index) + return + + if (bulkBuffer.length >= (bulk.size || 1000)) { + schema.statics.flush() + clearBulkTimeout() + } else if (bulkTimeout === undefined) { + bulkTimeout = setTimeout(function () { + schema.statics.flush() + clearBulkTimeout() + }, bulk.delay || 1000) + } + } + + schema.statics.flush = function (cb) { + cb = cb || function (err) { + if (err) console.log(err) + } + + esClient.bulk({ + body: bulkBuffer + }, function (err) { + cb(err) + }) + bulkBuffer = [] + } + + schema.statics.refresh = function (options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + 
setIndexNameIfUnset(this.modelName) + esClient.indices.refresh({ + index: options.index || indexName + }, cb) + } + + function setIndexNameIfUnset(model) { + var modelName = model.toLowerCase() + if (!indexName) { + indexName = modelName + "s" + } + if (!typeName) { + typeName = modelName + } + } + + + /** + * Use standard Mongoose Middleware hooks + * to persist to Elasticsearch + */ + function setUpMiddlewareHooks(schema) { + schema.post('remove', function () { + setIndexNameIfUnset(this.constructor.modelName) + + var options = { + index: indexName, + type: typeName, + tries: 3, + model: this, + client: esClient + } + + if (bulk) { + bulkDelete(options, nop) + } else { + deleteByMongoId(options, nop) + } + }) + + /** + * Save in elastic search on save. + */ + schema.post('save', function () { + var model = this + + model.index(function (err, res) { + model.emit('es-indexed', err, res) + }) + }) + } } + +module.exports = { + connect: function (options) { + var host = options && options.host || 'localhost', + port = options && options.port || 9200, + protocol = options && options.protocol || 'http', + auth = options && options.auth ? options.auth : null; + + this.esClient = this.esClient || new elasticsearch.Client({ + host: { + host: host, + port: port, + protocol: protocol, + auth: auth + } + }); + }, + /** + * 全局性的搜索,可以搜索多索引,多类型,同时也能hydrate + */ + search:function(query, options, cb){ + var _this = this; + if (arguments.length === 2) { + cb = arguments[1] + options = {} + } + + if (query === null) + query = undefined + + var model = this + , esQuery = { + body: {query: query}, + index: options.index || "", + type: options.type || "" + } + + + Object.keys(options).forEach(function (opt) { + if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) + esQuery[opt] = options[opt] + }) + + esClient.search(esQuery, function (err, res) { + if (err) { + cb(err) + } else { + if (options.hydrate) { + options.modelFieldName = options.modelFieldName || _this.options.modelFieldName + hydrate(res,null,options || {}, cb) + } else { + cb(null, res) + } + } + }) + }, + plugin: function(options){ + options.host = options && options.host ? options.host : 'localhost' + options.protocol = options && options.protocol ? options.protocol : 'http' + options.auth = options && options.auth ? options.auth : null + options.bulk = options && options.bulk + options.modelFieldName = options && options.modelFieldName ? 
options.modelFieldName : "__model_name__" + this.options = options; + return function(schema){ + return Mongoosastic(schema,options); + } + } +}; + + function createMappingIfNotPresent(options, cb) { - var client = options.client - , indexName = options.indexName - , typeName = options.typeName - , schema = options.schema - , settings = options.settings - - generator.generateMapping(schema, function(err, mapping) { - var completeMapping = {} - completeMapping[typeName] = mapping - client.indices.exists({index: indexName}, function(err, exists) { - if (err) - return cb(err) - - if (exists) { - client.indices.putMapping({ - index: indexName, - type: typeName, - body: completeMapping - }, cb) - } else { - client.indices.create({index: indexName, body: settings}, function(err) { - if (err) - return cb(err) - - client.indices.putMapping({ - index: indexName, - type: typeName, - body: completeMapping - }, cb) - }) - } - }) - }) + var client = options.client + , indexName = options.indexName + , typeName = options.typeName + , schema = options.schema + , settings = options.settings + + generator.generateMapping(schema, function (err, mapping) { + var completeMapping = {} + completeMapping[typeName] = mapping + client.indices.exists({index: indexName}, function (err, exists) { + if (err) + return cb(err) + + if (exists) { + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb) + } else { + client.indices.create({index: indexName, body: settings}, function (err) { + if (err) + return cb(err) + + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb) + }) + } + }) + }) } - -function hydrate(res, model, options, cb){ - var results = res.hits - , resultsMap = {} - , ids = results.hits.map(function(a, i){ - resultsMap[a._id] = i - return a._id - }) - , query = model.find({_id:{$in:ids}}) - - // Build Mongoose query based on hydrate options - // Example: {lean: true, sort: '-name', select: 'address name'} - Object.keys(options).forEach(function(option){ - query[option](options[option]) - }) - - query.exec(function(err, docs){ - if(err) { - return cb(err) - } else { - var hits = [] - - docs.forEach(function(doc) { - var i = resultsMap[doc._id] - hits[i] = doc - }) - results.hits = hits - res.hits = results - cb(null, res) - } - }) +/* +function hydrate(res, model, options, cb) { + var results = res.hits + , resultsMap = {} + , ids = results.hits.map(function (a, i) { + resultsMap[a._id] = i + return a._id + }) + , query = model.find({_id: {$in: ids}}) + + // Build Mongoose query based on hydrate options + // Example: {lean: true, sort: '-name', select: 'address name'} + Object.keys(options).forEach(function (option) {//想不到这个可以支持populate,真让我非常开心啊 + query[option](options[option]) + }) + + query.exec(function (err, docs) { + if (err) { + return cb(err) + } else { + var hits = [] + + docs.forEach(function (doc) { + var i = resultsMap[doc._id] //按照es输出的顺序来从新组合数据 + hits[i] = doc + }) + results.hits = hits + res.hits = results + cb(null, res) + } + }) +} +*/ +function hydrate(res,model, options, cb) { + var results = res.hits + , resultsMap = {} + , ids = {} + , querys = {} + , modelName = "" + , hits = [] + , modelFieldName = options.modelFieldName + + results.hits.forEach(function(a,i){ + var source = a._source; + if(source[modelFieldName]) { + resultsMap[source[modelFieldName]] = resultsMap[source[modelFieldName]] || {}; + ids[source[modelFieldName]] = ids[source[modelFieldName]] || []; + + + 
resultsMap[source[modelFieldName]][a._id] = i;//记录排序索引 + ids[source[modelFieldName]].push(a._id); + } + }); + + async.eachSeries(Object.keys(resultsMap),function(modelName,callback){ + model = mongoose.model(modelName); + querys[modelName] = model.find({_id:{$in:ids[modelName]}}); + Object.keys(options.hydrateOptions).forEach(function (option) { + querys[modelName][option](options.hydrateOptions[option]) + }) + querys[modelName].exec(function(err, docs){ + if (err) { + return cb(err) + } else { + docs.forEach(function (doc) { + var i = resultsMap[modelName][doc._id] + hits[i] = doc + }); + callback(); + } + }) + },function(){ + results.hits = hits + res.hits = results + cb(null, res) + }); } -function getMapping(schema){ - var retMapping = {} - generator.generateMapping(schema, function(err, mapping){ - retMapping = mapping - }) - return retMapping +function getMapping(schema) { + var retMapping = {} + generator.generateMapping(schema, function (err, mapping) { + retMapping = mapping + }) + return retMapping } -function deleteByMongoId(options, cb){ - var index = options.index - , type = options.type - , client = options.client - , model = options.model - , tries = options.tries - - client.delete({ - index: index, - type: type, - id: model._id.toString() - }, function(err, res){ - if(err && err.message.indexOf('404') > -1){ - setTimeout(function(){ - if(tries <= 0) { - return cb(err) - } else { - options.tries = --tries - deleteByMongoId(options, cb) - } - }, 500) - }else{ - model.emit('es-removed', err, res) - cb(err) - } - }) +function deleteByMongoId(options, cb) { + var index = options.index + , type = options.type + , client = options.client + , model = options.model + , tries = options.tries + + client.delete({ + index: index, + type: type, + id: model._id.toString() + }, function (err, res) { + if (err && err.message.indexOf('404') > -1) { + setTimeout(function () { + if (tries <= 0) { + return cb(err) + } else { + options.tries = --tries + deleteByMongoId(options, cb) + } + }, 500) + } else { + model.emit('es-removed', err, res) + cb(err) + } + }) } From 72a371394763a0ea2821107e691a364c3f46e085 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 20:57:39 +0800 Subject: [PATCH 55/92] Update README.md --- README.md | 34 ++++++++++++++++++++++++++++++++-- 1 file changed, 32 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index f86fdb04..57df5de3 100644 --- a/README.md +++ b/README.md @@ -60,8 +60,13 @@ var User = new Schema({ , email: String , city: String }) +var options = { + host:"localhost", + port:9200, + modelFieldName:'__model_name__' //This property is used to search for the model, in a multi-index and more types of queries,default value is __model_name__ +}; -User.plugin(mongoosastic) +User.plugin(mongoosastic.plugin(options)) ``` This will by default simply use the pluralization of the model name as the index @@ -83,7 +88,7 @@ var User = new Schema({ , city: String }) -User.plugin(mongoosastic) +User.plugin(mongoosastic.plugin()) ``` In this case only the name field @@ -485,3 +490,28 @@ var User = new Schema({ User.plugin(mongoosastic, {hydrate:true, hydrateOptions: {lean: true}}) ``` + + +###Populating + +```javascript +var mongoosastic = require('mongoosastic'); + +mongoosastic.search({ + match_all:{} +},{ + index:["articles","videos","musics","gallerys"], + type:["article","video","music","gallery"], + hydrate:true, + hydrateOptions:{ + populate:"tags catgories ..." 
+ } +}, function(err, results) { + if(err){ + console.log(err); + } else { + console.log(results.hits); + } +}); + +``` From f16503a5b4820f5757f429e0697c94955650dd84 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 20:59:09 +0800 Subject: [PATCH 56/92] Update README.md --- README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 57df5de3..c923e52f 100644 --- a/README.md +++ b/README.md @@ -145,7 +145,7 @@ var User = new Schema({ , comments: {type:[Comment], es_indexed:true} }) -User.plugin(mongoosastic) +User.plugin(mongoosastic.plugin()) ``` @@ -188,7 +188,7 @@ You can also specify `bulk` options with mongoose which will utilize elasticsear Mongoosastic will wait 1 second (or specified delay) until it has 1000 docs (or specified size) and then perform bulk indexing. ```javascript -BookSchema.plugin(mongoosastic, { +BookSchema.plugin(mongoosastic.plugin(), { bulk: { size: 10, // preferred number of docs to bulk index delay: 100 //milliseconds to wait for enough docs to meet size constraint @@ -488,7 +488,7 @@ var User = new Schema({ , city: String }) -User.plugin(mongoosastic, {hydrate:true, hydrateOptions: {lean: true}}) +User.plugin(mongoosastic.plugin(), {hydrate:true, hydrateOptions: {lean: true}}) ``` From 4cc52bec8da9215a12fcc1db6cc0d5bd9e9345aa Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:22:48 +0800 Subject: [PATCH 57/92] Update tweet.js --- test/models/tweet.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/models/tweet.js b/test/models/tweet.js index f991c3ad..50105f39 100644 --- a/test/models/tweet.js +++ b/test/models/tweet.js @@ -10,6 +10,6 @@ var TweetSchema = new Schema({ , message: String }); -TweetSchema.plugin(mongoosastic) +TweetSchema.plugin(mongoosastic.plugin()) module.exports = mongoose.model('Tweet', TweetSchema); From 6cc7ff68370430875f935488fd4d437f99f1ca81 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:23:40 +0800 Subject: [PATCH 58/92] Update package.json --- package.json | 37 +++++++++++++++++++++++++++++++++++-- 1 file changed, 35 insertions(+), 2 deletions(-) diff --git a/package.json b/package.json index ed595417..9bbe6c75 100644 --- a/package.json +++ b/package.json @@ -1,5 +1,9 @@ { - "author": "James R. Carr (http://blog.james-carr.org)", + "author": { + "name": "James R. 
Carr", + "email": "james.r.carr@gmail.com", + "url": "http://blog.james-carr.org" + }, "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", "version": "2.0.6", @@ -32,5 +36,34 @@ }, "scripts": { "test": "mocha -R spec -t 20000 -b" - } + }, + "gitHead": "a1c25990cca9717497c0e706d7ac64b5ed204819", + "bugs": { + "url": "https://github.com/mongoosastic/mongoosastic/issues" + }, + "homepage": "https://github.com/mongoosastic/mongoosastic", + "_id": "mongoosastic@2.0.6", + "_shasum": "8c86d2e396cf110abbcc9ef3e041818a1b4f9788", + "_from": "mongoosastic@", + "_npmVersion": "2.0.0", + "_npmUser": { + "name": "taterbase", + "email": "taterbase@gmail.com" + }, + "maintainers": [ + { + "name": "jamescarr", + "email": "james.r.carr@gmail.com" + }, + { + "name": "taterbase", + "email": "shankga@gmail.com" + } + ], + "dist": { + "shasum": "8c86d2e396cf110abbcc9ef3e041818a1b4f9788", + "tarball": "http://registry.npmjs.org/mongoosastic/-/mongoosastic-2.0.6.tgz" + }, + "directories": {}, + "_resolved": "https://registry.npmjs.org/mongoosastic/-/mongoosastic-2.0.6.tgz" } From e5712cfe9d2acd728f60d759563a9a960cb53c9e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:24:12 +0800 Subject: [PATCH 59/92] Update search-features-test.js --- test/search-features-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/search-features-test.js b/test/search-features-test.js index 993de91e..9bfc087b 100644 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -12,7 +12,7 @@ var BondSchema = new Schema({ , price: Number }); -BondSchema.plugin(mongoosastic); +BondSchema.plugin(mongoosastic.plugin()); var Bond = mongoose.model('Bond', BondSchema); From cf5d89ef026c7677d7a4bf24f2c40c6518f33b99 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:24:52 +0800 Subject: [PATCH 60/92] Update geo-test.js --- test/geo-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/geo-test.js b/test/geo-test.js index 567ff413..31a888df 100644 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -31,7 +31,7 @@ describe('GeoTest', function(){ } }); - GeoSchema.plugin(mongoosastic); + GeoSchema.plugin(mongoosastic.plugin()); GeoModel = mongoose.model('geodoc', GeoSchema); GeoModel.createMapping(function(err, mapping){ From 8820f488247376ee0811760570c6c7216020f5ed Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:25:17 +0800 Subject: [PATCH 61/92] Update bulk-test.js --- test/bulk-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/bulk-test.js b/test/bulk-test.js index 031fd3a3..b581ba91 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -9,7 +9,7 @@ var mongoose = require('mongoose'), var BookSchema = new Schema({ title: String }); -BookSchema.plugin(mongoosastic, { +BookSchema.plugin(mongoosastic.plugin(), { bulk: { size: 10, delay: 100 From bb8cbe4cf917c753ac8b79581fe657323fd2b2d2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:26:01 +0800 Subject: [PATCH 62/92] Update boost-field-test.js --- test/boost-field-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/boost-field-test.js b/test/boost-field-test.js index 26b8290c..70b84b86 100644 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -14,7 +14,7 @@ var TweetSchema = new Schema({ , title: {type:String, es_boost:2.0} }); 
-TweetSchema.plugin(mongoosastic); +TweetSchema.plugin(mongoosastic.plugin()); var BlogPost = mongoose.model('BlogPost', TweetSchema); describe('Add Boost Option Per Field', function(){ From 0733fe7308d91820171ca1d5eedcf0c1dcd206f4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:27:19 +0800 Subject: [PATCH 63/92] Update index-test.js --- test/index-test.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/index-test.js b/test/index-test.js index fd9c6308..9a1ca171 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -15,7 +15,7 @@ var TalkSchema = new Schema({ , abstract: {type:String, es_indexed:true} , bio: String }); -TalkSchema.plugin(mongoosastic) +TalkSchema.plugin(mongoosastic.plugin()) var Talk = mongoose.model("Talk", TalkSchema); @@ -356,7 +356,7 @@ describe('indexing', function(){ var BumSchema = new Schema({ name: String }); - BumSchema.plugin(mongoosastic, { + BumSchema.plugin(mongoosastic.plugin(), { index: 'ms_sample' , type: 'bum' }); From 30ae9633b2568fd92156d8267f5982ae815f5676 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:27:36 +0800 Subject: [PATCH 64/92] Update index-test.js --- test/index-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/index-test.js b/test/index-test.js index 9a1ca171..55c71dc2 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -28,7 +28,7 @@ var PersonSchema = new Schema({ , died: {type: Number, es_indexed:true} } }); -PersonSchema.plugin(mongoosastic, { +PersonSchema.plugin(mongoosastic.plugin(), { index:'people' , type: 'dude' , hydrate: true From a67d5846a7618310da462d7e497315aceeda94be Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:28:40 +0800 Subject: [PATCH 65/92] Update synchronize-test.js --- test/synchronize-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/synchronize-test.js b/test/synchronize-test.js index 45eeb25b..776221d7 100644 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -9,7 +9,7 @@ var mongoose = require('mongoose') var BookSchema = new Schema({ title: String }); -BookSchema.plugin(mongoosastic); +BookSchema.plugin(mongoosastic.plugin()); var Book = mongoose.model('Book', BookSchema); From 1230a60eb08695d942a604d3b74bbf11b3e8a867 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:29:04 +0800 Subject: [PATCH 66/92] Update truncate-test.js --- test/truncate-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/truncate-test.js b/test/truncate-test.js index da236202..caa4f923 100644 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -9,7 +9,7 @@ var mongoose = require('mongoose'), var DummySchema = new Schema({ text: String }); -DummySchema.plugin(mongoosastic); +DummySchema.plugin(mongoosastic.plugin()); var Dummy = mongoose.model('Dummy', DummySchema); From 5595d0a97a87d7ffa88c9b7894b45337db796d4f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:35:25 +0800 Subject: [PATCH 67/92] Update mongoosastic.js --- lib/mongoosastic.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 91c7a2af..dfc3ec98 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -434,6 +434,7 @@ module.exports = { }) }, plugin: function(options){ + options = options || {}; options.host = options && options.host ? 
options.host : 'localhost' options.protocol = options && options.protocol ? options.protocol : 'http' options.auth = options && options.auth ? options.auth : null From 551b17f6f274bf2f98f0c9d7f41d4be17cadc4ab Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:47:51 +0800 Subject: [PATCH 68/92] API changed --- lib/mongoosastic.js | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index dfc3ec98..d274a106 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -434,19 +434,28 @@ module.exports = { }) }, plugin: function(options){ + var _this = this; options = options || {}; options.host = options && options.host ? options.host : 'localhost' options.protocol = options && options.protocol ? options.protocol : 'http' options.auth = options && options.auth ? options.auth : null options.bulk = options && options.bulk options.modelFieldName = options && options.modelFieldName ? options.modelFieldName : "__model_name__" - this.options = options; - return function(schema){ - return Mongoosastic(schema,options); + return function(schema,_options){ + _this.options = extend(options,_options); + return Mongoosastic(schema,_options); } } }; +function extend(target) { + var src + for (var i = 1, l = arguments.length; i < l; i++) { + src = arguments[i] + for (var k in src) target[k] = src[k] + } + return target +} function createMappingIfNotPresent(options, cb) { var client = options.client From 59cfee60d29a4280327c3eb81ae92c5402d6465d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 21:53:09 +0800 Subject: [PATCH 69/92] API changed --- lib/mongoosastic.js | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index d274a106..aa2382fc 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -442,6 +442,7 @@ module.exports = { options.bulk = options && options.bulk options.modelFieldName = options && options.modelFieldName ? options.modelFieldName : "__model_name__" return function(schema,_options){ + _options = _options || {}; _this.options = extend(options,_options); return Mongoosastic(schema,_options); } From 867f497cf983d126c04fbe1c4d090fa44998a315 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 22:03:17 +0800 Subject: [PATCH 70/92] API changed --- lib/mongoosastic.js | 20 ++------------------ 1 file changed, 2 insertions(+), 18 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index aa2382fc..c6d935d1 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -25,8 +25,8 @@ function Mongoosastic(schema, options) { mapping.properties,es_name = { type:"string" }; - - this.esClient = this.esClient || new elasticsearch.Client({ + + var esClient = new elasticsearch.Client({ host: { host: host, port: port, @@ -34,7 +34,6 @@ function Mongoosastic(schema, options) { auth: auth } }); - var esClient = this.esClient; setUpMiddlewareHooks(schema) /** @@ -379,21 +378,6 @@ function Mongoosastic(schema, options) { module.exports = { - connect: function (options) { - var host = options && options.host || 'localhost', - port = options && options.port || 9200, - protocol = options && options.protocol || 'http', - auth = options && options.auth ? 
options.auth : null; - - this.esClient = this.esClient || new elasticsearch.Client({ - host: { - host: host, - port: port, - protocol: protocol, - auth: auth - } - }); - }, /** * 全局性的搜索,可以搜索多索引,多类型,同时也能hydrate */ From 4b35e39ea199a91dfe6a6490f15e90395c80b43b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 22:06:22 +0800 Subject: [PATCH 71/92] API changed --- lib/mongoosastic.js | 20 ++++++++++++++++++-- 1 file changed, 18 insertions(+), 2 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index c6d935d1..aa2382fc 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -25,8 +25,8 @@ function Mongoosastic(schema, options) { mapping.properties,es_name = { type:"string" }; - - var esClient = new elasticsearch.Client({ + + this.esClient = this.esClient || new elasticsearch.Client({ host: { host: host, port: port, @@ -34,6 +34,7 @@ function Mongoosastic(schema, options) { auth: auth } }); + var esClient = this.esClient; setUpMiddlewareHooks(schema) /** @@ -378,6 +379,21 @@ function Mongoosastic(schema, options) { module.exports = { + connect: function (options) { + var host = options && options.host || 'localhost', + port = options && options.port || 9200, + protocol = options && options.protocol || 'http', + auth = options && options.auth ? options.auth : null; + + this.esClient = this.esClient || new elasticsearch.Client({ + host: { + host: host, + port: port, + protocol: protocol, + auth: auth + } + }); + }, /** * 全局性的搜索,可以搜索多索引,多类型,同时也能hydrate */ From ec21dd854f8675d1d4783cc24dfae70213990092 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 22:12:50 +0800 Subject: [PATCH 72/92] API changed --- lib/mongoosastic.js | 34 +--------------------------------- 1 file changed, 1 insertion(+), 33 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index aa2382fc..d71f9ecf 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -444,7 +444,7 @@ module.exports = { return function(schema,_options){ _options = _options || {}; _this.options = extend(options,_options); - return Mongoosastic(schema,_options); + return Mongoosastic(schema,_this.options); } } }; @@ -493,39 +493,7 @@ function createMappingIfNotPresent(options, cb) { }) }) } -/* -function hydrate(res, model, options, cb) { - var results = res.hits - , resultsMap = {} - , ids = results.hits.map(function (a, i) { - resultsMap[a._id] = i - return a._id - }) - , query = model.find({_id: {$in: ids}}) - // Build Mongoose query based on hydrate options - // Example: {lean: true, sort: '-name', select: 'address name'} - Object.keys(options).forEach(function (option) {//想不到这个可以支持populate,真让我非常开心啊 - query[option](options[option]) - }) - - query.exec(function (err, docs) { - if (err) { - return cb(err) - } else { - var hits = [] - - docs.forEach(function (doc) { - var i = resultsMap[doc._id] //按照es输出的顺序来从新组合数据 - hits[i] = doc - }) - results.hits = hits - res.hits = results - cb(null, res) - } - }) -} -*/ function hydrate(res,model, options, cb) { var results = res.hits , resultsMap = {} From 5ff76530320bfffe9a77fbbcb23aedf61bfe0b4a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 22:52:12 +0800 Subject: [PATCH 73/92] Update bulk-test.js --- test/bulk-test.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/bulk-test.js b/test/bulk-test.js index b581ba91..73132966 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -52,7 +52,7 @@ describe('Bulk mode', 
function() { results.should.have.property('hits').with.property('total', 52); done(); }); - }, 3000) + }, 8000) }); }); From 3e0409cc3f21c3659160e7f72421b776cbe585cb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 22:55:14 +0800 Subject: [PATCH 74/92] Update bulk-test.js --- test/bulk-test.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/bulk-test.js b/test/bulk-test.js index 73132966..c787acf8 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -10,10 +10,10 @@ var BookSchema = new Schema({ title: String }); BookSchema.plugin(mongoosastic.plugin(), { - bulk: { +/* bulk: { size: 10, delay: 100 - } + }*/ }); var Book = mongoose.model('Book2', BookSchema); @@ -52,7 +52,7 @@ describe('Bulk mode', function() { results.should.have.property('hits').with.property('total', 52); done(); }); - }, 8000) + }, 2000) }); }); From b3bd0916ee59577a50443742c55c72a49bc0bfe5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 22:57:51 +0800 Subject: [PATCH 75/92] Update bulk-test.js --- test/bulk-test.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/bulk-test.js b/test/bulk-test.js index c787acf8..af567e01 100644 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -9,12 +9,12 @@ var mongoose = require('mongoose'), var BookSchema = new Schema({ title: String }); -BookSchema.plugin(mongoosastic.plugin(), { -/* bulk: { +BookSchema.plugin(mongoosastic.plugin({ + bulk: { size: 10, delay: 100 - }*/ -}); + } +})); var Book = mongoose.model('Book2', BookSchema); From 60d8f63bb4edfef7b049dcbc66c9410a74fec1b5 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 23:25:42 +0800 Subject: [PATCH 76/92] Update mongoosastic.js --- lib/mongoosastic.js | 4 ---- 1 file changed, 4 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index d71f9ecf..382a5d19 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -22,10 +22,6 @@ function Mongoosastic(schema, options) { , bulkBuffer = [] , bulkTimeout - mapping.properties,es_name = { - type:"string" - }; - this.esClient = this.esClient || new elasticsearch.Client({ host: { host: host, From e0a9343031317562a0a39f8ad8af197d5884ce02 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Fri, 6 Feb 2015 23:55:37 +0800 Subject: [PATCH 77/92] API changed --- lib/mongoosastic.js | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 382a5d19..59031923 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -246,7 +246,7 @@ function Mongoosastic(schema, options) { if (alwaysHydrate || options.hydrate) { options.hydrateOptions = options.hydrateOptions || defaultHydrateOptions || {} options.modelFieldName = modelFieldName - hydrate(res, model, options, cb) + hydrate(res,options, cb) } else { cb(null, res) } @@ -422,7 +422,7 @@ module.exports = { } else { if (options.hydrate) { options.modelFieldName = options.modelFieldName || _this.options.modelFieldName - hydrate(res,null,options || {}, cb) + hydrate(res,options || {}, cb) } else { cb(null, res) } @@ -490,13 +490,14 @@ function createMappingIfNotPresent(options, cb) { }) } -function hydrate(res,model, options, cb) { +function hydrate(res,options, cb) { var results = res.hits , resultsMap = {} , ids = {} , querys = {} , modelName = "" , hits = [] + , model , modelFieldName = options.modelFieldName results.hits.forEach(function(a,i){ 
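The commit above and the one that follows rework `hydrate` so that cross-model search results are resolved from each hit's `_type` instead of a stored `__model_name__` field. A minimal standalone sketch of the flow these two patches converge on — not code from the patches themselves — assuming every `_type` corresponds to a mongoose model registered under the lowercased model name, and that `hydrateOptions` only contains valid Mongoose query-builder calls such as `lean`, `sort`, `select`, or `populate`:

```javascript
// Hypothetical illustration of the multi-model hydration flow:
// group Elasticsearch hits by the mongoose model resolved from _type,
// fetch each group in bulk, and put the docs back in ES ranking order.
var mongoose = require('mongoose')
  , async = require('async')

function hydrateSketch(esResponse, hydrateOptions, cb) {
  var hits = esResponse.hits.hits
    , idsByModel = {}
    , orderByModel = {}
    , ordered = []

  hits.forEach(function (hit, i) {
    // naive _type -> model name resolution, e.g. 'tweet' -> 'Tweet'
    var modelName = mongoose.modelNames().filter(function (name) {
      return name.toLowerCase() === hit._type
    })[0]
    if (!modelName) return
    idsByModel[modelName] = idsByModel[modelName] || []
    orderByModel[modelName] = orderByModel[modelName] || {}
    idsByModel[modelName].push(hit._id)
    orderByModel[modelName][hit._id] = i // remember the ES ranking
  })

  async.eachSeries(Object.keys(idsByModel), function (modelName, done) {
    var query = mongoose.model(modelName).find({_id: {$in: idsByModel[modelName]}})
    // apply hydrate options as query-builder calls, e.g. populate('tags'), lean(true)
    Object.keys(hydrateOptions || {}).forEach(function (opt) {
      query[opt](hydrateOptions[opt])
    })
    query.exec(function (err, docs) {
      if (err) return done(err)
      docs.forEach(function (doc) {
        ordered[orderByModel[modelName][doc._id]] = doc
      })
      done()
    })
  }, function (err) {
    if (err) return cb(err)
    esResponse.hits.hits = ordered
    cb(null, esResponse)
  })
}
```
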
From 476c0bce673c0895b1e6e62c8f5a73591ba3d803 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Sat, 7 Feb 2015 00:48:18 +0800 Subject: [PATCH 78/92] API changed --- lib/mongoosastic.js | 36 ++++++++++++++++++++---------------- 1 file changed, 20 insertions(+), 16 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 59031923..546aa46c 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -18,7 +18,6 @@ function Mongoosastic(schema, options) { , protocol = options.protocol , auth = options.auth , bulk = options.bulk - , modelFieldName = options.modelFieldName , bulkBuffer = [] , bulkTimeout @@ -89,7 +88,6 @@ function Mongoosastic(schema, options) { }) setImmediate(cb) } else { - serialModel[modelFieldName] = this.constructor.modelName; esClient.index({ index: index, type: type, @@ -244,9 +242,7 @@ function Mongoosastic(schema, options) { cb(err) } else { if (alwaysHydrate || options.hydrate) { - options.hydrateOptions = options.hydrateOptions || defaultHydrateOptions || {} - options.modelFieldName = modelFieldName - hydrate(res,options, cb) + hydrate(res,options.hydrateOptions || defaultHydrateOptions || {}, cb) } else { cb(null, res) } @@ -410,7 +406,6 @@ module.exports = { type: options.type || "" } - Object.keys(options).forEach(function (opt) { if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) esQuery[opt] = options[opt] @@ -421,7 +416,6 @@ module.exports = { cb(err) } else { if (options.hydrate) { - options.modelFieldName = options.modelFieldName || _this.options.modelFieldName hydrate(res,options || {}, cb) } else { cb(null, res) @@ -436,7 +430,6 @@ module.exports = { options.protocol = options && options.protocol ? options.protocol : 'http' options.auth = options && options.auth ? options.auth : null options.bulk = options && options.bulk - options.modelFieldName = options && options.modelFieldName ? 
options.modelFieldName : "__model_name__" return function(schema,_options){ _options = _options || {}; _this.options = extend(options,_options); @@ -498,17 +491,15 @@ function hydrate(res,options, cb) { , modelName = "" , hits = [] , model - , modelFieldName = options.modelFieldName - results.hits.forEach(function(a,i){ - var source = a._source; - if(source[modelFieldName]) { - resultsMap[source[modelFieldName]] = resultsMap[source[modelFieldName]] || {}; - ids[source[modelFieldName]] = ids[source[modelFieldName]] || []; + var modelName = getModelName(a); + if(modelName) { + resultsMap[modelName] = resultsMap[modelName] || {}; + ids[modelName] = ids[modelName] || []; - resultsMap[source[modelFieldName]][a._id] = i;//记录排序索引 - ids[source[modelFieldName]].push(a._id); + resultsMap[modelName][a._id] = i;//记录排序索引 + ids[modelName].push(a._id); } }); @@ -536,6 +527,19 @@ function hydrate(res,options, cb) { }); } +function getModelName(es_item){ + if(!es_item || !es_item._type) return; + var names = mongoose.modelNames(), + res=""; + names.forEach(function(name){ + if(es_item._type === name.toLowerCase()){ + res = name; + return false; + } + }); + return res; +} + function getMapping(schema) { var retMapping = {} generator.generateMapping(schema, function (err, mapping) { From 583510be125db1327dbdb60d3a6ee3b8ef2a325a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Sat, 7 Feb 2015 01:02:33 +0800 Subject: [PATCH 79/92] test remove --- test/index-test.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/test/index-test.js b/test/index-test.js index 55c71dc2..cecd38ce 100644 --- a/test/index-test.js +++ b/test/index-test.js @@ -141,6 +141,7 @@ describe('indexing', function(){ }); }); }); + /* describe('Removing', function(){ var tweet = null; beforeEach(function(done){ @@ -194,6 +195,7 @@ describe('indexing', function(){ }); }); + */ describe('Isolated Models', function(){ before(function(done){ var talk = new Talk({ From 8cfdfc5880a667557f7f398ce9b4825568dfcb41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=99=BD=E7=8E=84?= Date: Sat, 7 Feb 2015 01:18:26 +0800 Subject: [PATCH 80/92] scroll back --- .idea/.name | 1 + .idea/encodings.xml | 4 + .idea/misc.xml | 4 + .idea/modules.xml | 8 + .idea/mongoosastic.iml | 8 + .idea/scopes/scope_settings.xml | 5 + .idea/vcs.xml | 6 + .idea/workspace.xml | 157 ++++ CHANGELOG.md | 0 CONTRIBUTING.md | 0 LICENSE.md | 0 README.md | 40 +- example/blog/app.js | 0 example/blog/package.json | 0 example/blog/public/stylesheets/style.css | 0 example/blog/routes/index.js | 0 example/blog/views/index.jade | 0 example/blog/views/layout.jade | 0 example/blog/views/post.jade | 0 example/readme.md | 0 lib/mapping-generator.js | 0 lib/mongoosastic.js | 1013 +++++++++------------ lib/serialize.js | 0 package.json | 37 +- test/alternative-index-method-test.js | 0 test/boost-field-test.js | 2 +- test/bulk-test.js | 6 +- test/config.js | 0 test/geo-test.js | 2 +- test/index-test.js | 8 +- test/mapping-generator-test.js | 0 test/models/tweet.js | 2 +- test/search-features-test.js | 2 +- test/serialize-test.js | 0 test/synchronize-test.js | 2 +- test/truncate-test.js | 2 +- 36 files changed, 660 insertions(+), 649 deletions(-) create mode 100644 .idea/.name create mode 100644 .idea/encodings.xml create mode 100644 .idea/misc.xml create mode 100644 .idea/modules.xml create mode 100644 .idea/mongoosastic.iml create mode 100644 .idea/scopes/scope_settings.xml create mode 100644 .idea/vcs.xml create mode 100644 .idea/workspace.xml mode change 100644 => 100755 
CHANGELOG.md mode change 100644 => 100755 CONTRIBUTING.md mode change 100644 => 100755 LICENSE.md mode change 100644 => 100755 README.md mode change 100644 => 100755 example/blog/app.js mode change 100644 => 100755 example/blog/package.json mode change 100644 => 100755 example/blog/public/stylesheets/style.css mode change 100644 => 100755 example/blog/routes/index.js mode change 100644 => 100755 example/blog/views/index.jade mode change 100644 => 100755 example/blog/views/layout.jade mode change 100644 => 100755 example/blog/views/post.jade mode change 100644 => 100755 example/readme.md mode change 100644 => 100755 lib/mapping-generator.js mode change 100644 => 100755 lib/mongoosastic.js mode change 100644 => 100755 lib/serialize.js mode change 100644 => 100755 package.json mode change 100644 => 100755 test/alternative-index-method-test.js mode change 100644 => 100755 test/boost-field-test.js mode change 100644 => 100755 test/bulk-test.js mode change 100644 => 100755 test/config.js mode change 100644 => 100755 test/geo-test.js mode change 100644 => 100755 test/index-test.js mode change 100644 => 100755 test/mapping-generator-test.js mode change 100644 => 100755 test/models/tweet.js mode change 100644 => 100755 test/search-features-test.js mode change 100644 => 100755 test/serialize-test.js mode change 100644 => 100755 test/synchronize-test.js mode change 100644 => 100755 test/truncate-test.js diff --git a/.idea/.name b/.idea/.name new file mode 100644 index 00000000..dea147d2 --- /dev/null +++ b/.idea/.name @@ -0,0 +1 @@ +mongoosastic \ No newline at end of file diff --git a/.idea/encodings.xml b/.idea/encodings.xml new file mode 100644 index 00000000..d8210482 --- /dev/null +++ b/.idea/encodings.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml new file mode 100644 index 00000000..8662aa97 --- /dev/null +++ b/.idea/misc.xml @@ -0,0 +1,4 @@ + + + + \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml new file mode 100644 index 00000000..44eefacb --- /dev/null +++ b/.idea/modules.xml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/mongoosastic.iml b/.idea/mongoosastic.iml new file mode 100644 index 00000000..c956989b --- /dev/null +++ b/.idea/mongoosastic.iml @@ -0,0 +1,8 @@ + + + + + + + + \ No newline at end of file diff --git a/.idea/scopes/scope_settings.xml b/.idea/scopes/scope_settings.xml new file mode 100644 index 00000000..922003b8 --- /dev/null +++ b/.idea/scopes/scope_settings.xml @@ -0,0 +1,5 @@ + + + + \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml new file mode 100644 index 00000000..94a25f7f --- /dev/null +++ b/.idea/vcs.xml @@ -0,0 +1,6 @@ + + + + + + \ No newline at end of file diff --git a/.idea/workspace.xml b/.idea/workspace.xml new file mode 100644 index 00000000..1dfdc0be --- /dev/null +++ b/.idea/workspace.xml @@ -0,0 +1,157 @@ + + + + + + + + + + + + + + + + + + + + + true + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 1423243009585 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/CHANGELOG.md b/CHANGELOG.md old mode 100644 new mode 100755 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md old mode 100644 new mode 100755 diff --git a/LICENSE.md b/LICENSE.md old mode 100644 new mode 100755 diff --git a/README.md b/README.md old mode 100644 new mode 100755 index c923e52f..f86fdb04 --- 
a/README.md +++ b/README.md @@ -60,13 +60,8 @@ var User = new Schema({ , email: String , city: String }) -var options = { - host:"localhost", - port:9200, - modelFieldName:'__model_name__' //This property is used to search for the model, in a multi-index and more types of queries,default value is __model_name__ -}; -User.plugin(mongoosastic.plugin(options)) +User.plugin(mongoosastic) ``` This will by default simply use the pluralization of the model name as the index @@ -88,7 +83,7 @@ var User = new Schema({ , city: String }) -User.plugin(mongoosastic.plugin()) +User.plugin(mongoosastic) ``` In this case only the name field @@ -145,7 +140,7 @@ var User = new Schema({ , comments: {type:[Comment], es_indexed:true} }) -User.plugin(mongoosastic.plugin()) +User.plugin(mongoosastic) ``` @@ -188,7 +183,7 @@ You can also specify `bulk` options with mongoose which will utilize elasticsear Mongoosastic will wait 1 second (or specified delay) until it has 1000 docs (or specified size) and then perform bulk indexing. ```javascript -BookSchema.plugin(mongoosastic.plugin(), { +BookSchema.plugin(mongoosastic, { bulk: { size: 10, // preferred number of docs to bulk index delay: 100 //milliseconds to wait for enough docs to meet size constraint @@ -488,30 +483,5 @@ var User = new Schema({ , city: String }) -User.plugin(mongoosastic.plugin(), {hydrate:true, hydrateOptions: {lean: true}}) -``` - - -###Populating - -```javascript -var mongoosastic = require('mongoosastic'); - -mongoosastic.search({ - match_all:{} -},{ - index:["articles","videos","musics","gallerys"], - type:["article","video","music","gallery"], - hydrate:true, - hydrateOptions:{ - populate:"tags catgories ..." - } -}, function(err, results) { - if(err){ - console.log(err); - } else { - console.log(results.hits); - } -}); - +User.plugin(mongoosastic, {hydrate:true, hydrateOptions: {lean: true}}) ``` diff --git a/example/blog/app.js b/example/blog/app.js old mode 100644 new mode 100755 diff --git a/example/blog/package.json b/example/blog/package.json old mode 100644 new mode 100755 diff --git a/example/blog/public/stylesheets/style.css b/example/blog/public/stylesheets/style.css old mode 100644 new mode 100755 diff --git a/example/blog/routes/index.js b/example/blog/routes/index.js old mode 100644 new mode 100755 diff --git a/example/blog/views/index.jade b/example/blog/views/index.jade old mode 100644 new mode 100755 diff --git a/example/blog/views/layout.jade b/example/blog/views/layout.jade old mode 100644 new mode 100755 diff --git a/example/blog/views/post.jade b/example/blog/views/post.jade old mode 100644 new mode 100755 diff --git a/example/readme.md b/example/readme.md old mode 100644 new mode 100755 diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js old mode 100644 new mode 100755 diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js old mode 100644 new mode 100755 index 546aa46c..ef7ae42d --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -1,577 +1,460 @@ var elasticsearch = require('elasticsearch') - , generator = new (require('./mapping-generator')) - , serialize = require('./serialize') - , events = require('events') - , mongoose = require('mongoose') - , async = require('async') - , nop = require('nop') - -function Mongoosastic(schema, options) { - var mapping = getMapping(schema) - , indexName = options && options.index - , typeName = options && options.type - , alwaysHydrate = options && options.hydrate - , defaultHydrateOptions = options && options.hydrateOptions - , _mapping = null - , host = 
options.host - , port = options.port - , protocol = options.protocol - , auth = options.auth - , bulk = options.bulk - , bulkBuffer = [] - , bulkTimeout - - this.esClient = this.esClient || new elasticsearch.Client({ - host: { - host: host, - port: port, - protocol: protocol, - auth: auth - } - }); - var esClient = this.esClient; - setUpMiddlewareHooks(schema) - - /** - * ElasticSearch Client - */ - schema.statics.esClient = esClient - - /** - * Create the mapping. Takes an optionnal settings parameter and a callback that will be called once - * the mapping is created - - * @param settings Object (optional) - * @param callback Function - */ - schema.statics.createMapping = function (settings, cb) { - if (arguments.length < 2) { - cb = arguments[0] || nop - settings = undefined - } - - setIndexNameIfUnset(this.modelName) - - createMappingIfNotPresent({ - client: esClient, - indexName: indexName, - typeName: typeName, - schema: schema, - settings: settings - }, cb) - } - - /** - * @param options Object (optional) - * @param callback Function - */ - schema.methods.index = function (options, cb) { - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - //自动匹配index与type - setIndexNameIfUnset(this.constructor.modelName) - - var index = options.index || indexName - , type = options.type || typeName - ,serialModel = serialize(this, mapping); - if (bulk) { - /** - * To serialize in bulk it needs the _id - */ - serialModel._id = this._id; - bulkIndex({ - index: index, - type: type, - model: serialModel - }) - setImmediate(cb) - } else { - esClient.index({ - index: index, - type: type, - id: this._id.toString(), - body: serialModel - }, cb) - } - } - - /** - * Unset elastic search index - * @param options - (optional) options for unIndex - * @param callback - callback when unIndex is complete - */ - schema.methods.unIndex = function (options, cb) { - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - setIndexNameIfUnset(this.constructor.modelName) - - options.index = options.index || indexName - options.type = options.type || typeName - options.model = this - options.client = esClient - options.tries = 3 - - if (bulk) - bulkDelete(options, cb) - else - deleteByMongoId(options, cb) - } - - /** - * Delete all documents from a type/index - * @param options - (optional) specify index/type - * @param callback - callback when truncation is complete - */ - schema.statics.esTruncate = function (options, cb) { - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - var index = options.index || indexName - , type = options.type || typeName - - esClient.deleteByQuery({ - index: index, - type: type, - body: { - query: { - match_all: {} - } - } - }, cb) - } - - /** - * Synchronize an existing collection - * - * @param query - query for documents you want to synchronize - */ - schema.statics.synchronize = function (query) { - var em = new events.EventEmitter() - , readyToClose - , closeValues = [] - , counter = 0 - , close = function () { - em.emit.apply(em, ['close'].concat(closeValues)) - } - - //Set indexing to be bulk when synchronizing to make synchronizing faster - bulk = bulk || { - delay: 1000, - size: 1000 - } - - query = query || {} - - setIndexNameIfUnset(this.modelName) - - var stream = this.find(query).stream() - - stream.on('data', function (doc) { - counter++ - doc.save(function (err) { - if (err) - return em.emit('error', err) - - doc.on('es-indexed', function (err, doc) { - counter-- - if (err) { - em.emit('error', err) - } 
else { - em.emit('data', null, doc) - } - }) - }) - }) - - stream.on('close', function (a, b) { - closeValues = [a, b] - var closeInterval = setInterval(function () { - if (counter === 0 && bulkBuffer.length === 0) { - clearInterval(closeInterval) - close() - } - }, 1000) - }) - - stream.on('error', function (err) { - em.emit('error', err) - }) - - return em - } - /** - * ElasticSearch search function - * - * @param query - query object to perform search with - * @param options - (optional) special search options, such as hydrate - * @param callback - callback called with search results - */ - schema.statics.search = function (query, options, cb) { - if (arguments.length === 2) { - cb = arguments[1] - options = {} - } - - if (query === null) - query = undefined - - setIndexNameIfUnset(this.modelName) - - var model = this - , esQuery = { - body: {query: query}, - index: options.index || indexName, - type: options.type || typeName - } - - - Object.keys(options).forEach(function (opt) { - if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) - esQuery[opt] = options[opt] - }) - - esClient.search(esQuery, function (err, res) { - if (err) { - cb(err) - } else { - if (alwaysHydrate || options.hydrate) { - hydrate(res,options.hydrateOptions || defaultHydrateOptions || {}, cb) - } else { - cb(null, res) - } - } - }) - } - - function bulkDelete(options, cb) { - bulkAdd({ - delete: { - _index: options.index || indexName, - _type: options.type || typeName, - _id: options.model._id.toString() - } - }) - cb() - } - - function bulkIndex(options) { - bulkAdd({ - index: { - _index: options.index || indexName, - _type: options.type || typeName, - _id: options.model._id.toString() - } - }) - bulkAdd(options.model) - } - - function clearBulkTimeout() { - clearTimeout(bulkTimeout) - bulkTimeout = undefined - } - - function bulkAdd(instruction) { - bulkBuffer.push(instruction) - - //Return because we need the doc being indexed - //Before we start inserting - if (instruction.index && instruction.index._index) - return - - if (bulkBuffer.length >= (bulk.size || 1000)) { - schema.statics.flush() - clearBulkTimeout() - } else if (bulkTimeout === undefined) { - bulkTimeout = setTimeout(function () { - schema.statics.flush() - clearBulkTimeout() - }, bulk.delay || 1000) - } - } - - schema.statics.flush = function (cb) { - cb = cb || function (err) { - if (err) console.log(err) - } - - esClient.bulk({ - body: bulkBuffer - }, function (err) { - cb(err) - }) - bulkBuffer = [] - } - - schema.statics.refresh = function (options, cb) { - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - setIndexNameIfUnset(this.modelName) - esClient.indices.refresh({ - index: options.index || indexName - }, cb) - } - - function setIndexNameIfUnset(model) { - var modelName = model.toLowerCase() - if (!indexName) { - indexName = modelName + "s" - } - if (!typeName) { - typeName = modelName - } - } - - - /** - * Use standard Mongoose Middleware hooks - * to persist to Elasticsearch - */ - function setUpMiddlewareHooks(schema) { - schema.post('remove', function () { - setIndexNameIfUnset(this.constructor.modelName) - - var options = { - index: indexName, - type: typeName, - tries: 3, - model: this, - client: esClient - } - - if (bulk) { - bulkDelete(options, nop) - } else { - deleteByMongoId(options, nop) - } - }) - - /** - * Save in elastic search on save. 
- */ - schema.post('save', function () { - var model = this - - model.index(function (err, res) { - model.emit('es-indexed', err, res) - }) - }) - } + , generator = new(require('./mapping-generator')) + , serialize = require('./serialize') + , events = require('events') + , nop = require('nop') + +module.exports = function Mongoosastic(schema, options){ + var mapping = getMapping(schema) + , indexName = options && options.index + , typeName = options && options.type + , alwaysHydrate = options && options.hydrate + , defaultHydrateOptions = options && options.hydrateOptions + , _mapping = null + , host = options && options.host ? options.host : 'localhost' + , port = options && options.port ? options.port : 9200 + , protocol = options && options.protocol ? options.protocol : 'http' + , auth = options && options.auth ? options.auth : null + , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth}}) + , bulk = options && options.bulk + , bulkBuffer = [] + , bulkTimeout + + setUpMiddlewareHooks(schema) + + /** + * ElasticSearch Client + */ + schema.statics.esClient = esClient + + /** + * Create the mapping. Takes an optionnal settings parameter and a callback that will be called once + * the mapping is created + + * @param settings Object (optional) + * @param callback Function + */ + schema.statics.createMapping = function(settings, cb) { + if(arguments.length < 2) { + cb = arguments[0] || nop + settings = undefined + } + + setIndexNameIfUnset(this.modelName) + + createMappingIfNotPresent({ + client: esClient, + indexName: indexName, + typeName: typeName, + schema: schema, + settings: settings + }, cb) + } + + /** + * @param options Object (optional) + * @param callback Function + */ + schema.methods.index = function(options, cb){ + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + setIndexNameIfUnset(this.constructor.modelName) + + var index = options.index || indexName + , type = options.type || typeName + + if(bulk) { + /** + * To serialize in bulk it needs the _id + */ + var serialModel = serialize(this, mapping); + serialModel._id = this._id; + + bulkIndex({ + index: index, + type: type, + model: serialModel + }) + setImmediate(cb) + } else { + esClient.index({ + index: index, + type: type, + id: this._id.toString(), + body: serialize(this, mapping) + }, cb) + } + } + + /** + * Unset elastic search index + * @param options - (optional) options for unIndex + * @param callback - callback when unIndex is complete + */ + schema.methods.unIndex = function(options, cb){ + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + setIndexNameIfUnset(this.constructor.modelName) + + options.index = options.index || indexName + options.type = options.type || typeName + options.model = this + options.client = esClient + options.tries = 3 + + if(bulk) + bulkDelete(options, cb) + else + deleteByMongoId(options, cb) + } + + /** + * Delete all documents from a type/index + * @param options - (optional) specify index/type + * @param callback - callback when truncation is complete + */ + schema.statics.esTruncate = function(options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + var index = options.index || indexName + , type = options.type || typeName + + esClient.deleteByQuery({ + index: index, + type: type, + body: { + query: { + match_all: {} + } + } + }, cb) + } + + /** + * Synchronize an existing collection + * + * @param query - query for documents you want to 
synchronize + */ + schema.statics.synchronize = function(query){ + var em = new events.EventEmitter() + , readyToClose + , closeValues = [] + , counter = 0 + , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} + + //Set indexing to be bulk when synchronizing to make synchronizing faster + bulk = bulk || { + delay: 1000, + size: 1000 + } + + query = query || {} + + setIndexNameIfUnset(this.modelName) + + var stream = this.find(query).stream() + + stream.on('data', function(doc){ + counter++ + doc.save(function(err){ + if (err) + return em.emit('error', err) + + doc.on('es-indexed', function(err, doc){ + counter-- + if(err){ + em.emit('error', err) + }else{ + em.emit('data', null, doc) + } + }) + }) + }) + + stream.on('close', function(a, b){ + closeValues = [a, b] + var closeInterval = setInterval(function() { + if (counter === 0 && bulkBuffer.length === 0) { + clearInterval(closeInterval) + close() + } + }, 1000) + }) + + stream.on('error', function(err){ + em.emit('error', err) + }) + + return em + } + /** + * ElasticSearch search function + * + * @param query - query object to perform search with + * @param options - (optional) special search options, such as hydrate + * @param callback - callback called with search results + */ + schema.statics.search = function(query, options, cb){ + if (arguments.length === 2) { + cb = arguments[1] + options = {} + } + + if (query === null) + query = undefined + + var model = this + , esQuery = { + body: {query: query}, + index: options.index || indexName, + type: options.type || typeName + } + + Object.keys(options).forEach(function(opt) { + if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) + esQuery[opt] = options[opt] + }) + + setIndexNameIfUnset(model.modelName) + + esClient.search(esQuery, function(err, res){ + if(err){ + cb(err) + } else { + if (alwaysHydrate || options.hydrate) + hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb) + else + cb(null, res) + } + }) + } + + function bulkDelete(options, cb) { + bulkAdd({ + delete: { + _index: options.index || indexName, + _type: options.type || typeName, + _id: options.model._id.toString() + } + }) + cb() + } + + function bulkIndex(options) { + bulkAdd({ + index: { + _index: options.index || indexName, + _type: options.type || typeName, + _id: options.model._id.toString() + } + }) + bulkAdd(options.model) + } + + function clearBulkTimeout() { + clearTimeout(bulkTimeout) + bulkTimeout = undefined + } + + function bulkAdd(instruction) { + bulkBuffer.push(instruction) + + //Return because we need the doc being indexed + //Before we start inserting + if (instruction.index && instruction.index._index) + return + + if(bulkBuffer.length >= (bulk.size || 1000)) { + schema.statics.flush() + clearBulkTimeout() + } else if (bulkTimeout === undefined){ + bulkTimeout = setTimeout(function(){ + schema.statics.flush() + clearBulkTimeout() + }, bulk.delay || 1000) + } + } + + schema.statics.flush = function(cb){ + cb = cb || function(err) { if (err) console.log(err) } + + esClient.bulk({ + body: bulkBuffer + }, function(err) { + cb(err) + }) + bulkBuffer = [] + } + + schema.statics.refresh = function(options, cb){ + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + setIndexNameIfUnset(this.modelName) + esClient.indices.refresh({ + index: options.index || indexName + }, cb) + } + + function setIndexNameIfUnset(model){ + var modelName = model.toLowerCase() + if(!indexName){ + indexName = modelName + "s" + } + if(!typeName){ + typeName = 
modelName + } + } + + + /** + * Use standard Mongoose Middleware hooks + * to persist to Elasticsearch + */ + function setUpMiddlewareHooks(schema) { + schema.post('remove', function(){ + setIndexNameIfUnset(this.constructor.modelName) + + var options = { + index: indexName, + type: typeName, + tries: 3, + model: this, + client: esClient + } + + if(bulk) { + bulkDelete(options, nop) + } else { + deleteByMongoId(options, nop) + } + }) + + /** + * Save in elastic search on save. + */ + schema.post('save', function(){ + var model = this + + model.index(function(err, res){ + model.emit('es-indexed', err, res) + }) + }) + } } - -module.exports = { - connect: function (options) { - var host = options && options.host || 'localhost', - port = options && options.port || 9200, - protocol = options && options.protocol || 'http', - auth = options && options.auth ? options.auth : null; - - this.esClient = this.esClient || new elasticsearch.Client({ - host: { - host: host, - port: port, - protocol: protocol, - auth: auth - } - }); - }, - /** - * 全局性的搜索,可以搜索多索引,多类型,同时也能hydrate - */ - search:function(query, options, cb){ - var _this = this; - if (arguments.length === 2) { - cb = arguments[1] - options = {} - } - - if (query === null) - query = undefined - - var model = this - , esQuery = { - body: {query: query}, - index: options.index || "", - type: options.type || "" - } - - Object.keys(options).forEach(function (opt) { - if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) - esQuery[opt] = options[opt] - }) - - esClient.search(esQuery, function (err, res) { - if (err) { - cb(err) - } else { - if (options.hydrate) { - hydrate(res,options || {}, cb) - } else { - cb(null, res) - } - } - }) - }, - plugin: function(options){ - var _this = this; - options = options || {}; - options.host = options && options.host ? options.host : 'localhost' - options.protocol = options && options.protocol ? options.protocol : 'http' - options.auth = options && options.auth ? 
options.auth : null - options.bulk = options && options.bulk - return function(schema,_options){ - _options = _options || {}; - _this.options = extend(options,_options); - return Mongoosastic(schema,_this.options); - } - } -}; - -function extend(target) { - var src - for (var i = 1, l = arguments.length; i < l; i++) { - src = arguments[i] - for (var k in src) target[k] = src[k] - } - return target -} - function createMappingIfNotPresent(options, cb) { - var client = options.client - , indexName = options.indexName - , typeName = options.typeName - , schema = options.schema - , settings = options.settings - - generator.generateMapping(schema, function (err, mapping) { - var completeMapping = {} - completeMapping[typeName] = mapping - client.indices.exists({index: indexName}, function (err, exists) { - if (err) - return cb(err) - - if (exists) { - client.indices.putMapping({ - index: indexName, - type: typeName, - body: completeMapping - }, cb) - } else { - client.indices.create({index: indexName, body: settings}, function (err) { - if (err) - return cb(err) - - client.indices.putMapping({ - index: indexName, - type: typeName, - body: completeMapping - }, cb) - }) - } - }) - }) -} - -function hydrate(res,options, cb) { - var results = res.hits - , resultsMap = {} - , ids = {} - , querys = {} - , modelName = "" - , hits = [] - , model - results.hits.forEach(function(a,i){ - var modelName = getModelName(a); - if(modelName) { - resultsMap[modelName] = resultsMap[modelName] || {}; - ids[modelName] = ids[modelName] || []; - - - resultsMap[modelName][a._id] = i;//记录排序索引 - ids[modelName].push(a._id); - } - }); - - async.eachSeries(Object.keys(resultsMap),function(modelName,callback){ - model = mongoose.model(modelName); - querys[modelName] = model.find({_id:{$in:ids[modelName]}}); - Object.keys(options.hydrateOptions).forEach(function (option) { - querys[modelName][option](options.hydrateOptions[option]) - }) - querys[modelName].exec(function(err, docs){ - if (err) { - return cb(err) - } else { - docs.forEach(function (doc) { - var i = resultsMap[modelName][doc._id] - hits[i] = doc - }); - callback(); - } - }) - },function(){ - results.hits = hits - res.hits = results - cb(null, res) - }); + var client = options.client + , indexName = options.indexName + , typeName = options.typeName + , schema = options.schema + , settings = options.settings + + generator.generateMapping(schema, function(err, mapping) { + var completeMapping = {} + completeMapping[typeName] = mapping + client.indices.exists({index: indexName}, function(err, exists) { + if (err) + return cb(err) + + if (exists) { + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb) + } else { + client.indices.create({index: indexName, body: settings}, function(err) { + if (err) + return cb(err) + + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb) + }) + } + }) + }) } -function getModelName(es_item){ - if(!es_item || !es_item._type) return; - var names = mongoose.modelNames(), - res=""; - names.forEach(function(name){ - if(es_item._type === name.toLowerCase()){ - res = name; - return false; - } - }); - return res; +function hydrate(res, model, options, cb){ + var results = res.hits + , resultsMap = {} + , ids = results.hits.map(function(a, i){ + resultsMap[a._id] = i + return a._id + }) + , query = model.find({_id:{$in:ids}}) + + // Build Mongoose query based on hydrate options + // Example: {lean: true, sort: '-name', select: 'address name'} + 
Object.keys(options).forEach(function(option){ + query[option](options[option]) + }) + + query.exec(function(err, docs){ + if(err) { + return cb(err) + } else { + var hits = [] + + docs.forEach(function(doc) { + var i = resultsMap[doc._id] + hits[i] = doc + }) + results.hits = hits + res.hits = results + cb(null, res) + } + }) } -function getMapping(schema) { - var retMapping = {} - generator.generateMapping(schema, function (err, mapping) { - retMapping = mapping - }) - return retMapping +function getMapping(schema){ + var retMapping = {} + generator.generateMapping(schema, function(err, mapping){ + retMapping = mapping + }) + return retMapping } -function deleteByMongoId(options, cb) { - var index = options.index - , type = options.type - , client = options.client - , model = options.model - , tries = options.tries - - client.delete({ - index: index, - type: type, - id: model._id.toString() - }, function (err, res) { - if (err && err.message.indexOf('404') > -1) { - setTimeout(function () { - if (tries <= 0) { - return cb(err) - } else { - options.tries = --tries - deleteByMongoId(options, cb) - } - }, 500) - } else { - model.emit('es-removed', err, res) - cb(err) - } - }) +function deleteByMongoId(options, cb){ + var index = options.index + , type = options.type + , client = options.client + , model = options.model + , tries = options.tries + + client.delete({ + index: index, + type: type, + id: model._id.toString() + }, function(err, res){ + if(err && err.message.indexOf('404') > -1){ + setTimeout(function(){ + if(tries <= 0) { + return cb(err) + } else { + options.tries = --tries + deleteByMongoId(options, cb) + } + }, 500) + }else{ + model.emit('es-removed', err, res) + cb(err) + } + }) } diff --git a/lib/serialize.js b/lib/serialize.js old mode 100644 new mode 100755 diff --git a/package.json b/package.json old mode 100644 new mode 100755 index 9bbe6c75..ed595417 --- a/package.json +++ b/package.json @@ -1,9 +1,5 @@ { - "author": { - "name": "James R. Carr", - "email": "james.r.carr@gmail.com", - "url": "http://blog.james-carr.org" - }, + "author": "James R. 
Carr (http://blog.james-carr.org)", "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", "version": "2.0.6", @@ -36,34 +32,5 @@ }, "scripts": { "test": "mocha -R spec -t 20000 -b" - }, - "gitHead": "a1c25990cca9717497c0e706d7ac64b5ed204819", - "bugs": { - "url": "https://github.com/mongoosastic/mongoosastic/issues" - }, - "homepage": "https://github.com/mongoosastic/mongoosastic", - "_id": "mongoosastic@2.0.6", - "_shasum": "8c86d2e396cf110abbcc9ef3e041818a1b4f9788", - "_from": "mongoosastic@", - "_npmVersion": "2.0.0", - "_npmUser": { - "name": "taterbase", - "email": "taterbase@gmail.com" - }, - "maintainers": [ - { - "name": "jamescarr", - "email": "james.r.carr@gmail.com" - }, - { - "name": "taterbase", - "email": "shankga@gmail.com" - } - ], - "dist": { - "shasum": "8c86d2e396cf110abbcc9ef3e041818a1b4f9788", - "tarball": "http://registry.npmjs.org/mongoosastic/-/mongoosastic-2.0.6.tgz" - }, - "directories": {}, - "_resolved": "https://registry.npmjs.org/mongoosastic/-/mongoosastic-2.0.6.tgz" + } } diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js old mode 100644 new mode 100755 diff --git a/test/boost-field-test.js b/test/boost-field-test.js old mode 100644 new mode 100755 index 70b84b86..26b8290c --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -14,7 +14,7 @@ var TweetSchema = new Schema({ , title: {type:String, es_boost:2.0} }); -TweetSchema.plugin(mongoosastic.plugin()); +TweetSchema.plugin(mongoosastic); var BlogPost = mongoose.model('BlogPost', TweetSchema); describe('Add Boost Option Per Field', function(){ diff --git a/test/bulk-test.js b/test/bulk-test.js old mode 100644 new mode 100755 index af567e01..031fd3a3 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -9,12 +9,12 @@ var mongoose = require('mongoose'), var BookSchema = new Schema({ title: String }); -BookSchema.plugin(mongoosastic.plugin({ +BookSchema.plugin(mongoosastic, { bulk: { size: 10, delay: 100 } -})); +}); var Book = mongoose.model('Book2', BookSchema); @@ -52,7 +52,7 @@ describe('Bulk mode', function() { results.should.have.property('hits').with.property('total', 52); done(); }); - }, 2000) + }, 3000) }); }); diff --git a/test/config.js b/test/config.js old mode 100644 new mode 100755 diff --git a/test/geo-test.js b/test/geo-test.js old mode 100644 new mode 100755 index 31a888df..567ff413 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -31,7 +31,7 @@ describe('GeoTest', function(){ } }); - GeoSchema.plugin(mongoosastic.plugin()); + GeoSchema.plugin(mongoosastic); GeoModel = mongoose.model('geodoc', GeoSchema); GeoModel.createMapping(function(err, mapping){ diff --git a/test/index-test.js b/test/index-test.js old mode 100644 new mode 100755 index cecd38ce..fd9c6308 --- a/test/index-test.js +++ b/test/index-test.js @@ -15,7 +15,7 @@ var TalkSchema = new Schema({ , abstract: {type:String, es_indexed:true} , bio: String }); -TalkSchema.plugin(mongoosastic.plugin()) +TalkSchema.plugin(mongoosastic) var Talk = mongoose.model("Talk", TalkSchema); @@ -28,7 +28,7 @@ var PersonSchema = new Schema({ , died: {type: Number, es_indexed:true} } }); -PersonSchema.plugin(mongoosastic.plugin(), { +PersonSchema.plugin(mongoosastic, { index:'people' , type: 'dude' , hydrate: true @@ -141,7 +141,6 @@ describe('indexing', function(){ }); }); }); - /* describe('Removing', function(){ var tweet = null; beforeEach(function(done){ @@ -195,7 +194,6 @@ describe('indexing', function(){ }); }); - */ describe('Isolated Models', 
function(){ before(function(done){ var talk = new Talk({ @@ -358,7 +356,7 @@ describe('indexing', function(){ var BumSchema = new Schema({ name: String }); - BumSchema.plugin(mongoosastic.plugin(), { + BumSchema.plugin(mongoosastic, { index: 'ms_sample' , type: 'bum' }); diff --git a/test/mapping-generator-test.js b/test/mapping-generator-test.js old mode 100644 new mode 100755 diff --git a/test/models/tweet.js b/test/models/tweet.js old mode 100644 new mode 100755 index 50105f39..f991c3ad --- a/test/models/tweet.js +++ b/test/models/tweet.js @@ -10,6 +10,6 @@ var TweetSchema = new Schema({ , message: String }); -TweetSchema.plugin(mongoosastic.plugin()) +TweetSchema.plugin(mongoosastic) module.exports = mongoose.model('Tweet', TweetSchema); diff --git a/test/search-features-test.js b/test/search-features-test.js old mode 100644 new mode 100755 index 9bfc087b..993de91e --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -12,7 +12,7 @@ var BondSchema = new Schema({ , price: Number }); -BondSchema.plugin(mongoosastic.plugin()); +BondSchema.plugin(mongoosastic); var Bond = mongoose.model('Bond', BondSchema); diff --git a/test/serialize-test.js b/test/serialize-test.js old mode 100644 new mode 100755 diff --git a/test/synchronize-test.js b/test/synchronize-test.js old mode 100644 new mode 100755 index 776221d7..45eeb25b --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -9,7 +9,7 @@ var mongoose = require('mongoose') var BookSchema = new Schema({ title: String }); -BookSchema.plugin(mongoosastic.plugin()); +BookSchema.plugin(mongoosastic); var Book = mongoose.model('Book', BookSchema); diff --git a/test/truncate-test.js b/test/truncate-test.js old mode 100644 new mode 100755 index caa4f923..da236202 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -9,7 +9,7 @@ var mongoose = require('mongoose'), var DummySchema = new Schema({ text: String }); -DummySchema.plugin(mongoosastic.plugin()); +DummySchema.plugin(mongoosastic); var Dummy = mongoose.model('Dummy', DummySchema); From a40977bef000c2d6ce3f25bea32bd5c46043f298 Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Sat, 7 Feb 2015 01:28:07 +0800 Subject: [PATCH 81/92] to now --- CHANGELOG.md | 0 CONTRIBUTING.md | 0 LICENSE.md | 0 README.md | 0 example/blog/app.js | 0 example/blog/package.json | 0 example/blog/public/stylesheets/style.css | 0 example/blog/routes/index.js | 0 example/blog/views/index.jade | 0 example/blog/views/layout.jade | 0 example/blog/views/post.jade | 0 example/readme.md | 0 lib/mapping-generator.js | 0 lib/mongoosastic.js | 1011 ++++++++++++--------- lib/serialize.js | 0 package.json | 39 +- test/alternative-index-method-test.js | 1 - test/boost-field-test.js | 2 +- test/bulk-test.js | 2 +- test/config.js | 2 +- test/geo-test.js | 2 +- test/index-test.js | 6 +- test/models/tweet.js | 2 +- test/search-features-test.js | 2 +- test/synchronize-test.js | 2 +- test/truncate-test.js | 2 +- 26 files changed, 610 insertions(+), 463 deletions(-) mode change 100755 => 100644 CHANGELOG.md mode change 100755 => 100644 CONTRIBUTING.md mode change 100755 => 100644 LICENSE.md mode change 100755 => 100644 README.md mode change 100755 => 100644 example/blog/app.js mode change 100755 => 100644 example/blog/package.json mode change 100755 => 100644 example/blog/public/stylesheets/style.css mode change 100755 => 100644 example/blog/routes/index.js mode change 100755 => 100644 example/blog/views/index.jade mode change 100755 => 100644 example/blog/views/layout.jade mode change 100755 => 100644 
example/blog/views/post.jade mode change 100755 => 100644 example/readme.md mode change 100755 => 100644 lib/mapping-generator.js mode change 100755 => 100644 lib/mongoosastic.js mode change 100755 => 100644 lib/serialize.js mode change 100755 => 100644 package.json diff --git a/CHANGELOG.md b/CHANGELOG.md old mode 100755 new mode 100644 diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md old mode 100755 new mode 100644 diff --git a/LICENSE.md b/LICENSE.md old mode 100755 new mode 100644 diff --git a/README.md b/README.md old mode 100755 new mode 100644 diff --git a/example/blog/app.js b/example/blog/app.js old mode 100755 new mode 100644 diff --git a/example/blog/package.json b/example/blog/package.json old mode 100755 new mode 100644 diff --git a/example/blog/public/stylesheets/style.css b/example/blog/public/stylesheets/style.css old mode 100755 new mode 100644 diff --git a/example/blog/routes/index.js b/example/blog/routes/index.js old mode 100755 new mode 100644 diff --git a/example/blog/views/index.jade b/example/blog/views/index.jade old mode 100755 new mode 100644 diff --git a/example/blog/views/layout.jade b/example/blog/views/layout.jade old mode 100755 new mode 100644 diff --git a/example/blog/views/post.jade b/example/blog/views/post.jade old mode 100755 new mode 100644 diff --git a/example/readme.md b/example/readme.md old mode 100755 new mode 100644 diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js old mode 100755 new mode 100644 diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js old mode 100755 new mode 100644 index ef7ae42d..49d91ff7 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -1,460 +1,575 @@ var elasticsearch = require('elasticsearch') - , generator = new(require('./mapping-generator')) - , serialize = require('./serialize') - , events = require('events') - , nop = require('nop') - -module.exports = function Mongoosastic(schema, options){ - var mapping = getMapping(schema) - , indexName = options && options.index - , typeName = options && options.type - , alwaysHydrate = options && options.hydrate - , defaultHydrateOptions = options && options.hydrateOptions - , _mapping = null - , host = options && options.host ? options.host : 'localhost' - , port = options && options.port ? options.port : 9200 - , protocol = options && options.protocol ? options.protocol : 'http' - , auth = options && options.auth ? options.auth : null - , esClient = new elasticsearch.Client({host: {host: host, port: port, protocol: protocol, auth: auth}}) - , bulk = options && options.bulk - , bulkBuffer = [] - , bulkTimeout - - setUpMiddlewareHooks(schema) - - /** - * ElasticSearch Client - */ - schema.statics.esClient = esClient - - /** - * Create the mapping. 
Takes an optionnal settings parameter and a callback that will be called once - * the mapping is created - - * @param settings Object (optional) - * @param callback Function - */ - schema.statics.createMapping = function(settings, cb) { - if(arguments.length < 2) { - cb = arguments[0] || nop - settings = undefined - } - - setIndexNameIfUnset(this.modelName) - - createMappingIfNotPresent({ - client: esClient, - indexName: indexName, - typeName: typeName, - schema: schema, - settings: settings - }, cb) - } - - /** - * @param options Object (optional) - * @param callback Function - */ - schema.methods.index = function(options, cb){ - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - setIndexNameIfUnset(this.constructor.modelName) - - var index = options.index || indexName - , type = options.type || typeName - - if(bulk) { - /** - * To serialize in bulk it needs the _id - */ - var serialModel = serialize(this, mapping); - serialModel._id = this._id; - - bulkIndex({ - index: index, - type: type, - model: serialModel - }) - setImmediate(cb) - } else { - esClient.index({ - index: index, - type: type, - id: this._id.toString(), - body: serialize(this, mapping) - }, cb) - } - } - - /** - * Unset elastic search index - * @param options - (optional) options for unIndex - * @param callback - callback when unIndex is complete - */ - schema.methods.unIndex = function(options, cb){ - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - setIndexNameIfUnset(this.constructor.modelName) - - options.index = options.index || indexName - options.type = options.type || typeName - options.model = this - options.client = esClient - options.tries = 3 - - if(bulk) - bulkDelete(options, cb) - else - deleteByMongoId(options, cb) - } - - /** - * Delete all documents from a type/index - * @param options - (optional) specify index/type - * @param callback - callback when truncation is complete - */ - schema.statics.esTruncate = function(options, cb) { - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - var index = options.index || indexName - , type = options.type || typeName - - esClient.deleteByQuery({ - index: index, - type: type, - body: { - query: { - match_all: {} - } - } - }, cb) - } - - /** - * Synchronize an existing collection - * - * @param query - query for documents you want to synchronize - */ - schema.statics.synchronize = function(query){ - var em = new events.EventEmitter() - , readyToClose - , closeValues = [] - , counter = 0 - , close = function(){em.emit.apply(em, ['close'].concat(closeValues))} - - //Set indexing to be bulk when synchronizing to make synchronizing faster - bulk = bulk || { - delay: 1000, - size: 1000 - } - - query = query || {} - - setIndexNameIfUnset(this.modelName) - - var stream = this.find(query).stream() - - stream.on('data', function(doc){ - counter++ - doc.save(function(err){ - if (err) - return em.emit('error', err) - - doc.on('es-indexed', function(err, doc){ - counter-- - if(err){ - em.emit('error', err) - }else{ - em.emit('data', null, doc) - } - }) - }) - }) - - stream.on('close', function(a, b){ - closeValues = [a, b] - var closeInterval = setInterval(function() { - if (counter === 0 && bulkBuffer.length === 0) { - clearInterval(closeInterval) - close() - } - }, 1000) - }) - - stream.on('error', function(err){ - em.emit('error', err) - }) - - return em - } - /** - * ElasticSearch search function - * - * @param query - query object to perform search with - * @param options - (optional) 
special search options, such as hydrate - * @param callback - callback called with search results - */ - schema.statics.search = function(query, options, cb){ - if (arguments.length === 2) { - cb = arguments[1] - options = {} - } - - if (query === null) - query = undefined - - var model = this - , esQuery = { - body: {query: query}, - index: options.index || indexName, - type: options.type || typeName - } - - Object.keys(options).forEach(function(opt) { - if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) - esQuery[opt] = options[opt] - }) - - setIndexNameIfUnset(model.modelName) - - esClient.search(esQuery, function(err, res){ - if(err){ - cb(err) - } else { - if (alwaysHydrate || options.hydrate) - hydrate(res, model, options.hydrateOptions || defaultHydrateOptions || {}, cb) - else - cb(null, res) - } - }) - } - - function bulkDelete(options, cb) { - bulkAdd({ - delete: { - _index: options.index || indexName, - _type: options.type || typeName, - _id: options.model._id.toString() - } - }) - cb() - } - - function bulkIndex(options) { - bulkAdd({ - index: { - _index: options.index || indexName, - _type: options.type || typeName, - _id: options.model._id.toString() - } - }) - bulkAdd(options.model) - } - - function clearBulkTimeout() { - clearTimeout(bulkTimeout) - bulkTimeout = undefined - } - - function bulkAdd(instruction) { - bulkBuffer.push(instruction) - - //Return because we need the doc being indexed - //Before we start inserting - if (instruction.index && instruction.index._index) - return - - if(bulkBuffer.length >= (bulk.size || 1000)) { - schema.statics.flush() - clearBulkTimeout() - } else if (bulkTimeout === undefined){ - bulkTimeout = setTimeout(function(){ - schema.statics.flush() - clearBulkTimeout() - }, bulk.delay || 1000) - } - } - - schema.statics.flush = function(cb){ - cb = cb || function(err) { if (err) console.log(err) } - - esClient.bulk({ - body: bulkBuffer - }, function(err) { - cb(err) - }) - bulkBuffer = [] - } - - schema.statics.refresh = function(options, cb){ - if (arguments.length < 2) { - cb = arguments[0] || nop - options = {} - } - - setIndexNameIfUnset(this.modelName) - esClient.indices.refresh({ - index: options.index || indexName - }, cb) - } - - function setIndexNameIfUnset(model){ - var modelName = model.toLowerCase() - if(!indexName){ - indexName = modelName + "s" - } - if(!typeName){ - typeName = modelName - } - } - - - /** - * Use standard Mongoose Middleware hooks - * to persist to Elasticsearch - */ - function setUpMiddlewareHooks(schema) { - schema.post('remove', function(){ - setIndexNameIfUnset(this.constructor.modelName) - - var options = { - index: indexName, - type: typeName, - tries: 3, - model: this, - client: esClient - } - - if(bulk) { - bulkDelete(options, nop) - } else { - deleteByMongoId(options, nop) - } - }) - - /** - * Save in elastic search on save. 
- */ - schema.post('save', function(){ - var model = this - - model.index(function(err, res){ - model.emit('es-indexed', err, res) - }) - }) - } + , generator = new (require('./mapping-generator')) + , serialize = require('./serialize') + , events = require('events') + , mongoose = require('mongoose') + , async = require('async') + , nop = require('nop') + +function Mongoosastic(schema, options) { + var mapping = getMapping(schema) + , indexName = options && options.index + , typeName = options && options.type + , alwaysHydrate = options && options.hydrate + , defaultHydrateOptions = options && options.hydrateOptions + , _mapping = null + , host = options.host + , port = options.port + , protocol = options.protocol + , auth = options.auth + , bulk = options.bulk + , bulkBuffer = [] + , bulkTimeout + + this.esClient = this.esClient || new elasticsearch.Client({ + host: { + host: host, + port: port, + protocol: protocol, + auth: auth + } + }); + var esClient = this.esClient; + setUpMiddlewareHooks(schema) + + /** + * ElasticSearch Client + */ + schema.statics.esClient = esClient + + /** + * Create the mapping. Takes an optional settings parameter and a callback that will be called once + * the mapping is created + + * @param settings Object (optional) + * @param callback Function + */ + schema.statics.createMapping = function (settings, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + settings = undefined + } + + setIndexNameIfUnset(this.modelName) + + createMappingIfNotPresent({ + client: esClient, + indexName: indexName, + typeName: typeName, + schema: schema, + settings: settings + }, cb) + } + + /** + * @param options Object (optional) + * @param callback Function + */ + schema.methods.index = function (options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + //Automatically match the index and type to the model name + setIndexNameIfUnset(this.constructor.modelName) + + var index = options.index || indexName + , type = options.type || typeName + ,serialModel = serialize(this, mapping); + if (bulk) { + /** + * To serialize in bulk it needs the _id + */ + serialModel._id = this._id; + bulkIndex({ + index: index, + type: type, + model: serialModel + }) + setImmediate(cb) + } else { + esClient.index({ + index: index, + type: type, + id: this._id.toString(), + body: serialModel + }, cb) + } + } + + /** + * Unset elastic search index + * @param options - (optional) options for unIndex + * @param callback - callback when unIndex is complete + */ + schema.methods.unIndex = function (options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + setIndexNameIfUnset(this.constructor.modelName) + + options.index = options.index || indexName + options.type = options.type || typeName + options.model = this + options.client = esClient + options.tries = 3 + + if (bulk) + bulkDelete(options, cb) + else + deleteByMongoId(options, cb) + } + + /** + * Delete all documents from a type/index + * @param options - (optional) specify index/type + * @param callback - callback when truncation is complete + */ + schema.statics.esTruncate = function (options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + var index = options.index || indexName + , type = options.type || typeName + + esClient.deleteByQuery({ + index: index, + type: type, + body: { + query: { + match_all: {} + } + } + }, cb) + } + + /** + * Synchronize an existing collection + * + * @param query - query for documents you want to synchronize + */ +
schema.statics.synchronize = function (query) { + var em = new events.EventEmitter() + , readyToClose + , closeValues = [] + , counter = 0 + , close = function () { + em.emit.apply(em, ['close'].concat(closeValues)) + } + + //Set indexing to be bulk when synchronizing to make synchronizing faster + bulk = bulk || { + delay: 1000, + size: 1000 + } + + query = query || {} + + setIndexNameIfUnset(this.modelName) + + var stream = this.find(query).stream() + + stream.on('data', function (doc) { + counter++ + doc.save(function (err) { + if (err) + return em.emit('error', err) + + doc.on('es-indexed', function (err, doc) { + counter-- + if (err) { + em.emit('error', err) + } else { + em.emit('data', null, doc) + } + }) + }) + }) + + stream.on('close', function (a, b) { + closeValues = [a, b] + var closeInterval = setInterval(function () { + if (counter === 0 && bulkBuffer.length === 0) { + clearInterval(closeInterval) + close() + } + }, 1000) + }) + + stream.on('error', function (err) { + em.emit('error', err) + }) + + return em + } + /** + * ElasticSearch search function + * + * @param query - query object to perform search with + * @param options - (optional) special search options, such as hydrate + * @param callback - callback called with search results + */ + schema.statics.search = function (query, options, cb) { + if (arguments.length === 2) { + cb = arguments[1] + options = {} + } + + if (query === null) + query = undefined + + setIndexNameIfUnset(this.modelName) + + var model = this + , esQuery = { + body: {query: query}, + index: options.index || indexName, + type: options.type || typeName + } + + + Object.keys(options).forEach(function (opt) { + if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) + esQuery[opt] = options[opt] + }) + + esClient.search(esQuery, function (err, res) { + if (err) { + cb(err) + } else { + if (alwaysHydrate || options.hydrate) { + hydrate(res,options.hydrateOptions || defaultHydrateOptions || {}, cb) + } else { + cb(null, res) + } + } + }) + } + + function bulkDelete(options, cb) { + bulkAdd({ + delete: { + _index: options.index || indexName, + _type: options.type || typeName, + _id: options.model._id.toString() + } + }) + cb() + } + + function bulkIndex(options) { + bulkAdd({ + index: { + _index: options.index || indexName, + _type: options.type || typeName, + _id: options.model._id.toString() + } + }) + bulkAdd(options.model) + } + + function clearBulkTimeout() { + clearTimeout(bulkTimeout) + bulkTimeout = undefined + } + + function bulkAdd(instruction) { + bulkBuffer.push(instruction) + + //Return because we need the doc being indexed + //Before we start inserting + if (instruction.index && instruction.index._index) + return + + if (bulkBuffer.length >= (bulk.size || 1000)) { + schema.statics.flush() + clearBulkTimeout() + } else if (bulkTimeout === undefined) { + bulkTimeout = setTimeout(function () { + schema.statics.flush() + clearBulkTimeout() + }, bulk.delay || 1000) + } + } + + schema.statics.flush = function (cb) { + cb = cb || function (err) { + if (err) console.log(err) + } + + esClient.bulk({ + body: bulkBuffer + }, function (err) { + cb(err) + }) + bulkBuffer = [] + } + + schema.statics.refresh = function (options, cb) { + if (arguments.length < 2) { + cb = arguments[0] || nop + options = {} + } + + setIndexNameIfUnset(this.modelName) + esClient.indices.refresh({ + index: options.index || indexName + }, cb) + } + + function setIndexNameIfUnset(model) { + var modelName = model.toLowerCase() + if (!indexName) { + indexName = modelName + "s" 
+ } + if (!typeName) { + typeName = modelName + } + } + + + /** + * Use standard Mongoose Middleware hooks + * to persist to Elasticsearch + */ + function setUpMiddlewareHooks(schema) { + schema.post('remove', function () { + setIndexNameIfUnset(this.constructor.modelName) + + var options = { + index: indexName, + type: typeName, + tries: 3, + model: this, + client: esClient + } + + if (bulk) { + bulkDelete(options, nop) + } else { + deleteByMongoId(options, nop) + } + }) + + /** + * Save in elastic search on save. + */ + schema.post('save', function () { + var model = this + + model.index(function (err, res) { + model.emit('es-indexed', err, res) + }) + }) + } } + +module.exports = { + connect: function (options) { + var host = options && options.host || 'localhost', + port = options && options.port || 9200, + protocol = options && options.protocol || 'http', + auth = options && options.auth ? options.auth : null; + + this.esClient = this.esClient || new elasticsearch.Client({ + host: { + host: host, + port: port, + protocol: protocol, + auth: auth + } + }); + }, + /** + * 全局性的搜索,可以搜索多索引,多类型,同时也能hydrate + */ + search:function(query, options, cb){ + var _this = this; + if (arguments.length === 2) { + cb = arguments[1] + options = {} + } + + if (query === null) + query = undefined + + var model = this + , esQuery = { + body: {query: query}, + index: options.index || "", + type: options.type || "" + } + + Object.keys(options).forEach(function (opt) { + if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) + esQuery[opt] = options[opt] + }) + + esClient.search(esQuery, function (err, res) { + if (err) { + cb(err) + } else { + if (options.hydrate) { + hydrate(res,options || {}, cb) + } else { + cb(null, res) + } + } + }) + }, + plugin: function(options){ + var _this = this; + options = options || {}; + options.host = options && options.host ? options.host : 'localhost' + options.protocol = options && options.protocol ? options.protocol : 'http' + options.auth = options && options.auth ? 
options.auth : null + options.bulk = options && options.bulk + return function(schema,_options){ + _options = _options || {}; + _this.options = extend(options,_options); + return Mongoosastic(schema,_this.options); + } + } +}; + +function extend(target) { + var src + for (var i = 1, l = arguments.length; i < l; i++) { + src = arguments[i] + for (var k in src) target[k] = src[k] + } + return target +} + function createMappingIfNotPresent(options, cb) { - var client = options.client - , indexName = options.indexName - , typeName = options.typeName - , schema = options.schema - , settings = options.settings - - generator.generateMapping(schema, function(err, mapping) { - var completeMapping = {} - completeMapping[typeName] = mapping - client.indices.exists({index: indexName}, function(err, exists) { - if (err) - return cb(err) - - if (exists) { - client.indices.putMapping({ - index: indexName, - type: typeName, - body: completeMapping - }, cb) - } else { - client.indices.create({index: indexName, body: settings}, function(err) { - if (err) - return cb(err) - - client.indices.putMapping({ - index: indexName, - type: typeName, - body: completeMapping - }, cb) - }) - } - }) - }) + var client = options.client + , indexName = options.indexName + , typeName = options.typeName + , schema = options.schema + , settings = options.settings + + generator.generateMapping(schema, function (err, mapping) { + var completeMapping = {} + completeMapping[typeName] = mapping + client.indices.exists({index: indexName}, function (err, exists) { + if (err) + return cb(err) + + if (exists) { + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb) + } else { + client.indices.create({index: indexName, body: settings}, function (err) { + if (err) + return cb(err) + + client.indices.putMapping({ + index: indexName, + type: typeName, + body: completeMapping + }, cb) + }) + } + }) + }) +} + +function hydrate(res,options, cb) { + var results = res.hits + , resultsMap = {} + , ids = {} + , querys = {} + , modelName = "" + , hits = [] + , model + results.hits.forEach(function(a,i){ + var modelName = getModelName(a); + if(modelName) { + resultsMap[modelName] = resultsMap[modelName] || {}; + ids[modelName] = ids[modelName] || []; + resultsMap[modelName][a._id] = i;//记录排序索引 + ids[modelName].push(a._id); + } + }); + + async.eachSeries(Object.keys(resultsMap),function(modelName,callback){ + model = mongoose.model(modelName); + querys[modelName] = model.find({_id:{$in:ids[modelName]}}); + Object.keys(options.hydrateOptions).forEach(function (option) { + querys[modelName][option](options.hydrateOptions[option]) + }) + querys[modelName].exec(function(err, docs){ + if (err) { + return cb(err) + } else { + docs.forEach(function (doc) { + var i = resultsMap[modelName][doc._id] + hits[i] = doc + }); + callback(); + } + }) + },function(){ + results.hits = hits + res.hits = results + cb(null, res) + }); } -function hydrate(res, model, options, cb){ - var results = res.hits - , resultsMap = {} - , ids = results.hits.map(function(a, i){ - resultsMap[a._id] = i - return a._id - }) - , query = model.find({_id:{$in:ids}}) - - // Build Mongoose query based on hydrate options - // Example: {lean: true, sort: '-name', select: 'address name'} - Object.keys(options).forEach(function(option){ - query[option](options[option]) - }) - - query.exec(function(err, docs){ - if(err) { - return cb(err) - } else { - var hits = [] - - docs.forEach(function(doc) { - var i = resultsMap[doc._id] - hits[i] = doc - }) - 
results.hits = hits - res.hits = results - cb(null, res) - } - }) +function getModelName(es_item){ + if(!es_item || !es_item._type) return; + var names = mongoose.modelNames(), + res=""; + names.forEach(function(name){ + if(es_item._type === name.toLowerCase()){ + res = name; + return false; + } + }); + return res; } -function getMapping(schema){ - var retMapping = {} - generator.generateMapping(schema, function(err, mapping){ - retMapping = mapping - }) - return retMapping +function getMapping(schema) { + var retMapping = {} + generator.generateMapping(schema, function (err, mapping) { + retMapping = mapping + }) + return retMapping } -function deleteByMongoId(options, cb){ - var index = options.index - , type = options.type - , client = options.client - , model = options.model - , tries = options.tries - - client.delete({ - index: index, - type: type, - id: model._id.toString() - }, function(err, res){ - if(err && err.message.indexOf('404') > -1){ - setTimeout(function(){ - if(tries <= 0) { - return cb(err) - } else { - options.tries = --tries - deleteByMongoId(options, cb) - } - }, 500) - }else{ - model.emit('es-removed', err, res) - cb(err) - } - }) +function deleteByMongoId(options, cb) { + var index = options.index + , type = options.type + , client = options.client + , model = options.model + , tries = options.tries + + client.delete({ + index: index, + type: type, + id: model._id.toString() + }, function (err, res) { + if (err && err.message.indexOf('404') > -1) { + setTimeout(function () { + if (tries <= 0) { + return cb(err) + } else { + options.tries = --tries + deleteByMongoId(options, cb) + } + }, 500) + } else { + model.emit('es-removed', err, res) + cb(err) + } + }) } diff --git a/lib/serialize.js b/lib/serialize.js old mode 100755 new mode 100644 diff --git a/package.json b/package.json old mode 100755 new mode 100644 index ed595417..0b9613ad --- a/package.json +++ b/package.json @@ -1,5 +1,9 @@ { - "author": "James R. Carr (http://blog.james-carr.org)", + "author": { + "name": "James R. 
Carr", + "email": "james.r.carr@gmail.com", + "url": "http://blog.james-carr.org" + }, "name": "mongoosastic", "description": "A mongoose plugin that indexes models into elastic search", "version": "2.0.6", @@ -11,7 +15,7 @@ ], "repository": { "type": "git", - "url": "git://github.com/mongoosastic/mongoosastic" + "url": "https://github.com/janryWang/mongoosastic" }, "main": "lib/mongoosastic.js", "dependencies": { @@ -32,5 +36,34 @@ }, "scripts": { "test": "mocha -R spec -t 20000 -b" - } + }, + "gitHead": "a1c25990cca9717497c0e706d7ac64b5ed204819", + "bugs": { + "url": "https://github.com/mongoosastic/mongoosastic/issues" + }, + "homepage": "https://github.com/mongoosastic/mongoosastic", + "_id": "mongoosastic@2.0.6", + "_shasum": "8c86d2e396cf110abbcc9ef3e041818a1b4f9788", + "_from": "mongoosastic@", + "_npmVersion": "2.0.0", + "_npmUser": { + "name": "taterbase", + "email": "taterbase@gmail.com" + }, + "maintainers": [ + { + "name": "jamescarr", + "email": "james.r.carr@gmail.com" + }, + { + "name": "taterbase", + "email": "shankga@gmail.com" + } + ], + "dist": { + "shasum": "8c86d2e396cf110abbcc9ef3e041818a1b4f9788", + "tarball": "http://registry.npmjs.org/mongoosastic/-/mongoosastic-2.0.6.tgz" + }, + "directories": {}, + "_resolved": "https://registry.npmjs.org/mongoosastic/-/mongoosastic-2.0.6.tgz" } diff --git a/test/alternative-index-method-test.js b/test/alternative-index-method-test.js index 5c4e9895..72534caf 100755 --- a/test/alternative-index-method-test.js +++ b/test/alternative-index-method-test.js @@ -5,7 +5,6 @@ var mongoose = require('mongoose') , ObjectId = Schema.ObjectId , mongoosastic = require('../lib/mongoosastic') , Tweet = require('./models/tweet'); - describe('Index Method', function(){ before(function(done){ mongoose.connect(config.mongoUrl, function(){ diff --git a/test/boost-field-test.js b/test/boost-field-test.js index 26b8290c..70b84b86 100755 --- a/test/boost-field-test.js +++ b/test/boost-field-test.js @@ -14,7 +14,7 @@ var TweetSchema = new Schema({ , title: {type:String, es_boost:2.0} }); -TweetSchema.plugin(mongoosastic); +TweetSchema.plugin(mongoosastic.plugin()); var BlogPost = mongoose.model('BlogPost', TweetSchema); describe('Add Boost Option Per Field', function(){ diff --git a/test/bulk-test.js b/test/bulk-test.js index 031fd3a3..b581ba91 100755 --- a/test/bulk-test.js +++ b/test/bulk-test.js @@ -9,7 +9,7 @@ var mongoose = require('mongoose'), var BookSchema = new Schema({ title: String }); -BookSchema.plugin(mongoosastic, { +BookSchema.plugin(mongoosastic.plugin(), { bulk: { size: 10, delay: 100 diff --git a/test/config.js b/test/config.js index 9943b81a..f658a0f6 100755 --- a/test/config.js +++ b/test/config.js @@ -4,7 +4,7 @@ var esClient = new(require('elasticsearch').Client) const INDEXING_TIMEOUT = 1100; module.exports = { - mongoUrl: 'mongodb://localhost/es-test' + mongoUrl: 'mongodb://judicy:sa@localhost:27017/es-test' , indexingTimeout: INDEXING_TIMEOUT , deleteIndexIfExists: function(indexes, done){ async.forEach(indexes, function(index, cb){ diff --git a/test/geo-test.js b/test/geo-test.js index 567ff413..31a888df 100755 --- a/test/geo-test.js +++ b/test/geo-test.js @@ -31,7 +31,7 @@ describe('GeoTest', function(){ } }); - GeoSchema.plugin(mongoosastic); + GeoSchema.plugin(mongoosastic.plugin()); GeoModel = mongoose.model('geodoc', GeoSchema); GeoModel.createMapping(function(err, mapping){ diff --git a/test/index-test.js b/test/index-test.js index fd9c6308..55c71dc2 100755 --- a/test/index-test.js +++ b/test/index-test.js @@ -15,7 +15,7 
@@ var TalkSchema = new Schema({ , abstract: {type:String, es_indexed:true} , bio: String }); -TalkSchema.plugin(mongoosastic) +TalkSchema.plugin(mongoosastic.plugin()) var Talk = mongoose.model("Talk", TalkSchema); @@ -28,7 +28,7 @@ var PersonSchema = new Schema({ , died: {type: Number, es_indexed:true} } }); -PersonSchema.plugin(mongoosastic, { +PersonSchema.plugin(mongoosastic.plugin(), { index:'people' , type: 'dude' , hydrate: true @@ -356,7 +356,7 @@ describe('indexing', function(){ var BumSchema = new Schema({ name: String }); - BumSchema.plugin(mongoosastic, { + BumSchema.plugin(mongoosastic.plugin(), { index: 'ms_sample' , type: 'bum' }); diff --git a/test/models/tweet.js b/test/models/tweet.js index f991c3ad..50105f39 100755 --- a/test/models/tweet.js +++ b/test/models/tweet.js @@ -10,6 +10,6 @@ var TweetSchema = new Schema({ , message: String }); -TweetSchema.plugin(mongoosastic) +TweetSchema.plugin(mongoosastic.plugin()) module.exports = mongoose.model('Tweet', TweetSchema); diff --git a/test/search-features-test.js b/test/search-features-test.js index 993de91e..9bfc087b 100755 --- a/test/search-features-test.js +++ b/test/search-features-test.js @@ -12,7 +12,7 @@ var BondSchema = new Schema({ , price: Number }); -BondSchema.plugin(mongoosastic); +BondSchema.plugin(mongoosastic.plugin()); var Bond = mongoose.model('Bond', BondSchema); diff --git a/test/synchronize-test.js b/test/synchronize-test.js index 45eeb25b..776221d7 100755 --- a/test/synchronize-test.js +++ b/test/synchronize-test.js @@ -9,7 +9,7 @@ var mongoose = require('mongoose') var BookSchema = new Schema({ title: String }); -BookSchema.plugin(mongoosastic); +BookSchema.plugin(mongoosastic.plugin()); var Book = mongoose.model('Book', BookSchema); diff --git a/test/truncate-test.js b/test/truncate-test.js index da236202..caa4f923 100755 --- a/test/truncate-test.js +++ b/test/truncate-test.js @@ -9,7 +9,7 @@ var mongoose = require('mongoose'), var DummySchema = new Schema({ text: String }); -DummySchema.plugin(mongoosastic); +DummySchema.plugin(mongoosastic.plugin()); var Dummy = mongoose.model('Dummy', DummySchema); From 8dd2a3431cd551005b553146bf715ad3e3276688 Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Sat, 7 Feb 2015 01:33:27 +0800 Subject: [PATCH 82/92] update test/config.js and readme.md --- .idea/workspace.xml | 68 +++++++++++++++++++++++++++++++++++++++++++-- README.md | 40 ++++++++++++++++++++++---- test/config.js | 34 +++++++++++------------ 3 files changed, 117 insertions(+), 25 deletions(-) diff --git a/.idea/workspace.xml b/.idea/workspace.xml index 1dfdc0be..4a7407de 100644 --- a/.idea/workspace.xml +++ b/.idea/workspace.xml @@ -1,7 +1,10 @@ - + + + + + + + @@ -112,14 +153,14 @@ - + - + @@ -154,4 +195,25 @@ + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/README.md b/README.md index f86fdb04..b851e54b 100644 --- a/README.md +++ b/README.md @@ -60,8 +60,13 @@ var User = new Schema({ , email: String , city: String }) +var options = { + host:"localhost", + port:9200, + modelFieldName:'__model_name__' //This property is used to search for the model, in a multi-index and more types of queries,default value is __model_name__ +}; -User.plugin(mongoosastic) +User.plugin(mongoosastic.plugin(options)) ``` This will by default simply use the pluralization of the model name as the index @@ -83,7 +88,7 @@ var User = new Schema({ , city: String }) -User.plugin(mongoosastic) +User.plugin(mongoosastic.plugin()) ``` In this case only the name field @@ -140,7 +145,7 @@ var User = new 
Schema({ , comments: {type:[Comment], es_indexed:true} }) -User.plugin(mongoosastic) +User.plugin(mongoosastic.plugin()) ``` @@ -183,7 +188,7 @@ You can also specify `bulk` options with mongoose which will utilize elasticsear Mongoosastic will wait 1 second (or specified delay) until it has 1000 docs (or specified size) and then perform bulk indexing. ```javascript -BookSchema.plugin(mongoosastic, { +BookSchema.plugin(mongoosastic.plugin(), { bulk: { size: 10, // preferred number of docs to bulk index delay: 100 //milliseconds to wait for enough docs to meet size constraint @@ -483,5 +488,30 @@ var User = new Schema({ , city: String }) -User.plugin(mongoosastic, {hydrate:true, hydrateOptions: {lean: true}}) +User.plugin(mongoosastic.plugin(), {hydrate:true, hydrateOptions: {lean: true}}) ``` + + +###Populating + +```javascript +var mongoosastic = require('mongoosastic'); + +mongoosastic.search({ + match_all:{} +},{ + index:["articles","videos","musics","gallerys"], + type:["article","video","music","gallery"], + hydrate:true, + hydrateOptions:{ + populate:"tags catgories ..." + } +}, function(err, results) { + if(err){ + console.log(err); + } else { + console.log(results.hits); + } +}); + +``` \ No newline at end of file diff --git a/test/config.js b/test/config.js index f658a0f6..7e3f3c5d 100755 --- a/test/config.js +++ b/test/config.js @@ -1,26 +1,26 @@ var esClient = new(require('elasticsearch').Client) - , async = require('async'); + , async = require('async'); const INDEXING_TIMEOUT = 1100; module.exports = { - mongoUrl: 'mongodb://judicy:sa@localhost:27017/es-test' + mongoUrl: 'mongodb://localhost/es-test' , indexingTimeout: INDEXING_TIMEOUT , deleteIndexIfExists: function(indexes, done){ - async.forEach(indexes, function(index, cb){ - esClient.indices.exists({ - index: index - }, function(err, exists){ - if(exists){ - esClient.indices.delete({ - index: index - }, cb); - }else{ - cb(); - } - }); - }, done); - } + async.forEach(indexes, function(index, cb){ + esClient.indices.exists({ + index: index + }, function(err, exists){ + if(exists){ + esClient.indices.delete({ + index: index + }, cb); + }else{ + cb(); + } + }); + }, done); + } , createModelAndEnsureIndex: createModelAndEnsureIndex }; @@ -31,4 +31,4 @@ function createModelAndEnsureIndex(model, obj, cb){ setTimeout(cb, INDEXING_TIMEOUT); }); }); -} +} \ No newline at end of file From 4f7fb4365bb211b8affab1bee5530c1a78c7febd Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Sat, 7 Feb 2015 01:36:17 +0800 Subject: [PATCH 83/92] update reademe.md --- .idea/workspace.xml | 31 +++++++++++++++---------------- README.md | 3 +-- 2 files changed, 16 insertions(+), 18 deletions(-) diff --git a/.idea/workspace.xml b/.idea/workspace.xml index 4a7407de..7e49f98e 100644 --- a/.idea/workspace.xml +++ b/.idea/workspace.xml @@ -3,7 +3,6 @@ - @@ -27,23 +26,23 @@ - + - - + + - + - + @@ -58,8 +57,8 @@ @@ -196,21 +195,21 @@ - - - - + - - + + - + + + + - - + + diff --git a/README.md b/README.md index b851e54b..8f8d5e96 100644 --- a/README.md +++ b/README.md @@ -62,8 +62,7 @@ var User = new Schema({ }) var options = { host:"localhost", - port:9200, - modelFieldName:'__model_name__' //This property is used to search for the model, in a multi-index and more types of queries,default value is __model_name__ + port:9200 }; User.plugin(mongoosastic.plugin(options)) From f1af9ccd35e70f08a5e3c49765ac8973ae8edc29 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E7=8E=8B=E6=99=BA=E5=8A=9B?= Date: Wed, 8 Apr 2015 15:08:44 +0800 Subject: [PATCH 84/92] Update 
mongoosastic.js --- lib/mongoosastic.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 49d91ff7..c3f4f3cc 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -371,6 +371,7 @@ function Mongoosastic(schema, options) { module.exports = { + mongoose:null, connect: function (options) { var host = options && options.host || 'localhost', port = options && options.port || 9200, @@ -395,7 +396,7 @@ module.exports = { cb = arguments[1] options = {} } - + mongoose = this.mongoose || mongoose; if (query === null) query = undefined @@ -500,7 +501,6 @@ function hydrate(res,options, cb) { ids[modelName].push(a._id); } }); - async.eachSeries(Object.keys(resultsMap),function(modelName,callback){ model = mongoose.model(modelName); querys[modelName] = model.find({_id:{$in:ids[modelName]}}); @@ -513,7 +513,7 @@ function hydrate(res,options, cb) { } else { docs.forEach(function (doc) { var i = resultsMap[modelName][doc._id] - hits[i] = doc + hits[i] = Object.create(doc); }); callback(); } From 893e630f79bf80a432b709dab565738fae3adc9e Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Wed, 15 Apr 2015 01:12:48 +0800 Subject: [PATCH 85/92] bug fixed --- .gitignore | 32 +++- .idea/.name | 1 - .idea/encodings.xml | 4 - .idea/misc.xml | 4 - .idea/modules.xml | 8 - .idea/mongoosastic.iml | 8 - .idea/scopes/scope_settings.xml | 5 - .idea/vcs.xml | 6 - .idea/workspace.xml | 218 ---------------------- lib/mapping-generator.js | 319 ++++++++++++++++---------------- lib/mongoosastic.js | 144 +++++++++----- lib/serialize.js | 82 ++++---- 12 files changed, 328 insertions(+), 503 deletions(-) delete mode 100644 .idea/.name delete mode 100644 .idea/encodings.xml delete mode 100644 .idea/misc.xml delete mode 100644 .idea/modules.xml delete mode 100644 .idea/mongoosastic.iml delete mode 100644 .idea/scopes/scope_settings.xml delete mode 100644 .idea/vcs.xml delete mode 100644 .idea/workspace.xml diff --git a/.gitignore b/.gitignore index 9daa8247..1e700d61 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,32 @@ -.DS_Store +# Logs +logs +*.log + +# Runtime data +pids +*.pid +*.seed + +# Directory for instrumented libs generated by jscoverage/JSCover +lib-cov + +# Coverage directory used by tools like istanbul +coverage + +# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) +.grunt + +# Compiled binary addons (http://nodejs.org/api/addons.html) +build/Release + +# Dependency directory +# Deployed apps should consider commenting this line out: +# see https://npmjs.org/doc/faq.html#Should-I-check-my-node_modules-folder-into-git node_modules + +# Ignore .env configuration files +.env + +# Ignore .DS_Store files on OS X +.DS_Store +.idea diff --git a/.idea/.name b/.idea/.name deleted file mode 100644 index dea147d2..00000000 --- a/.idea/.name +++ /dev/null @@ -1 +0,0 @@ -mongoosastic \ No newline at end of file diff --git a/.idea/encodings.xml b/.idea/encodings.xml deleted file mode 100644 index d8210482..00000000 --- a/.idea/encodings.xml +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/misc.xml b/.idea/misc.xml deleted file mode 100644 index 8662aa97..00000000 --- a/.idea/misc.xml +++ /dev/null @@ -1,4 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/modules.xml b/.idea/modules.xml deleted file mode 100644 index 44eefacb..00000000 --- a/.idea/modules.xml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/.idea/mongoosastic.iml 
b/.idea/mongoosastic.iml deleted file mode 100644 index c956989b..00000000 --- a/.idea/mongoosastic.iml +++ /dev/null @@ -1,8 +0,0 @@ - - - - - - - - \ No newline at end of file diff --git a/.idea/scopes/scope_settings.xml b/.idea/scopes/scope_settings.xml deleted file mode 100644 index 922003b8..00000000 --- a/.idea/scopes/scope_settings.xml +++ /dev/null @@ -1,5 +0,0 @@ - - - - \ No newline at end of file diff --git a/.idea/vcs.xml b/.idea/vcs.xml deleted file mode 100644 index 94a25f7f..00000000 --- a/.idea/vcs.xml +++ /dev/null @@ -1,6 +0,0 @@ - - - - - - \ No newline at end of file diff --git a/.idea/workspace.xml b/.idea/workspace.xml deleted file mode 100644 index 7e49f98e..00000000 --- a/.idea/workspace.xml +++ /dev/null @@ -1,218 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - true - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - 1423243009585 - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - \ No newline at end of file diff --git a/lib/mapping-generator.js b/lib/mapping-generator.js index ed384126..2950fc0c 100644 --- a/lib/mapping-generator.js +++ b/lib/mapping-generator.js @@ -1,17 +1,15 @@ -function Generator(){ +function Generator() { } -Generator.prototype.generateMapping = function(schema, cb){ - var cleanTree = getCleanTree(schema.tree, schema.paths, ''); - delete cleanTree[schema.get('versionKey')]; - var mapping = getMapping(cleanTree, ''); - cb(null, { properties: mapping }); +Generator.prototype.generateMapping = function(schema, cb) { + var cleanTree = getCleanTree(schema.tree, schema.paths, ''); + delete cleanTree[schema.get('versionKey')]; + var mapping = getMapping(cleanTree, ''); + cb(null, {properties: mapping}); }; module.exports = Generator; - - // // Generates the mapping // @@ -22,67 +20,65 @@ module.exports = Generator; // @return the mapping // function getMapping(cleanTree, prefix) { - var mapping = {}, - value = {}, - implicitFields = [], - hasEs_index = false; - - if (prefix !== '') { - prefix = prefix + '.'; - } - - for (var field in cleanTree) { - value = cleanTree[field]; - mapping[field] = {}; - mapping[field].type = value.type; - - // Check if field was explicity indexed, if not keep track implicitly - if(value.es_indexed) { - hasEs_index = true; - } else if (value.type) { - implicitFields.push(field); - } - - - // If there is no type, then it's an object with subfields. - if (!value.type) { - mapping[field].type = 'object'; - mapping[field].properties = getMapping(value, prefix + field); - continue; - } - - // If it is a objectid make it a string. - if (value.type === 'objectid') { - mapping[field].type = 'string'; - // do not continue here so we can handle other es_ options - } - - //If indexing a number, and no es_type specified, default to double - if (value.type === 'number' && value['es_type'] === undefined) { - mapping[field].type = 'double'; - continue; - } - - // Else, it has a type and we want to map that! 
- for (var prop in value) { - // Map to field if it's an Elasticsearch option - if (prop.indexOf('es_') === 0 && prop !== 'es_indexed') { - mapping[field][prop.replace(/^es_/, '')] = value[prop]; - } - } - } - - //If one of the fields was explicitly indexed, delete all implicit fields - if (hasEs_index) { - implicitFields.forEach(function(field) { - delete mapping[field]; - }); - } - - return mapping; + var mapping = {}, + value, + implicitFields = [], + hasEsIndex = false; + + if (prefix !== '') { + prefix = prefix + '.'; + } + + for (var field in cleanTree) { + value = cleanTree[field]; + mapping[field] = {}; + mapping[field].type = value.type; + + // Check if field was explicity indexed, if not keep track implicitly + if (value.es_indexed) { + hasEsIndex = true; + } else if (value.type) { + implicitFields.push(field); + } + + // If there is no type, then it's an object with subfields. + if (!value.type) { + mapping[field].type = 'object'; + mapping[field].properties = getMapping(value, prefix + field); + continue; + } + + // If it is a objectid make it a string. + if (value.type === 'objectid') { + // do not continue here so we can handle other es_ options + mapping[field].type = 'string'; + } + + //If indexing a number, and no es_type specified, default to double + if (value.type === 'number' && value.es_type === undefined) { + mapping[field].type = 'double'; + continue; + } + + // Else, it has a type and we want to map that! + for (var prop in value) { + // Map to field if it's an Elasticsearch option + if (prop.indexOf('es_') === 0 && prop !== 'es_indexed') { + mapping[field][prop.replace(/^es_/, '')] = value[prop]; + } + } + } + + //If one of the fields was explicitly indexed, delete all implicit fields + if (hasEsIndex) { + implicitFields.forEach(function(field) { + delete mapping[field]; + }); + } + + return mapping; } - // // Generates a clean tree // @@ -95,91 +91,92 @@ function getMapping(cleanTree, prefix) { // function getCleanTree(tree, paths, prefix) { - var cleanTree = {}, - type = '', - value = {}; - - if (prefix !== '') { - prefix = prefix + '.'; - } - - for (var field in tree){ - if (prefix === '' && (field === "id" || field === "_id")) { - continue; - } - - type = getTypeFromPaths(paths, prefix + field); - value = tree[field]; - - if(value.es_indexed === false) { - continue; - } - // Field has some kind of type - if (type) { - // If it is an nested schema - if (value[0]) { - // A nested array can contain complex objects - if (paths[field].schema && paths[field].schema.tree && paths[field].schema.paths) { - cleanTree[field] = getCleanTree(paths[field].schema.tree, paths[field].schema.paths, ''); - } else if ( paths[field] && paths[field].caster && paths[field].caster.instance ) { - // Even for simple types the value can be an object if there is other attributes than type - if(typeof value[0] === 'object'){ - cleanTree[field] = value[0]; - } else { - cleanTree[field] = {}; - } - cleanTree[field].type = paths[field].caster.instance.toLowerCase(); - } else if (!paths[field] && prefix) { - if(paths[prefix + field] && paths[prefix + field].caster && paths[prefix + field].caster.instance) { - cleanTree[field] = {type: paths[prefix + field].caster.instance.toLowerCase()}; - } - } else { - cleanTree[field] = { - type:'object' - }; - } - } else if (value === String || value === Object || value === Date || value === Number || value === Boolean || value === Array){ - cleanTree[field] = {}; - cleanTree[field].type = type; - } else { - cleanTree[field] = value; - 
cleanTree[field].type = type; - } - - // It has no type for some reason - } else { - // Because it is an geo_* object!! - if (typeof value === 'object') - { - var key; - var geoFound = false; - for (key in value) { - if (value.hasOwnProperty(key) && /^geo_/.test(key)) { - cleanTree[field] = value[key]; - geoFound = true; - //break; - } - } - if(geoFound) continue - } - - // If it's a virtual type, don't map it - if (typeof value === 'object' && value.getters && value.setters && value.options) { - continue; - } - - // Because it is some other object!! Or we assumed that it is one. - if (typeof value === 'object') { - cleanTree[field] = getCleanTree(value, paths, prefix + field); - } - } - } - - return cleanTree; + var cleanTree = {}, + type = '', + value = {}; + + if (prefix !== '') { + prefix = prefix + '.'; + } + + for (var field in tree) { + if (prefix === '' && (field === 'id' || field === '_id')) { + continue; + } + + type = getTypeFromPaths(paths, prefix + field); + value = tree[field]; + + if (value.es_indexed === false) { + continue; + } + + // Field has some kind of type + if (type) { + // If it is an nested schema + if (value[0]) { + // A nested array can contain complex objects + if (paths[field] && paths[field].schema && paths[field].schema.tree && paths[field].schema.paths) { + cleanTree[field] = getCleanTree(paths[field].schema.tree, paths[field].schema.paths, ''); + } else if (paths[field] && paths[field].caster && paths[field].caster.instance) { + // Even for simple types the value can be an object if there is other attributes than type + if (typeof value[0] === 'object') { + cleanTree[field] = value[0]; + } else { + cleanTree[field] = {}; + } + + cleanTree[field].type = paths[field].caster.instance.toLowerCase(); + } else if (!paths[field] && prefix) { + if (paths[prefix + field] && paths[prefix + field].caster && paths[prefix + field].caster.instance) { + cleanTree[field] = {type: paths[prefix + field].caster.instance.toLowerCase()}; + } + } else { + cleanTree[field] = { + type: 'object' + }; + } + } else if (value === String || value === Object || value === Date || value === Number || value === Boolean || value === Array) { + cleanTree[field] = {}; + cleanTree[field].type = type; + } else { + cleanTree[field] = value; + cleanTree[field].type = type; + } + + // It has no type for some reason + } else { + // Because it is an geo_* object!! + if (typeof value === 'object') { + var key; + var geoFound = false; + for (key in value) { + if (value.hasOwnProperty(key) && /^geo_/.test(key)) { + cleanTree[field] = value[key]; + geoFound = true; + } + } + + if (geoFound) { + continue; + } + } + + // If it's a virtual type, don't map it + if (typeof value === 'object' && value.getters && value.setters && value.options) { + continue; + } + + // Because it is some other object!! Or we assumed that it is one. 
+ if (typeof value === 'object') { + cleanTree[field] = getCleanTree(value, paths, prefix + field); + } + } + } + + return cleanTree; } - - // // Get type from the mongoose schema // @@ -190,19 +187,19 @@ function getCleanTree(tree, paths, prefix) { // @return the type or false // function getTypeFromPaths(paths, field) { - var type = false; + var type = false; - if (paths[field] && paths[field].options.type === Date) { - return 'date'; - } + if (paths[field] && paths[field].options.type === Date) { + return 'date'; + } - if (paths[field] && paths[field].options.type === Boolean) { - return 'boolean'; - } + if (paths[field] && paths[field].options.type === Boolean) { + return 'boolean'; + } - if (paths[field]) { - type = paths[field].instance ? paths[field].instance.toLowerCase() : 'object'; - } + if (paths[field]) { + type = paths[field].instance ? paths[field].instance.toLowerCase() : 'object'; + } - return type; + return type; } diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 49d91ff7..4735557c 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -5,6 +5,7 @@ var elasticsearch = require('elasticsearch') , mongoose = require('mongoose') , async = require('async') , nop = require('nop') + , esClient function Mongoosastic(schema, options) { var mapping = getMapping(schema) @@ -16,11 +17,10 @@ function Mongoosastic(schema, options) { , host = options.host , port = options.port , protocol = options.protocol - , auth = options.auth + , auth = options.auth , bulk = options.bulk , bulkBuffer = [] , bulkTimeout - this.esClient = this.esClient || new elasticsearch.Client({ host: { host: host, @@ -29,7 +29,7 @@ function Mongoosastic(schema, options) { auth: auth } }); - var esClient = this.esClient; + esClient = this.esClient; setUpMiddlewareHooks(schema) /** @@ -66,35 +66,46 @@ function Mongoosastic(schema, options) { * @param callback Function */ schema.methods.index = function (options, cb) { + var _this = this; if (arguments.length < 2) { cb = arguments[0] || nop options = {} } + //自动匹配index与type setIndexNameIfUnset(this.constructor.modelName) - var index = options.index || indexName , type = options.type || typeName ,serialModel = serialize(this, mapping); - if (bulk) { - /** - * To serialize in bulk it needs the _id - */ - serialModel._id = this._id; - bulkIndex({ - index: index, - type: type, - model: serialModel - }) - setImmediate(cb) - } else { - esClient.index({ - index: index, - type: type, - id: this._id.toString(), - body: serialModel - }, cb) - } + + createMappingIfNotPresent({ + client: esClient, + indexName: index, + typeName: type, + schema: schema + },function(err,res){ + if(err) throw err; + if (bulk) { + /** + * To serialize in bulk it needs the _id + */ + serialModel._id = _this._id; + bulkIndex({ + index: index, + type: type, + model: serialModel + }) + setImmediate(cb) + } else { + esClient.index({ + index: index, + type: type, + id: _this._id.toString(), + body: serialModel + }, cb) + } + + }); } /** @@ -221,9 +232,9 @@ function Mongoosastic(schema, options) { if (query === null) query = undefined - + setIndexNameIfUnset(this.modelName) - + var model = this , esQuery = { body: {query: query}, @@ -236,7 +247,7 @@ function Mongoosastic(schema, options) { if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) esQuery[opt] = options[opt] }) - + esClient.search(esQuery, function (err, res) { if (err) { cb(err) @@ -371,12 +382,13 @@ function Mongoosastic(schema, options) { module.exports = { + mongoose:null, connect: function (options) { var host = 
options && options.host || 'localhost', port = options && options.port || 9200, protocol = options && options.protocol || 'http', auth = options && options.auth ? options.auth : null; - + this.esClient = this.esClient || new elasticsearch.Client({ host: { host: host, @@ -386,47 +398,85 @@ module.exports = { } }); }, + suggest:function(query, options, cb){ + if (arguments.length === 2) { + cb = arguments[1] + options = {} + } + mongoose = this.mongoose || mongoose; + if (query === null) + query = undefined + + var esQuery = { + body:query, + index: options.index || "", + type: options.type || "" + }; + + esClient.indices.exists({index: options.index},function(err,exists){ + if(!err){ + if(exists){ + esClient.suggest(esQuery, function (err, res) { + if (err) { + cb(err) + } else { + cb(null, res) + } + }) + } else { + cb(null, {}); + } + } + }); + }, /** * 全局性的搜索,可以搜索多索引,多类型,同时也能hydrate */ search:function(query, options, cb){ - var _this = this; if (arguments.length === 2) { cb = arguments[1] options = {} } - + mongoose = this.mongoose || mongoose; if (query === null) query = undefined - var model = this - , esQuery = { - body: {query: query}, - index: options.index || "", - type: options.type || "" - } + var esQuery = { + body:query, + index: options.index || "", + type: options.type || "" + }; Object.keys(options).forEach(function (opt) { if (!opt.match(/hydrate/) && options.hasOwnProperty(opt)) esQuery[opt] = options[opt] }) - esClient.search(esQuery, function (err, res) { - if (err) { - cb(err) - } else { - if (options.hydrate) { - hydrate(res,options || {}, cb) + esClient.indices.exists({index: options.index},function(err,exists){ + if(!err){ + if(exists){ + esClient.search(esQuery, function (err, res) { + if (err) { + cb(err) + } else { + if (options.hydrate) { + hydrate(res,options || {}, cb) + } else { + cb(null, res) + } + } + }) } else { - cb(null, res) + cb(null, {}); } } - }) + }); + }, plugin: function(options){ var _this = this; options = options || {}; - options.host = options && options.host ? options.host : 'localhost' + options.host = options && options.host ? options.host : 'localhost' options.protocol = options && options.protocol ? options.protocol : 'http' options.auth = options && options.auth ? 
options.auth : null options.bulk = options && options.bulk @@ -471,7 +521,6 @@ function createMappingIfNotPresent(options, cb) { client.indices.create({index: indexName, body: settings}, function (err) { if (err) return cb(err) - client.indices.putMapping({ index: indexName, type: typeName, @@ -500,7 +549,6 @@ function hydrate(res,options, cb) { ids[modelName].push(a._id); } }); - async.eachSeries(Object.keys(resultsMap),function(modelName,callback){ model = mongoose.model(modelName); querys[modelName] = model.find({_id:{$in:ids[modelName]}}); @@ -513,7 +561,7 @@ function hydrate(res,options, cb) { } else { docs.forEach(function (doc) { var i = resultsMap[modelName][doc._id] - hits[i] = doc + hits[i] = Object.create(doc); }); callback(); } @@ -526,7 +574,7 @@ function hydrate(res,options, cb) { } function getModelName(es_item){ - if(!es_item || !es_item._type) return; + if(!es_item || !es_item._type) return; var names = mongoose.modelNames(), res=""; names.forEach(function(name){ diff --git a/lib/serialize.js b/lib/serialize.js index 55300db6..384d7f3c 100644 --- a/lib/serialize.js +++ b/lib/serialize.js @@ -1,46 +1,50 @@ module.exports = serialize; function _serializeObject(object, mapping) { - var serialized = {}; - for (var field in mapping.properties) { - var val = serialize.call(object, object[field], mapping.properties[field]); - if (val !== undefined) { - serialized[field] = val; - } - } - return serialized; + var serialized = {}; + for (var field in mapping.properties) { + var val = serialize.call(object, object[field], mapping.properties[field]); + if (val !== undefined) { + serialized[field] = val; + } + } + + return serialized; } function serialize(model, mapping) { - if (mapping.properties && model) { - if (Array.isArray(model)) { - return model.map(function(object) { - return _serializeObject(object, mapping); - }); - } else { - return _serializeObject(model, mapping); - } - } else if (typeof value === 'object' && value !== null) { - var name = value.constructor.name; - if (name === 'ObjectID') { - return value.toString(); - } else if (name === 'Date') { - return new Date(value).toJSON(); - } - } else { - if (mapping.cast && typeof(mapping.cast) !== 'function') - throw new Error('es_cast must be a function'); - model = mapping.cast ? mapping.cast.call(this, model) : model; - if (typeof model === 'object' && model !== null) { - var name = model.constructor.name; - if (name === 'ObjectID') { - return model.toString(); - } else if (name === 'Date') { - return new Date(model).toJSON(); - } - return model; - } else { - return model; - } - } + var name; + + if (mapping.properties && model) { + + if (Array.isArray(model)) { + return model.map(function (object) { + return _serializeObject(object, mapping); + }); + } + + return _serializeObject(model, mapping); + + } + + if (mapping.cast && typeof mapping.cast !== 'function') { + throw new Error('es_cast must be a function'); + } + + model = mapping.cast ? 
mapping.cast.call(this, model) : model; + if (typeof model === 'object' && model !== null) { + name = model.constructor.name; + if (name === 'ObjectID') { + return model.toString(); + } + + if (name === 'Date') { + return new Date(model).toJSON(); + } + + } + + return model; + } + From 126df46116c4411e664ccd2cb5cd96133ccb88cf Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Wed, 15 Apr 2015 23:29:26 +0800 Subject: [PATCH 86/92] bug fixed --- lib/mongoosastic.js | 19 ++++--------------- 1 file changed, 4 insertions(+), 15 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 4735557c..b35c3f47 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -14,20 +14,11 @@ function Mongoosastic(schema, options) { , alwaysHydrate = options && options.hydrate , defaultHydrateOptions = options && options.hydrateOptions , _mapping = null - , host = options.host - , port = options.port - , protocol = options.protocol - , auth = options.auth - , bulk = options.bulk , bulkBuffer = [] , bulkTimeout this.esClient = this.esClient || new elasticsearch.Client({ - host: { - host: host, - port: port, - protocol: protocol, - auth: auth - } + host: options.host || "localhost:9200", + apiVersion:options.apiVersion || '1.0' }); esClient = this.esClient; setUpMiddlewareHooks(schema) @@ -476,10 +467,8 @@ module.exports = { plugin: function(options){ var _this = this; options = options || {}; - options.host = options && options.host ? options.host : 'localhost' - options.protocol = options && options.protocol ? options.protocol : 'http' - options.auth = options && options.auth ? options.auth : null - options.bulk = options && options.bulk + options.host = options && options.host ? options.host : 'localhost:9200' + options.apiVersion = options && options.apiVersion ? 
options.apiVersion : '1.0' return function(schema,_options){ _options = _options || {}; _this.options = extend(options,_options); From e4d456449a71419f1b551c97fbc2ec30cab299a4 Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Thu, 16 Apr 2015 00:03:27 +0800 Subject: [PATCH 87/92] bug fixed --- lib/mongoosastic.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index b35c3f47..fdbf36a1 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -14,8 +14,10 @@ function Mongoosastic(schema, options) { , alwaysHydrate = options && options.hydrate , defaultHydrateOptions = options && options.hydrateOptions , _mapping = null + , bulk = options.bulk , bulkBuffer = [] , bulkTimeout + this.esClient = this.esClient || new elasticsearch.Client({ host: options.host || "localhost:9200", apiVersion:options.apiVersion || '1.0' From 32cd74f54565be5029de8056b72e7fee70d9df67 Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Thu, 16 Apr 2015 01:40:24 +0800 Subject: [PATCH 88/92] bug fixed --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 0b9613ad..3dc3ce0b 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,7 @@ "nop": "^1.0.0" }, "peerDependencies": { - "mongoose": "3.8.x" + "mongoose": "^3.8.22" }, "devDependencies": { "mocha": "*", From 74f74601ec4d886fe34c6e7d8a1ccfec3487c5fa Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Thu, 16 Apr 2015 12:20:19 +0800 Subject: [PATCH 89/92] test --- lib/mongoosastic.js | 27 +++++++++++++++------------ 1 file changed, 15 insertions(+), 12 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index fdbf36a1..752f971c 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -13,7 +13,6 @@ function Mongoosastic(schema, options) { , typeName = options && options.type , alwaysHydrate = options && options.hydrate , defaultHydrateOptions = options && options.hydrateOptions - , _mapping = null , bulk = options.bulk , bulkBuffer = [] , bulkTimeout @@ -70,14 +69,14 @@ function Mongoosastic(schema, options) { var index = options.index || indexName , type = options.type || typeName ,serialModel = serialize(this, mapping); - - createMappingIfNotPresent({ - client: esClient, - indexName: index, - typeName: type, - schema: schema - },function(err,res){ - if(err) throw err; + // + //createMappingIfNotPresent({ + // client: esClient, + // indexName: index, + // typeName: type, + // schema: schema + //},function(err){ + // if(err) throw err; if (bulk) { /** * To serialize in bulk it needs the _id @@ -90,6 +89,12 @@ function Mongoosastic(schema, options) { }) setImmediate(cb) } else { + console.log({ + index: index, + type: type, + id: _this._id.toString(), + body: serialModel + }); esClient.index({ index: index, type: type, @@ -98,7 +103,7 @@ function Mongoosastic(schema, options) { }, cb) } - }); + //}); } /** @@ -158,7 +163,6 @@ function Mongoosastic(schema, options) { */ schema.statics.synchronize = function (query) { var em = new events.EventEmitter() - , readyToClose , closeValues = [] , counter = 0 , close = function () { @@ -528,7 +532,6 @@ function hydrate(res,options, cb) { , resultsMap = {} , ids = {} , querys = {} - , modelName = "" , hits = [] , model results.hits.forEach(function(a,i){ From 253524911c2889ce529318cc0fab74b3a816b2c3 Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Thu, 16 Apr 2015 12:24:11 +0800 Subject: [PATCH 90/92] test --- lib/mongoosastic.js | 24 +++++++++--------------- 1 file changed, 9 insertions(+), 15 deletions(-) 
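For context, a minimal usage sketch of the reworked configuration introduced in the patches above, assuming the module.exports.plugin(options) entry point and the host/apiVersion defaults ('localhost:9200', '1.0') shown in the diffs; the Tweet schema, the es_indexed flag and the connection values below are illustrative placeholders, not taken from these patches:

// Hypothetical example only; assumes mongoosastic is installed alongside
// mongoose and exposes plugin(options) as in the diff above.
var mongoose = require('mongoose');
var mongoosastic = require('mongoosastic');

var TweetSchema = new mongoose.Schema({
  user: String,
  message: { type: String, es_indexed: true }
});

// plugin(options) returns a schema plugin; host and apiVersion are handed
// straight to the elasticsearch.Client constructor and fall back to
// 'localhost:9200' and '1.0' when omitted.
TweetSchema.plugin(mongoosastic.plugin({
  host: 'localhost:9200',
  apiVersion: '1.0'
}));

var Tweet = mongoose.model('Tweet', TweetSchema);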
diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 752f971c..dc9dbdf3 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -69,14 +69,14 @@ function Mongoosastic(schema, options) { var index = options.index || indexName , type = options.type || typeName ,serialModel = serialize(this, mapping); - // - //createMappingIfNotPresent({ - // client: esClient, - // indexName: index, - // typeName: type, - // schema: schema - //},function(err){ - // if(err) throw err; + console.log(serialModel,mapping); + createMappingIfNotPresent({ + client: esClient, + indexName: index, + typeName: type, + schema: schema + },function(err){ + if(err) throw err; if (bulk) { /** * To serialize in bulk it needs the _id @@ -89,12 +89,6 @@ function Mongoosastic(schema, options) { }) setImmediate(cb) } else { - console.log({ - index: index, - type: type, - id: _this._id.toString(), - body: serialModel - }); esClient.index({ index: index, type: type, @@ -103,7 +97,7 @@ function Mongoosastic(schema, options) { }, cb) } - //}); + }); } /** From 760f98f12d09d169f7da2a6eb6a2379cf59b15d0 Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Thu, 16 Apr 2015 12:28:31 +0800 Subject: [PATCH 91/92] test --- lib/mongoosastic.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index dc9dbdf3..65efa08e 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -16,7 +16,8 @@ function Mongoosastic(schema, options) { , bulk = options.bulk , bulkBuffer = [] , bulkTimeout - + console.log(mapping,schema); + this.esClient = this.esClient || new elasticsearch.Client({ host: options.host || "localhost:9200", apiVersion:options.apiVersion || '1.0' @@ -69,7 +70,6 @@ function Mongoosastic(schema, options) { var index = options.index || indexName , type = options.type || typeName ,serialModel = serialize(this, mapping); - console.log(serialModel,mapping); createMappingIfNotPresent({ client: esClient, indexName: index, From f587900a01265f685ccf6dc5c600c70224edfe77 Mon Sep 17 00:00:00 2001 From: Janry Wang Date: Thu, 16 Apr 2015 12:30:23 +0800 Subject: [PATCH 92/92] =?UTF-8?q?=E6=B5=8B=E8=AF=95=E6=97=A0=E8=AF=AF?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- lib/mongoosastic.js | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/mongoosastic.js b/lib/mongoosastic.js index 65efa08e..3d462522 100644 --- a/lib/mongoosastic.js +++ b/lib/mongoosastic.js @@ -16,7 +16,6 @@ function Mongoosastic(schema, options) { , bulk = options.bulk , bulkBuffer = [] , bulkTimeout - console.log(mapping,schema); this.esClient = this.esClient || new elasticsearch.Client({ host: options.host || "localhost:9200",