diff --git a/HISTORY.md b/HISTORY.md
index 1dd567fecef..379a447b8a4 100644
--- a/HISTORY.md
+++ b/HISTORY.md
@@ -1,3 +1,16 @@
+
+## [3.1.1](https://github.com/mongodb/node-mongodb-native/compare/v3.1.0...v3.1.1) (2018-07-05)
+
+
+### Bug Fixes
+
+* **client-ops:** return transform map to map rather than function ([b8b4bfa](https://github.com/mongodb/node-mongodb-native/commit/b8b4bfa))
+* **collection:** correctly shallow clone passed in options ([2e6c4fa](https://github.com/mongodb/node-mongodb-native/commit/2e6c4fa))
+* **collection:** countDocuments throws error when query doesn't match docs ([4e83556](https://github.com/mongodb/node-mongodb-native/commit/4e83556))
+* **server:** remove unnecessary print statement ([20e11b3](https://github.com/mongodb/node-mongodb-native/commit/20e11b3))
+
+
+
 # [3.1.0](https://github.com/mongodb/node-mongodb-native/compare/v3.0.6...v3.1.0) (2018-06-27)
 
 
diff --git a/README.md b/README.md
index 62af88c2471..6768183b11d 100644
--- a/README.md
+++ b/README.md
@@ -15,7 +15,7 @@ The official [MongoDB](https://www.mongodb.com/) driver for Node.js. Provides a
 | what          | where                                                 |
 |---------------|------------------------------------------------|
 | documentation | http://mongodb.github.io/node-mongodb-native          |
-| api-doc       | http://mongodb.github.io/node-mongodb-native/3.0/api  |
+| api-doc       | http://mongodb.github.io/node-mongodb-native/3.1/api  |
 | source        | https://github.com/mongodb/node-mongodb-native        |
 | mongodb       | http://www.mongodb.org                                 |
 
diff --git a/docs/reference/content/reference/ecmascriptnext/crud.md b/docs/reference/content/reference/ecmascriptnext/crud.md
index 150caac9ceb..160b2ba210d 100644
--- a/docs/reference/content/reference/ecmascriptnext/crud.md
+++ b/docs/reference/content/reference/ecmascriptnext/crud.md
@@ -17,7 +17,7 @@ Let's take a look at the CRUD operations from the perspective of ESNext. In this
 This reference also omits methods that no longer make sense when using ESNext such as the `each` and `forEach` methods.
 
 ## Inserting Documents
-The *insertOne* and *insertMany* methods exists on the *Collection* class and is used to insert documents into MongoDB. Code speaks a thousand words so let's see two simple examples of inserting documents.
+The *insertOne* and *insertMany* methods exist on the *Collection* class and are used to insert documents into MongoDB. Code speaks a thousand words so let's see two simple examples of inserting documents.
 
 ```js
 const MongoClient = require('mongodb').MongoClient;
@@ -94,7 +94,7 @@ const dbName = 'myproject';
 That wraps up the *insert* methods. Next let's look at the *update* methods.
 
 ## Updating Documents
-The *updateOne* and *updateMany* methods exists on the *Collection* class and is used to update and upsert documents into MongoDB. Let's look at a couple of usage examples.
+The *updateOne* and *updateMany* methods exist on the *Collection* class and are used to update and upsert documents into MongoDB. Let's look at a couple of usage examples.
 
 ```js
 const MongoClient = require('mongodb').MongoClient;
@@ -114,7 +114,7 @@ const dbName = 'myproject';
     const col = db.collection('updates');
     let r;
 
-    // Insert a single document
+    // Insert multiple documents
     r = await col.insertMany([{a:1}, {a:2}, {a:2}]);
     assert.equal(3, r.insertedCount);
 
@@ -142,7 +142,7 @@ const dbName = 'myproject';
 ```
 
 ## Removing Documents
-The *deleteOne* and *deleteMany* methods exist on the *Collection* class and is used to remove documents from MongoDB. Let's look at a couple of usage examples.
+The *deleteOne* and *deleteMany* methods exist on the *Collection* class and are used to remove documents from MongoDB. Let's look at a couple of usage examples.
 
 ```js
 const MongoClient = require('mongodb').MongoClient;
@@ -164,7 +164,7 @@ const dbName = 'myproject';
     const col = db.collection('removes');
     let r;
 
-    // Insert a single document
+    // Insert multiple documents
     r = await col.insertMany([{a:1}, {a:2}, {a:2}]);
     assert.equal(3, r.insertedCount);
 
@@ -172,7 +172,7 @@ const dbName = 'myproject';
     r = await col.deleteOne({a:1});
     assert.equal(1, r.deletedCount);
 
-    // Update multiple documents
+    // Remove multiple documents
     r = await col.deleteMany({a:2});
     assert.equal(2, r.deletedCount);
   } catch (err) {
@@ -185,7 +185,7 @@ const dbName = 'myproject';
 ```
 
 ## findOneAndUpdate, findOneAndDelete and findOneAndReplace
-The three methods *findOneAndUpdate*, *findOneAndDelete* and *findOneAndReplace* are special commands that allows the user to update or upsert a document and have the modified or existing document returned. It comes at a cost as the operation takes a write lock for the duration of the operation as it needs to ensure the modification is *atomic*. Let's look at *findOneAndUpdate* first using an example.
+The three methods *findOneAndUpdate*, *findOneAndDelete* and *findOneAndReplace* are special commands that allow the user to update or upsert a document and have the modified or existing document returned. They come at a cost because the operations take a write lock for the duration of the operation as they need to ensure the modification is *atomic*. Let's look at *findOneAndUpdate* first using an example.
 
 ```js
 const MongoClient = require('mongodb').MongoClient;
@@ -207,7 +207,7 @@ const dbName = 'myproject';
     const col = db.collection('findAndModify');
     let r;
 
-    // Insert a single document
+    // Insert multiple documents
     r = await col.insert([{a:1}, {a:2}, {a:2}]);
     assert.equal(3, r.result.n);
 
@@ -219,9 +219,6 @@ const dbName = 'myproject';
     });
     assert.equal(1, r.value.b);
 
-    // Remove and return a document
-    r = await col.findOneAndDelete({a:2});
-    assert.ok(r.value.b == null);
   } catch (err) {
     console.log(err.stack);
   }
@@ -231,7 +228,7 @@ const dbName = 'myproject';
 })();
 ```
 
-The *findOneAndDelete* function is a function especially defined to help remove a document. Let's look at an example of usage.
+The *findOneAndDelete* function is especially defined to help remove a document. Let's look at an example of usage.
 
 ```js
 const MongoClient = require('mongodb').MongoClient;
@@ -253,12 +250,12 @@ const dbName = 'myproject';
     const col = db.collection('findAndModify');
     let r;
 
-    // Insert a single document
+    // Insert multiple documents
     r = await col.insert([{a:1}, {a:2}, {a:2}]);
     assert.equal(3, r.result.n);
 
     // Remove a document from MongoDB and return it
-    r = await col.findOneAndDelete({a:1}, {sort: [['a',1]]});
+    r = await col.findOneAndDelete({a:1});
     assert.ok(r.value.b == null);
   } catch (err) {
     console.log(err.stack);
@@ -270,7 +267,7 @@ const dbName = 'myproject';
 ```
 
 ## BulkWrite
-The *bulkWrite* function allows for a simple set of bulk operations to be done in a non fluent way as in comparison to the bulk API discussed next. Let's look at an example.
+The *bulkWrite* function allows for a simple set of bulk operations to be done in a non [fluent](https://en.wikipedia.org/wiki/Fluent_interface) way, as compared with the *bulk* API discussed next. Let's look at an example.
 
 ```js
 const MongoClient = require('mongodb').MongoClient;
@@ -320,7 +317,7 @@ const dbName = 'myproject';
 This covers the basic write operations. Let's have a look at the Bulk write operations next.
 
 ## Bulk Write Operations
-The bulk write operations make it easy to write groups of operations together to MongoDB. There are some caveats and to get the best performance you need to be running against MongoDB *2.6* or higher that support the new write commands. Bulk operations are split into *ordered* and *unordered* bulk operations. An *ordered* bulk operation guarantees the order of execution of writes while the *unordered* bulk operation makes no assumptions about the order of execution. In the Node.js driver the *unordered* bulk operations will group operations according to type and write them in parallel. Let's have a look at how to build an ordered bulk operation.
+The bulk write operations make it easy to write groups of operations together to MongoDB. There are some caveats and to get the best performance you need to be running against MongoDB *2.6* or higher, which supports the new write commands. Bulk operations are split into *ordered* and *unordered* bulk operations. An *ordered* bulk operation guarantees the order of execution of writes while the *unordered* bulk operation makes no assumptions about the order of execution. In the Node.js driver the *unordered* bulk operations will group operations according to type and write them in parallel. Let's have a look at how to build an ordered bulk operation.
 
 ```js
 const MongoClient = require('mongodb').MongoClient;
@@ -369,7 +366,7 @@ const dbName = 'myproject';
 We will not cover the results object here as it's documented in the driver API. The Bulk API handles all the splitting of operations into multiple writes.
 
-There are some important things to keep in mind when using the bulk API and especially the *ordered* bulk API mode. The write commands are single operation type. That means they can only do insert/update and remove. If you f.ex do the following combination of operations.
+There are some important things to keep in mind when using the bulk API and especially the *ordered* bulk API mode. The write commands are insert, update and remove. They will be serially executed in the order they are added, creating a new operation for each switch in types.
 If you have the following series of operations,
 
     Insert {a:1}
     Update {a:1} to {a:1, b:1}
@@ -377,7 +374,7 @@ There are some important things to keep in mind when using the bulk API and espe
     Remove {b:1}
     Insert {a:3}
 
-This will result in the driver issuing 4 write commands to the server.
+This will result in the driver issuing 5 write commands to the server:
 
     Insert Command with {a:1}
     Update Command {a:1} to {a:1, b:1}
@@ -385,7 +382,7 @@ This will result in the driver issuing 4 write commands to the server.
     Remove Command with {b:1}
     Insert Command with {a:3}
 
-If you instead organize your *ordered* in the following manner.
+If instead you order your operations as follows,
 
     Insert {a:1}
     Insert {a:2}
@@ -393,24 +390,24 @@ If you instead organize your *ordered* in the following manner.
     Update {a:1} to {a:1, b:1}
     Remove {b:1}
 
-The number of write commands issued by the driver will be.
+The write commands issued by the driver will be,
 
     Insert Command with {a:1}, {a:2}, {a:3}
     Update Command {a:1} to {a:1, b:1}
     Remove Command with {b:1}
 
-Allowing for more efficient and faster bulk write operation.
+Allowing for a more efficient and faster bulk write operation.
 
 For *unordered* bulk operations this is not important as the driver sorts operations by type and executes them in parallel.
 
 This covers write operations for MongoDB. Let's look at querying for documents next.
 
 ## Read Methods
-The main method for querying the database are the *find* and the *aggregate* method. In this CRUD tutorial we will focus on *find*.
+The main methods for querying the database are *find* and *aggregate*. In this CRUD tutorial we will focus on *find*.
 
-The *method* return a cursor that allows us to operate on the data. The *cursor* also implements the Node.js 0.10.x or higher stream interface allowing us to pipe the results to other streams.
+The *find* method returns a cursor that allows us to operate on the data. The *cursor* also implements the Node.js 0.10.x or higher stream interface, allowing us to pipe the results to other streams.
 
-Let's look at a simple find example that materializes all the documents from a query using the toArray but limits the number of returned results to 2 documents.
+Let's look at a simple *find* example that materializes all the documents from a query using the *toArray* method but limits the number of returned results to 2 documents.
 
 ```js
 const MongoClient = require('mongodb').MongoClient;
@@ -431,7 +428,7 @@ const dbName = 'myproject';
     // Get the collection
     const col = db.collection('find');
 
-    // Insert a single document
+    // Insert multiple documents
     const r = await col.insertMany([{a:1}, {a:1}, {a:1}]);
     assert.equal(3, r.insertedCount);
 
@@ -447,7 +444,7 @@ const dbName = 'myproject';
 })();
 ```
 
-Next lets take a look at the *next* method and how we can iterate over the cursor in ECMAScript 6. The new `async`/`await` commands allow for what is arguably a much cleaner and easier to read iteration code.
+Next let's take a look at the *next* method and how we can iterate over the cursor in ECMAScript 6. The new `async`/`await` commands allow for what is arguably a much cleaner and easier to read iteration code.
 
 ```js
 const MongoClient = require('mongodb').MongoClient;
@@ -468,7 +465,7 @@ const dbName = 'myproject';
     // Get the collection
     const col = db.collection('find');
 
-    // Insert a single document
+    // Insert multiple documents
     const r = await col.insertMany([{a:1}, {a:1}, {a:1}]);
     assert.equal(3, r.insertedCount);
 
@@ -490,7 +487,7 @@ const dbName = 'myproject';
 ```
 
 ## Executing Commands
-The `Db.command` method also returns a `Promise` allowing us to leverage `async`/`await` to get clear and concise code. Below is an example calling the `buildInfo` method.
+The `Db.command` method also returns a `Promise` allowing us to leverage `async`/`await` for clear and concise code. Below is an example calling the `buildInfo` method.
 
 ```js
 const MongoClient = require('mongodb').MongoClient;
diff --git a/lib/collection.js b/lib/collection.js
index f9873359f4a..b838f5e8b92 100644
--- a/lib/collection.js
+++ b/lib/collection.js
@@ -2039,7 +2039,7 @@ Collection.prototype.mapReduce = function(map, reduce, options, callback) {
 };
 
 /**
- * Initiate a Out of order batch write operation. All operations will be buffered into insert/update/remove commands executed out of order.
+ * Initiate an Out of order batch write operation. All operations will be buffered into insert/update/remove commands executed out of order.
  *
  * @method
  * @param {object} [options] Optional settings.
@@ -2056,7 +2056,7 @@ Collection.prototype.initializeUnorderedBulkOp = function(options) {
 };
 
 /**
- * Initiate an In order bulk write operation, operations will be serially executed in the order they are added, creating a new operation for each switch in types.
+ * Initiate an In order bulk write operation. Operations will be serially executed in the order they are added, creating a new operation for each switch in types.
  *
  * @method
  * @param {object} [options] Optional settings.
diff --git a/lib/cursor.js b/lib/cursor.js
index 3e7addd2d6f..6f77982217c 100644
--- a/lib/cursor.js
+++ b/lib/cursor.js
@@ -799,7 +799,7 @@ Cursor.prototype.setReadPreference = function(readPreference) {
 
 /**
  * Returns an array of documents. The caller is responsible for making sure that there
- * is enough memory to store the results. Note that the array only contain partial
+ * is enough memory to store the results. Note that the array only contains partial
  * results when this cursor had been previouly accessed. In that case,
  * cursor.rewind() can be used to reset the cursor.
  * @method
diff --git a/lib/db.js b/lib/db.js
index dbe9f580873..da539b60700 100644
--- a/lib/db.js
+++ b/lib/db.js
@@ -313,7 +313,7 @@ const collectionKeys = [
 Db.prototype.collection = function(name, options, callback) {
   if (typeof options === 'function') (callback = options), (options = {});
   options = options || {};
-  Object.assign({}, options);
+  options = Object.assign({}, options);
 
   // Set the promise library
   options.promiseLibrary = this.s.promiseLibrary;
diff --git a/lib/operations/collection_ops.js b/lib/operations/collection_ops.js
index 9b67d9e5e39..f1e14e8864d 100644
--- a/lib/operations/collection_ops.js
+++ b/lib/operations/collection_ops.js
@@ -234,7 +234,7 @@ function countDocuments(coll, query, options, callback) {
     if (err) return handleCallback(callback, err);
     result.toArray((err, docs) => {
       if (err) handleCallback(err);
-      handleCallback(callback, null, docs[0].n);
+      handleCallback(callback, null, docs.length ? docs[0].n : 0);
     });
   });
 }
diff --git a/lib/operations/mongo_client_ops.js b/lib/operations/mongo_client_ops.js
index a3f6500dc6e..7d56954e279 100644
--- a/lib/operations/mongo_client_ops.js
+++ b/lib/operations/mongo_client_ops.js
@@ -508,10 +508,15 @@ function replayEvents(mongoClient, events) {
   }
 }
 
+const LEGACY_OPTIONS_MAP = validOptionNames.reduce((obj, name) => {
+  obj[name.toLowerCase()] = name;
+  return obj;
+}, {});
+
 function transformUrlOptions(_object) {
   let object = Object.assign({ servers: _object.hosts }, _object.options);
   for (let name in object) {
-    const camelCaseName = validOptionsLowerCaseToCamelCase[name];
+    const camelCaseName = LEGACY_OPTIONS_MAP[name];
     if (camelCaseName) {
       object[camelCaseName] = object[name];
     }
@@ -595,11 +600,4 @@ function validOptions(options) {
   }
 }
 
-function validOptionsLowerCaseToCamelCase() {
-  validOptionNames.reduce((obj, name) => {
-    obj[name.toLowerCase()] = name;
-    return obj;
-  }, {});
-}
-
 module.exports = { connectOp, logout, validOptions };
diff --git a/lib/topologies/server.js b/lib/topologies/server.js
index 103496a1e50..77d8b6e5dc1 100644
--- a/lib/topologies/server.js
+++ b/lib/topologies/server.js
@@ -297,7 +297,6 @@ class Server extends TopologyBase {
       try {
         callback(null, self);
       } catch (err) {
-        console.log(err.stack);
         process.nextTick(function() {
           throw err;
         });
       }
diff --git a/package.json b/package.json
index afcaa6f7fb7..2c7fac70468 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "mongodb",
-  "version": "3.1.0",
+  "version": "3.1.1",
   "description": "The official MongoDB driver for Node.js",
   "main": "index.js",
   "repository": {
diff --git a/test/functional/collection_tests.js b/test/functional/collection_tests.js
index 60699ec921b..18ea41d6ac2 100644
--- a/test/functional/collection_tests.js
+++ b/test/functional/collection_tests.js
@@ -2,6 +2,7 @@
 const test = require('./shared').assert;
 const setupDatabase = require('./shared').setupDatabase;
 const expect = require('chai').expect;
+const MongoClient = require('../..').MongoClient;
 
 describe('Collection', function() {
   before(function() {
@@ -1581,6 +1582,40 @@ describe('Collection', function() {
     }
   });
 
+  it('should correctly perform estimatedDocumentCount on non-matching query', function(done) {
+    const configuration = this.configuration;
+    const client = new MongoClient(configuration.url(), { w: 1 });
+
+    client.connect(function(err, client) {
+      const db = client.db(configuration.db);
+      const collection = db.collection('nonexistent_coll_1');
+      const close = e => client.close(() => done(e));
+
+      Promise.resolve()
+        .then(() => collection.estimatedDocumentCount({ a: 'b' }))
+        .then(count => expect(count).to.equal(0))
+        .then(() => close())
+        .catch(e => close(e));
+    });
+  });
+
+  it('should correctly perform countDocuments on non-matching query', function(done) {
+    const configuration = this.configuration;
+    const client = new MongoClient(configuration.url(), { w: 1 });
+
+    client.connect(function(err, client) {
+      const db = client.db(configuration.db);
+      const collection = db.collection('nonexistent_coll_2');
+      const close = e => client.close(() => done(e));
+
+      Promise.resolve()
+        .then(() => collection.countDocuments({ a: 'b' }))
+        .then(count => expect(count).to.equal(0))
+        .then(() => close())
+        .catch(e => close(e));
+    });
+  });
+
   describe('Retryable Writes on bulk ops', function() {
     const MongoClient = require('../../lib/mongo_client');
 
diff --git a/test/functional/uri_tests.js b/test/functional/uri_tests.js
index 76a6b4713ad..4e5d96c0318 100644
--- a/test/functional/uri_tests.js
+++ b/test/functional/uri_tests.js
@@ -1,5 +1,7 @@
 'use strict';
-var expect = require('chai').expect;
+
+const expect = require('chai').expect;
+const MongoClient = require('../..').MongoClient;
 
 describe('URI', function() {
   /**
@@ -15,7 +17,6 @@ describe('URI', function() {
     // The actual test we wish to run
     test: function(done) {
      var self = this;
-      var MongoClient = self.configuration.require.MongoClient;
 
       // Connect using the connection string
       MongoClient.connect(
@@ -57,7 +58,6 @@ describe('URI', function() {
     // The actual test we wish to run
     test: function(done) {
       var self = this;
-      var MongoClient = self.configuration.require.MongoClient;
 
       // Connect using the connection string
       MongoClient.connect('mongodb://localhost:27017/integration_tests?w=0', function(err, client) {
@@ -89,8 +89,6 @@ describe('URI', function() {
 
     // The actual test we wish to run
     test: function(done) {
-      var MongoClient = this.configuration.require.MongoClient;
-
       if (process.platform !== 'win32') {
         MongoClient.connect('mongodb://%2Ftmp%2Fmongodb-27017.sock?safe=false', function(
           err,
@@ -114,8 +112,6 @@ describe('URI', function() {
     // The actual test we wish to run
     test: function(done) {
       var self = this;
-      var MongoClient = self.configuration.require.MongoClient;
-
       MongoClient.connect('mongodb://127.0.0.1:27017/?fsync=true', function(err, client) {
         var db = client.db(self.configuration.db);
         expect(db.writeConcern.fsync).to.be.true;
@@ -133,7 +129,6 @@ describe('URI', function() {
     // The actual test we wish to run
     test: function(done) {
       var self = this;
-      var MongoClient = self.configuration.require.MongoClient;
 
       MongoClient.connect(
         'mongodb://localhost:27017/integration_tests',
@@ -164,4 +159,22 @@ describe('URI', function() {
       );
     }
   });
+
+  it('should correctly translate uri options using new parser', {
+    metadata: { requires: { topology: 'replicaset' } },
+    test: function(done) {
+      const config = this.configuration;
+      const uri = `mongodb://${config.host}:${config.port}/${config.db}?replicaSet=${
+        config.replicasetName
+      }`;
+
+      MongoClient.connect(uri, { useNewUrlParser: true }, (err, client) => {
+        if (err) console.dir(err);
+        expect(err).to.not.exist;
+        expect(client).to.exist;
+        expect(client.s.options.replicaSet).to.exist.and.equal(config.replicasetName);
+        done();
+      });
+    }
+  });
 });