From a1af63882740e7e578d0c65f6523c1e8b3df84ec Mon Sep 17 00:00:00 2001
From: Luis Lobo Borobia
Date: Fri, 19 May 2023 23:10:26 -0500
Subject: [PATCH 1/2] Update code style and dependencies

## Summary

- Update URLs in the .editorconfig, .eslintrc, and README.md files to use HTTPS
- Add new rules to the .eslintrc file for code style consistency
- Remove the .jshintrc file
- Update the lib/ and test/ code to comply with the new ESLint rules
- Ignore package-lock.* files in .gitignore
- Update Node.js versions in the .travis.yml file

## Details

The URLs in the `.editorconfig`, `.eslintrc`, and `README.md` files (as well as in comments and error messages throughout the code) were updated to use HTTPS instead of HTTP. In addition, the `.eslintrc` file was updated with new rules for code style consistency, including requiring `let` or `const` instead of `var`, enforcing spacing before blocks and functions, requiring parens in arrow function arguments, using template literals instead of string concatenation, and more. The code under `lib/` and `test/` was reworked to follow these rules (e.g. `const`/`let` declarations, template literals, arrow functions, and object/method shorthand). The `.jshintrc` file was removed, as it is no longer needed. Finally, the Node.js versions in the `.travis.yml` file were updated: 12 and 14 were dropped, and 18, 19, and 20 were added alongside 16.

## Statistics

- 45 files changed, 1 deleted
- 987 insertions(+), 989 deletions(-)

No dependency updates found.
---
 .editorconfig | 2 +-
 .eslintrc | 143 ++++++++-
 .gitignore | 1 +
 .jshintrc | 134 --------
 .travis.yml | 5 +-
 README.md | 58 ++--
 lib/index.js | 186 ++++++-----
 lib/private/build-std-adapter-method.js | 32 +-
 lib/private/constants/dry-orm.input.js | 10 +-
 lib/private/constants/query.input.js | 2 +-
 lib/private/do-with-connection.js | 116 +++----
 lib/private/machines/avg-records.js | 34 +-
 lib/private/machines/count-records.js | 24 +-
 lib/private/machines/create-each-record.js | 38 +--
 lib/private/machines/create-manager.js | 28 +-
 lib/private/machines/create-record.js | 38 +--
 lib/private/machines/define-physical-model.js | 2 +-
 lib/private/machines/destroy-manager.js | 4 +-
 lib/private/machines/destroy-records.js | 42 +--
 lib/private/machines/drop-physical-model.js | 2 +-
 lib/private/machines/find-records.js | 48 +--
 lib/private/machines/get-connection.js | 2 +-
 .../private/build-mongo-where-clause.js | 62 ++--
 .../private/normalize-mongo-object-id.js | 32 +-
 .../machines/private/process-native-error.js | 8 +-
 .../machines/private/process-native-record.js | 34 +-
 .../machines/private/reify-values-to-set.js | 40 +--
 lib/private/machines/release-connection.js | 4 +-
 lib/private/machines/set-physical-sequence.js | 2 +-
 lib/private/machines/sum-records.js | 32 +-
 lib/private/machines/update-records.js | 58 ++--
 lib/private/machines/verify-model-def.js | 4 +-
 .../normalize-datastore-config/index.js | 232 +++++++-------
 .../private/normalize-database.js | 16 +-
 .../private/normalize-host.js | 16 +-
 .../private/normalize-password.js | 8 +-
 .../private/normalize-port.js | 14 +-
 .../private/normalize-user.js | 16 +-
 package.json | 5 +-
 test/connectable/create-manager.test.js | 30 +-
 test/connectable/destroy-manager.test.js | 24 +-
 test/connectable/get-connection.test.js | 26 +-
 test/connectable/release-connection.test.js | 32 +-
 test/run-adapter-specific-tests.js | 296 +++++++++---------
 test/run-standard-tests.js | 34 +-
 45 files changed, 987 insertions(+), 989 deletions(-)
 delete mode 100644 .jshintrc

diff --git a/.editorconfig b/.editorconfig index 98a4353fa..e945c3988 100644 --- a/.editorconfig +++ b/.editorconfig @@ -6,7 +6,7 @@ # throughout this package, the Sails framework, and the Node-Machine project.
# # To review what each of these options mean, see: -# http://editorconfig.org/ +# https://editorconfig.org/ root = true [*] diff --git a/.eslintrc b/.eslintrc index 80fa6bfff..752c89a83 100644 --- a/.eslintrc +++ b/.eslintrc @@ -10,19 +10,23 @@ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // For more information about any of the rules below, check out the relevant // reference page on eslint.org. For example, to get details on "no-sequences", - // you would visit `http://eslint.org/docs/rules/no-sequences`. If you're unsure + // you would visit `https://eslint.org/docs/rules/no-sequences`. If you're unsure // or could use some advice, come by https://sailsjs.com/support. // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - "env": { - "node": true + "node": true, + "es2021": true }, "parserOptions": { - "ecmaVersion": 5 - // ^^This can be changed to `8` if this package doesn't need to support <= Node v6. + "ecmaVersion": "latest" }, + "plugins": [ + "eslint-comments" + ], + "globals": { "Promise": true // ^^Available since Node v4 @@ -46,6 +50,137 @@ "ObjectExpression": 1, "ignoredNodes": ["ConditionalExpression"] }], + // require let or const instead of var + "no-var": "error", + "no-whitespace-before-property": [ + "error" + ], + "space-before-blocks": [ + "error" + ], + "space-before-function-paren": [ + "error", + { + "anonymous": "always", + "named": "never" + } + ], + "space-in-parens": [ + "error" + ], + "space-infix-ops": [ + "error" + ], + "space-unary-ops": [ + "error" + ], + "spaced-comment": [ + "error", + "always", + { + "line": { + "markers": [ + "/" + ], + "exceptions": [ + "-", + "+" + ] + }, + "block": { + "markers": [ + "!" + ], + "exceptions": [ + "*" + ], + "balanced": true + } + } + ], + "arrow-body-style": [ + "error", + "as-needed", + { + "requireReturnForObjectLiteral": false + } + ], + // require parens in arrow function arguments + // https://eslint.org/docs/rules/arrow-parens + "arrow-parens": [ + "error", + "as-needed", + { + "requireForBlockBody": true + } + ], + // require space before/after arrow function"s arrow + // https://eslint.org/docs/rules/arrow-spacing + "arrow-spacing": [ + "error", + { + "before": true, + "after": true + } + ], + // Allow "confusing arrows" since, well, we don't confuse them + // https://eslint.org/docs/rules/no-confusing-arrow + "no-confusing-arrow": "off", + // disallow modifying variables that are declared using const + "no-const-assign": "error", + // require method and property shorthand syntax for object literals + // https://eslint.org/docs/rules/object-shorthand + "object-shorthand": [ + "error", + "always", + { + "ignoreConstructors": false, + "avoidQuotes": true + } + ], + // suggest using arrow functions as callbacks + "prefer-arrow-callback": [ + "error", + { + "allowNamedFunctions": false, + "allowUnboundThis": true + } + ], + // suggest using of const declaration for variables that are never modified after declared + "prefer-const": [ + "error", + { + "destructuring": "any", + "ignoreReadBeforeAssign": true + } + ], + // disallow parseInt() in favor of binary, octal, and hexadecimal literals + // https://eslint.org/docs/rules/prefer-numeric-literals + "prefer-numeric-literals": "error", + // suggest using Reflect methods where applicable + // https://eslint.org/docs/rules/prefer-reflect + "prefer-reflect": "off", + // use rest parameters instead of arguments + // https://eslint.org/docs/rules/prefer-rest-params + "prefer-rest-params": "error", + // 
suggest using the spread operator instead of .apply() + // https://eslint.org/docs/rules/prefer-spread + "prefer-spread": "error", + // suggest using template literals instead of string concatenation + // https://eslint.org/docs/rules/prefer-template + "prefer-template": "error", + // disallow generator functions that do not have yield + // https://eslint.org/docs/rules/require-yield + "require-yield": "error", + // enforce spacing between object rest-spread + // https://eslint.org/docs/rules/rest-spread-spacing + "rest-spread-spacing": [ + "error", + "never" + ], + // enforce usage of spacing in template strings + // https://eslint.org/docs/rules/template-curly-spacing + "template-curly-spacing": "error", "linebreak-style": ["error", "unix"], "no-dupe-keys": ["error"], "no-duplicate-case": ["error"], diff --git a/.gitignore b/.gitignore index 72cd44459..f94c8cd4f 100644 --- a/.gitignore +++ b/.gitignore @@ -18,6 +18,7 @@ node_modules .tmp npm-debug.log package-lock.json +package-lock.* .waterline .node_history diff --git a/.jshintrc b/.jshintrc deleted file mode 100644 index 5099273dd..000000000 --- a/.jshintrc +++ /dev/null @@ -1,134 +0,0 @@ -{ - // ┬┌─┐╦ ╦╦╔╗╔╔╦╗┬─┐┌─┐ - // │└─┐╠═╣║║║║ ║ ├┬┘│ - // o└┘└─┘╩ ╩╩╝╚╝ ╩ ┴└─└─┘ - // - // This file (`.jshintrc`) exists to help with consistency of code - // throughout this package, and throughout Sails and the Node-Machine project. - // - // To review what each of these options mean, see: - // http://jshint.com/docs/options - // - // (or: https://github.com/jshint/jshint/blob/master/examples/.jshintrc) - - - - ////////////////////////////////////////////////////////////////////// - // NOT SUPPORTED IN SOME JSHINT VERSIONS SO LEAVING COMMENTED OUT: - ////////////////////////////////////////////////////////////////////// - // Prevent overwriting prototypes of native classes like `Array`. - // (doing this is _never_ ok in any of our packages that are intended - // to be used as dependencies of other developers' modules and apps) - // "freeze": true, - ////////////////////////////////////////////////////////////////////// - - - ////////////////////////////////////////////////////////////////////// - // EVERYTHING ELSE: - ////////////////////////////////////////////////////////////////////// - - // Allow the use of ES6 features. - // (re ES7, see https://github.com/jshint/jshint/issues/2297) - "esversion": 6, - - // Allow the use of `eval` and `new Function()` - // (we sometimes actually need to use these things) - "evil": true, - - // Tolerate funny-looking dashes in RegExp literals. - // (see https://github.com/jshint/jshint/issues/159#issue-903547) - "regexdash": true, - - // The potential runtime "Environments" (as defined by jshint) - // that the _style_ of code written in this package should be - // compatible with (not the code itself, of course). - "browser": true, - "node": true, - "wsh": true, - - // Tolerate the use `[]` notation when dot notation would be possible. - // (this is sometimes preferable for readability) - "sub": true, - - // Do NOT suppress warnings about mixed tabs and spaces - // (two spaces always, please; see `.editorconfig`) - "smarttabs": false, - - // Suppress warnings about trailing whitespace - // (this is already enforced by the .editorconfig, so no need to warn as well) - "trailing": false, - - // Suppress warnings about the use of expressions where fn calls or assignments - // are expected, and about using assignments where conditionals are expected. 
- // (while generally a good idea, without this setting, JSHint needlessly lights up warnings - // in existing, working code that really shouldn't be tampered with. Pandora's box and all.) - "expr": true, - "boss": true, - - // Do NOT suppress warnings about using functions inside loops - // (in the general case, we should be using iteratee functions with `_.each()` - // or `Array.prototype.forEach()` instead of `for` or `while` statements - // anyway. This warning serves as a helpful reminder.) - "loopfunc": false, - - // Suppress warnings about "weird constructions" - // i.e. allow code like: - // ``` - // (new (function OneTimeUsePrototype () { } )) - // ``` - // - // (sometimes order of operations in JavaScript can be scary. There is - // nothing wrong with using an extra set of parantheses when the mood - // strikes or you get "that special feeling".) - "supernew": true, - - // Do NOT allow backwards, node-dependency-style commas. - // (while this code style choice was used by the project in the past, - // we have since standardized these practices to make code easier to - // read, albeit a bit less exciting) - "laxcomma": false, - - // Do NOT allow avant garde use of commas in conditional statements. - // (this prevents accidentally writing code like: - // ``` - // if (!_.contains(['+ci', '-ci', '∆ci', '+ce', '-ce', '∆ce']), change.verb) {...} - // ``` - // See the problem in that code? Neither did we-- that's the problem!) - "nocomma": true, - - // Strictly enforce the consistent use of single quotes. - // (this is a convention that was established primarily to make it easier - // to grep [or FIND+REPLACE in Sublime] particular string literals in - // JavaScript [.js] files. Note that JSON [.json] files are, of course, - // still written exclusively using double quotes around key names and - // around string literals.) - "quotmark": "single", - - // Do NOT suppress warnings about the use of `==null` comparisons. - // (please be explicit-- use Lodash or `require('util')` and call - // either `.isNull()` or `.isUndefined()`) - "eqnull": false, - - // Strictly enforce the use of curly braces with `if`, `else`, and `switch` - // as well as, much less commonly, `for` and `while` statements. - // (this is just so that all of our code is consistent, and to avoid bugs) - "curly": true, - - // Strictly enforce the use of `===` and `!==`. - // (this is always a good idea. Check out "Truth, Equality, and JavaScript" - // by Angus Croll [the author of "If Hemmingway Wrote JavaScript"] for more - // explanation as to why.) - "eqeqeq": true, - - // Allow initializing variables to `undefined`. 
- // For more information, see: - // • https://jslinterrors.com/it-is-not-necessary-to-initialize-a-to-undefined - // • https://github.com/jshint/jshint/issues/1484 - // - // (it is often very helpful to explicitly clarify the initial value of - // a local variable-- especially for folks new to more advanced JavaScript - // and who might not recognize the subtle, yet critically important differences between our seemingly - // between `null` and `undefined`, and the impact on `typeof` checks) - "-W080": true - -} diff --git a/.travis.yml b/.travis.yml index 4564cc1b6..bfbf3773f 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,9 +14,10 @@ dist: xenial language: node_js node_js: - - "12" - - "14" - "16" + - "18" + - "19" + - "20" env: global: diff --git a/README.md b/README.md index 5f0bb9f24..1c701f519 100644 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -[![Node.js and MongoDB on Sails.js/Waterline](https://camo.githubusercontent.com/9e49073459ed4e0e2687b80eaf515d87b0da4a6b/687474703a2f2f62616c64657264617368792e6769746875622e696f2f7361696c732f696d616765732f6c6f676f2e706e67)](http://sailsjs.com) +[![Node.js and MongoDB on Sails.js/Waterline](https://camo.githubusercontent.com/9e49073459ed4e0e2687b80eaf515d87b0da4a6b/687474703a2f2f62616c64657264617368792e6769746875622e696f2f7361696c732f696d616765732f6c6f676f2e706e67)](https://sailsjs.com) # sails-mongo @@ -19,11 +19,11 @@ To install this adapter, run: $ npm install sails-mongo ``` -Then [connect the adapter](http://sailsjs.com/documentation/reference/configuration/sails-config-datastores) to one or more of your app's datastores. +Then [connect the adapter](https://sailsjs.com/documentation/reference/configuration/sails-config-datastores) to one or more of your app's datastores. ## Usage -Visit [Models & ORM](http://sailsjs.com/docs/concepts/models-and-orm) in the docs for more information about using models, datastores, and adapters in your app/microservice. For a low-level usage example, check out the [tutorial](https://sailsjs.com/documentation/tutorials/using-mongo-db#?lowlevel-mongodb-usage-advanced). +Visit [Models & ORM](https://sailsjs.com/docs/concepts/models-and-orm) in the docs for more information about using models, datastores, and adapters in your app/microservice. For a low-level usage example, check out the [tutorial](https://sailsjs.com/documentation/tutorials/using-mongo-db#?lowlevel-mongodb-usage-advanced). 
## Compatibility @@ -33,35 +33,35 @@ Visit [Models & ORM](http://sailsjs.com/docs/concepts/models-and-orm) in the doc This adapter implements the following methods: -| Method | Status | Layer | -|:---------------------|:------------------|:--------------| -| ~~registerDatastore~~| ~~Implemented~~ | _N/A_ | -| ~~teardown~~ | ~~Implemented~~ | _N/A_ | -| validateModelDef | Implemented | Modeled | -| createRecord | Implemented | Modeled (DML) | -| createEachRecord | Implemented | Modeled (DML) | -| updateRecords | Implemented | Modeled (DML) | -| destroyRecords | Implemented | Modeled (DML) | -| findRecords | Implemented | Modeled (DQL) | -| join | _not supported_ | Modeled (DQL) | -| countRecords | Implemented | Modeled (DQL) | -| sumRecords | Implemented | Modeled (DQL) | -| avgRecords | Implemented | Modeled (DQL) | -| definePhysicalModel | Implemented | Migratable | -| dropPhysicalModel | Implemented | Migratable | -| setPhysicalSequence | _not supported_ | Migratable | +| Method | Status | Layer | +|:----------------------|:----------------|:--------------| +| ~~registerDatastore~~ | ~~Implemented~~ | _N/A_ | +| ~~teardown~~ | ~~Implemented~~ | _N/A_ | +| validateModelDef | Implemented | Modeled | +| createRecord | Implemented | Modeled (DML) | +| createEachRecord | Implemented | Modeled (DML) | +| updateRecords | Implemented | Modeled (DML) | +| destroyRecords | Implemented | Modeled (DML) | +| findRecords | Implemented | Modeled (DQL) | +| join | _not supported_ | Modeled (DQL) | +| countRecords | Implemented | Modeled (DQL) | +| sumRecords | Implemented | Modeled (DQL) | +| avgRecords | Implemented | Modeled (DQL) | +| definePhysicalModel | Implemented | Migratable | +| dropPhysicalModel | Implemented | Migratable | +| setPhysicalSequence | _not supported_ | Migratable | ## Questions? -See [Extending Sails > Adapters > Custom Adapters](http://sailsjs.com/documentation/concepts/extending-sails/adapters/custom-adapters) in the [Sails documentation](http://sailsjs.com/documentation), or check out [recommended support options](http://sailsjs.com/support). +See [Extending Sails > Adapters > Custom Adapters](https://sailsjs.com/documentation/concepts/extending-sails/adapters/custom-adapters) in the [Sails documentation](https://sailsjs.com/documentation), or check out [recommended support options](https://sailsjs.com/support). ## Contributing   [![Build Status](https://travis-ci.org/balderdashy/sails-mongo.svg?branch=master)](https://travis-ci.org/balderdashy/sails-mongo)   [![Build status on Windows](https://ci.appveyor.com/api/projects/status/u0i1o62tsw6ymbjd/branch/master?svg=true)](https://ci.appveyor.com/project/mikermcneil/sails-mongo/branch/master) -Please observe the guidelines and conventions laid out in the [Sails project contribution guide](http://sailsjs.com/documentation/contributing) when opening issues or submitting pull requests. +Please observe the guidelines and conventions laid out in the [Sails project contribution guide](https://sailsjs.com/documentation/contributing) when opening issues or submitting pull requests. -[![NPM](https://nodei.co/npm/sails-mongo.png?downloads=true)](http://npmjs.com/package/sails-mongo) +[![NPM](https://nodei.co/npm/sails-mongo.png?downloads=true)](https://npmjs.com/package/sails-mongo) #### Development and Test @@ -106,17 +106,17 @@ From there you can run `npm test` to run the tests as many times as you need. 
Thanks so much to Ted Kulp ([@tedkulp](https://twitter.com/tedkulp)) and Robin Persson ([@prssn](https://twitter.com/prssn)) for building the first version of this adapter back in 2013. Since then, it has evolved into a core adapter within the framework. -## Bugs   [![NPM version](https://badge.fury.io/js/sails-mongo.svg)](http://npmjs.com/package/sails-mongo) +## Bugs   [![NPM version](https://badge.fury.io/js/sails-mongo.svg)](https://npmjs.com/package/sails-mongo) -To report a bug, [click here](http://sailsjs.com/bugs). +To report a bug, [click here](https://sailsjs.com/bugs). ## License -This [core adapter](http://sailsjs.com/documentation/concepts/extending-sails/adapters/available-adapters) is available under the **MIT license**. +This [core adapter](https://sailsjs.com/documentation/concepts/extending-sails/adapters/available-adapters) is available under the **MIT license**. -As for [Waterline](http://waterlinejs.org) and the [Sails framework](http://sailsjs.com)? They're free and open-source under the [MIT License](http://sailsjs.com/license). +As for [Waterline](https://waterlinejs.org) and the [Sails framework](https://sailsjs.com)? They're free and open-source under the [MIT License](https://sailsjs.com/license). -© [The Sails Co.](http://sailsjs.com/about) +© [The Sails Co.](https://sailsjs.com/about) -![image_squidhome@2x.png](http://i.imgur.com/RIvu9.png) +![image_squidhome@2x.png](https://i.imgur.com/RIvu9.png) diff --git a/lib/index.js b/lib/index.js index 531c779fb..ff910f48d 100644 --- a/lib/index.js +++ b/lib/index.js @@ -2,14 +2,14 @@ * Module dependencies */ -var util = require('util'); -var _ = require('@sailshq/lodash'); -var flaverr = require('flaverr'); -var async = require('async'); -var Machine = require('machine'); -var mongodb = require('mongodb'); -var normalizeDatastoreConfig = require('./private/normalize-datastore-config'); -var buildStdAdapterMethod = require('./private/build-std-adapter-method'); +const util = require('util'); +const _ = require('@sailshq/lodash'); +const flaverr = require('flaverr'); +const async = require('async'); +const Machine = require('machine'); +const mongodb = require('mongodb'); +const normalizeDatastoreConfig = require('./private/normalize-datastore-config'); +const buildStdAdapterMethod = require('./private/build-std-adapter-method'); /** @@ -19,7 +19,7 @@ var buildStdAdapterMethod = require('./private/build-std-adapter-method'); // Private var to cache dry machine definitions. // > This is set up in a dictionary instead of as separate variables // > just to allow the code below to be a bit easier to read) -var DRY_MACHINES = { +const DRY_MACHINES = { verifyModelDef: require('./private/machines/verify-model-def'), createManager: require('./private/machines/create-manager'), destroyManager: require('./private/machines/destroy-manager'), @@ -33,16 +33,16 @@ var DRY_MACHINES = { // Private var to cache pre-built machines for certain adapter methods. // (This is an optimization for improved performance.) 
-var WET_MACHINES = {}; -_.each(DRY_MACHINES, function(def, methodName) { +const WET_MACHINES = {}; +_.each(DRY_MACHINES, (def, methodName) => { WET_MACHINES[methodName] = Machine.build(def); }); -var CONFIG_WHITELIST = require('./private/constants/config-whitelist.constant'); +const CONFIG_WHITELIST = require('./private/constants/config-whitelist.constant'); -var EXPECTED_URL_PROTOCOL_PFX = require('./private/constants/expected-url-protocol-pfx.constant'); +const EXPECTED_URL_PROTOCOL_PFX = require('./private/constants/expected-url-protocol-pfx.constant'); @@ -57,12 +57,12 @@ var EXPECTED_URL_PROTOCOL_PFX = require('./private/constants/expected-url-protoc // > Note that this approach of process global state will be changing in an upcoming version of // > the Waterline adapter spec (a breaking change). But if you follow the conventions laid out // > below in this adapter template, future upgrades should be a breeze. -var registeredDsEntries = {}; +const registeredDsEntries = {}; // Keep track of all the model definitions registered by the adapter (for the entire Node process). // (indexed by the model's `identity` -- NOT by its `tableName`!!) -var registeredDryModels = {}; +const registeredDryModels = {}; @@ -137,12 +137,12 @@ module.exports = { // Also give the driver a `mongodb` property, so that it provides access - // to the static Mongo library for Node.js. (See http://npmjs.com/package/mongodb) - mongodb: mongodb, + // to the static Mongo library for Node.js. (See https://npmjs.com/package/mongodb) + mongodb, - ////////////////////////////////////////////////////////////////////////////////////////////////// + /// /////////////////////////////////////////////////////////////////////////////////////////////// // ██╗ ██╗███████╗███████╗ ██████╗██╗ ██╗ ██████╗██╗ ███████╗ // // ██║ ██║██╔════╝██╔════╝██╔════╝╚██╗ ██╔╝██╔════╝██║ ██╔════╝ // // ██║ ██║█████╗ █████╗ ██║ ╚████╔╝ ██║ ██║ █████╗ // @@ -152,7 +152,7 @@ module.exports = { // // // Lifecycle adapter methods: // // Methods related to setting up and tearing down; registering/un-registering datastores. // - ////////////////////////////////////////////////////////////////////////////////////////////////// + /// /////////////////////////////////////////////////////////////////////////////////////////////// /** * ╦═╗╔═╗╔═╗╦╔═╗╔╦╗╔═╗╦═╗ ┌┬┐┌─┐┌┬┐┌─┐┌─┐┌┬┐┌─┐┬─┐┌─┐ @@ -185,17 +185,17 @@ module.exports = { * @param {Error?} err <-« An Error instance, if something went wrong. (Otherwise `undefined`.) * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - registerDatastore: function (dsConfig, physicalModelsReport, done) { + registerDatastore(dsConfig, physicalModelsReport, done) { // Grab the unique name for this datastore for easy access below. - var datastoreName = dsConfig.identity; + const datastoreName = dsConfig.identity; // Some sanity checks: if (!datastoreName) { - return done(new Error('Consistency violation: A datastore should contain an "identity" property: a special identifier that uniquely identifies it across this app. This should have been provided by Waterline core! If you are seeing this message, there could be a bug in Waterline, or the datastore could have become corrupted by userland code, or other code in this adapter. 
If you determine that this is a Waterline bug, please report this at http://sailsjs.com/bugs.')); + return done(new Error('Consistency violation: A datastore should contain an "identity" property: a special identifier that uniquely identifies it across this app. This should have been provided by Waterline core! If you are seeing this message, there could be a bug in Waterline, or the datastore could have become corrupted by userland code, or other code in this adapter. If you determine that this is a Waterline bug, please report this at https://sailsjs.com/bugs.')); } if (registeredDsEntries[datastoreName]) { - return done(new Error('Consistency violation: Cannot register datastore: `' + datastoreName + '`, because it is already registered with this adapter! This could be due to an unexpected race condition in userland code (e.g. attempting to initialize Waterline more than once), or it could be due to a bug in this adapter. (If you get stumped, reach out at http://sailsjs.com/support.)')); + return done(new Error(`Consistency violation: Cannot register datastore: \`${datastoreName}\`, because it is already registered with this adapter! This could be due to an unexpected race condition in userland code (e.g. attempting to initialize Waterline more than once), or it could be due to a bug in this adapter. (If you get stumped, reach out at https://sailsjs.com/support.)`)); } @@ -206,7 +206,7 @@ module.exports = { normalizeDatastoreConfig(dsConfig, CONFIG_WHITELIST, EXPECTED_URL_PROTOCOL_PFX); } catch (e) { switch (e.code) { - case 'E_BAD_CONFIG': return done(flaverr(e.code, new Error('Invalid configuration for datastore `' + datastoreName + '`: '+e.message))); + case 'E_BAD_CONFIG': return done(flaverr(e.code, new Error(`Invalid configuration for datastore \`${datastoreName}\`: ${e.message}`))); default: return done(e); } } @@ -223,9 +223,9 @@ module.exports = { // ============================================================================================ if (WET_MACHINES.verifyModelDef) { - var modelIncompatibilitiesMap = {}; + const modelIncompatibilitiesMap = {}; try { - _.each(physicalModelsReport, function (phModelInfo){ + _.each(physicalModelsReport, (phModelInfo) => { try { WET_MACHINES.verifyModelDef({ modelDef: phModelInfo }).execSync(); } catch (e) { @@ -234,20 +234,18 @@ module.exports = { default: throw e; } } - });// + });// } catch (e) { return done(e); } - var numNotCompatible = _.keys(modelIncompatibilitiesMap).length; + const numNotCompatible = _.keys(modelIncompatibilitiesMap).length; if (numNotCompatible > 0) { return done(flaverr('E_MODELS_NOT_COMPATIBLE', new Error( - numNotCompatible+' model(s) are not compatible with this adapter:\n'+ - _.reduce(modelIncompatibilitiesMap, function(memo, incompatibility, modelIdentity) { - return memo + '• `'+modelIdentity+'` :: '+incompatibility+'\n'; - }, '') + `${numNotCompatible} model(s) are not compatible with this adapter:\n${ + _.reduce(modelIncompatibilitiesMap, (memo, incompatibility, modelIdentity) => `${memo}• \`${modelIdentity}\` :: ${incompatibility}\n`, '')}` ))); - }//-• + }// -• - }//>-• + }// >-• @@ -264,27 +262,27 @@ module.exports = { connectionString: dsConfig.url, meta: _.omit(dsConfig, ['adapter', 'url', 'identity', 'schema']) }).switch({ - error: function(err) { - return done(new Error('Consistency violation: Unexpected error creating db connection manager:\n```\n'+err.stack+'\n```')); + error(err) { + return done(new Error(`Consistency violation: Unexpected error creating db connection 
manager:\n\`\`\`\n${err.stack}\n\`\`\``)); }, - malformed: function(report) { + malformed(report) { return done(flaverr({ code: 'E_BAD_CONFIG', raw: report.error, meta: report.meta - }, new Error('The given connection URL is not valid for this database adapter. Details:\n```\n'+report.error.stack+'\n```'))); + }, new Error(`The given connection URL is not valid for this database adapter. Details:\n\`\`\`\n${report.error.stack}\n\`\`\``))); }, - failed: function(report) { + failed(report) { return done(flaverr({ code: 'E_FAILED_TO_CONNECT', raw: report.error, meta: report.meta - }, new Error('Failed to connect with the given datastore configuration. Details:\n```\n'+report.error.stack+'\n```'))); + }, new Error(`Failed to connect with the given datastore configuration. Details:\n\`\`\`\n${report.error.stack}\n\`\`\``))); }, - success: function (report) { + success(report) { try { - var manager = report.manager; + const {manager} = report; // ╔╦╗╦═╗╔═╗╔═╗╦╔═ ┌┬┐┌─┐ ┌─┐┌┐┌┌┬┐┬─┐┬ ┬ // ║ ╠╦╝╠═╣║ ╠╩╗ ││└─┐ ├┤ │││ │ ├┬┘└┬┘ @@ -309,13 +307,13 @@ module.exports = { // registeredDsEntries[datastoreName] = { config: dsConfig, - manager: manager, + manager, driver: { createManager: WET_MACHINES.createManager, destroyManager: WET_MACHINES.destroyManager, getConnection: WET_MACHINES.getConnection, releaseConnection: WET_MACHINES.releaseConnection, - mongodb: mongodb + mongodb } // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // ^Note: In future releases, the driver and the adapter will simply become one thing. @@ -331,11 +329,11 @@ module.exports = { // FUTURE: Remove the need for this step by giving the adapter some kind of simpler access // to the orm instance, or an accessor function for models. // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - _.each(physicalModelsReport, function(phModelInfo){ + _.each(physicalModelsReport, (phModelInfo) => { // console.log('in datastore: `%s` ……tracking physical model: `%s` (tableName: `%s`)',datastoreName, phModelInfo.identity, phModelInfo.tableName); if (registeredDryModels[phModelInfo.identity]) { - throw new Error('Consistency violation: Cannot register model: `' + phModelInfo.identity + '`, because it is already registered with this adapter! This could be due to an unexpected race condition in userland code (e.g. attempting to initialize multiple ORM instances at the same time), or it could be due to a bug in this adapter. (If you get stumped, reach out at http://sailsjs.com/support.)'); + throw new Error(`Consistency violation: Cannot register model: \`${phModelInfo.identity}\`, because it is already registered with this adapter! This could be due to an unexpected race condition in userland code (e.g. attempting to initialize multiple ORM instances at the same time), or it could be due to a bug in this adapter. (If you get stumped, reach out at https://sailsjs.com/support.)`); } registeredDryModels[phModelInfo.identity] = { @@ -347,15 +345,15 @@ module.exports = { // console.log('\n\nphModelInfo:',util.inspect(phModelInfo,{depth:5})); - });// + });// } catch (e) { return done(e); } // Inform Waterline that the datastore was registered successfully. 
return done(undefined, report.meta); - }//•-success> - });//createManager()> + }// •-success> + });// createManager()> }, @@ -377,17 +375,17 @@ module.exports = { * @param {Error?} * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - teardown: function (datastoreName, done) { + teardown(datastoreName, done) { // Look up the datastore entry (manager/driver/config). - var dsEntry = registeredDsEntries[datastoreName]; + const dsEntry = registeredDsEntries[datastoreName]; // Sanity checks: if (!datastoreName) { - return done(new Error('Consistency violation: Internal error in Waterline: Adapter received unexpected falsey datastore name (`'+datastoreName+'`)! Can\'t look up a DS entry from this adapter with that... (Please report this error at http://sailsjs.com/bugs.)')); + return done(new Error(`Consistency violation: Internal error in Waterline: Adapter received unexpected falsey datastore name (\`${datastoreName}\`)! Can't look up a DS entry from this adapter with that... (Please report this error at https://sailsjs.com/bugs.)`)); } if (_.isUndefined(dsEntry)) { - return done(new Error('Consistency violation: Attempting to tear down a datastore (`'+datastoreName+'`) which is not currently registered with this adapter. This is usually due to a race condition in userland code (e.g. attempting to tear down the same ORM instance more than once), or it could be due to a bug in this adapter. (If you get stumped, reach out at http://sailsjs.com/support.)')); + return done(new Error(`Consistency violation: Attempting to tear down a datastore (\`${datastoreName}\`) which is not currently registered with this adapter. This is usually due to a race condition in userland code (e.g. attempting to tear down the same ORM instance more than once), or it could be due to a bug in this adapter. (If you get stumped, reach out at https://sailsjs.com/support.)`)); } if (!dsEntry.manager) { return done(new Error('Consistency violation: Missing manager for this datastore. (This datastore may already be in the process of being destroyed.)')); @@ -398,14 +396,14 @@ module.exports = { // ═╩╝╚═╝╚═╝ ╩ ╩╚═╚═╝ ╩ ┴ ┴┴ ┴┘└┘┴ ┴└─┘└─┘┴└─ // Destroy the manager. 
WET_MACHINES.destroyManager({ manager: dsEntry.manager }).switch({ - error: function(err) { return done(new Error('Encountered unexpected error when attempting to destroy the connection manager.\n\n```\n'+err.stack+'\n```')); }, - failed: function(report) { - var err = new Error('Datastore (`'+datastoreName+'`) could not be torn down because of a failure when attempting to destroy the connection manager.\n\n```\n'+report.error.stack+'\n```'); + error(err) { return done(new Error(`Encountered unexpected error when attempting to destroy the connection manager.\n\n\`\`\`\n${err.stack}\n\`\`\``)); }, + failed(report) { + const err = new Error(`Datastore (\`${datastoreName}\`) could not be torn down because of a failure when attempting to destroy the connection manager.\n\n\`\`\`\n${report.error.stack}\n\`\`\``); err.raw = report.error; if (report.meta) { err.meta = report.meta; } return done(err); }, - success: function (report) { + success(report) { // ╦ ╦╔╗╔ ╔╦╗╦═╗╔═╗╔═╗╦╔═ ┌┬┐┌─┐ ┌─┐┌┐┌┌┬┐┬─┐┬ ┬ // ║ ║║║║───║ ╠╦╝╠═╣║ ╠╩╗ ││└─┐ ├┤ │││ │ ├┬┘└┬┘ @@ -418,7 +416,7 @@ module.exports = { try { delete registeredDsEntries[datastoreName]; - _.each(_.keys(registeredDryModels), function(modelIdentity) { + _.each(_.keys(registeredDryModels), (modelIdentity) => { if (registeredDryModels[modelIdentity].datastore === datastoreName) { delete registeredDryModels[modelIdentity]; } @@ -429,8 +427,8 @@ module.exports = { // Inform Waterline that we're done, and that everything went as expected. return done(undefined, report.meta); - }//•-success> - });//destroyManager()> + }// •-success> + });// destroyManager()> }, @@ -472,18 +470,18 @@ module.exports = { releaseConnection: DRY_MACHINES.releaseConnection, - /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + /// //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // ██╗ ██╗███████╗██████╗ ██╗███████╗██╗ ██╗ ███╗ ███╗ ██████╗ ██████╗ ███████╗██╗ ██████╗ ███████╗███████╗ // // ██║ ██║██╔════╝██╔══██╗██║██╔════╝╚██╗ ██╔╝ ████╗ ████║██╔═══██╗██╔══██╗██╔════╝██║ ██╔══██╗██╔════╝██╔════╝ // // ██║ ██║█████╗ ██████╔╝██║█████╗ ╚████╔╝ ██╔████╔██║██║ ██║██║ ██║█████╗ ██║ ██║ ██║█████╗ █████╗ // // ╚██╗ ██╔╝██╔══╝ ██╔══██╗██║██╔══╝ ╚██╔╝ ██║╚██╔╝██║██║ ██║██║ ██║██╔══╝ ██║ ██║ ██║██╔══╝ ██╔══╝ // // ╚████╔╝ ███████╗██║ ██║██║██║ ██║ ██║ ╚═╝ ██║╚██████╔╝██████╔╝███████╗███████╗ ██████╔╝███████╗██║ // // ╚═══╝ ╚══════╝╚═╝ ╚═╝╚═╝╚═╝ ╚═╝ ╚═╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═════╝ ╚══════╝╚═╝ // - /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + /// //////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// verifyModelDef: DRY_MACHINES.verifyModelDef, - ////////////////////////////////////////////////////////////////////////////////////////////////// + /// /////////////////////////////////////////////////////////////////////////////////////////////// // ██████╗ ███╗ ███╗██╗ // // ██╔══██╗████╗ ████║██║ // // ██║ ██║██╔████╔██║██║ // @@ -494,7 +492,7 @@ module.exports = { // // // DML adapter methods: // // Methods related to manipulating records stored in the database. 
// - ////////////////////////////////////////////////////////////////////////////////////////////////// + /// /////////////////////////////////////////////////////////////////////////////////////////////// /** @@ -593,7 +591,7 @@ module.exports = { - ////////////////////////////////////////////////////////////////////////////////////////////////// + /// /////////////////////////////////////////////////////////////////////////////////////////////// // ██████╗ ██████╗ ██╗ // // ██╔══██╗██╔═══██╗██║ // // ██║ ██║██║ ██║██║ // @@ -604,7 +602,7 @@ module.exports = { // // // DQL adapter methods: // // Methods related to fetching information from the database (e.g. finding stored records). // - ////////////////////////////////////////////////////////////////////////////////////////////////// + /// /////////////////////////////////////////////////////////////////////////////////////////////// /** @@ -710,7 +708,7 @@ module.exports = { - ////////////////////////////////////////////////////////////////////////////////////////////////// + /// /////////////////////////////////////////////////////////////////////////////////////////////// // ██████╗ ██████╗ ██╗ // // ██╔══██╗██╔══██╗██║ // // ██║ ██║██║ ██║██║ // @@ -721,7 +719,7 @@ module.exports = { // // // DDL adapter methods: // // Methods related to modifying the underlying structure of physical models in the database. // - ////////////////////////////////////////////////////////////////////////////////////////////////// + /// /////////////////////////////////////////////////////////////////////////////////////////////// /** @@ -742,14 +740,14 @@ module.exports = { * @param {Error?} * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - define: function (datastoreName, tableName, phmDef, done) { + define(datastoreName, tableName, phmDef, done) { // Look up the datastore entry (manager/driver/config). - var dsEntry = registeredDsEntries[datastoreName]; + const dsEntry = registeredDsEntries[datastoreName]; // Sanity check: if (_.isUndefined(dsEntry)) { - return done(new Error('Consistency violation: Cannot do that with datastore (`'+datastoreName+'`) because no matching datastore entry is registered in this adapter! This is usually due to a race condition (e.g. a lifecycle callback still running after the ORM has been torn down), or it could be due to a bug in this adapter. (If you get stumped, reach out at http://sailsjs.com/support.)')); + return done(new Error(`Consistency violation: Cannot do that with datastore (\`${datastoreName}\`) because no matching datastore entry is registered in this adapter! This is usually due to a race condition (e.g. a lifecycle callback still running after the ORM has been torn down), or it could be due to a bug in this adapter. (If you get stumped, reach out at https://sailsjs.com/support.)`)); } @@ -763,8 +761,8 @@ module.exports = { // Build an array of any UNIQUE indexes needed // > Go through each item in the definition to locate fields // > which demand a uniqueness constraint. - var uniqueIndexesToCreate = []; - _.each(phmDef, function (phmAttrDef, key) { + const uniqueIndexesToCreate = []; + _.each(phmDef, (phmAttrDef, key) => { if (_.has(phmAttrDef, 'unique') && phmAttrDef.unique) { uniqueIndexesToCreate.push(key); } @@ -772,29 +770,27 @@ module.exports = { // "Clean" the list of unique indexes. // > Remove `_id`. 
- _.remove(uniqueIndexesToCreate, function (val) { - return val === '_id'; - }); + _.remove(uniqueIndexesToCreate, val => val === '_id'); // If there are no indexes to create, bail out (we're done). if (uniqueIndexesToCreate.length === 0) { return done(); - }//-• + }// -• // Otherwise we'll need to create some indexes.... // First, get a reference to the Mongo collection. - var db = dsEntry.manager; - var mongoCollection = db.collection(tableName); + const db = dsEntry.manager; + const mongoCollection = db.collection(tableName); // Then simultaneously create all of the indexes: - async.each(uniqueIndexesToCreate, function (key, next) { + async.each(uniqueIndexesToCreate, (key, next) => { // Build up a special "keys" dictionary for Mongo. // (e.g. `{foo:1}`) // // > This is the definition for a "single-field index". // > (https://docs.mongodb.com/manual/indexes/#index-types) - var mongoSingleFieldIdxKeys = {}; + const mongoSingleFieldIdxKeys = {}; mongoSingleFieldIdxKeys[key] = 1; // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // ^^^NOTE: @@ -810,19 +806,19 @@ module.exports = { // Create the index on the Mongo collection. // (https://docs.mongodb.com/manual/reference/method/db.collection.createIndex) - mongoCollection.createIndex(mongoSingleFieldIdxKeys, { unique: true }, function (err) { + mongoCollection.createIndex(mongoSingleFieldIdxKeys, { unique: true }, (err) => { if (err && !_.isError(err)) { - err = flaverr({raw: err}, new Error('Consistency violation: Expecting Error instance, but instead got: '+util.inspect(err))); + err = flaverr({raw: err}, new Error(`Consistency violation: Expecting Error instance, but instead got: ${util.inspect(err)}`)); return next(err); } else if (err) { return next(err); } else { return next(); } - });// + });// - }, function (err) { + }, (err) => { if (err) { return done(err); } return done(); - });// + });// }, @@ -847,19 +843,19 @@ module.exports = { * @param {Error?} * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ - drop: function (datastoreName, tableName, unused, done) { + drop(datastoreName, tableName, unused, done) { // Look up the datastore entry (manager/driver/config). - var dsEntry = registeredDsEntries[datastoreName]; + const dsEntry = registeredDsEntries[datastoreName]; // Sanity check: if (_.isUndefined(dsEntry)) { - return done(new Error('Consistency violation: Cannot do that with datastore (`'+datastoreName+'`) because no matching datastore entry is registered in this adapter! This is usually due to a race condition (e.g. a lifecycle callback still running after the ORM has been torn down), or it could be due to a bug in this adapter. (If you get stumped, reach out at http://sailsjs.com/support.)')); + return done(new Error(`Consistency violation: Cannot do that with datastore (\`${datastoreName}\`) because no matching datastore entry is registered in this adapter! This is usually due to a race condition (e.g. a lifecycle callback still running after the ORM has been torn down), or it could be due to a bug in this adapter. (If you get stumped, reach out at https://sailsjs.com/support.)`)); } // Drop the physical model (e.g. table/etc.) 
- var db = dsEntry.manager; - db.collection(tableName).drop(function (err) { + const db = dsEntry.manager; + db.collection(tableName).drop((err) => { try { if (err) { @@ -867,8 +863,8 @@ module.exports = { throw flaverr('E_PHM_NOT_FOUND', new Error('No such physical model is currently defined.')); } else if (_.isError(err)) { throw err; } - else { throw new Error('Consistency violation: Expecting Error instance, but instead got: '+util.inspect(err)); } - }//>- + else { throw new Error(`Consistency violation: Expecting Error instance, but instead got: ${util.inspect(err)}`); } + }// >- } catch (e) { switch (e.code) { @@ -877,13 +873,13 @@ module.exports = { e.raw = err; return done(e); } - }// + }// // >-• // IWMIH, then either the physical model was successfully dropped, // or it didn't exist in the first place. return done(); - });// + });// }, diff --git a/lib/private/build-std-adapter-method.js b/lib/private/build-std-adapter-method.js index 90c9ff85e..51b5efd1a 100644 --- a/lib/private/build-std-adapter-method.js +++ b/lib/private/build-std-adapter-method.js @@ -2,9 +2,9 @@ * Module dependencies */ -var _ = require('@sailshq/lodash'); -var Machine = require('machine'); -var doWithConnection = require('./do-with-connection'); +const _ = require('@sailshq/lodash'); +const Machine = require('machine'); +const doWithConnection = require('./do-with-connection'); /** @@ -32,33 +32,33 @@ var doWithConnection = require('./do-with-connection'); * @param {Function} done * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ -module.exports = function buildStdAdapterMethod (machineDef, WET_MACHINES, registeredDsEntries, registeredDryModels) { +module.exports = function buildStdAdapterMethod(machineDef, WET_MACHINES, registeredDsEntries, registeredDryModels) { // Build wet machine. - var performQuery = Machine.build(machineDef); + const performQuery = Machine.build(machineDef); // Return function that will be the adapter method. return function (datastoreName, s3q, done) { // Look up the datastore entry (to get the manager). - var dsEntry = registeredDsEntries[datastoreName]; + const dsEntry = registeredDsEntries[datastoreName]; // Sanity check: if (_.isUndefined(dsEntry)) { - return done(new Error('Consistency violation: Cannot do that with datastore (`'+datastoreName+'`) because no matching datastore entry is registered in this adapter! This is usually due to a race condition (e.g. a lifecycle callback still running after the ORM has been torn down), or it could be due to a bug in this adapter. (If you get stumped, reach out at http://sailsjs.com/support.)')); + return done(new Error(`Consistency violation: Cannot do that with datastore (\`${datastoreName}\`) because no matching datastore entry is registered in this adapter! This is usually due to a race condition (e.g. a lifecycle callback still running after the ORM has been torn down), or it could be due to a bug in this adapter. (If you get stumped, reach out at https://sailsjs.com/support.)`)); } // Obtain a connection. 
doWithConnection({ - WET_MACHINES: WET_MACHINES, + WET_MACHINES, manager: dsEntry.manager, connection: (s3q.meta && s3q.meta.leasedConnection) || undefined, meta: s3q.meta, - during: function (connection, proceed) { + during(connection, proceed) { - var handlers = { - error: function (err) { return proceed(err); }, - success: function (result) { return proceed(undefined, result); } + const handlers = { + error(err) { return proceed(err); }, + success(result) { return proceed(undefined, result); } }; // If this machine has a `notUnique` exit, then set up a `notUnique` handler. // > (Note that `err.footprint` should already be attached, so there's no need to mess w/ it.) @@ -69,14 +69,14 @@ module.exports = function buildStdAdapterMethod (machineDef, WET_MACHINES, regis // Perform the query (and if relevant, send back a result.) performQuery({ query: s3q, - connection: connection, + connection, dryOrm: { models: registeredDryModels } }).switch(handlers); - }// - }, done);// + }// + }, done);// - };// + };// }; diff --git a/lib/private/constants/dry-orm.input.js b/lib/private/constants/dry-orm.input.js index 5f82e9ec0..aea7943b9 100644 --- a/lib/private/constants/dry-orm.input.js +++ b/lib/private/constants/dry-orm.input.js @@ -11,12 +11,12 @@ module.exports = { required: true, readOnly: true, example: '===' - //e.g. - //``` - //{ + // e.g. + // ``` + // { // models: { // pet: {attributes:{...}, tableName: 'sack_of_pets', identity: 'pet'}, // }, - //} - //``` + // } + // ``` }; diff --git a/lib/private/constants/query.input.js b/lib/private/constants/query.input.js index 9921bc3f0..c3734ca74 100644 --- a/lib/private/constants/query.input.js +++ b/lib/private/constants/query.input.js @@ -10,5 +10,5 @@ module.exports = { extendedDescription: 'The `meta` key of this dictionary is reserved for certain special "meta keys" (e.g. flags, signals, etc.) and other custom, adapter-specific extensions.', required: true, readOnly: true, - example: '==='//e.g. `{ method: 'create', using: 'the_table_name', ... }` + example: '==='// e.g. `{ method: 'create', using: 'the_table_name', ... }` }; diff --git a/lib/private/do-with-connection.js b/lib/private/do-with-connection.js index d8cbfafb1..609ce0b87 100644 --- a/lib/private/do-with-connection.js +++ b/lib/private/do-with-connection.js @@ -2,9 +2,9 @@ * Module dependencies */ -var assert = require('assert'); -var util = require('util'); -var _ = require('@sailshq/lodash'); +const assert = require('assert'); +const util = require('util'); +const _ = require('@sailshq/lodash'); /** @@ -38,7 +38,7 @@ var _ = require('@sailshq/lodash'); * If set, this is the result sent back from the provided * `during` function. */ -module.exports = function doWithConnection(options, done){ +module.exports = function doWithConnection(options, done) { assert(options.WET_MACHINES); assert(options.manager || options.connection); @@ -55,77 +55,77 @@ module.exports = function doWithConnection(options, done){ // If a pre-leased connection was passed in, proceed with that. // Otherwise, use the pre-built machines (i.e. from the adapter/driver) // to acquire a new connection from the manager. - (function _ensureConnection(proceed){ + (function _ensureConnection(proceed) { if (options.connection) { return proceed(undefined, options.connection); - }//-• + }// -• if (options.WET_MACHINES.getConnection.sync) { - var connection; + let connection; try { connection = options.WET_MACHINES.getConnection({ manager: options.manager }).execSync().connection; // (`report.meta` is ignored...) 
} catch (e) { if (e.exit === 'failed') { - var failureReport = e.output; + const failureReport = e.output; if (failureReport.meta) { failureReport.error.meta = failureReport.meta; } return proceed(failureReport.error); } else { return proceed(e); } } return proceed(undefined, connection); - }//-• + }// -• options.WET_MACHINES.getConnection({ manager: options.manager, meta: options.meta }).switch({ - error: function (err){ return proceed(err); }, - failed: function (report){ + error(err) { return proceed(err); }, + failed(report) { if (report.meta) { report.error.meta = report.meta; } return proceed(report.error); }, - success: function (report){ + success(report) { // (`report.meta` is ignored) return proceed(undefined, report.connection); } }); - })(function (err, db){ + })((err, db) => { if (err) { return done(err); } // ╦═╗╦ ╦╔╗╔ ┌┬┐┬ ┬┌─┐ \│/┌┬┐┬ ┬┬─┐┬┌┐┌┌─┐\│/ ┌─┐┬ ┬┌┐┌┌─┐┌┬┐┬┌─┐┌┐┌ // ╠╦╝║ ║║║║ │ ├─┤├┤ ─ ─ │││ │├┬┘│││││ ┬─ ─ ├┤ │ │││││ │ ││ ││││ // ╩╚═╚═╝╝╚╝ ┴ ┴ ┴└─┘ /│\─┴┘└─┘┴└─┴┘└┘└─┘/│\ └ └─┘┘└┘└─┘ ┴ ┴└─┘┘└┘ // Call the provided `during` function. - (function _makeCallToDuringFn(proceed){ + (function _makeCallToDuringFn(proceed) { // Note that, if you try to call the callback more than once in the iteratee, // this method logs a warning explaining what's up, ignoring any subsequent calls // to the callback that occur after the first one. - var didDuringFnAlreadyHalt; + let didDuringFnAlreadyHalt; try { - options.during(db, function (err, resultMaybe) { + options.during(db, (err, resultMaybe) => { if (err) { return proceed(err); } if (didDuringFnAlreadyHalt) { console.warn( - 'Warning: The provided `during` function triggered its callback again-- after\n'+ - 'already triggering it once! Please carefully check your `during` function\'s \n'+ + 'Warning: The provided `during` function triggered its callback again-- after\n' + + 'already triggering it once! Please carefully check your `during` function\'s \n' + 'code to figure out why this is happening. (Ignoring this subsequent invocation...)' ); return; - }//-• + }// -• didDuringFnAlreadyHalt = true; return proceed(undefined, resultMaybe); - });// + });// } catch (e) { return proceed(e); } - })(function (duringErr, resultMaybe){ + })((duringErr, resultMaybe) => { // ╦ ╦╔═╗╔╗╔╔╦╗╦ ╔═╗ ┌─┐┬─┐┬─┐┌─┐┬─┐ ┌─┐┬─┐┌─┐┌┬┐ \│/┌┬┐┬ ┬┬─┐┬┌┐┌┌─┐\│/ // ╠═╣╠═╣║║║ ║║║ ║╣ ├┤ ├┬┘├┬┘│ │├┬┘ ├┤ ├┬┘│ ││││ ─ ─ │││ │├┬┘│││││ ┬─ ─ @@ -144,9 +144,9 @@ module.exports = function doWithConnection(options, done){ // because that's what Waterline knows how to handle. We'll do this // by copying all of `duringErr`s properties into a new Error. if (_.isObject(duringErr) && duringErr.name === 'MongoError') { - duringErr = (function() { - var newError = new Error(); - _.each(Object.getOwnPropertyNames(duringErr), function(prop) { + duringErr = (function () { + const newError = new Error(); + _.each(Object.getOwnPropertyNames(duringErr), (prop) => { newError[prop] = duringErr[prop]; }); return newError; @@ -161,7 +161,7 @@ module.exports = function doWithConnection(options, done){ duringErr = new Error(util.inspect(duringErr, {depth:5})); } - }//>- + }// >- // Before exiting with this `during` error, check to see if we acquired // our own ad hoc connection earlier. If not, then go ahead and just @@ -169,7 +169,7 @@ module.exports = function doWithConnection(options, done){ // ad hoc connection first before calling the `done` callback. 
if (options.connection) { return done(duringErr); - }//-• + }// -• // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // > NOTE: We don't bother with the "is sync?" optimization here (see below). @@ -181,27 +181,27 @@ module.exports = function doWithConnection(options, done){ // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - options.WET_MACHINES.releaseConnection({ connection: db, meta: options.meta }).switch({ - error: function(secondaryErr) { + error(secondaryErr) { // This is a rare case, but still, if it happens, we make sure to tell // the calling code _exactly_ what occurred. return done(new Error( - 'The code using this db connection encountered an error:\n'+ - '``` (1)\n'+ - duringErr.stack +'\n'+ - '```\n'+ - '...AND THEN when attempting to automatically release the db connection\n'+ - '(since it was leased ad hoc), there was a secondary issue:\n'+ - '``` (2)\n'+ - secondaryErr.stack+'\n'+ - '```' + `The code using this db connection encountered an error:\n` + + `\`\`\` (1)\n${ + duringErr.stack}\n` + + `\`\`\`\n` + + `...AND THEN when attempting to automatically release the db connection\n` + + `(since it was leased ad hoc), there was a secondary issue:\n` + + `\`\`\` (2)\n${ + secondaryErr.stack}\n` + + `\`\`\`` )); }, - success: function(){ + success() { return done(duringErr); } - });//_∏_ + });// _∏_ return; - }//--• + }// --• // ┌─┐┌┬┐┬ ┬┌─┐┬─┐┬ ┬┬┌─┐┌─┐ ╦ ╦╔═╗╔╗╔╔╦╗╦ ╔═╗ ┌─┐┬ ┬┌─┐┌─┐┌─┐┌─┐┌─┐ @@ -216,7 +216,7 @@ module.exports = function doWithConnection(options, done){ // If not, then go ahead and just send back the result from `during`. if (options.connection) { return done(undefined, resultMaybe); - }//-• + }// -• // But otherwise, we must have made an ad hoc connection earlier. // So before calling the `done` callback, try to release it. @@ -228,33 +228,33 @@ module.exports = function doWithConnection(options, done){ // (`report.meta` is ignored...) 
} catch (secondaryErr) { return done(new Error( - 'The code in the provided `during` function ran successfully with this\n'+ - 'db connection, but afterwards, when attempting to automatically release\n'+ - 'the connection (since it was leased ad hoc), there was an error:\n'+ - '```\n' + - secondaryErr.stack+'\n'+ - '```' + `The code in the provided \`during\` function ran successfully with this\n` + + `db connection, but afterwards, when attempting to automatically release\n` + + `the connection (since it was leased ad hoc), there was an error:\n` + + `\`\`\`\n${ + secondaryErr.stack}\n` + + `\`\`\`` )); } return done(undefined, resultMaybe); - }//-• + }// -• options.WET_MACHINES.releaseConnection({ connection: db, meta: options.meta }).switch({ - error: function(secondaryErr) { + error(secondaryErr) { return done(new Error( - 'The code in the provided `during` function ran successfully with this\n'+ - 'db connection, but afterwards, when attempting to automatically release\n'+ - 'the connection (since it was leased ad hoc), there was an error:\n'+ - '```\n' + - secondaryErr.stack+'\n'+ - '```' + `The code in the provided \`during\` function ran successfully with this\n` + + `db connection, but afterwards, when attempting to automatically release\n` + + `the connection (since it was leased ad hoc), there was an error:\n` + + `\`\`\`\n${ + secondaryErr.stack}\n` + + `\`\`\`` )); }, - success: function(){ + success() { return done(undefined, resultMaybe); } - });// + });// - });// - });// + });// + });// }; diff --git a/lib/private/machines/avg-records.js b/lib/private/machines/avg-records.js index d48ec9f4c..97013357e 100644 --- a/lib/private/machines/avg-records.js +++ b/lib/private/machines/avg-records.js @@ -25,36 +25,36 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // Dependencies - var _ = require('@sailshq/lodash'); - var buildMongoWhereClause = require('./private/build-mongo-where-clause'); + const _ = require('@sailshq/lodash'); + const buildMongoWhereClause = require('./private/build-mongo-where-clause'); // Local var for the stage 3 query, for easier access. - var s3q = inputs.query; + const s3q = inputs.query; // Local var for the `tableName`, for clarity. - var tableName = s3q.using; + const tableName = s3q.using; // Local var for the name of the numeric field, for clarity. // // > Remember: Contrary to what you might think given its naming, // > by the time it gets to the adapter (in an s3q), the `numericAttrName` // > qk has already been normalized to be a column name, not an attribute name. - var numericFieldName = s3q.numericAttrName; + const numericFieldName = s3q.numericAttrName; // Grab the model definition - var WLModel = _.find(inputs.dryOrm.models, {tableName: tableName}); + const WLModel = _.find(inputs.dryOrm.models, {tableName}); if (!WLModel) { - return exits.error(new Error('No model with that tableName (`'+tableName+'`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter\'s internal state have been corrupted? (This error is usually due to a bug in this adapter\'s implementation.)')); - }//-• + return exits.error(new Error(`No model with that tableName (\`${tableName}\`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter's internal state have been corrupted? 
(This error is usually due to a bug in this adapter's implementation.)`)); + }// -• // ┌┬┐┌─┐┌┐┌┌─┐┌─┐┬┌─┐┬ ┬ ╔═╗╦═╗╦╔╦╗╔═╗╦═╗╦╔═╗ // ││││ │││││ ┬│ ││├┤ └┬┘ ║ ╠╦╝║ ║ ║╣ ╠╦╝║╠═╣ // ┴ ┴└─┘┘└┘└─┘└─┘┴└ ┴ ╚═╝╩╚═╩ ╩ ╚═╝╩╚═╩╩ ╩ // Build a Mongo-style WHERE from the `where` clause. - var mongoWhere; + let mongoWhere; try { mongoWhere = buildMongoWhereClause(s3q.criteria.where, WLModel, s3q.meta); } catch (e) { return exits.error(e); } @@ -62,27 +62,27 @@ module.exports = { // ╔═╗╔═╗╔╦╗╔╦╗╦ ╦╔╗╔╦╔═╗╔═╗╔╦╗╔═╗ ┬ ┬┬┌┬┐┬ ┬ ┌┬┐┌┐ // ║ ║ ║║║║║║║║ ║║║║║║ ╠═╣ ║ ║╣ ││││ │ ├─┤ ││├┴┐ // ╚═╝╚═╝╩ ╩╩ ╩╚═╝╝╚╝╩╚═╝╩ ╩ ╩ ╚═╝ └┴┘┴ ┴ ┴ ┴ ─┴┘└─┘ - var db = inputs.connection; - var mongoCollection = db.collection(tableName); - var cursor = mongoCollection.aggregate([ + const db = inputs.connection; + const mongoCollection = db.collection(tableName); + const cursor = mongoCollection.aggregate([ { $match: mongoWhere }, { $group: { _id: numericFieldName, avg: { - $avg: '$'+numericFieldName + $avg: `$${numericFieldName}` } } } ], { cursor: {} }); - cursor.toArray(function aggregateCb(err, nativeResult) { + cursor.toArray((err, nativeResult) => { if (err) { return exits.error(err); } - var mean = 0; + let mean = 0; if (_.first(nativeResult)) { mean = _.first(nativeResult).avg; } return exits.success(mean); - });// + });// } }; diff --git a/lib/private/machines/count-records.js b/lib/private/machines/count-records.js index 5d0e7156b..8d9e3f937 100644 --- a/lib/private/machines/count-records.js +++ b/lib/private/machines/count-records.js @@ -25,32 +25,32 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // Dependencies - var _ = require('@sailshq/lodash'); - var buildMongoWhereClause = require('./private/build-mongo-where-clause'); + const _ = require('@sailshq/lodash'); + const buildMongoWhereClause = require('./private/build-mongo-where-clause'); // Local var for the stage 3 query, for easier access. - var s3q = inputs.query; + const s3q = inputs.query; if (s3q.meta && s3q.meta.logMongoS3Qs) { console.log('* * * * * *\nADAPTER (COUNT RECORDS):',require('util').inspect(s3q,{depth:5}),'\n'); } // Local var for the `tableName`, for clarity. - var tableName = s3q.using; + const tableName = s3q.using; // Grab the model definition - var WLModel = _.find(inputs.dryOrm.models, {tableName: tableName}); + const WLModel = _.find(inputs.dryOrm.models, {tableName}); if (!WLModel) { - return exits.error(new Error('No model with that tableName (`'+tableName+'`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter\'s internal state have been corrupted? (This error is usually due to a bug in this adapter\'s implementation.)')); - }//-• + return exits.error(new Error(`No model with that tableName (\`${tableName}\`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter's internal state have been corrupted? (This error is usually due to a bug in this adapter's implementation.)`)); + }// -• // ┌┬┐┌─┐┌┐┌┌─┐┌─┐┬┌─┐┬ ┬ ╔═╗╦═╗╦╔╦╗╔═╗╦═╗╦╔═╗ // ││││ │││││ ┬│ ││├┤ └┬┘ ║ ╠╦╝║ ║ ║╣ ╠╦╝║╠═╣ // ┴ ┴└─┘┘└┘└─┘└─┘┴└ ┴ ╚═╝╩╚═╩ ╩ ╚═╝╩╚═╩╩ ╩ // Build a Mongo-style WHERE from the `where` clause. 
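For context, the avg-records change above reduces to a two-stage aggregation. A minimal sketch using the Node MongoDB driver, assuming an already-connected `db` handle and placeholder collection/field names:

```js
// 'users' and 'age' are placeholders; $match takes the built Mongo-style WHERE,
// and $group computes the mean of the numeric field.
const cursor = db.collection('users').aggregate([
  { $match: { status: 'active' } },
  { $group: { _id: 'age', avg: { $avg: '$age' } } }
], { cursor: {} });

cursor.toArray((err, nativeResult) => {
  if (err) { return console.error(err); }
  // No matching documents ⇒ report a mean of 0, as the machine above does.
  const mean = nativeResult[0] ? nativeResult[0].avg : 0;
  console.log(mean);
});
```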
- var mongoWhere; + let mongoWhere; try { mongoWhere = buildMongoWhereClause(s3q.criteria.where, WLModel, s3q.meta); } catch (e) { return exits.error(e); } @@ -59,9 +59,9 @@ module.exports = { // ╔═╗╔═╗╔╦╗╔╦╗╦ ╦╔╗╔╦╔═╗╔═╗╔╦╗╔═╗ ┬ ┬┬┌┬┐┬ ┬ ┌┬┐┌┐ // ║ ║ ║║║║║║║║ ║║║║║║ ╠═╣ ║ ║╣ ││││ │ ├─┤ ││├┴┐ // ╚═╝╚═╝╩ ╩╩ ╩╚═╝╝╚╝╩╚═╝╩ ╩ ╩ ╚═╝ └┴┘┴ ┴ ┴ ┴ ─┴┘└─┘ - var db = inputs.connection; - var mongoCollection = db.collection(tableName); - mongoCollection.find(mongoWhere).count(function countCb(err, nativeResult) { + const db = inputs.connection; + const mongoCollection = db.collection(tableName); + mongoCollection.find(mongoWhere).count((err, nativeResult) => { if (err) { return exits.error(err); } return exits.success(nativeResult); diff --git a/lib/private/machines/create-each-record.js b/lib/private/machines/create-each-record.js index 57ad7cd83..a1d47c591 100644 --- a/lib/private/machines/create-each-record.js +++ b/lib/private/machines/create-each-record.js @@ -27,36 +27,36 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // Dependencies - var _ = require('@sailshq/lodash'); - var processNativeRecord = require('./private/process-native-record'); - var processNativeError = require('./private/process-native-error'); - var reifyValuesToSet = require('./private/reify-values-to-set'); + const _ = require('@sailshq/lodash'); + const processNativeRecord = require('./private/process-native-record'); + const processNativeError = require('./private/process-native-error'); + const reifyValuesToSet = require('./private/reify-values-to-set'); // Local var for the stage 3 query, for easier access. - var s3q = inputs.query; + const s3q = inputs.query; if (s3q.meta && s3q.meta.logMongoS3Qs) { console.log('* * * * * *\nADAPTER (CREATE EACH RECORD):',require('util').inspect(s3q,{depth:5}),'\n'); } // Local var for the `tableName`, for clarity. - var tableName = s3q.using; + const tableName = s3q.using; // Grab the model definition - var WLModel = _.find(inputs.dryOrm.models, {tableName: tableName}); + const WLModel = _.find(inputs.dryOrm.models, {tableName}); if (!WLModel) { - return exits.error(new Error('No model with that tableName (`'+tableName+'`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter\'s internal state have been corrupted? (This error is usually due to a bug in this adapter\'s implementation.)')); - }//-• + return exits.error(new Error(`No model with that tableName (\`${tableName}\`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter's internal state have been corrupted? 
(This error is usually due to a bug in this adapter's implementation.)`)); + }// -• // ╦═╗╔═╗╦╔═╗╦ ╦ ┌─┐┌─┐┌─┐┬ ┬ ┌┐┌┌─┐┬ ┬ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐ // ╠╦╝║╣ ║╠╣ ╚╦╝ ├┤ ├─┤│ ├─┤ │││├┤ │││ ├┬┘├┤ │ │ │├┬┘ ││ // ╩╚═╚═╝╩╚ ╩ └─┘┴ ┴└─┘┴ ┴ ┘└┘└─┘└┴┘ ┴└─└─┘└─┘└─┘┴└──┴┘ try { - _.each(s3q.newRecords, function (newRecord){ + _.each(s3q.newRecords, (newRecord) => { reifyValuesToSet(newRecord, WLModel, s3q.meta); }); } catch (e) { return exits.error(e); } @@ -65,7 +65,7 @@ module.exports = { // ╔╦╗╔═╗╔╦╗╔═╗╦═╗╔╦╗╦╔╗╔╔═╗ ┬ ┬┬ ┬┌─┐┌┬┐┬ ┬┌─┐┬─┐ ┌┬┐┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌─┐┬─┐ ┌┐┌┌─┐┌┬┐ // ║║║╣ ║ ║╣ ╠╦╝║║║║║║║║╣ │││├─┤├┤ │ ├─┤├┤ ├┬┘ │ │ │ ╠╣ ║╣ ║ ║ ╠═╣ │ │├┬┘ ││││ │ │ // ═╩╝╚═╝ ╩ ╚═╝╩╚═╩ ╩╩╝╚╝╚═╝ └┴┘┴ ┴└─┘ ┴ ┴ ┴└─┘┴└─ ┴ └─┘ ╚ ╚═╝ ╩ ╚═╝╩ ╩ └─┘┴└─ ┘└┘└─┘ ┴ - var isFetchEnabled; + let isFetchEnabled; if (s3q.meta && s3q.meta.fetch) { isFetchEnabled = true; } else { isFetchEnabled = false; } @@ -77,25 +77,25 @@ module.exports = { // FUTURE: Carry through the `fetch: false` optimization all the way to Mongo here, // if possible (e.g. using Mongo's projections API) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - var db = inputs.connection; - var mongoCollection = db.collection(tableName); + const db = inputs.connection; + const mongoCollection = db.collection(tableName); // if (s3q.meta && s3q.meta.logMongoS3Qs) { // console.log('- - - - - - - - - -CREATE EACH: s3q.newRecords:',require('util').inspect(s3q.newRecords,{depth:5}),'\n'); // } - mongoCollection.insertMany(s3q.newRecords, function (err, nativeResult) { + mongoCollection.insertMany(s3q.newRecords, (err, nativeResult) => { if (err) { err = processNativeError(err); if (err.footprint && err.footprint.identity === 'notUnique') { return exits.notUnique(err); } return exits.error(err); - }//-• + }// -• // If `fetch` is NOT enabled, we're done. if (!isFetchEnabled) { return exits.success(); - }//-• + }// -• // Otherwise, IWMIH we'll be sending back records: @@ -105,9 +105,9 @@ module.exports = { // ╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ │││├─┤ │ │└┐┌┘├┤ ├┬┘├┤ │ │ │├┬┘ │││ └─┐ │ // ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ ┘└┘┴ ┴ ┴ ┴ └┘ └─┘ ┴└─└─┘└─┘└─┘┴└──┴┘└─└─┘─┘ // Process record(s) (mutate in-place) to wash away adapter-specific eccentricities. 
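The create-each flow above follows a recurring pattern in these machines: only send records back when the `fetch` meta flag is on. A condensed sketch (driver 3.x, where the `insertMany` result still exposes `.ops`; names are placeholders):

```js
// Placeholder helper illustrating the fetch-aware bulk insert shown above.
function createEach(db, tableName, newRecords, meta, done) {
  const isFetchEnabled = Boolean(meta && meta.fetch);
  db.collection(tableName).insertMany(newRecords, (err, nativeResult) => {
    if (err) { return done(err); }
    if (!isFetchEnabled) { return done(); }     // skip the extra payload
    return done(undefined, nativeResult.ops);   // created docs (driver 3.x shape)
  });
}
```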
- var phRecords = nativeResult.ops; + const phRecords = nativeResult.ops; try { - _.each(phRecords, function (phRecord){ + _.each(phRecords, (phRecord) => { processNativeRecord(phRecord, WLModel, s3q.meta); }); } catch (e) { return exits.error(e); } diff --git a/lib/private/machines/create-manager.js b/lib/private/machines/create-manager.js index bd64f0d4d..0c0720bd0 100644 --- a/lib/private/machines/create-manager.js +++ b/lib/private/machines/create-manager.js @@ -14,7 +14,7 @@ module.exports = { connectionString: { description: 'The Mongo connection URL containing the configuration/credentials necessary for connecting to the database.', - moreInfoUrl: 'http://sailsjs.com/documentation/reference/configuration/sails-config-datastores#?the-connection-url', + moreInfoUrl: 'https://sailsjs.com/documentation/reference/configuration/sails-config-datastores#?the-connection-url', // example: 'mongodb://foo:bar@localhost:27017/thedatabase', example: '===', required: true @@ -89,13 +89,13 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { - var _ = require('@sailshq/lodash'); - var NodeMongoDBNativeLib = require('mongodb'); - var CONFIG_WHITELIST = require('../constants/config-whitelist.constant'); - var EXPECTED_URL_PROTOCOL_PFX = require('../constants/expected-url-protocol-pfx.constant'); - var normalizeDatastoreConfig = require('../normalize-datastore-config'); + const _ = require('@sailshq/lodash'); + const NodeMongoDBNativeLib = require('mongodb'); + const CONFIG_WHITELIST = require('../constants/config-whitelist.constant'); + const EXPECTED_URL_PROTOCOL_PFX = require('../constants/expected-url-protocol-pfx.constant'); + const normalizeDatastoreConfig = require('../normalize-datastore-config'); // Note: // Support for different types of managers is database-specific, and is not @@ -106,7 +106,7 @@ module.exports = { // contributions to the core adapter in this area are welcome and greatly appreciated! // Normalize datastore. - var _clientConfig = _.extend({ + let _clientConfig = _.extend({ url: inputs.connectionString }, inputs.meta); @@ -122,7 +122,7 @@ module.exports = { // Mongo doesn't like some of our standard properties, so we'll remove them // (we don't need any of them now anyway, since we know at this point that // they'll have been baked into the URL) - var mongoUrl = _clientConfig.url; + const mongoUrl = _clientConfig.url; _clientConfig = _.omit(_clientConfig, ['url', 'user', 'password', 'host', 'port', 'database']); // Use unified topology. MongoDB node maintainers recommends this to be enabled @@ -133,13 +133,13 @@ module.exports = { useUnifiedTopology: true }, _clientConfig); - // http://mongodb.github.io/node-mongodb-native/3.5/api/MongoClient.html#.connect - NodeMongoDBNativeLib.MongoClient.connect(mongoUrl, _clientConfig, function connectCb(err, client) { + // https://mongodb.github.io/node-mongodb-native/3.5/api/MongoClient.html#.connect + NodeMongoDBNativeLib.MongoClient.connect(mongoUrl, _clientConfig, (err, client) => { if (err) { return exits.error(err); } - var manager = client.db(_clientConfig.database); + const manager = client.db(_clientConfig.database); manager.client = client; // Now mutate this manager, giving it a telltale. 
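The create-manager hunk above boils down to connecting with the unified topology and treating the resulting `Db` handle as the Waterline "manager". A minimal sketch with a placeholder connection URL (driver 3.5.x callback API):

```js
const { MongoClient } = require('mongodb');

// 'mongodb://localhost:27017/mydb' is a placeholder URL.
MongoClient.connect('mongodb://localhost:27017/mydb', {
  useUnifiedTopology: true // enabled by the hunk above, per the driver maintainers' advice
}, (err, client) => {
  if (err) { throw err; }
  const manager = client.db('mydb'); // the "manager" handed back to Waterline
  manager.client = client;           // kept around so destroy-manager can close it later
});
```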
@@ -153,10 +153,10 @@ module.exports = { manager._isFromMPMongo = true; return exits.success({ - manager: manager, + manager, meta: inputs.meta }); - });// + });// } diff --git a/lib/private/machines/create-record.js b/lib/private/machines/create-record.js index 22d390657..7683245ee 100644 --- a/lib/private/machines/create-record.js +++ b/lib/private/machines/create-record.js @@ -27,28 +27,28 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // Dependencies - var util = require('util'); - var _ = require('@sailshq/lodash'); - var processNativeRecord = require('./private/process-native-record'); - var processNativeError = require('./private/process-native-error'); - var reifyValuesToSet = require('./private/reify-values-to-set'); + const util = require('util'); + const _ = require('@sailshq/lodash'); + const processNativeRecord = require('./private/process-native-record'); + const processNativeError = require('./private/process-native-error'); + const reifyValuesToSet = require('./private/reify-values-to-set'); // Local var for the stage 3 query, for easier access. - var s3q = inputs.query; + const s3q = inputs.query; if (s3q.meta && s3q.meta.logMongoS3Qs) { console.log('* * * * * *\nADAPTER (CREATE RECORD):',require('util').inspect(s3q,{depth:5}),'\n'); } // Local var for the `tableName`, for clarity. - var tableName = s3q.using; + const tableName = s3q.using; // Grab the model definition - var WLModel = _.find(inputs.dryOrm.models, {tableName: tableName}); + const WLModel = _.find(inputs.dryOrm.models, {tableName}); if (!WLModel) { - return exits.error(new Error('No model with that tableName (`'+tableName+'`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter\'s internal state have been corrupted? (This error is usually due to a bug in this adapter\'s implementation.)')); - }//-• + return exits.error(new Error(`No model with that tableName (\`${tableName}\`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter's internal state have been corrupted? (This error is usually due to a bug in this adapter's implementation.)`)); + }// -• // ╦═╗╔═╗╦╔═╗╦ ╦ ┬ ┬┌─┐┬ ┬ ┬┌─┐┌─┐ ┌┬┐┌─┐ ┌─┐┌─┐┌┬┐ @@ -62,7 +62,7 @@ module.exports = { // ╔╦╗╔═╗╔╦╗╔═╗╦═╗╔╦╗╦╔╗╔╔═╗ ┬ ┬┬ ┬┌─┐┌┬┐┬ ┬┌─┐┬─┐ ┌┬┐┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌─┐┬─┐ ┌┐┌┌─┐┌┬┐ // ║║║╣ ║ ║╣ ╠╦╝║║║║║║║║╣ │││├─┤├┤ │ ├─┤├┤ ├┬┘ │ │ │ ╠╣ ║╣ ║ ║ ╠═╣ │ │├┬┘ ││││ │ │ // ═╩╝╚═╝ ╩ ╚═╝╩╚═╩ ╩╩╝╚╝╚═╝ └┴┘┴ ┴└─┘ ┴ ┴ ┴└─┘┴└─ ┴ └─┘ ╚ ╚═╝ ╩ ╚═╝╩ ╩ └─┘┴└─ ┘└┘└─┘ ┴ - var isFetchEnabled; + let isFetchEnabled; if (s3q.meta && s3q.meta.fetch) { isFetchEnabled = true; } else { isFetchEnabled = false; } @@ -74,16 +74,16 @@ module.exports = { // FUTURE: Carry through the `fetch: false` optimization all the way to Mongo here, // if possible (e.g. using Mongo's projections API) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - var db = inputs.connection; - var mongoCollection = db.collection(tableName); - mongoCollection.insertOne(s3q.newRecord, function (err, nativeResult) { + const db = inputs.connection; + const mongoCollection = db.collection(tableName); + mongoCollection.insertOne(s3q.newRecord, (err, nativeResult) => { if (err) { err = processNativeError(err); if (err.footprint && err.footprint.identity === 'notUnique') { return exits.notUnique(err); } return exits.error(err); - }//-• + }// -• // If `fetch` is NOT enabled, we're done. 
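The same mechanical conversions recur across every machine file in this patch: `const`/`let` instead of `var`, shorthand object properties, method and arrow shorthand, and template literals in error messages. A condensed before/after sketch (the `lookupModel` helper is hypothetical):

```js
const _ = require('@sailshq/lodash');

// Before: var WLModel = _.find(models, { tableName: tableName });
// After, in the style applied throughout this patch:
function lookupModel(models, tableName) {
  const WLModel = _.find(models, { tableName }); // shorthand property
  if (!WLModel) {
    throw new Error(`No model with that tableName (\`${tableName}\`) is registered.`);
  }
  return WLModel;
}
```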
@@ -97,7 +97,7 @@ module.exports = { // https://github.com/node-machine/driver-interface) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - return exits.success(); - }//-• + }// -• // Otherwise, IWMIH we'll be sending back a record: @@ -105,14 +105,14 @@ module.exports = { // Sanity check: Verify that there is only one record. if (nativeResult.ops.length !== 1) { - return exits.error(new Error('Consistency violation: Unexpected # of records returned from Mongo (in `.ops`). Native result:\n```\n'+util.inspect(nativeResult, {depth: 5})+'\n```')); + return exits.error(new Error(`Consistency violation: Unexpected # of records returned from Mongo (in \`.ops\`). Native result:\n\`\`\`\n${util.inspect(nativeResult, {depth: 5})}\n\`\`\``)); } // ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┌┐┌┌─┐┌┬┐┬┬ ┬┌─┐ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐ // ╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ │││├─┤ │ │└┐┌┘├┤ ├┬┘├┤ │ │ │├┬┘ ││ // ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ ┘└┘┴ ┴ ┴ ┴ └┘ └─┘ ┴└─└─┘└─┘└─┘┴└──┴┘ // Process record (mutate in-place) to wash away adapter-specific eccentricities. - var phRecord = nativeResult.ops[0]; + const phRecord = nativeResult.ops[0]; try { processNativeRecord(phRecord, WLModel, s3q.meta); } catch (e) { return exits.error(e); } diff --git a/lib/private/machines/define-physical-model.js b/lib/private/machines/define-physical-model.js index 4aac1a7ee..4d58bef04 100644 --- a/lib/private/machines/define-physical-model.js +++ b/lib/private/machines/define-physical-model.js @@ -38,7 +38,7 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // Note that this is currently implemented inline in the main adapter file. // (It will change to use this approach in a future release of sails-mongo.) return exits.error(new Error('Not implemented yet')); diff --git a/lib/private/machines/destroy-manager.js b/lib/private/machines/destroy-manager.js index 4643dd5dc..c8ee8fb7f 100644 --- a/lib/private/machines/destroy-manager.js +++ b/lib/private/machines/destroy-manager.js @@ -1,4 +1,4 @@ -var _ = require('@sailshq/lodash'); +const _ = require('@sailshq/lodash'); module.exports = { @@ -64,7 +64,7 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // If the manager doesn't have a `close` function for some reason, // then catch that ahead of time so we can provide a slightly nicer diff --git a/lib/private/machines/destroy-records.js b/lib/private/machines/destroy-records.js index 38833766b..741a7c7cf 100644 --- a/lib/private/machines/destroy-records.js +++ b/lib/private/machines/destroy-records.js @@ -25,31 +25,31 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // Dependencies - var _ = require('@sailshq/lodash'); - var processNativeRecord = require('./private/process-native-record'); - var buildMongoWhereClause = require('./private/build-mongo-where-clause'); + const _ = require('@sailshq/lodash'); + const processNativeRecord = require('./private/process-native-record'); + const buildMongoWhereClause = require('./private/build-mongo-where-clause'); // Local var for the stage 3 query, for easier access. - var s3q = inputs.query; + const s3q = inputs.query; if (s3q.meta && s3q.meta.logMongoS3Qs) { console.log('* * * * * *\nADAPTER (DESTROY RECORDS):',require('util').inspect(s3q,{depth:5}),'\n'); } // Local var for the `tableName`, for clarity. 
- var tableName = s3q.using; + const tableName = s3q.using; // Grab the model definition - var WLModel = _.find(inputs.dryOrm.models, {tableName: tableName}); + const WLModel = _.find(inputs.dryOrm.models, {tableName}); if (!WLModel) { - return exits.error(new Error('No model with that tableName (`'+tableName+'`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter\'s internal state have been corrupted? (This error is usually due to a bug in this adapter\'s implementation.)')); - }//-• + return exits.error(new Error(`No model with that tableName (\`${tableName}\`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter's internal state have been corrupted? (This error is usually due to a bug in this adapter's implementation.)`)); + }// -• // Grab the pk column name (for use below) - var pkColumnName; + let pkColumnName; try { pkColumnName = WLModel.attributes[WLModel.primaryKey].columnName; } catch (e) { return exits.error(e); } @@ -58,7 +58,7 @@ module.exports = { // ╔╦╗╔═╗╔╦╗╔═╗╦═╗╔╦╗╦╔╗╔╔═╗ ┬ ┬┬ ┬┌─┐┌┬┐┬ ┬┌─┐┬─┐ ┌┬┐┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌─┐┬─┐ ┌┐┌┌─┐┌┬┐ // ║║║╣ ║ ║╣ ╠╦╝║║║║║║║║╣ │││├─┤├┤ │ ├─┤├┤ ├┬┘ │ │ │ ╠╣ ║╣ ║ ║ ╠═╣ │ │├┬┘ ││││ │ │ // ═╩╝╚═╝ ╩ ╚═╝╩╚═╩ ╩╩╝╚╝╚═╝ └┴┘┴ ┴└─┘ ┴ ┴ ┴└─┘┴└─ ┴ └─┘ ╚ ╚═╝ ╩ ╚═╝╩ ╩ └─┘┴└─ ┘└┘└─┘ ┴ - var isFetchEnabled; + let isFetchEnabled; if (s3q.meta && s3q.meta.fetch) { isFetchEnabled = true; } else { isFetchEnabled = false; } @@ -67,7 +67,7 @@ module.exports = { // ││││ │││││ ┬│ ││├┤ └┬┘ ║ ╠╦╝║ ║ ║╣ ╠╦╝║╠═╣ // ┴ ┴└─┘┘└┘└─┘└─┘┴└ ┴ ╚═╝╩╚═╩ ╩ ╚═╝╩╚═╩╩ ╩ // Build a Mongo-style WHERE from the `where` clause. - var mongoWhere; + let mongoWhere; try { mongoWhere = buildMongoWhereClause(s3q.criteria.where, WLModel, s3q.meta); } catch (e) { return exits.error(e); } @@ -76,8 +76,8 @@ module.exports = { // ╔═╗╔═╗╔╦╗╔╦╗╦ ╦╔╗╔╦╔═╗╔═╗╔╦╗╔═╗ ┬ ┬┬┌┬┐┬ ┬ ┌┬┐┌┐ // ║ ║ ║║║║║║║║ ║║║║║║ ╠═╣ ║ ║╣ ││││ │ ├─┤ ││├┴┐ // ╚═╝╚═╝╩ ╩╩ ╩╚═╝╝╚╝╩╚═╝╩ ╩ ╩ ╚═╝ └┴┘┴ ┴ ┴ ┴ ─┴┘└─┘ - var db = inputs.connection; - var mongoCollection = db.collection(tableName); + const db = inputs.connection; + const mongoCollection = db.collection(tableName); // First, if fetch is set to true get all the records that match the given // criteria. This way they can be returned after the destroy. @@ -87,16 +87,16 @@ module.exports = { } // Find matching records. - mongoCollection.find(mongoWhere).toArray(function findCb(err, nativeResult) { + mongoCollection.find(mongoWhere).toArray((err, nativeResult) => { if (err) { return proceed(err); } return proceed(undefined, nativeResult); }); - })(function findMatchingRecordsCb(err, phRecords) { + })((err, phRecords) => { if (err) { return exits.error(err); } // Destroy the documents in the db. 
- var secondaryMongoWhere; + let secondaryMongoWhere; if (!isFetchEnabled) { secondaryMongoWhere = mongoWhere; } @@ -104,19 +104,19 @@ module.exports = { secondaryMongoWhere = {}; secondaryMongoWhere[pkColumnName] = { '$in': _.pluck(phRecords, pkColumnName) }; } - mongoCollection.deleteMany(secondaryMongoWhere, function deleteCb(err) { + mongoCollection.deleteMany(secondaryMongoWhere, (err) => { if (err) { return exits.error(err); } if (!isFetchEnabled) { return exits.success(); - }//-• + }// -• // ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┌┐┌┌─┐┌┬┐┬┬ ┬┌─┐ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┌─┐─┐ // ╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ │││├─┤ │ │└┐┌┘├┤ ├┬┘├┤ │ │ │├┬┘ │││ └─┐ │ // ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ ┘└┘┴ ┴ ┴ ┴ └┘ └─┘ ┴└─└─┘└─┘└─┘┴└──┴┘└─└─┘─┘ // Process records (mutate in-place) to wash away adapter-specific eccentricities. try { - _.each(phRecords, function (phRecord){ + _.each(phRecords, (phRecord) => { processNativeRecord(phRecord, WLModel, s3q.meta); }); } catch (e) { return exits.error(e); } @@ -124,7 +124,7 @@ module.exports = { return exits.success(phRecords); }); // - });// + });// } }; diff --git a/lib/private/machines/drop-physical-model.js b/lib/private/machines/drop-physical-model.js index 138357a2b..1b9b81b24 100644 --- a/lib/private/machines/drop-physical-model.js +++ b/lib/private/machines/drop-physical-model.js @@ -22,7 +22,7 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // Note that this is currently implemented inline in the main adapter file. // (It will change to use this approach in a future release of sails-mongo.) return exits.error(new Error('Not implemented yet')); diff --git a/lib/private/machines/find-records.js b/lib/private/machines/find-records.js index e5b53bfc1..bea862837 100644 --- a/lib/private/machines/find-records.js +++ b/lib/private/machines/find-records.js @@ -19,45 +19,45 @@ module.exports = { success: { outputFriendlyName: 'Records', outputDescription: 'An array of physical records.', - outputExample: '===' //[ {===} ] + outputExample: '===' // [ {===} ] }, }, - fn: function (inputs, exits) { + fn(inputs, exits) { // Dependencies - var assert = require('assert'); - var _ = require('@sailshq/lodash'); - var processNativeRecord = require('./private/process-native-record'); - var buildMongoWhereClause = require('./private/build-mongo-where-clause'); + const assert = require('assert'); + const _ = require('@sailshq/lodash'); + const processNativeRecord = require('./private/process-native-record'); + const buildMongoWhereClause = require('./private/build-mongo-where-clause'); // Local var for the stage 3 query, for easier access. - var s3q = inputs.query; + const s3q = inputs.query; if (s3q.meta && s3q.meta.logMongoS3Qs) { console.log('* * * * * *\nADAPTER (FIND RECORDS):',require('util').inspect(s3q,{depth:10}),'\n'); } // Local var for the `tableName`, for clarity. - var tableName = s3q.using; + const tableName = s3q.using; // Grab the model definition - var WLModel = _.find(inputs.dryOrm.models, {tableName: tableName}); + const WLModel = _.find(inputs.dryOrm.models, {tableName}); if (!WLModel) { - return exits.error(new Error('No model with that tableName (`'+tableName+'`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter\'s internal state have been corrupted? (This error is usually due to a bug in this adapter\'s implementation.)')); - }//-• + return exits.error(new Error(`No model with that tableName (\`${tableName}\`) has been registered with this adapter. 
Were any unexpected modifications made to the stage 3 query? Could the adapter's internal state have been corrupted? (This error is usually due to a bug in this adapter's implementation.)`)); + }// -• // ┌┬┐┌─┐┌┐┌┌─┐┌─┐┬┌─┐┬ ┬ ╔═╗╦═╗╦╔╦╗╔═╗╦═╗╦╔═╗ // ││││ │││││ ┬│ ││├┤ └┬┘ ║ ╠╦╝║ ║ ║╣ ╠╦╝║╠═╣ // ┴ ┴└─┘┘└┘└─┘└─┘┴└ ┴ ╚═╝╩╚═╩ ╩ ╚═╝╩╚═╩╩ ╩ - var db = inputs.connection; - var mongoCollection = db.collection(tableName); + const db = inputs.connection; + const mongoCollection = db.collection(tableName); // Build a Mongo-style WHERE from the `where` clause. - var mongoWhere; + let mongoWhere; try { mongoWhere = buildMongoWhereClause(s3q.criteria.where, WLModel, s3q.meta); } catch (e) { return exits.error(e); } @@ -69,14 +69,14 @@ module.exports = { // Transform the `sort` clause from a stage 3 query into a Mongo sort. - var mongoSort = _.map(s3q.criteria.sort, function mapSort(s3qSortDirective) { + const mongoSort = _.map(s3q.criteria.sort, (s3qSortDirective) => { - var mongoSortDirective = []; + const mongoSortDirective = []; - var sortByKey = _.first(_.keys(s3qSortDirective)); + const sortByKey = _.first(_.keys(s3qSortDirective)); mongoSortDirective.push(sortByKey); - var sortDirection = s3qSortDirective[sortByKey]; + const sortDirection = s3qSortDirective[sortByKey]; assert(sortDirection === 'ASC' || sortDirection === 'DESC', 'At this point, the sort direction should always be ASC or DESC (capitalized). If you are seeing this message, there is probably a bug somewhere in your version of Waterline core.'); mongoSortDirective.push(sortDirection === 'ASC' ? 1 : -1); @@ -85,9 +85,9 @@ module.exports = { }); // Create the initial Mongo deferred, taking care of `where`, `limit`, and `sort`. - var mongoDeferred; + let mongoDeferred; try { - assert(_.isNumber(s3q.criteria.limit), 'At this point, the limit should always be a number, but instead it is `'+s3q.criteria.limit+'`. If you are seeing this message, there is probably a bug somewhere in your version of Waterline core.'); + assert(_.isNumber(s3q.criteria.limit), `At this point, the limit should always be a number, but instead it is \`${s3q.criteria.limit}\`. If you are seeing this message, there is probably a bug somewhere in your version of Waterline core.`); mongoDeferred = mongoCollection.find(mongoWhere).limit(s3q.criteria.limit); if (mongoSort.length) { mongoDeferred = mongoDeferred.sort(mongoSort); @@ -99,7 +99,7 @@ module.exports = { if (s3q.criteria.select) { // Transform the stage-3 query select array into a Mongo projection dictionary. - var projection = _.reduce(s3q.criteria.select, function reduceProjection(memo, colName) { + const projection = _.reduce(s3q.criteria.select, (memo, colName) => { memo[colName] = 1; return memo; }, {}); @@ -117,16 +117,16 @@ module.exports = { // ║ ║ ║║║║║║║║ ║║║║║║ ╠═╣ ║ ║╣ ││││ │ ├─┤ ││├┴┐ // ╚═╝╚═╝╩ ╩╩ ╩╚═╝╝╚╝╩╚═╝╩ ╩ ╩ ╚═╝ └┴┘┴ ┴ ┴ ┴ ─┴┘└─┘ // Find the documents in the db. - mongoDeferred.toArray(function findCb(err, nativeResult) { + mongoDeferred.toArray((err, nativeResult) => { if (err) { return exits.error(err); } // ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┌┐┌┌─┐┌┬┐┬┬ ┬┌─┐ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┌─┐─┐ // ╠═╝╠╦╝║ ║║ ║╣ ╚═╗╚═╗ │││├─┤ │ │└┐┌┘├┤ ├┬┘├┤ │ │ │├┬┘ │││ └─┐ │ // ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ ┘└┘┴ ┴ ┴ ┴ └┘ └─┘ ┴└─└─┘└─┘└─┘┴└──┴┘└─└─┘─┘ // Process records (mutate in-place) to wash away adapter-specific eccentricities. 
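The find-records hunk above assembles the cursor from the stage 3 criteria: `sort` directives become `[column, 1 | -1]` pairs, `limit`/`skip` map directly, and `select` becomes a projection. A minimal sketch with an already-connected `db` handle and placeholder names:

```js
// Placeholder criteria: WHERE status = 'active', newest first, second page of 30,
// returning only `name` and `age`.
const skip = 30;
let deferred = db.collection('users')
  .find({ status: 'active' })      // the built Mongo-style WHERE
  .limit(30)
  .sort([['createdAt', -1]]);      // 'DESC' → -1, 'ASC' → 1

deferred = deferred.project({ name: 1, age: 1 }); // from the `select` clause
if (skip) { deferred = deferred.skip(skip); }

deferred.toArray((err, records) => {
  if (err) { return console.error(err); }
  console.log(records);
});
```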
- var phRecords = nativeResult; + const phRecords = nativeResult; try { - _.each(phRecords, function (phRecord){ + _.each(phRecords, (phRecord) => { processNativeRecord(phRecord, WLModel, s3q.meta); }); } catch (e) { return exits.error(e); } diff --git a/lib/private/machines/get-connection.js b/lib/private/machines/get-connection.js index 07f56788c..0af33a2bb 100644 --- a/lib/private/machines/get-connection.js +++ b/lib/private/machines/get-connection.js @@ -55,7 +55,7 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // This is a no-op that just sends back the manager and `meta` that were passed in. // Currently in mp-mongo, a "manager" and "connection" are the same thing: a Mongo client instance. return exits.success({ diff --git a/lib/private/machines/private/build-mongo-where-clause.js b/lib/private/machines/private/build-mongo-where-clause.js index 39b775537..16fe8b44b 100644 --- a/lib/private/machines/private/build-mongo-where-clause.js +++ b/lib/private/machines/private/build-mongo-where-clause.js @@ -2,10 +2,10 @@ * Module dependencies */ -var util = require('util'); -var assert = require('assert'); -var _ = require('@sailshq/lodash'); -var normalizeMongoObjectId = require('./normalize-mongo-object-id'); +const util = require('util'); +const assert = require('assert'); +const _ = require('@sailshq/lodash'); +const normalizeMongoObjectId = require('./normalize-mongo-object-id'); /** @@ -42,24 +42,22 @@ module.exports = function buildMongoWhereClause(whereClause, WLModel, meta) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Recursively build and return a transformed `where` clause for use with Mongo. - var mongoQueryFilter = (function recurse(branch) { - var loneKey = _.first(_.keys(branch)); + const mongoQueryFilter = (function recurse(branch) { + const loneKey = _.first(_.keys(branch)); // ╔═╗╦═╗╔═╗╔╦╗╦╔═╗╔═╗╔╦╗╔═╗ // ╠═╝╠╦╝║╣ ║║║║ ╠═╣ ║ ║╣ // ╩ ╩╚═╚═╝═╩╝╩╚═╝╩ ╩ ╩ ╚═╝ if (loneKey === 'and' || loneKey === 'or') { - var conjunctsOrDisjuncts = branch[loneKey]; - branch['$' + loneKey] = _.map(conjunctsOrDisjuncts, function(conjunctOrDisjunct){ - return recurse(conjunctOrDisjunct); - }); + const conjunctsOrDisjuncts = branch[loneKey]; + branch[`$${loneKey}`] = _.map(conjunctsOrDisjuncts, conjunctOrDisjunct => recurse(conjunctOrDisjunct)); delete branch[loneKey]; return branch; - }//-• + }// -• // IWMIH, we're dealing with a constraint of some kind. - var constraintColumnName = loneKey; - var constraint = branch[constraintColumnName]; + const constraintColumnName = loneKey; + const constraint = branch[constraintColumnName]; // Determine whether we should compare as an object id. @@ -72,20 +70,20 @@ module.exports = function buildMongoWhereClause(whereClause, WLModel, meta) { // > try to convert the eq constraint / relevant modifier into an ObjectId // > instance, if possible. (We still gracefully fall back to tolerate // > filtering by pk/fk vs. miscellaneous strings.) 
- var doCompareAsObjectIdIfPossible; + let doCompareAsObjectIdIfPossible; assert(_.isString(WLModel.primaryKey) && WLModel.primaryKey, 'Model def should always have a `primaryKey` setting by the time the model definition is handed down to the adapter (this should have already been taken care of in WL core)'); - var pkAttrDef = WLModel.attributes[WLModel.primaryKey]; + const pkAttrDef = WLModel.attributes[WLModel.primaryKey]; assert(_.isObject(pkAttrDef), 'PK attribute should always exist (this should have already been taken care of in WL core)'); - var pkColumnName = pkAttrDef.columnName; - assert(_.isString(pkColumnName) && pkColumnName, 'PK attribute should always have a column name by the time the model definition is handed down to the adapter (this should have already been taken care of in WL core). But actual pk attribute def on the model looks like this: '+util.inspect(pkAttrDef, {depth:5})+''); + const pkColumnName = pkAttrDef.columnName; + assert(_.isString(pkColumnName) && pkColumnName, `PK attribute should always have a column name by the time the model definition is handed down to the adapter (this should have already been taken care of in WL core). But actual pk attribute def on the model looks like this: ${util.inspect(pkAttrDef, {depth:5})}`); if (constraintColumnName === pkColumnName && (!meta || !meta.modelsNotUsingObjectIds || !_.contains(meta.modelsNotUsingObjectIds, WLModel.identity))) { doCompareAsObjectIdIfPossible = true; } else { - _.each(WLModel.attributes, function (attrDef /*, attrName */) { - var isForeignKey = !!attrDef.model; + _.each(WLModel.attributes, (attrDef /* , attrName */) => { + const isForeignKey = !!attrDef.model; // Sanity checks: if (isForeignKey) { assert(attrDef.foreignKey, 'attribute has a `model` property, but wl-schema did not give it `foreignKey: true`!'); @@ -115,16 +113,16 @@ module.exports = function buildMongoWhereClause(whereClause, WLModel, meta) { default: throw e; } } - }//>- + }// >- return branch; - }//-• + }// -• // ╔═╗╔═╗╔╦╗╔═╗╦ ╔═╗═╗ ╦ ╔═╗╔═╗╔╗╔╔═╗╔╦╗╦═╗╔═╗╦╔╗╔╔╦╗ // ║ ║ ║║║║╠═╝║ ║╣ ╔╩╦╝ ║ ║ ║║║║╚═╗ ║ ╠╦╝╠═╣║║║║ ║ // ╚═╝╚═╝╩ ╩╩ ╩═╝╚═╝╩ ╚═ ╚═╝╚═╝╝╚╝╚═╝ ╩ ╩╚═╩ ╩╩╝╚╝ ╩ - var modifierKind = _.first(_.keys(constraint)); - var modifier = constraint[modifierKind]; + const modifierKind = _.first(_.keys(constraint)); + let modifier = constraint[modifierKind]; delete constraint[modifierKind]; @@ -158,7 +156,7 @@ module.exports = function buildMongoWhereClause(whereClause, WLModel, meta) { default: throw e; } } - }//>- + }// >- constraint['$ne'] = modifier; @@ -167,7 +165,7 @@ module.exports = function buildMongoWhereClause(whereClause, WLModel, meta) { case 'nin': // Same as above: Convert mongo id(s) to ObjectId instance(s) if appropriate/possible. - modifier = _.map(modifier, function (item) { + modifier = _.map(modifier, (item) => { if (doCompareAsObjectIdIfPossible && _.isString(item)) { try { item = normalizeMongoObjectId(item); @@ -177,9 +175,9 @@ module.exports = function buildMongoWhereClause(whereClause, WLModel, meta) { default: throw e; } } - }//>- + }// >- return item; - });// + });// constraint['$nin'] = modifier; break; @@ -191,7 +189,7 @@ module.exports = function buildMongoWhereClause(whereClause, WLModel, meta) { // console.log('doCompareAsObjectIdIfPossible:', doCompareAsObjectIdIfPossible); // Same as above: Convert mongo id(s) to ObjectId instance(s) if appropriate/possible. 
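The where-clause builder above recursively rewrites Waterline criteria into Mongo query syntax: `and`/`or` branches become `$and`/`$or`, `in`/`nin` become `$in`/`$nin` (coercing ids to ObjectIds where possible), and `like` becomes an anchored `$regex`. A deliberately simplified sketch that skips the ObjectId coercion and regex escaping handled in the hunk:

```js
// Illustration only: a reduced version of the criteria → Mongo translation.
function toMongoWhere(where) {
  const key = Object.keys(where)[0];
  if (key === 'and' || key === 'or') {
    return { [`$${key}`]: where[key].map(toMongoWhere) }; // recurse into branches
  }
  const constraint = where[key];
  if (constraint && typeof constraint === 'object') {
    const modifier = Object.keys(constraint)[0];
    if (modifier === 'like') {
      // '%' wildcards become an anchored regular expression.
      return { [key]: { $regex: new RegExp(`^${constraint[modifier].replace(/%/g, '.*')}$`) } };
    }
    return { [key]: { [`$${modifier}`]: constraint[modifier] } }; // e.g. nin → $nin
  }
  return { [key]: constraint }; // plain equality constraint
}

// toMongoWhere({ and: [{ name: { like: 'Finn%' } }, { status: { nin: ['archived'] } }] })
// => { $and: [{ name: { $regex: /^Finn.*$/ } }, { status: { $nin: ['archived'] } }] }
```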
- modifier = _.map(modifier, function (item) { + modifier = _.map(modifier, (item) => { if (doCompareAsObjectIdIfPossible && _.isString(item)) { try { item = normalizeMongoObjectId(item); @@ -201,9 +199,9 @@ module.exports = function buildMongoWhereClause(whereClause, WLModel, meta) { default: throw e; } } - }//>- + }// >- return item; - });// + });// // console.log('Mongo-ified $in:', modifier); // console.log('typeof the first one:', typeof modifier[0]); @@ -211,14 +209,14 @@ module.exports = function buildMongoWhereClause(whereClause, WLModel, meta) { break; case 'like': - constraint['$regex'] = new RegExp('^' + _.escapeRegExp(modifier).replace(/^%/, '.*').replace(/([^\\])%/g, '$1.*').replace(/\\%/g, '%') + '$'); + constraint['$regex'] = new RegExp(`^${_.escapeRegExp(modifier).replace(/^%/, '.*').replace(/([^\\])%/g, '$1.*').replace(/\\%/g, '%')}$`); if (meta && meta.makeLikeModifierCaseInsensitive && _.isBoolean(meta.makeLikeModifierCaseInsensitive)) { constraint['$options'] = 'i'; } break; default: - throw new Error('Consistency violation: `where` clause modifier `' + modifierKind + '` is not valid! This should never happen-- a stage 3 query should have already been normalized in Waterline core.'); + throw new Error(`Consistency violation: \`where\` clause modifier \`${modifierKind}\` is not valid! This should never happen-- a stage 3 query should have already been normalized in Waterline core.`); } diff --git a/lib/private/machines/private/normalize-mongo-object-id.js b/lib/private/machines/private/normalize-mongo-object-id.js index c1e109b6a..b23502693 100644 --- a/lib/private/machines/private/normalize-mongo-object-id.js +++ b/lib/private/machines/private/normalize-mongo-object-id.js @@ -2,9 +2,9 @@ * Module dependencies */ -var _ = require('@sailshq/lodash'); -var flaverr = require('flaverr'); -var ObjectId = require('mongodb').ObjectID || require('mongodb').ObjectId; +const _ = require('@sailshq/lodash'); +const flaverr = require('flaverr'); +const ObjectId = require('mongodb').ObjectID || require('mongodb').ObjectId; /** * normalizeMongoObjectId() @@ -67,35 +67,35 @@ module.exports = function normalizeMongoObjectId(supposedId) { // First of all, if the supposed id is a Mongo ObjectId instance, // then just return it, straight away. - if (_.isObject(supposedId) && supposedId instanceof ObjectId){ + if (_.isObject(supposedId) && supposedId instanceof ObjectId) { return supposedId; } // Otherwise try to interpret the supposed mongo id as a hex string. // (note that we also implement a failsafe) else if (_.isString(supposedId) && ObjectId.isValid(supposedId)) { - var objectified = new ObjectId(supposedId); + const objectified = new ObjectId(supposedId); // Sanity check: if (objectified.toString() !== supposedId) { throw new Error( - 'Consistency violation: Unexpected result interpreting `'+supposedId+'` as a Mongo ObjectId. '+ - 'After instantiating the provided value as an ObjectId instance, then calling .toString() '+ - 'on it, the result (`'+objectified.toString()+'`) is somehow DIFFERENT than the originally-provided '+ - 'value (`'+supposedId+'`)... even though the mongo lib said it was `.isValid()`. (This is likely '+ - 'due to a bug in the Mongo adapter, or somewhere else along the way. Please report at http://sailsjs.com/bugs)' + `Consistency violation: Unexpected result interpreting \`${supposedId}\` as a Mongo ObjectId. 
` + + `After instantiating the provided value as an ObjectId instance, then calling .toString() ` + + `on it, the result (\`${objectified.toString()}\`) is somehow DIFFERENT than the originally-provided ` + + `value (\`${supposedId}\`)... even though the mongo lib said it was \`.isValid()\`. (This is likely ` + + `due to a bug in the Mongo adapter, or somewhere else along the way. Please report at https://sailsjs.com/bugs)` ); - }//-• + }// -• return objectified; } // Otherwise, give up. else { throw flaverr('E_CANNOT_INTERPRET_AS_OBJECTID', new Error( - 'Cannot interpret `'+supposedId+'` as a Mongo id.\n'+ - '(Usually, this is the result of a bug in application logic.)\n'+ - 'For more info on Mongo ids, see:\n'+ - '• https://docs.mongodb.com/manual/reference/bson-types/#objectid\n'+ - '• http://sailsjs.com/support' + `Cannot interpret \`${supposedId}\` as a Mongo id.\n` + + `(Usually, this is the result of a bug in application logic.)\n` + + `For more info on Mongo ids, see:\n` + + `• https://docs.mongodb.com/manual/reference/bson-types/#objectid\n` + + `• https://sailsjs.com/support` )); } diff --git a/lib/private/machines/private/process-native-error.js b/lib/private/machines/private/process-native-error.js index 8cd0f738e..8319ca936 100644 --- a/lib/private/machines/private/process-native-error.js +++ b/lib/private/machines/private/process-native-error.js @@ -2,7 +2,7 @@ * Module dependencies */ -var _ = require('@sailshq/lodash'); +const _ = require('@sailshq/lodash'); /** @@ -19,7 +19,7 @@ module.exports = function processNativeError(err) { // Verify that there is no pre-existing footprint. // (This should never happen) if (!_.isUndefined(err.footprint)) { - return new Error('Consistency violation: Raw error from MongoDB arrived with a pre-existing `footprint` property! Should never happen... but maybe this error didn\'t actually come from Mongo..? Here\'s the error:\n\n```\n'+err.stack+'\n```\n'); + return new Error(`Consistency violation: Raw error from MongoDB arrived with a pre-existing \`footprint\` property! Should never happen... but maybe this error didn't actually come from Mongo..? Here's the error:\n\n\`\`\`\n${err.stack}\n\`\`\`\n`); } // ███╗ ██╗ ██████╗ ████████╗ ██╗ ██╗███╗ ██╗██╗ ██████╗ ██╗ ██╗███████╗ @@ -42,7 +42,7 @@ module.exports = function processNativeError(err) { // If we can infer which field this error is referring to, then add // that problematic key to the `keys` array of the footprint. // > Remember, this is by "columnName", not attr name! 
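The normalize-mongo-object-id changes above keep the same logic while switching to template literals: accept an existing ObjectId as-is, otherwise interpret a valid hex string, and insist that the round trip back through `.toString()` matches. A minimal sketch of that check (simplified error handling):

```js
const { ObjectId } = require('mongodb');

function normalizeId(supposedId) {
  if (supposedId instanceof ObjectId) { return supposedId; }
  if (typeof supposedId === 'string' && ObjectId.isValid(supposedId)) {
    const objectified = new ObjectId(supposedId);
    // Failsafe from the hunk above: round-tripping must reproduce the input.
    if (objectified.toString() !== supposedId) {
      throw new Error(`Unexpected result interpreting \`${supposedId}\` as a Mongo ObjectId.`);
    }
    return objectified;
  }
  throw new Error(`Cannot interpret \`${supposedId}\` as a Mongo id.`);
}
```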
- var problematicKey; + let problematicKey; // For now, we avoid trying to determine this extra information, since we don't // have a strategy that can successfully figure it out in a performant way @@ -83,7 +83,7 @@ module.exports = function processNativeError(err) { return err; - }//‡-• + }// ‡-• // ███╗ ███╗██╗███████╗ ██████╗ // ████╗ ████║██║██╔════╝██╔════╝ // ██╔████╔██║██║███████╗██║ diff --git a/lib/private/machines/private/process-native-record.js b/lib/private/machines/private/process-native-record.js index 75466e731..c75d69cd0 100644 --- a/lib/private/machines/private/process-native-record.js +++ b/lib/private/machines/private/process-native-record.js @@ -2,10 +2,10 @@ * Module dependencies */ -var assert = require('assert'); -var _ = require('@sailshq/lodash'); -var ObjectId = require('mongodb').ObjectID || require('mongodb').ObjectId; -var Binary = require('mongodb').Binary; +const assert = require('assert'); +const _ = require('@sailshq/lodash'); +const ObjectId = require('mongodb').ObjectID || require('mongodb').ObjectId; +const {Binary} = require('mongodb'); @@ -36,13 +36,13 @@ module.exports = function processNativeRecord(nativeRecord, WLModel, meta) { // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - // Determine whether or not to use object ids. - var useObjectIds = !meta || !meta.modelsNotUsingObjectIds || !_.contains(meta.modelsNotUsingObjectIds, WLModel.identity); + const useObjectIds = !meta || !meta.modelsNotUsingObjectIds || !_.contains(meta.modelsNotUsingObjectIds, WLModel.identity); // Convert pk values (instantiated ObjectIds) back to hex strings. (if relevant) if (useObjectIds) { - var primaryKeyColumnName = WLModel.attributes[WLModel.primaryKey].columnName; - var pkValue = nativeRecord[primaryKeyColumnName]; + const primaryKeyColumnName = WLModel.attributes[WLModel.primaryKey].columnName; + const pkValue = nativeRecord[primaryKeyColumnName]; if (_.isObject(pkValue) && pkValue instanceof ObjectId) { nativeRecord[primaryKeyColumnName] = pkValue.toString(); } @@ -53,12 +53,12 @@ module.exports = function processNativeRecord(nativeRecord, WLModel, meta) { // might have had a non-object ID was stored at some point) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - } - }//>- + }// >- // Check out each known attribute... - _.each(WLModel.attributes, function (attrDef) { - var phRecordKey = attrDef.columnName; + _.each(WLModel.attributes, (attrDef) => { + const phRecordKey = attrDef.columnName; // Detect any `type: 'ref'` attributes that were retrieved as Binary objects, // and transform them back to Buffer objects. 
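process-native-record, shown above, washes driver-specific types out of each returned document: ObjectId primary keys (and foreign keys) become hex strings, and BSON `Binary` values for `type: 'ref'` attributes become plain Buffers again. A minimal per-record sketch with placeholder field names (`_id`, `avatar`):

```js
const { ObjectId, Binary } = require('mongodb');

// Illustration only: clean up one raw document from the driver.
function cleanNativeRecord(record) {
  // Primary keys come back as ObjectId instances; hand Waterline hex strings.
  if (record._id instanceof ObjectId) {
    record._id = record._id.toString();
  }
  // A `type: 'ref'` attribute stored as BSON Binary: unwrap the underlying Buffer.
  if (record.avatar instanceof Binary && record.avatar.buffer) {
    record.avatar = record.avatar.buffer;
  }
  return record;
}
```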
@@ -69,9 +69,9 @@ module.exports = function processNativeRecord(nativeRecord, WLModel, meta) { if (attrDef.type === 'ref' && _.isObject(nativeRecord[phRecordKey]) && nativeRecord[phRecordKey] instanceof Binary && nativeRecord[phRecordKey].buffer) { nativeRecord[phRecordKey] = nativeRecord[phRecordKey].buffer; return; - }//-• + }// -• - var isForeignKey = !!attrDef.model; + const isForeignKey = !!attrDef.model; // Sanity checks: if (isForeignKey) { assert(attrDef.foreignKey, 'attribute has a `model` property, but wl-schema did not give it `foreignKey: true`!'); @@ -80,9 +80,9 @@ module.exports = function processNativeRecord(nativeRecord, WLModel, meta) { assert(!attrDef.foreignKey, 'wl-schema gave this attribute `foreignKey: true`, but it has no `model` property!'); } - if (!isForeignKey) { return; }//-• - if (_.isUndefined(nativeRecord[phRecordKey])) { /* This is weird, but WL core deals with warning about it. */ return; }//-• - if (_.isNull(nativeRecord[phRecordKey])) { return; }//-• + if (!isForeignKey) { return; }// -• + if (_.isUndefined(nativeRecord[phRecordKey])) { /* This is weird, but WL core deals with warning about it. */ return; }// -• + if (_.isNull(nativeRecord[phRecordKey])) { return; }// -• // Now, if relevant, convert ObjectId foreign keys to hex strings. (i.e. for singular associations) if (!meta || !meta.modelsNotUsingObjectIds || !_.contains(meta.modelsNotUsingObjectIds, attrDef.model)) { @@ -96,8 +96,8 @@ module.exports = function processNativeRecord(nativeRecord, WLModel, meta) { // might have had a non-object ID was stored at some point) // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - } - }//>- + }// >- - });// + });// }; diff --git a/lib/private/machines/private/reify-values-to-set.js b/lib/private/machines/private/reify-values-to-set.js index ed7557a50..b697d5261 100644 --- a/lib/private/machines/private/reify-values-to-set.js +++ b/lib/private/machines/private/reify-values-to-set.js @@ -2,10 +2,10 @@ * Module dependencies */ -var assert = require('assert'); -var _ = require('@sailshq/lodash'); -var flaverr = require('flaverr'); -var normalizeMongoObjectId = require('./normalize-mongo-object-id'); +const assert = require('assert'); +const _ = require('@sailshq/lodash'); +const flaverr = require('flaverr'); +const normalizeMongoObjectId = require('./normalize-mongo-object-id'); /** @@ -29,15 +29,15 @@ module.exports = function reifyValuesToSet(valuesToSet, WLModel, meta) { assert(_.isObject(valuesToSet) && !_.isArray(valuesToSet) && !_.isFunction(valuesToSet),'2nd argument must be a WLModel, and it has to have a `definition` property for this utility to work.'); // Determine whether or not to use object ids. - var useObjectIds = !meta || !meta.modelsNotUsingObjectIds || !_.contains(meta.modelsNotUsingObjectIds, WLModel.identity); + const useObjectIds = !meta || !meta.modelsNotUsingObjectIds || !_.contains(meta.modelsNotUsingObjectIds, WLModel.identity); // If trying to set the PK value explicitly (e.g. `_id`), try to interpret it // as a hex string, instantiate a Mongo ObjectId instance for it, and swap out // the original string for that instead before proceeding. - // (Why? See http://stackoverflow.com/a/27897720/486547) - var primaryKeyAttrName = WLModel.primaryKey; - var primaryKeyColumnName = WLModel.attributes[WLModel.primaryKey].columnName; - var pkValue = valuesToSet[primaryKeyColumnName]; + // (Why? 
See https://stackoverflow.com/a/27897720/486547) + const primaryKeyAttrName = WLModel.primaryKey; + const primaryKeyColumnName = WLModel.attributes[WLModel.primaryKey].columnName; + const pkValue = valuesToSet[primaryKeyColumnName]; // If the PK value is set to `null`, then remove it. // > Remember: `null` here has special meaning in Waterline -- it means there @@ -54,21 +54,21 @@ module.exports = function reifyValuesToSet(valuesToSet, WLModel, meta) { } catch (e) { switch (e.code) { case 'E_CANNOT_INTERPRET_AS_OBJECTID': - throw flaverr(e.code, new Error('Invalid primary key value provided for `'+primaryKeyAttrName+'`. '+e.message)); + throw flaverr(e.code, new Error(`Invalid primary key value provided for \`${primaryKeyAttrName}\`. ${e.message}`)); default: throw e; } } - }//>- + }// >- - }//>- + }// >- // Now we'll do the same thing for any explicit foreign keys that were provided. // (i.e. for singular associations) - _.each(WLModel.attributes, function (attrDef, attrName) { - var phRecordKey = attrDef.columnName; + _.each(WLModel.attributes, (attrDef, attrName) => { + const phRecordKey = attrDef.columnName; - var isForeignKey = !!attrDef.model; + const isForeignKey = !!attrDef.model; // Sanity checks: if (isForeignKey) { assert(attrDef.foreignKey, 'attribute has a `model` property, but wl-schema did not give it `foreignKey: true`!'); @@ -83,7 +83,7 @@ module.exports = function reifyValuesToSet(valuesToSet, WLModel, meta) { // If the FK was provided as `null`, then it's automatically OK. if (_.isNull(valuesToSet[phRecordKey])) { return; - }//-• + }// -• // But otherwise, we'll attempt to convert it into an ObjectID instance. if (!meta || !meta.modelsNotUsingObjectIds || !_.contains(meta.modelsNotUsingObjectIds, attrDef.model)) { @@ -92,12 +92,12 @@ module.exports = function reifyValuesToSet(valuesToSet, WLModel, meta) { } catch (e) { switch (e.code) { case 'E_CANNOT_INTERPRET_AS_OBJECTID': - throw flaverr(e.code, new Error('Invalid replacement foreign key value provided for association (`'+attrName+'`). '+e.message)); + throw flaverr(e.code, new Error(`Invalid replacement foreign key value provided for association (\`${attrName}\`). ${e.message}`)); default: throw e; } - }// - }//>- + }// + }// >- - });// + });// }; diff --git a/lib/private/machines/release-connection.js b/lib/private/machines/release-connection.js index d7d18f0a0..b9b3e0395 100644 --- a/lib/private/machines/release-connection.js +++ b/lib/private/machines/release-connection.js @@ -1,4 +1,4 @@ -var _ = require('@sailshq/lodash'); +const _ = require('@sailshq/lodash'); module.exports = { @@ -62,7 +62,7 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // If the connection doesn't have a `close` function for some reason, // then catch that ahead of time so we can provide a slightly nicer diff --git a/lib/private/machines/set-physical-sequence.js b/lib/private/machines/set-physical-sequence.js index 0ddd8b20a..58d361a8a 100644 --- a/lib/private/machines/set-physical-sequence.js +++ b/lib/private/machines/set-physical-sequence.js @@ -23,7 +23,7 @@ module.exports = { }, - fn: function(inputs, exits) { + fn(inputs, exits) { // This is a no-op in this adapter. // // > i.e. 
we do not currently implement support for ad hoc auto-incrementing diff --git a/lib/private/machines/sum-records.js b/lib/private/machines/sum-records.js index 490077da3..fcecf1054 100644 --- a/lib/private/machines/sum-records.js +++ b/lib/private/machines/sum-records.js @@ -28,33 +28,33 @@ module.exports = { fn: function sum(inputs, exits) { // Dependencies - var _ = require('@sailshq/lodash'); - var buildMongoWhereClause = require('./private/build-mongo-where-clause'); + const _ = require('@sailshq/lodash'); + const buildMongoWhereClause = require('./private/build-mongo-where-clause'); // Local var for the stage 3 query, for easier access. - var s3q = inputs.query; + const s3q = inputs.query; // Local var for the `tableName`, for clarity. - var tableName = s3q.using; + const tableName = s3q.using; // Local var for the name of the numeric field, for clarity. // // > Remember: Contrary to what you might think given its naming, // > by the time it gets to the adapter (in an s3q), the `numericAttrName` // > qk has already been normalized to be a column name, not an attribute name. - var numericFieldName = s3q.numericAttrName; + const numericFieldName = s3q.numericAttrName; // Grab the model definition - var WLModel = _.find(inputs.dryOrm.models, {tableName: tableName}); + const WLModel = _.find(inputs.dryOrm.models, {tableName}); if (!WLModel) { - return exits.error(new Error('No model with that tableName (`'+tableName+'`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter\'s internal state have been corrupted? (This error is usually due to a bug in this adapter\'s implementation.)')); - }//-• + return exits.error(new Error(`No model with that tableName (\`${tableName}\`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter's internal state have been corrupted? (This error is usually due to a bug in this adapter's implementation.)`)); + }// -• // ┌┬┐┌─┐┌┐┌┌─┐┌─┐┬┌─┐┬ ┬ ╔═╗╦═╗╦╔╦╗╔═╗╦═╗╦╔═╗ // ││││ │││││ ┬│ ││├┤ └┬┘ ║ ╠╦╝║ ║ ║╣ ╠╦╝║╠═╣ // ┴ ┴└─┘┘└┘└─┘└─┘┴└ ┴ ╚═╝╩╚═╩ ╩ ╚═╝╩╚═╩╩ ╩ // Build a Mongo-style WHERE from the `where` clause. 
- var mongoWhere; + let mongoWhere; try { mongoWhere = buildMongoWhereClause(s3q.criteria.where, WLModel, s3q.meta); } catch (e) { return exits.error(e); } @@ -62,27 +62,27 @@ module.exports = { // ╔═╗╔═╗╔╦╗╔╦╗╦ ╦╔╗╔╦╔═╗╔═╗╔╦╗╔═╗ ┬ ┬┬┌┬┐┬ ┬ ┌┬┐┌┐ // ║ ║ ║║║║║║║║ ║║║║║║ ╠═╣ ║ ║╣ ││││ │ ├─┤ ││├┴┐ // ╚═╝╚═╝╩ ╩╩ ╩╚═╝╝╚╝╩╚═╝╩ ╩ ╩ ╚═╝ └┴┘┴ ┴ ┴ ┴ ─┴┘└─┘ - var db = inputs.connection; - var mongoCollection = db.collection(tableName); - var cursor = mongoCollection.aggregate([ + const db = inputs.connection; + const mongoCollection = db.collection(tableName); + const cursor = mongoCollection.aggregate([ { $match: mongoWhere }, { $group: { _id: numericFieldName, sum: { - $sum: '$'+numericFieldName + $sum: `$${numericFieldName}` } } } ], { cursor: {} }); - cursor.toArray(function aggregateCb(err, nativeResult) { + cursor.toArray((err, nativeResult) => { if (err) { return exits.error(err); } - var sum = 0; + let sum = 0; if (_.first(nativeResult)) { sum = _.first(nativeResult).sum; } return exits.success(sum); - });// + });// } }; diff --git a/lib/private/machines/update-records.js b/lib/private/machines/update-records.js index d885a96b7..697baac34 100644 --- a/lib/private/machines/update-records.js +++ b/lib/private/machines/update-records.js @@ -27,34 +27,34 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { // Dependencies - var _ = require('@sailshq/lodash'); - var processNativeRecord = require('./private/process-native-record'); - var processNativeError = require('./private/process-native-error'); - var reifyValuesToSet = require('./private/reify-values-to-set'); - var buildMongoWhereClause = require('./private/build-mongo-where-clause'); + const _ = require('@sailshq/lodash'); + const processNativeRecord = require('./private/process-native-record'); + const processNativeError = require('./private/process-native-error'); + const reifyValuesToSet = require('./private/reify-values-to-set'); + const buildMongoWhereClause = require('./private/build-mongo-where-clause'); // Local var for the stage 3 query, for easier access. - var s3q = inputs.query; + const s3q = inputs.query; if (s3q.meta && s3q.meta.logMongoS3Qs) { console.log('* * * * * *\nADAPTER (UPDATE RECORDS):',require('util').inspect(s3q,{depth:5}),'\n'); // console.log(typeof s3q.criteria.where._id.in[0]); } // Local var for the `tableName`, for clarity. - var tableName = s3q.using; + const tableName = s3q.using; // Grab the model definition - var WLModel = _.find(inputs.dryOrm.models, {tableName: tableName}); + const WLModel = _.find(inputs.dryOrm.models, {tableName}); if (!WLModel) { - return exits.error(new Error('No model with that tableName (`'+tableName+'`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter\'s internal state have been corrupted? (This error is usually due to a bug in this adapter\'s implementation.)')); - }//-• + return exits.error(new Error(`No model with that tableName (\`${tableName}\`) has been registered with this adapter. Were any unexpected modifications made to the stage 3 query? Could the adapter's internal state have been corrupted? 
(This error is usually due to a bug in this adapter's implementation.)`)); + }// -• // Grab the pk column name (for use below) - var pkColumnName; + let pkColumnName; try { pkColumnName = WLModel.attributes[WLModel.primaryKey].columnName; } catch (e) { return exits.error(e); } @@ -63,7 +63,7 @@ module.exports = { // ╔╦╗╔═╗╔╦╗╔═╗╦═╗╔╦╗╦╔╗╔╔═╗ ┬ ┬┬ ┬┌─┐┌┬┐┬ ┬┌─┐┬─┐ ┌┬┐┌─┐ ╔═╗╔═╗╔╦╗╔═╗╦ ╦ ┌─┐┬─┐ ┌┐┌┌─┐┌┬┐ // ║║║╣ ║ ║╣ ╠╦╝║║║║║║║║╣ │││├─┤├┤ │ ├─┤├┤ ├┬┘ │ │ │ ╠╣ ║╣ ║ ║ ╠═╣ │ │├┬┘ ││││ │ │ // ═╩╝╚═╝ ╩ ╚═╝╩╚═╩ ╩╩╝╚╝╚═╝ └┴┘┴ ┴└─┘ ┴ ┴ ┴└─┘┴└─ ┴ └─┘ ╚ ╚═╝ ╩ ╚═╝╩ ╩ └─┘┴└─ ┘└┘└─┘ ┴ - var isFetchEnabled; + let isFetchEnabled; if (s3q.meta && s3q.meta.fetch) { isFetchEnabled = true; } else { isFetchEnabled = false; } @@ -79,7 +79,7 @@ module.exports = { // ││││ │││││ ┬│ ││├┤ └┬┘ ║ ╠╦╝║ ║ ║╣ ╠╦╝║╠═╣ // ┴ ┴└─┘┘└┘└─┘└─┘┴└ ┴ ╚═╝╩╚═╩ ╩ ╚═╝╩╚═╩╩ ╩ // Build a Mongo-style WHERE from the `where` clause. - var mongoWhere; + let mongoWhere; try { mongoWhere = buildMongoWhereClause(s3q.criteria.where, WLModel, s3q.meta); } catch (e) { return exits.error(e); } @@ -88,8 +88,8 @@ module.exports = { // ╔═╗╔═╗╔╦╗╔╦╗╦ ╦╔╗╔╦╔═╗╔═╗╔╦╗╔═╗ ┬ ┬┬┌┬┐┬ ┬ ┌┬┐┌┐ // ║ ║ ║║║║║║║║ ║║║║║║ ╠═╣ ║ ║╣ ││││ │ ├─┤ ││├┴┐ // ╚═╝╚═╝╩ ╩╩ ╩╚═╝╝╚╝╩╚═╝╩ ╩ ╩ ╚═╝ └┴┘┴ ┴ ┴ ┴ ─┴┘└─┘ - var db = inputs.connection; - var mongoCollection = db.collection(tableName); + const db = inputs.connection; + const mongoCollection = db.collection(tableName); // First, get the IDs of records which match this criteria (if needed). (function findMatchingIdsMaybe(proceed) { @@ -97,22 +97,22 @@ module.exports = { return proceed(); } - var projection = {}; + const projection = {}; projection[pkColumnName] = 1; // console.log('* * * *'); // console.log('mongoWhere:',mongoWhere); // console.log('typeof mongoWhere._id.$in[0]:',typeof mongoWhere._id.$in[0]); // console.log('projection:',projection); - mongoCollection.find(mongoWhere, projection).toArray(function findCb(err, nativeResult) { + mongoCollection.find(mongoWhere, projection).toArray((err, nativeResult) => { if (err) { return proceed(err); } return proceed(undefined, _.pluck(nativeResult, pkColumnName)); }); - })(function findMatchingIdsMaybeCb(err, pkValsOfMatchingRecords) { + })((err, pkValsOfMatchingRecords) => { if (err) { return exits.error(err); } // Update the documents in the db. - var secondaryMongoWhere; + let secondaryMongoWhere; if (!isFetchEnabled) { secondaryMongoWhere = mongoWhere; } @@ -126,19 +126,19 @@ module.exports = { // console.log('- - - - - - - - - -UPDATE: secondaryMongoWhere:',secondaryMongoWhere, { '$set': s3q.valuesToSet }); // } - mongoCollection.updateMany(secondaryMongoWhere, { '$set': s3q.valuesToSet }, function updateManyCb(err) { + mongoCollection.updateMany(secondaryMongoWhere, { '$set': s3q.valuesToSet }, (err) => { if (err) { err = processNativeError(err); if (err.footprint && err.footprint.identity === 'notUnique') { return exits.notUnique(err); } return exits.error(err); - }//-• + }// -• // If fetch:true was not enabled, we're done! if (!isFetchEnabled) { return exits.success(); - }//-• + }// -• // Handle case where pk value was changed: @@ -148,17 +148,17 @@ module.exports = { // There should only ever be either zero or one record that were found before. 
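The update flow above is also fetch-aware: when `fetch` is enabled it first records which primary keys match, applies the `$set`, then re-reads those documents so they can be returned (the hunk additionally handles the edge case where the update changes the primary key itself, which this sketch omits). A condensed sketch with placeholder names:

```js
// `collection` is a driver Collection; `where` is the built Mongo-style WHERE.
function updateAndFetch(collection, where, valuesToSet, done) {
  // 1. Remember which documents will be touched.
  collection.find(where).project({ _id: 1 }).toArray((err, matches) => {
    if (err) { return done(err); }
    const ids = matches.map((doc) => doc._id);
    // 2. Apply the update to exactly those documents.
    collection.updateMany({ _id: { $in: ids } }, { $set: valuesToSet }, (updateErr) => {
      if (updateErr) { return done(updateErr); }
      // 3. Re-fetch the now-updated documents so they can be returned.
      collection.find({ _id: { $in: ids } }).toArray(done);
    });
  });
}
```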
if (pkValsOfMatchingRecords.length === 0) { /* do nothing */ } else if (pkValsOfMatchingRecords.length === 1) { - var oldPkValue = pkValsOfMatchingRecords[0]; + const oldPkValue = pkValsOfMatchingRecords[0]; _.remove(secondaryMongoWhere[pkColumnName]['$in'], oldPkValue); - var newPkValue = s3q.valuesToSet[pkColumnName]; + const newPkValue = s3q.valuesToSet[pkColumnName]; secondaryMongoWhere[pkColumnName]['$in'].push(newPkValue); } else { return exits.error(new Error('Consistency violation: Updated multiple records to have the same primary key value. (PK values should be unique!)')); } - }//>- + }// >- // Now re-fetch the now-updated records. - mongoCollection.find(secondaryMongoWhere).toArray(function (err, phRecords) { + mongoCollection.find(secondaryMongoWhere).toArray((err, phRecords) => { if (err) { return exits.error(err); } // ╔═╗╦═╗╔═╗╔═╗╔═╗╔═╗╔═╗ ┌┐┌┌─┐┌┬┐┬┬ ┬┌─┐ ┬─┐┌─┐┌─┐┌─┐┬─┐┌┬┐┌─┌─┐─┐ @@ -166,7 +166,7 @@ module.exports = { // ╩ ╩╚═╚═╝╚═╝╚═╝╚═╝╚═╝ ┘└┘┴ ┴ ┴ ┴ └┘ └─┘ ┴└─└─┘└─┘└─┘┴└──┴┘└─└─┘─┘ // Process records (mutate in-place) to wash away adapter-specific eccentricities. try { - _.each(phRecords, function (phRecord){ + _.each(phRecords, (phRecord) => { processNativeRecord(phRecord, WLModel, s3q.meta); }); } catch (e) { return exits.error(e); } @@ -174,7 +174,7 @@ module.exports = { return exits.success(phRecords); });// });// - });// + });// } }; diff --git a/lib/private/machines/verify-model-def.js b/lib/private/machines/verify-model-def.js index fc27035f8..91056dcb2 100644 --- a/lib/private/machines/verify-model-def.js +++ b/lib/private/machines/verify-model-def.js @@ -21,7 +21,7 @@ module.exports = { modelDef: { description: 'A Waterline model definition.', extendedDescription: 'This model definition should already be fully-formed (i.e. 
it should have undergone generic normalization/validation already).', - moreInfoUrl: 'http://sailsjs.com/documentation/concepts/models-and-orm/models', + moreInfoUrl: 'https://sailsjs.com/documentation/concepts/models-and-orm/models', example: '===',// {} readOnly: true, required: true @@ -41,7 +41,7 @@ module.exports = { }, - fn: function (inputs, exits) { + fn(inputs, exits) { return exits.success(); } diff --git a/lib/private/normalize-datastore-config/index.js b/lib/private/normalize-datastore-config/index.js index f42b0665f..0a31eeb5c 100644 --- a/lib/private/normalize-datastore-config/index.js +++ b/lib/private/normalize-datastore-config/index.js @@ -2,17 +2,17 @@ * Module dependencies */ -var assert = require('assert'); -var util = require('util'); -var url = require('url'); -var _ = require('@sailshq/lodash'); -var flaverr = require('flaverr'); -var qs = require('qs'); -var normalizeDatabase = require('./private/normalize-database'); -var normalizeUser = require('./private/normalize-user'); -var normalizePort = require('./private/normalize-port'); -var normalizeHost = require('./private/normalize-host'); -var normalizePassword = require('./private/normalize-password'); +const assert = require('assert'); +const util = require('util'); +const url = require('url'); +const _ = require('@sailshq/lodash'); +const flaverr = require('flaverr'); +const qs = require('qs'); +const normalizeDatabase = require('./private/normalize-database'); +const normalizeUser = require('./private/normalize-user'); +const normalizePort = require('./private/normalize-port'); +const normalizeHost = require('./private/normalize-host'); +const normalizePassword = require('./private/normalize-password'); /** @@ -54,7 +54,7 @@ var normalizePassword = require('./private/normalize-password'); * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ -module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expectedProtocolPrefix) { +module.exports = function normalizeDatastoreConfig(dsConfig, whitelist, expectedProtocolPrefix) { // Sanity checks assert(_.isObject(dsConfig), '`dsConfig` should exist and be a dictionary!'); @@ -66,7 +66,7 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // If items in BASELINE_PROPS are included in the querystring of the connection url, // they are allowed to remain, but are not automatically applied at the top-level. // (Note that this whitelist applies to overrides AND to querystring-encoded values) - var BASELINE_PROPS = [ + const BASELINE_PROPS = [ 'url', 'adapter', 'schema', @@ -84,12 +84,12 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // Have a look at the datastore config to get an idea of what's there. 
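The `var` replacements in these hunks follow one consistent rule: bindings that are never reassigned (such as the `has*Override` flags just below) become `const`, while anything written again after its declaration (such as `inventedUrl` here, or the coerced `port` in `normalize-port.js` further down) becomes `let`. A minimal standalone sketch of that split, with illustrative values:

```js
const _ = require('@sailshq/lodash');

// Never reassigned -> const (mirrors the has*Override flags below).
const dsConfig = { host: 'localhost', port: '27017' }; // made-up config, for illustration only
const hasPortOverride = !_.isUndefined(dsConfig.port);

// Reassigned after declaration -> let (mirrors the coercion in normalize-port.js).
let port = dsConfig.port;
if (_.isString(port)) {
  port = +port; // '27017' -> 27017
}

console.log(hasPortOverride, port); // => true 27017
```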
- var hasUrl = !_.isUndefined(dsConfig.url); - var hasUserOverride = !_.isUndefined(dsConfig.user); - var hasPasswordOverride = !_.isUndefined(dsConfig.password); - var hasHostOverride = !_.isUndefined(dsConfig.host); - var hasPortOverride = !_.isUndefined(dsConfig.port); - var hasDatabaseOverride = !_.isUndefined(dsConfig.database); + const hasUrl = !_.isUndefined(dsConfig.url); + const hasUserOverride = !_.isUndefined(dsConfig.user); + const hasPasswordOverride = !_.isUndefined(dsConfig.password); + const hasHostOverride = !_.isUndefined(dsConfig.host); + const hasPortOverride = !_.isUndefined(dsConfig.port); + const hasDatabaseOverride = !_.isUndefined(dsConfig.database); // ┌┐┌┌─┐┬─┐┌┬┐┌─┐┬ ┬┌─┐┌─┐ ╔═╗╦ ╦╔═╗╦═╗╦═╗╦╔╦╗╔═╗╔═╗ @@ -129,20 +129,20 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte } catch (e) { switch (e.code) { case 'E_BAD_CONFIG': throw flaverr('E_BAD_CONFIG', new Error( - 'Invalid override specified. '+e.message+'\n'+ - '--\n'+ - 'Please correct this and try again... Or better yet, specify a `url`! '+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + `Invalid override specified. ${e.message}\n` + + `--\n` + + `Please correct this and try again... Or better yet, specify a \`url\`! ` + + `(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)` )); default: throw e; } - }// + }// // Strip out any overrides w/ undefined values. // (And along the way, check overrides against whitelist if relevant) - var unrecognizedKeys; - _.each(Object.keys(dsConfig), function (key) { + let unrecognizedKeys; + _.each(Object.keys(dsConfig), (key) => { if (_.isUndefined(dsConfig[key])) { delete dsConfig[key]; } @@ -156,11 +156,11 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte if (unrecognizedKeys) { throw flaverr('E_BAD_CONFIG', new Error( - 'Unrecognized options (`'+unrecognizedKeys+'`) specified as config overrides.\n'+ - 'This adapter expects only whitelisted properties.\n'+ - '--\n'+ - 'See http://sailsjs.com/config/datastores#?the-connection-url for info,\n'+ - 'or visit https://sailsjs.com/support for more help.' + `Unrecognized options (\`${unrecognizedKeys}\`) specified as config overrides.\n` + + `This adapter expects only whitelisted properties.\n` + + `--\n` + + `See https://sailsjs.com/config/datastores#?the-connection-url for info,\n` + + `or visit https://sailsjs.com/support for more help.` )); } @@ -182,22 +182,22 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // FUTURE: Appropriately URL/URIComponent-encode this stuff as we build the url // Invent a connection URL on the fly. - var inventedUrl = (expectedProtocolPrefix||'db')+'://'; + let inventedUrl = `${expectedProtocolPrefix || 'db'}://`; // If authentication info was specified, add it: if (hasPasswordOverride && hasUserOverride) { - inventedUrl += dsConfig.user+':'+dsConfig.password+'@'; + inventedUrl += `${dsConfig.user}:${dsConfig.password}@`; } else if (!hasPasswordOverride && hasUserOverride) { - inventedUrl += dsConfig.user+'@'; + inventedUrl += `${dsConfig.user}@`; } else if (hasPasswordOverride && !hasUserOverride) { throw flaverr('E_BAD_CONFIG', new Error( - 'No `url` was specified, so tried to infer an appropriate connection URL from other properties. '+ - 'However, it looks like a `password` was specified, but no `user` was specified to go along with it.\n'+ - '--\n'+ - 'Please remove `password` or also specify a `user`. Or better yet, specify a `url`! 
'+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + 'No `url` was specified, so tried to infer an appropriate connection URL from other properties. ' + + 'However, it looks like a `password` was specified, but no `user` was specified to go along with it.\n' + + '--\n' + + 'Please remove `password` or also specify a `user`. Or better yet, specify a `url`! ' + + '(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)' )); } @@ -207,11 +207,11 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte } else { throw flaverr('E_BAD_CONFIG', new Error( - 'No `url` was specified, and no appropriate connection URL can be inferred (tried to use '+ - '`host: '+util.inspect(dsConfig.host)+'`).\n'+ - '--\n'+ - 'Please specify a `host`... Or better yet, specify a `url`! '+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + `No \`url\` was specified, and no appropriate connection URL can be inferred (tried to use ` + + `\`host: ${util.inspect(dsConfig.host)}\`).\n` + + `--\n` + + `Please specify a \`host\`... Or better yet, specify a \`url\`! ` + + `(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)` )); // Or alternatively... // ``` @@ -221,20 +221,20 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // If a port was specified, use it. if (hasPortOverride) { - inventedUrl += ':'+dsConfig.port; + inventedUrl += `:${dsConfig.port}`; } // If a database was specified, use it. if (hasDatabaseOverride) { - inventedUrl += '/'+dsConfig.database; + inventedUrl += `/${dsConfig.database}`; } else { throw flaverr('E_BAD_CONFIG', new Error( - 'No `url` was specified, and no appropriate connection URL can be inferred (tried to use '+ - '`database: '+util.inspect(dsConfig.database)+'`).\n'+ - '--\n'+ - 'Please specify a `database`... Or better yet, specify a `url`! '+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + `No \`url\` was specified, and no appropriate connection URL can be inferred (tried to use ` + + `\`database: ${util.inspect(dsConfig.database)}\`).\n` + + `--\n` + + `Please specify a \`database\`... Or better yet, specify a \`url\`! ` + + `(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)` )); } @@ -245,7 +245,7 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // > are encouraged to support the `url` setting, if conceivable. // > // > Read more here: - // > http://sailsjs.com/config/datastores#?the-connection-url + // > https://sailsjs.com/config/datastores#?the-connection-url // - - - - - - - - - - - - - - - - - - - - - - - - // Now save our invented URL as `url`. @@ -261,9 +261,9 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // Perform a basic sanity check & string coercion. if (!_.isString(dsConfig.url) || dsConfig.url === '') { throw flaverr('E_BAD_CONFIG', new Error( - 'Invalid `url` specified. Must be a non-empty string.\n'+ - '--\n'+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + 'Invalid `url` specified. Must be a non-empty string.\n' + + '--\n' + + '(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)' )); } @@ -279,7 +279,7 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // TODO: Implement explicit parsing for this kind of URL instead of just bailing silently. 
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - return; - }//• + }// • // IWMIH, this is the general case where we're actually going to validate the URL like normal. @@ -288,12 +288,12 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // Plus, more importantly, Node's `url.parse()` returns funky results if the argument doesn't // have one. So we'll add one if necessary. // > See https://en.wikipedia.org/wiki/Uniform_Resource_Identifier#Syntax - var urlToParse; + let urlToParse; if (dsConfig.url.match(/^:\/\//)) { - urlToParse = dsConfig.url.replace(/^:\/\//, (expectedProtocolPrefix||'db')+'://'); + urlToParse = dsConfig.url.replace(/^:\/\//, `${expectedProtocolPrefix || 'db'}://`); } else if (!dsConfig.url.match(/^[a-zA-Z][a-zA-Z0-9+.-]*:\/\//)) { - urlToParse = (expectedProtocolPrefix||'db')+'://'+dsConfig.url; + urlToParse = `${expectedProtocolPrefix || 'db'}://${dsConfig.url}`; } else { urlToParse = dsConfig.url; @@ -303,7 +303,7 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // console.log('\n\n**********\nurl to parse:',urlToParse, (new Error()).stack); // Now attempt to parse out the URL's pieces and validate each one. - var parsedConnectionStr = url.parse(urlToParse); + const parsedConnectionStr = url.parse(urlToParse); // Ensure a valid protocol. @@ -312,35 +312,35 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // (otherwise other parsed info could be very weird and wrong) if (!parsedConnectionStr.protocol) { throw flaverr('E_BAD_CONFIG', new Error( - 'Could not parse provided URL ('+util.inspect(dsConfig.url,{depth:5})+').\n'+ - '(If you continue to experience issues, try checking that the URL begins with an '+ - 'appropriate protocol; e.g. `mysql://` or `mongo://`.\n'+ - '--\n'+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + `Could not parse provided URL (${util.inspect(dsConfig.url,{depth:5})}).\n` + + `(If you continue to experience issues, try checking that the URL begins with an ` + + `appropriate protocol; e.g. \`mysql://\` or \`mongo://\`.\n` + + `--\n` + + `(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)` )); } // If relevant, validate that the RIGHT protocol was found. if (expectedProtocolPrefix) { - if (parsedConnectionStr.protocol !== expectedProtocolPrefix+':' && parsedConnectionStr.protocol !== expectedProtocolPrefix+'+srv:') { + if (parsedConnectionStr.protocol !== `${expectedProtocolPrefix}:` && parsedConnectionStr.protocol !== `${expectedProtocolPrefix}+srv:`) { throw flaverr('E_BAD_CONFIG', new Error( - 'Provided URL ('+util.inspect(dsConfig.url,{depth:5})+') has an invalid protocol.\n'+ - 'If included, the protocol must be "'+expectedProtocolPrefix+'://".\n'+ - '--\n'+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + `Provided URL (${util.inspect(dsConfig.url,{depth:5})}) has an invalid protocol.\n` + + `If included, the protocol must be "${expectedProtocolPrefix}://".\n` + + `--\n` + + `(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)` )); } - }//>- + }// >- // Parse authentication credentials from url, if specified. 
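The credential, host, port, database, and querystring handling that follows all reads fields off the object returned by Node's `url.parse()`. For orientation, this is roughly what that object contains for a typical connection string (the string itself is illustrative):

```js
const url = require('url');

// Roughly what the code below works from, for an example connection string.
const parsed = url.parse('mongodb://user:secret@localhost:27017/mppg?ssl=true');

console.log(parsed.protocol); // => 'mongodb:'
console.log(parsed.auth);     // => 'user:secret'  (split on ':' just below)
console.log(parsed.hostname); // => 'localhost'
console.log(parsed.port);     // => '27017'
console.log(parsed.pathname); // => '/mppg'
console.log(parsed.query);    // => 'ssl=true'     (handed to qs.parse() further down)
```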
- var userInUrl; - var passwordInUrl; + let userInUrl; + let passwordInUrl; if (parsedConnectionStr.auth && _.isString(parsedConnectionStr.auth)) { - var authPieces = parsedConnectionStr.auth.split(/:/); + const authPieces = parsedConnectionStr.auth.split(/:/); if (authPieces[0]) { userInUrl = authPieces[0]; - }//>- + }// >- if (authPieces[1]) { passwordInUrl = authPieces[1]; } @@ -348,18 +348,18 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // Parse the rest of the standard information from the URL. - var hostInUrl = parsedConnectionStr.hostname; - var portInUrl = parsedConnectionStr.port; - var databaseInUrl = parsedConnectionStr.pathname; + const hostInUrl = parsedConnectionStr.hostname; + const portInUrl = parsedConnectionStr.port; + let databaseInUrl = parsedConnectionStr.pathname; // And finally parse the non-standard info from the URL's querystring. - var miscOptsInUrlQs; + let miscOptsInUrlQs; try { miscOptsInUrlQs = qs.parse(parsedConnectionStr.query); } catch (e) { throw flaverr('E_BAD_CONFIG', new Error( - 'Could not parse query string from URL: `'+dsConfig.url+'`. '+ - 'Details: '+e.stack + `Could not parse query string from URL: \`${dsConfig.url}\`. ` + + `Details: ${e.stack}` )); } @@ -390,15 +390,15 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte dsConfig.database = normalizeDatabase(databaseInUrl); } - _.each(miscOptsInUrlQs, function (val, key) { + _.each(miscOptsInUrlQs, (val, key) => { if (whitelist && !_.contains(whitelist, key)) { throw flaverr('E_BAD_CONFIG', new Error( - 'Unrecognized option (`'+key+'`) specified in query string of connection URL.\n'+ - '(This adapter expects only standard, whitelisted properties.)\n'+ - '--\n'+ - 'See http://sailsjs.com/config/datastores#?the-connection-url for info, or visit\n)'+ - 'https://sailsjs.com/support for more help.' + `Unrecognized option (\`${key}\`) specified in query string of connection URL.\n` + + `(This adapter expects only standard, whitelisted properties.)\n` + + `--\n` + + `See https://sailsjs.com/config/datastores#?the-connection-url for info, or visit\n)` + + `https://sailsjs.com/support for more help.` )); } @@ -414,7 +414,7 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // throw flaverr('E_BAD_CONFIG', new Error( // 'Unexpected option (`'+key+'`) is NEVER allowed in the query string of a connection URL.\n'+ // '--\n'+ - // 'See http://sailsjs.com/config/datastores#?the-connection-url for info, or visit\n)'+ + // 'See https://sailsjs.com/config/datastores#?the-connection-url for info, or visit\n)'+ // 'https://sailsjs.com/support for more help.' // )); // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - @@ -423,42 +423,42 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte dsConfig[key] = val; } - });// + });// } catch (e) { switch (e.code) { case 'E_BAD_CONFIG': throw flaverr('E_BAD_CONFIG', new Error( - 'Could not process connection url. '+e.message+'\n'+ - '--\n'+ - 'Please correct this and try again.\n'+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + `Could not process connection url. ${e.message}\n` + + `--\n` + + `Please correct this and try again.\n` + + `(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)` )); default: throw e; } - }// + }// // And finally, rebuild the URL - var rebuiltUrl = ''; + let rebuiltUrl = ''; // Start with the protocol... 
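The rebuild below is a mechanical conversion from string concatenation to template literals; the resulting URL is character-for-character the same either way. A quick side-by-side with made-up credentials:

```js
// Made-up values, purely to show that the template-literal form produces the
// same string as the old concatenation-based form.
const protocol = 'mongodb:';
const user = 'user';
const password = 'secret';
const host = 'localhost';
const port = 27017;
const database = 'mppg';

const oldStyle = protocol + '//' + user + ':' + password + '@' + host + ':' + port + '/' + database;
const newStyle = `${protocol}//${user}:${password}@${host}:${port}/${database}`;

console.log(oldStyle === newStyle); // => true ('mongodb://user:secret@localhost:27017/mppg')
```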
- rebuiltUrl += parsedConnectionStr.protocol+'//'; + rebuiltUrl += `${parsedConnectionStr.protocol}//`; // If user/password were specified in the url OR as overrides, use them. if (dsConfig.user && dsConfig.password) { - rebuiltUrl += dsConfig.user+':'+dsConfig.password+'@'; + rebuiltUrl += `${dsConfig.user}:${dsConfig.password}@`; } else if (!dsConfig.password && dsConfig.user) { - rebuiltUrl += dsConfig.user+'@'; + rebuiltUrl += `${dsConfig.user}@`; } else if (dsConfig.password && !dsConfig.user) { throw flaverr('E_BAD_CONFIG', new Error( - 'It looks like a `password` was specified, but no `user` was specified to go along with it.\n'+ - '--\n'+ - 'Please remove `password` or also specify a `user`. '+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + 'It looks like a `password` was specified, but no `user` was specified to go along with it.\n' + + '--\n' + + 'Please remove `password` or also specify a `user`. ' + + '(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)' )); } @@ -469,11 +469,11 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte } else { throw flaverr('E_BAD_CONFIG', new Error( - 'No host could be determined from configuration (tried to use '+ - '`host: '+util.inspect(dsConfig.host)+'`).\n'+ - '--\n'+ - 'Please specify a `host` or, better yet, include it in the `url`. '+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + `No host could be determined from configuration (tried to use ` + + `\`host: ${util.inspect(dsConfig.host)}\`).\n` + + `--\n` + + `Please specify a \`host\` or, better yet, include it in the \`url\`. ` + + `(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)` )); // Or alternatively... // ``` @@ -484,21 +484,21 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // If a port was specified in the url OR as an override, use it. // (prefer override) if (dsConfig.port) { - rebuiltUrl += ':'+dsConfig.port; + rebuiltUrl += `:${dsConfig.port}`; } // If a database was specified in the url OR as an override, use it. // (prefer override) if (dsConfig.database) { - rebuiltUrl += '/'+dsConfig.database; + rebuiltUrl += `/${dsConfig.database}`; } else { throw flaverr('E_BAD_CONFIG', new Error( - 'No database could be determined from configuration (tried to use '+ - '`database: '+util.inspect(dsConfig.database)+'`).\n'+ - '--\n'+ - 'Please specify a `database` or, better yet, include it in the `url`. '+ - '(See http://sailsjs.com/config/datastores#?the-connection-url for more info.)' + `No database could be determined from configuration (tried to use ` + + `\`database: ${util.inspect(dsConfig.database)}\`).\n` + + `--\n` + + `Please specify a \`database\` or, better yet, include it in the \`url\`. ` + + `(See https://sailsjs.com/config/datastores#?the-connection-url for more info.)` )); } @@ -508,9 +508,9 @@ module.exports = function normalizeDatastoreConfig (dsConfig, whitelist, expecte // > If there were any non-standard options, we'll **LEAVE THEM IN** the URL // > when we rebuild it. But note that we did fold them into the dsConfig // > dictionary as well earlier. 
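`miscOptsInUrlQs` holds whatever `qs.parse()` extracted from the original querystring earlier on; `qs.stringify()` simply turns it back into a querystring so those options survive in the rebuilt URL. A quick round-trip example with options of the kind seen in the Mongo Atlas URL used by the tests at the end of this patch:

```js
const qs = require('qs');

// Example options that might ride along in a connection URL's querystring.
const miscOpts = qs.parse('ssl=true&replicaSet=Cluster0-shard-0&authSource=admin');
console.log(miscOpts);
// => { ssl: 'true', replicaSet: 'Cluster0-shard-0', authSource: 'admin' }

const rebuiltQs = qs.stringify(miscOpts);
console.log(rebuiltQs);
// => 'ssl=true&replicaSet=Cluster0-shard-0&authSource=admin'
```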
- var newQs = qs.stringify(miscOptsInUrlQs); + const newQs = qs.stringify(miscOptsInUrlQs); if (newQs.length > 0) { - rebuiltUrl += '?'+newQs; + rebuiltUrl += `?${newQs}`; } diff --git a/lib/private/normalize-datastore-config/private/normalize-database.js b/lib/private/normalize-datastore-config/private/normalize-database.js index 8f4d82857..d0c685532 100644 --- a/lib/private/normalize-datastore-config/private/normalize-database.js +++ b/lib/private/normalize-datastore-config/private/normalize-database.js @@ -2,10 +2,10 @@ * Module dependencies */ -var util = require('util'); -var assert = require('assert'); -var _ = require('@sailshq/lodash'); -var flaverr = require('flaverr'); +const util = require('util'); +const assert = require('assert'); +const _ = require('@sailshq/lodash'); +const flaverr = require('flaverr'); /** @@ -21,16 +21,16 @@ var flaverr = require('flaverr'); * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ -module.exports = function normalizeDatabase (dbName) { +module.exports = function normalizeDatabase(dbName) { assert(!_.isUndefined(dbName), 'Should be defined'); if (_.isNumber(dbName)) { - dbName = ''+dbName; - }//>- + dbName = `${dbName}`; + }// >- if (!_.isString(dbName)) { - throw flaverr('E_BAD_CONFIG', new Error('Invalid database (`'+util.inspect(dbName)+'`). Must be a string or number.')); + throw flaverr('E_BAD_CONFIG', new Error(`Invalid database (\`${util.inspect(dbName)}\`). Must be a string or number.`)); } return dbName; diff --git a/lib/private/normalize-datastore-config/private/normalize-host.js b/lib/private/normalize-datastore-config/private/normalize-host.js index 836fc6531..2dfcd3423 100644 --- a/lib/private/normalize-datastore-config/private/normalize-host.js +++ b/lib/private/normalize-datastore-config/private/normalize-host.js @@ -2,10 +2,10 @@ * Module dependencies */ -var util = require('util'); -var assert = require('assert'); -var _ = require('@sailshq/lodash'); -var flaverr = require('flaverr'); +const util = require('util'); +const assert = require('assert'); +const _ = require('@sailshq/lodash'); +const flaverr = require('flaverr'); /** @@ -21,16 +21,16 @@ var flaverr = require('flaverr'); * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ -module.exports = function normalizeHost (host) { +module.exports = function normalizeHost(host) { assert(!_.isUndefined(host), 'Should be defined'); if (_.isNumber(host)) { - host = ''+host; - }//>- + host = `${host}`; + }// >- if (!_.isString(host) || host === '') { - throw flaverr('E_BAD_CONFIG', new Error('Invalid host (`'+util.inspect(host)+'`). Must be a non-empty string.')); + throw flaverr('E_BAD_CONFIG', new Error(`Invalid host (\`${util.inspect(host)}\`). 
Must be a non-empty string.`)); } return host; diff --git a/lib/private/normalize-datastore-config/private/normalize-password.js b/lib/private/normalize-datastore-config/private/normalize-password.js index ceca20543..1690725f6 100644 --- a/lib/private/normalize-datastore-config/private/normalize-password.js +++ b/lib/private/normalize-datastore-config/private/normalize-password.js @@ -2,9 +2,9 @@ * Module dependencies */ -var assert = require('assert'); -var _ = require('@sailshq/lodash'); -var flaverr = require('flaverr'); +const assert = require('assert'); +const _ = require('@sailshq/lodash'); +const flaverr = require('flaverr'); /** @@ -20,7 +20,7 @@ var flaverr = require('flaverr'); * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ -module.exports = function normalizePassword (password) { +module.exports = function normalizePassword(password) { assert(!_.isUndefined(password), 'Should be defined'); diff --git a/lib/private/normalize-datastore-config/private/normalize-port.js b/lib/private/normalize-datastore-config/private/normalize-port.js index 8aa468979..d6ead4729 100644 --- a/lib/private/normalize-datastore-config/private/normalize-port.js +++ b/lib/private/normalize-datastore-config/private/normalize-port.js @@ -2,10 +2,10 @@ * Module dependencies */ -var util = require('util'); -var assert = require('assert'); -var _ = require('@sailshq/lodash'); -var flaverr = require('flaverr'); +const util = require('util'); +const assert = require('assert'); +const _ = require('@sailshq/lodash'); +const flaverr = require('flaverr'); /** @@ -21,16 +21,16 @@ var flaverr = require('flaverr'); * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ -module.exports = function normalizePort (port) { +module.exports = function normalizePort(port) { assert(!_.isUndefined(port), 'Should be defined'); if (_.isString(port)) { port = +port; - }//>- + }// >- if (!_.isNumber(port) || _.isNaN(port) || port < 1 || Math.floor(port) !== port) { - throw flaverr('E_BAD_CONFIG', new Error('Invalid port (`'+util.inspect(port)+'`). Must be a positive number.')); + throw flaverr('E_BAD_CONFIG', new Error(`Invalid port (\`${util.inspect(port)}\`). Must be a positive number.`)); } return port; diff --git a/lib/private/normalize-datastore-config/private/normalize-user.js b/lib/private/normalize-datastore-config/private/normalize-user.js index ab3acf8fb..57779fe97 100644 --- a/lib/private/normalize-datastore-config/private/normalize-user.js +++ b/lib/private/normalize-datastore-config/private/normalize-user.js @@ -2,10 +2,10 @@ * Module dependencies */ -var util = require('util'); -var assert = require('assert'); -var _ = require('@sailshq/lodash'); -var flaverr = require('flaverr'); +const util = require('util'); +const assert = require('assert'); +const _ = require('@sailshq/lodash'); +const flaverr = require('flaverr'); /** @@ -21,16 +21,16 @@ var flaverr = require('flaverr'); * - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - */ -module.exports = function normalizeUser (user) { +module.exports = function normalizeUser(user) { assert(!_.isUndefined(user), 'Should be defined'); if (_.isNumber(user)) { - user = ''+user; - }//>- + user = `${user}`; + }// >- if (!_.isString(user)) { - throw flaverr('E_BAD_CONFIG', new Error('Invalid user (`'+util.inspect(user)+'`). 
Must be a string.')); + throw flaverr('E_BAD_CONFIG', new Error(`Invalid user (\`${util.inspect(user)}\`). Must be a string.`)); } return user; diff --git a/package.json b/package.json index 112875582..842ac4f32 100644 --- a/package.json +++ b/package.json @@ -39,7 +39,8 @@ }, "devDependencies": { "benchmark": "2.1.4", - "eslint": "4.19.1", + "eslint": "8.41.0", + "eslint-plugin-eslint-comments": "^3.2.0", "mocha": "3.0.2", "waterline": "^0.13.6", "waterline-adapter-tests": "^1.0.1", @@ -57,7 +58,7 @@ }, "machinepack": { "friendlyName": "MongoDB", - "extendedDescription": "Uses the Node.js mongdb driver located at http://npmjs.com/package/mongodb", + "extendedDescription": "Uses the Node.js mongdb driver located at https://npmjs.com/package/mongodb", "moreInfoUrl": "https://github.com/mongodb/node-mongodb-native", "implements": [ "connectable", diff --git a/test/connectable/create-manager.test.js b/test/connectable/create-manager.test.js index c3ecfe5a6..1ad22fb79 100644 --- a/test/connectable/create-manager.test.js +++ b/test/connectable/create-manager.test.js @@ -1,14 +1,14 @@ -var assert = require('assert'); -var createManager = require('machine').build(require('../../').createManager); -var MongoClient = require('mongodb').MongoClient; +const assert = require('assert'); +const createManager = require('machine').build(require('../../').createManager); +const {MongoClient} = require('mongodb'); -describe('Connectable ::', function() { - describe('Create Manager', function() { - it('should work without a protocol in the connection string', function(done) { +describe('Connectable ::', () => { + describe('Create Manager', () => { + it('should work without a protocol in the connection string', (done) => { createManager({ connectionString: process.env.WATERLINE_ADAPTER_TESTS_URL || 'localhost:27017/mppg' }) - .exec(function(err) { + .exec((err) => { if (err) { return done(err); } @@ -16,35 +16,35 @@ describe('Connectable ::', function() { }); }); - it('should not work with an invalid protocol in the connection string', function(done) { + it('should not work with an invalid protocol in the connection string', (done) => { createManager({ connectionString: 'foobar://localhost:27017/mppg' }) - .exec(function(err) { + .exec((err) => { try { assert(err, 'Expected error of SOME kind, but didnt get one!'); - assert.equal(err.exit, 'malformed', 'Expected it to exit from the `malformed` exit! But it didndt... The error:'+err.stack); + assert.equal(err.exit, 'malformed', `Expected it to exit from the \`malformed\` exit! But it didndt... 
The error:${err.stack}`); } catch (e) { return done(e); } return done(); }); }); - it('should successfully return a Mongo Server instance', function(done) { + it('should successfully return a Mongo Server instance', (done) => { // Needed to dynamically get the host using the docker container - var host = process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost'; + const host = process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost'; createManager({ - connectionString: 'mongodb://' + host + ':27017/mppg' + connectionString: `mongodb://${host}:27017/mppg` }) - .exec(function(err, report) { + .exec((err, report) => { if (err) { return done(err); } try { assert(report.manager); - assert(report.manager.client instanceof MongoClient ); + assert(report.manager.client instanceof MongoClient); } catch (e) { return done(e); } return done(); diff --git a/test/connectable/destroy-manager.test.js b/test/connectable/destroy-manager.test.js index 61928732d..5b38ede1b 100644 --- a/test/connectable/destroy-manager.test.js +++ b/test/connectable/destroy-manager.test.js @@ -1,19 +1,19 @@ -var createManager = require('machine').build(require('../../').createManager); -var destroyManager = require('machine').build(require('../../').destroyManager); +const createManager = require('machine').build(require('../../').createManager); +const destroyManager = require('machine').build(require('../../').destroyManager); -describe('Connectable ::', function() { - describe('Destroy Manager', function() { - var manager; +describe('Connectable ::', () => { + describe('Destroy Manager', () => { + let manager; // Create a manager - before(function(done) { + before((done) => { // Needed to dynamically get the host using the docker container - var host = process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost'; + const host = process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost'; createManager({ - connectionString: 'mongodb://' + host + ':27017/mppg' + connectionString: `mongodb://${host}:27017/mppg` }) - .exec(function(err, report) { + .exec((err, report) => { if (err) { return done(err); } @@ -24,11 +24,11 @@ describe('Connectable ::', function() { }); - it('should successfully destroy the manager', function(done) { + it('should successfully destroy the manager', (done) => { destroyManager({ - manager: manager + manager }) - .exec(function(err) { + .exec((err) => { if (err) { return done(err); } return done(); }); diff --git a/test/connectable/get-connection.test.js b/test/connectable/get-connection.test.js index 9829424fd..770bd02fc 100644 --- a/test/connectable/get-connection.test.js +++ b/test/connectable/get-connection.test.js @@ -1,20 +1,20 @@ -var assert = require('assert'); -var createManager = require('machine').build(require('../../').createManager); -var getConnection = require('machine').build(require('../../').getConnection); +const assert = require('assert'); +const createManager = require('machine').build(require('../../').createManager); +const getConnection = require('machine').build(require('../../').getConnection); -describe('Connectable ::', function() { - describe('Get Connection', function() { - var manager; +describe('Connectable ::', () => { + describe('Get Connection', () => { + let manager; // Create a manager - before(function(done) { + before((done) => { // Needed to dynamically get the host using the docker container - var host = process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost'; + const host = process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost'; createManager({ - connectionString: 
'mongodb://' + host + ':27017/mppg' + connectionString: `mongodb://${host}:27017/mppg` }) - .exec(function(err, report) { + .exec((err, report) => { if (err) { return done(err); } @@ -24,11 +24,11 @@ describe('Connectable ::', function() { }); }); - it('should successfully return a Mongo Server instance', function(done) { + it('should successfully return a Mongo Server instance', (done) => { getConnection({ - manager: manager + manager }) - .exec(function(err, report) { + .exec((err, report) => { if (err) { return done(err); } diff --git a/test/connectable/release-connection.test.js b/test/connectable/release-connection.test.js index 5fa68fe1e..52b23ba9c 100644 --- a/test/connectable/release-connection.test.js +++ b/test/connectable/release-connection.test.js @@ -1,21 +1,21 @@ -var createManager = require('machine').build(require('../../').createManager); -var getConnection = require('machine').build(require('../../').getConnection); -var releaseConnection = require('machine').build(require('../../').releaseConnection); +const createManager = require('machine').build(require('../../').createManager); +const getConnection = require('machine').build(require('../../').getConnection); +const releaseConnection = require('machine').build(require('../../').releaseConnection); -describe('Connectable ::', function() { - describe('Release Connection', function() { - var manager; - var connection; +describe('Connectable ::', () => { + describe('Release Connection', () => { + let manager; + let connection; // Create a manager and connection - before(function(done) { + before((done) => { // Needed to dynamically get the host using the docker container - var host = process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost'; + const host = process.env.WATERLINE_ADAPTER_TESTS_HOST || 'localhost'; createManager({ - connectionString: 'mongodb://' + host + ':27017/mppg' + connectionString: `mongodb://${host}:27017/mppg` }) - .exec(function(err, report) { + .exec((err, report) => { if (err) { return done(err); } @@ -23,9 +23,9 @@ describe('Connectable ::', function() { manager = report.manager; getConnection({ - manager: manager + manager }) - .exec(function(err, report) { + .exec((err, report) => { if (err) { return done(err); } @@ -37,11 +37,11 @@ describe('Connectable ::', function() { }); // The actual machine is a no-op so just ensure no error comes back. 
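Besides the arrow-callback conversion, these test hunks pick up ES2015 object shorthand: wherever the property name matches the variable name (as with the `{ manager }` and `{ connection }` arguments here), `{ connection: connection }` collapses to `{ connection }`. The two forms are equivalent, as a tiny standalone check shows:

```js
// Hypothetical value, for illustration only; the two literals below are equivalent.
const connection = { id: 'example-connection' };

const longhand = { connection: connection };
const shorthand = { connection };

console.log(JSON.stringify(longhand) === JSON.stringify(shorthand)); // => true
```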
- it('should successfully release a connection', function(done) { + it('should successfully release a connection', (done) => { releaseConnection({ - connection: connection + connection }) - .exec(function(err) { + .exec((err) => { if (err) { return done(err); } diff --git a/test/run-adapter-specific-tests.js b/test/run-adapter-specific-tests.js index 557fb7d33..946cdfcd9 100644 --- a/test/run-adapter-specific-tests.js +++ b/test/run-adapter-specific-tests.js @@ -1,26 +1,26 @@ -var assert = require('assert'); -var _ = require('@sailshq/lodash'); -var Waterline = require('waterline'); -var waterlineUtils = require('waterline-utils'); -var normalizeDatastoreConfig = require('../lib/private/normalize-datastore-config'); +const assert = require('assert'); +const _ = require('@sailshq/lodash'); +const Waterline = require('waterline'); +const waterlineUtils = require('waterline-utils'); +const normalizeDatastoreConfig = require('../lib/private/normalize-datastore-config'); -var waterline; -var models = {}; +let waterline; +let models = {}; -describe('normalizeDatastoreConfig', function() { +describe('normalizeDatastoreConfig', () => { - it('Given a URL without a prefix, normalizeDatastoreConfig should add the prefix', function() { - var config = { + it('Given a URL without a prefix, normalizeDatastoreConfig should add the prefix', () => { + const config = { url: 'creepygiggles:shipyard4eva@localhost/test' }; normalizeDatastoreConfig(config, undefined, 'mongodb'); assert.equal(config.url, 'mongodb://creepygiggles:shipyard4eva@localhost/test'); }); - it('Given a URL with a comma in it (like a Mongo Atlas URL), normalizeDatastoreConfig should not modify the URL.', function() { - var url = 'mongodb://creepygiggles:shipyard4eva@cluster0-shard-00-00-ienyq.mongodb.net:27017,cluster0-shard-00-01-ienyq.mongodb.net:27017,cluster0-shard-00-02-ienyq.mongodb.net:27017/test?ssl=true&replicaSet=Cluster0-shard-0&authSource=admin'; - var config = { + it('Given a URL with a comma in it (like a Mongo Atlas URL), normalizeDatastoreConfig should not modify the URL.', () => { + const url = 'mongodb://creepygiggles:shipyard4eva@cluster0-shard-00-00-ienyq.mongodb.net:27017,cluster0-shard-00-01-ienyq.mongodb.net:27017,cluster0-shard-00-02-ienyq.mongodb.net:27017/test?ssl=true&replicaSet=Cluster0-shard-0&authSource=admin'; + const config = { url: 'mongodb://creepygiggles:shipyard4eva@cluster0-shard-00-00-ienyq.mongodb.net:27017,cluster0-shard-00-01-ienyq.mongodb.net:27017,cluster0-shard-00-02-ienyq.mongodb.net:27017/test?ssl=true&replicaSet=Cluster0-shard-0&authSource=admin' }; normalizeDatastoreConfig(config); @@ -29,11 +29,11 @@ describe('normalizeDatastoreConfig', function() { }); -describe('aggregations', function() { +describe('aggregations', () => { - describe('Using `sum`', function() { + describe('Using `sum`', () => { - before(function(done) { + before((done) => { setup( [createModel('user', {dontUseObjectIds: true})], models, @@ -41,7 +41,7 @@ describe('aggregations', function() { ); }); - after(function(done) { + after((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -49,8 +49,8 @@ describe('aggregations', function() { return done(); }); - it('should not throw an error if the given critieria don\'t match any records', function(done) { - models.user.sum('id', {name: 'joe'}).exec(function(err, sum) { + it('should not throw an error if the given critieria don\'t match any records', (done) => { + models.user.sum('id', {name: 'joe'}).exec((err, sum) => { if (err) { return done(err); } 
assert.equal(sum, 0); return done(); @@ -60,9 +60,9 @@ describe('aggregations', function() { }); - describe('Using `avg`', function() { + describe('Using `avg`', () => { - before(function(done) { + before((done) => { setup( [createModel('user', {dontUseObjectIds: true})], models, @@ -70,7 +70,7 @@ describe('aggregations', function() { ); }); - after(function(done) { + after((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -78,8 +78,8 @@ describe('aggregations', function() { return done(); }); - it('should not throw an error if the given critieria don\'t match any records', function(done) { - models.user.avg('id', {name: 'joe'}).exec(function(err, avg) { + it('should not throw an error if the given critieria don\'t match any records', (done) => { + models.user.avg('id', {name: 'joe'}).exec((err, avg) => { if (err) { return done(err); } assert.equal(avg, 0); return done(); @@ -90,11 +90,11 @@ describe('aggregations', function() { }); -describe('dontUseObjectIds', function() { +describe('dontUseObjectIds', () => { - describe('Without associations', function() { + describe('Without associations', () => { - afterEach(function(done) { + afterEach((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -102,7 +102,7 @@ describe('dontUseObjectIds', function() { return done(); }); - beforeEach(function(done) { + beforeEach((done) => { setup( [createModel('user', {dontUseObjectIds: true})], models, @@ -110,11 +110,11 @@ describe('dontUseObjectIds', function() { ); }); - describe('Creating a single record', function() { + describe('Creating a single record', () => { - it('should create a record w/ a numeric ID', function(done) { + it('should create a record w/ a numeric ID', (done) => { - models.user.create({id: 123, name: 'bob'}).exec(function(err, record) { + models.user.create({id: 123, name: 'bob'}).exec((err, record) => { if (err) {return done(err);} assert.equal(record.id, 123); assert.equal(record.name, 'bob'); @@ -125,11 +125,11 @@ describe('dontUseObjectIds', function() { }); - describe('Creating multiple records', function() { + describe('Creating multiple records', () => { - it('should create multiple record w/ a numeric ID', function(done) { + it('should create multiple record w/ a numeric ID', (done) => { - models.user.createEach([{id: 123, name: 'sid'},{id: 555, name: 'nancy'}]).exec(function(err, records) { + models.user.createEach([{id: 123, name: 'sid'},{id: 555, name: 'nancy'}]).exec((err, records) => { if (err) {return done(err);} assert.equal(records[0].id, 123); assert.equal(records[0].name, 'sid'); @@ -142,12 +142,12 @@ describe('dontUseObjectIds', function() { }); - describe('Updating a single record', function() { + describe('Updating a single record', () => { - it('should update the record correctly', function(done) { - models.user._adapter.datastores.test.manager.collection('user').insertOne({_id: 123, name: 'bob'}, function(err) { + it('should update the record correctly', (done) => { + models.user._adapter.datastores.test.manager.collection('user').insertOne({_id: 123, name: 'bob'}, (err) => { if (err) {return done(err);} - models.user.updateOne({id: 123}, {name: 'joe'}).exec(function(err, record) { + models.user.updateOne({id: 123}, {name: 'joe'}).exec((err, record) => { if (err) {return done(err);} assert.equal(record.id, 123); assert.equal(record.name, 'joe'); @@ -160,13 +160,13 @@ describe('dontUseObjectIds', function() { }); - describe('Updating multiple records', function() { + describe('Updating multiple records', 
() => { - it('should update the records correctly', function(done) { + it('should update the records correctly', (done) => { - models.user._adapter.datastores.test.manager.collection('user').insertMany([{_id: 123, name: 'sid'}, {_id: 555, name: 'nancy'}], function(err) { + models.user._adapter.datastores.test.manager.collection('user').insertMany([{_id: 123, name: 'sid'}, {_id: 555, name: 'nancy'}], (err) => { if (err) {return done(err);} - models.user.update({id: {'>': 0}}, {name: 'joe'}).exec(function(err, records) { + models.user.update({id: {'>': 0}}, {name: 'joe'}).exec((err, records) => { if (err) {return done(err);} assert.equal(records[0].id, 123); assert.equal(records[0].name, 'joe'); @@ -181,13 +181,13 @@ describe('dontUseObjectIds', function() { }); - describe('Finding a single record', function() { + describe('Finding a single record', () => { - it('should find a record w/ a numeric ID', function(done) { + it('should find a record w/ a numeric ID', (done) => { - models.user._adapter.datastores.test.manager.collection('user').insertOne({_id: 123, name: 'bob'}, function(err) { + models.user._adapter.datastores.test.manager.collection('user').insertOne({_id: 123, name: 'bob'}, (err) => { if (err) {return done(err);} - models.user.findOne({id: 123}).exec(function(err, record) { + models.user.findOne({id: 123}).exec((err, record) => { if (err) {return done(err);} assert.equal(record.id, 123); assert.equal(record.name, 'bob'); @@ -199,13 +199,13 @@ describe('dontUseObjectIds', function() { }); - describe('Finding multiple records', function() { + describe('Finding multiple records', () => { - it('should find the records correctly', function(done) { + it('should find the records correctly', (done) => { - models.user._adapter.datastores.test.manager.collection('user').insertMany([{_id: 123, name: 'sid'}, {_id: 555, name: 'nancy'}], function(err) { + models.user._adapter.datastores.test.manager.collection('user').insertMany([{_id: 123, name: 'sid'}, {_id: 555, name: 'nancy'}], (err) => { if (err) {return done(err);} - models.user.find({id: {'>': 0}}).exec(function(err, records) { + models.user.find({id: {'>': 0}}).exec((err, records) => { if (err) {return done(err);} assert.equal(records[0].id, 123); assert.equal(records[0].name, 'sid'); @@ -219,14 +219,14 @@ describe('dontUseObjectIds', function() { }); }); - describe('Deleting a single record', function() { + describe('Deleting a single record', () => { - it('should delete the record correctly', function(done) { - models.user._adapter.datastores.test.manager.collection('user').insertOne({_id: 123, name: 'bob'}, function(err) { + it('should delete the record correctly', (done) => { + models.user._adapter.datastores.test.manager.collection('user').insertOne({_id: 123, name: 'bob'}, (err) => { if (err) {return done(err);} - models.user.destroy({id: 123}).exec(function(err) { + models.user.destroy({id: 123}).exec((err) => { if (err) {return done(err);} - models.user._adapter.datastores.test.manager.collection('user').find({}).toArray(function(err, records) { + models.user._adapter.datastores.test.manager.collection('user').find({}).toArray((err, records) => { if (err) {return done(err);} assert.equal(records.length, 0); return done(); @@ -240,15 +240,15 @@ describe('dontUseObjectIds', function() { }); - describe('Deleting multiple records', function() { + describe('Deleting multiple records', () => { - it('should delete the records correctly', function(done) { + it('should delete the records correctly', (done) => { - 
models.user._adapter.datastores.test.manager.collection('user').insertMany([{_id: 123, name: 'sid'}, {_id: 555, name: 'nancy'}], function(err) { + models.user._adapter.datastores.test.manager.collection('user').insertMany([{_id: 123, name: 'sid'}, {_id: 555, name: 'nancy'}], (err) => { if (err) {return done(err);} - models.user.destroy({id: {'>': 0}}).exec(function(err) { + models.user.destroy({id: {'>': 0}}).exec((err) => { if (err) {return done(err);} - models.user._adapter.datastores.test.manager.collection('user').find({}).toArray(function(err, records) { + models.user._adapter.datastores.test.manager.collection('user').find({}).toArray((err, records) => { if (err) {return done(err);} assert.equal(records.length, 0); return done(); @@ -262,11 +262,11 @@ describe('dontUseObjectIds', function() { }); - describe('With associations', function() { + describe('With associations', () => { - describe('Where a single model using number keys belongsTo a model using ObjectID', function() { + describe('Where a single model using number keys belongsTo a model using ObjectID', () => { - before(function(done) { + before((done) => { setup( [createModel('user', {toOne: 'pet'}), createModel('pet', {dontUseObjectIds: true})], models, @@ -274,7 +274,7 @@ describe('dontUseObjectIds', function() { ); }); - after(function(done) { + after((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -282,13 +282,13 @@ describe('dontUseObjectIds', function() { return done(); }); - it('Should be able to create and retrieve the association successfully', function(done) { + it('Should be able to create and retrieve the association successfully', (done) => { - models.pet.create({id: 123, name: 'alice'}).exec(function(err) { + models.pet.create({id: 123, name: 'alice'}).exec((err) => { if (err) {return done(err);} - models.user.create({name: 'scott', friend: 123}).exec(function(err, user) { + models.user.create({name: 'scott', friend: 123}).exec((err, user) => { if (err) {return done(err);} - models.user.findOne({id: user.id}).populate('friend').exec(function(err, record) { + models.user.findOne({id: user.id}).populate('friend').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'scott'); assert(record.friend); @@ -304,9 +304,9 @@ describe('dontUseObjectIds', function() { }); - describe('Where a single model using ObjectID belongsTo a model using number keys', function() { + describe('Where a single model using ObjectID belongsTo a model using number keys', () => { - before(function(done) { + before((done) => { setup( [createModel('user', {toOne: 'pet', dontUseObjectIds: true}), createModel('pet')], models, @@ -314,7 +314,7 @@ describe('dontUseObjectIds', function() { ); }); - after(function(done) { + after((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -322,13 +322,13 @@ describe('dontUseObjectIds', function() { return done(); }); - it('Should be able to create and retrieve the association successfully', function(done) { + it('Should be able to create and retrieve the association successfully', (done) => { - models.pet.create({name: 'alice'}).exec(function(err, pet) { + models.pet.create({name: 'alice'}).exec((err, pet) => { if (err) {return done(err);} - models.user.create({id: 123, name: 'scott', friend: pet.id}).exec(function(err, user) { + models.user.create({id: 123, name: 'scott', friend: pet.id}).exec((err, user) => { if (err) {return done(err);} - models.user.findOne({id: user.id}).populate('friend').exec(function(err, record) { + 
models.user.findOne({id: user.id}).populate('friend').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'scott'); assert(record.friend); @@ -342,19 +342,19 @@ describe('dontUseObjectIds', function() { }); }); - describe('Where a collection using number keys belongsTo a model using ObjectID ', function() { + describe('Where a collection using number keys belongsTo a model using ObjectID ', () => { - var userId; + let userId; - before(function(done) { + before((done) => { setup( [createModel('user', {oneToMany: 'pet'}), createModel('pet', {toOne: 'user', dontUseObjectIds: true})], models, - function(err) { + (err) => { if (err) {return done(err);} - models.pet.create({id: 123, name: 'alice'}).exec(function(err) { + models.pet.create({id: 123, name: 'alice'}).exec((err) => { if (err) {return done(err);} - models.user.create({name: 'scott', friends: [123]}).exec(function(err, user) { + models.user.create({name: 'scott', friends: [123]}).exec((err, user) => { if (err) {return done(err);} userId = user.id; return done(); @@ -364,7 +364,7 @@ describe('dontUseObjectIds', function() { ); }); - after(function(done) { + after((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -372,9 +372,9 @@ describe('dontUseObjectIds', function() { return done(); }); - it('Should be able to create and retrieve the association successfully from the "hasMany" side', function(done) { + it('Should be able to create and retrieve the association successfully from the "hasMany" side', (done) => { - models.user.findOne({id: userId}).populate('friends').exec(function(err, record) { + models.user.findOne({id: userId}).populate('friends').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'scott'); assert(record.friends); @@ -386,9 +386,9 @@ describe('dontUseObjectIds', function() { }); - it('Should be able to create and retrieve the association successfully from the "hasOne" side', function(done) { + it('Should be able to create and retrieve the association successfully from the "hasOne" side', (done) => { - models.pet.findOne({id: 123}).populate('friend').exec(function(err, record) { + models.pet.findOne({id: 123}).populate('friend').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'alice'); assert(record.friend); @@ -402,20 +402,20 @@ describe('dontUseObjectIds', function() { }); - describe('Where a collection using ObjectID belongsTo a model using number keys', function() { + describe('Where a collection using ObjectID belongsTo a model using number keys', () => { - var petId; + let petId; - before(function(done) { + before((done) => { setup( [createModel('user', {oneToMany: 'pet', dontUseObjectIds: true}), createModel('pet', {toOne: 'user'})], models, - function(err) { + (err) => { if (err) {return done(err);} - models.pet.create({name: 'alice'}).exec(function(err, pet) { + models.pet.create({name: 'alice'}).exec((err, pet) => { if (err) {return done(err);} petId = pet.id; - models.user.create({id: 123, name: 'scott', friends: [pet.id]}).exec(function(err) { + models.user.create({id: 123, name: 'scott', friends: [pet.id]}).exec((err) => { if (err) {return done(err);} return done(); }); @@ -424,7 +424,7 @@ describe('dontUseObjectIds', function() { ); }); - after(function(done) { + after((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -432,9 +432,9 @@ describe('dontUseObjectIds', function() { return done(); }); - it('Should be able to create and retrieve the association 
successfully from the "hasMany" side', function(done) { + it('Should be able to create and retrieve the association successfully from the "hasMany" side', (done) => { - models.user.findOne({id: 123}).populate('friends').exec(function(err, record) { + models.user.findOne({id: 123}).populate('friends').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'scott'); assert(record.friends); @@ -446,9 +446,9 @@ describe('dontUseObjectIds', function() { }); - it('Should be able to create and retrieve the association successfully from the "hasOne" side', function(done) { + it('Should be able to create and retrieve the association successfully from the "hasOne" side', (done) => { - models.pet.findOne({id: petId}).populate('friend').exec(function(err, record) { + models.pet.findOne({id: petId}).populate('friend').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'alice'); assert(record.friend); @@ -461,19 +461,19 @@ describe('dontUseObjectIds', function() { }); - describe('Where a collection using number keys belongsTo a model using ObjectID (vialess)', function() { + describe('Where a collection using number keys belongsTo a model using ObjectID (vialess)', () => { - var userId; + let userId; - before(function(done) { + before((done) => { setup( [createModel('user', {toManyVialess: 'pet'}), createModel('pet', {dontUseObjectIds: true})], models, - function(err) { + (err) => { if (err) {return done(err);} - models.pet.create({id: 123, name: 'alice'}).exec(function(err) { + models.pet.create({id: 123, name: 'alice'}).exec((err) => { if (err) {return done(err);} - models.user.create({name: 'scott', friends: [123]}).exec(function(err, user) { + models.user.create({name: 'scott', friends: [123]}).exec((err, user) => { if (err) {return done(err);} userId = user.id; return done(); @@ -483,7 +483,7 @@ describe('dontUseObjectIds', function() { ); }); - after(function(done) { + after((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -491,9 +491,9 @@ describe('dontUseObjectIds', function() { return done(); }); - it('Should be able to create and retrieve the association successfully from the "hasMany" side', function(done) { + it('Should be able to create and retrieve the association successfully from the "hasMany" side', (done) => { - models.user.findOne({id: userId}).populate('friends').exec(function(err, record) { + models.user.findOne({id: userId}).populate('friends').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'scott'); assert(record.friends); @@ -507,22 +507,22 @@ describe('dontUseObjectIds', function() { }); - describe('Where a collection using ObjectID belongsTo a model using number keys (vialess)', function() { + describe('Where a collection using ObjectID belongsTo a model using number keys (vialess)', () => { - var petId; + let petId; // eslint-disable-next-line no-unused-vars - var userId; + let userId; - before(function(done) { + before((done) => { setup( [createModel('user', {toManyVialess: 'pet', dontUseObjectIds: true}), createModel('pet')], models, - function(err) { + (err) => { if (err) {return done(err);} - models.pet.create({name: 'alice'}).exec(function(err, pet) { + models.pet.create({name: 'alice'}).exec((err, pet) => { if (err) {return done(err);} petId = pet.id; - models.user.create({id: 123, name: 'scott', friends: [petId]}).exec(function(err, user) { + models.user.create({id: 123, name: 'scott', friends: [petId]}).exec((err, user) => { if (err) {return done(err);} 
userId = user.id; return done(); @@ -532,7 +532,7 @@ describe('dontUseObjectIds', function() { ); }); - after(function(done) { + after((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -540,9 +540,9 @@ describe('dontUseObjectIds', function() { return done(); }); - it('Should be able to create and retrieve the association successfully from the "hasMany" side', function(done) { + it('Should be able to create and retrieve the association successfully from the "hasMany" side', (done) => { - models.user.findOne({id: 123}).populate('friends').exec(function(err, record) { + models.user.findOne({id: 123}).populate('friends').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'scott'); assert(record.friends); @@ -556,20 +556,20 @@ describe('dontUseObjectIds', function() { }); - describe('Where a collection using ObjectID has many-to-many relationship with a model using number keys', function() { + describe('Where a collection using ObjectID has many-to-many relationship with a model using number keys', () => { - var petId; + let petId; - before(function(done) { + before((done) => { setup( [createModel('user', {manyToMany: 'pet', dontUseObjectIds: true}), createModel('pet', {manyToMany: 'user'})], models, - function(err) { + (err) => { if (err) {return done(err);} - models.pet.create({name: 'alice'}).exec(function(err, pet) { + models.pet.create({name: 'alice'}).exec((err, pet) => { if (err) {return done(err);} petId = pet.id; - models.user.create({id: 123, name: 'scott', friends: [pet.id]}).exec(function(err) { + models.user.create({id: 123, name: 'scott', friends: [pet.id]}).exec((err) => { if (err) {return done(err);} return done(); }); @@ -578,7 +578,7 @@ describe('dontUseObjectIds', function() { ); }); - after(function(done) { + after((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -586,9 +586,9 @@ describe('dontUseObjectIds', function() { return done(); }); - it('Should be able to create and retrieve the association successfully from the side w/out ObjectID', function(done) { + it('Should be able to create and retrieve the association successfully from the side w/out ObjectID', (done) => { - models.user.findOne({id: 123}).populate('friends').exec(function(err, record) { + models.user.findOne({id: 123}).populate('friends').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'scott'); assert(record.friends); @@ -600,8 +600,8 @@ describe('dontUseObjectIds', function() { }); - it('Should be able to create and retrieve the association successfully from the side w/ ObjectID', function(done) { - models.pet.findOne({id: petId}).populate('friends').exec(function(err, record) { + it('Should be able to create and retrieve the association successfully from the side w/ ObjectID', (done) => { + models.pet.findOne({id: petId}).populate('friends').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'alice'); assert(record.friends); @@ -615,17 +615,17 @@ describe('dontUseObjectIds', function() { }); - describe('Where a collection using number keys has many-to-many relationship with a model using number keys', function() { + describe('Where a collection using number keys has many-to-many relationship with a model using number keys', () => { - before(function(done) { + before((done) => { setup( [createModel('user', {manyToMany: 'pet', dontUseObjectIds: true}), createModel('pet', {manyToMany: 'user', dontUseObjectIds: true})], models, - function(err) { + (err) => { if (err) 
{return done(err);} - models.pet.create({id: 555, name: 'alice'}).exec(function(err) { + models.pet.create({id: 555, name: 'alice'}).exec((err) => { if (err) {return done(err);} - models.user.create({id: 123, name: 'scott', friends: [555]}).exec(function(err) { + models.user.create({id: 123, name: 'scott', friends: [555]}).exec((err) => { if (err) {return done(err);} return done(); }); @@ -634,7 +634,7 @@ describe('dontUseObjectIds', function() { ); }); - after(function(done) { + after((done) => { models = {}; if (waterline) { return waterline.teardown(done); @@ -642,9 +642,9 @@ describe('dontUseObjectIds', function() { return done(); }); - it('Should be able to create and retrieve the association successfully from the first side', function(done) { + it('Should be able to create and retrieve the association successfully from the first side', (done) => { - models.user.findOne({id: 123}).populate('friends').exec(function(err, record) { + models.user.findOne({id: 123}).populate('friends').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'scott'); assert(record.friends); @@ -656,8 +656,8 @@ describe('dontUseObjectIds', function() { }); - it('Should be able to create and retrieve the association successfully from the second side', function(done) { - models.pet.findOne({id: 555}).populate('friends').exec(function(err, record) { + it('Should be able to create and retrieve the association successfully from the second side', (done) => { + models.pet.findOne({id: 555}).populate('friends').exec((err, record) => { if (err) {return done(err);} assert.equal(record.name, 'alice'); assert(record.friends); @@ -677,7 +677,7 @@ describe('dontUseObjectIds', function() { function setup(fixtures, modelsContainer, cb) { - var defaults = { + const defaults = { primaryKey: 'id', datastore: 'test', fetchRecordsOnUpdate: true, @@ -689,12 +689,12 @@ function setup(fixtures, modelsContainer, cb) { waterline = new Waterline(); - _.each(fixtures, function(val, key) { - var modelFixture = _.extend({}, defaults, fixtures[key]); + _.each(fixtures, (val, key) => { + const modelFixture = _.extend({}, defaults, fixtures[key]); waterline.registerModel(Waterline.Collection.extend(modelFixture)); }); - var datastores = { + const datastores = { test: { adapter: 'sails-mongo', url: process.env.WATERLINE_ADAPTER_TESTS_URL || 'localhost/sails_mongo' @@ -704,22 +704,22 @@ function setup(fixtures, modelsContainer, cb) { // Clear the adapter from memory. 
delete require.cache[require.resolve('../')]; - waterline.initialize({ adapters: { 'sails-mongo': require('../') }, datastores: datastores, defaults: defaults }, function(err, orm) { + waterline.initialize({ adapters: { 'sails-mongo': require('../') }, datastores, defaults }, (err, orm) => { if (err) { return cb(err); } // Save a reference to the ORM - var ORM = orm; + const ORM = orm; // Run migrations - waterlineUtils.autoMigrations('drop', orm, function(err) { + waterlineUtils.autoMigrations('drop', orm, (err) => { if (err) { return cb(err); } // Globalize collections for normalization - _.each(ORM.collections, function(collection, identity) { + _.each(ORM.collections, (collection, identity) => { modelsContainer[identity] = collection; }); return cb(); @@ -728,13 +728,13 @@ function setup(fixtures, modelsContainer, cb) { } -function createModel (identity, options) { +function createModel(identity, options) { options = options || {}; - var model = { + const model = { datastore: 'test', - identity: identity, + identity, attributes: { id: { type: 'string', columnName: '_id', autoMigrations: { columnType: 'string', unique: true, autoIncrement: false } }, name: { type: 'string', autoMigrations: { columnType: 'string', unique: false, autoIncrement: false } } diff --git a/test/run-standard-tests.js b/test/run-standard-tests.js index 145130ed1..b87fb474c 100644 --- a/test/run-standard-tests.js +++ b/test/run-standard-tests.js @@ -4,10 +4,10 @@ * Module dependencies */ -var util = require('util'); -var TestRunner = require('waterline-adapter-tests'); -var packageMD = require('../package.json'); -var Adapter = require('../'); +const util = require('util'); +const TestRunner = require('waterline-adapter-tests'); +const packageMD = require('../package.json'); +const Adapter = require('../'); @@ -27,26 +27,26 @@ try { packageMD.waterlineAdapter.interfaces; } catch (e) { throw new Error( - '\n' + - 'Could not read supported interfaces from `waterlineAdapter.interfaces`' + '\n' + - 'in this adapter\'s `package.json` file ::' + '\n' + - util.inspect(e) + `\n` + + `Could not read supported interfaces from \`waterlineAdapter.interfaces\`` + `\n` + + `in this adapter's \`package.json\` file ::` + `\n${ + util.inspect(e)}` ); } // Log an intro message. -console.log('Testing `' + packageMD.name + '`, a Sails/Waterline adapter.'); -console.log('Running `waterline-adapter-tests` against ' + packageMD.waterlineAdapter.interfaces.length + ' interface(s) and ' + packageMD.waterlineAdapter.features.length + ' feature(s)...'); -console.log('| Interfaces: ' + (packageMD.waterlineAdapter.interfaces.join(', ') || 'n/a') + ''); -console.log('| Extra features: ' + ((packageMD.waterlineAdapter.features || []).join(', ') || 'n/a') + ''); +console.log(`Testing \`${packageMD.name}\`, a Sails/Waterline adapter.`); +console.log(`Running \`waterline-adapter-tests\` against ${packageMD.waterlineAdapter.interfaces.length} interface(s) and ${packageMD.waterlineAdapter.features.length} feature(s)...`); +console.log(`| Interfaces: ${packageMD.waterlineAdapter.interfaces.join(', ') || 'n/a'}`); +console.log(`| Extra features: ${(packageMD.waterlineAdapter.features || []).join(', ') || 'n/a'}`); console.log(); console.log('> More info about building Waterline adapters:'); -console.log('> http://sailsjs.com/docs/concepts/extending-sails/adapters/custom-adapters'); +console.log('> https://sailsjs.com/docs/concepts/extending-sails/adapters/custom-adapters'); // Ensure a `url` was specified. 
-// (http://sailsjs.com/config/datastores#?the-connection-url) +// (https://sailsjs.com/config/datastores#?the-connection-url) if (!process.env.WATERLINE_ADAPTER_TESTS_URL) { console.warn(); console.warn('-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-'); @@ -58,7 +58,7 @@ if (!process.env.WATERLINE_ADAPTER_TESTS_URL) { console.warn(' WATERLINE_ADAPTER_TESTS_URL=root@localhost/testdb npm test'); console.warn('```'); console.warn('-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-x-'); -}//-• +}// -• // Use the `waterline-adapter-tests` module to @@ -91,13 +91,13 @@ new TestRunner({ // Most databases implement 'semantic' and 'queryable' interface layers. // // For full interface reference, see: -// http://sailsjs.com/docs/concepts/extending-sails/adapters/custom-adapters +// https://sailsjs.com/docs/concepts/extending-sails/adapters/custom-adapters // // Some features are polyfilled if omitted; allowing optimizations at the adapter // level for databases that support the feature. For example, if you don't implement // a `join` method, it will be polyfilled for you by Waterline core (using the same // "polypopulate" that it uses for cross-datastore joins.) For more on that, talk -// to an adapter maintainer @ http://sailsjs.com/support. +// to an adapter maintainer @ https://sailsjs.com/support. // // - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - From ee715f62736d7e260bfa62a0bb9a1541c41a864c Mon Sep 17 00:00:00 2001 From: Luis Lobo Borobia Date: Sat, 30 Sep 2023 23:18:22 -0500 Subject: [PATCH 2/2] Switch to Github Actions --- .github/workflows/lint.yaml | 25 ++++++++++ .github/workflows/test-ubuntu.yaml | 42 +++++++++++++++++ .github/workflows/test-windows.yaml | 62 ++++++++++++++++++++++++ .travis.yml | 70 ---------------------------- appveyor.yml | 67 -------------------------- docker-compose.yml | 44 ++++++++--------- package.json | 28 +++++------ scripts/appveyor/install_mongodb.ps1 | 20 -------- scripts/travis/install_mongodb.sh | 2 +- test/run-adapter-specific-tests.js | 1 - 10 files changed, 168 insertions(+), 193 deletions(-) create mode 100644 .github/workflows/lint.yaml create mode 100644 .github/workflows/test-ubuntu.yaml create mode 100644 .github/workflows/test-windows.yaml delete mode 100644 .travis.yml delete mode 100644 appveyor.yml delete mode 100644 scripts/appveyor/install_mongodb.ps1 diff --git a/.github/workflows/lint.yaml b/.github/workflows/lint.yaml new file mode 100644 index 000000000..e26fbfa9a --- /dev/null +++ b/.github/workflows/lint.yaml @@ -0,0 +1,25 @@ +name: sails-mongo lint + +on: + push + +jobs: + lint: + runs-on: ubuntu-22.04 + + strategy: + matrix: + node-version: [20] + + steps: + + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Install dependencies + run: | + npm install --no-audit --no-fund + + - name: Lint the code + run: | + npm run lint-test diff --git a/.github/workflows/test-ubuntu.yaml b/.github/workflows/test-ubuntu.yaml new file mode 100644 index 000000000..79f636983 --- /dev/null +++ b/.github/workflows/test-ubuntu.yaml @@ -0,0 +1,42 @@ +name: sails-mongo test (Ubuntu) + +on: + push + +env: + WATERLINE_ADAPTER_TESTS_URL: mongo/testdb:27027 + WATERLINE_ADAPTER_TESTS_HOST: mongo + WATERLINE_ADAPTER_TESTS_DATABASE: sails-mongo + NODE_ENV: test + +jobs: + test-ubuntu: + runs-on: ubuntu-22.04 + container: node:${{ matrix.node-version }} + + strategy: + matrix: + node-version: [16, 18, 20] + mongodb-version: ['4.4', '5', '6', '7'] + + services: + mongo: + image: mongo:${{ 
matrix.mongodb-version }} + ports: + - 27027:27017 + + steps: + + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Install dependencies + run: | + node --eval "console.log('Running Node.js: ' + process.version)" + node --eval "console.log('Current directory: ' + process.cwd())" + node --eval "console.log('Files in directory: ' + require('fs').readdirSync(process.cwd()))" + npm install --no-audit --no-fund + + - name: Test it out + run: | + npm run custom-tests diff --git a/.github/workflows/test-windows.yaml b/.github/workflows/test-windows.yaml new file mode 100644 index 000000000..059838402 --- /dev/null +++ b/.github/workflows/test-windows.yaml @@ -0,0 +1,62 @@ +name: sails-mongo test (Windows) + +on: + push + +env: + WATERLINE_ADAPTER_TESTS_URL: 127.0.0.1/testdb + WATERLINE_ADAPTER_TESTS_HOST: 127.0.0.1 + WATERLINE_ADAPTER_TESTS_DATABASE: sails-mongo + NODE_ENV: test + +jobs: + test-windows: + runs-on: windows-2022 + + strategy: + matrix: + node-version: [16.x, 18.x, 20.x] + mongodb-version: ['5.0'] + + steps: + - uses: ankane/setup-mongodb@ce30d9041565cb469945895d5bde3384a254dd2e # use commit ID until action is versioned, see https://github.com/ankane/setup-mongodb/issues/2 + with: + mongodb-version: ${{ matrix.mongodb-version }} + + - name: Wait for MongoDB to start + run: | + while ($true) { + $status = Get-Service MongoDB | Select-Object -ExpandProperty Status + if ($status -eq "Running") { + Write-Host "MongoDB is running." + break + } + Write-Host "Waiting for MongoDB to start..." + Start-Sleep -Seconds 5 + } + shell: pwsh + + - name: Install Mongodb Shell + run: | + choco install mongodb-shell -y + shell: pwsh + + - name: Check connection to Mongodb using mongodb shell + run: | + mongosh --eval "db.adminCommand('listDatabases')" + + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + + - name: Checkout repository + uses: actions/checkout@v3 + + - name: Install dependencies + run: | + npm install --no-audit --no-fund + + - name: Test code + run: | + npm run custom-tests diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index bfbf3773f..000000000 --- a/.travis.yml +++ /dev/null @@ -1,70 +0,0 @@ -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # -# ╔╦╗╦═╗╔═╗╦ ╦╦╔═╗ ┬ ┬┌┬┐┬ # -# ║ ╠╦╝╠═╣╚╗╔╝║╚═╗ └┬┘││││ # -# o ╩ ╩╚═╩ ╩ ╚╝ ╩╚═╝o ┴ ┴ ┴┴─┘ # -# # -# This file configures Travis CI. # -# (i.e. how we run the tests... 
mainly) # -# # -# https://docs.travis-ci.com/user/customizing-the-build # -# # # # # # # # # # # # # # # # # # # # # # # # # # # # # - -dist: xenial - -language: node_js - -node_js: - - "16" - - "18" - - "19" - - "20" - -env: - global: - - WATERLINE_ADAPTER_TESTS_URL=localhost/testdb - - WATERLINE_ADAPTER_TESTS_HOST=localhost - - WATERLINE_ADAPTER_TESTS_DATABASE=sails-mongo - - NODE_ENV=test - - matrix: - - MONGODB=3.6.18 - - MONGODB=4.0.18 - - MONGODB=4.2.7 - -cache: - directories: - - "$TRAVIS_BUILD_DIR/mongodb" - - "$HOME/.npm" - -matrix: - fast_finish: true - -before_install: - - chmod +x "$TRAVIS_BUILD_DIR/scripts/travis/install_mongodb.sh" "$TRAVIS_BUILD_DIR/scripts/travis/run_mongodb.sh" - - npm i -g npm@8.11.0 - -install: - # Don't let npm send metrics as it creates a file in the .npm folder invalidating the cache every time - - npm config set send-metrics false - - npm i --no-audit - - "$TRAVIS_BUILD_DIR/scripts/travis/install_mongodb.sh" - -before_script: - - "$TRAVIS_BUILD_DIR/scripts/travis/run_mongodb.sh" - -script: - - npm test - -after_script: - - pkill mongod - -branches: - only: - - master - - upgrade-mongodb-drivers - - update-test-environment - -notifications: - email: - - ci@sailsjs.com - - luislobo@gmail.com diff --git a/appveyor.yml b/appveyor.yml deleted file mode 100644 index c68b2724d..000000000 --- a/appveyor.yml +++ /dev/null @@ -1,67 +0,0 @@ -# # # # # # # # # # # # # # # # # # # # # # # # # # -# ╔═╗╔═╗╔═╗╦ ╦╔═╗╦ ╦╔═╗╦═╗ ┬ ┬┌┬┐┬ # -# ╠═╣╠═╝╠═╝╚╗╔╝║╣ ╚╦╝║ ║╠╦╝ └┬┘││││ # -# ╩ ╩╩ ╩ ╚╝ ╚═╝ ╩ ╚═╝╩╚═o ┴ ┴ ┴┴─┘ # -# # -# This file configures Appveyor CI. # -# (i.e. how we run the tests on Windows) # -# # -# https://www.appveyor.com/docs/lang/nodejs-iojs/ # -# # # # # # # # # # # # # # # # # # # # # # # # # # - - -# Test against these versions of Node.js. -environment: - WATERLINE_ADAPTER_TESTS_URL: localhost/testdb - WATERLINE_ADAPTER_TESTS_HOST: localhost - WATERLINE_ADAPTER_TESTS_DATABASE: sails-mongo - NODE_ENV: test - matrix: - - nodejs_version: "10" - - nodejs_version: "12" - - nodejs_version: "14" - -# Install scripts. (runs after repo cloning) -install: - # Get the latest stable version of Node.js - # (Not sure what this is for, it's just in Appveyor's example.) - - ps: Install-Product node $env:nodejs_version - # Don't let npm send metrics as it creates a file in the .npm folder invalidating the cache every time - - npm config set send-metrics false - # Install declared dependencies - - npm install --no-audit - -branches: - only: - - master - - upgrade-mongodb-drivers - - update-test-environment - -# Post-install test scripts. -test_script: - # Output Node and NPM version info. - # (Presumably just in case Appveyor decides to try any funny business? - # But seriously, always good to audit this kind of stuff for debugging.) - - node --version - - npm --version - # Run the actual tests. - - npm test - -# Setup Mongo Database -services: - - mongodb - - -# Don't actually build. -# (Not sure what this is for, it's just in Appveyor's example. -# I'm not sure what we're not building... but I'm OK with not -# building it. I guess.) 
-build: off - - -# # # # # # # # # # # # # # # # # # # # # # # # # # # # - -# TODO: Set up appveyor + mongo*: -# https://www.appveyor.com/docs/services-databases/ -# Old example on how to install different versions of MongoDB added to `scripts/appveyor/install_mongodb.ps1` -# # # # # # # # # # # # # # # # # # # # # # # # # # # # diff --git a/docker-compose.yml b/docker-compose.yml index f4a773433..ebaa59eed 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,22 +1,24 @@ -adapter: - image: node:12 - volumes: - - $PWD:/home/node/sails-mongo - links: - - mongo - environment: - - WATERLINE_ADAPTER_TESTS_DATABASE=sails-mongo - - WATERLINE_ADAPTER_TESTS_URL=mongo/testdb - - WATERLINE_ADAPTER_TESTS_HOST=mongo - - NODE_ENV=test - user: node - working_dir: /home/node/sails-mongo - command: - - bash -c "npm test" +version: "3.8" +services: + adapter: + image: node:20 + volumes: + - $PWD:/home/node/sails-mongo + links: + - mongo + environment: + - WATERLINE_ADAPTER_TESTS_DATABASE=sails-mongo + - WATERLINE_ADAPTER_TESTS_URL=mongo/testdb + - WATERLINE_ADAPTER_TESTS_HOST=mongo + - NODE_ENV=test + user: node + working_dir: /home/node/sails-mongo + command: + - bash -c "npm test" -mongo: - image: mongo:4.2 - restart: always - command: "--logpath=/dev/null" - ports: - - "27017:27017" + mongo: + image: mongo:7 + restart: always + command: "--logpath=/dev/null" + ports: + - "27017:27017" diff --git a/package.json b/package.json index 842ac4f32..1a6f6c85c 100644 --- a/package.json +++ b/package.json @@ -4,14 +4,15 @@ "description": "Mongo DB adapter for Sails.js/Waterline.", "main": "./lib", "scripts": { - "test": "npm run lint && npm run custom-tests", + "test": "npm run lint-test && npm run custom-tests", "custom-tests": "node node_modules/mocha/bin/mocha test/run-adapter-specific-tests && node node_modules/mocha/bin/mocha test/connectable/ && node test/run-standard-tests", - "lint": "node node_modules/eslint/bin/eslint . --max-warnings=0", - "start-mongodb": "docker-compose up -d mongo", - "stop-mongodb": "docker-compose down", - "docker": "docker-compose run adapter bash", - "mongodb-shell": "docker-compose exec mongo mongo", - "docker-test": "docker-compose run adapter bash -c \"npm test\" && docker-compose down" + "lint": "node node_modules/eslint/bin/eslint . --max-warnings=0 ", + "lint-test": "node node_modules/eslint/bin/eslint --rule \"linebreak-style: 0\" --max-warnings=0 . 
", + "start-mongodb": "docker compose up -d mongo", + "stop-mongodb": "docker compose down", + "docker": "docker compose run adapter bash", + "mongodb-shell": "docker compose exec mongo mongosh", + "docker-test": "docker compose run adapter bash -c \"npm test\" && docker compose down" }, "keywords": [ "mongo", @@ -30,7 +31,7 @@ "url": "git://github.com/balderdashy/sails-mongo.git" }, "dependencies": { - "@sailshq/lodash": "^3.10.4", + "@sailshq/lodash": "3.10.4", "async": "3.2.4", "flaverr": "^1.10.0", "machine": "^15.2.2", @@ -39,12 +40,13 @@ }, "devDependencies": { "benchmark": "2.1.4", - "eslint": "8.41.0", - "eslint-plugin-eslint-comments": "^3.2.0", + "eslint": "8.50.0", + "eslint-plugin-eslint-comments": "3.2.0", "mocha": "3.0.2", - "waterline": "^0.13.6", - "waterline-adapter-tests": "^1.0.1", - "waterline-utils": "^1.4.2" + "debug": "4.3.4", + "waterline": "0.15.2", + "waterline-adapter-tests": "1.0.1", + "waterline-utils": "1.4.5" }, "waterlineAdapter": { "interfaces": [ diff --git a/scripts/appveyor/install_mongodb.ps1 b/scripts/appveyor/install_mongodb.ps1 deleted file mode 100644 index 0c3cfdb68..000000000 --- a/scripts/appveyor/install_mongodb.ps1 +++ /dev/null @@ -1,20 +0,0 @@ -# Example. Not yet being used -$msiPath = "$($env:USERPROFILE)\mongodb-win32-x86_64-2008plus-ssl-3.0.4-signed.msi" -(New-Object Net.WebClient).DownloadFile('https://fastdl.mongodb.org/win32/mongodb-win32-x86_64-2008plus-ssl-3.0.4-signed.msi', $msiPath) -cmd /c start /wait msiexec /q /i $msiPath INSTALLLOCATION=C:\mongodb ADDLOCAL="all" -del $msiPath - -mkdir c:\mongodb\data\db | Out-Null -mkdir c:\mongodb\log | Out-Null - -'systemLog: - destination: file - path: c:\mongodb\log\mongod.log -storage: - dbPath: c:\mongodb\data\db' | Out-File C:\mongodb\mongod.cfg -Encoding utf8 - -cmd /c start /wait sc create MongoDB binPath= "C:\mongodb\bin\mongod.exe --service --config=C:\mongodb\mongod.cfg" DisplayName= "MongoDB" start= "demand" - -& c:\mongodb\bin\mongod --version - -Start-Service mongodb diff --git a/scripts/travis/install_mongodb.sh b/scripts/travis/install_mongodb.sh index 499b7190f..edc50fdbf 100755 --- a/scripts/travis/install_mongodb.sh +++ b/scripts/travis/install_mongodb.sh @@ -1,6 +1,6 @@ #!/usr/bin/env bash -MDB_TGZ=mongodb-linux-x86_64-ubuntu1604-${MONGODB}.tgz +MDB_TGZ=mongodb-linux-x86_64-ubuntu2004-${MONGODB}.tgz MDB_ROOT=${TRAVIS_BUILD_DIR}/mongodb/${MONGODB} MDB_DATA=${TRAVIS_BUILD_DIR}/mongodb-data diff --git a/test/run-adapter-specific-tests.js b/test/run-adapter-specific-tests.js index 946cdfcd9..1eeeb88a3 100644 --- a/test/run-adapter-specific-tests.js +++ b/test/run-adapter-specific-tests.js @@ -775,4 +775,3 @@ function createModel(identity, options) { return model; } -