Socket
Socket
Sign in · Demo · Install

dynamic-record

Package Overview
Dependencies
61
Maintainers
1
Versions
30
Alerts
File Explorer

Advanced tools

Install Socket

Detect and block malicious and high-risk dependencies

Install

Comparing version 0.5.3 to 0.5.4

11

build/DynamicCollection.js

@@ -63,14 +63,9 @@ "use strict";

*/
// NOTE: parallel save of collection with auto incrementing index failing (write lock)
// CONSIDER: Saving in series thus slow. Can consider assigning a block of counters at once
saveAll() {
const promises = [];
_.each(this, (model) => {
return Promise.each(this, (model) => {
if (model.save) {
// Consider doing bulk write
promises.push(model.save());
return model.save();
}
});
return Promise.all(promises).then(() => {
return Promise.resolve(this);
});
}

@@ -77,0 +72,0 @@ /**

@@ -9,3 +9,2 @@ "use strict";

// Let's get mongodb working first
// NOTE: making too many connections, each instance create one connection
const connect = require("./mongoConnection.js");

@@ -81,2 +80,7 @@ const schemaValidator = new (require("./schemaValidation.js"))(connect);

*
* If you have a series of models of the same schema, it is recommended
* to put them in a DynamicCollection and calling `saveAll()` on it
* instead of attempting to save them all in parallel. This applies to
* schemas with auto incrementing counters.
*
* @method save

@@ -89,3 +93,2 @@ * @memberOf DynamicRecord.Model

// NOTE: need some way to modify counters (?) or bypass schema check if none exist
// NOTE: parallel save of collection with auto incrementing index failing (write lock)
return _ready.then((col) => {

@@ -92,0 +95,0 @@ if (this._original) {

@@ -268,5 +268,5 @@ "use strict";

}
//renameIndex(columnName, newColumnName){
// Maybe drop index then recreate but do consider why you need to do this
//}
// renameIndex(columnName, newColumnName){
// Maybe drop index then recreate but do consider why you need to do this
// }
/**

@@ -273,0 +273,0 @@ * Remove an index to the table's schema

require("dotenv").config();
const f = require("util").format;
const MongoClient = require("mongodb").MongoClient;
const mongoURL = f("mongodb://%s:%s@%s/%s", process.env.mongo_user, process.env.mongo_pass, process.env.mongo_server, process.env.mongo_db_name);
const mongoURL = `mongodb://${process.env.mongo_user}:${process.env.mongo_pass}@${process.env.mongo_server}/${process.env.mongo_db_name}`;
const client = new MongoClient(mongoURL, {

@@ -14,6 +13,1 @@ poolSize: 10,

});
// module.exports = function(databaseAddress, databaseName, username, password){
// const mongoURL = f("mongodb://%s:%s@%s/%s", username, password, databaseAddress, databaseName);
// const connection = MongoClient.connect(mongoURL, {poolSize: 10});
// return connection;
// };
{
"name": "dynamic-record",
"version": "0.5.3",
"version": "0.5.4",
"description": "A bare minimum Javascript implementation of the Active Record pattern",

@@ -40,5 +40,5 @@ "keywords": [

"bluebird": "^3.5.1",
"commander": "^2.20.0",
"dotenv": "^5.0.1",
"inquirer": "^6.3.1",
"commander": "^4.0.0",
"dotenv": "^8.2.0",
"inquirer": "^7.0.0",
"lodash": "^4.17.15",

@@ -49,11 +49,13 @@ "moment": "^2.22.2",

"devDependencies": {
"@types/node": "^10.12.2",
"chai": "^4.1.2",
"eslint": "^4.18.2",
"@types/node": "^12.12.7",
"@typescript-eslint/eslint-plugin": "^2.6.1",
"@typescript-eslint/parser": "^2.6.1",
"chai": "^4.2.0",
"eslint": "^6.6.0",
"jsdoc": "^3.6.3",
"mocha": "^5.0.4",
"surge": "^0.20.5",
"mocha": "^6.2.2",
"surge": "^0.21.3",
"tui-jsdoc-template": "^1.2.2",
"typescript": "^3.1.6"
"typescript": "^3.7.2"
}
}

@@ -48,3 +48,3 @@ import _ = require("lodash");

*/
// Not sure if this is necessary as we can just destructure array
// Not sure if this is necessary as we can just destructure array
static fromArray(arr, Model){

@@ -67,15 +67,9 @@ const result = _.reduce(arr, (acc, el, i) => {

*/
// NOTE: parallel save of collection with auto incrementing index failing (write lock)
// CONSIDER: Saving in series thus slow. Can consider assigning a block of counters at once
saveAll(){
const promises = [];
_.each(this, (model) => {
return Promise.each(this, (model) => {
if(model.save){
// Consider doing bulk write
promises.push(model.save());
return model.save();
}
});
return Promise.all(promises).then(() => {
return Promise.resolve(this);
});
}

@@ -82,0 +76,0 @@

@@ -8,3 +8,2 @@ require("dotenv").config();

// Let's get mongodb working first
// NOTE: making too many connections, each instance create one connection
const connect = require("./mongoConnection.js");

@@ -99,2 +98,7 @@ const schemaValidator = new (require("./schemaValidation.js"))(connect);

*
* If you have a series of models of the same schema, it is recommended
* to put them in a DynamicCollection and calling `saveAll()` on it
* instead of attempting to save them all in parallel. This applies to
* schemas with auto incrementing counters.
*
* @method save

@@ -107,3 +111,2 @@ * @memberOf DynamicRecord.Model

// NOTE: need some way to modify counters (?) or bypass schema check if none exist
// NOTE: parallel save of collection with auto incrementing index failing (write lock)
return _ready.then((col) => {

@@ -284,3 +287,3 @@ if(this._original){

_.each(results, (result) => {
result._original = _.cloneDeep(result.data)
result._original = _.cloneDeep(result.data);
});

@@ -312,3 +315,3 @@

_.each(results, (result) => {
result._original = _.cloneDeep(result.data)
result._original = _.cloneDeep(result.data);
});

@@ -315,0 +318,0 @@

@@ -330,5 +330,5 @@ require("dotenv").config();

//renameIndex(columnName, newColumnName){
// Maybe drop index then recreate but do consider why you need to do this
//}
// renameIndex(columnName, newColumnName){
// Maybe drop index then recreate but do consider why you need to do this
// }

@@ -457,3 +457,3 @@ /**

type: type
}
};

@@ -675,6 +675,6 @@ return this._writeSchema().then(() => {

//private _validate(){
// Validate database schema with this.definition
// Return boolean
//}
// private _validate(){
// Validate database schema with this.definition
// Return boolean
// }
}

@@ -681,0 +681,0 @@

require("dotenv").config();
const f = require("util").format;
const MongoClient = require("mongodb").MongoClient;
const mongoURL = f("mongodb://%s:%s@%s/%s", process.env.mongo_user, process.env.mongo_pass, process.env.mongo_server, process.env.mongo_db_name);
const mongoURL = `mongodb://${process.env.mongo_user}:${process.env.mongo_pass}@${process.env.mongo_server}/${process.env.mongo_db_name}`;
const client = new MongoClient(mongoURL, {

@@ -14,8 +13,2 @@ poolSize: 10,

return Promise.resolve({db, client});
});
// module.exports = function(databaseAddress, databaseName, username, password){
// const mongoURL = f("mongodb://%s:%s@%s/%s", username, password, databaseAddress, databaseName);
// const connection = MongoClient.connect(mongoURL, {poolSize: 10});
// return connection;
// };
});
// For setup and clean ups
require("dotenv").config();
const f = require("util").format;
const Promise = require("bluebird");
const _ = require("lodash");
const MongoClient = require("mongodb").MongoClient;
const mongoURL = f("mongodb://%s:%s@%s/%s", process.env.mongo_user, process.env.mongo_pass, process.env.mongo_server, process.env.mongo_db_name);
const mongoURL = `mongodb://${process.env.mongo_user}:${process.env.mongo_pass}@${process.env.mongo_server}/${process.env.mongo_db_name}`;
const connect = MongoClient.connect(mongoURL, {useUnifiedTopology: true});

@@ -133,3 +132,41 @@

it("should set the autoincrementing index correctly");
describe("autoIncrement", function(){
beforeEach(function(){
return connect.then((client) => {
const db = client.db();
return db.collection("_counters").insertOne({
"_$id": testSchema.$id,
sequences: {
int: 0
}
});
});
});
afterEach(function(){
return connect.then((client) => {
const db = client.db();
return db.collection("_counters").deleteOne({"_$id": testSchema.$id});
});
});
it("should set the autoincrementing index correctly", function(){
col.forEach((model) => {
delete model.data.int;
});
return col.saveAll().then((res) => {
return connect.then((client) => {
const db = client.db();
return db.collection(testSchema.$id).find().toArray();
});
}).then((res) => {
for(let i=0; i<res.length; i++){
assert.equal(res[i].int, i+1, `database entry has auto increment value ${i+1}`);
}
for(let i=0; i<col.length; i++){
assert.equal(col[i].data.int, i+1, `collection entry has auto increment value ${i+1}`);
}
});
});
});
});

@@ -136,0 +173,0 @@

// For setup and clean ups
require("dotenv").config();
const f = require("util").format;
const Promise = require("bluebird");
const _ = require("lodash");
const MongoClient = require("mongodb").MongoClient;
const mongoURL = f("mongodb://%s:%s@%s/%s", process.env.mongo_user, process.env.mongo_pass, process.env.mongo_server, process.env.mongo_db_name);
const mongoURL = `mongodb://${process.env.mongo_user}:${process.env.mongo_pass}@${process.env.mongo_server}/${process.env.mongo_db_name}`;
const connect = MongoClient.connect(mongoURL, {useUnifiedTopology: true});

@@ -9,0 +8,0 @@

// For setup and clean ups
require("dotenv").config();
const f = require("util").format;
const Promise = require("bluebird");
const _ = require("lodash");
const MongoClient = require("mongodb").MongoClient;
const mongoURL = f("mongodb://%s:%s@%s/%s", process.env.mongo_user, process.env.mongo_pass, process.env.mongo_server, process.env.mongo_db_name);
const mongoURL = `mongodb://${process.env.mongo_user}:${process.env.mongo_pass}@${process.env.mongo_server}/${process.env.mongo_db_name}`;
const connect = MongoClient.connect(mongoURL, {useUnifiedTopology: true});

@@ -9,0 +8,0 @@

// For setup and clean ups
require("dotenv").config();
const f = require("util").format;
const Promise = require("bluebird");
const _ = require("lodash");
const MongoClient = require("mongodb").MongoClient;
const mongoURL = f("mongodb://%s:%s@%s/%s", process.env.mongo_user, process.env.mongo_pass, process.env.mongo_server, process.env.mongo_db_name);
const mongoURL = `mongodb://${process.env.mongo_user}:${process.env.mongo_pass}@${process.env.mongo_server}/${process.env.mongo_db_name}`;
const connect = MongoClient.connect(mongoURL, {useUnifiedTopology: true});

@@ -9,0 +8,0 @@

@@ -28,3 +28,3 @@ {

},
"required": ["string", "int", "float"]
"required": ["string", "float"]
}

Sorry, the diff of this file is not supported yet

Sorry, the diff of this file is not supported yet

Socket — SOC 2 Logo

Product

  • Package Alerts
  • Integrations
  • Docs
  • Pricing
  • FAQ
  • Roadmap

Stay in touch

Get open source security insights delivered straight into your inbox.


  • Terms
  • Privacy
  • Security

Made with ⚡️ by Socket Inc