
fh-sync - npm Package Compare versions

Comparing version 1.0.3 to 1.0.4


examples/basic-express-example/lib/dataAccessLayer.js

@@ -15,3 +15,4 @@ // Sample data source returned by list handler

-var datalistHandler = function (dataset_id, query_params, cb, meta_data) {
+var datalistHandler = function (dataset_id, query_params, meta_data, cb) {
   console.log('listing items for dataset "%s"', dataset_id);
   return cb(null, data);

@@ -22,2 +23,2 @@ }

   list: datalistHandler
 };
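
The change above swaps the last two arguments of the list handler: the callback now comes last, after `meta_data`. A minimal sketch of a handler written against the new 1.0.4 signature (the in-memory `data` map is a hypothetical stand-in for the example's data source):

```
// Hypothetical in-memory data source: a map of uid -> record.
var data = {
  '00001': { item: 'item1' },
  '00002': { item: 'item2' }
};

// 1.0.4 argument order: the callback is the final parameter.
var datalistHandler = function (dataset_id, query_params, meta_data, cb) {
  console.log('listing items for dataset "%s"', dataset_id);
  // Call back with (error, records).
  return cb(null, data);
};

module.exports = {
  list: datalistHandler
};
```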


examples/basic-express-example/package.json

@@ -12,10 +12,11 @@ {

"dependencies": {
"body-parser": "^1.17.2",
"async": "~2.4.1",
"body-parser": "~1.17.2",
"cookie-parser": "~1.4.3",
"cors": "^2.8.3",
"cors": "~2.8.3",
"debug": "~2.6.3",
"express": "~4.15.3",
"fh-sync": "^1.0.0-RC1",
"fh-sync": "~1.0.0-RC1",
"morgan": "~1.8.1"
}
}

examples/basic-express-example/server.js

@@ -0,1 +1,3 @@

 'use strict';
+var sync = require('fh-sync');

@@ -5,10 +7,4 @@ var cors = require('cors');

 var bodyParser = require('body-parser');
+var async = require('async');
-var app = express();
-//middleware
-app.use(bodyParser.json());
-app.use(cors());
 // Sync framework requires mongodb and redis to be running

@@ -18,63 +14,76 @@ var mongodbConnectionString = process.env.MONGO_CONNECTION_URL || 'mongodb://127.0.0.1:27017/sync';

-// Following example will sync for single domain object
-// called messages
-var datasetId = "messages";
+function startApplicationServer (err) {
+  if (err) {
+    console.log('error starting sync server:');
+    throw err;
+  }
-app.get('/', function (req, res) {
-  res.send('Sample application is running!')
-})
+  console.log('Sync initialised')
-/**
- * Sync express api required for sync clients
- * All sync clients will call that endpoint to sync data
- */
-app.post('/sync/:datasetId', function (req, res) {
-  var dataset_id = req.params.datasetId;
-  var params = req.body;
-  // Invoke action in sync for specific dataset
-  sync.api.invoke(dataset_id, params, function (err, result) {
-    if (err) {
-      res.status(500).json(err);
-      return;
-    }
-    return res.json(result)
-  });
-});
+  var app = express();
-var mongoOptions = {};
-// Initialize sync to connect to mongodb and redis
-sync.api.connect(mongodbConnectionString, mongoOptions, redisUrl, function (err) {
-  if (err) {
-    console.log('Problem with initializing sync', err);
-  } else {
-    console.log('Sync initialized');
-    activateForDataset(datasetId);
-  }
-});
+  //middleware
+  app.use(bodyParser.json());
+  app.use(cors());
-/**
- * This function will create new sync dataset and seed initial data
- */
-function activateForDataset(datasetId) {
-  // See documentation for more options
-  var options = {
-    syncFrequency: 10 // seconds
-  };
-  console.log("Init sync data handlers for dataset");
-  sync.api.init(datasetId, options, function (err) {
-    if (err) {
-      console.error(err);
-    } else {
-      var dataHandler = require("./lib/dataAccessLayer");
-      // List is just one of the CRUD operations that sync supports.
-      // See documentation for more options.
-      // If not defined data will be handled by mongodb driver.
-      sync.api.handleList(datasetId, dataHandler.list);
-    }
+  app.get('/', function (req, res) {
+    res.send('Sample application is running!')
+  })
+  /**
+   * Sync express api required for sync clients
+   * All sync clients will call that endpoint to sync data
+   */
+  app.post('/sync/:datasetId', function (req, res) {
+    var dataset_id = req.params.datasetId;
+    var params = req.body;
+    // Invoke action in sync for specific dataset
+    sync.invoke(dataset_id, params, function (err, result) {
+      if (err) {
+        res.status(500).json(err.toString());
+        return;
+      }
+      return res.json(result)
+    });
+  });
+  app.listen(3000, function () {
+    console.log('\nExample app listening on port 3000!');
+    console.log('\nRun the following from a terminal to get records via sync:');
+    console.log('curl http://localhost:3000/sync/messages -X POST --data \'{"fn": "syncRecords"}\' -H "content-type:application/json"\n')
+  });
+}
-app.listen(3000, function () {
-  console.log('Example app listening on port 3000!')
-})
+async.series([
+  function connectSync (next) {
+    var mongoOptions = {};
+    sync.connect(mongodbConnectionString, mongoOptions, redisUrl, next);
+  },
+  function initialiseDataset (next) {
+    // See documentation for more options
+    var options = {
+      syncFrequency: 10 // seconds
+    };
+    // Following example will sync for single domain object called messages
+    var datasetId = "messages";
+    console.log("Init sync data handlers for dataset");
+    sync.init(datasetId, options, function (err) {
+      if (err) {
+        next(err);
+      } else {
+        var dataHandler = require("./lib/dataAccessLayer");
+        // List is just one of the CRUD operations that sync supports.
+        // See documentation for more options.
+        // If not defined data will be handled by mongodb driver.
+        sync.handleList(datasetId, dataHandler.list);
+        next();
+      }
+    });
+  }
+], startApplicationServer);
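
The rewritten server only registers a list handler. The other CRUD handlers declared in the type definitions below follow the same shape; a hedged sketch of registering create and update handlers after `sync.init()` succeeds (the persistence logic here is hypothetical):

```
// Sketch only: assumes sync.init() has already completed for datasetId.
sync.handleCreate(datasetId, function (datasetId, data, metaData, cb) {
  // Persist `data` in your backend here, then return the stored record.
  cb(null, data);
});

sync.handleUpdate(datasetId, function (datasetId, uid, data, metaData, cb) {
  // Apply `data` to the record identified by `uid`, then return it.
  cb(null, data);
});
```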

@@ -8,3 +8,2 @@ var mongodbQ = require('mongodb-queue');

var debugError = syncUtil.debugError;
/**

@@ -42,4 +41,6 @@ * Make sure the real queue is created. Otherwise throws an error.

 this.mongodb = opts.mongodb;
-this.queueOptions = {visibility: opts.visibility || 30};
-this.queueTTL = opts.queueMessagesTTL || 24*60*60;
+this.queueOptions = {
+  visibility: opts.visibility || 30,
+  ttl: opts.queueMessagesTTL || 24*60*60
+};
 this.queue;
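
For context, the TTL behaviour being folded into `queueOptions` here is backed by a MongoDB TTL index on the queue collection's `deleted` field, as the hunk below shows. A minimal sketch of such an index with the mongodb driver (the `db` handle and the 'sync_queue' collection name are hypothetical):

```
// Sketch: a TTL index that removes deleted queue messages after `ttl` seconds.
// Assumes `db` is a connected mongodb driver database instance.
var ttl = 24 * 60 * 60; // seconds; the default used for queueMessagesTTL

db.collection('sync_queue').createIndex(
  { deleted: 1 },
  { expireAfterSeconds: ttl, background: true },
  function (err) {
    if (err) throw err;
  }
);
```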

@@ -77,55 +78,55 @@ }

 async.waterfall([
-  function createIndexes(callback) {
-    debug('[%s] creating queue indexes', self.queueName);
-    self.queue.createIndexes(callback);
-  },
-  function addExtraIdIndex(createdIndex, callback) {
-    debug('[%s] adding extra _id index for queue', self.queueName);
-    collection.createIndex({ deleted : 1, visible : 1, _id : 1}, callback);
-  },
-  function addExtraAckIndex(createdIndex, callback) {
-    debug('[%s] adding extra ack index for queue', self.queueName);
-    collection.createIndex({ ack: 1, visible : 1, deleted : 1}, callback);
-  },
-  function listIndexes(createdIndex, callback) {
+  // Check if there's already a TTL index for the 'deleted' field.
+  // If there is, and the TTL value is different, delete it so it can be
+  // recreated by the mongodb-queue module
+  function listIndexes(callback) {
     debug('[%s] list existing indexes', self.queueName);
-    collection.indexInformation({full: true}, callback);
+    collection.indexInformation({full: true}, function(err, indexInfo) {
+      if (err) {
+        debug('[%s] error getting indexInfo. skipping ttl index check: %s', self.queueName, err);
+        return callback(null, null);
+      }
+      return callback(null, indexInfo);
+    });
   },
   function checkIndexTTL(indexInfo, callback) {
+    if (!indexInfo) {
+      // skipping ttl index check
+      return callback(null, false);
+    }
     debug('[%s] found queue indexInfo : %j', self.queueName, indexInfo);
     var existingIndex = _.findWhere(indexInfo, {name: indexName});
-    var needDrop = false;
-    var needCreate = false;
     if (!existingIndex) {
-      needCreate = true;
-    } else if (existingIndex.expireAfterSeconds !== self.queueTTL) {
-      needDrop = true;
-      needCreate = true;
+      return callback(null, false);
+    } else if (existingIndex.expireAfterSeconds !== self.queueOptions.ttl) {
+      return callback(null, true);
     }
-    return callback(null, needDrop, needCreate);
   },
-  function dropTTLIndex(needDrop, needCreate, callback) {
+  function dropTTLIndex(needDrop, callback) {
     if (needDrop) {
       debug('[%s] dropping ttl index: %s', self.queueName, indexName);
-      collection.dropIndex(indexName, function(err){
-        return callback(err, needCreate);
+      collection.dropIndex(indexName, function(err) {
+        return callback(err);
       });
     } else {
       debug('[%s] skip dropping ttl index', self.queueName);
-      return callback(null, needCreate);
+      return callback(null);
     }
   },
-  function createTTLIndex(needCreate, callback) {
-    if (needCreate) {
-      debug('[%s] creating ttl index: %s', self.queueName, indexName);
-      collection.createIndex({'deleted': 1}, {'expireAfterSeconds': self.queueTTL, 'background': true}, callback);
-    } else {
-      debug('[%s] skip creating ttl index', self.queueName);
-      return callback();
-    }
-  }
+  function createIndexes(callback) {
+    debug('[%s] creating queue indexes', self.queueName);
+    self.queue.createIndexes(callback);
+  },
+  function addExtraIdIndex(createdIndex, callback) {
+    debug('[%s] adding extra _id index for queue', self.queueName);
+    collection.createIndex({ deleted : 1, visible : 1, _id : 1}, callback);
+  },
+  function addExtraAckIndex(createdIndex, callback) {
+    debug('[%s] adding extra ack index for queue', self.queueName);
+    collection.createIndex({ ack: 1, visible : 1, deleted : 1}, callback);
+  }
 ], function(err){
   if (err) {
-    debugError('[%s] failed to create queue index due to error: %s', self.queueName, err);
+    debugError('[%s] failed to create queue index due to error: %s %s', self.queueName, err, err.stack);
     return cb(err);

@@ -132,0 +133,0 @@ }

@@ -54,3 +54,3 @@ var metricsModule = require('./sync-metrics');

pendingWorkerConcurrency: 1,
/** @type {Object} the backoff strategy for the pending worker to use.
* Default strategy is `exp` (exponential) with a max delay of 60s. The min value will always be the same as `pendingWorkerInterval`

@@ -63,3 +63,3 @@ * The other valid strategy is `fib` (fibonacci). Set it to anything else will disable the backoff behavior */

ackWorkerConcurrency: 1,
/** @type {Object} the backoff strategy for the ack worker to use.
* Default strategy is `exp` (exponential) with a max delay of 60s. The min value will always be the same as `ackWorkerInterval`

@@ -72,3 +72,3 @@ * The other valid strategy is `fib` (fibonacci). Set it to anything else will disable the backoff behavior */

syncWorkerConcurrency: 1,
/** @type {Object} the backoff strategy for the sync worker to use.
* Default strategy is `exp` (exponential) with a max delay of 1s. The min value will always be the same as `syncWorkerInterval`

@@ -160,3 +160,3 @@ * Other valid strategies are `none` and `fib` (fibonacci).*/

 if (mongoDbClient === null || redisClient === null) {
-  throw new Error('MongoDB Client & Redis Client are not connected. Ensure connect() is called before calling start');
+  throw new Error('MongoDB Client & Redis Client are not connected. Ensure you have called sync.connect() before calling sync.init()');
 }

@@ -378,2 +378,2 @@

}
};
package.json

{
 "name": "fh-sync",
-"version": "1.0.3",
+"version": "1.0.4",
"description": "FeedHenry Data Synchronization Server",

@@ -8,3 +8,3 @@ "main": "index.js",

"async": "2.1.5",
"backoff": "^2.5.0",
"backoff": "2.5.0",
"debug": "2.6.3",

@@ -14,9 +14,8 @@ "fh-component-metrics": "2.7.0",

"mongodb-lock": "0.4.0",
"mongodb-queue": "2.2.0",
"mongodb-queue": "david-martin/mongodb-queue#ttl-index-01",
"parse-duration": "0.1.1",
"redis": "^2.6.5",
"redis": "2.6.5",
"underscore": "1.7.0"
},
"devDependencies": {
"jsdoc": "^3.4.3",
"grunt": "^0.4.5",

@@ -31,7 +30,8 @@ "grunt-cli": "^1.2.0",

"sinon": "^1.17.5",
"typedoc": "^0.7.1",
"valid-url": "1.0.9"
},
"types":"./types/fh-sync.d.ts",
"types": "./types/fh-sync.d.ts",
"scripts": {
"docs": "./node_modules/.bin/jsdoc -d docs/api ./lib",
"doc": "typedoc --includeDeclarations --excludeExternals --out docs/api ./types/fh-sync.d.ts",
"pretest": "./scripts/pretest.sh",

@@ -38,0 +38,0 @@ "test": "grunt fh:unit mochaTest:integration",

README.md

@@ -6,5 +6,5 @@ # fh-sync

## Dependencies
You will need a local Mongodb server and Redis server. For information on setting up these

@@ -15,6 +15,23 @@ Mongodb see

Redis see
https://redis.io/topics/quickstart
## Example Server
To run the example server, start MongoDB and Redis locally on their default ports,
then issue the following commands in this repository:
```
cd examples/basic-express-example/
npm install
node server.js
```
When the server has started try making the following cURL request:
```
curl http://localhost:3000/sync/messages -X POST --data '{"fn": "syncRecords"}' -H "content-type:application/json"
```
## Usage

@@ -65,2 +82,1 @@

Then to run the tests use ```npm test```

types/fh-sync.d.ts

@@ -6,307 +6,354 @@ // Type definitions for fh-sync

declare module SyncCloud {
  /**
   * Options used to initialize Sync Server
   */
  interface SyncGlobalOptions {
    /** How often pending workers should check for the next job, in ms. Default: 1 */
    pendingWorkerInterval?: number;
    /** The concurrency value of the pending workers. Default is 1. Can be set to 0 to disable the pendingWorkers completely */
    pendingWorkerConcurrency?: number;
    /** The backoff strategy for the pending worker to use.
     * Default strategy is `exp` (exponential) with a max delay of 60s. The min value will always be the same as `pendingWorkerInterval`.
     * The other valid strategy is `fib` (fibonacci). Setting it to anything else will disable the backoff behavior. */
    pendingWorkerBackoff?: PendingWorkerBackoff;
    /** How often ack workers should check for the next job, in ms. Default: 1 */
    ackWorkerInterval?: number;
    /** The concurrency value of the ack workers. Default is 1. Can be set to 0 to disable the ackWorker completely */
    ackWorkerConcurrency?: number;
    /** The backoff strategy for the ack worker to use.
     * Default strategy is `exp` (exponential) with a max delay of 60s. The min value will always be the same as `ackWorkerInterval`.
     * The other valid strategy is `fib` (fibonacci). Setting it to anything else will disable the backoff behavior. */
    ackWorkerBackoff?: AckWorkerBackoff;
    /** How often sync workers should check for the next job, in ms. Default: 100 */
    syncWorkerInterval?: number;
    /** The concurrency value of the sync workers. Default is 1. Can be set to 0 to disable the syncWorker completely. */
    syncWorkgerConcurrency?: number;
    /** The backoff strategy for the sync worker to use.
     * Default strategy is `exp` (exponential) with a max delay of 1s. The min value will always be the same as `syncWorkerInterval`.
     * Other valid strategies are `none` and `fib` (fibonacci). */
    syncWorkerBackoff?: SyncWorkerBackoff;
    /** How often the scheduler should check the datasetClients, in ms. Default: 500 */
    schedulerInterval?: number;
    /** The max time a scheduler can hold the lock for, in ms. Default: 20000 */
    schedulerLockMaxTime?: number;
    /** The default lock name for the sync scheduler */
    schedulerLockName?: string;
    /** The default concurrency value when updating dataset clients in the sync API. Default is 10. In most cases this value should not need to be changed. */
    datasetClientUpdateConcurrency?: number;
    /** Enable/disable collecting sync stats to allow querying via an endpoint */
    collectStats?: boolean;
    /** The number of records to keep in order to compute the stats data. Default is 1000. */
    statsRecordsToKeep?: number;
    /** How often the stats should be collected, in milliseconds. */
    collectStatsInterval?: number;
    /** The host of the influxdb server. If set, the metrics data will be sent to the influxdb server. */
    metricsInfluxdbHost?: string;
    /** The port of the influxdb server. It should be a UDP port. */
    metricsInfluxdbPort?: number;
    /** The concurrency value for the component metrics. Default is 10. This value should be increased if there are many concurrent workers; otherwise the memory usage of the app could go up. */
    metricsReportConcurrency?: number;
    /** Whether to cache the dataset client records using redis. This can help improve performance for the syncRecords API.
     * Can be turned on if no records are shared between many different dataset clients. Default is false. */
    useCache?: boolean;
    /** The TTL (Time To Live) value for the messages on the queue, in seconds. Defaults to 24 hours. */
    queueMessagesTTL?: string;
    /** Specify the maximum retention time of an inactive datasetClient. Any inactive datasetClient that is older than this period of time will be removed. */
    datasetClientCleanerRetentionPeriod?: string;
    /** Specify the frequency the datasetClient cleaner should run. Default every hour ('1h'). */
    datasetClientCleanerCheckFrequency?: string;
  }

  /**
   * Backoff strategy.
   * Example: {strategy: 'exp', max: 60*1000}
   */
  interface PendingWorkerBackoff {
    strategy: string;
    max: number;
  }

  /**
   * Backoff strategy.
   * Example: {strategy: 'exp', max: 60*1000}
   */
  interface AckWorkerBackoff {
    strategy: string;
    max: number;
  }

  /**
   * Backoff strategy.
   * Example: {strategy: 'exp', max: 60*1000}
   */
  interface SyncWorkerBackoff {
    strategy: string;
    max: number;
  }

  type StandardCb<T> = (err: Error | string | undefined, res: T | undefined) => void;
  type NoRespCb = (err: Error | string | undefined) => void;

  /**
   * Options used to initialize sync for a specific dataset
   */
  interface SyncInitOptions {
    /**
     * Value indicating how often the dataset client should be synced with the backend. Matches the client's default frequency. Value in seconds.
     */
    syncFrequency?: number,
    /**
     * Value that will be used to decide if the dataset client is no longer active.
     */
    clientSyncTimeout?: number,
    /**
     * Value that determines how long it should wait for the backend list operation to complete
     */
    backendListTimeout?: number,
    /**
     * Specify the max wait time the dataset can be scheduled to sync again after its previous schedule, in seconds.
     */
    maxScheduleWaitTime?: number
  }

  /**
   * Parameters object for request and response interceptors
   */
  interface SyncInterceptParams {
    query_params: any;
    metaData: any;
  }

  /**
   * Connect the sync server to mongo and redis
   *
   * @param mongoDBConnectionUrl
   * @param mongoDBConnectionOption
   * @param redisUrl
   * @param cb
   */
  function connect(mongoDBConnectionUrl: string, mongoDBConnectionOption: any, redisUrl: string, cb: StandardCb<void>): void;

  /**
   * Initialize sync for a specific dataset
   *
   * @param datasetId
   * @param options
   * @param callback
   */
  function init(datasetId: string, options: SyncInitOptions, callback: StandardCb<void>): void;

  /**
   * Internal method used to invoke sync methods. Used to handle JSON requests from clients.
   * Supported operations: 'sync', 'syncRecords', 'listCollisions', 'removeCollision'
   *
   * @param datasetId
   * @param options
   * @param callback
   */
  function invoke(datasetId: string, options: any, callback: (err: any, result: any) => void): void;

  /**
   * Stop the sync loop for a dataset
   *
   * @param datasetId
   * @param onStop callback called when the operation is finished
   */
  function stop(datasetId: string, onStop: NoRespCb): void;

  /**
   * Stop the sync loop for all datasets
   *
   * @param onStop callback called when the operation is finished
   */
  function stopAll(onStop: StandardCb<string[]>): void;

  /**
   * Handle the list operation for a specific dataset.
   * May be used to override the default data handler to control how sync retrieves and stores data
   *
   * @param datasetId - unique id of the dataset (usually a collection or table in your database)
   * @param onList - function called to retrieve data
   *   params - set of call parameters (usually a query string) used to filter the data
   *   metaData - metadata for the query - can contain any additional information that is not part of the query
   */
  function handleList(datasetId: string, onList: (datasetId: string, params: any, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle the list operation for all datasets
   *
   * @param onList - function called to retrieve data
   *   params - set of call parameters (usually a query string) used to filter the data
   *   metaData - metadata for the query - can contain any additional information that is not part of the query
   */
  function globalHandleList(onList: (datasetId: string, params: any, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle the create operation for a specific dataset
   *
   * @param datasetId - unique id of the dataset (usually a collection or table in your database)
   * @param onCreate - function called to create a data entry
   *   data - data that needs to be stored
   *   metaData - metadata for the query - can contain any additional information that is not part of the query
   */
  function handleCreate(datasetId: string, onCreate: (datasetId: string, data: any, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle the create operation for all datasets
   *
   * @param onCreate - function called to create a data entry
   *   data - data that needs to be stored
   *   metaData - metadata for the query - can contain any additional information that is not part of the query
   */
  function globalHandleCreate(onCreate: (datasetId: string, data: any, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle the read operation for a specific dataset
   *
   * @param datasetId - unique id of the dataset (usually a collection or table in your database)
   * @param onRead - function called to read a single data entry
   *   uid - data identifier
   *   metaData - metadata for the query - can contain any additional information that is not part of the query
   */
  function handleRead(datasetId: string, onRead: (datasetId: string, uid: any, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle the read operation for all datasets
   *
   * @param onRead - function called to read a single data entry
   *   uid - data identifier
   *   metaData - metadata for the query - can contain any additional information that is not part of the query
   */
  function globalHandleRead(onRead: (datasetId: string, uid: string, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle the update operation for a specific dataset
   *
   * @param datasetId - unique id of the dataset (usually a collection or table in your database)
   * @param onUpdate - function called to update a single data entry
   *   uid - data identifier
   *   data - data that needs to be stored
   *   metaData - metadata for the query - can contain any additional information that is not part of the query
   */
  function handleUpdate(datasetId: string, onUpdate: (datasetId: string, uid: string, data: any, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle the update operation for all datasets
   *
   * @param onUpdate - function called to update a single data entry
   *   uid - data identifier
   *   data - data that needs to be stored
   *   metaData - metadata for the query - can contain any additional information that is not part of the query
   */
  function globalHandleUpdate(onUpdate: (datasetId: string, uid: string, data: any, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle the delete operation for a specific dataset
   *
   * @param datasetId - unique id of the dataset (usually a collection or table in your database)
   * @param onDelete - function called to delete a single data entry
   *   uid - data identifier
   *   metaData - metadata for the query - can contain any additional information that is not part of the query
   */
  function handleDelete(datasetId: string, onDelete: (datasetId: string, uid: string, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle the delete operation for all datasets
   *
   * @param onDelete - function called to delete a single data entry
   *   uid - data identifier
   *   metaData - metadata for the query - can contain any additional information that is not part of the query
   */
  function globalHandleDelete(onDelete: (datasetId: string, uid: string, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle a data collision for a specific dataset (when both entries were changed)
   *
   * @param datasetId
   * @param onCollision method called on collision
   */
  function handleCollision(datasetId: string, onCollision: (datasetId: string, hash: string, timestamp: any, uid: string, pre: any, post: any, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Handle a data collision for all managed datasets (when both entries were changed)
   *
   * @param onCollision method called on collision
   */
  function globalHandleCollision(onCollision: (datasetId: string, hash: string, timestamp: Date, uid: string, pre: any, post: any, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * List collisions for a specific dataset
   *
   * @param datasetId
   * @param onList
   */
  function listCollisions(datasetId: string, onList: (datasetId: string, metaData: any, callback: StandardCb<{ [hash: string]: any }>) => void): void;

  /**
   * List collisions for all datasets
   *
   * @param onList
   */
  function globalListCollisions(onList: (datasetId: string, metaData: any, callback: StandardCb<{ [hash: string]: any }>) => void): void;

  /**
   * Remove a collision record from a dataset
   */
  function removeCollision(datasetId: string, onRemove: (datasetId: string, collision_hash: string, metaData: any, callback: StandardCb<any>) => void): void;

  /**
   * Request interceptor for a dataset - allows performing custom operations before executing a sync method.
   */
  function interceptRequest(datasetId: string, onIntercept: (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void): void;

  /**
   * Response interceptor for a dataset - allows performing custom operations after executing a sync method.
   */
  function interceptResponse(datasetId: string, onIntercept: (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void): void;

  /**
   * Set configuration for sync
   */
  function setConfig(config: SyncGlobalOptions): void;

  /**
   * Request interceptor for all sync calls - allows performing custom operations before executing a sync method.
   */
  function globalInterceptRequest(onIntercept: (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void): void;

  /**
   * Response interceptor for all sync calls - allows performing custom operations after executing a sync method.
   */
  function globalInterceptResponse(onIntercept: (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void): void;

  /**
   * Sets a custom global hashing method for determining whether objects were changed.
   *
   * @param datasetId
   * @param hashFunction performs hashing for the array of hashes returned for specific datasets
   */
  function setGlobalHashFn(datasetId: string, hashFunction: (target: string[]) => string): void;

  /**
   * Sets a custom per-dataset hashing method for determining whether objects were changed.
   *
   * @param datasetId
   * @param hashFunction performs hashing for a record of the dataset
   */
  function setRecordHashFn(datasetId: string, hashFunction: (target: any) => string): void;
}
export = SyncCloud;
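
Taken together, the declarations above cover the 1.0.4 surface used by the example server. A minimal end-to-end sketch in plain JavaScript (the connection URLs are assumed local defaults, matching the example; the interceptor body is illustrative):

```
var sync = require('fh-sync');

var mongoUrl = 'mongodb://127.0.0.1:27017/sync';
var redisUrl = 'redis://127.0.0.1:6379';

// Optional global tuning; every SyncGlobalOptions field is optional.
sync.setConfig({ syncWorkerInterval: 100, useCache: false });

// connect() must complete before init(), per the error message added in 1.0.4.
sync.connect(mongoUrl, {}, redisUrl, function (err) {
  if (err) throw err;
  sync.init('messages', { syncFrequency: 10 }, function (err) {
    if (err) throw err;
    // Handlers and interceptors can be registered once the dataset is initialised.
    sync.interceptRequest('messages', function (datasetId, params, cb) {
      // Inspect params.query_params / params.metaData here if needed.
      cb();
    });
  });
});
```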