From ced9f4278f014e0e15dbcf2f452b74d7d856d7bf Mon Sep 17 00:00:00 2001
From: Evan Shortiss
Date: Tue, 21 Nov 2017 15:29:39 -0800
Subject: [PATCH 01/12] add typescript example. cleanup definition comments.
 remove need for external grunt

---
 .editorconfig                                 |  11 +
 .gitignore                                    |   3 +
 .travis.yml                                   |   4 +-
 README.md                                     |   5 +-
 .../basic-express-typescript/package.json     |  25 ++
 examples/basic-express-typescript/route.js    |  21 +
 .../basic-express-typescript/route.js.map     |   1 +
 examples/basic-express-typescript/route.ts    |  27 ++
 examples/basic-express-typescript/server.js   |  31 ++
 .../basic-express-typescript/server.js.map    |   1 +
 examples/basic-express-typescript/server.ts   |  38 ++
 examples/basic-express-typescript/sync.js     |  54 +++
 examples/basic-express-typescript/sync.js.map |   1 +
 examples/basic-express-typescript/sync.ts     |  71 ++++
 .../basic-express-typescript/tsconfig.json    |  13 +
 fh-sync.d.ts                                  | 350 +++++++++++++++++
 index.js => fh-sync.js                        |   0
 package.json                                  |  21 +-
 scripts/pretest.sh                            |   1 +
 types/fh-sync.d.ts                            | 358 ------------------
 20 files changed, 664 insertions(+), 372 deletions(-)
 create mode 100644 .editorconfig
 create mode 100644 examples/basic-express-typescript/package.json
 create mode 100644 examples/basic-express-typescript/route.js
 create mode 100644 examples/basic-express-typescript/route.js.map
 create mode 100644 examples/basic-express-typescript/route.ts
 create mode 100644 examples/basic-express-typescript/server.js
 create mode 100644 examples/basic-express-typescript/server.js.map
 create mode 100644 examples/basic-express-typescript/server.ts
 create mode 100644 examples/basic-express-typescript/sync.js
 create mode 100644 examples/basic-express-typescript/sync.js.map
 create mode 100644 examples/basic-express-typescript/sync.ts
 create mode 100644 examples/basic-express-typescript/tsconfig.json
 create mode 100644 fh-sync.d.ts
 rename index.js => fh-sync.js (100%)
 delete mode 100644 types/fh-sync.d.ts

diff --git a/.editorconfig b/.editorconfig
new file mode 100644
index 0000000..92223bc
--- /dev/null
+++ b/.editorconfig
@@ -0,0 +1,11 @@
+# Tells the .editorconfig plugin to stop searching once it finds this file
+root = true
+
+[*]
+indent_size = 2
+indent_style = space
+charset = utf-8
+end_of_line = lf
+insert_final_newline = true
+trim_trailing_whitespace = true
+
diff --git a/.gitignore b/.gitignore
index 3a634ce..453b1e2 100755
--- a/.gitignore
+++ b/.gitignore
@@ -17,3 +17,6 @@ dump.rdb
 cov-test/
 cov-unit/
 coverage/
+
+examples/typescript/*.js
+examples/typescript/*.js.map
diff --git a/.travis.yml b/.travis.yml
index 021c636..c6a8c4f 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,14 +2,16 @@ language: node_js
 sudo: required
 node_js:
 - "0.10"
+- "4"
 - "4.4.3"
+- "6"
+- "8"
 services:
 - docker
 before_install:
 - sudo apt-get update
 - sudo apt-get install --assume-yes apache2-utils
 - npm install -g npm@2.13.5
-- npm install -g grunt-cli
 - npm config set strict-ssl false
 install: npm install
 env:
diff --git a/README.md b/README.md
index 65416ba..1503df2 100644
--- a/README.md
+++ b/README.md
@@ -78,12 +78,11 @@ Before running tests do:
 
 ```
 npm install
-npm install -g grunt-cli
 ```
 
-Then to run the tests use ```npm test```
+Then to run the tests use `npm test`
 
-## Cordova client template
+## Cordova Client Template
 
 The [Feedhenry Cordova Sync Template](https://github.com/feedhenry-templates/feedhenry-cordova-sync-app) can be used to create client application talking to the sync server.
 
diff --git a/examples/basic-express-typescript/package.json b/examples/basic-express-typescript/package.json new file mode 100644 index 0000000..db0dd6c --- /dev/null +++ b/examples/basic-express-typescript/package.json @@ -0,0 +1,25 @@ +{ + "name": "basic-express-typescript", + "version": "0.0.1", + "description": "Example of using TypeScript with fh-sync", + "main": "server.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1", + "start": "tsc && node server.js" + }, + "author": "Evan Shortiss (http://evanshortiss.com/)", + "license": "MIT", + "dependencies": { + "bluebird": "~3.5.1", + "body-parser": "~1.18.2", + "cors": "~2.8.4", + "express": "~4.16.2" + }, + "devDependencies": { + "@types/bluebird": "~3.5.18", + "@types/body-parser": "~1.16.8", + "@types/cors": "~2.8.3", + "@types/express": "~4.0.39", + "typescript": "~2.6.1" + } +} diff --git a/examples/basic-express-typescript/route.js b/examples/basic-express-typescript/route.js new file mode 100644 index 0000000..54ce65f --- /dev/null +++ b/examples/basic-express-typescript/route.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var express = require("express"); +var parsers = require("body-parser"); +var cors = require("cors"); +var sync = require("../../fh-sync"); +var router = express.Router(); +router.use(cors()); +router.use(parsers.json()); +router.post('/:datasetId', function (req, res, next) { + sync.invoke(req.params.datasetId, req.body, function (err, result) { + if (err) { + next(err); + } + else { + res.json(result); + } + }); +}); +exports.default = router; +//# sourceMappingURL=route.js.map \ No newline at end of file diff --git a/examples/basic-express-typescript/route.js.map b/examples/basic-express-typescript/route.js.map new file mode 100644 index 0000000..0f63d61 --- /dev/null +++ b/examples/basic-express-typescript/route.js.map @@ -0,0 +1 @@ +{"version":3,"file":"route.js","sourceRoot":"","sources":["route.ts"],"names":[],"mappings":";;AACA,iCAAkC;AAClC,qCAAsC;AACtC,2BAA4B;AAC5B,oCAAqC;AAErC,IAAM,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAA;AAG/B,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,CAAA;AAGlB,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;AAG1B,MAAM,CAAC,IAAI,CAAC,aAAa,EAAE,UAAC,GAAoB,EAAE,GAAqB,EAAE,IAA0B;IAEjG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,SAAS,EAAE,GAAG,CAAC,IAAI,EAAE,UAAU,GAAG,EAAE,MAAM;QAC/D,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;YACR,IAAI,CAAC,GAAG,CAAC,CAAA;QACX,CAAC;QAAC,IAAI,CAAC,CAAC;YACN,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QAClB,CAAC;IACH,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,CAAA;AAEF,kBAAe,MAAM,CAAA"} \ No newline at end of file diff --git a/examples/basic-express-typescript/route.ts b/examples/basic-express-typescript/route.ts new file mode 100644 index 0000000..5e1ad3e --- /dev/null +++ b/examples/basic-express-typescript/route.ts @@ -0,0 +1,27 @@ + +import * as express from 'express' +import * as parsers from 'body-parser' +import * as cors from 'cors' +import * as sync from '../../fh-sync' + +const router = express.Router() + + // Mobile clients typically require CORS headers to be set +router.use(cors()) + +// Need to parse incoming JSON bodies +router.use(parsers.json()) + +// All sync requests are performed using a HTTP POST +router.post('/:datasetId', (req: express.Request, res: express.Response, next: express.NextFunction) => { + // Invoke action in sync for specific dataset + sync.invoke(req.params.datasetId, req.body, function (err, result) { + if (err) { + next(err) + } else { + res.json(result) + } + }) +}) + +export default 
router diff --git a/examples/basic-express-typescript/server.js b/examples/basic-express-typescript/server.js new file mode 100644 index 0000000..9ff7d35 --- /dev/null +++ b/examples/basic-express-typescript/server.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var express = require("express"); +var sync = require("./sync"); +var route_1 = require("./route"); +var app = express(); +sync.init() + .then(startApplicationServer) + .catch(function (e) { + console.log('error occurred during startup', e); + process.exit(1); +}); +function startApplicationServer(err) { + if (err) { + console.log('error starting sync server:'); + throw err; + } + console.log('Sync initialised'); + app.use('/sync', route_1.default); + app.get('/', function (req, res) { + res.send('Sample application is running!'); + }); + app.listen(3000, function (err) { + if (err) + throw err; + console.log('\nExample app listening on port 3000!'); + console.log('\nRun the following from a terminal to get records via sync:'); + console.log('curl http://localhost:3000/sync/messages -X POST --data \'{"fn": "syncRecords"}\' -H "content-type:application/json"\n'); + }); +} +//# sourceMappingURL=server.js.map \ No newline at end of file diff --git a/examples/basic-express-typescript/server.js.map b/examples/basic-express-typescript/server.js.map new file mode 100644 index 0000000..5115dd7 --- /dev/null +++ b/examples/basic-express-typescript/server.js.map @@ -0,0 +1 @@ +{"version":3,"file":"server.js","sourceRoot":"","sources":["server.ts"],"names":[],"mappings":";;AACA,iCAAkC;AAClC,6BAA8B;AAC9B,iCAAgC;AAEhC,IAAM,GAAG,GAAG,OAAO,EAAE,CAAA;AAErB,IAAI,CAAC,IAAI,EAAE;KACR,IAAI,CAAC,sBAAsB,CAAC;KAC5B,KAAK,CAAC,UAAC,CAAC;IACP,OAAO,CAAC,GAAG,CAAC,+BAA+B,EAAE,CAAC,CAAC,CAAA;IAC/C,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;AACjB,CAAC,CAAC,CAAA;AAEJ,gCAAiC,GAAQ;IACvC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;QACR,OAAO,CAAC,GAAG,CAAC,6BAA6B,CAAC,CAAA;QAC1C,MAAM,GAAG,CAAA;IACX,CAAC;IAED,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAA;IAG/B,GAAG,CAAC,GAAG,CAAC,OAAO,EAAE,eAAU,CAAC,CAAA;IAG5B,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,UAAC,GAAG,EAAE,GAAG;QACpB,GAAG,CAAC,IAAI,CAAC,gCAAgC,CAAC,CAAA;IAC5C,CAAC,CAAC,CAAA;IAEF,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,UAAC,GAAQ;QACxB,EAAE,CAAC,CAAC,GAAG,CAAC;YAAC,MAAM,GAAG,CAAA;QAElB,OAAO,CAAC,GAAG,CAAC,uCAAuC,CAAC,CAAA;QACpD,OAAO,CAAC,GAAG,CAAC,8DAA8D,CAAC,CAAA;QAC3E,OAAO,CAAC,GAAG,CAAC,wHAAwH,CAAC,CAAA;IACvI,CAAC,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/examples/basic-express-typescript/server.ts b/examples/basic-express-typescript/server.ts new file mode 100644 index 0000000..34508fd --- /dev/null +++ b/examples/basic-express-typescript/server.ts @@ -0,0 +1,38 @@ + +import * as express from 'express' +import * as sync from './sync' +import syncRouter from './route' + +const app = express() + +sync.init() + .then(startApplicationServer) + .catch((e) => { + console.log('error occurred during startup', e) + process.exit(1) + }) + +function startApplicationServer (err: any) { + if (err) { + console.log('error starting sync server:') + throw err + } + + console.log('Sync initialised') + + // Sync express api required for sync clients. All sync clients will call this endpoint to sync data + app.use('/sync', syncRouter) + + // Default route. 
Can be used to check application is up and running + app.get('/', (req, res) => { + res.send('Sample application is running!') + }) + + app.listen(3000, (err: any) => { + if (err) throw err + + console.log('\nExample app listening on port 3000!') + console.log('\nRun the following from a terminal to get records via sync:') + console.log('curl http://localhost:3000/sync/messages -X POST --data \'{"fn": "syncRecords"}\' -H "content-type:application/json"\n') + }); +} diff --git a/examples/basic-express-typescript/sync.js b/examples/basic-express-typescript/sync.js new file mode 100644 index 0000000..62c2f72 --- /dev/null +++ b/examples/basic-express-typescript/sync.js @@ -0,0 +1,54 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +var sync = require("../../fh-sync"); +var Promise = require("bluebird"); +var MONGO_CONN_STRING = process.env.MONGO_CONNECTION_URL || 'mongodb://127.0.0.1:27017/sync'; +var REDIS_CONN_STRING = process.env.REDIS_CONNECTION_URL || 'redis://127.0.0.1:6379'; +var MONGO_OPTS = {}; +var DATASET_NAME = 'messages'; +var DATASET_OPTS = { + syncFrequency: 10 +}; +function initialiseDataset() { + return new Promise(function (resolve, reject) { + sync.init(DATASET_NAME, DATASET_OPTS, function (err) { + if (err) { + reject(err); + } + else { + sync.handleList(DATASET_NAME, function (dataset, query, meta, done) { + console.log("received request from " + query.username + " with tracking ID " + meta.trackingId); + done(null, { + '00001': { + 'item': 'item1' + }, + '00002': { + 'item': 'item2' + }, + '00003': { + 'item': 'item3' + } + }); + }); + resolve(); + } + }); + }); +} +function connect() { + return new Promise(function (resolve, reject) { + sync.connect(MONGO_CONN_STRING, MONGO_OPTS, REDIS_CONN_STRING, function (err) { + if (err) { + reject(err); + } + else { + resolve(); + } + }); + }); +} +function init() { + return connect().then(initialiseDataset); +} +exports.init = init; +//# sourceMappingURL=sync.js.map \ No newline at end of file diff --git a/examples/basic-express-typescript/sync.js.map b/examples/basic-express-typescript/sync.js.map new file mode 100644 index 0000000..4c785df --- /dev/null +++ b/examples/basic-express-typescript/sync.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"sync.js","sourceRoot":"","sources":["sync.ts"],"names":[],"mappings":";;AACA,oCAAqC;AACrC,kCAAmC;AAGnC,IAAM,iBAAiB,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAoB,IAAI,gCAAgC,CAAC;AAC/F,IAAM,iBAAiB,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAoB,IAAI,wBAAwB,CAAC;AAGvF,IAAM,UAAU,GAAG,EAAE,CAAA;AAGrB,IAAM,YAAY,GAAG,UAAU,CAAA;AAC/B,IAAM,YAAY,GAAG;IACnB,aAAa,EAAE,EAAE;CAClB,CAAC;AAWF;IACE,MAAM,CAAC,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,YAAY,EAAE,UAAC,GAAG;YACxC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACR,MAAM,CAAC,GAAG,CAAC,CAAA;YACb,CAAC;YAAC,IAAI,CAAC,CAAC;gBAGN,IAAI,CAAC,UAAU,CAAC,YAAY,EAAE,UAAC,OAAO,EAAE,KAAY,EAAE,IAAU,EAAE,IAAI;oBACpE,OAAO,CAAC,GAAG,CAAC,2BAAyB,KAAK,CAAC,QAAQ,0BAAqB,IAAI,CAAC,UAAY,CAAC,CAAA;oBAE1F,IAAI,CAAC,IAAI,EAAE;wBACT,OAAO,EAAE;4BACP,MAAM,EAAE,OAAO;yBAChB;wBACD,OAAO,EAAE;4BACP,MAAM,EAAE,OAAO;yBAChB;wBACD,OAAO,EAAE;4BACP,MAAM,EAAE,OAAO;yBAChB;qBACF,CAAC,CAAA;gBACJ,CAAC,CAAC,CAAA;gBAEF,OAAO,EAAE,CAAA;YACX,CAAC;QACH,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;AACJ,CAAC;AAED;IACE,MAAM,CAAC,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,UAAU,EAAE,iBAAiB,EAAE,UAAC,GAAG;YACjE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACR,MAAM,CAAC,GAAG,CAAC,CAAA;YACb,CAAC;YAAC,IAAI,CAAC,CAAC;gBACN,OAAO,EAAE,CAAA;YACX,CAAC;QACH,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAA;AACJ,CAAC;AAED;IACE,MAAM,CAAC,OAAO,EAAE,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAA;AAC1C,CAAC;AAFD,oBAEC"} \ No newline at end of file diff --git a/examples/basic-express-typescript/sync.ts b/examples/basic-express-typescript/sync.ts new file mode 100644 index 0000000..75142bc --- /dev/null +++ b/examples/basic-express-typescript/sync.ts @@ -0,0 +1,71 @@ + +import * as sync from '../../fh-sync' +import * as Promise from 'bluebird' + +// Sync framework requires mongodb and redis to be running +const MONGO_CONN_STRING = process.env.MONGO_CONNECTION_URL || 'mongodb://127.0.0.1:27017/sync'; +const REDIS_CONN_STRING = process.env.REDIS_CONNECTION_URL || 'redis://127.0.0.1:6379'; + +// Options to pass to the mongodb driver +const MONGO_OPTS = {} + +// Define our dataset name and the option such as how often to sync to system of record +const DATASET_NAME = 'messages' +const DATASET_OPTS = { + syncFrequency: 10 // seconds +}; + + +interface Query { + username: string +} + +interface Meta { + trackingId: string +} + +function initialiseDataset () { + return new Promise((resolve, reject) => { + sync.init(DATASET_NAME, DATASET_OPTS, (err) => { + if (err) { + reject(err) + } else { + // Sample list handler. Uses a custom query and metadata interface to provide + // better typings in the handler logic. 
+ sync.handleList(DATASET_NAME, (dataset, query: Query, meta: Meta, done) => { + console.log(`received request from ${query.username} with tracking ID ${meta.trackingId}`) + + done(null, { + '00001': { + 'item': 'item1' + }, + '00002': { + 'item': 'item2' + }, + '00003': { + 'item': 'item3' + } + }) + }) + + resolve() + } + }) + }) +} + +function connect () { + return new Promise((resolve, reject) => { + sync.connect(MONGO_CONN_STRING, MONGO_OPTS, REDIS_CONN_STRING, (err) => { + if (err) { + reject(err) + } else { + resolve() + } + }); + }) +} + +export function init () { + return connect().then(initialiseDataset) +} diff --git a/examples/basic-express-typescript/tsconfig.json b/examples/basic-express-typescript/tsconfig.json new file mode 100644 index 0000000..14d0092 --- /dev/null +++ b/examples/basic-express-typescript/tsconfig.json @@ -0,0 +1,13 @@ +{ + "compilerOptions": { + "module": "commonjs", + "noImplicitAny": true, + "removeComments": true, + "preserveConstEnums": true, + "sourceMap": true, + "target": "es5" + }, + "include": [ + "server.ts" + ] +} diff --git a/fh-sync.d.ts b/fh-sync.d.ts new file mode 100644 index 0000000..73482e1 --- /dev/null +++ b/fh-sync.d.ts @@ -0,0 +1,350 @@ +// Type definitions for fh-sync +// Project: https://github.com/feedhenry/fh-sync +// Maintainer feedhenry-dev@redhat.com + +declare module SyncCloud { + + /** + * Valid actions (the "fn" param) that can be passed to sync.invoke + */ + type InvokeAction = 'sync'|'syncRecords'|'listCollisions'|'removeCollision' + + /** + * Options that can be passed to sync.invoke + */ + interface InvokeOptions { + fn: InvokeAction + + // TODO: we should define this in more detail + [key: string]: any + } + + /** + * Interfaces that describe how sync responses should be structured + */ + namespace HandlerResults { + interface Create { + uid: string + data: Object + } + + interface Read { + [key: string]: any + } + + interface Update { + [key: string]: any + } + + interface Delete { + [key: string]: any + } + + interface List { + [uid: string]: Object + } + } + + /** + * Unique callback structures for sync handlers. + * + * Can be used by other modules to create pre-built sync compliant handlers. + */ + namespace HandlerFunctions { + type Create = (dataset: string, data: Object, metaData: Object, done: StandardCb) => void + type Read = (dataset: string, uid: string, metaData: Object, done: StandardCb) => void + type Update = (dataset: string, uid: string, metaData: Object, done: StandardCb) => void + type Delete = (dataset: string, queryParams: Object, metaData: Object, done: StandardCb) => void + type List = (dataset: string, queryParams: Object, metaData: Object, done: StandardCb) => void + type Collision = (datasetId: string, hash: string, timestamp: number, uid: string, pre: Object, post: Object, metaData: Object, callback: StandardCb) => void + type ListCollisions = (datasetId: string, metaData: Object, callback: StandardCb<{ [hash: string]: Object }>) => void + type RemoveCollision = (datasetId: string, collision_hash: string, metaData: Object, callback: StandardCb) => void + type Interceptor = (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void + type Hash = (datasetId: string, data: Object) => void + } + + /** + * Options used to initialize Sync Server + */ + interface SyncGlobalOptions { + /** How often pending workers should check for the next job, in ms. Default: 1 */ + pendingWorkerInterval?: number; + /** The concurrency value of the pending workers. Default is 1. 
Set to 0 to disable the pendingWorkers completely */
+    pendingWorkerConcurrency?: number;
+    /** The backoff strategy for the pending worker to use.
+     * Default strategy is `exp` (exponential) with a max delay of 60s. The min value will always be the same as `pendingWorkerInterval`.
+     * The other valid strategy is `fib` (fibonacci). Setting it to anything else will disable the backoff behavior */
+    pendingWorkerBackoff?: PendingWorkerBackoff;
+    /** How often ack workers should check for the next job, in ms. Default: 1 */
+    ackWorkerInterval?: number;
+    /** The concurrency value of the ack workers. Default is 1. Set to 0 to disable the ackWorker completely */
+    ackWorkerConcurrency?: number;
+    /**
+     * The backoff strategy for the ack worker to use.
+     * Default strategy is `exp` (exponential) with a max delay of 60s. The min value will always be the same as `ackWorkerInterval`.
+     * The other valid strategy is `fib` (fibonacci). Setting it to anything else will disable the backoff behavior */
+    ackWorkerBackoff?: AckWorkerBackoff;
+    /** How often sync workers should check for the next job, in ms. Default: 100 */
+    syncWorkerInterval?: number;
+    /** The concurrency value of the sync workers. Default is 1. Set to 0 to disable the syncWorker completely. */
+    syncWorkgerConcurrency?: number;
+    /** The backoff strategy for the sync worker to use.
+     * Default strategy is `exp` (exponential) with a max delay of 1s. The min value will always be the same as `syncWorkerInterval`.
+     * Other valid strategies are `none` and `fib` (fibonacci). */
+    syncWorkerBackoff?: SyncWorkerBackoff;
+    /** How often the scheduler should check the datasetClients, in ms. Default: 500 */
+    schedulerInterval?: number;
+    /** The max time a scheduler can hold the lock for, in ms. Default: 20000 */
+    schedulerLockMaxTime?: number;
+    /** The default lock name for the sync scheduler */
+    schedulerLockName?: string;
+    /** The default concurrency value when updating dataset clients in the sync API. Default is 10. In most cases this value should not need to be changed. */
+    datasetClientUpdateConcurrency?: number;
+    /** Enable/disable collection of sync stats so they can be queried via an endpoint */
+    collectStats?: boolean;
+    /** The number of records to keep in order to compute the stats data. Default is 1000. */
+    statsRecordsToKeep?: number;
+    /** How often the stats should be collected. In milliseconds. */
+    collectStatsInterval?: number;
+    /** The host of the influxdb server. If set, the metrics data will be sent to the influxdb server. */
+    metricsInfluxdbHost?: string;
+    /** The port of the influxdb server. It should be a UDP port. */
+    metricsInfluxdbPort?: number;
+    /** The concurrency value for the component metrics. Default is 10. This value should be increased if there are many concurrent workers. Otherwise the memory usage of the app could go up. */
+    metricsReportConcurrency?: number;
+    /** Whether to cache the dataset client records using redis. This can help improve performance for the syncRecords API.
+     * Can be turned on if no records are shared between many different dataset clients. Default is false. */
+    useCache?: boolean;
+    /** The TTL (Time To Live) value for the messages on the queue. In seconds. Defaults to 24 hours. */
+    queueMessagesTTL?: string;
+    /** Specify the maximum retention time of an inactive datasetClient. Any inactive datasetClient that is older than this period of time will be removed. */
+    datasetClientCleanerRetentionPeriod?: string;
+    /** Specify the frequency the datasetClient cleaner should run.
Default every hour ('1h').*/ + datasetClientCleanerCheckFrequency?: string; + } + + /** + * Backoff Strategy + * Example: {strategy: 'exp', max: 60*1000}, + */ + interface PendingWorkerBackoff { + strategy: string; + max: number; + } + /** + * Backoff Strategy + * Example: {strategy: 'exp', max: 60*1000}, + */ + interface AckWorkerBackoff { + strategy: string; + max: number; + } + + /** + * Backoff Strategy + * Example: {strategy: 'exp', max: 60*1000}, + */ + interface SyncWorkerBackoff { + strategy: string; + max: number; + } + + type StandardCb = (err: Error | null | string | undefined, res?: T | undefined) => void; + type NoRespCb = (err: Error | string | undefined) => void; + + /** + * Options used to initialize sync for specific dataset + */ + interface SyncInitOptions { + /** + * Value indicating how often the dataset client should be sync with the backend. Matches the clients default + * frequency. Value in seconds + */ + syncFrequency?: number, + + /** + * Value that will be used to decide if the dataset client is not active anymore. + */ + clientSyncTimeout?: number, + + /** + * Value that determines how long it should wait for the backend list operation to complete + */ + backendListTimeout?: number, + + /** + * Specify the max wait time the dataset can be scheduled to sync again after its previous schedule, in seconds. + */ + maxScheduleWaitTime?: number + } + + /** + * Parameters object for request and response interceptors + */ + interface SyncInterceptParams { + query_params: any; + metaData: any; + } + /** + * Connect sync server to mongo and redis. + * + * Returns the MongoDB and Redis clients being used internally. + */ + function connect(mongoDBConnectionUrl: string, mongoDBConnectionOption: any, redisUrl: string, callback: (err: any, mongoDbClient?: any, redisClient?: any) => void): void; + + /** + * Initialize sync for specific dataset. + * + * The passed datasetId must be a unique string. + */ + function init(datasetId: string, options: SyncInitOptions, callback: StandardCb): void; + + /** + * Internal method used to invoke sync methods. Should be used to handle json request from client. + * + * Supported operations are 'sync', 'syncRecords', 'listCollisions', 'removeCollision' and should be passed as a + * "fn" key in the options object. + */ + function invoke(datasetId: string, options: InvokeOptions, callback: (err: any, result: any) => void): void; + + /** + * Stop sync loop for the given datasetId. + * + * Invokes the passed callback once all operations are stopped. + */ + function stop(datasetId: string, onStop: NoRespCb): void; + + /** + * Stop sync loop for all datasets. + * + * Invokes the passed callback once all operations are stopped. + */ + function stopAll(onStop: StandardCb): void; + + /** + * Provide a custom list implementation for the specified dataset. + */ + function handleList(datasetId: string, onList: HandlerFunctions.List): void; + + /** + * Provide a custom implementation of the list operation for all datasets + */ + function globalHandleList(onList: HandlerFunctions.List): void; + + /** + * Provide a custom create implementation for the specified dataset. + */ + function handleCreate(datasetId: string, onCreate: HandlerFunctions.Create): void; + + /** + * Provide a custom implementation of the create operation for all datasets + */ + function globalHandleCreate(onCreate: HandlerFunctions.Create): void; + + /** + * Provide a custom read implementation for the specified dataset. 
+ */ + function handleRead(datasetId: string, onRead: HandlerFunctions.Read): void; + + /** + * Provide a custom implementation of the read operation for all datasets. + */ + function globalHandleRead(onRead: HandlerFunctions.Read): void; + + /** + * Provide a custom update implementation for the specified dataset. + */ + function handleUpdate(datasetId: string, onUpdate: HandlerFunctions.Update): void; + + /** + * Provide a custom implementation of the update operation for all datasets. + */ + function globalHandleUpdate(onUpdate: HandlerFunctions.Update): void; + + /** + * Provide a custom delete implementation for the specified dataset. + */ + function handleDelete(datasetId: string, onDelete: HandlerFunctions.Delete): void; + + /** + * Provide a custom implementation of the delete operation for all datasets. + */ + function globalHandleDelete(onDelete: HandlerFunctions.Delete): void; + + /** + * Provide a custom collision handler implementation for the specified dataset. + */ + function handleCollision(datasetId: string, onCollision: HandlerFunctions.Collision): void; + + /** + * Provide a custom implementation of the collision handler for all datasets. + */ + function globalHandleCollision(onCollision: HandlerFunctions.Collision): void; + + /** + * Provide a custom collision list handler implementation for the specified dataset. + */ + function listCollisions(datasetId: string, onList: HandlerFunctions.ListCollisions): void; + + /** + * Provide a custom implementation of the list collision handler for all datasets. + */ + function globalListCollisions(onList: HandlerFunctions.ListCollisions): void; + + /** + * Provide a custom collision removal handler for the specified dataset. + */ + function removeCollision(datasetId: string, onRemove: HandlerFunctions.RemoveCollision): void; + + /** + * Allows developers to provide a request interceptor for the given datasetId. + * + * Facilitates performing custom operations prior to invoking the required sync method. + * + * Useful for performing authorisation checks, logging, etc. + */ + function interceptRequest(datasetId: string, onIntercept: HandlerFunctions.Interceptor): void; + + /** + * Allows developers to provide a response interceptor for the given datasetId. + * + * Facilitates performing custom operations after internal sync operations have completed. + */ + function interceptResponse(datasetId: string, onIntercept: HandlerFunctions.Interceptor): void; + + /** + * Override global options utilised by the library. + */ + function setConfig(config: SyncGlobalOptions): void; + + /** + * Allows developers to provide a global request interceptor. + * + * Facilitates performing custom operations prior to invoking the required sync method. + * + * Useful for performing authorisation checks, logging, etc. + */ + function globalInterceptRequest(onIntercept: HandlerFunctions.Interceptor): void; + + /** + * Allows developers to provide a response interceptor for all datasets. + * + * Facilitates performing custom operations after internal sync operations have completed. + */ + function globalInterceptResponse(onIntercept: HandlerFunctions.Interceptor): void; + + /** + * Sets a custom global hashing method. + * + * This is used to determine if a difference exists between the previous and current state of a record. + */ + function setGlobalHashFn(datasetId: string, hashFunction: HandlerFunctions.Hash): void; + + /** + * Sets a custom hashing method for the given datasetId. 
+ * + * This is used to determine if a difference exists between the previous and current state of a record. + */ + function setRecordHashFn(datasetId: string, hashFunction: HandlerFunctions.Hash): void; + } + export = SyncCloud; diff --git a/index.js b/fh-sync.js similarity index 100% rename from index.js rename to fh-sync.js diff --git a/package.json b/package.json index d4016c0..02f242e 100644 --- a/package.json +++ b/package.json @@ -1,8 +1,8 @@ { "name": "fh-sync", - "version": "1.0.13", + "version": "1.0.14", "description": "FeedHenry Data Synchronization Server", - "main": "index.js", + "main": "fh-sync.js", "dependencies": { "async": "2.1.5", "backoff": "2.5.0", @@ -16,19 +16,20 @@ "underscore": "1.7.0" }, "devDependencies": { - "grunt": "^0.4.5", - "grunt-cli": "^1.2.0", - "grunt-fh-build": "^0.5.0", - "grunt-mocha-test": "^0.13.2", + "grunt": "~0.4.5", + "grunt-cli": "~1.2.0", + "grunt-fh-build": "~0.5.0", + "grunt-mocha-test": "~0.13.2", "istanbul": "0.2.14", - "jshint": "^2.5.2", + "jshint": "~2.5.2", "mocha": "2.4.5", "proxyquire": "1.4.0", - "sinon": "^1.17.5", - "typedoc": "^0.7.1", + "sinon": "~1.17.5", + "typedoc": "~0.7.1", + "typescript": "~2.6.1", "valid-url": "1.0.9" }, - "types": "./types/fh-sync.d.ts", + "types": "fh-sync.d.ts", "scripts": { "doc": "typedoc --includeDeclarations --excludeExternals --out docs/api ./types/fh-sync.d.ts", "pretest": "./scripts/pretest.sh", diff --git a/scripts/pretest.sh b/scripts/pretest.sh index f62be3c..078b906 100755 --- a/scripts/pretest.sh +++ b/scripts/pretest.sh @@ -13,4 +13,5 @@ docker pull mongo:$MONGODB_VERSION docker rm -f $(docker ps -a -q --filter name=mongodb-fh-mbaas-api) docker run -d -p 127.0.0.1:27017:27017 --name mongodb-fh-mbaas-api mongo:$MONGODB_VERSION mongod --smallfiles #give it some time to complete starting +echo "waiting for services to start..." sleep 30s diff --git a/types/fh-sync.d.ts b/types/fh-sync.d.ts deleted file mode 100644 index c986b41..0000000 --- a/types/fh-sync.d.ts +++ /dev/null @@ -1,358 +0,0 @@ -// Type definitions for fh-sync -// Project: https://github.com/feedhenry/fh-sync -// Maintainer feedhenry-dev@redhat.com - -declare module SyncCloud { - /** - * Options used to initialize Sync Server - */ - interface SyncGlobalOptions { - /** How often pending workers should check for the next job, in ms. Default: 1 */ - pendingWorkerInterval?: number; - /** The concurrency value of the pending workers. Default is 1. Can set to 0 to disable the pendingWorkers completely */ - pendingWorkerConcurrency?: number; - /** The backoff strategy for the pending worker to use. - * Default strategy is `exp` (exponential) with a max delay of 60s. The min value will always be the same as `pendingWorkerInterval` - * The other valid strategy is `fib` (fibonacci). Set it to anything else will disable the backoff behavior */ - pendingWorkerBackoff?: PendingWorkerBackoff; - /** How often ack workers should check for the next job, in ms. Default: 1 */ - ackWorkerInterval?: number; - /** The concurrency value of the ack workers. Default is 1. Can set to 0 to disable the ackWorker completely */ - ackWorkerConcurrency?: number; - /** - * The backoff strategy for the ack worker to use. - * Default strategy is `exp` (exponential) with a max delay of 60s. The min value will always be the same as `ackWorkerInterval` - * The other valid strategy is `fib` (fibonacci). 
Set it to anything else will disable the backoff behavior */ - ackWorkerBackoff?: AckWorkerBackoff; - /** How often sync workers should check for the next job, in ms. Default: 100 */ - syncWorkerInterval?: number; - /** The concurrency value of the sync workers. Default is 1. Can set to 0 to disable the syncWorker completely. */ - syncWorkgerConcurrency?: number; - /** the backoff strategy for the sync worker to use. - * Default strategy is `exp` (exponential) with a max delay of 1s. The min value will always be the same as `syncWorkerInterval` - * Other valid strategies are `none` and `fib` (fibonacci).*/ - syncWorkerBackoff?: SyncWorkerBackoff; - /** How often the scheduler should check the datasetClients, in ms. Default: 500 */ - schedulerInterval?: number; - /** The max time a scheduler can hold the lock for, in ms. Default: 20000 */ - schedulerLockMaxTime?: number; - /** The default lock name for the sync scheduler */ - schedulerLockName?: string; - /** The default concurrency value when update dataset clients in the sync API. Default is 10. In most case this value should not need to be changed */ - datasetClientUpdateConcurrency?: number; - /** Enable/disable collect sync stats to allow query via an endpoint */ - collectStats?: boolean; - /** The number of records to keep in order to compute the stats data. Default is 1000. */ - statsRecordsToKeep?: number; - /** How often the stats should be collected. In milliseconds. */ - collectStatsInterval?: number; - /** The host of the influxdb server. If set, the metrics data will be sent to the influxdb server. */ - metricsInfluxdbHost?: string; - /** The port of the influxdb server. It should be a UDP port. */ - metricsInfluxdbPort?: number; - /** The concurrency value for the component metrics. Default is 10. This value should be increased if there are many concurrent workers. Otherwise the memory useage of the app could go up.*/ - metricsReportConcurrency?: number; - /** If cache the dataset client records using redis. This can help improve performance for the syncRecords API. - * Can be turned on if there are no records are shared between many different dataset clients. Default is false.*/ - useCache?: boolean; - /**The TTL (Time To Live) value for the messages on the queue. In seconds. Default to 24 hours. */ - queueMessagesTTL?: string; - /** Specify the maximum retention time of an inactive datasetClient. Any inactive datasetClient that is older than this period of time will be removed.*/ - datasetClientCleanerRetentionPeriod?: string; - /** Specify the frequency the datasetClient cleaner should run. Default every hour ('1h').*/ - datasetClientCleanerCheckFrequency?: string; - } - - /** - * Backoff Strategy - * Example: {strategy: 'exp', max: 60*1000}, - */ - interface PendingWorkerBackoff { - strategy: string; - max: number; - } - /** - * Backoff Strategy - * Example: {strategy: 'exp', max: 60*1000}, - */ - interface AckWorkerBackoff { - strategy: string; - max: number; - } - - /** - * Backoff Strategy - * Example: {strategy: 'exp', max: 60*1000}, - */ - interface SyncWorkerBackoff { - strategy: string; - max: number; - } - - type StandardCb = (err: Error | string | undefined, res?: T | undefined) => void; - type NoRespCb = (err: Error | string | undefined) => void; - - /** - * Options used to initialize sync for specific dataset - */ - interface SyncInitOptions { - /** - * Value indicating how often the dataset client should be sync with the backend. Matches the clients default - * frequency. 
Value in seconds - */ - syncFrequency?: number, - - /** - * Value that will be used to decide if the dataset client is not active anymore. - */ - clientSyncTimeout?: number, - - /** - * Value that determines how long it should wait for the backend list operation to complete - */ - backendListTimeout?: number, - - /** - * Specify the max wait time the dataset can be scheduled to sync again after its previous schedule, in seconds. - */ - maxScheduleWaitTime?: number - } - - /** - * Parameters object for request and response interceptors - */ - interface SyncInterceptParams { - query_params: any; - metaData: any; - } - /** - * Connect sync server to mongo and redis - * - * @param mongoDBConnectionUrl - * @param mongoDBConnectionOption - * @param redisUrl - * @param cb - */ - function connect(mongoDBConnectionUrl: string, mongoDBConnectionOption: any, redisUrl: string, callback: (err: any, mongoDbClient?: any, redisClient?: any) => void): void; - - /** - * Initialize sync for specific dataset - * - * @param datasetId - * @param options - * @param callback - */ - function init(datasetId: string, options: SyncInitOptions, callback: StandardCb): void; - - /** - * Internal method used to invoke sync methods. Used to handle json request from client. - * Supported operations 'sync', 'syncRecords', 'listCollisions', 'removeCollision' - * - * @param datasetId - * @param options - * @param callback - */ - function invoke(datasetId: string, options: any, callback: (err: any, result: any) => void): void; - - /** - * Stop sync loop for dataset - * - * @param datasetId - * @param onStop callback called when operation is finished - */ - function stop(datasetId: string, onStop: NoRespCb): void; - - /** - * Stop sync loop for all datasets - * - * @param datasetId - * @param onStop callback called when operation is finished - */ - function stopAll(onStop: StandardCb): void; - - /** - * Handle list operation for specific dataset. 
- * Method may be used to override default data handler to have control over how sync is retrieving and storing data - * - * @param datasetId - unique id of the dataset (usually collection, table in your database) - * @param onList - function called to retrieve data - * params - set of call parameters (usually query string) used to filter out data - * metadtata - metdata for query - can contain any additional information that is not part of the query - */ - function handleList(datasetId: string, onList: (datasetId: string, params: any, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle list operation for all datasets - * Method may be used to override default data handler to have control over how sync is retrieving and storing data - * - * @param onList - function called to retrieve data - * params - set of call parameters (usually query string) used to filter out data - * metadtata - metdata for query - can contain any additional information that is not part of the query - */ - function globalHandleList(onList: (datasetId: string, params: any, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle create operation for specific dataset - * Method may be used to override default data handler to have control over how sync is retrieving and storing data - * - * @param datasetId - unique id of the dataset (usually collection, table in your database) - * @param onCreate - function called to create data entry - * @param data - data that needs to be stored - * @param metadtata - metdata for query - can contain any additional information that is not part of the query - */ - function handleCreate(datasetId: string, onCreate: (datasetId: string, data: any, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle create operation for all datasets - * Method may be used to override default data handler to have control over how sync is retrieving and storing data - * - * @param onCreate - function called to create data entry - * data - data that needs to be stored - * metadtata - metdata for query - can contain any additional information that is not part of the query - */ - function globalHandleCreate(onCreate: (datasetId: string, data: any, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle read operation for specific dataset - * Method may be used to override default data handler to have control over how sync is retrieving and storing data - * - * @param datasetId - unique id of the dataset (usually collection, table in your database) - * @param onRead - function called to read single data entry - * uid - data identifier - * metadtata - metdata for query - can contain any additional information that is not part of the query - */ - function handleRead(datasetId: string, onRead: (datasetId: string, uid: any, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle read operation for all datasets - * Method may be used to override default data handler to have control over how sync is retrieving and storing data - * - * @param onRead - function called to read single data entry - * uid - data identifier - * metadtata - metdata for query - can contain any additional information that is not part of the query - */ - function globalHandleRead(onRead: (datasetId: string, uid: string, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle update operation for specific dataset - * Method may be used to override default data handler to have control over how sync is retrieving and storing data - * - * @param 
datasetId - unique id of the dataset (usually collection, table in your database) - * @param onUpdate - function called to update single data entry - * uid - data identifier - * data - data that needs to be stored - * metadtata - metdata for query - can contain any additional information that is not part of the query - */ - function handleUpdate(datasetId: string, onUpdate: (datasetId: string, uid: string, data: any, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle update operation for all datasets - * Method may be used to override default data handler to have control over how sync is retrieving and storing data - * - * @param onUpdate - function called to update single data entry - * uid - data identifier - * data - data that needs to be stored - * metadtata - metdata for query - can contain any additional information that is not part of the query - */ - function globalHandleUpdate(onUpdate: (datasetId: string, uid: string, data: any, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle delete operation for specific dataset - * Method may be used to override default data handler to have control over how sync is retrieving and storing data - * - * @param datasetId - unique id of the dataset (usually collection, table in your database) - * @param onDelete - function called to delete single data entry - * uid - data identifier - * metadtata - metdata for query - can contain any additional information that is not part of the query - */ - function handleDelete(datasetId: string, onDelete: (datasetId: string, uid: string, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle delete operation for all datasets - * Method may be used to override default data handler to have control over how sync is retrieving and storing data - * - * @param onDelete - function called to delete single data entry - * uid - data identifier - * metadtata - metdata for query - can contain any additional information that is not part of the query - */ - function globalHandleDelete(onDelete: (datasetId: string, uid: string, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle data collision for specific dataset (when both entries were changed) - * - * @param datasetId - * @param onCollision method called on collision - */ - function handleCollision(datasetId: string, onCollision: (datasetId: string, hash: string, timestamp: any, uid: string, pre: any, post: any, metaData: any, callback: StandardCb) => void): void; - - /** - * Handle data collision for all managed datasets (when both entries were changed) - * - * @param datasetId - * @param onCollision method called on collision - */ - function globalHandleCollision(onCollision: (datasetId: string, hash: string, timestamp: Date, uid: string, pre: any, post: any, metaData: any, callback: StandardCb) => void): void; - - /** - * List collisions for specific dataset - * - * @param datasetId - * @param onList - */ - function listCollisions(datasetId: string, onList: (datasetId: string, metaData: any, callback: StandardCb<{ [hash: string]: any }>) => void): void; - - /** - * List collisions for all datasets - * - * @param datasetId - * @param onList - */ - function globalListCollisions(onList: (datasetId: string, metaData: any, callback: StandardCb<{ [hash: string]: any }>) => void): void; - - /** - * Remove collision from dataset? 
- */ - function removeCollision(datasetId: string, onRemove: (datasetId: string, collision_hash: string, metaData: any, callback: StandardCb) => void): void; - - /** - * Request interceptor for dataset - allows to perform custom operations before executing sync method. - */ - function interceptRequest(datasetId: string, onIntercept: (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void): void; - - /** - * Response interceptor for dataset - allows to perform custom operations after executing sync method. - */ - function interceptResponse(datasetId: string, onIntercept: (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void): void; - - /** - * Set configuration for sync - */ - function setConfig(config: SyncGlobalOptions): void; - - /** - * Request interceptor for all sync calls - allows to perform custom operations after executing sync method. - */ - function globalInterceptRequest(onIntercept: (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void): void; - - /** - * Response interceptor for all sync calls - allows to perform custom operations after executing sync method. - */ - function globalInterceptResponse(onIntercept: (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void): void; - - /** - * Sets custom global hashing method for determining if objects were changed. - * - * @param datasetId - * @param hashFunction allows to perform hashing for array of hashes returned for specific datasets - */ - function setGlobalHashFn(datasetId: string, hashFunction: (target: string[]) => string): void; - - /** - * Sets custom dataset hashing method for determining if objects were changed. - * - * @param datasetId - * @param hashFunction allows to perform hashing for dataset - */ - function setRecordHashFn(datasetId: string, hashFunction: (target: any) => string): void; -} -export = SyncCloud; From 6a2853e4512a3f0df1fd0784c1048b2d5f0827b7 Mon Sep 17 00:00:00 2001 From: Evan Shortiss Date: Tue, 21 Nov 2017 15:31:32 -0800 Subject: [PATCH 02/12] update ignores --- .gitignore | 4 +- examples/basic-express-typescript/route.js | 21 -------- .../basic-express-typescript/route.js.map | 1 - examples/basic-express-typescript/server.js | 31 ----------- .../basic-express-typescript/server.js.map | 1 - examples/basic-express-typescript/sync.js | 54 ------------------- examples/basic-express-typescript/sync.js.map | 1 - 7 files changed, 2 insertions(+), 111 deletions(-) delete mode 100644 examples/basic-express-typescript/route.js delete mode 100644 examples/basic-express-typescript/route.js.map delete mode 100644 examples/basic-express-typescript/server.js delete mode 100644 examples/basic-express-typescript/server.js.map delete mode 100644 examples/basic-express-typescript/sync.js delete mode 100644 examples/basic-express-typescript/sync.js.map diff --git a/.gitignore b/.gitignore index 453b1e2..0028934 100755 --- a/.gitignore +++ b/.gitignore @@ -18,5 +18,5 @@ cov-test/ cov-unit/ coverage/ -examples/typescript/*.js -examples/typescript/*.js.map +examples/basic-express-typescript/*.js +examples/basic-express-typescript/*.js.map diff --git a/examples/basic-express-typescript/route.js b/examples/basic-express-typescript/route.js deleted file mode 100644 index 54ce65f..0000000 --- a/examples/basic-express-typescript/route.js +++ /dev/null @@ -1,21 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -var express = require("express"); -var parsers = 
require("body-parser"); -var cors = require("cors"); -var sync = require("../../fh-sync"); -var router = express.Router(); -router.use(cors()); -router.use(parsers.json()); -router.post('/:datasetId', function (req, res, next) { - sync.invoke(req.params.datasetId, req.body, function (err, result) { - if (err) { - next(err); - } - else { - res.json(result); - } - }); -}); -exports.default = router; -//# sourceMappingURL=route.js.map \ No newline at end of file diff --git a/examples/basic-express-typescript/route.js.map b/examples/basic-express-typescript/route.js.map deleted file mode 100644 index 0f63d61..0000000 --- a/examples/basic-express-typescript/route.js.map +++ /dev/null @@ -1 +0,0 @@ -{"version":3,"file":"route.js","sourceRoot":"","sources":["route.ts"],"names":[],"mappings":";;AACA,iCAAkC;AAClC,qCAAsC;AACtC,2BAA4B;AAC5B,oCAAqC;AAErC,IAAM,MAAM,GAAG,OAAO,CAAC,MAAM,EAAE,CAAA;AAG/B,MAAM,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,CAAA;AAGlB,MAAM,CAAC,GAAG,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAA;AAG1B,MAAM,CAAC,IAAI,CAAC,aAAa,EAAE,UAAC,GAAoB,EAAE,GAAqB,EAAE,IAA0B;IAEjG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,MAAM,CAAC,SAAS,EAAE,GAAG,CAAC,IAAI,EAAE,UAAU,GAAG,EAAE,MAAM;QAC/D,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;YACR,IAAI,CAAC,GAAG,CAAC,CAAA;QACX,CAAC;QAAC,IAAI,CAAC,CAAC;YACN,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,CAAA;QAClB,CAAC;IACH,CAAC,CAAC,CAAA;AACJ,CAAC,CAAC,CAAA;AAEF,kBAAe,MAAM,CAAA"} \ No newline at end of file diff --git a/examples/basic-express-typescript/server.js b/examples/basic-express-typescript/server.js deleted file mode 100644 index 9ff7d35..0000000 --- a/examples/basic-express-typescript/server.js +++ /dev/null @@ -1,31 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -var express = require("express"); -var sync = require("./sync"); -var route_1 = require("./route"); -var app = express(); -sync.init() - .then(startApplicationServer) - .catch(function (e) { - console.log('error occurred during startup', e); - process.exit(1); -}); -function startApplicationServer(err) { - if (err) { - console.log('error starting sync server:'); - throw err; - } - console.log('Sync initialised'); - app.use('/sync', route_1.default); - app.get('/', function (req, res) { - res.send('Sample application is running!'); - }); - app.listen(3000, function (err) { - if (err) - throw err; - console.log('\nExample app listening on port 3000!'); - console.log('\nRun the following from a terminal to get records via sync:'); - console.log('curl http://localhost:3000/sync/messages -X POST --data \'{"fn": "syncRecords"}\' -H "content-type:application/json"\n'); - }); -} -//# sourceMappingURL=server.js.map \ No newline at end of file diff --git a/examples/basic-express-typescript/server.js.map b/examples/basic-express-typescript/server.js.map deleted file mode 100644 index 5115dd7..0000000 --- a/examples/basic-express-typescript/server.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"server.js","sourceRoot":"","sources":["server.ts"],"names":[],"mappings":";;AACA,iCAAkC;AAClC,6BAA8B;AAC9B,iCAAgC;AAEhC,IAAM,GAAG,GAAG,OAAO,EAAE,CAAA;AAErB,IAAI,CAAC,IAAI,EAAE;KACR,IAAI,CAAC,sBAAsB,CAAC;KAC5B,KAAK,CAAC,UAAC,CAAC;IACP,OAAO,CAAC,GAAG,CAAC,+BAA+B,EAAE,CAAC,CAAC,CAAA;IAC/C,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;AACjB,CAAC,CAAC,CAAA;AAEJ,gCAAiC,GAAQ;IACvC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;QACR,OAAO,CAAC,GAAG,CAAC,6BAA6B,CAAC,CAAA;QAC1C,MAAM,GAAG,CAAA;IACX,CAAC;IAED,OAAO,CAAC,GAAG,CAAC,kBAAkB,CAAC,CAAA;IAG/B,GAAG,CAAC,GAAG,CAAC,OAAO,EAAE,eAAU,CAAC,CAAA;IAG5B,GAAG,CAAC,GAAG,CAAC,GAAG,EAAE,UAAC,GAAG,EAAE,GAAG;QACpB,GAAG,CAAC,IAAI,CAAC,gCAAgC,CAAC,CAAA;IAC5C,CAAC,CAAC,CAAA;IAEF,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,UAAC,GAAQ;QACxB,EAAE,CAAC,CAAC,GAAG,CAAC;YAAC,MAAM,GAAG,CAAA;QAElB,OAAO,CAAC,GAAG,CAAC,uCAAuC,CAAC,CAAA;QACpD,OAAO,CAAC,GAAG,CAAC,8DAA8D,CAAC,CAAA;QAC3E,OAAO,CAAC,GAAG,CAAC,wHAAwH,CAAC,CAAA;IACvI,CAAC,CAAC,CAAC;AACL,CAAC"} \ No newline at end of file diff --git a/examples/basic-express-typescript/sync.js b/examples/basic-express-typescript/sync.js deleted file mode 100644 index 62c2f72..0000000 --- a/examples/basic-express-typescript/sync.js +++ /dev/null @@ -1,54 +0,0 @@ -"use strict"; -Object.defineProperty(exports, "__esModule", { value: true }); -var sync = require("../../fh-sync"); -var Promise = require("bluebird"); -var MONGO_CONN_STRING = process.env.MONGO_CONNECTION_URL || 'mongodb://127.0.0.1:27017/sync'; -var REDIS_CONN_STRING = process.env.REDIS_CONNECTION_URL || 'redis://127.0.0.1:6379'; -var MONGO_OPTS = {}; -var DATASET_NAME = 'messages'; -var DATASET_OPTS = { - syncFrequency: 10 -}; -function initialiseDataset() { - return new Promise(function (resolve, reject) { - sync.init(DATASET_NAME, DATASET_OPTS, function (err) { - if (err) { - reject(err); - } - else { - sync.handleList(DATASET_NAME, function (dataset, query, meta, done) { - console.log("received request from " + query.username + " with tracking ID " + meta.trackingId); - done(null, { - '00001': { - 'item': 'item1' - }, - '00002': { - 'item': 'item2' - }, - '00003': { - 'item': 'item3' - } - }); - }); - resolve(); - } - }); - }); -} -function connect() { - return new Promise(function (resolve, reject) { - sync.connect(MONGO_CONN_STRING, MONGO_OPTS, REDIS_CONN_STRING, function (err) { - if (err) { - reject(err); - } - else { - resolve(); - } - }); - }); -} -function init() { - return connect().then(initialiseDataset); -} -exports.init = init; -//# sourceMappingURL=sync.js.map \ No newline at end of file diff --git a/examples/basic-express-typescript/sync.js.map b/examples/basic-express-typescript/sync.js.map deleted file mode 100644 index 4c785df..0000000 --- a/examples/basic-express-typescript/sync.js.map +++ /dev/null @@ -1 +0,0 @@ 
-{"version":3,"file":"sync.js","sourceRoot":"","sources":["sync.ts"],"names":[],"mappings":";;AACA,oCAAqC;AACrC,kCAAmC;AAGnC,IAAM,iBAAiB,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAoB,IAAI,gCAAgC,CAAC;AAC/F,IAAM,iBAAiB,GAAG,OAAO,CAAC,GAAG,CAAC,oBAAoB,IAAI,wBAAwB,CAAC;AAGvF,IAAM,UAAU,GAAG,EAAE,CAAA;AAGrB,IAAM,YAAY,GAAG,UAAU,CAAA;AAC/B,IAAM,YAAY,GAAG;IACnB,aAAa,EAAE,EAAE;CAClB,CAAC;AAWF;IACE,MAAM,CAAC,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,IAAI,CAAC,YAAY,EAAE,YAAY,EAAE,UAAC,GAAG;YACxC,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACR,MAAM,CAAC,GAAG,CAAC,CAAA;YACb,CAAC;YAAC,IAAI,CAAC,CAAC;gBAGN,IAAI,CAAC,UAAU,CAAC,YAAY,EAAE,UAAC,OAAO,EAAE,KAAY,EAAE,IAAU,EAAE,IAAI;oBACpE,OAAO,CAAC,GAAG,CAAC,2BAAyB,KAAK,CAAC,QAAQ,0BAAqB,IAAI,CAAC,UAAY,CAAC,CAAA;oBAE1F,IAAI,CAAC,IAAI,EAAE;wBACT,OAAO,EAAE;4BACP,MAAM,EAAE,OAAO;yBAChB;wBACD,OAAO,EAAE;4BACP,MAAM,EAAE,OAAO;yBAChB;wBACD,OAAO,EAAE;4BACP,MAAM,EAAE,OAAO;yBAChB;qBACF,CAAC,CAAA;gBACJ,CAAC,CAAC,CAAA;gBAEF,OAAO,EAAE,CAAA;YACX,CAAC;QACH,CAAC,CAAC,CAAA;IACJ,CAAC,CAAC,CAAA;AACJ,CAAC;AAED;IACE,MAAM,CAAC,IAAI,OAAO,CAAC,UAAC,OAAO,EAAE,MAAM;QACjC,IAAI,CAAC,OAAO,CAAC,iBAAiB,EAAE,UAAU,EAAE,iBAAiB,EAAE,UAAC,GAAG;YACjE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACR,MAAM,CAAC,GAAG,CAAC,CAAA;YACb,CAAC;YAAC,IAAI,CAAC,CAAC;gBACN,OAAO,EAAE,CAAA;YACX,CAAC;QACH,CAAC,CAAC,CAAC;IACL,CAAC,CAAC,CAAA;AACJ,CAAC;AAED;IACE,MAAM,CAAC,OAAO,EAAE,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAA;AAC1C,CAAC;AAFD,oBAEC"} \ No newline at end of file From e28633af2b9da4fdf9bb2d267e8e45a87ca7699d Mon Sep 17 00:00:00 2001 From: Evan Shortiss Date: Tue, 21 Nov 2017 15:35:38 -0800 Subject: [PATCH 03/12] add 'files' entry to package.json --- package.json | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/package.json b/package.json index 02f242e..d68f5b2 100644 --- a/package.json +++ b/package.json @@ -3,6 +3,12 @@ "version": "1.0.14", "description": "FeedHenry Data Synchronization Server", "main": "fh-sync.js", + "files": [ + "fh-sync.d.ts", + "fh-sync.js", + "lib/", + "README" + ], "dependencies": { "async": "2.1.5", "backoff": "2.5.0", From 36499a8abbab82eb038553910600852a90e3bbba Mon Sep 17 00:00:00 2001 From: Evan Shortiss Date: Tue, 21 Nov 2017 16:28:40 -0800 Subject: [PATCH 04/12] update d.ts param comments --- examples/basic-express-typescript/sync.ts | 1 - fh-sync.d.ts | 110 +++++++++++++++++----- 2 files changed, 85 insertions(+), 26 deletions(-) diff --git a/examples/basic-express-typescript/sync.ts b/examples/basic-express-typescript/sync.ts index 75142bc..5063d5e 100644 --- a/examples/basic-express-typescript/sync.ts +++ b/examples/basic-express-typescript/sync.ts @@ -15,7 +15,6 @@ const DATASET_OPTS = { syncFrequency: 10 // seconds }; - interface Query { username: string } diff --git a/fh-sync.d.ts b/fh-sync.d.ts index 73482e1..397be61 100644 --- a/fh-sync.d.ts +++ b/fh-sync.d.ts @@ -51,6 +51,9 @@ declare module SyncCloud { * Can be used by other modules to create pre-built sync compliant handlers. */ namespace HandlerFunctions { + /** + * @param dataset - THE THING + */ type Create = (dataset: string, data: Object, metaData: Object, done: StandardCb) => void type Read = (dataset: string, uid: string, metaData: Object, done: StandardCb) => void type Update = (dataset: string, uid: string, metaData: Object, done: StandardCb) => void @@ -186,16 +189,21 @@ declare module SyncCloud { metaData: any; } /** - * Connect sync server to mongo and redis. + * Connect sync server to mongo and redis. Returns the MongoDB and Redis clients being used internally. * - * Returns the MongoDB and Redis clients being used internally. 
+ * @param mongoDBConnectionUrl - Unique id of the dataset (usually collection, table in your database) + * @param mongoDBConnectionOption - Connection options for the MongoDB driver + * @param redisUrl - Redis connection URL + * @param callback - Callback that will be invoked once connections are setup */ function connect(mongoDBConnectionUrl: string, mongoDBConnectionOption: any, redisUrl: string, callback: (err: any, mongoDbClient?: any, redisClient?: any) => void): void; /** - * Initialize sync for specific dataset. + * Initialize sync for specific dataset. The passed datasetId must be a unique string. * - * The passed datasetId must be a unique string. + * @param datasetId - Unique name of the dataset to initialise + * @param options - Specific options to apply to this dataset + * @param callback - Callback that will be invoked once initialisation is complete */ function init(datasetId: string, options: SyncInitOptions, callback: StandardCb): void; @@ -204,146 +212,198 @@ declare module SyncCloud { * * Supported operations are 'sync', 'syncRecords', 'listCollisions', 'removeCollision' and should be passed as a * "fn" key in the options object. + * + * @param datasetId - The dataset to invoke the operation on + * @param options - Options to pass to the invocation + * @param callback - Function that will receive the invocation results */ function invoke(datasetId: string, options: InvokeOptions, callback: (err: any, result: any) => void): void; /** - * Stop sync loop for the given datasetId. + * Stop sync loop for the given datasetId. Invokes the passed callback once all operations are stopped. * - * Invokes the passed callback once all operations are stopped. + * @param datasetId - The dataset to stop syncing + * @param onStop - Callback to invoke once operations have stopped */ function stop(datasetId: string, onStop: NoRespCb): void; /** - * Stop sync loop for all datasets. + * Stop sync loop for all datasets. Invokes the passed callback once all operations are stopped. * - * Invokes the passed callback once all operations are stopped. + * @param onStop - Callback to invoke once operations have stopped */ function stopAll(onStop: StandardCb): void; /** * Provide a custom list implementation for the specified dataset. + * + * @param datasetId - The dataset to apply the given handler to + * @param onList - Implementation of the handler */ function handleList(datasetId: string, onList: HandlerFunctions.List): void; /** * Provide a custom implementation of the list operation for all datasets + * + * @param onList - Implementation of the handler */ function globalHandleList(onList: HandlerFunctions.List): void; /** * Provide a custom create implementation for the specified dataset. + * + * @param datasetId - The dataset to apply the given handler to + * @param onCreate - Implementation of the handler */ function handleCreate(datasetId: string, onCreate: HandlerFunctions.Create): void; /** * Provide a custom implementation of the create operation for all datasets + * + * @param onCreate - Implementation of the handler */ function globalHandleCreate(onCreate: HandlerFunctions.Create): void; /** * Provide a custom read implementation for the specified dataset. + * + * @param datasetId - The dataset to apply the given handler to + * @param onRead - Implementation of the handler */ function handleRead(datasetId: string, onRead: HandlerFunctions.Read): void; /** * Provide a custom implementation of the read operation for all datasets. 
+ * + * @param onRead - Implementation of the handler */ function globalHandleRead(onRead: HandlerFunctions.Read): void; /** * Provide a custom update implementation for the specified dataset. + * + * @param datasetId - The dataset to apply the given handler to + * @param onUpdate - Implementation of the handler */ function handleUpdate(datasetId: string, onUpdate: HandlerFunctions.Update): void; /** - * Provide a custom implementation of the update operation for all datasets. + * Provide a custom implementation of the update handler for all datasets. + * + * @param onUpdate - Implementation of the handler */ function globalHandleUpdate(onUpdate: HandlerFunctions.Update): void; /** * Provide a custom delete implementation for the specified dataset. + * + * @param datasetId - The dataset to apply the given handler to + * @param onDelete - Implementation of the handler */ function handleDelete(datasetId: string, onDelete: HandlerFunctions.Delete): void; /** - * Provide a custom implementation of the delete operation for all datasets. + * Provide a custom implementation of the delete handler for all datasets. + * + * @param datasetId - The dataset to apply the given handler to + * @param onDelete - Implementation of the handler */ function globalHandleDelete(onDelete: HandlerFunctions.Delete): void; /** * Provide a custom collision handler implementation for the specified dataset. + * + * @param datasetId - The dataset to apply the given handler to + * @param onCollision - Implementation of the handler */ function handleCollision(datasetId: string, onCollision: HandlerFunctions.Collision): void; /** * Provide a custom implementation of the collision handler for all datasets. + * + * @param onCollision - Implementation of the handler */ function globalHandleCollision(onCollision: HandlerFunctions.Collision): void; /** * Provide a custom collision list handler implementation for the specified dataset. + * + * @param datasetId - The dataset to apply the given handler to + * @param onList - Implementation of the handler */ function listCollisions(datasetId: string, onList: HandlerFunctions.ListCollisions): void; /** * Provide a custom implementation of the list collision handler for all datasets. + * + * @param onList - Implementation of the handler */ function globalListCollisions(onList: HandlerFunctions.ListCollisions): void; /** * Provide a custom collision removal handler for the specified dataset. + * + * @param datasetId - The dataset to apply the given handler to + * @param onRemove - Implementation of the handler */ function removeCollision(datasetId: string, onRemove: HandlerFunctions.RemoveCollision): void; /** - * Allows developers to provide a request interceptor for the given datasetId. - * - * Facilitates performing custom operations prior to invoking the required sync method. + * Allows developers to provide a request interceptor for the given datasetId. Facilitates performing custom + * operations prior to invoking the required sync method. * * Useful for performing authorisation checks, logging, etc. + * + * @param datasetId - The dataset to apply the given interceptor to + * @param onIntercept - Implementation of the interceptor */ function interceptRequest(datasetId: string, onIntercept: HandlerFunctions.Interceptor): void; /** - * Allows developers to provide a response interceptor for the given datasetId. + * Allows developers to provide a response interceptor for the given datasetId. Facilitates performing custom + * operations after internal sync operations have completed. 
* - * Facilitates performing custom operations after internal sync operations have completed. + * @param datasetId - The dataset to apply the given interceptor to + * @param onIntercept - Implementation of the interceptor */ function interceptResponse(datasetId: string, onIntercept: HandlerFunctions.Interceptor): void; /** * Override global options utilised by the library. + * @param config - The configuration overrides to apply */ function setConfig(config: SyncGlobalOptions): void; /** - * Allows developers to provide a global request interceptor. + * Allows developers to provide a global request interceptor. Facilitates performing custom operations prior to + * invoking the required sync method. Useful for performing authorisation checks, logging, etc. * - * Facilitates performing custom operations prior to invoking the required sync method. - * - * Useful for performing authorisation checks, logging, etc. + * @param onIntercept - The function to use as interceptor for all collection requests */ function globalInterceptRequest(onIntercept: HandlerFunctions.Interceptor): void; /** - * Allows developers to provide a response interceptor for all datasets. + * Allows developers to provide a response interceptor for all datasets. Facilitates performing custom operations + * after internal sync operations have completed. * - * Facilitates performing custom operations after internal sync operations have completed. + * @param onIntercept - The function to use as interceptor for all collection resposnes */ function globalInterceptResponse(onIntercept: HandlerFunctions.Interceptor): void; /** - * Sets a custom global hashing method. + * Sets a custom global hashing method. This is used to determine if a difference exists between the previous and + * current state of a record. * - * This is used to determine if a difference exists between the previous and current state of a record. + * @param hashFunction - The custom hashing implementation to use for all datasets */ function setGlobalHashFn(datasetId: string, hashFunction: HandlerFunctions.Hash): void; /** - * Sets a custom hashing method for the given datasetId. + * Sets a custom hashing method for the given datasetId. This is used to determine if a difference exists between + * the previous and current state of a record. * - * This is used to determine if a difference exists between the previous and current state of a record. + * @param datasetId - The custom hashing implementation to use for the given datasetId + * @param hashFunction - The custom hashing implementation to use for the given datasetId */ function setRecordHashFn(datasetId: string, hashFunction: HandlerFunctions.Hash): void; } From a1e21bdd2950c13dcdf6b9605cb0a463128afa1c Mon Sep 17 00:00:00 2001 From: Evan Shortiss Date: Tue, 21 Nov 2017 16:29:54 -0800 Subject: [PATCH 05/12] remove test --- fh-sync.d.ts | 3 --- 1 file changed, 3 deletions(-) diff --git a/fh-sync.d.ts b/fh-sync.d.ts index 397be61..9399862 100644 --- a/fh-sync.d.ts +++ b/fh-sync.d.ts @@ -51,9 +51,6 @@ declare module SyncCloud { * Can be used by other modules to create pre-built sync compliant handlers. 
*/ namespace HandlerFunctions { - /** - * @param dataset - THE THING - */ type Create = (dataset: string, data: Object, metaData: Object, done: StandardCb) => void type Read = (dataset: string, uid: string, metaData: Object, done: StandardCb) => void type Update = (dataset: string, uid: string, metaData: Object, done: StandardCb) => void From 2f68a772c9dd7b41c24dc53487635debc83292b9 Mon Sep 17 00:00:00 2001 From: Evan Shortiss Date: Thu, 7 Dec 2017 15:26:01 -0800 Subject: [PATCH 06/12] allow query and meta to have 'any' type since Object is too strict. update invoke and init callback types. add backoff strategy types. add strict mode to example --- .travis.yml | 3 --- .../basic-express-typescript/package.json | 1 - .../basic-express-typescript/tsconfig.json | 3 ++- fh-sync.d.ts | 27 +++++++++++-------- package.json | 2 +- 5 files changed, 19 insertions(+), 17 deletions(-) diff --git a/.travis.yml b/.travis.yml index c6a8c4f..b6eb101 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,11 +1,9 @@ language: node_js sudo: required node_js: - - "0.10" - "4" - "4.4.3" - "6" - - "8" services: - docker before_install: @@ -32,4 +30,3 @@ matrix: - npm install - npm link fh-mbaas-api - npm test - \ No newline at end of file diff --git a/examples/basic-express-typescript/package.json b/examples/basic-express-typescript/package.json index db0dd6c..1e2b876 100644 --- a/examples/basic-express-typescript/package.json +++ b/examples/basic-express-typescript/package.json @@ -7,7 +7,6 @@ "test": "echo \"Error: no test specified\" && exit 1", "start": "tsc && node server.js" }, - "author": "Evan Shortiss (http://evanshortiss.com/)", "license": "MIT", "dependencies": { "bluebird": "~3.5.1", diff --git a/examples/basic-express-typescript/tsconfig.json b/examples/basic-express-typescript/tsconfig.json index 14d0092..841fbbd 100644 --- a/examples/basic-express-typescript/tsconfig.json +++ b/examples/basic-express-typescript/tsconfig.json @@ -5,7 +5,8 @@ "removeComments": true, "preserveConstEnums": true, "sourceMap": true, - "target": "es5" + "target": "es5", + "strict": true }, "include": [ "server.ts" diff --git a/fh-sync.d.ts b/fh-sync.d.ts index 9399862..316f03e 100644 --- a/fh-sync.d.ts +++ b/fh-sync.d.ts @@ -4,6 +4,11 @@ declare module SyncCloud { + /** + * Backoff strategies that can be passed to workers + */ + type WorkerBackoffStrategy = 'fib'|'exp' + /** * Valid actions (the "fn" param) that can be passed to sync.invoke */ @@ -51,14 +56,14 @@ declare module SyncCloud { * Can be used by other modules to create pre-built sync compliant handlers. 
*/ namespace HandlerFunctions { - type Create = (dataset: string, data: Object, metaData: Object, done: StandardCb) => void - type Read = (dataset: string, uid: string, metaData: Object, done: StandardCb) => void - type Update = (dataset: string, uid: string, metaData: Object, done: StandardCb) => void - type Delete = (dataset: string, queryParams: Object, metaData: Object, done: StandardCb) => void - type List = (dataset: string, queryParams: Object, metaData: Object, done: StandardCb) => void - type Collision = (datasetId: string, hash: string, timestamp: number, uid: string, pre: Object, post: Object, metaData: Object, callback: StandardCb) => void - type ListCollisions = (datasetId: string, metaData: Object, callback: StandardCb<{ [hash: string]: Object }>) => void - type RemoveCollision = (datasetId: string, collision_hash: string, metaData: Object, callback: StandardCb) => void + type Create = (dataset: string, data: Object, metaData: any, done: StandardCb) => void + type Read = (dataset: string, uid: string, metaData: any, done: StandardCb) => void + type Update = (dataset: string, uid: string, metaData: any, done: StandardCb) => void + type Delete = (dataset: string, queryParams: any, metaData: any, done: StandardCb) => void + type List = (dataset: string, queryParams: any, metaData: any, done: StandardCb) => void + type Collision = (datasetId: string, hash: string, timestamp: number, uid: string, pre: Object, post: Object, metaData: any, callback: StandardCb) => void + type ListCollisions = (datasetId: string, metaData: any, callback: StandardCb<{ [hash: string]: Object }>) => void + type RemoveCollision = (datasetId: string, collision_hash: string, metaData: any, callback: StandardCb) => void type Interceptor = (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void type Hash = (datasetId: string, data: Object) => void } @@ -128,7 +133,7 @@ declare module SyncCloud { * Example: {strategy: 'exp', max: 60*1000}, */ interface PendingWorkerBackoff { - strategy: string; + strategy?: WorkerBackoffStrategy; max: number; } /** @@ -202,7 +207,7 @@ declare module SyncCloud { * @param options - Specific options to apply to this dataset * @param callback - Callback that will be invoked once initialisation is complete */ - function init(datasetId: string, options: SyncInitOptions, callback: StandardCb): void; + function init(datasetId: string, options: SyncInitOptions, callback: NoRespCb): void; /** * Internal method used to invoke sync methods. Should be used to handle json request from client. @@ -214,7 +219,7 @@ declare module SyncCloud { * @param options - Options to pass to the invocation * @param callback - Function that will receive the invocation results */ - function invoke(datasetId: string, options: InvokeOptions, callback: (err: any, result: any) => void): void; + function invoke(datasetId: string, options: InvokeOptions, callback: StandardCb): void; /** * Stop sync loop for the given datasetId. Invokes the passed callback once all operations are stopped. 
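
For reference, the revised typings in the fh-sync.d.ts hunk above can be exercised with a minimal TypeScript sketch like the following. This is an illustration only, not part of any patch in this series: the dataset name, sync frequency and list result are made up, and the MongoDB/Redis URLs are simply the defaults used by the basic-express-typescript example.

    import * as sync from 'fh-sync'

    const DATASET_ID = 'exampleDataset' // illustrative dataset name

    sync.connect('mongodb://127.0.0.1:27017/sync', {}, 'redis://127.0.0.1:6379', (err) => {
      if (err) throw err

      // init's callback is a NoRespCb: it reports an error (or none), with no result payload
      sync.init(DATASET_ID, { syncFrequency: 10 }, (err) => {
        if (err) throw err

        // queryParams and metaData are typed as `any`, so application-specific
        // fields can be read without casts
        sync.handleList(DATASET_ID, (datasetId, queryParams: any, metaData: any, done) => {
          done(null, {}) // uid -> record map returned to sync clients
        })
      })
    })
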
diff --git a/package.json b/package.json index d68f5b2..d31d085 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "fh-sync", - "version": "1.0.14", + "version": "1.1.0", "description": "FeedHenry Data Synchronization Server", "main": "fh-sync.js", "files": [ From 99a33fb3a7c55e761cbfcdfc95a5dfdc9fc2fb04 Mon Sep 17 00:00:00 2001 From: Wojciech Trocki Date: Fri, 8 Dec 2017 11:25:28 +0000 Subject: [PATCH 07/12] Fix typescript issues --- examples/basic-express-typescript/{ => lib}/route.ts | 6 +++--- examples/basic-express-typescript/{ => lib}/sync.ts | 2 +- examples/basic-express-typescript/server.ts | 8 ++++---- 3 files changed, 8 insertions(+), 8 deletions(-) rename examples/basic-express-typescript/{ => lib}/route.ts (74%) rename examples/basic-express-typescript/{ => lib}/sync.ts (97%) diff --git a/examples/basic-express-typescript/route.ts b/examples/basic-express-typescript/lib/route.ts similarity index 74% rename from examples/basic-express-typescript/route.ts rename to examples/basic-express-typescript/lib/route.ts index 5e1ad3e..f839684 100644 --- a/examples/basic-express-typescript/route.ts +++ b/examples/basic-express-typescript/lib/route.ts @@ -2,11 +2,11 @@ import * as express from 'express' import * as parsers from 'body-parser' import * as cors from 'cors' -import * as sync from '../../fh-sync' +import * as sync from '../../../fh-sync' const router = express.Router() - // Mobile clients typically require CORS headers to be set +// Mobile clients typically require CORS headers to be set router.use(cors()) // Need to parse incoming JSON bodies @@ -15,7 +15,7 @@ router.use(parsers.json()) // All sync requests are performed using a HTTP POST router.post('/:datasetId', (req: express.Request, res: express.Response, next: express.NextFunction) => { // Invoke action in sync for specific dataset - sync.invoke(req.params.datasetId, req.body, function (err, result) { + sync.invoke(req.params.datasetId, req.body, function(err: any, result: any) { if (err) { next(err) } else { diff --git a/examples/basic-express-typescript/sync.ts b/examples/basic-express-typescript/lib/sync.ts similarity index 97% rename from examples/basic-express-typescript/sync.ts rename to examples/basic-express-typescript/lib/sync.ts index 5063d5e..b8c7aed 100644 --- a/examples/basic-express-typescript/sync.ts +++ b/examples/basic-express-typescript/lib/sync.ts @@ -1,5 +1,5 @@ -import * as sync from '../../fh-sync' +import * as sync from '../../../fh-sync' import * as Promise from 'bluebird' // Sync framework requires mongodb and redis to be running diff --git a/examples/basic-express-typescript/server.ts b/examples/basic-express-typescript/server.ts index 34508fd..b573fb9 100644 --- a/examples/basic-express-typescript/server.ts +++ b/examples/basic-express-typescript/server.ts @@ -1,7 +1,7 @@ +import * as sync from './lib/sync' +import syncRouter from './lib/route' import * as express from 'express' -import * as sync from './sync' -import syncRouter from './route' const app = express() @@ -12,7 +12,7 @@ sync.init() process.exit(1) }) -function startApplicationServer (err: any) { +function startApplicationServer(err: any) { if (err) { console.log('error starting sync server:') throw err @@ -24,7 +24,7 @@ function startApplicationServer (err: any) { app.use('/sync', syncRouter) // Default route. 
Can be used to check application is up and running - app.get('/', (req, res) => { + app.get('/', (req: express.Request, res: express.Response) => { res.send('Sample application is running!') }) From 705dd8efd68ea511a5a84038c4bfda0936a7c4f6 Mon Sep 17 00:00:00 2001 From: Wojciech Trocki Date: Fri, 8 Dec 2017 11:25:44 +0000 Subject: [PATCH 08/12] Fix missing global compiler --- examples/basic-express-typescript/README.md | 22 +++++++++++++++++++ .../basic-express-typescript/package.json | 4 ++-- 2 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 examples/basic-express-typescript/README.md diff --git a/examples/basic-express-typescript/README.md b/examples/basic-express-typescript/README.md new file mode 100644 index 0000000..c24fbde --- /dev/null +++ b/examples/basic-express-typescript/README.md @@ -0,0 +1,22 @@ +## Sync typescript sample application example + +## Requirements + + +`MONGO_CONNECTION_URL` environment variable needs to point to mongodb instance + +By default using: mongodb://127.0.0.1:27017/sync + + +`REDIS_CONNECTION_URL` environment variable needs to point to running redis instance + +By default using: redis://127.0.0.1:6379 + +## Running + + npm install + npm run start + +## Testing + +Please refer to documentation for information how to setup sync client. diff --git a/examples/basic-express-typescript/package.json b/examples/basic-express-typescript/package.json index 1e2b876..e52d3fa 100644 --- a/examples/basic-express-typescript/package.json +++ b/examples/basic-express-typescript/package.json @@ -5,7 +5,7 @@ "main": "server.js", "scripts": { "test": "echo \"Error: no test specified\" && exit 1", - "start": "tsc && node server.js" + "start": "ts-node server.ts" }, "license": "MIT", "dependencies": { @@ -19,6 +19,6 @@ "@types/body-parser": "~1.16.8", "@types/cors": "~2.8.3", "@types/express": "~4.0.39", - "typescript": "~2.6.1" + "ts-node": "^3.3.0" } } From 7529853ff5a8fd9d01b9e0672766b150daaf717d Mon Sep 17 00:00:00 2001 From: Wojciech Trocki Date: Fri, 8 Dec 2017 16:46:28 +0000 Subject: [PATCH 09/12] Add typescript --- examples/basic-express-typescript/package.json | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/basic-express-typescript/package.json b/examples/basic-express-typescript/package.json index e52d3fa..5af0c75 100644 --- a/examples/basic-express-typescript/package.json +++ b/examples/basic-express-typescript/package.json @@ -19,6 +19,7 @@ "@types/body-parser": "~1.16.8", "@types/cors": "~2.8.3", "@types/express": "~4.0.39", + "typescript": "~2.6.1", "ts-node": "^3.3.0" } } From 05e9fb8fff1eb583392d1aa6fd626d709d3958d9 Mon Sep 17 00:00:00 2001 From: Evan Shortiss Date: Mon, 11 Dec 2017 17:31:26 -0800 Subject: [PATCH 10/12] fix hash handler types --- fh-sync.d.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/fh-sync.d.ts b/fh-sync.d.ts index 316f03e..3ff6e60 100644 --- a/fh-sync.d.ts +++ b/fh-sync.d.ts @@ -65,7 +65,7 @@ declare module SyncCloud { type ListCollisions = (datasetId: string, metaData: any, callback: StandardCb<{ [hash: string]: Object }>) => void type RemoveCollision = (datasetId: string, collision_hash: string, metaData: any, callback: StandardCb) => void type Interceptor = (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void - type Hash = (datasetId: string, data: Object) => void + type Hash = (datasetId: string, data: any) => string } /** From 43d5400a0368ec44b84d6ab40ccd0105dfebe673 Mon Sep 17 00:00:00 2001 From: Evan Shortiss Date: Mon, 11 Dec 2017 21:27:02 
-0800 Subject: [PATCH 11/12] fix hash fn signature --- examples/basic-express-typescript/lib/route.ts | 2 +- fh-sync.d.ts | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/basic-express-typescript/lib/route.ts b/examples/basic-express-typescript/lib/route.ts index f839684..bd6f1f5 100644 --- a/examples/basic-express-typescript/lib/route.ts +++ b/examples/basic-express-typescript/lib/route.ts @@ -15,7 +15,7 @@ router.use(parsers.json()) // All sync requests are performed using a HTTP POST router.post('/:datasetId', (req: express.Request, res: express.Response, next: express.NextFunction) => { // Invoke action in sync for specific dataset - sync.invoke(req.params.datasetId, req.body, function(err: any, result: any) { + sync.invoke(req.params.datasetId, req.body, function(err, result) { if (err) { next(err) } else { diff --git a/fh-sync.d.ts b/fh-sync.d.ts index 3ff6e60..dc2ff9b 100644 --- a/fh-sync.d.ts +++ b/fh-sync.d.ts @@ -65,7 +65,7 @@ declare module SyncCloud { type ListCollisions = (datasetId: string, metaData: any, callback: StandardCb<{ [hash: string]: Object }>) => void type RemoveCollision = (datasetId: string, collision_hash: string, metaData: any, callback: StandardCb) => void type Interceptor = (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void - type Hash = (datasetId: string, data: any) => string + type Hash = (data: any) => string } /** From 4be86018c6b4c51537343faae9c6b224d903116a Mon Sep 17 00:00:00 2001 From: Evan Shortiss Date: Wed, 13 Dec 2017 23:08:15 -0800 Subject: [PATCH 12/12] update hashing signature for global hash. normalise datasetId param --- fh-sync.d.ts | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/fh-sync.d.ts b/fh-sync.d.ts index dc2ff9b..f682e1b 100644 --- a/fh-sync.d.ts +++ b/fh-sync.d.ts @@ -56,16 +56,17 @@ declare module SyncCloud { * Can be used by other modules to create pre-built sync compliant handlers. 
*/ namespace HandlerFunctions { - type Create = (dataset: string, data: Object, metaData: any, done: StandardCb) => void - type Read = (dataset: string, uid: string, metaData: any, done: StandardCb) => void - type Update = (dataset: string, uid: string, metaData: any, done: StandardCb) => void - type Delete = (dataset: string, queryParams: any, metaData: any, done: StandardCb) => void - type List = (dataset: string, queryParams: any, metaData: any, done: StandardCb) => void + type Create = (datasetId: string, data: Object, metaData: any, done: StandardCb) => void + type Read = (datasetId: string, uid: string, metaData: any, done: StandardCb) => void + type Update = (datasetId: string, uid: string, metaData: any, done: StandardCb) => void + type Delete = (datasetId: string, queryParams: any, metaData: any, done: StandardCb) => void + type List = (datasetId: string, queryParams: any, metaData: any, done: StandardCb) => void type Collision = (datasetId: string, hash: string, timestamp: number, uid: string, pre: Object, post: Object, metaData: any, callback: StandardCb) => void type ListCollisions = (datasetId: string, metaData: any, callback: StandardCb<{ [hash: string]: Object }>) => void type RemoveCollision = (datasetId: string, collision_hash: string, metaData: any, callback: StandardCb) => void type Interceptor = (datasetId: string, interceptorParams: SyncInterceptParams, callback: NoRespCb) => void type Hash = (data: any) => string + type GlobalHash = (hashes: Array) => string } /** @@ -398,7 +399,7 @@ declare module SyncCloud { * * @param hashFunction - The custom hashing implementation to use for all datasets */ - function setGlobalHashFn(datasetId: string, hashFunction: HandlerFunctions.Hash): void; + function setGlobalHashFn(datasetId: string, hashFunction: HandlerFunctions.GlobalHash): void; /** * Sets a custom hashing method for the given datasetId. This is used to determine if a difference exists between