From 4beb25e5b4bbb87cfb364f09097ff7a9b40471d2 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 14 Nov 2018 21:47:07 -0800 Subject: [PATCH 1/6] implement loading flags from a file --- README.md | 4 + file_data_source.js | 162 +++++++++++++++++++++ index.js | 41 ++++-- package-lock.json | 14 ++ package.json | 4 +- test/file_data_source-test.js | 265 ++++++++++++++++++++++++++++++++++ 6 files changed, 475 insertions(+), 15 deletions(-) create mode 100644 file_data_source.js create mode 100644 test/file_data_source-test.js diff --git a/README.md b/README.md index ab8074e..868331f 100644 --- a/README.md +++ b/README.md @@ -55,6 +55,10 @@ Your first feature flag }); }); +Using flag data from a file +--------------------------- + +For testing purposes, the SDK can be made to read feature flag state from a file or files instead of connecting to LaunchDarkly. See [`file_data_source.js`](https://github.com/launchdarkly/node-client/blob/master/file_data_source.js) for more details. Learn more ----------- diff --git a/file_data_source.js b/file_data_source.js new file mode 100644 index 0000000..a970df5 --- /dev/null +++ b/file_data_source.js @@ -0,0 +1,162 @@ +var fs = require('fs'), + winston = require('winston'), + yaml = require('yaml'), + dataKind = require('./versioned_data_kind'); + +/* + FileDataSource provides a way to use local files as a source of feature flag state, instead of + connecting to LaunchDarkly. This would typically be used in a test environment. + + To use this component, call FileDataSource(options) and store the result in the "updateProcessor" + property of your LaunchDarkly client configuration. In the options, set "paths" to the file + paths of your data file(s): + + var dataSource = FileDataSource({ paths: [ myFilePath ] }); + var config = { updateProcessor: dataSource }; + + Flag data files can be either JSON or YAML. They contain an object with three possible + properties: + + - "flags": Full feature flag definitions. + - "flagValues": Simplified feature flags, just a map of flag keys to values. + - "segments": User segment definitions. + + The format of the data in "flags" and "segments" is defined by the LaunchDarkly application + and is subject to change. You can query existing flags and segments from LaunchDarkly in JSON + format by querying https://app.launchdarkly.com/sdk/latest-all and passing your SDK key in + the Authorization header. + + You can also specify that flags should be reloaded whenever a file is modified, by setting + "autoUpdate: true" in the options. This feature uses Node's fs.watch() API, so it is subject to + the limitations described here: https://nodejs.org/docs/latest/api/fs.html#fs_fs_watch_filename_options_listener + + For more details, see the LaunchDarkly reference guide: + https://docs.launchdarkly.com/v2.0/docs/reading-flags-from-a-file +*/ +function FileDataSource(options) { + var paths = (options && options.paths) || []; + var autoUpdate = !!options.autoUpdate; + + return config => { + var featureStore = config.featureStore; + var watchers = []; + var pendingUpdate = false; + var logger = options.logger || config.logger || defaultLogger(); + var inited = false; + + function defaultLogger() { + return new winston.Logger({ + level: 'info', + transports: [ new (winston.transports.Console)() ] + }); + } + + async function loadFile(path, allData) { + var data = await new Promise((resolve, reject) => + fs.readFile(path, 'utf8', (err, data) => + err ? 
reject(err) : resolve(data) + )); + var parsed = parseData(data) || {}; + var addItem = (kind, item) => { + if (!allData[kind.namespace]) { + allData[kind.namespace] = {}; + } + if (allData[kind.namespace][item.key]) { + throw new Error('found duplicate key: "' + item.key + '"'); + } else { + allData[kind.namespace][item.key] = item; + } + } + Object.values(parsed.flags || {}).forEach(item => { + addItem(dataKind.features, item); + }); + Object.entries(parsed.flagValues || {}).forEach(e => { + addItem(dataKind.features, makeFlagWithValue(e[0], e[1])); + }); + Object.values(parsed.segments || {}).forEach(item => { + addItem(dataKind.segments, item); + }); + } + + async function loadAll() { + pendingUpdate = false; + var allData = {}; + for (var i = 0; i < paths.length; i++) { + try { + await loadFile(paths[i], allData); + } catch (e) { + throw new Error('Unable to load flags: ' + e + ' [' + paths[i] + ']'); + } + } + await new Promise(resolve => featureStore.init(allData, resolve)); + inited = true; + } + + function parseData(data) { + // Every valid JSON document is also a valid YAML document (for parsers that comply + // with the spec, which this one does) so we can parse both with the same parser. + return yaml.parse(data); + } + + function makeFlagWithValue(key, value) { + return { + key: key, + on: true, + fallthrough: { variation: 0 }, + variations: [ value ] + }; + } + + function startWatching() { + var reload = () => { + loadAll().then(() => { + logger && logger.warn('Reloaded flags from file data'); + }).catch(() => {}); + }; + paths.forEach(path => { + var watcher = fs.watch(path, { persistent: false }, (event, filename) => { + if (!pendingUpdate) { // coalesce updates to avoid reloading repeatedly + pendingUpdate = true; + setTimeout(reload, 0); + } + }); + watchers.push(watcher); + }); + } + + function stopWatching() { + watchers.forEach(w => w.close()); + watchers = []; + } + + fds = {}; + + fds.start = fn => { + var cb = fn || (() => {}); + + if (autoUpdate) { + startWatching(); + } + + loadAll().then(() => cb(), err => cb(err)); + }; + + fds.stop = () => { + if (autoUpdate) { + stopWatching(); + } + }; + + fds.initialized = () => { + return inited; + }; + + fds.close = () => { + fds.stop(); + }; + + return fds; + } +} + +module.exports = FileDataSource; diff --git a/index.js b/index.js index 0a2980e..4fe9738 100644 --- a/index.js +++ b/index.js @@ -1,5 +1,6 @@ var FeatureStoreEventWrapper = require('./feature_store_event_wrapper'); var RedisFeatureStore = require('./redis_feature_store'); +var FileDataSource = require('./file_data_source'); var Requestor = require('./requestor'); var EventEmitter = require('events').EventEmitter; var EventProcessor = require('./event_processor'); @@ -55,13 +56,12 @@ var newClient = function(sdkKey, config) { var client = new EventEmitter(), initComplete = false, failure, - queue = [], requestor, updateProcessor, eventProcessor; config = configuration.validate(config); - + // Initialize global tunnel if proxy options are set if (config.proxyHost && config.proxyPort ) { config.proxyAgent = createProxyAgent(config); @@ -85,22 +85,34 @@ var newClient = function(sdkKey, config) { throw new Error("You must configure the client with an SDK key"); } + var createDefaultUpdateProcessor = function(config) { + if (config.useLdd || config.offline) { + return NullUpdateProcessor(); + } else { + requestor = Requestor(sdkKey, config); + + if (config.stream) { + config.logger.info("Initializing stream processor to receive feature flag updates"); + return 
StreamingProcessor(sdkKey, config, requestor); + } else { + config.logger.info("Initializing polling processor to receive feature flag updates"); + config.logger.warn("You should only disable the streaming API if instructed to do so by LaunchDarkly support"); + return PollingProcessor(config, requestor); + } + } + } + var updateProcessorFactory = createDefaultUpdateProcessor; if (config.updateProcessor) { - updateProcessor = config.updateProcessor; - } else if (config.useLdd || config.offline) { - updateProcessor = NullUpdateProcessor(); - } else { - requestor = Requestor(sdkKey, config); - - if (config.stream) { - config.logger.info("Initializing stream processor to receive feature flag updates"); - updateProcessor = StreamingProcessor(sdkKey, config, requestor); + if (typeof config.updateProcessor === 'function') { + updateProcessorFactory = config.updateProcessor; } else { - config.logger.info("Initializing polling processor to receive feature flag updates"); - config.logger.warn("You should only disable the streaming API if instructed to do so by LaunchDarkly support"); - updateProcessor = PollingProcessor(config, requestor); + updateProcessor = config.updateProcessor; } } + if (!updateProcessor) { + updateProcessor = updateProcessorFactory(config); + } + updateProcessor.start(function(err) { if (err) { var error; @@ -378,6 +390,7 @@ var newClient = function(sdkKey, config) { module.exports = { init: newClient, RedisFeatureStore: RedisFeatureStore, + FileDataSource: FileDataSource, errors: errors }; diff --git a/package-lock.json b/package-lock.json index a42cde1..52f51bf 100644 --- a/package-lock.json +++ b/package-lock.json @@ -5759,6 +5759,15 @@ "integrity": "sha1-iQN8vJLFarGJJua6TLsgDhVnKmo=", "dev": true }, + "tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "requires": { + "os-tmpdir": "~1.0.2" + } + }, "tmpl": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz", @@ -6243,6 +6252,11 @@ "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", "integrity": "sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=" }, + "yaml": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.0.1.tgz", + "integrity": "sha512-ysU56qumPH0tEML2hiFVAo+rCnG/0+oO2Ye3fN4c40GBN7kX1fYhDqSoX7OohimcI/Xkkr1DdaUlg5+afinRqA==" + }, "yargs": { "version": "3.10.0", "resolved": "https://registry.npmjs.org/yargs/-/yargs-3.10.0.tgz", diff --git a/package.json b/package.json index d4d8ddf..1697949 100644 --- a/package.json +++ b/package.json @@ -34,7 +34,8 @@ "request-etag": "^2.0.3", "semver": "5.5.0", "tunnel": "0.0.6", - "winston": "2.4.1" + "winston": "2.4.1", + "yaml": "1.0.1" }, "engines": { "node": ">= 0.8.x" @@ -43,6 +44,7 @@ "jest": "22.4.3", "jest-junit": "3.6.0", "nock": "9.2.3", + "tmp": "0.0.33", "typescript": "3.0.1" }, "jest": { diff --git a/test/file_data_source-test.js b/test/file_data_source-test.js new file mode 100644 index 0000000..093c02f --- /dev/null +++ b/test/file_data_source-test.js @@ -0,0 +1,265 @@ +var fs = require('fs'); +var tmp = require('tmp'); +var dataKind = require('../versioned_data_kind'); + +var LaunchDarkly = require('../index'); +var FileDataSource = require('../file_data_source'); +var InMemoryFeatureStore = require('../feature_store'); + +var flag1Key = 'flag1'; +var flag2Key = 'flag2'; +var flag2Value = 'value2'; +var segment1Key = 'seg1'; + +var 
flag1 = { + "key": flag1Key, + "on": true, + "fallthrough": { + "variation": 2 + }, + "variations": [ "fall", "off", "on" ] +}; + +var segment1 = { + "key": segment1Key, + "include": ["user1"] +}; + +var flagOnlyJson = ` +{ + "flags": { + "${flag1Key}": ${ JSON.stringify(flag1) } + } +}`; + +var segmentOnlyJson = ` +{ + "segments": { + "${segment1Key}": ${ JSON.stringify(segment1) } + } +}`; + +var allPropertiesJson = ` +{ + "flags": { + "${flag1Key}": ${ JSON.stringify(flag1) } + }, + "flagValues": { + "${flag2Key}": "${flag2Value}" + }, + "segments": { + "${segment1Key}": ${ JSON.stringify(segment1) } + } +}`; + +var allPropertiesYaml = ` +flags: + ${flag1Key}: + key: ${flag1Key} + on: true + fallthrough: + variation: 2 + variations: + - fall + - off + - on +flagValues: + ${flag2Key}: "${flag2Value}" +segments: + ${segment1Key}: + key: ${segment1Key} + include: + - user1 +`; + +describe('FileDataSource', function() { + var store; + var dataSources = []; + + beforeEach(() => { + store = InMemoryFeatureStore(); + dataSources = []; + }); + + afterEach(() => { + dataSources.forEach(s => s.close()); + }); + + function makeTempFile(content) { + return new Promise((resolve, reject) => { + tmp.file(function(err, path, fd) { + if (err) { + reject(err); + } else { + replaceFileContent(path, content).then(() => resolve(path)); + } + }); + }); + } + + function replaceFileContent(path, content) { + return new Promise((resolve, reject) => { + fs.writeFile(path, content, function(err) { + err ? reject(err) : resolve(); + }); + }); + } + + function setupDataSource(options) { + var factory = FileDataSource(options); + var ds = factory({ featureStore: store }); + dataSources.push(ds); + return ds; + } + + function sorted(a) { + var a1 = Array.from(a); + a1.sort(); + return a1; + } + + function asyncify(f) { + return new Promise(resolve => f(resolve)); + } + + function sleep(millis) { + return new Promise(resolve => { + setTimeout(resolve, millis); + }); + } + + it('does not load flags prior to start', async () => { + var path = await makeTempFile('{"flagValues":{"key":"value"}}'); + var fds = setupDataSource({ paths: [path] }); + + expect(fds.initialized()).toBe(false); + expect(await asyncify(cb => store.initialized(cb))).toBe(false); + expect(await asyncify(cb => store.all(dataKind.features, cb))).toEqual({}); + expect(await asyncify(cb => store.all(dataKind.segments, cb))).toEqual({}); + }); + + async function testLoadAllProperties(content) { + var path = await makeTempFile(content); + var fds = setupDataSource({ paths: [path] }); + await asyncify(fds.start); + + expect(fds.initialized()).toBe(true); + expect(await asyncify(cb => store.initialized(cb))).toBe(true); + var items = await asyncify(cb => store.all(dataKind.features, cb)); + expect(sorted(Object.keys(items))).toEqual([ flag1Key, flag2Key ]); + var flag = await asyncify(cb => store.get(dataKind.features, flag1Key, cb)); + expect(flag).toEqual(flag1); + items = await asyncify(cb => store.all(dataKind.segments, cb)); + expect(items).toEqual({ seg1: segment1 }); + } + + it('loads flags on start - from JSON', () => testLoadAllProperties(allPropertiesJson)); + + it('loads flags on start - from YAML', () => testLoadAllProperties(allPropertiesYaml)); + + it('does not load if file is missing', async () => { + var fds = setupDataSource({ paths: ['no-such-file'] }); + await asyncify(fds.start); + + expect(fds.initialized()).toBe(false); + expect(await asyncify(cb => store.initialized(cb))).toBe(false); + }); + + it('does not load if file data is 
malformed', async () => { + var path = await makeTempFile('{x'); + var fds = setupDataSource({ paths: [path] }); + await asyncify(fds.start); + + expect(fds.initialized()).toBe(false); + expect(await asyncify(cb => store.initialized(cb))).toBe(false); + }); + + it('can load multiple files', async () => { + var path1 = await makeTempFile(flagOnlyJson); + var path2 = await makeTempFile(segmentOnlyJson); + var fds = setupDataSource({ paths: [path1, path2] }); + await asyncify(fds.start); + + expect(fds.initialized()).toBe(true); + expect(await asyncify(cb => store.initialized(cb))).toBe(true); + + var items = await asyncify(cb => store.all(dataKind.features, cb)); + expect(Object.keys(items)).toEqual([ flag1Key ]); + items = await asyncify(cb => store.all(dataKind.segments, cb)); + expect(Object.keys(items)).toEqual([ segment1Key ]); + }); + + it('does not allow duplicate keys', async () => { + var path1 = await makeTempFile(flagOnlyJson); + var path2 = await makeTempFile(flagOnlyJson); + var fds = setupDataSource({ paths: [path1, path2] }); + await asyncify(fds.start); + + expect(fds.initialized()).toBe(false); + expect(await asyncify(store.initialized)).toBe(false); + }); + + it('does not reload modified file if auto-update is off', async () => { + var path = await makeTempFile(flagOnlyJson); + var fds = setupDataSource({ paths: [path] }); + await asyncify(fds.start); + + var items = await asyncify(cb => store.all(dataKind.segments, cb)); + expect(Object.keys(items).length).toEqual(0); + + await sleep(200); + await replaceFileContent(path, segmentOnlyJson); + await sleep(200); + + items = await asyncify(cb => store.all(dataKind.segments, cb)); + expect(Object.keys(items).length).toEqual(0); + }); + + it('reloads modified file if auto-update is on', async () => { + var path = await makeTempFile(flagOnlyJson); + var fds = setupDataSource({ paths: [path], autoUpdate: true }); + await asyncify(fds.start); + + var items = await asyncify(cb => store.all(dataKind.segments, cb)); + expect(Object.keys(items).length).toEqual(0); + + await sleep(200); + await replaceFileContent(path, segmentOnlyJson); + await sleep(200); + + items = await asyncify(cb => store.all(dataKind.segments, cb)); + expect(Object.keys(items).length).toEqual(1); + }); + + it('evaluates simplified flag with client as expected', async () => { + var path = await makeTempFile(allPropertiesJson); + var factory = FileDataSource({ paths: [ path ]}); + var config = { updateProcessor: factory, sendEvents: false }; + var client = LaunchDarkly.init('dummy-key', config); + var user = { key: 'userkey' }; + + try { + await client.waitForInitialization(); + var result = await client.variation(flag2Key, user, ''); + expect(result).toEqual(flag2Value); + } finally { + client.close(); + } + }); + + it('evaluates full flag with client as expected', async () => { + var path = await makeTempFile(allPropertiesJson); + var factory = FileDataSource({ paths: [ path ]}); + var config = { updateProcessor: factory, sendEvents: false }; + var client = LaunchDarkly.init('dummy-key', config); + var user = { key: 'userkey' }; + + try { + await client.waitForInitialization(); + var result = await client.variation(flag1Key, user, ''); + expect(result).toEqual('on'); + } finally { + client.close(); + } + }); +}); \ No newline at end of file From de27e98fcaf5bb8765d958f95d8b4f2d61ac5591 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 14 Nov 2018 22:22:53 -0800 Subject: [PATCH 2/6] don't use async/await in client code (Node 6 compatibility) --- 
file_data_source.js | 73 +++++++++++++++++++++++++-------------------- 1 file changed, 41 insertions(+), 32 deletions(-) diff --git a/file_data_source.js b/file_data_source.js index a970df5..0f13cbb 100644 --- a/file_data_source.js +++ b/file_data_source.js @@ -51,45 +51,54 @@ function FileDataSource(options) { }); } - async function loadFile(path, allData) { - var data = await new Promise((resolve, reject) => + function loadFilePromise(path, allData) { + return new Promise((resolve, reject) => fs.readFile(path, 'utf8', (err, data) => - err ? reject(err) : resolve(data) - )); - var parsed = parseData(data) || {}; - var addItem = (kind, item) => { - if (!allData[kind.namespace]) { - allData[kind.namespace] = {}; - } - if (allData[kind.namespace][item.key]) { - throw new Error('found duplicate key: "' + item.key + '"'); - } else { - allData[kind.namespace][item.key] = item; + err ? reject(err) : resolve(data)) + ).then(data => { + var parsed = parseData(data) || {}; + var addItem = (kind, item) => { + if (!allData[kind.namespace]) { + allData[kind.namespace] = {}; + } + if (allData[kind.namespace][item.key]) { + throw new Error('found duplicate key: "' + item.key + '"'); + } else { + allData[kind.namespace][item.key] = item; + } } - } - Object.values(parsed.flags || {}).forEach(item => { - addItem(dataKind.features, item); - }); - Object.entries(parsed.flagValues || {}).forEach(e => { - addItem(dataKind.features, makeFlagWithValue(e[0], e[1])); - }); - Object.values(parsed.segments || {}).forEach(item => { - addItem(dataKind.segments, item); + Object.values(parsed.flags || {}).forEach(item => { + addItem(dataKind.features, item); + }); + Object.entries(parsed.flagValues || {}).forEach(e => { + addItem(dataKind.features, makeFlagWithValue(e[0], e[1])); + }); + Object.values(parsed.segments || {}).forEach(item => { + addItem(dataKind.segments, item); + }); }); } - async function loadAll() { + function loadAllPromise() { pendingUpdate = false; var allData = {}; + var p = Promise.resolve(); for (var i = 0; i < paths.length; i++) { - try { - await loadFile(paths[i], allData); - } catch (e) { - throw new Error('Unable to load flags: ' + e + ' [' + paths[i] + ']'); - } + (path => { + p = p.then(() => loadFilePromise(path, allData)) + .catch(e => { + throw new Error('Unable to load flags: ' + e + ' [' + path + ']'); + }); + })(paths[i]); } - await new Promise(resolve => featureStore.init(allData, resolve)); - inited = true; + return p.then(() => initStorePromise(allData)); + } + + function initStorePromise(data) { + return new Promise(resolve => featureStore.init(data, () => { + inited = true; + resolve(); + })); } function parseData(data) { @@ -109,7 +118,7 @@ function FileDataSource(options) { function startWatching() { var reload = () => { - loadAll().then(() => { + loadAllPromise().then(() => { logger && logger.warn('Reloaded flags from file data'); }).catch(() => {}); }; @@ -138,7 +147,7 @@ function FileDataSource(options) { startWatching(); } - loadAll().then(() => cb(), err => cb(err)); + loadAllPromise().then(() => cb(), err => cb(err)); }; fds.stop = () => { From 56e1c9e3790aecab5c0e9845cc28e78c9632c75d Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 14 Nov 2018 22:34:54 -0800 Subject: [PATCH 3/6] comment edit --- file_data_source.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/file_data_source.js b/file_data_source.js index 0f13cbb..4d5d432 100644 --- a/file_data_source.js +++ b/file_data_source.js @@ -11,7 +11,7 @@ var fs = require('fs'), property of your 
LaunchDarkly client configuration. In the options, set "paths" to the file paths of your data file(s): - var dataSource = FileDataSource({ paths: [ myFilePath ] }); + var dataSource = LaunchDarkly.FileDataSource({ paths: [ myFilePath ] }); var config = { updateProcessor: dataSource }; Flag data files can be either JSON or YAML. They contain an object with three possible From f82b59480a7d805011fd6aa96dfa089481cae1f0 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 9 Jan 2019 19:50:21 -0800 Subject: [PATCH 4/6] use async_utils --- test/file_data_source-test.js | 19 +++++-------------- 1 file changed, 5 insertions(+), 14 deletions(-) diff --git a/test/file_data_source-test.js b/test/file_data_source-test.js index 093c02f..bcafb01 100644 --- a/test/file_data_source-test.js +++ b/test/file_data_source-test.js @@ -1,6 +1,7 @@ var fs = require('fs'); var tmp = require('tmp'); var dataKind = require('../versioned_data_kind'); +const { asyncify, sleepAsync } = require('./async_utils'); var LaunchDarkly = require('../index'); var FileDataSource = require('../file_data_source'); @@ -118,16 +119,6 @@ describe('FileDataSource', function() { return a1; } - function asyncify(f) { - return new Promise(resolve => f(resolve)); - } - - function sleep(millis) { - return new Promise(resolve => { - setTimeout(resolve, millis); - }); - } - it('does not load flags prior to start', async () => { var path = await makeTempFile('{"flagValues":{"key":"value"}}'); var fds = setupDataSource({ paths: [path] }); @@ -207,9 +198,9 @@ describe('FileDataSource', function() { var items = await asyncify(cb => store.all(dataKind.segments, cb)); expect(Object.keys(items).length).toEqual(0); - await sleep(200); + await sleepAsync(200); await replaceFileContent(path, segmentOnlyJson); - await sleep(200); + await sleepAsync(200); items = await asyncify(cb => store.all(dataKind.segments, cb)); expect(Object.keys(items).length).toEqual(0); @@ -223,9 +214,9 @@ describe('FileDataSource', function() { var items = await asyncify(cb => store.all(dataKind.segments, cb)); expect(Object.keys(items).length).toEqual(0); - await sleep(200); + await sleepAsync(200); await replaceFileContent(path, segmentOnlyJson); - await sleep(200); + await sleepAsync(200); items = await asyncify(cb => store.all(dataKind.segments, cb)); expect(Object.keys(items).length).toEqual(1); From f6f037f58fa8100fc8fff7a0a5388a57cbeb7980 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 9 Jan 2019 19:53:41 -0800 Subject: [PATCH 5/6] misc test fixes --- file_data_source.js | 2 +- test/file_data_source-test.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/file_data_source.js b/file_data_source.js index 4d5d432..ef3a7c7 100644 --- a/file_data_source.js +++ b/file_data_source.js @@ -138,7 +138,7 @@ function FileDataSource(options) { watchers = []; } - fds = {}; + var fds = {}; fds.start = fn => { var cb = fn || (() => {}); diff --git a/test/file_data_source-test.js b/test/file_data_source-test.js index bcafb01..e5a9cc4 100644 --- a/test/file_data_source-test.js +++ b/test/file_data_source-test.js @@ -187,7 +187,7 @@ describe('FileDataSource', function() { await asyncify(fds.start); expect(fds.initialized()).toBe(false); - expect(await asyncify(store.initialized)).toBe(false); + expect(await asyncify(cb => store.initialized(cb))).toBe(false); }); it('does not reload modified file if auto-update is off', async () => { From ff2426b0398b4f69815028b4bfe1ac1076fe6523 Mon Sep 17 00:00:00 2001 From: Eli Bishop Date: Wed, 9 Jan 2019 20:00:57 -0800 Subject: 
[PATCH 6/6] ES2015 compatibility --- file_data_source.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/file_data_source.js b/file_data_source.js index ef3a7c7..0c19ce9 100644 --- a/file_data_source.js +++ b/file_data_source.js @@ -67,14 +67,14 @@ function FileDataSource(options) { allData[kind.namespace][item.key] = item; } } - Object.values(parsed.flags || {}).forEach(item => { - addItem(dataKind.features, item); + Object.keys(parsed.flags || {}).forEach(key => { + addItem(dataKind.features, parsed.flags[key]); }); - Object.entries(parsed.flagValues || {}).forEach(e => { - addItem(dataKind.features, makeFlagWithValue(e[0], e[1])); + Object.keys(parsed.flagValues || {}).forEach(key => { + addItem(dataKind.features, makeFlagWithValue(key, parsed.flagValues[key])); }); - Object.values(parsed.segments || {}).forEach(item => { - addItem(dataKind.segments, item); + Object.keys(parsed.segments || {}).forEach(key => { + addItem(dataKind.segments, parsed.segments[key]); }); }); }
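
Usage sketch (illustrative, not part of the patches above): the following shows how the data source added in this series might be wired into a client, based on the header comment in file_data_source.js and the tests in test/file_data_source-test.js. The require() path, the file path, the SDK key, and the flag/user keys are all hypothetical.

    var LaunchDarkly = require('ldclient-node'); // assumed package name for this SDK

    // Read flag data from a local JSON or YAML file instead of connecting to LaunchDarkly.
    // autoUpdate reloads the data whenever the file changes (via fs.watch()).
    var dataSource = LaunchDarkly.FileDataSource({
      paths: ['./flags.json'],  // hypothetical path to a flag data file
      autoUpdate: true
    });

    var client = LaunchDarkly.init('dummy-sdk-key', {
      updateProcessor: dataSource, // use the file data source instead of streaming/polling
      sendEvents: false            // keep a test environment from sending analytics events
    });

    client.waitForInitialization().then(function() {
      // 'my-flag-key' is a hypothetical key defined in ./flags.json
      return client.variation('my-flag-key', { key: 'example-user' }, false);
    }).then(function(value) {
      console.log('flag value:', value);
      client.close();
    });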