This repository has been archived by the owner on May 30, 2024. It is now read-only.

Merge pull request #118 from launchdarkly/eb/ch29197/dependency-ordering
implement dependency ordering for feature store data
eli-darkly authored Jan 12, 2019
2 parents 5f013fd + b6f11f7 commit 25fab20
Showing 4 changed files with 193 additions and 27 deletions.
119 changes: 104 additions & 15 deletions caching_store_wrapper.js
@@ -15,9 +15,50 @@ var initializedKey = "$checkedInit";
/*
CachingStoreWrapper provides commonly needed functionality for implementations of an
SDK feature store. The underlyingStore must implement a simplified interface for
querying and updating the data store (see redis_feature_store.js for an example)
while CachingStoreWrapper adds optional caching of stored items and of the
initialized state, and ensures that asynchronous operations are serialized correctly.
querying and updating the data store, while CachingStoreWrapper adds optional caching of
stored items and of the initialized state, and ensures that asynchronous operations are
serialized correctly.
The underlyingStore object must have the following methods:
- getInternal(kind, key, callback): Queries a single item from the data store. The kind
parameter is an object with a "namespace" property that uniquely identifies the
category of data (features, segments), and the key is the unique key within that
category. It calls the callback with the resulting item as a parameter, or, if no such
item exists, null/undefined. It should not attempt to filter out any items, nor to
cache any items.
- getAllInternal(kind, callback): Queries all items in a given category from the data
store, calling the callback with an object where each key is the item's key and each
value is the item. It should not attempt to filter out any items, nor to cache any items.
- upsertInternal(kind, newItem, callback): Adds or updates a single item. If an item with
the same key already exists (in the category specified by "kind"), it should update it
only if the new item's "version" property is greater than the old one. On completion, it
should call the callback with the final state of the item, i.e. if the update succeeded
then it passes the item that was passed in, and if the update failed due to the version
check then it passes the item that is currently in the data store (this ensures that
caching works correctly). Note that deletions are implemented by upserting a placeholder
item with the property "deleted: true".
- initializedInternal(callback): Tests whether the data store contains a complete data
set, meaning that initInternal() or initOrderedInternal() has been called at least once.
In a shared data store, it should be able to detect this even if the store was
initialized by a different process, i.e. the test should be based on looking at what is
in the data store. The method does not need to worry about caching this value;
CachingStoreWrapper will only call it when necessary. Call callback with true or false.
- initInternal(allData, callback): Replaces the entire contents of the data store. This
should be done atomically (i.e. within a transaction); if that isn't possible, use
initOrderedInternal() instead. The allData parameter is an object where each key is one
of the "kind" objects, and each value is an object with the keys and values of all
items of that kind. Call callback with no parameters when done.
OR:
- initOrderedInternal(collections, callback): Replaces the entire contents of the data
store. The collections parameter is an array of objects, each of which has "kind" and
"items" properties; "items" is an array of data items. Each array should be processed
in the specified order. The store should delete any obsolete items only after writing
all of the items provided.
*/
function CachingStoreWrapper(underlyingStore, ttl) {
var cache = ttl ? new NodeCache({ stdTTL: ttl }) : null;
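For illustration only — this sketch is not part of the commit, and ExampleInMemoryCore and everything inside it are hypothetical names — a minimal in-memory core satisfying the contract described in the comment above might look like the following. A real implementation would talk to an external database and leave all caching and filtering to the wrapper.

// Hypothetical example, not part of this diff: a minimal core for CachingStoreWrapper.
function ExampleInMemoryCore() {
  var allData = {};   // { namespace: { key: item } }
  var inited = false;
  var core = {};

  core.getInternal = function(kind, key, callback) {
    var items = allData[kind.namespace] || {};
    callback(items[key] || null);      // no filtering or caching here; the wrapper does that
  };

  core.getAllInternal = function(kind, callback) {
    callback(allData[kind.namespace] || {});
  };

  core.upsertInternal = function(kind, newItem, callback) {
    var items = allData[kind.namespace] = allData[kind.namespace] || {};
    var oldItem = items[newItem.key];
    if (oldItem && oldItem.version >= newItem.version) {
      callback(oldItem);               // version check failed; report what is really stored
    } else {
      items[newItem.key] = newItem;    // deletions arrive as { deleted: true } placeholders
      callback(newItem);
    }
  };

  core.initializedInternal = function(callback) {
    callback(inited);                  // a shared store would inspect the store itself instead
  };

  core.initInternal = function(newData, callback) {
    allData = newData;                 // atomic enough for a single in-memory object
    inited = true;
    callback();
  };

  return core;
}

// Usage: wrap the core, e.g. with a 30-second cache TTL (0 disables caching):
// var store = new CachingStoreWrapper(ExampleInMemoryCore(), 30);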
@@ -28,28 +69,36 @@ function CachingStoreWrapper(underlyingStore, ttl) {

this.init = function(allData, cb) {
queue.enqueue(function(cb) {
underlyingStore.initInternal(allData, function() {
// The underlying store can either implement initInternal, which receives unordered data,
// or initOrderedInternal, which receives ordered data (for implementations that cannot do
// an atomic update and therefore need to be told what order to do the operations in).
var afterInit = function() {
initialized = true;

if (cache) {
cache.del(initializedKey);
cache.flushAll();

// populate cache with initial data
for (var kindNamespace in allData) {
if (Object.hasOwnProperty.call(allData, kindNamespace)) {
var kind = dataKind[kindNamespace];
var items = allData[kindNamespace];
cache.set(allCacheKey(kind), items);
for (var key in items) {
cache.set(cacheKey(kind, key), items[key]);
}
}
}
Object.keys(allData).forEach(function(kindNamespace) {
var kind = dataKind[kindNamespace];
var items = allData[kindNamespace];
cache.set(allCacheKey(kind), items);
Object.keys(items).forEach(function(key) {
cache.set(cacheKey(kind, key), items[key]);
});
});
}

cb();
});
};

if (underlyingStore.initOrderedInternal) {
var orderedData = sortAllCollections(allData);
underlyingStore.initOrderedInternal(orderedData, afterInit);
} else {
underlyingStore.initInternal(allData, afterInit);
}
}, [], cb);
};

@@ -141,6 +190,46 @@ function CachingStoreWrapper(underlyingStore, ttl) {
cache.del(allCacheKey(dataKind[kindNamespace]));
}
}

// This and the next function are used by init() to provide the best ordering of items
// to write to the underlying store, if the store supports the initOrderedInternal method.
function sortAllCollections(dataMap) {
var result = [];
Object.keys(dataMap).forEach(function(kindNamespace) {
var kind = dataKind[kindNamespace];
result.push({ kind: kind, items: sortCollection(kind, dataMap[kindNamespace]) });
});
var kindPriority = function(kind) {
return kind.priority === undefined ? kind.namespace.length : kind.priority;
};
result.sort(function(i1, i2) {
return kindPriority(i1.kind) - kindPriority(i2.kind);
});
return result;
}

function sortCollection(kind, itemsMap) {
var itemsOut = [];
var remainingItems = new Set(Object.keys(itemsMap));
// Depth-first traversal: add an item's prerequisites (recursively) before the item itself.
var addWithDependenciesFirst = function(key) {
if (remainingItems.has(key)) {
remainingItems.delete(key);
var item = itemsMap[key];
if (kind.getDependencyKeys) {
kind.getDependencyKeys(item).forEach(function(prereqKey) {
addWithDependenciesFirst(prereqKey);
});
}
itemsOut.push(item);
}
};
while (remainingItems.size > 0) {
// pick an arbitrary item that hasn't been processed yet
var key = remainingItems.values().next().value;
addWithDependenciesFirst(key);
}
return itemsOut;
}
}

module.exports = CachingStoreWrapper;
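As a worked example of what sortAllCollections produces (the keys below are invented; the shape mirrors the test in test/caching_store_wrapper-test.js): data passed to init() is keyed by kind namespace, and a store that implements initOrderedInternal receives an array ordered by kind priority — segments (priority 0) before features (priority 1) — with each kind's items arranged so that prerequisites precede the flags that depend on them.

// Hypothetical input to wrapper.init(), keyed by kind namespace:
var exampleData = {
  features: {
    a: { key: 'a', version: 1, prerequisites: [ { key: 'b' } ] },
    b: { key: 'b', version: 1 }
  },
  segments: {
    s1: { key: 's1', version: 1 }
  }
};

// What the underlying store's initOrderedInternal(collections, cb) then receives:
// [
//   { kind: segments, items: [ { key: 's1', ... } ] },
//   { kind: features, items: [ { key: 'b', ... }, { key: 'a', ... } ] }  // 'b' before 'a'
// ]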
15 changes: 12 additions & 3 deletions feature_store.js
@@ -1,8 +1,17 @@
var dataKind = require('./versioned_data_kind');

// An in-memory store with an async interface.
// It's async as other implementations (e.g. the RedisFeatureStore)
// may be async, and we want to retain interface compatibility.
// The default in-memory implementation of a feature store, which holds feature flags and
// other related data received from LaunchDarkly.
//
// Other implementations of the same interface can be used by passing them in the featureStore
// property of the client configuration (that's why the interface here is async, even though
// the in-memory store doesn't do anything asynchronous: other implementations may need to be).
// The interface is defined by LDFeatureStore in index.d.ts. There is a
// Redis-backed implementation in RedisFeatureStore; for other options, see
// [https://docs.launchdarkly.com/v2.0/docs/using-a-persistent-feature-store].
//
// Additional implementations should use CachingStoreWrapper if possible.

var noop = function(){};
function InMemoryFeatureStore() {
var store = {allData:{}};
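As a usage sketch of the featureStore option mentioned in the comment above (the module name ldclient-node and the RedisFeatureStore call reflect the SDK of this era and are assumptions, not something introduced by this commit):

// Hypothetical configuration examples; names outside this diff are assumptions.
var LaunchDarkly = require('ldclient-node');

// Default: the in-memory store defined in this file is used automatically.
var client = LaunchDarkly.init('YOUR_SDK_KEY');

// Alternative: supply a persistent store through the featureStore property.
var redisStore = LaunchDarkly.RedisFeatureStore();
var clientWithRedis = LaunchDarkly.init('YOUR_SDK_KEY', { featureStore: redisStore });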
74 changes: 67 additions & 7 deletions test/caching_store_wrapper-test.js
@@ -1,5 +1,6 @@
var CachingStoreWrapper = require('../caching_store_wrapper');
var features = require('../versioned_data_kind').features;
var segments = require('../versioned_data_kind').segments;
const { asyncify, sleepAsync } = require('./async_utils');

function MockCore() {
@@ -58,23 +59,38 @@ function MockCore() {
return c;
}

function MockOrderedCore() {
const c = {
data: { features: {} },

initOrderedInternal: function(newData, cb) {
c.data = newData;
cb();
},
// don't bother mocking the rest of the stuff since the wrapper behaves identically except for init
};
return c;
}

const cacheSeconds = 15;

function runCachedAndUncachedTests(name, testFn) {
function runCachedAndUncachedTests(name, testFn, coreFn) {
var makeCore = coreFn ? coreFn : MockCore;
describe(name, function() {
const core1 = MockCore();
const core1 = makeCore();
const wrapper1 = new CachingStoreWrapper(core1, cacheSeconds);
it('cached', async () => await testFn(wrapper1, core1, true));
it('cached', async () => await testFn(wrapper1, core1, true), 1000);

const core2 = MockCore();
const core2 = makeCore();
const wrapper2 = new CachingStoreWrapper(core2, 0);
it('uncached', async () => await testFn(wrapper2, core2, false));
it('uncached', async () => await testFn(wrapper2, core2, false), 1000);
});
}

function runCachedTestOnly(name, testFn) {
function runCachedTestOnly(name, testFn, coreFn) {
var makeCore = coreFn ? coreFn : MockCore;
it(name, async () => {
const core = MockCore();
const core = makeCore();
const wrapper = new CachingStoreWrapper(core, cacheSeconds);
await testFn(wrapper, core);
});
@@ -371,4 +387,48 @@ describe('CachingStoreWrapper', function() {
expect(core.closed).toBe(true);
});
});

describe('core that uses initOrdered()', function() {
runCachedAndUncachedTests('receives properly ordered data for init', async (wrapper, core) => {
var dependencyOrderingTestData = {};
dependencyOrderingTestData[features.namespace] = {
a: { key: "a", prerequisites: [ { key: "b" }, { key: "c" } ] },
b: { key: "b", prerequisites: [ { key: "c" }, { key: "e" } ] },
c: { key: "c" },
d: { key: "d" },
e: { key: "e" },
f: { key: "f" }
};
dependencyOrderingTestData[segments.namespace] = {
o: { key: "o" }
};
await asyncify(cb => wrapper.init(dependencyOrderingTestData, cb));

var receivedData = core.data;
expect(receivedData.length).toEqual(2);

// Segments should always come first
expect(receivedData[0].kind).toEqual(segments);
expect(receivedData[0].items.length).toEqual(1);

// Features should be ordered so that a flag always appears after its prerequisites, if any
expect(receivedData[1].kind).toEqual(features);
var featuresMap = dependencyOrderingTestData[features.namespace];
var featuresList = receivedData[1].items;
expect(featuresList.length).toEqual(Object.keys(featuresMap).length);
for (var itemIndex in featuresList) {
var item = featuresList[itemIndex];
(item.prerequisites || []).forEach(function(prereq) {
var prereqKey = prereq.key;
var prereqItem = featuresMap[prereqKey];
var prereqIndex = featuresList.indexOf(prereqItem);
if (prereqIndex > itemIndex) {
var allKeys = featuresList.map(f => f.key);
throw new Error(item.key + " depends on " + prereqKey + ", but " + item.key +
" was listed first; keys in order are [" + allKeys.join(", ") + "]");
}
});
}
}, MockOrderedCore);
});
});
12 changes: 10 additions & 2 deletions versioned_data_kind.js
@@ -12,13 +12,21 @@
var features = {
namespace: 'features',
streamApiPath: '/flags/',
requestPath: '/sdk/latest-flags/'
requestPath: '/sdk/latest-flags/',
priority: 1,
getDependencyKeys: function(flag) {
if (!flag.prerequisites || !flag.prerequisites.length) {
return [];
}
return flag.prerequisites.map(function(p) { return p.key; });
}
};

var segments = {
namespace: 'segments',
streamApiPath: '/segments/',
requestPath: '/sdk/latest-segments/'
requestPath: '/sdk/latest-segments/',
priority: 0
};
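To make the new properties concrete (the flag objects below are invented for illustration): sortAllCollections in caching_store_wrapper.js uses priority to order whole kinds — 0 for segments, 1 for features, so segments are always written first — and getDependencyKeys to order items within the features kind.

// Hypothetical calls showing how the properties above behave:
// features.getDependencyKeys({ key: 'a', prerequisites: [ { key: 'b' }, { key: 'c' } ] })
//   => ['b', 'c']
// features.getDependencyKeys({ key: 'd' })
//   => []
// segments defines no getDependencyKeys, so segment items are written in arbitrary order.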

module.exports = {
