diff --git a/src/main/java/com/launchdarkly/client/Components.java b/src/main/java/com/launchdarkly/client/Components.java index 65c993869..e673b8f20 100644 --- a/src/main/java/com/launchdarkly/client/Components.java +++ b/src/main/java/com/launchdarkly/client/Components.java @@ -1,5 +1,8 @@ package com.launchdarkly.client; +import com.launchdarkly.client.integrations.PersistentDataStoreBuilder; +import com.launchdarkly.client.interfaces.PersistentDataStoreFactory; + import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -17,28 +20,79 @@ public abstract class Components { private static final UpdateProcessorFactory nullUpdateProcessorFactory = new NullUpdateProcessorFactory(); /** - * Returns a factory for the default in-memory implementation of {@link FeatureStore}. + * Returns a factory for the default in-memory implementation of a data store. + *

+ * Note that the interface is still named {@link FeatureStoreFactory}, but in a future version it
+ * will be renamed to {@code DataStoreFactory}.
+ *
+ * @return a factory object
+ * @see LDConfig.Builder#dataStore(FeatureStoreFactory)
+ * @since 4.11.0
+ */
+ public static FeatureStoreFactory inMemoryDataStore() {
+ return inMemoryFeatureStoreFactory;
+ }
+
+ /**
+ * Returns a configurable factory for some implementation of a persistent data store.
+ *

+ * This method is used in conjunction with another factory object provided by specific components
+ * such as the Redis integration. The latter provides builder methods for options that are specific
+ * to that integration, while the {@link PersistentDataStoreBuilder} provides options that are
+ * applicable to any persistent data store (such as caching). For example:
+ *
+ *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .dataStore(
+   *             Components.persistentDataStore(
+   *                 Redis.dataStore().uri(URI.create("redis://my-redis-host"))
+   *             ).cacheSeconds(15)
+   *         )
+   *         .build();
+   * 
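For context, a minimal end-to-end sketch of putting this configuration to use follows. It reuses the LDClient(String, LDConfig) constructor shown later in this diff; the LDUser and boolVariation calls are the SDK's existing evaluation API, assumed unchanged here, and the SDK key and flag key are placeholders.

    LDConfig config = new LDConfig.Builder()
        .dataStore(
            Components.persistentDataStore(
                Redis.dataStore().uri(URI.create("redis://my-redis-host"))
            ).cacheSeconds(15)
        )
        .build();
    // Construct the client with that configuration and evaluate a flag for a user.
    LDClient client = new LDClient("YOUR_SDK_KEY", config);
    boolean enabled = client.boolVariation("my-flag-key", new LDUser("example-user-key"), false);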
+ * + * See {@link PersistentDataStoreBuilder} for more on how this method is used. + * + * @param storeFactory the factory/builder for the specific kind of persistent data store + * @return a {@link PersistentDataStoreBuilder} + * @see LDConfig.Builder#dataStore(FeatureStoreFactory) + * @see com.launchdarkly.client.integrations.Redis + * @since 4.11.0 + */ + public static PersistentDataStoreBuilder persistentDataStore(PersistentDataStoreFactory storeFactory) { + return new PersistentDataStoreBuilder(storeFactory); + } + + /** + * Deprecated name for {@link #inMemoryDataStore()}. * @return a factory object + * @deprecated Use {@link #inMemoryDataStore()}. */ + @Deprecated public static FeatureStoreFactory inMemoryFeatureStore() { return inMemoryFeatureStoreFactory; } /** - * Returns a factory with builder methods for creating a Redis-backed implementation of {@link FeatureStore}, - * using {@link RedisFeatureStoreBuilder#DEFAULT_URI}. + * Deprecated name for {@link com.launchdarkly.client.integrations.Redis#dataStore()}. * @return a factory/builder object + * @deprecated Use {@link #persistentDataStore(PersistentDataStoreFactory)} with + * {@link com.launchdarkly.client.integrations.Redis#dataStore()}. */ + @Deprecated public static RedisFeatureStoreBuilder redisFeatureStore() { return new RedisFeatureStoreBuilder(); } /** - * Returns a factory with builder methods for creating a Redis-backed implementation of {@link FeatureStore}, - * specifying the Redis URI. + * Deprecated name for {@link com.launchdarkly.client.integrations.Redis#dataStore()}. * @param redisUri the URI of the Redis host * @return a factory/builder object + * @deprecated Use {@link #persistentDataStore(PersistentDataStoreFactory)} with + * {@link com.launchdarkly.client.integrations.Redis#dataStore()} and + * {@link com.launchdarkly.client.integrations.RedisDataStoreBuilder#uri(URI)}. */ + @Deprecated public static RedisFeatureStoreBuilder redisFeatureStore(URI redisUri) { return new RedisFeatureStoreBuilder(redisUri); } @@ -48,6 +102,7 @@ public static RedisFeatureStoreBuilder redisFeatureStore(URI redisUri) { * forwards all analytics events to LaunchDarkly (unless the client is offline or you have * set {@link LDConfig.Builder#sendEvents(boolean)} to {@code false}). * @return a factory object + * @see LDConfig.Builder#eventProcessorFactory(EventProcessorFactory) */ public static EventProcessorFactory defaultEventProcessor() { return defaultEventProcessorFactory; @@ -57,17 +112,34 @@ public static EventProcessorFactory defaultEventProcessor() { * Returns a factory for a null implementation of {@link EventProcessor}, which will discard * all analytics events and not send them to LaunchDarkly, regardless of any other configuration. * @return a factory object + * @see LDConfig.Builder#eventProcessorFactory(EventProcessorFactory) */ public static EventProcessorFactory nullEventProcessor() { return nullEventProcessorFactory; } /** - * Returns a factory for the default implementation of {@link UpdateProcessor}, which receives - * feature flag data from LaunchDarkly using either streaming or polling as configured (or does - * nothing if the client is offline, or in LDD mode). + * Returns a factory for the default implementation of the component for receiving feature flag data + * from LaunchDarkly. Based on your configuration, this implementation uses either streaming or + * polling, or does nothing if the client is offline, or in LDD mode. 
+ * + * Note that the interface is still named {@link UpdateProcessorFactory}, but in a future version it + * will be renamed to {@code DataSourceFactory}. + * * @return a factory object + * @since 4.11.0 + * @see LDConfig.Builder#dataSource(UpdateProcessorFactory) */ + public static UpdateProcessorFactory defaultDataSource() { + return defaultUpdateProcessorFactory; + } + + /** + * Deprecated name for {@link #defaultDataSource()}. + * @return a factory object + * @deprecated Use {@link #defaultDataSource()}. + */ + @Deprecated public static UpdateProcessorFactory defaultUpdateProcessor() { return defaultUpdateProcessorFactory; } @@ -75,8 +147,24 @@ public static UpdateProcessorFactory defaultUpdateProcessor() { /** * Returns a factory for a null implementation of {@link UpdateProcessor}, which does not * connect to LaunchDarkly, regardless of any other configuration. + * + * Note that the interface is still named {@link UpdateProcessorFactory}, but in a future version it + * will be renamed to {@code DataSourceFactory}. + * + * @return a factory object + * @since 4.11.0 + * @see LDConfig.Builder#dataSource(UpdateProcessorFactory) + */ + public static UpdateProcessorFactory nullDataSource() { + return nullUpdateProcessorFactory; + } + + /** + * Deprecated name for {@link #nullDataSource()}. * @return a factory object + * @deprecated Use {@link #nullDataSource()}. */ + @Deprecated public static UpdateProcessorFactory nullUpdateProcessor() { return nullUpdateProcessorFactory; } @@ -109,6 +197,7 @@ private static final class DefaultUpdateProcessorFactory implements UpdateProces // Note, logger uses LDClient class name for backward compatibility private static final Logger logger = LoggerFactory.getLogger(LDClient.class); + @SuppressWarnings("deprecation") @Override public UpdateProcessor createUpdateProcessor(String sdkKey, LDConfig config, FeatureStore featureStore) { if (config.offline) { @@ -132,6 +221,7 @@ public UpdateProcessor createUpdateProcessor(String sdkKey, LDConfig config, Fea } private static final class NullUpdateProcessorFactory implements UpdateProcessorFactory { + @SuppressWarnings("deprecation") @Override public UpdateProcessor createUpdateProcessor(String sdkKey, LDConfig config, FeatureStore featureStore) { return new UpdateProcessor.NullUpdateProcessor(); diff --git a/src/main/java/com/launchdarkly/client/EvaluationReason.java b/src/main/java/com/launchdarkly/client/EvaluationReason.java index 752744842..1b48346f7 100644 --- a/src/main/java/com/launchdarkly/client/EvaluationReason.java +++ b/src/main/java/com/launchdarkly/client/EvaluationReason.java @@ -1,14 +1,5 @@ package com.launchdarkly.client; -import com.google.gson.Gson; -import com.google.gson.TypeAdapter; -import com.google.gson.TypeAdapterFactory; -import com.google.gson.annotations.JsonAdapter; -import com.google.gson.reflect.TypeToken; -import com.google.gson.stream.JsonReader; -import com.google.gson.stream.JsonWriter; - -import java.io.IOException; import java.util.Objects; import static com.google.common.base.Preconditions.checkNotNull; diff --git a/src/main/java/com/launchdarkly/client/FeatureStoreCacheConfig.java b/src/main/java/com/launchdarkly/client/FeatureStoreCacheConfig.java index 0c3f4fd82..010886791 100644 --- a/src/main/java/com/launchdarkly/client/FeatureStoreCacheConfig.java +++ b/src/main/java/com/launchdarkly/client/FeatureStoreCacheConfig.java @@ -1,6 +1,7 @@ package com.launchdarkly.client; import com.google.common.cache.CacheBuilder; +import 
com.launchdarkly.client.integrations.PersistentDataStoreBuilder; import java.util.Objects; import java.util.concurrent.TimeUnit; @@ -25,7 +26,9 @@ * * @see RedisFeatureStoreBuilder#caching(FeatureStoreCacheConfig) * @since 4.6.0 + * @deprecated This has been superseded by the {@link PersistentDataStoreBuilder} interface. */ +@Deprecated public final class FeatureStoreCacheConfig { /** * The default TTL, in seconds, used by {@link #DEFAULT}. @@ -88,7 +91,40 @@ public enum StaleValuesPolicy { * See: CacheBuilder for * more specific information on cache semantics. */ - REFRESH_ASYNC + REFRESH_ASYNC; + + /** + * Used internally for backward compatibility. + * @return the equivalent enum value + * @since 4.11.0 + */ + public PersistentDataStoreBuilder.StaleValuesPolicy toNewEnum() { + switch (this) { + case REFRESH: + return PersistentDataStoreBuilder.StaleValuesPolicy.REFRESH; + case REFRESH_ASYNC: + return PersistentDataStoreBuilder.StaleValuesPolicy.REFRESH_ASYNC; + default: + return PersistentDataStoreBuilder.StaleValuesPolicy.EVICT; + } + } + + /** + * Used internally for backward compatibility. + * @param policy the enum value in the new API + * @return the equivalent enum value + * @since 4.11.0 + */ + public static StaleValuesPolicy fromNewEnum(PersistentDataStoreBuilder.StaleValuesPolicy policy) { + switch (policy) { + case REFRESH: + return StaleValuesPolicy.REFRESH; + case REFRESH_ASYNC: + return StaleValuesPolicy.REFRESH_ASYNC; + default: + return StaleValuesPolicy.EVICT; + } + } }; /** diff --git a/src/main/java/com/launchdarkly/client/InMemoryFeatureStore.java b/src/main/java/com/launchdarkly/client/InMemoryFeatureStore.java index b8db96e3a..05ad4bbb2 100644 --- a/src/main/java/com/launchdarkly/client/InMemoryFeatureStore.java +++ b/src/main/java/com/launchdarkly/client/InMemoryFeatureStore.java @@ -9,7 +9,7 @@ import java.util.concurrent.locks.ReentrantReadWriteLock; /** - * A thread-safe, versioned store for {@link FeatureFlag} objects and related data based on a + * A thread-safe, versioned store for feature flags and related data based on a * {@link HashMap}. This is the default implementation of {@link FeatureStore}. */ public class InMemoryFeatureStore implements FeatureStore { diff --git a/src/main/java/com/launchdarkly/client/LDClient.java b/src/main/java/com/launchdarkly/client/LDClient.java index 03abbee3c..676c518ed 100644 --- a/src/main/java/com/launchdarkly/client/LDClient.java +++ b/src/main/java/com/launchdarkly/client/LDClient.java @@ -72,8 +72,8 @@ public LDClient(String sdkKey, LDConfig config) { // of instances that we created ourselves from a factory. this.shouldCloseFeatureStore = false; } else { - FeatureStoreFactory factory = config.featureStoreFactory == null ? - Components.inMemoryFeatureStore() : config.featureStoreFactory; + FeatureStoreFactory factory = config.dataStoreFactory == null ? + Components.inMemoryDataStore() : config.dataStoreFactory; store = factory.createFeatureStore(); this.shouldCloseFeatureStore = true; } @@ -83,8 +83,8 @@ public LDClient(String sdkKey, LDConfig config) { Components.defaultEventProcessor() : config.eventProcessorFactory; this.eventProcessor = epFactory.createEventProcessor(sdkKey, config); - UpdateProcessorFactory upFactory = config.updateProcessorFactory == null ? - Components.defaultUpdateProcessor() : config.updateProcessorFactory; + UpdateProcessorFactory upFactory = config.dataSourceFactory == null ? 
+ Components.defaultDataSource() : config.dataSourceFactory; this.updateProcessor = upFactory.createUpdateProcessor(sdkKey, config, featureStore); Future startFuture = updateProcessor.start(); if (config.startWaitMillis > 0L) { diff --git a/src/main/java/com/launchdarkly/client/LDConfig.java b/src/main/java/com/launchdarkly/client/LDConfig.java index 38ff7b475..bc73cfb14 100644 --- a/src/main/java/com/launchdarkly/client/LDConfig.java +++ b/src/main/java/com/launchdarkly/client/LDConfig.java @@ -56,9 +56,9 @@ public final class LDConfig { final Authenticator proxyAuthenticator; final boolean stream; final FeatureStore deprecatedFeatureStore; - final FeatureStoreFactory featureStoreFactory; + final FeatureStoreFactory dataStoreFactory; final EventProcessorFactory eventProcessorFactory; - final UpdateProcessorFactory updateProcessorFactory; + final UpdateProcessorFactory dataSourceFactory; final boolean useLdd; final boolean offline; final boolean allAttributesPrivate; @@ -88,9 +88,9 @@ protected LDConfig(Builder builder) { this.streamURI = builder.streamURI; this.stream = builder.stream; this.deprecatedFeatureStore = builder.featureStore; - this.featureStoreFactory = builder.featureStoreFactory; + this.dataStoreFactory = builder.dataStoreFactory; this.eventProcessorFactory = builder.eventProcessorFactory; - this.updateProcessorFactory = builder.updateProcessorFactory; + this.dataSourceFactory = builder.dataSourceFactory; this.useLdd = builder.useLdd; this.offline = builder.offline; this.allAttributesPrivate = builder.allAttributesPrivate; @@ -155,9 +155,9 @@ public static class Builder { private boolean sendEvents = true; private long pollingIntervalMillis = MIN_POLLING_INTERVAL_MILLIS; private FeatureStore featureStore = null; - private FeatureStoreFactory featureStoreFactory = Components.inMemoryFeatureStore(); + private FeatureStoreFactory dataStoreFactory = Components.inMemoryDataStore(); private EventProcessorFactory eventProcessorFactory = Components.defaultEventProcessor(); - private UpdateProcessorFactory updateProcessorFactory = Components.defaultUpdateProcessor(); + private UpdateProcessorFactory dataSourceFactory = Components.defaultDataSource(); private long startWaitMillis = DEFAULT_START_WAIT_MILLIS; private int samplingInterval = DEFAULT_SAMPLING_INTERVAL; private long reconnectTimeMillis = DEFAULT_RECONNECT_TIME_MILLIS; @@ -207,6 +207,24 @@ public Builder streamURI(URI streamURI) { return this; } + /** + * Sets the implementation of the data store to be used for holding feature flags and + * related data received from LaunchDarkly, using a factory object. The default is + * {@link Components#inMemoryDataStore()}; for database integrations, use + * {@link Components#persistentDataStore(com.launchdarkly.client.interfaces.PersistentDataStoreFactory)}. + *

+ * Note that the interface is still called {@link FeatureStoreFactory}, but in a future version + * it will be renamed to {@code DataStoreFactory}. + * + * @param factory the factory object + * @return the builder + * @since 4.11.0 + */ + public Builder dataStore(FeatureStoreFactory factory) { + this.dataStoreFactory = factory; + return this; + } + /** * Sets the implementation of {@link FeatureStore} to be used for holding feature flags and * related data received from LaunchDarkly. The default is {@link InMemoryFeatureStore}, but @@ -221,26 +239,37 @@ public Builder featureStore(FeatureStore store) { } /** - * Sets the implementation of {@link FeatureStore} to be used for holding feature flags and - * related data received from LaunchDarkly, using a factory object. The default is - * {@link Components#inMemoryFeatureStore()}, but you may use {@link Components#redisFeatureStore()} - * or a custom implementation. + * Deprecated name for {@link #dataStore(FeatureStoreFactory)}. * @param factory the factory object * @return the builder * @since 4.0.0 + * @deprecated Use {@link #dataStore(FeatureStoreFactory)}. */ + @Deprecated public Builder featureStoreFactory(FeatureStoreFactory factory) { - this.featureStoreFactory = factory; + this.dataStoreFactory = factory; return this; } - + /** * Sets the implementation of {@link EventProcessor} to be used for processing analytics events, * using a factory object. The default is {@link Components#defaultEventProcessor()}, but * you may choose to use a custom implementation (for instance, a test fixture). * @param factory the factory object * @return the builder + * @since 4.11.0 + */ + public Builder eventProcessor(EventProcessorFactory factory) { + this.eventProcessorFactory = factory; + return this; + } + + /** + * Deprecated name for {@link #eventProcessor(EventProcessorFactory)}. + * @param factory the factory object + * @return the builder * @since 4.0.0 + * @deprecated Use {@link #eventProcessor(EventProcessorFactory)}. */ public Builder eventProcessorFactory(EventProcessorFactory factory) { this.eventProcessorFactory = factory; @@ -248,15 +277,32 @@ public Builder eventProcessorFactory(EventProcessorFactory factory) { } /** - * Sets the implementation of {@link UpdateProcessor} to be used for receiving feature flag data, - * using a factory object. The default is {@link Components#defaultUpdateProcessor()}, but + * Sets the implementation of the component that receives feature flag data from LaunchDarkly, + * using a factory object. The default is {@link Components#defaultDataSource()}, but * you may choose to use a custom implementation (for instance, a test fixture). + * + * Note that the interface is still named {@link UpdateProcessorFactory}, but in a future version + * it will be renamed to {@code DataSourceFactory}. + * + * @param factory the factory object + * @return the builder + * @since 4.11.0 + */ + public Builder dataSource(UpdateProcessorFactory factory) { + this.dataSourceFactory = factory; + return this; + } + + /** + * Deprecated name for {@link #dataSource(UpdateProcessorFactory)}. * @param factory the factory object * @return the builder * @since 4.0.0 + * @deprecated Use {@link #dataSource(UpdateProcessorFactory)}. 
*/ + @Deprecated public Builder updateProcessorFactory(UpdateProcessorFactory factory) { - this.updateProcessorFactory = factory; + this.dataSourceFactory = factory; return this; } diff --git a/src/main/java/com/launchdarkly/client/RedisFeatureStore.java b/src/main/java/com/launchdarkly/client/RedisFeatureStore.java index 55091fa40..ebd36913c 100644 --- a/src/main/java/com/launchdarkly/client/RedisFeatureStore.java +++ b/src/main/java/com/launchdarkly/client/RedisFeatureStore.java @@ -1,82 +1,70 @@ package com.launchdarkly.client; -import com.google.common.annotations.VisibleForTesting; import com.google.common.cache.CacheStats; import com.launchdarkly.client.utils.CachingStoreWrapper; -import com.launchdarkly.client.utils.FeatureStoreCore; - -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import java.io.IOException; -import java.util.HashMap; -import java.util.List; import java.util.Map; -import static com.launchdarkly.client.utils.FeatureStoreHelpers.marshalJson; -import static com.launchdarkly.client.utils.FeatureStoreHelpers.unmarshalJson; - -import redis.clients.jedis.Jedis; -import redis.clients.jedis.JedisPool; -import redis.clients.jedis.JedisPoolConfig; -import redis.clients.jedis.Transaction; -import redis.clients.util.JedisURIHelper; - /** - * An implementation of {@link FeatureStore} backed by Redis. Also - * supports an optional in-memory cache configuration that can be used to improve performance. + * Deprecated implementation class for the Redis-based persistent data store. + *

+ * Instead of referencing this class directly, use {@link com.launchdarkly.client.integrations.Redis#dataStore()} to obtain a builder object. + * + * @deprecated Use {@link com.launchdarkly.client.integrations.Redis#dataStore()} */ +@Deprecated public class RedisFeatureStore implements FeatureStore { - private static final Logger logger = LoggerFactory.getLogger(RedisFeatureStore.class); - - // Note that we could avoid the indirection of delegating everything to CachingStoreWrapper if we - // simply returned the wrapper itself as the FeatureStore; however, for historical reasons we can't, - // because we have already exposed the RedisFeatureStore type. - private final CachingStoreWrapper wrapper; - private final Core core; + // The actual implementation is now in the com.launchdarkly.integrations package. This class remains + // visible for backward compatibility, but simply delegates to an instance of the underlying store. + + private final FeatureStore wrappedStore; @Override public void init(Map, Map> allData) { - wrapper.init(allData); + wrappedStore.init(allData); } @Override public T get(VersionedDataKind kind, String key) { - return wrapper.get(kind, key); + return wrappedStore.get(kind, key); } @Override public Map all(VersionedDataKind kind) { - return wrapper.all(kind); + return wrappedStore.all(kind); } @Override public void upsert(VersionedDataKind kind, T item) { - wrapper.upsert(kind, item); + wrappedStore.upsert(kind, item); } @Override public void delete(VersionedDataKind kind, String key, int version) { - wrapper.delete(kind, key, version); + wrappedStore.delete(kind, key, version); } @Override public boolean initialized() { - return wrapper.initialized(); + return wrappedStore.initialized(); } @Override public void close() throws IOException { - wrapper.close(); + wrappedStore.close(); } /** * Return the underlying Guava cache stats object. + *

+ * In the newer data store API, there is a different way to do this. See + * {@link com.launchdarkly.client.integrations.PersistentDataStoreBuilder#cacheMonitor(com.launchdarkly.client.integrations.CacheMonitor)}. * * @return the cache statistics object. */ public CacheStats getCacheStats() { - return wrapper.getCacheStats(); + return ((CachingStoreWrapper)wrappedStore).getCacheStats(); } /** @@ -87,192 +75,15 @@ public CacheStats getCacheStats() { * @param builder the configured builder to construct the store with. */ protected RedisFeatureStore(RedisFeatureStoreBuilder builder) { - // There is no builder for JedisPool, just a large number of constructor overloads. Unfortunately, - // the overloads that accept a URI do not accept the other parameters we need to set, so we need - // to decompose the URI. - String host = builder.uri.getHost(); - int port = builder.uri.getPort(); - String password = builder.password == null ? JedisURIHelper.getPassword(builder.uri) : builder.password; - int database = builder.database == null ? JedisURIHelper.getDBIndex(builder.uri): builder.database.intValue(); - boolean tls = builder.tls || builder.uri.getScheme().equals("rediss"); - - String extra = tls ? " with TLS" : ""; - if (password != null) { - extra = extra + (extra.isEmpty() ? " with" : " and") + " password"; - } - logger.info(String.format("Connecting to Redis feature store at %s:%d/%d%s", host, port, database, extra)); - - JedisPoolConfig poolConfig = (builder.poolConfig != null) ? builder.poolConfig : new JedisPoolConfig(); - JedisPool pool = new JedisPool(poolConfig, - host, - port, - builder.connectTimeout, - builder.socketTimeout, - password, - database, - null, // clientName - tls, - null, // sslSocketFactory - null, // sslParameters - null // hostnameVerifier - ); - - String prefix = (builder.prefix == null || builder.prefix.isEmpty()) ? - RedisFeatureStoreBuilder.DEFAULT_PREFIX : - builder.prefix; - - this.core = new Core(pool, prefix); - this.wrapper = CachingStoreWrapper.builder(this.core).caching(builder.caching) - .build(); + wrappedStore = builder.wrappedOuterBuilder.createFeatureStore(); } /** * Creates a new store instance that connects to Redis with a default connection (localhost port 6379) and no in-memory cache. * @deprecated Please use {@link Components#redisFeatureStore()} instead. 
*/ + @Deprecated public RedisFeatureStore() { this(new RedisFeatureStoreBuilder().caching(FeatureStoreCacheConfig.disabled())); } - - static class Core implements FeatureStoreCore { - private final JedisPool pool; - private final String prefix; - private UpdateListener updateListener; - - Core(JedisPool pool, String prefix) { - this.pool = pool; - this.prefix = prefix; - } - - @Override - public VersionedData getInternal(VersionedDataKind kind, String key) { - try (Jedis jedis = pool.getResource()) { - VersionedData item = getRedis(kind, key, jedis); - if (item != null) { - logger.debug("[get] Key: {} with version: {} found in \"{}\".", key, item.getVersion(), kind.getNamespace()); - } - return item; - } - } - - @Override - public Map getAllInternal(VersionedDataKind kind) { - try (Jedis jedis = pool.getResource()) { - Map allJson = jedis.hgetAll(itemsKey(kind)); - Map result = new HashMap<>(); - - for (Map.Entry entry : allJson.entrySet()) { - VersionedData item = unmarshalJson(kind, entry.getValue()); - result.put(entry.getKey(), item); - } - return result; - } - } - - @Override - public void initInternal(Map, Map> allData) { - try (Jedis jedis = pool.getResource()) { - Transaction t = jedis.multi(); - - for (Map.Entry, Map> entry: allData.entrySet()) { - String baseKey = itemsKey(entry.getKey()); - t.del(baseKey); - for (VersionedData item: entry.getValue().values()) { - t.hset(baseKey, item.getKey(), marshalJson(item)); - } - } - - t.set(initedKey(), ""); - t.exec(); - } - } - - @Override - public VersionedData upsertInternal(VersionedDataKind kind, VersionedData newItem) { - while (true) { - Jedis jedis = null; - try { - jedis = pool.getResource(); - String baseKey = itemsKey(kind); - jedis.watch(baseKey); - - if (updateListener != null) { - updateListener.aboutToUpdate(baseKey, newItem.getKey()); - } - - VersionedData oldItem = getRedis(kind, newItem.getKey(), jedis); - - if (oldItem != null && oldItem.getVersion() >= newItem.getVersion()) { - logger.debug("Attempted to {} key: {} version: {}" + - " with a version that is the same or older: {} in \"{}\"", - newItem.isDeleted() ? "delete" : "update", - newItem.getKey(), oldItem.getVersion(), newItem.getVersion(), kind.getNamespace()); - return oldItem; - } - - Transaction tx = jedis.multi(); - tx.hset(baseKey, newItem.getKey(), marshalJson(newItem)); - List result = tx.exec(); - if (result.isEmpty()) { - // if exec failed, it means the watch was triggered and we should retry - logger.debug("Concurrent modification detected, retrying"); - continue; - } - - return newItem; - } finally { - if (jedis != null) { - jedis.unwatch(); - jedis.close(); - } - } - } - } - - @Override - public boolean initializedInternal() { - try (Jedis jedis = pool.getResource()) { - return jedis.exists(initedKey()); - } - } - - @Override - public void close() throws IOException { - logger.info("Closing LaunchDarkly RedisFeatureStore"); - pool.destroy(); - } - - @VisibleForTesting - void setUpdateListener(UpdateListener updateListener) { - this.updateListener = updateListener; - } - - private String itemsKey(VersionedDataKind kind) { - return prefix + ":" + kind.getNamespace(); - } - - private String initedKey() { - return prefix + ":$inited"; - } - - private T getRedis(VersionedDataKind kind, String key, Jedis jedis) { - String json = jedis.hget(itemsKey(kind), key); - - if (json == null) { - logger.debug("[get] Key: {} not found in \"{}\". 
Returning null", key, kind.getNamespace()); - return null; - } - - return unmarshalJson(kind, json); - } - } - - static interface UpdateListener { - void aboutToUpdate(String baseKey, String itemKey); - } - - @VisibleForTesting - void setUpdateListener(UpdateListener updateListener) { - core.setUpdateListener(updateListener); - } } diff --git a/src/main/java/com/launchdarkly/client/RedisFeatureStoreBuilder.java b/src/main/java/com/launchdarkly/client/RedisFeatureStoreBuilder.java index 7c5661e82..c447da57c 100644 --- a/src/main/java/com/launchdarkly/client/RedisFeatureStoreBuilder.java +++ b/src/main/java/com/launchdarkly/client/RedisFeatureStoreBuilder.java @@ -1,25 +1,25 @@ package com.launchdarkly.client; -import redis.clients.jedis.JedisPoolConfig; -import redis.clients.jedis.Protocol; +import com.launchdarkly.client.integrations.CacheMonitor; +import com.launchdarkly.client.integrations.PersistentDataStoreBuilder; +import com.launchdarkly.client.integrations.Redis; +import com.launchdarkly.client.integrations.RedisDataStoreBuilder; import java.net.URI; import java.net.URISyntaxException; import java.util.concurrent.TimeUnit; +import redis.clients.jedis.JedisPoolConfig; + /** - * A builder for configuring the Redis-based persistent feature store. - * - * Obtain an instance of this class by calling {@link Components#redisFeatureStore()} or {@link Components#redisFeatureStore(URI)}. - * Builder calls can be chained, for example: - * - *

- * FeatureStore store = Components.redisFeatureStore()
- *      .database(1)
- *      .caching(FeatureStoreCacheConfig.enabled().ttlSeconds(60))
- *      .build();
- * 
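Under the new integrations API introduced in this changeset, the removed example above would be written roughly as follows. This is a sketch assembled from builder methods referenced elsewhere in this diff (Redis.dataStore(), RedisDataStoreBuilder#database, PersistentDataStoreBuilder#cacheTime), not an excerpt from the source.

    // Requires java.util.concurrent.TimeUnit and the com.launchdarkly.client.integrations package.
    LDConfig config = new LDConfig.Builder()
        .dataStore(
            Components.persistentDataStore(
                Redis.dataStore().database(1)   // default URI redis://localhost:6379, as in the old example
            ).cacheTime(60, TimeUnit.SECONDS)   // replaces FeatureStoreCacheConfig.enabled().ttlSeconds(60)
        )
        .build();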
+ * Deprecated builder class for the Redis-based persistent data store. + *

+ * The replacement for this class is {@link com.launchdarkly.client.integrations.RedisDataStoreBuilder}. + * This class is retained for backward compatibility and will be removed in a future version. + * + * @deprecated Use {@link com.launchdarkly.client.integrations.Redis#dataStore()} */ +@Deprecated public final class RedisFeatureStoreBuilder implements FeatureStoreFactory { /** * The default value for the Redis URI: {@code redis://localhost:6379} @@ -40,25 +40,28 @@ public final class RedisFeatureStoreBuilder implements FeatureStoreFactory { */ public static final long DEFAULT_CACHE_TIME_SECONDS = FeatureStoreCacheConfig.DEFAULT_TIME_SECONDS; - final URI uri; - String prefix = DEFAULT_PREFIX; - int connectTimeout = Protocol.DEFAULT_TIMEOUT; - int socketTimeout = Protocol.DEFAULT_TIMEOUT; - Integer database = null; - String password = null; - boolean tls = false; - FeatureStoreCacheConfig caching = FeatureStoreCacheConfig.DEFAULT; - boolean refreshStaleValues = false; // this and asyncRefresh are redundant with FeatureStoreCacheConfig, but are used by deprecated setters + final PersistentDataStoreBuilder wrappedOuterBuilder; + final RedisDataStoreBuilder wrappedBuilder; + + // We have to keep track of these caching parameters separately in order to support some deprecated setters + boolean refreshStaleValues = false; boolean asyncRefresh = false; - JedisPoolConfig poolConfig = null; - // These constructors are called only from Implementations + // These constructors are called only from Components RedisFeatureStoreBuilder() { - this.uri = DEFAULT_URI; + wrappedBuilder = Redis.dataStore(); + wrappedOuterBuilder = Components.persistentDataStore(wrappedBuilder); + + // In order to make the cacheStats() method on the deprecated RedisFeatureStore class work, we need to + // turn on cache monitoring. In the newer API, cache monitoring would only be turned on if the application + // specified its own CacheMonitor, but in the deprecated API there's no way to know if they will want the + // statistics or not. + wrappedOuterBuilder.cacheMonitor(new CacheMonitor()); } RedisFeatureStoreBuilder(URI uri) { - this.uri = uri; + this(); + wrappedBuilder.uri(uri); } /** @@ -69,8 +72,9 @@ public final class RedisFeatureStoreBuilder implements FeatureStoreFactory { * @deprecated Please use {@link Components#redisFeatureStore(java.net.URI)}. */ public RedisFeatureStoreBuilder(URI uri, long cacheTimeSecs) { - this.uri = uri; - this.cacheTime(cacheTimeSecs, TimeUnit.SECONDS); + this(); + wrappedBuilder.uri(uri); + wrappedOuterBuilder.cacheSeconds(cacheTimeSecs); } /** @@ -84,8 +88,9 @@ public RedisFeatureStoreBuilder(URI uri, long cacheTimeSecs) { * @deprecated Please use {@link Components#redisFeatureStore(java.net.URI)}. 
*/ public RedisFeatureStoreBuilder(String scheme, String host, int port, long cacheTimeSecs) throws URISyntaxException { - this.uri = new URI(scheme, null, host, port, null, null, null); - this.cacheTime(cacheTimeSecs, TimeUnit.SECONDS); + this(); + wrappedBuilder.uri(new URI(scheme, null, host, port, null, null, null)); + wrappedOuterBuilder.cacheSeconds(cacheTimeSecs); } /** @@ -100,7 +105,7 @@ public RedisFeatureStoreBuilder(String scheme, String host, int port, long cache * @since 4.7.0 */ public RedisFeatureStoreBuilder database(Integer database) { - this.database = database; + wrappedBuilder.database(database); return this; } @@ -116,7 +121,7 @@ public RedisFeatureStoreBuilder database(Integer database) { * @since 4.7.0 */ public RedisFeatureStoreBuilder password(String password) { - this.password = password; + wrappedBuilder.password(password); return this; } @@ -133,7 +138,7 @@ public RedisFeatureStoreBuilder password(String password) { * @since 4.7.0 */ public RedisFeatureStoreBuilder tls(boolean tls) { - this.tls = tls; + wrappedBuilder.tls(tls); return this; } @@ -148,7 +153,8 @@ public RedisFeatureStoreBuilder tls(boolean tls) { * @since 4.6.0 */ public RedisFeatureStoreBuilder caching(FeatureStoreCacheConfig caching) { - this.caching = caching; + wrappedOuterBuilder.cacheTime(caching.getCacheTime(), caching.getCacheTimeUnit()); + wrappedOuterBuilder.staleValuesPolicy(caching.getStaleValuesPolicy().toNewEnum()); return this; } @@ -187,12 +193,12 @@ public RedisFeatureStoreBuilder asyncRefresh(boolean enabled) { private void updateCachingStaleValuesPolicy() { // We need this logic in order to support the existing behavior of the deprecated methods above: // asyncRefresh is supposed to have no effect unless refreshStaleValues is true - if (this.refreshStaleValues) { - this.caching = this.caching.staleValuesPolicy(this.asyncRefresh ? - FeatureStoreCacheConfig.StaleValuesPolicy.REFRESH_ASYNC : - FeatureStoreCacheConfig.StaleValuesPolicy.REFRESH); + if (refreshStaleValues) { + wrappedOuterBuilder.staleValuesPolicy(this.asyncRefresh ? + PersistentDataStoreBuilder.StaleValuesPolicy.REFRESH_ASYNC : + PersistentDataStoreBuilder.StaleValuesPolicy.REFRESH); } else { - this.caching = this.caching.staleValuesPolicy(FeatureStoreCacheConfig.StaleValuesPolicy.EVICT); + wrappedOuterBuilder.staleValuesPolicy(PersistentDataStoreBuilder.StaleValuesPolicy.EVICT); } } @@ -203,7 +209,7 @@ private void updateCachingStaleValuesPolicy() { * @return the builder */ public RedisFeatureStoreBuilder prefix(String prefix) { - this.prefix = prefix; + wrappedBuilder.prefix(prefix); return this; } @@ -218,8 +224,7 @@ public RedisFeatureStoreBuilder prefix(String prefix) { * @deprecated use {@link #caching(FeatureStoreCacheConfig)} and {@link FeatureStoreCacheConfig#ttl(long, TimeUnit)}. 
*/ public RedisFeatureStoreBuilder cacheTime(long cacheTime, TimeUnit timeUnit) { - this.caching = this.caching.ttl(cacheTime, timeUnit) - .staleValuesPolicy(this.caching.getStaleValuesPolicy()); + wrappedOuterBuilder.cacheTime(cacheTime, timeUnit); return this; } @@ -230,7 +235,7 @@ public RedisFeatureStoreBuilder cacheTime(long cacheTime, TimeUnit timeUnit) { * @return the builder */ public RedisFeatureStoreBuilder poolConfig(JedisPoolConfig poolConfig) { - this.poolConfig = poolConfig; + wrappedBuilder.poolConfig(poolConfig); return this; } @@ -243,7 +248,7 @@ public RedisFeatureStoreBuilder poolConfig(JedisPoolConfig poolConfig) { * @return the builder */ public RedisFeatureStoreBuilder connectTimeout(int connectTimeout, TimeUnit timeUnit) { - this.connectTimeout = (int) timeUnit.toMillis(connectTimeout); + wrappedBuilder.connectTimeout(connectTimeout, timeUnit); return this; } @@ -256,7 +261,7 @@ public RedisFeatureStoreBuilder connectTimeout(int connectTimeout, TimeUnit time * @return the builder */ public RedisFeatureStoreBuilder socketTimeout(int socketTimeout, TimeUnit timeUnit) { - this.socketTimeout = (int) timeUnit.toMillis(socketTimeout); + wrappedBuilder.socketTimeout(socketTimeout, timeUnit); return this; } diff --git a/src/main/java/com/launchdarkly/client/files/DataBuilder.java b/src/main/java/com/launchdarkly/client/files/DataBuilder.java deleted file mode 100644 index e9bc580a9..000000000 --- a/src/main/java/com/launchdarkly/client/files/DataBuilder.java +++ /dev/null @@ -1,32 +0,0 @@ -package com.launchdarkly.client.files; - -import com.launchdarkly.client.VersionedData; -import com.launchdarkly.client.VersionedDataKind; - -import java.util.HashMap; -import java.util.Map; - -/** - * Internal data structure that organizes flag/segment data into the format that the feature store - * expects. Will throw an exception if we try to add the same flag or segment key more than once. - */ -class DataBuilder { - private final Map, Map> allData = new HashMap<>(); - - public Map, Map> build() { - return allData; - } - - public void add(VersionedDataKind kind, VersionedData item) throws DataLoaderException { - @SuppressWarnings("unchecked") - Map items = (Map)allData.get(kind); - if (items == null) { - items = new HashMap(); - allData.put(kind, items); - } - if (items.containsKey(item.getKey())) { - throw new DataLoaderException("in " + kind.getNamespace() + ", key \"" + item.getKey() + "\" was already defined", null, null); - } - items.put(item.getKey(), item); - } -} diff --git a/src/main/java/com/launchdarkly/client/files/DataLoader.java b/src/main/java/com/launchdarkly/client/files/DataLoader.java deleted file mode 100644 index 0b4ad431c..000000000 --- a/src/main/java/com/launchdarkly/client/files/DataLoader.java +++ /dev/null @@ -1,58 +0,0 @@ -package com.launchdarkly.client.files; - -import com.google.gson.JsonElement; -import com.launchdarkly.client.VersionedDataKind; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.nio.file.Files; -import java.nio.file.Path; -import java.util.ArrayList; -import java.util.List; -import java.util.Map; - -/** - * Implements the loading of flag data from one or more files. Will throw an exception if any file can't - * be read or parsed, or if any flag or segment keys are duplicates. 
- */ -final class DataLoader { - private final List files; - - public DataLoader(List files) { - this.files = new ArrayList(files); - } - - public Iterable getFiles() { - return files; - } - - public void load(DataBuilder builder) throws DataLoaderException - { - for (Path p: files) { - try { - byte[] data = Files.readAllBytes(p); - FlagFileParser parser = FlagFileParser.selectForContent(data); - FlagFileRep fileContents = parser.parse(new ByteArrayInputStream(data)); - if (fileContents.flags != null) { - for (Map.Entry e: fileContents.flags.entrySet()) { - builder.add(VersionedDataKind.FEATURES, FlagFactory.flagFromJson(e.getValue())); - } - } - if (fileContents.flagValues != null) { - for (Map.Entry e: fileContents.flagValues.entrySet()) { - builder.add(VersionedDataKind.FEATURES, FlagFactory.flagWithValue(e.getKey(), e.getValue())); - } - } - if (fileContents.segments != null) { - for (Map.Entry e: fileContents.segments.entrySet()) { - builder.add(VersionedDataKind.SEGMENTS, FlagFactory.segmentFromJson(e.getValue())); - } - } - } catch (DataLoaderException e) { - throw new DataLoaderException(e.getMessage(), e.getCause(), p); - } catch (IOException e) { - throw new DataLoaderException(null, e, p); - } - } - } -} diff --git a/src/main/java/com/launchdarkly/client/files/DataLoaderException.java b/src/main/java/com/launchdarkly/client/files/DataLoaderException.java deleted file mode 100644 index 184a3211a..000000000 --- a/src/main/java/com/launchdarkly/client/files/DataLoaderException.java +++ /dev/null @@ -1,43 +0,0 @@ -package com.launchdarkly.client.files; - -import java.nio.file.Path; - -/** - * Indicates that the file processor encountered an error in one of the input files. This exception is - * not surfaced to the host application, it is only logged, and we don't do anything different programmatically - * with different kinds of exceptions, therefore it has no subclasses. - */ -@SuppressWarnings("serial") -class DataLoaderException extends Exception { - private final Path filePath; - - public DataLoaderException(String message, Throwable cause, Path filePath) { - super(message, cause); - this.filePath = filePath; - } - - public DataLoaderException(String message, Throwable cause) { - this(message, cause, null); - } - - public Path getFilePath() { - return filePath; - } - - public String getDescription() { - StringBuilder s = new StringBuilder(); - if (getMessage() != null) { - s.append(getMessage()); - if (getCause() != null) { - s.append(" "); - } - } - if (getCause() != null) { - s.append(" [").append(getCause().toString()).append("]"); - } - if (filePath != null) { - s.append(": ").append(filePath); - } - return s.toString(); - } -} diff --git a/src/main/java/com/launchdarkly/client/files/FileComponents.java b/src/main/java/com/launchdarkly/client/files/FileComponents.java index 390fb75a3..63a575555 100644 --- a/src/main/java/com/launchdarkly/client/files/FileComponents.java +++ b/src/main/java/com/launchdarkly/client/files/FileComponents.java @@ -1,100 +1,11 @@ package com.launchdarkly.client.files; /** - * The entry point for the file data source, which allows you to use local files as a source of - * feature flag state. This would typically be used in a test environment, to operate using a - * predetermined feature flag state without an actual LaunchDarkly connection. - *

- * To use this component, call {@link #fileDataSource()} to obtain a factory object, call one or - * methods to configure it, and then add it to your LaunchDarkly client configuration. At a - * minimum, you will want to call {@link FileDataSourceFactory#filePaths(String...)} to specify - * your data file(s); you can also use {@link FileDataSourceFactory#autoUpdate(boolean)} to - * specify that flags should be reloaded when a file is modified. See {@link FileDataSourceFactory} - * for all configuration options. - *

- *     FileDataSourceFactory f = FileComponents.fileDataSource()
- *         .filePaths("./testData/flags.json")
- *         .autoUpdate(true);
- *     LDConfig config = new LDConfig.Builder()
- *         .updateProcessorFactory(f)
- *         .build();
- * 
- *
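For comparison, the replacement API added in this changeset (com.launchdarkly.client.integrations.FileData) expresses the same setup roughly like this. The sketch assumes the FileDataSourceBuilder returned by FileData.dataSource() can be passed to LDConfig.Builder#dataSource(UpdateProcessorFactory), just as the deprecated factory above is.

    // Builder methods mirror the deprecated FileDataSourceFactory shown in this diff.
    FileDataSourceBuilder fileSource = FileData.dataSource()
        .filePaths("./testData/flags.json")
        .autoUpdate(true);
    LDConfig config = new LDConfig.Builder()
        .dataSource(fileSource)
        .build();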

- * This will cause the client not to connect to LaunchDarkly to get feature flags. The - * client may still make network connections to send analytics events, unless you have disabled - * this with {@link com.launchdarkly.client.LDConfig.Builder#sendEvents(boolean)} or - * {@link com.launchdarkly.client.LDConfig.Builder#offline(boolean)}. - *

- * Flag data files can be either JSON or YAML. They contain an object with three possible - * properties: - *

    - *
  • {@code flags}: Feature flag definitions. - *
  • {@code flagVersions}: Simplified feature flags that contain only a value. - *
  • {@code segments}: User segment definitions. - *
- *

- * The format of the data in {@code flags} and {@code segments} is defined by the LaunchDarkly application - * and is subject to change. Rather than trying to construct these objects yourself, it is simpler - * to request existing flags directly from the LaunchDarkly server in JSON format, and use this - * output as the starting point for your file. In Linux you would do this: - *

- *     curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all
- * 
- *

- * The output will look something like this (but with many more properties): - *

- * {
- *     "flags": {
- *         "flag-key-1": {
- *             "key": "flag-key-1",
- *             "on": true,
- *             "variations": [ "a", "b" ]
- *         },
- *         "flag-key-2": {
- *             "key": "flag-key-2",
- *             "on": true,
- *             "variations": [ "c", "d" ]
- *         }
- *     },
- *     "segments": {
- *         "segment-key-1": {
- *             "key": "segment-key-1",
- *             "includes": [ "user-key-1" ]
- *         }
- *     }
- * }
- * 
- *

- * Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported - * by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to - * set specific flag keys to specific values. For that, you can use a much simpler format: - *

- * {
- *     "flagValues": {
- *         "my-string-flag-key": "value-1",
- *         "my-boolean-flag-key": true,
- *         "my-integer-flag-key": 3
- *     }
- * }
- * 
- *

- * Or, in YAML: - *

- * flagValues:
- *   my-string-flag-key: "value-1"
- *   my-boolean-flag-key: true
- *   my-integer-flag-key: 3
- * 
- *

- * It is also possible to specify both {@code flags} and {@code flagValues}, if you want some flags - * to have simple values and others to have complex behavior. However, it is an error to use the - * same flag key or segment key more than once, either in a single file or across multiple files. - *

- * If the data source encounters any error in any file-- malformed content, a missing file, or a - * duplicate key-- it will not load flags from any of the files. - * + * Deprecated entry point for the file data source. * @since 4.5.0 + * @deprecated Use {@link com.launchdarkly.client.integrations.FileData}. */ +@Deprecated public abstract class FileComponents { /** * Creates a {@link FileDataSourceFactory} which you can use to configure the file data diff --git a/src/main/java/com/launchdarkly/client/files/FileDataSourceFactory.java b/src/main/java/com/launchdarkly/client/files/FileDataSourceFactory.java index 216c56213..ded4a2dd5 100644 --- a/src/main/java/com/launchdarkly/client/files/FileDataSourceFactory.java +++ b/src/main/java/com/launchdarkly/client/files/FileDataSourceFactory.java @@ -4,25 +4,21 @@ import com.launchdarkly.client.LDConfig; import com.launchdarkly.client.UpdateProcessor; import com.launchdarkly.client.UpdateProcessorFactory; +import com.launchdarkly.client.integrations.FileDataSourceBuilder; +import com.launchdarkly.client.integrations.FileData; import java.nio.file.InvalidPathException; import java.nio.file.Path; -import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.List; /** - * To use the file data source, obtain a new instance of this class with {@link FileComponents#fileDataSource()}, - * call the builder method {@link #filePaths(String...)} to specify file path(s), - * then pass the resulting object to {@link com.launchdarkly.client.LDConfig.Builder#updateProcessorFactory(UpdateProcessorFactory)}. - *

- * For more details, see {@link FileComponents}. + * Deprecated name for {@link FileDataSourceBuilder}. Use {@link FileData#dataSource()} to obtain the + * new builder. * * @since 4.5.0 + * @deprecated */ public class FileDataSourceFactory implements UpdateProcessorFactory { - private final List sources = new ArrayList<>(); - private boolean autoUpdate = false; + private final FileDataSourceBuilder wrappedBuilder = new FileDataSourceBuilder(); /** * Adds any number of source files for loading flag data, specifying each file path as a string. The files will @@ -36,9 +32,7 @@ public class FileDataSourceFactory implements UpdateProcessorFactory { * @throws InvalidPathException if one of the parameters is not a valid file path */ public FileDataSourceFactory filePaths(String... filePaths) throws InvalidPathException { - for (String p: filePaths) { - sources.add(Paths.get(p)); - } + wrappedBuilder.filePaths(filePaths); return this; } @@ -52,9 +46,7 @@ public FileDataSourceFactory filePaths(String... filePaths) throws InvalidPathEx * @return the same factory object */ public FileDataSourceFactory filePaths(Path... filePaths) { - for (Path p: filePaths) { - sources.add(p); - } + wrappedBuilder.filePaths(filePaths); return this; } @@ -69,7 +61,7 @@ public FileDataSourceFactory filePaths(Path... filePaths) { * @return the same factory object */ public FileDataSourceFactory autoUpdate(boolean autoUpdate) { - this.autoUpdate = autoUpdate; + wrappedBuilder.autoUpdate(autoUpdate); return this; } @@ -78,6 +70,6 @@ public FileDataSourceFactory autoUpdate(boolean autoUpdate) { */ @Override public UpdateProcessor createUpdateProcessor(String sdkKey, LDConfig config, FeatureStore featureStore) { - return new FileDataSource(featureStore, new DataLoader(sources), autoUpdate); + return wrappedBuilder.createUpdateProcessor(sdkKey, config, featureStore); } } \ No newline at end of file diff --git a/src/main/java/com/launchdarkly/client/files/FlagFactory.java b/src/main/java/com/launchdarkly/client/files/FlagFactory.java deleted file mode 100644 index 19af56282..000000000 --- a/src/main/java/com/launchdarkly/client/files/FlagFactory.java +++ /dev/null @@ -1,56 +0,0 @@ -package com.launchdarkly.client.files; - -import com.google.gson.Gson; -import com.google.gson.JsonArray; -import com.google.gson.JsonElement; -import com.google.gson.JsonObject; -import com.launchdarkly.client.VersionedData; -import com.launchdarkly.client.VersionedDataKind; - -/** - * Creates flag or segment objects from raw JSON. - * - * Note that the {@code FeatureFlag} and {@code Segment} classes are not public in the Java - * client, so we refer to those class objects indirectly via {@code VersionedDataKind}; and - * if we want to construct a flag from scratch, we can't use the constructor but instead must - * build some JSON and then parse that. - */ -class FlagFactory { - private static final Gson gson = new Gson(); - - public static VersionedData flagFromJson(String jsonString) { - return flagFromJson(gson.fromJson(jsonString, JsonElement.class)); - } - - public static VersionedData flagFromJson(JsonElement jsonTree) { - return gson.fromJson(jsonTree, VersionedDataKind.FEATURES.getItemClass()); - } - - /** - * Constructs a flag that always returns the same value. This is done by giving it a single - * variation and setting the fallthrough variation to that. 
- */ - public static VersionedData flagWithValue(String key, JsonElement value) { - JsonElement jsonValue = gson.toJsonTree(value); - JsonObject o = new JsonObject(); - o.addProperty("key", key); - o.addProperty("on", true); - JsonArray vs = new JsonArray(); - vs.add(jsonValue); - o.add("variations", vs); - // Note that LaunchDarkly normally prevents you from creating a flag with just one variation, - // but it's the application that validates that; the SDK doesn't care. - JsonObject ft = new JsonObject(); - ft.addProperty("variation", 0); - o.add("fallthrough", ft); - return flagFromJson(o); - } - - public static VersionedData segmentFromJson(String jsonString) { - return segmentFromJson(gson.fromJson(jsonString, JsonElement.class)); - } - - public static VersionedData segmentFromJson(JsonElement jsonTree) { - return gson.fromJson(jsonTree, VersionedDataKind.SEGMENTS.getItemClass()); - } -} diff --git a/src/main/java/com/launchdarkly/client/files/FlagFileParser.java b/src/main/java/com/launchdarkly/client/files/FlagFileParser.java deleted file mode 100644 index ed0de72a0..000000000 --- a/src/main/java/com/launchdarkly/client/files/FlagFileParser.java +++ /dev/null @@ -1,39 +0,0 @@ -package com.launchdarkly.client.files; - -import java.io.ByteArrayInputStream; -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; -import java.io.Reader; - -abstract class FlagFileParser { - private static final FlagFileParser jsonParser = new JsonFlagFileParser(); - private static final FlagFileParser yamlParser = new YamlFlagFileParser(); - - public abstract FlagFileRep parse(InputStream input) throws DataLoaderException, IOException; - - public static FlagFileParser selectForContent(byte[] data) { - Reader r = new InputStreamReader(new ByteArrayInputStream(data)); - return detectJson(r) ? jsonParser : yamlParser; - } - - private static boolean detectJson(Reader r) { - // A valid JSON file for our purposes must be an object, i.e. it must start with '{' - while (true) { - try { - int ch = r.read(); - if (ch < 0) { - return false; - } - if (ch == '{') { - return true; - } - if (!Character.isWhitespace(ch)) { - return false; - } - } catch (IOException e) { - return false; - } - } - } -} diff --git a/src/main/java/com/launchdarkly/client/files/FlagFileRep.java b/src/main/java/com/launchdarkly/client/files/FlagFileRep.java deleted file mode 100644 index db04fb51b..000000000 --- a/src/main/java/com/launchdarkly/client/files/FlagFileRep.java +++ /dev/null @@ -1,23 +0,0 @@ -package com.launchdarkly.client.files; - -import com.google.gson.JsonElement; - -import java.util.Map; - -/** - * The basic data structure that we expect all source files to contain. Note that we don't try to - * parse the flags or segments at this level; that will be done by {@link FlagFactory}. 
- */ -final class FlagFileRep { - Map flags; - Map flagValues; - Map segments; - - FlagFileRep() {} - - FlagFileRep(Map flags, Map flagValues, Map segments) { - this.flags = flags; - this.flagValues = flagValues; - this.segments = segments; - } -} diff --git a/src/main/java/com/launchdarkly/client/files/JsonFlagFileParser.java b/src/main/java/com/launchdarkly/client/files/JsonFlagFileParser.java deleted file mode 100644 index c895fd6ab..000000000 --- a/src/main/java/com/launchdarkly/client/files/JsonFlagFileParser.java +++ /dev/null @@ -1,30 +0,0 @@ -package com.launchdarkly.client.files; - -import com.google.gson.Gson; -import com.google.gson.JsonElement; -import com.google.gson.JsonSyntaxException; - -import java.io.IOException; -import java.io.InputStream; -import java.io.InputStreamReader; - -final class JsonFlagFileParser extends FlagFileParser { - private static final Gson gson = new Gson(); - - @Override - public FlagFileRep parse(InputStream input) throws DataLoaderException, IOException { - try { - return parseJson(gson.fromJson(new InputStreamReader(input), JsonElement.class)); - } catch (JsonSyntaxException e) { - throw new DataLoaderException("cannot parse JSON", e); - } - } - - public FlagFileRep parseJson(JsonElement tree) throws DataLoaderException, IOException { - try { - return gson.fromJson(tree, FlagFileRep.class); - } catch (JsonSyntaxException e) { - throw new DataLoaderException("cannot parse JSON", e); - } - } -} diff --git a/src/main/java/com/launchdarkly/client/files/YamlFlagFileParser.java b/src/main/java/com/launchdarkly/client/files/YamlFlagFileParser.java deleted file mode 100644 index f4e352dfc..000000000 --- a/src/main/java/com/launchdarkly/client/files/YamlFlagFileParser.java +++ /dev/null @@ -1,52 +0,0 @@ -package com.launchdarkly.client.files; - -import com.google.gson.Gson; -import com.google.gson.JsonElement; - -import org.yaml.snakeyaml.Yaml; -import org.yaml.snakeyaml.error.YAMLException; - -import java.io.IOException; -import java.io.InputStream; - -/** - * Parses a FlagFileRep from a YAML file. Two notes about this implementation: - *

- * 1. We already have logic for parsing (and building) flags using Gson, and would rather not repeat - * that logic. So, rather than telling SnakeYAML to parse the file directly into a FlagFileRep object - - * and providing SnakeYAML-specific methods for building flags - we are just parsing the YAML into - * simple Java objects and then feeding that data into the Gson parser. This is admittedly inefficient, - * but it also means that we don't have to worry about any differences between how Gson unmarshals an - * object and how the YAML parser does it. We already know Gson does the right thing for the flag and - * segment classes, because that's what we use in the SDK. - *

- * 2. Ideally, it should be possible to have just one parser, since any valid JSON document is supposed - * to also be parseable as YAML. However, at present, that doesn't work: - *

    - *
  • SnakeYAML (1.19) rejects many valid JSON documents due to simple things like whitespace. - * Apparently this is due to supporting only YAML 1.1, not YAML 1.2 which has full JSON support. - *
  • snakeyaml-engine (https://bitbucket.org/asomov/snakeyaml-engine) says it can handle any JSON, - * but it's only for Java 8 and above. - *
  • YamlBeans (https://github.com/EsotericSoftware/yamlbeans) only works right if you're parsing - * directly into a Java bean instance (which FeatureFlag is not). If you try the "parse to simple - * Java types (and then feed them into Gson)" approach, it does not correctly parse non-string types - * (i.e. it treats true as "true"). (https://github.com/EsotericSoftware/yamlbeans/issues/7) - *
- */ -final class YamlFlagFileParser extends FlagFileParser { - private static final Yaml yaml = new Yaml(); - private static final Gson gson = new Gson(); - private static final JsonFlagFileParser jsonFileParser = new JsonFlagFileParser(); - - @Override - public FlagFileRep parse(InputStream input) throws DataLoaderException, IOException { - Object root; - try { - root = yaml.load(input); - } catch (YAMLException e) { - throw new DataLoaderException("unable to parse YAML", e); - } - JsonElement jsonRoot = gson.toJsonTree(root); - return jsonFileParser.parseJson(jsonRoot); - } -} diff --git a/src/main/java/com/launchdarkly/client/files/package-info.java b/src/main/java/com/launchdarkly/client/files/package-info.java index a5a3eafa4..da8abb785 100644 --- a/src/main/java/com/launchdarkly/client/files/package-info.java +++ b/src/main/java/com/launchdarkly/client/files/package-info.java @@ -1,6 +1,4 @@ /** - * Package for the file data source component, which may be useful in tests. - *

- * The entry point is {@link com.launchdarkly.client.files.FileComponents}. + * Deprecated package replaced by {@link com.launchdarkly.client.integrations.FileData}. */ package com.launchdarkly.client.files; diff --git a/src/main/java/com/launchdarkly/client/integrations/CacheMonitor.java b/src/main/java/com/launchdarkly/client/integrations/CacheMonitor.java new file mode 100644 index 000000000..618edc63d --- /dev/null +++ b/src/main/java/com/launchdarkly/client/integrations/CacheMonitor.java @@ -0,0 +1,151 @@ +package com.launchdarkly.client.integrations; + +import java.util.Objects; +import java.util.concurrent.Callable; + +/** + * A conduit that an application can use to monitor caching behavior of a persistent data store. + * + * @see PersistentDataStoreBuilder#cacheMonitor(CacheMonitor) + * @since 4.11.0 + */ +public final class CacheMonitor { + private Callable source; + + /** + * Constructs a new instance. + */ + public CacheMonitor() {} + + /** + * Called internally by the SDK to establish a source for the statistics. + * @param source provided by an internal SDK component + * @deprecated Referencing this method directly is deprecated. In a future version, it will + * only be visible to SDK implementation code. + */ + @Deprecated + public void setSource(Callable source) { + this.source = source; + } + + /** + * Queries the current cache statistics. + * + * @return a {@link CacheStats} instance, or null if not available + */ + public CacheStats getCacheStats() { + try { + return source == null ? null : source.call(); + } catch (Exception e) { + return null; + } + } + + /** + * A snapshot of cache statistics. The statistics are cumulative across the lifetime of the data store. + *

+ * This is based on the data provided by Guava's caching framework. The SDK currently uses Guava + * internally, but is not guaranteed to always do so, and to avoid embedding Guava API details in + * the SDK API this is provided as a separate class. + * + * @since 4.11.0 + */ + public static final class CacheStats { + private final long hitCount; + private final long missCount; + private final long loadSuccessCount; + private final long loadExceptionCount; + private final long totalLoadTime; + private final long evictionCount; + + /** + * Constructs a new instance. + * + * @param hitCount number of queries that produced a cache hit + * @param missCount number of queries that produced a cache miss + * @param loadSuccessCount number of cache misses that loaded a value without an exception + * @param loadExceptionCount number of cache misses that tried to load a value but got an exception + * @param totalLoadTime number of nanoseconds spent loading new values + * @param evictionCount number of cache entries that have been evicted + */ + public CacheStats(long hitCount, long missCount, long loadSuccessCount, long loadExceptionCount, + long totalLoadTime, long evictionCount) { + this.hitCount = hitCount; + this.missCount = missCount; + this.loadSuccessCount = loadSuccessCount; + this.loadExceptionCount = loadExceptionCount; + this.totalLoadTime = totalLoadTime; + this.evictionCount = evictionCount; + } + + /** + * The number of data queries that received cached data instead of going to the underlying data store. + * @return the number of cache hits + */ + public long getHitCount() { + return hitCount; + } + + /** + * The number of data queries that did not find cached data and went to the underlying data store. + * @return the number of cache misses + */ + public long getMissCount() { + return missCount; + } + + /** + * The number of times a cache miss resulted in successfully loading a data store item (or finding + * that it did not exist in the store). + * @return the number of successful loads + */ + public long getLoadSuccessCount() { + return loadSuccessCount; + } + + /** + * The number of times that an error occurred while querying the underlying data store. + * @return the number of failed loads + */ + public long getLoadExceptionCount() { + return loadExceptionCount; + } + + /** + * The total number of nanoseconds that the cache has spent loading new values. + * @return total time spent for all cache loads + */ + public long getTotalLoadTime() { + return totalLoadTime; + } + + /** + * The number of times cache entries have been evicted. 
+ * @return the number of evictions + */ + public long getEvictionCount() { + return evictionCount; + } + + @Override + public boolean equals(Object other) { + if (!(other instanceof CacheStats)) { + return false; + } + CacheStats o = (CacheStats)other; + return hitCount == o.hitCount && missCount == o.missCount && loadSuccessCount == o.loadSuccessCount && + loadExceptionCount == o.loadExceptionCount && totalLoadTime == o.totalLoadTime && evictionCount == o.evictionCount; + } + + @Override + public int hashCode() { + return Objects.hash(hitCount, missCount, loadSuccessCount, loadExceptionCount, totalLoadTime, evictionCount); + } + + @Override + public String toString() { + return "{hit=" + hitCount + ", miss=" + missCount + ", loadSuccess=" + loadSuccessCount + + ", loadException=" + loadExceptionCount + ", totalLoadTime=" + totalLoadTime + ", evictionCount=" + evictionCount + "}"; + } + } +} diff --git a/src/main/java/com/launchdarkly/client/integrations/FileData.java b/src/main/java/com/launchdarkly/client/integrations/FileData.java new file mode 100644 index 000000000..a6f65f3e2 --- /dev/null +++ b/src/main/java/com/launchdarkly/client/integrations/FileData.java @@ -0,0 +1,116 @@ +package com.launchdarkly.client.integrations; + +/** + * Integration between the LaunchDarkly SDK and file data. + *

+ * The file data source allows you to use local files as a source of feature flag state. This would + * typically be used in a test environment, to operate using a predetermined feature flag state + * without an actual LaunchDarkly connection. See {@link #dataSource()} for details. + * + * @since 4.11.0 + */ +public abstract class FileData { + /** + * Creates a {@link FileDataSourceBuilder} which you can use to configure the file data source. + * This allows you to use local files as a source of feature flag state, instead of using an actual + * LaunchDarkly connection. + *

+ * This object can be modified with {@link FileDataSourceBuilder} methods for any desired + * custom settings, before including it in the SDK configuration with + * {@link com.launchdarkly.client.LDConfig.Builder#dataSource(com.launchdarkly.client.UpdateProcessorFactory)}. + *

+ * At a minimum, you will want to call {@link FileDataSourceBuilder#filePaths(String...)} to specify + * your data file(s); you can also use {@link FileDataSourceBuilder#autoUpdate(boolean)} to + * specify that flags should be reloaded when a file is modified. See {@link FileDataSourceBuilder} + * for all configuration options. + *

+   *     FileDataSourceBuilder f = FileData.dataSource()
+   *         .filePaths("./testData/flags.json")
+   *         .autoUpdate(true);
+   *     LDConfig config = new LDConfig.Builder()
+   *         .dataSource(f)
+   *         .build();
+   * 
+ *

+ * This will cause the client not to connect to LaunchDarkly to get feature flags. The + * client may still make network connections to send analytics events, unless you have disabled + * this with {@link com.launchdarkly.client.LDConfig.Builder#sendEvents(boolean)} or + * {@link com.launchdarkly.client.LDConfig.Builder#offline(boolean)}. + *
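For illustration, a minimal end-to-end sketch of this pattern; the file path, flag key, and SDK key below are placeholders:

    import com.launchdarkly.client.LDClient;
    import com.launchdarkly.client.LDConfig;
    import com.launchdarkly.client.LDUser;
    import com.launchdarkly.client.integrations.FileData;

    public class FileDataSmokeTest {
      public static void main(String[] args) throws Exception {
        LDConfig config = new LDConfig.Builder()
            .dataSource(FileData.dataSource()
                .filePaths("./testData/flags.json")  // placeholder path
                .autoUpdate(true))
            .sendEvents(false)                       // keep the test free of analytics traffic
            .build();
        // The SDK key is not used when flags come from a file, but the constructor still requires one.
        try (LDClient client = new LDClient("fake-sdk-key", config)) {
          LDUser user = new LDUser("test-user-key");
          System.out.println(client.boolVariation("my-boolean-flag-key", user, false));
        }
      }
    }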

+ * Flag data files can be either JSON or YAML. They contain an object with three possible + * properties: + *

    + *
  • {@code flags}: Feature flag definitions. + *
  • {@code flagValues}: Simplified feature flags that contain only a value. + *
  • {@code segments}: User segment definitions. + *
+ *

+ * The format of the data in {@code flags} and {@code segments} is defined by the LaunchDarkly application + * and is subject to change. Rather than trying to construct these objects yourself, it is simpler + * to request existing flags directly from the LaunchDarkly server in JSON format, and use this + * output as the starting point for your file. In Linux you would do this: + *

+   *     curl -H "Authorization: {your sdk key}" https://app.launchdarkly.com/sdk/latest-all
+   * 
+ *

+ * The output will look something like this (but with many more properties): + *

+   * {
+   *     "flags": {
+   *         "flag-key-1": {
+   *             "key": "flag-key-1",
+   *             "on": true,
+   *             "variations": [ "a", "b" ]
+   *         },
+   *         "flag-key-2": {
+   *             "key": "flag-key-2",
+   *             "on": true,
+   *             "variations": [ "c", "d" ]
+   *         }
+   *     },
+   *     "segments": {
+   *         "segment-key-1": {
+   *             "key": "segment-key-1",
+   *             "includes": [ "user-key-1" ]
+   *         }
+   *     }
+   * }
+   * 
+ *

+ * Data in this format allows the SDK to exactly duplicate all the kinds of flag behavior supported + * by LaunchDarkly. However, in many cases you will not need this complexity, but will just want to + * set specific flag keys to specific values. For that, you can use a much simpler format: + *

+   * {
+   *     "flagValues": {
+   *         "my-string-flag-key": "value-1",
+   *         "my-boolean-flag-key": true,
+   *         "my-integer-flag-key": 3
+   *     }
+   * }
+   * 
+ *

+ * Or, in YAML: + *

+   * flagValues:
+   *   my-string-flag-key: "value-1"
+   *   my-boolean-flag-key: true
+   *   my-integer-flag-key: 3
+   * 
+ *
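As a quick illustration, a client configured with the flagValues file above (as in the earlier sketch) would evaluate those keys roughly as follows:

    // Sketch: 'client' is an LDClient built with this file data source, as shown earlier.
    LDUser user = new LDUser("test-user-key");
    String s  = client.stringVariation("my-string-flag-key", user, "default"); // "value-1"
    boolean b = client.boolVariation("my-boolean-flag-key", user, false);      // true
    int n     = client.intVariation("my-integer-flag-key", user, 0);           // 3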

+ * It is also possible to specify both {@code flags} and {@code flagValues}, if you want some flags + * to have simple values and others to have complex behavior. However, it is an error to use the + * same flag key or segment key more than once, either in a single file or across multiple files. + *

+ * If the data source encounters any error in any file-- malformed content, a missing file, or a + * duplicate key-- it will not load flags from any of the files. + * + * @return a data source configuration object + * @since 4.11.0 + */ + public static FileDataSourceBuilder dataSource() { + return new FileDataSourceBuilder(); + } + + private FileData() {} +} diff --git a/src/main/java/com/launchdarkly/client/integrations/FileDataSourceBuilder.java b/src/main/java/com/launchdarkly/client/integrations/FileDataSourceBuilder.java new file mode 100644 index 000000000..4c3cd1993 --- /dev/null +++ b/src/main/java/com/launchdarkly/client/integrations/FileDataSourceBuilder.java @@ -0,0 +1,83 @@ +package com.launchdarkly.client.integrations; + +import com.launchdarkly.client.FeatureStore; +import com.launchdarkly.client.LDConfig; +import com.launchdarkly.client.UpdateProcessor; +import com.launchdarkly.client.UpdateProcessorFactory; + +import java.nio.file.InvalidPathException; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.List; + +/** + * To use the file data source, obtain a new instance of this class with {@link FileData#dataSource()}, + * call the builder method {@link #filePaths(String...)} to specify file path(s), + * then pass the resulting object to {@link com.launchdarkly.client.LDConfig.Builder#dataSource(UpdateProcessorFactory)}. + *

+ * For more details, see {@link FileData}. + * + * @since 4.11.0 + */ +public final class FileDataSourceBuilder implements UpdateProcessorFactory { + private final List sources = new ArrayList<>(); + private boolean autoUpdate = false; + + /** + * Adds any number of source files for loading flag data, specifying each file path as a string. The files will + * not actually be loaded until the LaunchDarkly client starts up. + *

+ * Files will be parsed as JSON if their first non-whitespace character is '{'. Otherwise, they will be parsed as YAML. + * + * @param filePaths path(s) to the source file(s); may be absolute or relative to the current working directory + * @return the same factory object + * + * @throws InvalidPathException if one of the parameters is not a valid file path + */ + public FileDataSourceBuilder filePaths(String... filePaths) throws InvalidPathException { + for (String p: filePaths) { + sources.add(Paths.get(p)); + } + return this; + } + + /** + * Adds any number of source files for loading flag data, specifying each file path as a Path. The files will + * not actually be loaded until the LaunchDarkly client starts up. + *

+ * Files will be parsed as JSON if their first non-whitespace character is '{'. Otherwise, they will be parsed as YAML. + * + * @param filePaths path(s) to the source file(s); may be absolute or relative to the current working directory + * @return the same factory object + */ + public FileDataSourceBuilder filePaths(Path... filePaths) { + for (Path p: filePaths) { + sources.add(p); + } + return this; + } + + /** + * Specifies whether the data source should watch for changes to the source file(s) and reload flags + * whenever there is a change. By default, it will not, so the flags will only be loaded once. + *

+ * Note that auto-updating will only work if all of the files you specified have valid directory paths at + * startup time; if a directory does not exist, creating it later will not result in files being loaded from it. + * + * @param autoUpdate true if flags should be reloaded whenever a source file changes + * @return the same factory object + */ + public FileDataSourceBuilder autoUpdate(boolean autoUpdate) { + this.autoUpdate = autoUpdate; + return this; + } + + /** + * Used internally by the LaunchDarkly client. + */ + @Override + public UpdateProcessor createUpdateProcessor(String sdkKey, LDConfig config, FeatureStore featureStore) { + return new FileDataSourceImpl(featureStore, sources, autoUpdate); + } +} \ No newline at end of file diff --git a/src/main/java/com/launchdarkly/client/files/FileDataSource.java b/src/main/java/com/launchdarkly/client/integrations/FileDataSourceImpl.java similarity index 56% rename from src/main/java/com/launchdarkly/client/files/FileDataSource.java rename to src/main/java/com/launchdarkly/client/integrations/FileDataSourceImpl.java index e040e7902..cd2244564 100644 --- a/src/main/java/com/launchdarkly/client/files/FileDataSource.java +++ b/src/main/java/com/launchdarkly/client/integrations/FileDataSourceImpl.java @@ -1,21 +1,34 @@ -package com.launchdarkly.client.files; +package com.launchdarkly.client.integrations; import com.google.common.util.concurrent.Futures; +import com.google.gson.JsonElement; import com.launchdarkly.client.FeatureStore; import com.launchdarkly.client.UpdateProcessor; +import com.launchdarkly.client.VersionedData; +import com.launchdarkly.client.VersionedDataKind; +import com.launchdarkly.client.integrations.FileDataSourceParsing.FileDataException; +import com.launchdarkly.client.integrations.FileDataSourceParsing.FlagFactory; +import com.launchdarkly.client.integrations.FileDataSourceParsing.FlagFileParser; +import com.launchdarkly.client.integrations.FileDataSourceParsing.FlagFileRep; import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.ByteArrayInputStream; import java.io.IOException; import java.nio.file.FileSystem; import java.nio.file.FileSystems; +import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.WatchEvent; import java.nio.file.WatchKey; import java.nio.file.WatchService; import java.nio.file.Watchable; +import java.util.ArrayList; +import java.util.HashMap; import java.util.HashSet; +import java.util.List; +import java.util.Map; import java.util.Set; import java.util.concurrent.Future; import java.util.concurrent.atomic.AtomicBoolean; @@ -25,20 +38,20 @@ import static java.nio.file.StandardWatchEventKinds.ENTRY_MODIFY; /** - * Implements taking flag data from files and putting it into the feature store, at startup time and + * Implements taking flag data from files and putting it into the data store, at startup time and * optionally whenever files change. 
*/ -class FileDataSource implements UpdateProcessor { - private static final Logger logger = LoggerFactory.getLogger(FileDataSource.class); +final class FileDataSourceImpl implements UpdateProcessor { + private static final Logger logger = LoggerFactory.getLogger(FileDataSourceImpl.class); private final FeatureStore store; private final DataLoader dataLoader; private final AtomicBoolean inited = new AtomicBoolean(false); private final FileWatcher fileWatcher; - FileDataSource(FeatureStore store, DataLoader dataLoader, boolean autoUpdate) { + FileDataSourceImpl(FeatureStore store, List sources, boolean autoUpdate) { this.store = store; - this.dataLoader = dataLoader; + this.dataLoader = new DataLoader(sources); FileWatcher fw = null; if (autoUpdate) { @@ -65,7 +78,7 @@ public Future start() { if (fileWatcher != null) { fileWatcher.start(new Runnable() { public void run() { - FileDataSource.this.reload(); + FileDataSourceImpl.this.reload(); } }); } @@ -77,7 +90,7 @@ private boolean reload() { DataBuilder builder = new DataBuilder(); try { dataLoader.load(builder); - } catch (DataLoaderException e) { + } catch (FileDataException e) { logger.error(e.getDescription()); return false; } @@ -101,7 +114,7 @@ public void close() throws IOException { /** * If auto-updating is enabled, this component watches for file changes on a worker thread. */ - private static class FileWatcher implements Runnable { + private static final class FileWatcher implements Runnable { private final WatchService watchService; private final Set watchedFilePaths; private Runnable fileModifiedAction; @@ -165,7 +178,7 @@ public void run() { public void start(Runnable fileModifiedAction) { this.fileModifiedAction = fileModifiedAction; - thread = new Thread(this, FileDataSource.class.getName()); + thread = new Thread(this, FileDataSourceImpl.class.getName()); thread.setDaemon(true); thread.start(); } @@ -177,4 +190,75 @@ public void stop() { } } } + + /** + * Implements the loading of flag data from one or more files. Will throw an exception if any file can't + * be read or parsed, or if any flag or segment keys are duplicates. + */ + static final class DataLoader { + private final List files; + + public DataLoader(List files) { + this.files = new ArrayList(files); + } + + public Iterable getFiles() { + return files; + } + + public void load(DataBuilder builder) throws FileDataException + { + for (Path p: files) { + try { + byte[] data = Files.readAllBytes(p); + FlagFileParser parser = FlagFileParser.selectForContent(data); + FlagFileRep fileContents = parser.parse(new ByteArrayInputStream(data)); + if (fileContents.flags != null) { + for (Map.Entry e: fileContents.flags.entrySet()) { + builder.add(VersionedDataKind.FEATURES, FlagFactory.flagFromJson(e.getValue())); + } + } + if (fileContents.flagValues != null) { + for (Map.Entry e: fileContents.flagValues.entrySet()) { + builder.add(VersionedDataKind.FEATURES, FlagFactory.flagWithValue(e.getKey(), e.getValue())); + } + } + if (fileContents.segments != null) { + for (Map.Entry e: fileContents.segments.entrySet()) { + builder.add(VersionedDataKind.SEGMENTS, FlagFactory.segmentFromJson(e.getValue())); + } + } + } catch (FileDataException e) { + throw new FileDataException(e.getMessage(), e.getCause(), p); + } catch (IOException e) { + throw new FileDataException(null, e, p); + } + } + } + } + + /** + * Internal data structure that organizes flag/segment data into the format that the feature store + * expects. 
Will throw an exception if we try to add the same flag or segment key more than once. + */ + static final class DataBuilder { + private final Map, Map> allData = new HashMap<>(); + + public Map, Map> build() { + return allData; + } + + public void add(VersionedDataKind kind, VersionedData item) throws FileDataException { + @SuppressWarnings("unchecked") + Map items = (Map)allData.get(kind); + if (items == null) { + items = new HashMap(); + allData.put(kind, items); + } + if (items.containsKey(item.getKey())) { + throw new FileDataException("in " + kind.getNamespace() + ", key \"" + item.getKey() + "\" was already defined", null, null); + } + items.put(item.getKey(), item); + } + } } diff --git a/src/main/java/com/launchdarkly/client/integrations/FileDataSourceParsing.java b/src/main/java/com/launchdarkly/client/integrations/FileDataSourceParsing.java new file mode 100644 index 000000000..08083e4d9 --- /dev/null +++ b/src/main/java/com/launchdarkly/client/integrations/FileDataSourceParsing.java @@ -0,0 +1,223 @@ +package com.launchdarkly.client.integrations; + +import com.google.gson.Gson; +import com.google.gson.JsonArray; +import com.google.gson.JsonElement; +import com.google.gson.JsonObject; +import com.google.gson.JsonSyntaxException; +import com.launchdarkly.client.VersionedData; +import com.launchdarkly.client.VersionedDataKind; + +import org.yaml.snakeyaml.Yaml; +import org.yaml.snakeyaml.error.YAMLException; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.InputStreamReader; +import java.io.Reader; +import java.nio.file.Path; +import java.util.Map; + +abstract class FileDataSourceParsing { + /** + * Indicates that the file processor encountered an error in one of the input files. This exception is + * not surfaced to the host application, it is only logged, and we don't do anything different programmatically + * with different kinds of exceptions, therefore it has no subclasses. + */ + @SuppressWarnings("serial") + static final class FileDataException extends Exception { + private final Path filePath; + + public FileDataException(String message, Throwable cause, Path filePath) { + super(message, cause); + this.filePath = filePath; + } + + public FileDataException(String message, Throwable cause) { + this(message, cause, null); + } + + public Path getFilePath() { + return filePath; + } + + public String getDescription() { + StringBuilder s = new StringBuilder(); + if (getMessage() != null) { + s.append(getMessage()); + if (getCause() != null) { + s.append(" "); + } + } + if (getCause() != null) { + s.append(" [").append(getCause().toString()).append("]"); + } + if (filePath != null) { + s.append(": ").append(filePath); + } + return s.toString(); + } + } + + /** + * The basic data structure that we expect all source files to contain. Note that we don't try to + * parse the flags or segments at this level; that will be done by {@link FlagFactory}. 
+ */ + static final class FlagFileRep { + Map flags; + Map flagValues; + Map segments; + + FlagFileRep() {} + + FlagFileRep(Map flags, Map flagValues, Map segments) { + this.flags = flags; + this.flagValues = flagValues; + this.segments = segments; + } + } + + static abstract class FlagFileParser { + private static final FlagFileParser jsonParser = new JsonFlagFileParser(); + private static final FlagFileParser yamlParser = new YamlFlagFileParser(); + + public abstract FlagFileRep parse(InputStream input) throws FileDataException, IOException; + + public static FlagFileParser selectForContent(byte[] data) { + Reader r = new InputStreamReader(new ByteArrayInputStream(data)); + return detectJson(r) ? jsonParser : yamlParser; + } + + private static boolean detectJson(Reader r) { + // A valid JSON file for our purposes must be an object, i.e. it must start with '{' + while (true) { + try { + int ch = r.read(); + if (ch < 0) { + return false; + } + if (ch == '{') { + return true; + } + if (!Character.isWhitespace(ch)) { + return false; + } + } catch (IOException e) { + return false; + } + } + } + } + + static final class JsonFlagFileParser extends FlagFileParser { + private static final Gson gson = new Gson(); + + @Override + public FlagFileRep parse(InputStream input) throws FileDataException, IOException { + try { + return parseJson(gson.fromJson(new InputStreamReader(input), JsonElement.class)); + } catch (JsonSyntaxException e) { + throw new FileDataException("cannot parse JSON", e); + } + } + + public FlagFileRep parseJson(JsonElement tree) throws FileDataException, IOException { + try { + return gson.fromJson(tree, FlagFileRep.class); + } catch (JsonSyntaxException e) { + throw new FileDataException("cannot parse JSON", e); + } + } + } + + /** + * Parses a FlagFileRep from a YAML file. Two notes about this implementation: + *

+ * 1. We already have logic for parsing (and building) flags using Gson, and would rather not repeat + * that logic. So, rather than telling SnakeYAML to parse the file directly into a FlagFileRep object - + * and providing SnakeYAML-specific methods for building flags - we are just parsing the YAML into + * simple Java objects and then feeding that data into the Gson parser. This is admittedly inefficient, + * but it also means that we don't have to worry about any differences between how Gson unmarshals an + * object and how the YAML parser does it. We already know Gson does the right thing for the flag and + * segment classes, because that's what we use in the SDK. + *

+ * 2. Ideally, it should be possible to have just one parser, since any valid JSON document is supposed + * to also be parseable as YAML. However, at present, that doesn't work: + *

    + *
  • SnakeYAML (1.19) rejects many valid JSON documents due to simple things like whitespace. + * Apparently this is due to supporting only YAML 1.1, not YAML 1.2 which has full JSON support. + *
  • snakeyaml-engine (https://bitbucket.org/asomov/snakeyaml-engine) says it can handle any JSON, + * but it's only for Java 8 and above. + *
  • YamlBeans (https://github.com/EsotericSoftware/yamlbeans) only works right if you're parsing + * directly into a Java bean instance (which FeatureFlag is not). If you try the "parse to simple + * Java types (and then feed them into Gson)" approach, it does not correctly parse non-string types + * (i.e. it treats true as "true"). (https://github.com/EsotericSoftware/yamlbeans/issues/7) + *
+ */ + static final class YamlFlagFileParser extends FlagFileParser { + private static final Yaml yaml = new Yaml(); + private static final Gson gson = new Gson(); + private static final JsonFlagFileParser jsonFileParser = new JsonFlagFileParser(); + + @Override + public FlagFileRep parse(InputStream input) throws FileDataException, IOException { + Object root; + try { + root = yaml.load(input); + } catch (YAMLException e) { + throw new FileDataException("unable to parse YAML", e); + } + JsonElement jsonRoot = gson.toJsonTree(root); + return jsonFileParser.parseJson(jsonRoot); + } + } + + /** + * Creates flag or segment objects from raw JSON. + * + * Note that the {@code FeatureFlag} and {@code Segment} classes are not public in the Java + * client, so we refer to those class objects indirectly via {@code VersionedDataKind}; and + * if we want to construct a flag from scratch, we can't use the constructor but instead must + * build some JSON and then parse that. + */ + static final class FlagFactory { + private static final Gson gson = new Gson(); + + static VersionedData flagFromJson(String jsonString) { + return flagFromJson(gson.fromJson(jsonString, JsonElement.class)); + } + + static VersionedData flagFromJson(JsonElement jsonTree) { + return gson.fromJson(jsonTree, VersionedDataKind.FEATURES.getItemClass()); + } + + /** + * Constructs a flag that always returns the same value. This is done by giving it a single + * variation and setting the fallthrough variation to that. + */ + static VersionedData flagWithValue(String key, JsonElement value) { + JsonElement jsonValue = gson.toJsonTree(value); + JsonObject o = new JsonObject(); + o.addProperty("key", key); + o.addProperty("on", true); + JsonArray vs = new JsonArray(); + vs.add(jsonValue); + o.add("variations", vs); + // Note that LaunchDarkly normally prevents you from creating a flag with just one variation, + // but it's the application that validates that; the SDK doesn't care. + JsonObject ft = new JsonObject(); + ft.addProperty("variation", 0); + o.add("fallthrough", ft); + return flagFromJson(o); + } + + static VersionedData segmentFromJson(String jsonString) { + return segmentFromJson(gson.fromJson(jsonString, JsonElement.class)); + } + + static VersionedData segmentFromJson(JsonElement jsonTree) { + return gson.fromJson(jsonTree, VersionedDataKind.SEGMENTS.getItemClass()); + } + } +} diff --git a/src/main/java/com/launchdarkly/client/integrations/PersistentDataStoreBuilder.java b/src/main/java/com/launchdarkly/client/integrations/PersistentDataStoreBuilder.java new file mode 100644 index 000000000..43a1b42b3 --- /dev/null +++ b/src/main/java/com/launchdarkly/client/integrations/PersistentDataStoreBuilder.java @@ -0,0 +1,216 @@ +package com.launchdarkly.client.integrations; + +import com.launchdarkly.client.FeatureStore; +import com.launchdarkly.client.FeatureStoreCacheConfig; +import com.launchdarkly.client.FeatureStoreFactory; +import com.launchdarkly.client.interfaces.PersistentDataStoreFactory; +import com.launchdarkly.client.utils.CachingStoreWrapper; +import com.launchdarkly.client.utils.FeatureStoreCore; + +import java.util.concurrent.TimeUnit; + +/** + * A configurable factory for a persistent data store. + *

+ * Several database integrations exist for the LaunchDarkly SDK, each with its own behavior and options + * specific to that database; this is described via some implementation of {@link PersistentDataStoreFactory}. + * There is also universal behavior that the SDK provides for all persistent data stores, such as caching; + * the {@link PersistentDataStoreBuilder} adds this. + *

+ * After configuring this object, pass it to {@link com.launchdarkly.client.LDConfig.Builder#dataStore(FeatureStoreFactory)} + * to use it in the SDK configuration. For example, using the Redis integration: + * + *


+ *     LDConfig config = new LDConfig.Builder()
+ *         .dataStore(
+ *             Components.persistentDataStore(
+ *                 Redis.dataStore().uri(URI.create("redis://my-redis-host"))
+ *             ).cacheSeconds(15)
+ *         )
+ *         .build();
+ * 
+ * + * In this example, {@code .url()} is an option specifically for the Redis integration, whereas + * {@code ttlSeconds()} is an option that can be used for any persistent data store. + * + * @since 4.11.0 + */ +@SuppressWarnings("deprecation") +public final class PersistentDataStoreBuilder implements FeatureStoreFactory { + /** + * The default value for the cache TTL. + */ + public static final int DEFAULT_CACHE_TTL_SECONDS = 15; + + private final PersistentDataStoreFactory persistentDataStoreFactory; + FeatureStoreCacheConfig caching = FeatureStoreCacheConfig.DEFAULT; + CacheMonitor cacheMonitor = null; + + /** + * Possible values for {@link #staleValuesPolicy(StaleValuesPolicy)}. + */ + public enum StaleValuesPolicy { + /** + * Indicates that when the cache TTL expires for an item, it is evicted from the cache. The next + * attempt to read that item causes a synchronous read from the underlying data store; if that + * fails, no value is available. This is the default behavior. + * + * @see com.google.common.cache.CacheBuilder#expireAfterWrite(long, TimeUnit) + */ + EVICT, + /** + * Indicates that the cache should refresh stale values instead of evicting them. + *

+ * In this mode, an attempt to read an expired item causes a synchronous read from the underlying + * data store, like {@link #EVICT}--but if an error occurs during this refresh, the cache will + * continue to return the previously cached values (if any). This is useful if you prefer the most + * recently cached feature rule set to be returned for evaluation over the default value when + * updates go wrong. + *

+ * See: CacheBuilder + * for more specific information on cache semantics. This mode is equivalent to {@code refreshAfterWrite}. + */ + REFRESH, + /** + * Indicates that the cache should refresh stale values asynchronously instead of evicting them. + *

+ * This is the same as {@link #REFRESH}, except that the attempt to refresh the value is done + * on another thread (using a {@link java.util.concurrent.Executor}). In the meantime, the cache + * will continue to return the previously cached value (if any) in a non-blocking fashion to threads + * requesting the stale key. Any exception encountered during the asynchronous reload will cause + * the previously cached value to be retained. + *

+ * This setting is ideal when you want high-performance reads and can accept returning + * stale values for the period of the async refresh. For example, configuring this data store + * with a very low cache time and enabling this policy can yield a significant performance benefit by + * decoupling flag reads from network I/O. + *
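For instance, a configuration sketch selecting this policy (using the Redis integration from this change; the short TTL is an arbitrary choice):

    // Assumes: com.launchdarkly.client.Components, com.launchdarkly.client.LDConfig,
    // and com.launchdarkly.client.integrations.{PersistentDataStoreBuilder, Redis}.
    LDConfig config = new LDConfig.Builder()
        .dataStore(
            Components.persistentDataStore(Redis.dataStore())
                .cacheSeconds(5)
                .staleValuesPolicy(PersistentDataStoreBuilder.StaleValuesPolicy.REFRESH_ASYNC)
        )
        .build();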

+ * See: CacheBuilder for + * more specific information on cache semantics. + */ + REFRESH_ASYNC + }; + + /** + * Creates a new builder. + * + * @param persistentDataStoreFactory the factory implementation for the specific data store type + */ + public PersistentDataStoreBuilder(PersistentDataStoreFactory persistentDataStoreFactory) { + this.persistentDataStoreFactory = persistentDataStoreFactory; + } + + @Override + public FeatureStore createFeatureStore() { + FeatureStoreCore core = persistentDataStoreFactory.createPersistentDataStore(); + return CachingStoreWrapper.builder(core) + .caching(caching) + .cacheMonitor(cacheMonitor) + .build(); + } + + /** + * Specifies that the SDK should not use an in-memory cache for the persistent data store. + * This means that every feature flag evaluation will trigger a data store query. + * + * @return the builder + */ + public PersistentDataStoreBuilder noCaching() { + return cacheTime(0, TimeUnit.MILLISECONDS); + } + + /** + * Specifies the cache TTL. Items will be evicted or refreshed (depending on the StaleValuesPolicy) + * after this amount of time from the time when they were originally cached. + *

+ * If the value is zero, caching is disabled (equivalent to {@link #noCaching()}). + *

+ * If the value is negative, data is cached forever (equivalent to {@link #cacheForever()}). + * + * @param cacheTime the cache TTL in whatever units you wish + * @param cacheTimeUnit the time unit + * @return the builder + */ + public PersistentDataStoreBuilder cacheTime(long cacheTime, TimeUnit cacheTimeUnit) { + caching = caching.ttl(cacheTime, cacheTimeUnit); + return this; + } + + /** + * Shortcut for calling {@link #cacheTime(long, TimeUnit)} with {@link TimeUnit#MILLISECONDS}. + * + * @param millis the cache TTL in milliseconds + * @return the builder + */ + public PersistentDataStoreBuilder cacheMillis(long millis) { + return cacheTime(millis, TimeUnit.MILLISECONDS); + } + + /** + * Shortcut for calling {@link #cacheTime(long, TimeUnit)} with {@link TimeUnit#SECONDS}. + * + * @param seconds the cache TTL in seconds + * @return the builder + */ + public PersistentDataStoreBuilder cacheSeconds(long seconds) { + return cacheTime(seconds, TimeUnit.SECONDS); + } + + /** + * Specifies that the in-memory cache should never expire. In this mode, data will be written + * to both the underlying persistent store and the cache, but will only ever be read from + * the persistent store if the SDK is restarted. + *

+ * Use this mode with caution: it means that in a scenario where multiple processes are sharing + * the database, and the current process loses connectivity to LaunchDarkly while other processes + * are still receiving updates and writing them to the database, the current process will have + * stale data. + * + * @return the builder + */ + public PersistentDataStoreBuilder cacheForever() { + return cacheTime(-1, TimeUnit.MILLISECONDS); + } + + /** + * Specifies how the cache (if any) should deal with old values when the cache TTL expires. The default + * is {@link StaleValuesPolicy#EVICT}. This property has no effect if caching is disabled. + * + * @param staleValuesPolicy a {@link StaleValuesPolicy} constant + * @return the builder + */ + public PersistentDataStoreBuilder staleValuesPolicy(StaleValuesPolicy staleValuesPolicy) { + caching = caching.staleValuesPolicy(FeatureStoreCacheConfig.StaleValuesPolicy.fromNewEnum(staleValuesPolicy)); + return this; + } + + /** + * Provides a conduit for an application to monitor the effectiveness of the in-memory cache. + *

+ * Create an instance of {@link CacheMonitor}; retain a reference to it, and also pass it to this + * method when you are configuring the persistent data store. The store will use + * {@link CacheMonitor#setSource(java.util.concurrent.Callable)} to make the caching + * statistics available through that {@link CacheMonitor} instance. + *

+ * Note that turning on cache monitoring may slightly decrease performance, due to the need to + * record statistics for each cache operation. + *

+ * Example usage: + * + *


+   *     CacheMonitor cacheMonitor = new CacheMonitor();
+   *     LDConfig config = new LDConfig.Builder()
+   *         .dataStore(Components.persistentDataStore(Redis.dataStore()).cacheMonitor(cacheMonitor))
+   *         .build();
+   *     // later...
+   *     CacheMonitor.CacheStats stats = cacheMonitor.getCacheStats();
+   * 
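A further sketch of how an application might poll the monitor; the scheduling interval and log format here are arbitrary choices, not part of the SDK:

    import java.util.concurrent.Executors;
    import java.util.concurrent.ScheduledExecutorService;
    import java.util.concurrent.TimeUnit;

    import com.launchdarkly.client.integrations.CacheMonitor;

    final class CacheStatsLogger {
      // Periodically logs statistics from a CacheMonitor that was registered via
      // PersistentDataStoreBuilder.cacheMonitor(), as in the example above.
      static ScheduledExecutorService startLogging(final CacheMonitor cacheMonitor) {
        ScheduledExecutorService scheduler = Executors.newSingleThreadScheduledExecutor();
        scheduler.scheduleAtFixedRate(new Runnable() {
          public void run() {
            CacheMonitor.CacheStats stats = cacheMonitor.getCacheStats();
            if (stats != null) { // null until the SDK has attached a statistics source
              long requests = stats.getHitCount() + stats.getMissCount();
              double hitRate = requests == 0 ? 0.0 : (double) stats.getHitCount() / requests;
              System.out.println("data store cache: hits=" + stats.getHitCount()
                  + " misses=" + stats.getMissCount() + " hitRate=" + hitRate
                  + " evictions=" + stats.getEvictionCount());
            }
          }
        }, 1, 1, TimeUnit.MINUTES);
        return scheduler;
      }
    }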
+ * + * @param cacheMonitor an instance of {@link CacheMonitor} + * @return the builder + */ + public PersistentDataStoreBuilder cacheMonitor(CacheMonitor cacheMonitor) { + this.cacheMonitor = cacheMonitor; + return this; + } +} diff --git a/src/main/java/com/launchdarkly/client/integrations/Redis.java b/src/main/java/com/launchdarkly/client/integrations/Redis.java new file mode 100644 index 000000000..7a167ae9d --- /dev/null +++ b/src/main/java/com/launchdarkly/client/integrations/Redis.java @@ -0,0 +1,35 @@ +package com.launchdarkly.client.integrations; + +/** + * Integration between the LaunchDarkly SDK and Redis. + * + * @since 4.11.0 + */ +public abstract class Redis { + /** + * Returns a builder object for creating a Redis-backed data store. + *

+ * This object can be modified with {@link RedisDataStoreBuilder} methods for any desired + * custom Redis options. Then, pass it to {@link com.launchdarkly.client.Components#persistentDataStore(com.launchdarkly.client.interfaces.PersistentDataStoreFactory)} + * and set any desired caching options. Finally, pass the result to + * {@link com.launchdarkly.client.LDConfig.Builder#dataStore(com.launchdarkly.client.FeatureStoreFactory)}. + * For example: + * + *


+   *     LDConfig config = new LDConfig.Builder()
+   *         .dataStore(
+   *             Components.persistentDataStore(
+   *                 Redis.dataStore().uri(URI.create("redis://my-redis-host"))
+   *             ).cacheSeconds(15)
+   *         )
+   *         .build();
+   * 
+ * + * @return a data store configuration object + */ + public static RedisDataStoreBuilder dataStore() { + return new RedisDataStoreBuilder(); + } + + private Redis() {} +} diff --git a/src/main/java/com/launchdarkly/client/integrations/RedisDataStoreBuilder.java b/src/main/java/com/launchdarkly/client/integrations/RedisDataStoreBuilder.java new file mode 100644 index 000000000..70b25b792 --- /dev/null +++ b/src/main/java/com/launchdarkly/client/integrations/RedisDataStoreBuilder.java @@ -0,0 +1,167 @@ +package com.launchdarkly.client.integrations; + +import com.launchdarkly.client.interfaces.PersistentDataStoreFactory; +import com.launchdarkly.client.utils.FeatureStoreCore; + +import java.net.URI; +import java.util.concurrent.TimeUnit; + +import static com.google.common.base.Preconditions.checkNotNull; + +import redis.clients.jedis.JedisPoolConfig; +import redis.clients.jedis.Protocol; + +/** + * A builder for configuring the Redis-based persistent data store. + *

+ * Obtain an instance of this class by calling {@link Redis#dataStore()}. After calling its methods + * to specify any desired custom settings, wrap it with {@link com.launchdarkly.client.Components#persistentDataStore(com.launchdarkly.client.interfaces.PersistentDataStoreFactory)} + * and pass the result into the SDK configuration with + * {@link com.launchdarkly.client.LDConfig.Builder#dataStore(com.launchdarkly.client.FeatureStoreFactory)}. + * You do not need to call {@link #createPersistentDataStore()} yourself to build the actual data store; that + * will be done by the SDK. + *

+ * Builder calls can be chained, for example: + * + *


+ * LDConfig config = new LDConfig.Builder()
+ *      .dataStore(
+ *           Components.persistentDataStore(
+ *               Redis.dataStore().database(1)
+ *           ).cacheSeconds(60)
+ *      )
+ *      .build();
+ * 
+ * + * @since 4.11.0 + */ +public final class RedisDataStoreBuilder implements PersistentDataStoreFactory { + /** + * The default value for the Redis URI: {@code redis://localhost:6379} + */ + public static final URI DEFAULT_URI = URI.create("redis://localhost:6379"); + + /** + * The default value for {@link #prefix(String)}. + */ + public static final String DEFAULT_PREFIX = "launchdarkly"; + + URI uri = DEFAULT_URI; + String prefix = DEFAULT_PREFIX; + int connectTimeout = Protocol.DEFAULT_TIMEOUT; + int socketTimeout = Protocol.DEFAULT_TIMEOUT; + Integer database = null; + String password = null; + boolean tls = false; + JedisPoolConfig poolConfig = null; + + // These constructors are called only from Implementations + RedisDataStoreBuilder() { + } + + /** + * Specifies the database number to use. + *

+ * The database number can also be specified in the Redis URI, in the form {@code redis://host:port/NUMBER}. Any + * non-null value that you set with {@link #database(Integer)} will override the URI. + * + * @param database the database number, or null to fall back to the URI or the default + * @return the builder + */ + public RedisDataStoreBuilder database(Integer database) { + this.database = database; + return this; + } + + /** + * Specifies a password that will be sent to Redis in an AUTH command. + *

+ * It is also possible to include a password in the Redis URI, in the form {@code redis://:PASSWORD@host:port}. Any + * password that you set with {@link #password(String)} will override the URI. + * + * @param password the password + * @return the builder + */ + public RedisDataStoreBuilder password(String password) { + this.password = password; + return this; + } + + /** + * Optionally enables TLS for secure connections to Redis. + *

+ * This is equivalent to specifying a Redis URI that begins with {@code rediss:} rather than {@code redis:}. + *
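To illustrate that equivalence, a sketch (the host, port, and password are placeholders; uses java.net.URI):

    // These two builders are intended to produce equivalent connections; explicit
    // builder settings such as password() take precedence over values in the URI.
    RedisDataStoreBuilder viaUri = Redis.dataStore()
        .uri(URI.create("rediss://:my-password@my-redis-host:6380"));

    RedisDataStoreBuilder viaMethods = Redis.dataStore()
        .uri(URI.create("redis://my-redis-host:6380"))
        .tls(true)
        .password("my-password");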

+ * Note that not all Redis server distributions support TLS. + * + * @param tls true to enable TLS + * @return the builder + */ + public RedisDataStoreBuilder tls(boolean tls) { + this.tls = tls; + return this; + } + + /** + * Specifies a Redis host URI other than {@link #DEFAULT_URI}. + * + * @param redisUri the URI of the Redis host + * @return the builder + */ + public RedisDataStoreBuilder uri(URI redisUri) { + this.uri = checkNotNull(uri); + return this; + } + + /** + * Optionally configures the namespace prefix for all keys stored in Redis. + * + * @param prefix the namespace prefix + * @return the builder + */ + public RedisDataStoreBuilder prefix(String prefix) { + this.prefix = prefix; + return this; + } + + /** + * Optional override if you wish to specify your own configuration to the underlying Jedis pool. + * + * @param poolConfig the Jedis pool configuration. + * @return the builder + */ + public RedisDataStoreBuilder poolConfig(JedisPoolConfig poolConfig) { + this.poolConfig = poolConfig; + return this; + } + + /** + * Optional override which sets the connection timeout for the underlying Jedis pool which otherwise defaults to + * {@link redis.clients.jedis.Protocol#DEFAULT_TIMEOUT} + * + * @param connectTimeout the timeout + * @param timeUnit the time unit for the timeout + * @return the builder + */ + public RedisDataStoreBuilder connectTimeout(int connectTimeout, TimeUnit timeUnit) { + this.connectTimeout = (int) timeUnit.toMillis(connectTimeout); + return this; + } + + /** + * Optional override which sets the connection timeout for the underlying Jedis pool which otherwise defaults to + * {@link redis.clients.jedis.Protocol#DEFAULT_TIMEOUT} + * + * @param socketTimeout the socket timeout + * @param timeUnit the time unit for the timeout + * @return the builder + */ + public RedisDataStoreBuilder socketTimeout(int socketTimeout, TimeUnit timeUnit) { + this.socketTimeout = (int) timeUnit.toMillis(socketTimeout); + return this; + } + + @Override + public FeatureStoreCore createPersistentDataStore() { + return new RedisDataStoreImpl(this); + } +} diff --git a/src/main/java/com/launchdarkly/client/integrations/RedisDataStoreImpl.java b/src/main/java/com/launchdarkly/client/integrations/RedisDataStoreImpl.java new file mode 100644 index 000000000..24e3968b7 --- /dev/null +++ b/src/main/java/com/launchdarkly/client/integrations/RedisDataStoreImpl.java @@ -0,0 +1,196 @@ +package com.launchdarkly.client.integrations; + +import com.google.common.annotations.VisibleForTesting; +import com.launchdarkly.client.VersionedData; +import com.launchdarkly.client.VersionedDataKind; +import com.launchdarkly.client.utils.FeatureStoreCore; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import static com.launchdarkly.client.utils.FeatureStoreHelpers.marshalJson; +import static com.launchdarkly.client.utils.FeatureStoreHelpers.unmarshalJson; + +import redis.clients.jedis.Jedis; +import redis.clients.jedis.JedisPool; +import redis.clients.jedis.JedisPoolConfig; +import redis.clients.jedis.Transaction; +import redis.clients.util.JedisURIHelper; + +class RedisDataStoreImpl implements FeatureStoreCore { + private static final Logger logger = LoggerFactory.getLogger(RedisDataStoreImpl.class); + + private final JedisPool pool; + private final String prefix; + private UpdateListener updateListener; + + RedisDataStoreImpl(RedisDataStoreBuilder builder) { + // There is no builder for 
JedisPool, just a large number of constructor overloads. Unfortunately, + // the overloads that accept a URI do not accept the other parameters we need to set, so we need + // to decompose the URI. + String host = builder.uri.getHost(); + int port = builder.uri.getPort(); + String password = builder.password == null ? JedisURIHelper.getPassword(builder.uri) : builder.password; + int database = builder.database == null ? JedisURIHelper.getDBIndex(builder.uri): builder.database.intValue(); + boolean tls = builder.tls || builder.uri.getScheme().equals("rediss"); + + String extra = tls ? " with TLS" : ""; + if (password != null) { + extra = extra + (extra.isEmpty() ? " with" : " and") + " password"; + } + logger.info(String.format("Connecting to Redis feature store at %s:%d/%d%s", host, port, database, extra)); + + JedisPoolConfig poolConfig = (builder.poolConfig != null) ? builder.poolConfig : new JedisPoolConfig(); + JedisPool pool = new JedisPool(poolConfig, + host, + port, + builder.connectTimeout, + builder.socketTimeout, + password, + database, + null, // clientName + tls, + null, // sslSocketFactory + null, // sslParameters + null // hostnameVerifier + ); + + String prefix = (builder.prefix == null || builder.prefix.isEmpty()) ? + RedisDataStoreBuilder.DEFAULT_PREFIX : + builder.prefix; + + this.pool = pool; + this.prefix = prefix; + } + + @Override + public VersionedData getInternal(VersionedDataKind kind, String key) { + try (Jedis jedis = pool.getResource()) { + VersionedData item = getRedis(kind, key, jedis); + if (item != null) { + logger.debug("[get] Key: {} with version: {} found in \"{}\".", key, item.getVersion(), kind.getNamespace()); + } + return item; + } + } + + @Override + public Map getAllInternal(VersionedDataKind kind) { + try (Jedis jedis = pool.getResource()) { + Map allJson = jedis.hgetAll(itemsKey(kind)); + Map result = new HashMap<>(); + + for (Map.Entry entry : allJson.entrySet()) { + VersionedData item = unmarshalJson(kind, entry.getValue()); + result.put(entry.getKey(), item); + } + return result; + } + } + + @Override + public void initInternal(Map, Map> allData) { + try (Jedis jedis = pool.getResource()) { + Transaction t = jedis.multi(); + + for (Map.Entry, Map> entry: allData.entrySet()) { + String baseKey = itemsKey(entry.getKey()); + t.del(baseKey); + for (VersionedData item: entry.getValue().values()) { + t.hset(baseKey, item.getKey(), marshalJson(item)); + } + } + + t.set(initedKey(), ""); + t.exec(); + } + } + + @Override + public VersionedData upsertInternal(VersionedDataKind kind, VersionedData newItem) { + while (true) { + Jedis jedis = null; + try { + jedis = pool.getResource(); + String baseKey = itemsKey(kind); + jedis.watch(baseKey); + + if (updateListener != null) { + updateListener.aboutToUpdate(baseKey, newItem.getKey()); + } + + VersionedData oldItem = getRedis(kind, newItem.getKey(), jedis); + + if (oldItem != null && oldItem.getVersion() >= newItem.getVersion()) { + logger.debug("Attempted to {} key: {} version: {}" + + " with a version that is the same or older: {} in \"{}\"", + newItem.isDeleted() ? 
"delete" : "update", + newItem.getKey(), oldItem.getVersion(), newItem.getVersion(), kind.getNamespace()); + return oldItem; + } + + Transaction tx = jedis.multi(); + tx.hset(baseKey, newItem.getKey(), marshalJson(newItem)); + List result = tx.exec(); + if (result.isEmpty()) { + // if exec failed, it means the watch was triggered and we should retry + logger.debug("Concurrent modification detected, retrying"); + continue; + } + + return newItem; + } finally { + if (jedis != null) { + jedis.unwatch(); + jedis.close(); + } + } + } + } + + @Override + public boolean initializedInternal() { + try (Jedis jedis = pool.getResource()) { + return jedis.exists(initedKey()); + } + } + + @Override + public void close() throws IOException { + logger.info("Closing LaunchDarkly RedisFeatureStore"); + pool.destroy(); + } + + @VisibleForTesting + void setUpdateListener(UpdateListener updateListener) { + this.updateListener = updateListener; + } + + private String itemsKey(VersionedDataKind kind) { + return prefix + ":" + kind.getNamespace(); + } + + private String initedKey() { + return prefix + ":$inited"; + } + + private T getRedis(VersionedDataKind kind, String key, Jedis jedis) { + String json = jedis.hget(itemsKey(kind), key); + + if (json == null) { + logger.debug("[get] Key: {} not found in \"{}\". Returning null", key, kind.getNamespace()); + return null; + } + + return unmarshalJson(kind, json); + } + + static interface UpdateListener { + void aboutToUpdate(String baseKey, String itemKey); + } +} diff --git a/src/main/java/com/launchdarkly/client/integrations/package-info.java b/src/main/java/com/launchdarkly/client/integrations/package-info.java new file mode 100644 index 000000000..589a2c63a --- /dev/null +++ b/src/main/java/com/launchdarkly/client/integrations/package-info.java @@ -0,0 +1,12 @@ +/** + * This package contains integration tools for connecting the SDK to other software components. + *

+ * In the current main LaunchDarkly Java SDK library, this package contains {@link com.launchdarkly.client.integrations.Redis} + * (for using Redis as a store for flag data) and {@link com.launchdarkly.client.integrations.FileData} + * (for reading flags from a file in testing). Other SDK add-on libraries, such as database integrations, + * will define their classes in {@code com.launchdarkly.client.integrations} as well. + *

+ * The general pattern for factory methods in this package is {@code ToolName#componentType()}, + * such as {@code Redis#dataStore()} or {@code FileData#dataSource()}. + */ +package com.launchdarkly.client.integrations; diff --git a/src/main/java/com/launchdarkly/client/interfaces/PersistentDataStoreFactory.java b/src/main/java/com/launchdarkly/client/interfaces/PersistentDataStoreFactory.java new file mode 100644 index 000000000..8931247d3 --- /dev/null +++ b/src/main/java/com/launchdarkly/client/interfaces/PersistentDataStoreFactory.java @@ -0,0 +1,26 @@ +package com.launchdarkly.client.interfaces; + +import com.launchdarkly.client.integrations.PersistentDataStoreBuilder; +import com.launchdarkly.client.utils.FeatureStoreCore; + +/** + * Interface for a factory that creates some implementation of a persistent data store. + *

+ * This interface is implemented by database integrations. Usage is described in + * {@link com.launchdarkly.client.Components#persistentDataStore}. + * + * @see com.launchdarkly.client.Components + * @since 4.11.0 + */ +public interface PersistentDataStoreFactory { + /** + * Called internally from {@link PersistentDataStoreBuilder} to create the implementation object + * for the specific type of data store. + * + * @return the implementation object + * @deprecated Do not reference this method directly, as the {@link FeatureStoreCore} interface + * will be replaced in 5.0. + */ + @Deprecated + FeatureStoreCore createPersistentDataStore(); +} diff --git a/src/main/java/com/launchdarkly/client/interfaces/package-info.java b/src/main/java/com/launchdarkly/client/interfaces/package-info.java new file mode 100644 index 000000000..d798dc8f0 --- /dev/null +++ b/src/main/java/com/launchdarkly/client/interfaces/package-info.java @@ -0,0 +1,7 @@ +/** + * The package for interfaces that allow customization of LaunchDarkly components. + *

+ * You will not need to refer to these types in your code unless you are creating a + * plug-in component, such as a database integration. + */ +package com.launchdarkly.client.interfaces; diff --git a/src/main/java/com/launchdarkly/client/utils/CachingStoreWrapper.java b/src/main/java/com/launchdarkly/client/utils/CachingStoreWrapper.java index 47de79688..a4c7853ce 100644 --- a/src/main/java/com/launchdarkly/client/utils/CachingStoreWrapper.java +++ b/src/main/java/com/launchdarkly/client/utils/CachingStoreWrapper.java @@ -13,10 +13,12 @@ import com.launchdarkly.client.FeatureStoreCacheConfig; import com.launchdarkly.client.VersionedData; import com.launchdarkly.client.VersionedDataKind; +import com.launchdarkly.client.integrations.CacheMonitor; import java.io.IOException; import java.util.HashMap; import java.util.Map; +import java.util.concurrent.Callable; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.ThreadFactory; @@ -31,7 +33,11 @@ * Construct instances of this class with {@link CachingStoreWrapper#builder(FeatureStoreCore)}. * * @since 4.6.0 + * @deprecated Referencing this class directly is deprecated; it is now part of the implementation + * of {@link com.launchdarkly.client.integrations.PersistentDataStoreBuilder} and will be made + * package-private starting in version 5.0. */ +@Deprecated public class CachingStoreWrapper implements FeatureStore { private static final String CACHE_REFRESH_THREAD_POOL_NAME_FORMAT = "CachingStoreWrapper-refresher-pool-%d"; @@ -52,7 +58,7 @@ public static CachingStoreWrapper.Builder builder(FeatureStoreCore core) { return new Builder(core); } - protected CachingStoreWrapper(final FeatureStoreCore core, FeatureStoreCacheConfig caching) { + protected CachingStoreWrapper(final FeatureStoreCore core, FeatureStoreCacheConfig caching, CacheMonitor cacheMonitor) { this.core = core; this.caching = caching; @@ -81,40 +87,45 @@ public Boolean load(String key) throws Exception { } }; - if (caching.isInfiniteTtl()) { - itemCache = CacheBuilder.newBuilder().build(itemLoader); - allCache = CacheBuilder.newBuilder().build(allLoader); - executorService = null; - } else if (caching.getStaleValuesPolicy() == FeatureStoreCacheConfig.StaleValuesPolicy.EVICT) { - // We are using an "expire after write" cache. This will evict stale values and block while loading the latest - // from the underlying data store. - - itemCache = CacheBuilder.newBuilder().expireAfterWrite(caching.getCacheTime(), caching.getCacheTimeUnit()).build(itemLoader); - allCache = CacheBuilder.newBuilder().expireAfterWrite(caching.getCacheTime(), caching.getCacheTimeUnit()).build(allLoader); - executorService = null; - } else { - // We are using a "refresh after write" cache. This will not automatically evict stale values, allowing them - // to be returned if failures occur when updating them. Optionally set the cache to refresh values asynchronously, - // which always returns the previously cached value immediately (this is only done for itemCache, not allCache, - // since retrieving all flags is less frequently needed and we don't want to incur the extra overhead). 
- + if (caching.getStaleValuesPolicy() == FeatureStoreCacheConfig.StaleValuesPolicy.REFRESH_ASYNC) { ThreadFactory threadFactory = new ThreadFactoryBuilder().setNameFormat(CACHE_REFRESH_THREAD_POOL_NAME_FORMAT).setDaemon(true).build(); ExecutorService parentExecutor = Executors.newSingleThreadExecutor(threadFactory); executorService = MoreExecutors.listeningDecorator(parentExecutor); - - if (caching.getStaleValuesPolicy() == FeatureStoreCacheConfig.StaleValuesPolicy.REFRESH_ASYNC) { - itemLoader = CacheLoader.asyncReloading(itemLoader, executorService); - } - itemCache = CacheBuilder.newBuilder().refreshAfterWrite(caching.getCacheTime(), caching.getCacheTimeUnit()).build(itemLoader); - allCache = CacheBuilder.newBuilder().refreshAfterWrite(caching.getCacheTime(), caching.getCacheTimeUnit()).build(allLoader); + + // Note that the REFRESH_ASYNC mode is only used for itemCache, not allCache, since retrieving all flags is + // less frequently needed and we don't want to incur the extra overhead. + itemLoader = CacheLoader.asyncReloading(itemLoader, executorService); + } else { + executorService = null; } - - if (caching.isInfiniteTtl()) { - initCache = CacheBuilder.newBuilder().build(initLoader); + + itemCache = newCacheBuilder(caching, cacheMonitor).build(itemLoader); + allCache = newCacheBuilder(caching, cacheMonitor).build(allLoader); + initCache = newCacheBuilder(caching, cacheMonitor).build(initLoader); + + if (cacheMonitor != null) { + cacheMonitor.setSource(new CacheStatsSource()); + } + } + } + + private static CacheBuilder newCacheBuilder(FeatureStoreCacheConfig caching, CacheMonitor cacheMonitor) { + CacheBuilder builder = CacheBuilder.newBuilder(); + if (!caching.isInfiniteTtl()) { + if (caching.getStaleValuesPolicy() == FeatureStoreCacheConfig.StaleValuesPolicy.EVICT) { + // We are using an "expire after write" cache. This will evict stale values and block while loading the latest + // from the underlying data store. + builder = builder.expireAfterWrite(caching.getCacheTime(), caching.getCacheTimeUnit()); } else { - initCache = CacheBuilder.newBuilder().expireAfterWrite(caching.getCacheTime(), caching.getCacheTimeUnit()).build(initLoader); + // We are using a "refresh after write" cache. This will not automatically evict stale values, allowing them + // to be returned if failures occur when updating them. 
+ builder = builder.refreshAfterWrite(caching.getCacheTime(), caching.getCacheTimeUnit()); } } + if (cacheMonitor != null) { + builder = builder.recordStats(); + } + return builder; } @Override @@ -291,6 +302,23 @@ private ImmutableMap itemsOnlyIfNotDeleted( return builder.build(); } + private final class CacheStatsSource implements Callable { + public CacheMonitor.CacheStats call() { + if (itemCache == null || allCache == null) { + return null; + } + CacheStats itemStats = itemCache.stats(); + CacheStats allStats = allCache.stats(); + return new CacheMonitor.CacheStats( + itemStats.hitCount() + allStats.hitCount(), + itemStats.missCount() + allStats.missCount(), + itemStats.loadSuccessCount() + allStats.loadSuccessCount(), + itemStats.loadExceptionCount() + allStats.loadExceptionCount(), + itemStats.totalLoadTime() + allStats.totalLoadTime(), + itemStats.evictionCount() + allStats.evictionCount()); + } + } + private static class CacheKey { final VersionedDataKind kind; final String key; @@ -326,6 +354,7 @@ public int hashCode() { public static class Builder { private final FeatureStoreCore core; private FeatureStoreCacheConfig caching = FeatureStoreCacheConfig.DEFAULT; + private CacheMonitor cacheMonitor = null; Builder(FeatureStoreCore core) { this.core = core; @@ -341,12 +370,22 @@ public Builder caching(FeatureStoreCacheConfig caching) { return this; } + /** + * Sets the cache monitor instance. + * @param cacheMonitor an instance of {@link CacheMonitor} + * @return the builder + */ + public Builder cacheMonitor(CacheMonitor cacheMonitor) { + this.cacheMonitor = cacheMonitor; + return this; + } + /** * Creates and configures the wrapper object. * @return a {@link CachingStoreWrapper} instance */ public CachingStoreWrapper build() { - return new CachingStoreWrapper(core, caching); + return new CachingStoreWrapper(core, caching, cacheMonitor); } } } diff --git a/src/test/java/com/launchdarkly/client/DeprecatedRedisFeatureStoreTest.java b/src/test/java/com/launchdarkly/client/DeprecatedRedisFeatureStoreTest.java new file mode 100644 index 000000000..058b252d0 --- /dev/null +++ b/src/test/java/com/launchdarkly/client/DeprecatedRedisFeatureStoreTest.java @@ -0,0 +1,80 @@ +package com.launchdarkly.client; + +import com.google.common.cache.CacheStats; + +import org.junit.BeforeClass; +import org.junit.Test; + +import java.net.URI; + +import static com.launchdarkly.client.VersionedDataKind.FEATURES; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; +import static org.junit.Assume.assumeThat; +import static org.junit.Assume.assumeTrue; + +import redis.clients.jedis.Jedis; + +@SuppressWarnings({ "javadoc", "deprecation" }) +public class DeprecatedRedisFeatureStoreTest extends FeatureStoreDatabaseTestBase { + + private static final URI REDIS_URI = URI.create("redis://localhost:6379"); + + public DeprecatedRedisFeatureStoreTest(boolean cached) { + super(cached); + } + + @BeforeClass + public static void maybeSkipDatabaseTests() { + String skipParam = System.getenv("LD_SKIP_DATABASE_TESTS"); + assumeTrue(skipParam == null || skipParam.equals("")); + } + + @Override + protected RedisFeatureStore makeStore() { + RedisFeatureStoreBuilder builder = new RedisFeatureStoreBuilder(REDIS_URI); + builder.caching(cached ? 
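[Editor's note] For reference, this is roughly how an integration that still constructs the deprecated wrapper directly would attach a cache monitor. A minimal sketch, assuming myCore is some existing FeatureStoreCore implementation (the identifier is illustrative, not part of this patch):

import com.launchdarkly.client.FeatureStoreCacheConfig;
import com.launchdarkly.client.integrations.CacheMonitor;
import com.launchdarkly.client.utils.CachingStoreWrapper;
import com.launchdarkly.client.utils.FeatureStoreCore;

@SuppressWarnings("deprecation") // CachingStoreWrapper is deprecated for direct use by this patch
public class CacheMonitorWrapperSketch {
  public static CachingStoreWrapper wrap(FeatureStoreCore myCore, CacheMonitor cacheMonitor) {
    // 30-second TTL; when a monitor is attached, the wrapper enables recordStats() on its caches
    return CachingStoreWrapper.builder(myCore)
        .caching(FeatureStoreCacheConfig.enabled().ttlSeconds(30))
        .cacheMonitor(cacheMonitor)
        .build();
  }
}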
FeatureStoreCacheConfig.enabled().ttlSeconds(30) : FeatureStoreCacheConfig.disabled()); + return builder.build(); + } + + @Override + protected RedisFeatureStore makeStoreWithPrefix(String prefix) { + return new RedisFeatureStoreBuilder(REDIS_URI).caching(FeatureStoreCacheConfig.disabled()).prefix(prefix).build(); + } + + @Override + protected void clearAllData() { + try (Jedis client = new Jedis("localhost")) { + client.flushDB(); + } + } + + @Test + public void canGetCacheStats() { + assumeThat(cached, is(true)); + + CacheStats stats = store.getCacheStats(); + + assertThat(stats, equalTo(new CacheStats(0, 0, 0, 0, 0, 0))); + + // Cause a cache miss + store.get(FEATURES, "key1"); + stats = store.getCacheStats(); + assertThat(stats.hitCount(), equalTo(0L)); + assertThat(stats.missCount(), equalTo(1L)); + assertThat(stats.loadSuccessCount(), equalTo(1L)); // even though it's a miss, it's a "success" because there was no exception + assertThat(stats.loadExceptionCount(), equalTo(0L)); + + // Cause a cache hit + store.upsert(FEATURES, new FeatureFlagBuilder("key2").version(1).build()); // inserting the item also caches it + store.get(FEATURES, "key2"); // now it's a cache hit + stats = store.getCacheStats(); + assertThat(stats.hitCount(), equalTo(1L)); + assertThat(stats.missCount(), equalTo(1L)); + assertThat(stats.loadSuccessCount(), equalTo(1L)); + assertThat(stats.loadExceptionCount(), equalTo(0L)); + + // We have no way to force a load exception with a real Redis store + } +} diff --git a/src/test/java/com/launchdarkly/client/FeatureStoreCachingTest.java b/src/test/java/com/launchdarkly/client/FeatureStoreCachingTest.java index c9259b622..2d90cd4a6 100644 --- a/src/test/java/com/launchdarkly/client/FeatureStoreCachingTest.java +++ b/src/test/java/com/launchdarkly/client/FeatureStoreCachingTest.java @@ -10,7 +10,7 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; -@SuppressWarnings("javadoc") +@SuppressWarnings({ "deprecation", "javadoc" }) public class FeatureStoreCachingTest { @Test public void disabledHasExpectedProperties() { diff --git a/src/test/java/com/launchdarkly/client/LDClientEvaluationTest.java b/src/test/java/com/launchdarkly/client/LDClientEvaluationTest.java index 334d19ad5..9db225910 100644 --- a/src/test/java/com/launchdarkly/client/LDClientEvaluationTest.java +++ b/src/test/java/com/launchdarkly/client/LDClientEvaluationTest.java @@ -34,9 +34,9 @@ public class LDClientEvaluationTest { private FeatureStore featureStore = TestUtil.initedFeatureStore(); private LDConfig config = new LDConfig.Builder() - .featureStoreFactory(specificFeatureStore(featureStore)) - .eventProcessorFactory(Components.nullEventProcessor()) - .updateProcessorFactory(Components.nullUpdateProcessor()) + .dataStore(specificFeatureStore(featureStore)) + .eventProcessor(Components.nullEventProcessor()) + .dataSource(Components.nullDataSource()) .build(); private LDClientInterface client = new LDClient("SDK_KEY", config); @@ -222,9 +222,9 @@ public void variationDetailReturnsDefaultIfFlagEvaluatesToNull() { public void appropriateErrorIfClientNotInitialized() throws Exception { FeatureStore badFeatureStore = new InMemoryFeatureStore(); LDConfig badConfig = new LDConfig.Builder() - .featureStoreFactory(specificFeatureStore(badFeatureStore)) - .eventProcessorFactory(Components.nullEventProcessor()) - .updateProcessorFactory(specificUpdateProcessor(failedUpdateProcessor())) + .dataStore(specificFeatureStore(badFeatureStore)) + 
.eventProcessor(Components.nullEventProcessor()) + .dataSource(specificUpdateProcessor(failedUpdateProcessor())) .startWaitMillis(0) .build(); try (LDClientInterface badClient = new LDClient("SDK_KEY", badConfig)) { @@ -264,9 +264,9 @@ public void appropriateErrorForUnexpectedException() throws Exception { RuntimeException exception = new RuntimeException("sorry"); FeatureStore badFeatureStore = featureStoreThatThrowsException(exception); LDConfig badConfig = new LDConfig.Builder() - .featureStoreFactory(specificFeatureStore(badFeatureStore)) - .eventProcessorFactory(Components.nullEventProcessor()) - .updateProcessorFactory(Components.nullUpdateProcessor()) + .dataStore(specificFeatureStore(badFeatureStore)) + .eventProcessor(Components.nullEventProcessor()) + .dataSource(Components.nullDataSource()) .build(); try (LDClientInterface badClient = new LDClient("SDK_KEY", badConfig)) { EvaluationDetail expectedResult = EvaluationDetail.fromValue(false, null, diff --git a/src/test/java/com/launchdarkly/client/LDClientEventTest.java b/src/test/java/com/launchdarkly/client/LDClientEventTest.java index f71a56bf3..caf90b6fe 100644 --- a/src/test/java/com/launchdarkly/client/LDClientEventTest.java +++ b/src/test/java/com/launchdarkly/client/LDClientEventTest.java @@ -29,9 +29,9 @@ public class LDClientEventTest { private FeatureStore featureStore = TestUtil.initedFeatureStore(); private TestUtil.TestEventProcessor eventSink = new TestUtil.TestEventProcessor(); private LDConfig config = new LDConfig.Builder() - .featureStoreFactory(specificFeatureStore(featureStore)) - .eventProcessorFactory(specificEventProcessor(eventSink)) - .updateProcessorFactory(Components.nullUpdateProcessor()) + .dataStore(specificFeatureStore(featureStore)) + .eventProcessor(specificEventProcessor(eventSink)) + .dataSource(Components.nullDataSource()) .build(); private LDClientInterface client = new LDClient("SDK_KEY", config); diff --git a/src/test/java/com/launchdarkly/client/LDClientLddModeTest.java b/src/test/java/com/launchdarkly/client/LDClientLddModeTest.java index 76ee3611a..21a142823 100644 --- a/src/test/java/com/launchdarkly/client/LDClientLddModeTest.java +++ b/src/test/java/com/launchdarkly/client/LDClientLddModeTest.java @@ -15,6 +15,7 @@ @SuppressWarnings("javadoc") public class LDClientLddModeTest { + @SuppressWarnings("deprecation") @Test public void lddModeClientHasNullUpdateProcessor() throws IOException { LDConfig config = new LDConfig.Builder() @@ -50,7 +51,7 @@ public void lddModeClientGetsFlagFromFeatureStore() throws IOException { FeatureStore testFeatureStore = initedFeatureStore(); LDConfig config = new LDConfig.Builder() .useLdd(true) - .featureStoreFactory(specificFeatureStore(testFeatureStore)) + .dataStore(specificFeatureStore(testFeatureStore)) .build(); FeatureFlag flag = flagWithValue("key", LDValue.of(true)); testFeatureStore.upsert(FEATURES, flag); diff --git a/src/test/java/com/launchdarkly/client/LDClientOfflineTest.java b/src/test/java/com/launchdarkly/client/LDClientOfflineTest.java index cff7ed994..2785fc5d1 100644 --- a/src/test/java/com/launchdarkly/client/LDClientOfflineTest.java +++ b/src/test/java/com/launchdarkly/client/LDClientOfflineTest.java @@ -21,6 +21,7 @@ public class LDClientOfflineTest { private static final LDUser user = new LDUser("user"); + @SuppressWarnings("deprecation") @Test public void offlineClientHasNullUpdateProcessor() throws IOException { LDConfig config = new LDConfig.Builder() @@ -66,7 +67,7 @@ public void offlineClientGetsAllFlagsFromFeatureStore() 
throws IOException { FeatureStore testFeatureStore = initedFeatureStore(); LDConfig config = new LDConfig.Builder() .offline(true) - .featureStoreFactory(specificFeatureStore(testFeatureStore)) + .dataStore(specificFeatureStore(testFeatureStore)) .build(); testFeatureStore.upsert(FEATURES, flagWithValue("key", LDValue.of(true))); try (LDClient client = new LDClient("SDK_KEY", config)) { @@ -80,7 +81,7 @@ public void offlineClientGetsFlagsStateFromFeatureStore() throws IOException { FeatureStore testFeatureStore = initedFeatureStore(); LDConfig config = new LDConfig.Builder() .offline(true) - .featureStoreFactory(specificFeatureStore(testFeatureStore)) + .dataStore(specificFeatureStore(testFeatureStore)) .build(); testFeatureStore.upsert(FEATURES, flagWithValue("key", LDValue.of(true))); try (LDClient client = new LDClient("SDK_KEY", config)) { diff --git a/src/test/java/com/launchdarkly/client/LDClientTest.java b/src/test/java/com/launchdarkly/client/LDClientTest.java index b585536c9..ae4a20bd1 100644 --- a/src/test/java/com/launchdarkly/client/LDClientTest.java +++ b/src/test/java/com/launchdarkly/client/LDClientTest.java @@ -201,7 +201,7 @@ public void isFlagKnownReturnsTrueForExistingFlag() throws Exception { FeatureStore testFeatureStore = initedFeatureStore(); LDConfig.Builder config = new LDConfig.Builder() .startWaitMillis(0) - .featureStoreFactory(specificFeatureStore(testFeatureStore)); + .dataStore(specificFeatureStore(testFeatureStore)); expect(updateProcessor.start()).andReturn(initFuture); expect(updateProcessor.initialized()).andReturn(true).times(1); replayAll(); @@ -218,7 +218,7 @@ public void isFlagKnownReturnsFalseForUnknownFlag() throws Exception { FeatureStore testFeatureStore = initedFeatureStore(); LDConfig.Builder config = new LDConfig.Builder() .startWaitMillis(0) - .featureStoreFactory(specificFeatureStore(testFeatureStore)); + .dataStore(specificFeatureStore(testFeatureStore)); expect(updateProcessor.start()).andReturn(initFuture); expect(updateProcessor.initialized()).andReturn(true).times(1); replayAll(); @@ -234,7 +234,7 @@ public void isFlagKnownReturnsFalseIfStoreAndClientAreNotInitialized() throws Ex FeatureStore testFeatureStore = new InMemoryFeatureStore(); LDConfig.Builder config = new LDConfig.Builder() .startWaitMillis(0) - .featureStoreFactory(specificFeatureStore(testFeatureStore)); + .dataStore(specificFeatureStore(testFeatureStore)); expect(updateProcessor.start()).andReturn(initFuture); expect(updateProcessor.initialized()).andReturn(false).times(1); replayAll(); @@ -251,7 +251,7 @@ public void isFlagKnownUsesStoreIfStoreIsInitializedButClientIsNot() throws Exce FeatureStore testFeatureStore = initedFeatureStore(); LDConfig.Builder config = new LDConfig.Builder() .startWaitMillis(0) - .featureStoreFactory(specificFeatureStore(testFeatureStore)); + .dataStore(specificFeatureStore(testFeatureStore)); expect(updateProcessor.start()).andReturn(initFuture); expect(updateProcessor.initialized()).andReturn(false).times(1); replayAll(); @@ -267,7 +267,7 @@ public void isFlagKnownUsesStoreIfStoreIsInitializedButClientIsNot() throws Exce public void evaluationUsesStoreIfStoreIsInitializedButClientIsNot() throws Exception { FeatureStore testFeatureStore = initedFeatureStore(); LDConfig.Builder config = new LDConfig.Builder() - .featureStoreFactory(specificFeatureStore(testFeatureStore)) + .dataStore(specificFeatureStore(testFeatureStore)) .startWaitMillis(0L); expect(updateProcessor.start()).andReturn(initFuture); 
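[Editor's note] The test changes above and below all follow the same pattern: the old factory setters on LDConfig.Builder are replaced by the shorter names. A minimal sketch of a client configured with the new names (the SDK key and component choices are illustrative):

import com.launchdarkly.client.Components;
import com.launchdarkly.client.LDClient;
import com.launchdarkly.client.LDConfig;

public class NewBuilderNamesSketch {
  public static void main(String[] args) throws Exception {
    LDConfig config = new LDConfig.Builder()
        .dataStore(Components.inMemoryDataStore())        // formerly featureStoreFactory(Components.inMemoryFeatureStore())
        .dataSource(Components.nullDataSource())          // formerly updateProcessorFactory(Components.nullUpdateProcessor())
        .eventProcessor(Components.nullEventProcessor())  // formerly eventProcessorFactory(...)
        .build();
    try (LDClient client = new LDClient("SDK_KEY", config)) {
      // with a null data source and null event processor, the client makes no network connections
    }
  }
}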
expect(updateProcessor.initialized()).andReturn(false); @@ -294,8 +294,8 @@ public void dataSetIsPassedToFeatureStoreInCorrectOrder() throws Exception { replay(store); LDConfig.Builder config = new LDConfig.Builder() - .updateProcessorFactory(updateProcessorWithData(DEPENDENCY_ORDERING_TEST_DATA)) - .featureStoreFactory(specificFeatureStore(store)) + .dataSource(updateProcessorWithData(DEPENDENCY_ORDERING_TEST_DATA)) + .dataStore(specificFeatureStore(store)) .sendEvents(false); client = new LDClient("SDK_KEY", config.build()); @@ -340,8 +340,8 @@ private void expectEventsSent(int count) { } private LDClientInterface createMockClient(LDConfig.Builder config) { - config.updateProcessorFactory(TestUtil.specificUpdateProcessor(updateProcessor)); - config.eventProcessorFactory(TestUtil.specificEventProcessor(eventProcessor)); + config.dataSource(TestUtil.specificUpdateProcessor(updateProcessor)); + config.eventProcessor(TestUtil.specificEventProcessor(eventProcessor)); return new LDClient("SDK_KEY", config.build()); } diff --git a/src/test/java/com/launchdarkly/client/RedisFeatureStoreBuilderTest.java b/src/test/java/com/launchdarkly/client/RedisFeatureStoreBuilderTest.java deleted file mode 100644 index 64fb15068..000000000 --- a/src/test/java/com/launchdarkly/client/RedisFeatureStoreBuilderTest.java +++ /dev/null @@ -1,106 +0,0 @@ -package com.launchdarkly.client; - -import org.junit.Test; - -import java.net.URI; -import java.net.URISyntaxException; -import java.util.concurrent.TimeUnit; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - -import redis.clients.jedis.JedisPoolConfig; -import redis.clients.jedis.Protocol; - -public class RedisFeatureStoreBuilderTest { - @Test - public void testDefaultValues() { - RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder(); - assertEquals(RedisFeatureStoreBuilder.DEFAULT_URI, conf.uri); - assertEquals(FeatureStoreCacheConfig.DEFAULT, conf.caching); - assertEquals(Protocol.DEFAULT_TIMEOUT, conf.connectTimeout); - assertEquals(Protocol.DEFAULT_TIMEOUT, conf.socketTimeout); - assertEquals(RedisFeatureStoreBuilder.DEFAULT_PREFIX, conf.prefix); - assertNull(conf.poolConfig); - } - - @Test - public void testConstructorSpecifyingUri() { - URI uri = URI.create("redis://host:1234"); - RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder(uri); - assertEquals(uri, conf.uri); - assertEquals(FeatureStoreCacheConfig.DEFAULT, conf.caching); - assertEquals(Protocol.DEFAULT_TIMEOUT, conf.connectTimeout); - assertEquals(Protocol.DEFAULT_TIMEOUT, conf.socketTimeout); - assertEquals(RedisFeatureStoreBuilder.DEFAULT_PREFIX, conf.prefix); - assertNull(conf.poolConfig); - } - - @SuppressWarnings("deprecation") - @Test - public void testDeprecatedUriBuildingConstructor() throws URISyntaxException { - RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder("badscheme", "example", 1234, 100); - assertEquals(URI.create("badscheme://example:1234"), conf.uri); - assertEquals(100, conf.caching.getCacheTime()); - assertEquals(TimeUnit.SECONDS, conf.caching.getCacheTimeUnit()); - assertEquals(Protocol.DEFAULT_TIMEOUT, conf.connectTimeout); - assertEquals(Protocol.DEFAULT_TIMEOUT, conf.socketTimeout); - assertEquals(FeatureStoreCacheConfig.StaleValuesPolicy.EVICT, conf.caching.getStaleValuesPolicy()); - assertEquals(RedisFeatureStoreBuilder.DEFAULT_PREFIX, conf.prefix); - assertNull(conf.poolConfig); - } - - @SuppressWarnings("deprecation") - @Test - public void testRefreshStaleValues() throws URISyntaxException { - 
RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder().refreshStaleValues(true); - assertEquals(FeatureStoreCacheConfig.StaleValuesPolicy.REFRESH, conf.caching.getStaleValuesPolicy()); - } - - @SuppressWarnings("deprecation") - @Test - public void testAsyncRefresh() throws URISyntaxException { - RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder().refreshStaleValues(true).asyncRefresh(true); - assertEquals(FeatureStoreCacheConfig.StaleValuesPolicy.REFRESH_ASYNC, conf.caching.getStaleValuesPolicy()); - } - - @SuppressWarnings("deprecation") - @Test - public void testRefreshStaleValuesWithoutAsyncRefresh() throws URISyntaxException { - RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder().asyncRefresh(true); - assertEquals(FeatureStoreCacheConfig.StaleValuesPolicy.EVICT, conf.caching.getStaleValuesPolicy()); - } - - @Test - public void testPrefixConfigured() throws URISyntaxException { - RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder().prefix("prefix"); - assertEquals("prefix", conf.prefix); - } - - @Test - public void testConnectTimeoutConfigured() throws URISyntaxException { - RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder().connectTimeout(1, TimeUnit.SECONDS); - assertEquals(1000, conf.connectTimeout); - } - - @Test - public void testSocketTimeoutConfigured() throws URISyntaxException { - RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder().socketTimeout(1, TimeUnit.SECONDS); - assertEquals(1000, conf.socketTimeout); - } - - @SuppressWarnings("deprecation") - @Test - public void testCacheTimeWithUnit() throws URISyntaxException { - RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder().cacheTime(2000, TimeUnit.MILLISECONDS); - assertEquals(2000, conf.caching.getCacheTime()); - assertEquals(TimeUnit.MILLISECONDS, conf.caching.getCacheTimeUnit()); - } - - @Test - public void testPoolConfigConfigured() throws URISyntaxException { - JedisPoolConfig poolConfig = new JedisPoolConfig(); - RedisFeatureStoreBuilder conf = new RedisFeatureStoreBuilder().poolConfig(poolConfig); - assertEquals(poolConfig, conf.poolConfig); - } -} diff --git a/src/test/java/com/launchdarkly/client/RedisFeatureStoreTest.java b/src/test/java/com/launchdarkly/client/RedisFeatureStoreTest.java deleted file mode 100644 index e58d56388..000000000 --- a/src/test/java/com/launchdarkly/client/RedisFeatureStoreTest.java +++ /dev/null @@ -1,57 +0,0 @@ -package com.launchdarkly.client; - -import com.launchdarkly.client.RedisFeatureStore.UpdateListener; - -import org.junit.Assume; -import org.junit.BeforeClass; - -import java.net.URI; - -import static org.junit.Assume.assumeTrue; - -import redis.clients.jedis.Jedis; - -public class RedisFeatureStoreTest extends FeatureStoreDatabaseTestBase { - - private static final URI REDIS_URI = URI.create("redis://localhost:6379"); - - public RedisFeatureStoreTest(boolean cached) { - super(cached); - } - - @BeforeClass - public static void maybeSkipDatabaseTests() { - String skipParam = System.getenv("LD_SKIP_DATABASE_TESTS"); - assumeTrue(skipParam == null || skipParam.equals("")); - } - - @Override - protected RedisFeatureStore makeStore() { - RedisFeatureStoreBuilder builder = new RedisFeatureStoreBuilder(REDIS_URI); - builder.caching(cached ? 
FeatureStoreCacheConfig.enabled().ttlSeconds(30) : FeatureStoreCacheConfig.disabled()); - return builder.build(); - } - - @Override - protected RedisFeatureStore makeStoreWithPrefix(String prefix) { - return new RedisFeatureStoreBuilder(REDIS_URI).caching(FeatureStoreCacheConfig.disabled()).prefix(prefix).build(); - } - - @Override - protected void clearAllData() { - try (Jedis client = new Jedis("localhost")) { - client.flushDB(); - } - } - - @Override - protected boolean setUpdateHook(RedisFeatureStore storeUnderTest, final Runnable hook) { - storeUnderTest.setUpdateListener(new UpdateListener() { - @Override - public void aboutToUpdate(String baseKey, String itemKey) { - hook.run(); - } - }); - return true; - } -} diff --git a/src/test/java/com/launchdarkly/client/StreamProcessorTest.java b/src/test/java/com/launchdarkly/client/StreamProcessorTest.java index 92a45136b..ff49176c3 100644 --- a/src/test/java/com/launchdarkly/client/StreamProcessorTest.java +++ b/src/test/java/com/launchdarkly/client/StreamProcessorTest.java @@ -65,7 +65,7 @@ public class StreamProcessorTest extends EasyMockSupport { @Before public void setup() { featureStore = new InMemoryFeatureStore(); - configBuilder = new LDConfig.Builder().featureStoreFactory(specificFeatureStore(featureStore)); + configBuilder = new LDConfig.Builder().dataStore(specificFeatureStore(featureStore)); mockRequestor = createStrictMock(FeatureRequestor.class); mockEventSource = createStrictMock(EventSource.class); } diff --git a/src/test/java/com/launchdarkly/client/files/JsonFlagFileParserTest.java b/src/test/java/com/launchdarkly/client/files/JsonFlagFileParserTest.java deleted file mode 100644 index 0110105f6..000000000 --- a/src/test/java/com/launchdarkly/client/files/JsonFlagFileParserTest.java +++ /dev/null @@ -1,7 +0,0 @@ -package com.launchdarkly.client.files; - -public class JsonFlagFileParserTest extends FlagFileParserTestBase { - public JsonFlagFileParserTest() { - super(new JsonFlagFileParser(), ".json"); - } -} diff --git a/src/test/java/com/launchdarkly/client/files/YamlFlagFileParserTest.java b/src/test/java/com/launchdarkly/client/files/YamlFlagFileParserTest.java deleted file mode 100644 index 9b94e3801..000000000 --- a/src/test/java/com/launchdarkly/client/files/YamlFlagFileParserTest.java +++ /dev/null @@ -1,7 +0,0 @@ -package com.launchdarkly.client.files; - -public class YamlFlagFileParserTest extends FlagFileParserTestBase { - public YamlFlagFileParserTest() { - super(new YamlFlagFileParser(), ".yml"); - } -} diff --git a/src/test/java/com/launchdarkly/client/files/ClientWithFileDataSourceTest.java b/src/test/java/com/launchdarkly/client/integrations/ClientWithFileDataSourceTest.java similarity index 65% rename from src/test/java/com/launchdarkly/client/files/ClientWithFileDataSourceTest.java rename to src/test/java/com/launchdarkly/client/integrations/ClientWithFileDataSourceTest.java index e8ec26040..f7c365b41 100644 --- a/src/test/java/com/launchdarkly/client/files/ClientWithFileDataSourceTest.java +++ b/src/test/java/com/launchdarkly/client/integrations/ClientWithFileDataSourceTest.java @@ -1,4 +1,4 @@ -package com.launchdarkly.client.files; +package com.launchdarkly.client.integrations; import com.google.gson.JsonPrimitive; import com.launchdarkly.client.LDClient; @@ -7,22 +7,23 @@ import org.junit.Test; -import static com.launchdarkly.client.files.TestData.FLAG_VALUE_1; -import static com.launchdarkly.client.files.TestData.FLAG_VALUE_1_KEY; -import static com.launchdarkly.client.files.TestData.FULL_FLAG_1_KEY; 
-import static com.launchdarkly.client.files.TestData.FULL_FLAG_1_VALUE; -import static com.launchdarkly.client.files.TestData.resourceFilePath; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.FLAG_VALUE_1; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.FLAG_VALUE_1_KEY; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.FULL_FLAG_1_KEY; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.FULL_FLAG_1_VALUE; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.resourceFilePath; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; +@SuppressWarnings("javadoc") public class ClientWithFileDataSourceTest { private static final LDUser user = new LDUser.Builder("userkey").build(); private LDClient makeClient() throws Exception { - FileDataSourceFactory fdsf = FileComponents.fileDataSource() + FileDataSourceBuilder fdsb = FileData.dataSource() .filePaths(resourceFilePath("all-properties.json")); LDConfig config = new LDConfig.Builder() - .updateProcessorFactory(fdsf) + .dataSource(fdsb) .sendEvents(false) .build(); return new LDClient("sdkKey", config); diff --git a/src/test/java/com/launchdarkly/client/files/DataLoaderTest.java b/src/test/java/com/launchdarkly/client/integrations/DataLoaderTest.java similarity index 86% rename from src/test/java/com/launchdarkly/client/files/DataLoaderTest.java rename to src/test/java/com/launchdarkly/client/integrations/DataLoaderTest.java index 9145c7d32..b78585783 100644 --- a/src/test/java/com/launchdarkly/client/files/DataLoaderTest.java +++ b/src/test/java/com/launchdarkly/client/integrations/DataLoaderTest.java @@ -1,4 +1,4 @@ -package com.launchdarkly.client.files; +package com.launchdarkly.client.integrations; import com.google.common.collect.ImmutableList; import com.google.gson.Gson; @@ -6,6 +6,9 @@ import com.google.gson.JsonObject; import com.launchdarkly.client.VersionedData; import com.launchdarkly.client.VersionedDataKind; +import com.launchdarkly.client.integrations.FileDataSourceImpl.DataBuilder; +import com.launchdarkly.client.integrations.FileDataSourceImpl.DataLoader; +import com.launchdarkly.client.integrations.FileDataSourceParsing.FileDataException; import org.junit.Assert; import org.junit.Test; @@ -14,12 +17,13 @@ import static com.launchdarkly.client.VersionedDataKind.FEATURES; import static com.launchdarkly.client.VersionedDataKind.SEGMENTS; -import static com.launchdarkly.client.files.TestData.FLAG_VALUE_1_KEY; -import static com.launchdarkly.client.files.TestData.resourceFilePath; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.FLAG_VALUE_1_KEY; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.resourceFilePath; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; +@SuppressWarnings("javadoc") public class DataLoaderTest { private static final Gson gson = new Gson(); private DataBuilder builder = new DataBuilder(); @@ -70,7 +74,7 @@ public void duplicateFlagKeyInFlagsThrowsException() throws Exception { DataLoader ds = new DataLoader(ImmutableList.of(resourceFilePath("flag-only.json"), resourceFilePath("flag-with-duplicate-key.json"))); ds.load(builder); - } catch (DataLoaderException e) { + } catch (FileDataException e) { assertThat(e.getMessage(), containsString("key \"flag1\" was already defined")); } } @@ -81,7 +85,7 
@@ public void duplicateFlagKeyInFlagsAndFlagValuesThrowsException() throws Excepti DataLoader ds = new DataLoader(ImmutableList.of(resourceFilePath("flag-only.json"), resourceFilePath("value-with-duplicate-key.json"))); ds.load(builder); - } catch (DataLoaderException e) { + } catch (FileDataException e) { assertThat(e.getMessage(), containsString("key \"flag1\" was already defined")); } } @@ -92,7 +96,7 @@ public void duplicateSegmentKeyThrowsException() throws Exception { DataLoader ds = new DataLoader(ImmutableList.of(resourceFilePath("segment-only.json"), resourceFilePath("segment-with-duplicate-key.json"))); ds.load(builder); - } catch (DataLoaderException e) { + } catch (FileDataException e) { assertThat(e.getMessage(), containsString("key \"seg1\" was already defined")); } } diff --git a/src/test/java/com/launchdarkly/client/files/FileDataSourceTest.java b/src/test/java/com/launchdarkly/client/integrations/FileDataSourceTest.java similarity index 88% rename from src/test/java/com/launchdarkly/client/files/FileDataSourceTest.java rename to src/test/java/com/launchdarkly/client/integrations/FileDataSourceTest.java index 62924d9d5..0d933e967 100644 --- a/src/test/java/com/launchdarkly/client/files/FileDataSourceTest.java +++ b/src/test/java/com/launchdarkly/client/integrations/FileDataSourceTest.java @@ -1,4 +1,4 @@ -package com.launchdarkly.client.files; +package com.launchdarkly.client.integrations; import com.launchdarkly.client.FeatureStore; import com.launchdarkly.client.InMemoryFeatureStore; @@ -14,28 +14,28 @@ import java.nio.file.Paths; import java.util.concurrent.Future; -import static com.launchdarkly.client.files.FileComponents.fileDataSource; -import static com.launchdarkly.client.files.TestData.ALL_FLAG_KEYS; -import static com.launchdarkly.client.files.TestData.ALL_SEGMENT_KEYS; -import static com.launchdarkly.client.files.TestData.getResourceContents; -import static com.launchdarkly.client.files.TestData.resourceFilePath; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.ALL_FLAG_KEYS; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.ALL_SEGMENT_KEYS; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.getResourceContents; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.resourceFilePath; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.junit.Assert.fail; +@SuppressWarnings("javadoc") public class FileDataSourceTest { private static final Path badFilePath = Paths.get("no-such-file.json"); private final FeatureStore store = new InMemoryFeatureStore(); private final LDConfig config = new LDConfig.Builder().build(); - private final FileDataSourceFactory factory; + private final FileDataSourceBuilder factory; public FileDataSourceTest() throws Exception { factory = makeFactoryWithFile(resourceFilePath("all-properties.json")); } - private static FileDataSourceFactory makeFactoryWithFile(Path path) { - return fileDataSource().filePaths(path); + private static FileDataSourceBuilder makeFactoryWithFile(Path path) { + return FileData.dataSource().filePaths(path); } @Test @@ -94,7 +94,7 @@ public void initializedIsFalseAfterUnsuccessfulLoad() throws Exception { @Test public void modifiedFileIsNotReloadedIfAutoUpdateIsOff() throws Exception { File file = makeTempFlagFile(); - FileDataSourceFactory factory1 = makeFactoryWithFile(file.toPath()); + FileDataSourceBuilder factory1 = makeFactoryWithFile(file.toPath()); try 
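[Editor's note] As the renamed tests show, the file data source moves from FileComponents.fileDataSource() to FileData.dataSource() in the integrations package. A minimal sketch of wiring it into a client, with a hypothetical flag-file path:

import com.launchdarkly.client.LDClient;
import com.launchdarkly.client.LDConfig;
import com.launchdarkly.client.integrations.FileData;

import java.nio.file.Paths;

public class FileDataSketch {
  public static void main(String[] args) throws Exception {
    LDConfig config = new LDConfig.Builder()
        .dataSource(FileData.dataSource()
            .filePaths(Paths.get("my-flags.json"))  // hypothetical path
            .autoUpdate(true))                      // reload when the file changes
        .sendEvents(false)
        .build();
    try (LDClient client = new LDClient("SDK_KEY", config)) {
      // flag values come from my-flags.json instead of from LaunchDarkly
    }
  }
}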
{ setFileContents(file, getResourceContents("flag-only.json")); try (UpdateProcessor fp = factory1.createUpdateProcessor("", config, store)) { @@ -115,7 +115,7 @@ public void modifiedFileIsNotReloadedIfAutoUpdateIsOff() throws Exception { @Test public void modifiedFileIsReloadedIfAutoUpdateIsOn() throws Exception { File file = makeTempFlagFile(); - FileDataSourceFactory factory1 = makeFactoryWithFile(file.toPath()).autoUpdate(true); + FileDataSourceBuilder factory1 = makeFactoryWithFile(file.toPath()).autoUpdate(true); long maxMsToWait = 10000; try { setFileContents(file, getResourceContents("flag-only.json")); // this file has 1 flag @@ -142,7 +142,7 @@ public void modifiedFileIsReloadedIfAutoUpdateIsOn() throws Exception { public void ifFilesAreBadAtStartTimeAutoUpdateCanStillLoadGoodDataLater() throws Exception { File file = makeTempFlagFile(); setFileContents(file, "not valid"); - FileDataSourceFactory factory1 = makeFactoryWithFile(file.toPath()).autoUpdate(true); + FileDataSourceBuilder factory1 = makeFactoryWithFile(file.toPath()).autoUpdate(true); long maxMsToWait = 10000; try { try (UpdateProcessor fp = factory1.createUpdateProcessor("", config, store)) { diff --git a/src/test/java/com/launchdarkly/client/files/TestData.java b/src/test/java/com/launchdarkly/client/integrations/FileDataSourceTestData.java similarity index 90% rename from src/test/java/com/launchdarkly/client/files/TestData.java rename to src/test/java/com/launchdarkly/client/integrations/FileDataSourceTestData.java index d1f098d7c..d222f4c77 100644 --- a/src/test/java/com/launchdarkly/client/files/TestData.java +++ b/src/test/java/com/launchdarkly/client/integrations/FileDataSourceTestData.java @@ -1,4 +1,4 @@ -package com.launchdarkly.client.files; +package com.launchdarkly.client.integrations; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; @@ -14,7 +14,8 @@ import java.util.Map; import java.util.Set; -public class TestData { +@SuppressWarnings("javadoc") +public class FileDataSourceTestData { private static final Gson gson = new Gson(); // These should match the data in our test files @@ -40,12 +41,11 @@ public class TestData { public static final Set ALL_SEGMENT_KEYS = ImmutableSet.of(FULL_SEGMENT_1_KEY); public static Path resourceFilePath(String filename) throws URISyntaxException { - URL resource = TestData.class.getClassLoader().getResource("filesource/" + filename); + URL resource = FileDataSourceTestData.class.getClassLoader().getResource("filesource/" + filename); return Paths.get(resource.toURI()); } public static String getResourceContents(String filename) throws Exception { return new String(Files.readAllBytes(resourceFilePath(filename))); } - } diff --git a/src/test/java/com/launchdarkly/client/integrations/FlagFileParserJsonTest.java b/src/test/java/com/launchdarkly/client/integrations/FlagFileParserJsonTest.java new file mode 100644 index 000000000..c23a66772 --- /dev/null +++ b/src/test/java/com/launchdarkly/client/integrations/FlagFileParserJsonTest.java @@ -0,0 +1,10 @@ +package com.launchdarkly.client.integrations; + +import com.launchdarkly.client.integrations.FileDataSourceParsing.JsonFlagFileParser; + +@SuppressWarnings("javadoc") +public class FlagFileParserJsonTest extends FlagFileParserTestBase { + public FlagFileParserJsonTest() { + super(new JsonFlagFileParser(), ".json"); + } +} diff --git a/src/test/java/com/launchdarkly/client/files/FlagFileParserTestBase.java 
b/src/test/java/com/launchdarkly/client/integrations/FlagFileParserTestBase.java similarity index 76% rename from src/test/java/com/launchdarkly/client/files/FlagFileParserTestBase.java rename to src/test/java/com/launchdarkly/client/integrations/FlagFileParserTestBase.java index d6165e279..fd2be268f 100644 --- a/src/test/java/com/launchdarkly/client/files/FlagFileParserTestBase.java +++ b/src/test/java/com/launchdarkly/client/integrations/FlagFileParserTestBase.java @@ -1,4 +1,8 @@ -package com.launchdarkly.client.files; +package com.launchdarkly.client.integrations; + +import com.launchdarkly.client.integrations.FileDataSourceParsing.FileDataException; +import com.launchdarkly.client.integrations.FileDataSourceParsing.FlagFileParser; +import com.launchdarkly.client.integrations.FileDataSourceParsing.FlagFileRep; import org.junit.Test; @@ -6,14 +10,15 @@ import java.io.FileNotFoundException; import java.net.URISyntaxException; -import static com.launchdarkly.client.files.TestData.FLAG_VALUES; -import static com.launchdarkly.client.files.TestData.FULL_FLAGS; -import static com.launchdarkly.client.files.TestData.FULL_SEGMENTS; -import static com.launchdarkly.client.files.TestData.resourceFilePath; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.FLAG_VALUES; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.FULL_FLAGS; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.FULL_SEGMENTS; +import static com.launchdarkly.client.integrations.FileDataSourceTestData.resourceFilePath; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.nullValue; +@SuppressWarnings("javadoc") public abstract class FlagFileParserTestBase { private final FlagFileParser parser; private final String fileExtension; @@ -63,7 +68,7 @@ public void canParseFileWithOnlySegment() throws Exception { } } - @Test(expected = DataLoaderException.class) + @Test(expected = FileDataException.class) public void throwsExpectedErrorForBadFile() throws Exception { try (FileInputStream input = openFile("malformed")) { parser.parse(input); diff --git a/src/test/java/com/launchdarkly/client/integrations/FlagFileParserYamlTest.java b/src/test/java/com/launchdarkly/client/integrations/FlagFileParserYamlTest.java new file mode 100644 index 000000000..3ad640e92 --- /dev/null +++ b/src/test/java/com/launchdarkly/client/integrations/FlagFileParserYamlTest.java @@ -0,0 +1,10 @@ +package com.launchdarkly.client.integrations; + +import com.launchdarkly.client.integrations.FileDataSourceParsing.YamlFlagFileParser; + +@SuppressWarnings("javadoc") +public class FlagFileParserYamlTest extends FlagFileParserTestBase { + public FlagFileParserYamlTest() { + super(new YamlFlagFileParser(), ".yml"); + } +} diff --git a/src/test/java/com/launchdarkly/client/integrations/RedisFeatureStoreBuilderTest.java b/src/test/java/com/launchdarkly/client/integrations/RedisFeatureStoreBuilderTest.java new file mode 100644 index 000000000..3da39b69b --- /dev/null +++ b/src/test/java/com/launchdarkly/client/integrations/RedisFeatureStoreBuilderTest.java @@ -0,0 +1,50 @@ +package com.launchdarkly.client.integrations; + +import org.junit.Test; + +import java.net.URISyntaxException; +import java.util.concurrent.TimeUnit; + +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertNull; + +import redis.clients.jedis.JedisPoolConfig; +import redis.clients.jedis.Protocol; + 
+@SuppressWarnings("javadoc") +public class RedisFeatureStoreBuilderTest { + @Test + public void testDefaultValues() { + RedisDataStoreBuilder conf = Redis.dataStore(); + assertEquals(RedisDataStoreBuilder.DEFAULT_URI, conf.uri); + assertEquals(Protocol.DEFAULT_TIMEOUT, conf.connectTimeout); + assertEquals(Protocol.DEFAULT_TIMEOUT, conf.socketTimeout); + assertEquals(RedisDataStoreBuilder.DEFAULT_PREFIX, conf.prefix); + assertNull(conf.poolConfig); + } + + @Test + public void testPrefixConfigured() throws URISyntaxException { + RedisDataStoreBuilder conf = Redis.dataStore().prefix("prefix"); + assertEquals("prefix", conf.prefix); + } + + @Test + public void testConnectTimeoutConfigured() throws URISyntaxException { + RedisDataStoreBuilder conf = Redis.dataStore().connectTimeout(1, TimeUnit.SECONDS); + assertEquals(1000, conf.connectTimeout); + } + + @Test + public void testSocketTimeoutConfigured() throws URISyntaxException { + RedisDataStoreBuilder conf = Redis.dataStore().socketTimeout(1, TimeUnit.SECONDS); + assertEquals(1000, conf.socketTimeout); + } + + @Test + public void testPoolConfigConfigured() throws URISyntaxException { + JedisPoolConfig poolConfig = new JedisPoolConfig(); + RedisDataStoreBuilder conf = Redis.dataStore().poolConfig(poolConfig); + assertEquals(poolConfig, conf.poolConfig); + } +} diff --git a/src/test/java/com/launchdarkly/client/integrations/RedisFeatureStoreTest.java b/src/test/java/com/launchdarkly/client/integrations/RedisFeatureStoreTest.java new file mode 100644 index 000000000..b93718a2b --- /dev/null +++ b/src/test/java/com/launchdarkly/client/integrations/RedisFeatureStoreTest.java @@ -0,0 +1,69 @@ +package com.launchdarkly.client.integrations; + +import com.launchdarkly.client.Components; +import com.launchdarkly.client.FeatureStore; +import com.launchdarkly.client.FeatureStoreDatabaseTestBase; +import com.launchdarkly.client.integrations.RedisDataStoreImpl.UpdateListener; +import com.launchdarkly.client.utils.CachingStoreWrapper; + +import org.junit.BeforeClass; + +import java.net.URI; + +import static org.junit.Assume.assumeTrue; + +import redis.clients.jedis.Jedis; + +@SuppressWarnings({ "deprecation", "javadoc" }) +public class RedisFeatureStoreTest extends FeatureStoreDatabaseTestBase { + + private static final URI REDIS_URI = URI.create("redis://localhost:6379"); + + public RedisFeatureStoreTest(boolean cached) { + super(cached); + } + + @BeforeClass + public static void maybeSkipDatabaseTests() { + String skipParam = System.getenv("LD_SKIP_DATABASE_TESTS"); + assumeTrue(skipParam == null || skipParam.equals("")); + } + + @Override + protected FeatureStore makeStore() { + RedisDataStoreBuilder redisBuilder = Redis.dataStore().uri(REDIS_URI); + PersistentDataStoreBuilder builder = Components.persistentDataStore(redisBuilder); + if (cached) { + builder.cacheSeconds(30); + } else { + builder.noCaching(); + } + return builder.createFeatureStore(); + } + + @Override + protected FeatureStore makeStoreWithPrefix(String prefix) { + return Components.persistentDataStore( + Redis.dataStore().uri(REDIS_URI).prefix(prefix) + ).noCaching().createFeatureStore(); + } + + @Override + protected void clearAllData() { + try (Jedis client = new Jedis("localhost")) { + client.flushDB(); + } + } + + @Override + protected boolean setUpdateHook(FeatureStore storeUnderTest, final Runnable hook) { + RedisDataStoreImpl core = (RedisDataStoreImpl)((CachingStoreWrapper)storeUnderTest).getCore(); + core.setUpdateListener(new UpdateListener() { + @Override + public void 
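[Editor's note] The new Redis tests build the store directly via createFeatureStore(); in application code the same builders would normally be passed to LDConfig.Builder.dataStore(). A minimal sketch, with an illustrative Redis URI and key prefix:

import com.launchdarkly.client.Components;
import com.launchdarkly.client.LDConfig;
import com.launchdarkly.client.integrations.Redis;

import java.net.URI;

public class RedisDataStoreSketch {
  public static LDConfig makeConfig() {
    return new LDConfig.Builder()
        .dataStore(
            Components.persistentDataStore(
                Redis.dataStore()
                    .uri(URI.create("redis://my-redis-host:6379"))  // illustrative host
                    .prefix("my-prefix"))                           // illustrative key prefix
                .cacheSeconds(30)  // cache items locally for 30 seconds; use noCaching() to disable
        )
        .build();
  }
}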
aboutToUpdate(String baseKey, String itemKey) { + hook.run(); + } + }); + return true; + } +} diff --git a/src/test/java/com/launchdarkly/client/utils/CachingStoreWrapperTest.java b/src/test/java/com/launchdarkly/client/utils/CachingStoreWrapperTest.java index 6419b6db1..306501bd9 100644 --- a/src/test/java/com/launchdarkly/client/utils/CachingStoreWrapperTest.java +++ b/src/test/java/com/launchdarkly/client/utils/CachingStoreWrapperTest.java @@ -4,6 +4,7 @@ import com.launchdarkly.client.FeatureStoreCacheConfig; import com.launchdarkly.client.VersionedData; import com.launchdarkly.client.VersionedDataKind; +import com.launchdarkly.client.integrations.CacheMonitor; import org.junit.Assert; import org.junit.Test; @@ -21,9 +22,10 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.nullValue; +import static org.junit.Assert.fail; import static org.junit.Assume.assumeThat; -@SuppressWarnings("javadoc") +@SuppressWarnings({ "javadoc", "deprecation" }) @RunWith(Parameterized.class) public class CachingStoreWrapperTest { @@ -62,7 +64,7 @@ public static Iterable data() { public CachingStoreWrapperTest(CachingMode cachingMode) { this.cachingMode = cachingMode; this.core = new MockCore(); - this.wrapper = new CachingStoreWrapper(core, cachingMode.toCacheConfig()); + this.wrapper = new CachingStoreWrapper(core, cachingMode.toCacheConfig(), null); } @Test @@ -388,7 +390,7 @@ public void initializedCanCacheFalseResult() throws Exception { assumeThat(cachingMode.isCached(), is(true)); // We need to create a different object for this test so we can set a short cache TTL - try (CachingStoreWrapper wrapper1 = new CachingStoreWrapper(core, FeatureStoreCacheConfig.enabled().ttlMillis(500))) { + try (CachingStoreWrapper wrapper1 = new CachingStoreWrapper(core, FeatureStoreCacheConfig.enabled().ttlMillis(500), null)) { assertThat(wrapper1.initialized(), is(false)); assertThat(core.initedQueryCount, equalTo(1)); @@ -406,6 +408,51 @@ public void initializedCanCacheFalseResult() throws Exception { } } + @Test + public void canGetCacheStats() throws Exception { + assumeThat(cachingMode, is(CachingMode.CACHED_WITH_FINITE_TTL)); + + CacheMonitor cacheMonitor = new CacheMonitor(); + + try (CachingStoreWrapper w = new CachingStoreWrapper(core, FeatureStoreCacheConfig.enabled().ttlSeconds(30), cacheMonitor)) { + CacheMonitor.CacheStats stats = cacheMonitor.getCacheStats(); + + assertThat(stats, equalTo(new CacheMonitor.CacheStats(0, 0, 0, 0, 0, 0))); + + // Cause a cache miss + w.get(THINGS, "key1"); + stats = cacheMonitor.getCacheStats(); + assertThat(stats.getHitCount(), equalTo(0L)); + assertThat(stats.getMissCount(), equalTo(1L)); + assertThat(stats.getLoadSuccessCount(), equalTo(1L)); // even though it's a miss, it's a "success" because there was no exception + assertThat(stats.getLoadExceptionCount(), equalTo(0L)); + + // Cause a cache hit + core.forceSet(THINGS, new MockItem("key2", 1, false)); + w.get(THINGS, "key2"); // this one is a cache miss, but causes the item to be loaded and cached + w.get(THINGS, "key2"); // now it's a cache hit + stats = cacheMonitor.getCacheStats(); + assertThat(stats.getHitCount(), equalTo(1L)); + assertThat(stats.getMissCount(), equalTo(2L)); + assertThat(stats.getLoadSuccessCount(), equalTo(2L)); + assertThat(stats.getLoadExceptionCount(), equalTo(0L)); + + // Cause a load exception + core.fakeError = new RuntimeException("sorry"); + try { + w.get(THINGS, "key3"); // cache miss -> tries to load the item -> 
gets an exception + fail("expected exception"); + } catch (RuntimeException e) { + assertThat(e.getCause(), is((Throwable)core.fakeError)); + } + stats = cacheMonitor.getCacheStats(); + assertThat(stats.getHitCount(), equalTo(1L)); + assertThat(stats.getMissCount(), equalTo(3L)); + assertThat(stats.getLoadSuccessCount(), equalTo(2L)); + assertThat(stats.getLoadExceptionCount(), equalTo(1L)); + } + } + private Map, Map> makeData(MockItem... items) { Map innerMap = new HashMap<>(); for (MockItem item: items) {
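[Editor's note] The canGetCacheStats test above exercises the monitor against the wrapper directly. From application code, the intended wiring is through the persistent data store builder; a minimal sketch, assuming PersistentDataStoreBuilder exposes a cacheMonitor(...) option corresponding to the CachingStoreWrapper.Builder.cacheMonitor(...) method added in this patch:

import com.launchdarkly.client.Components;
import com.launchdarkly.client.LDClient;
import com.launchdarkly.client.LDConfig;
import com.launchdarkly.client.integrations.CacheMonitor;
import com.launchdarkly.client.integrations.Redis;

public class CacheMonitorSketch {
  public static void main(String[] args) throws Exception {
    CacheMonitor cacheMonitor = new CacheMonitor();
    LDConfig config = new LDConfig.Builder()
        .dataStore(
            Components.persistentDataStore(Redis.dataStore())
                .cacheSeconds(30)
                .cacheMonitor(cacheMonitor)  // assumed builder option; see note above
        )
        .build();
    try (LDClient client = new LDClient("SDK_KEY", config)) {
      // e.g. from a periodic metrics task, while the client is in use:
      CacheMonitor.CacheStats stats = cacheMonitor.getCacheStats();
      System.out.println("cache hits=" + stats.getHitCount()
          + ", misses=" + stats.getMissCount()
          + ", load exceptions=" + stats.getLoadExceptionCount());
    }
  }
}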