@launchdarkly/js-server-sdk-common - npm package version comparison

Comparing version 1.0.8 to 1.0.9-beta.1
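
Most hunks below are mechanical: the compiled JavaScript drops the down-leveled helpers (`_a` temporaries, ternary null checks, `Object.assign`) in favor of native class fields, optional chaining, nullish coalescing, and object spread. This is consistent with a newer compilation target for the package (an assumption; the tsconfig itself is not shown in this diff). A minimal sketch of the recurring before/after shape, using illustrative stand-in names:

// Illustrative only; `logger` and the data objects stand in for the SDK's own fields.
const logger: { debug(msg: string): void } | undefined = console;
const oldData = { a: 1 };
const newData = { b: 2 };
const retryMessage: string | undefined = undefined;

logger?.debug('msg');                                   // was: (_a = logger) === null || _a === void 0 ? void 0 : _a.debug('msg')
const merged = { ...oldData, ...newData };              // was: Object.assign(Object.assign({}, oldData), newData)
const action = retryMessage ?? 'giving up permanently'; // was: retryMessage !== null && retryMessage !== void 0 ? retryMessage : '...'
console.log(merged, action);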


dist/BigSegmentsManager.js

@@ -11,2 +11,9 @@ "use strict";

class BigSegmentsManager {
store;
logger;
crypto;
cache;
pollHandle;
staleTimeMs;
statusProvider;
constructor(store,

@@ -47,7 +54,6 @@ // The store will have been created before the manager is instantiated, so we do not need

async getUserMembership(userKey) {
var _a, _b, _c;
if (!this.store) {
return undefined;
}
const memberCache = (_a = this.cache) === null || _a === void 0 ? void 0 : _a.get(userKey);
const memberCache = this.cache?.get(userKey);
let membership;

@@ -58,6 +64,6 @@ if (!memberCache) {

const cacheItem = { membership };
(_b = this.cache) === null || _b === void 0 ? void 0 : _b.set(userKey, cacheItem);
this.cache?.set(userKey, cacheItem);
}
catch (err) {
(_c = this.logger) === null || _c === void 0 ? void 0 : _c.error(`Big Segment store membership query returned error: ${err}`);
this.logger?.error(`Big Segment store membership query returned error: ${err}`);
return [null, 'STORE_ERROR'];

@@ -80,3 +86,2 @@ }

async pollStoreAndUpdateStatus() {
var _a, _b, _c;
if (!this.store) {

@@ -86,3 +91,3 @@ this.statusProvider.setStatus({ available: false, stale: false });

}
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.debug('Querying Big Segment store status');
this.logger?.debug('Querying Big Segment store status');
let newStatus;

@@ -97,3 +102,3 @@ try {

catch (err) {
(_b = this.logger) === null || _b === void 0 ? void 0 : _b.error(`Big Segment store status query returned error: ${err}`);
this.logger?.error(`Big Segment store status query returned error: ${err}`);
newStatus = { available: false, stale: false };

@@ -105,3 +110,3 @@ }

lastStatus.stale !== newStatus.stale) {
(_c = this.logger) === null || _c === void 0 ? void 0 : _c.debug('Big Segment store status changed from %s to %s', JSON.stringify(lastStatus), JSON.stringify(newStatus));
this.logger?.debug('Big Segment store status changed from %s to %s', JSON.stringify(lastStatus), JSON.stringify(newStatus));
this.statusProvider.setStatus(newStatus);

@@ -108,0 +113,0 @@ this.statusProvider.notify();

@@ -7,2 +7,5 @@ "use strict";

class BigSegmentStoreStatusProviderImpl {
onRequestStatus;
lastStatus;
listener;
constructor(onRequestStatus) {

@@ -33,5 +36,4 @@ this.onRequestStatus = onRequestStatus;

notify() {
var _a;
if (this.lastStatus) {
(_a = this.listener) === null || _a === void 0 ? void 0 : _a.call(this, this.lastStatus);
this.listener?.(this.lastStatus);
}

@@ -38,0 +40,0 @@ }

@@ -7,7 +7,14 @@ "use strict";

class LruCache {
values;
keys;
lastUpdated;
next;
prev;
keyMap = new Map();
head = 0;
tail = 0;
max;
size = 0;
maxAge;
constructor(options) {
this.keyMap = new Map();
this.head = 0;
this.tail = 0;
this.size = 0;
const { max } = options;

@@ -14,0 +21,0 @@ this.max = max;

@@ -12,5 +12,7 @@ "use strict";

class TtlCache {
options;
storage = new Map();
checkIntervalHandle;
constructor(options) {
this.options = options;
this.storage = new Map();
this.checkIntervalHandle = setInterval(() => {

@@ -32,3 +34,3 @@ this.purgeStale();

}
return record === null || record === void 0 ? void 0 : record.value;
return record?.value;
}

@@ -35,0 +37,0 @@ /**

@@ -9,5 +9,5 @@ "use strict";

class ClientMessages {
static missingContextKeyNoEvent = 'Context was unspecified or had no key; event will not be sent';
}
ClientMessages.missingContextKeyNoEvent = 'Context was unspecified or had no key; event will not be sent';
exports.default = ClientMessages;
//# sourceMappingURL=ClientMessages.js.map

@@ -7,3 +7,2 @@ "use strict";

function computeDependencies(namespace, item) {
var _a, _b;
const ret = new NamespacedDataSet_1.default();

@@ -14,3 +13,3 @@ const isFlag = namespace === VersionedDataKinds_1.default.Features.namespace;

const flag = item;
(_a = flag === null || flag === void 0 ? void 0 : flag.prerequisites) === null || _a === void 0 ? void 0 : _a.forEach((prereq) => {
flag?.prerequisites?.forEach((prereq) => {
ret.set(namespace, prereq.key, true);

@@ -21,5 +20,4 @@ });

const itemWithRuleClauses = item;
(_b = itemWithRuleClauses === null || itemWithRuleClauses === void 0 ? void 0 : itemWithRuleClauses.rules) === null || _b === void 0 ? void 0 : _b.forEach((rule) => {
var _a;
(_a = rule.clauses) === null || _a === void 0 ? void 0 : _a.forEach((clause) => {
itemWithRuleClauses?.rules?.forEach((rule) => {
rule.clauses?.forEach((clause) => {
if (clause.op === 'segmentMatch') {

@@ -39,2 +37,6 @@ clause.values.forEach((value) => {

class DataSourceUpdates {
featureStore;
hasEventListeners;
onChange;
dependencyTracker = new DependencyTracker_1.default();
constructor(featureStore, hasEventListeners, onChange) {

@@ -44,3 +46,2 @@ this.featureStore = featureStore;

this.onChange = onChange;
this.dependencyTracker = new DependencyTracker_1.default();
}

@@ -61,5 +62,5 @@ init(allData, callback) {

Object.keys(allData).forEach((namespace) => {
const oldDataForKind = (oldData === null || oldData === void 0 ? void 0 : oldData[namespace]) || {};
const oldDataForKind = oldData?.[namespace] || {};
const newDataForKind = allData[namespace];
const mergedData = Object.assign(Object.assign({}, oldDataForKind), newDataForKind);
const mergedData = { ...oldDataForKind, ...newDataForKind };
Object.keys(mergedData).forEach((key) => {

@@ -71,3 +72,3 @@ this.addIfModified(namespace, key, oldDataForKind && oldDataForKind[key], newDataForKind && newDataForKind[key], updatedItems);

}
callback === null || callback === void 0 ? void 0 : callback();
callback?.();
});

@@ -101,3 +102,3 @@ };

}
callback === null || callback === void 0 ? void 0 : callback();
callback?.();
});

@@ -104,0 +105,0 @@ };

@@ -8,14 +8,12 @@ "use strict";

class DependencyTracker {
constructor() {
this.dependenciesFrom = new NamespacedDataSet_1.default();
this.dependenciesTo = new NamespacedDataSet_1.default();
}
dependenciesFrom = new NamespacedDataSet_1.default();
dependenciesTo = new NamespacedDataSet_1.default();
updateDependenciesFrom(namespace, key, newDependencySet) {
const oldDependencySet = this.dependenciesFrom.get(namespace, key);
oldDependencySet === null || oldDependencySet === void 0 ? void 0 : oldDependencySet.enumerate((depNs, depKey) => {
oldDependencySet?.enumerate((depNs, depKey) => {
const depsToThisDep = this.dependenciesTo.get(depNs, depKey);
depsToThisDep === null || depsToThisDep === void 0 ? void 0 : depsToThisDep.remove(namespace, key);
depsToThisDep?.remove(namespace, key);
});
this.dependenciesFrom.set(namespace, key, newDependencySet);
newDependencySet === null || newDependencySet === void 0 ? void 0 : newDependencySet.enumerate((depNs, depKey) => {
newDependencySet?.enumerate((depNs, depKey) => {
let depsToThisDep = this.dependenciesTo.get(depNs, depKey);

@@ -33,3 +31,3 @@ if (!depsToThisDep) {

const affectedItems = this.dependenciesTo.get(modifiedNamespace, modifiedKey);
affectedItems === null || affectedItems === void 0 ? void 0 : affectedItems.enumerate((namespace, key) => {
affectedItems?.enumerate((namespace, key) => {
this.updateModifiedItems(inDependencySet, namespace, key);

@@ -36,0 +34,0 @@ });

@@ -16,2 +16,8 @@ "use strict";

class FileDataSource {
featureStore;
logger;
yamlParser;
fileLoader;
allData = {};
initCallback;
/**

@@ -23,7 +29,4 @@ * This is internal because we want instances to only be created with the

constructor(options, filesystem, featureStore) {
var _a;
this.featureStore = featureStore;
this.allData = {};
this.fileLoader = new FileLoader_1.default(filesystem, options.paths, (_a = options.autoUpdate) !== null && _a !== void 0 ? _a : false, (results) => {
var _a, _b;
this.fileLoader = new FileLoader_1.default(filesystem, options.paths, options.autoUpdate ?? false, (results) => {
// Whenever changes are detected we re-process all of the data.

@@ -36,4 +39,4 @@ // The FileLoader will have handled debouncing for us.

// If this was during start, then the initCallback will be present.
(_a = this.initCallback) === null || _a === void 0 ? void 0 : _a.call(this, err);
(_b = this.logger) === null || _b === void 0 ? void 0 : _b.error(`Error processing files: ${err}`);
this.initCallback?.(err);
this.logger?.error(`Error processing files: ${err}`);
}

@@ -55,3 +58,3 @@ });

// Report back to the caller.
fn === null || fn === void 0 ? void 0 : fn(err);
fn?.(err);
}

@@ -97,6 +100,5 @@ })();

this.featureStore.init(this.allData, () => {
var _a;
// Call the init callback if present.
// Then clear the callback so we cannot call it again.
(_a = this.initCallback) === null || _a === void 0 ? void 0 : _a.call(this);
this.initCallback?.();
this.initCallback = undefined;

@@ -103,0 +105,0 @@ });

@@ -16,2 +16,10 @@ "use strict";

class FileLoader {
filesystem;
paths;
watch;
callback;
watchers = [];
fileData = {};
fileTimestamps = {};
debounceHandle;
constructor(filesystem, paths, watch, callback) {

@@ -22,5 +30,2 @@ this.filesystem = filesystem;

this.callback = callback;
this.watchers = [];
this.fileData = {};
this.fileTimestamps = {};
}

@@ -27,0 +32,0 @@ /**

@@ -11,3 +11,3 @@ "use strict";

}
const action = retryMessage !== null && retryMessage !== void 0 ? retryMessage : 'giving up permanently';
const action = retryMessage ?? 'giving up permanently';
return `Received ${desc} for ${context} - ${action}`;

@@ -14,0 +14,0 @@ }

@@ -7,8 +7,5 @@ "use strict";

class NamespacedDataSet {
constructor() {
this.itemsByNamespace = {};
}
itemsByNamespace = {};
get(namespace, key) {
var _a;
return (_a = this.itemsByNamespace[namespace]) === null || _a === void 0 ? void 0 : _a[key];
return this.itemsByNamespace[namespace]?.[key];
}

@@ -15,0 +12,0 @@ set(namespace, key, value) {

@@ -9,3 +9,3 @@ "use strict";

// emitting events. Allowing the client an opportunity to register events.
setTimeout(() => fn === null || fn === void 0 ? void 0 : fn(), 0);
setTimeout(() => fn?.(), 0);
}

@@ -12,0 +12,0 @@ stop() { }

@@ -11,6 +11,11 @@ "use strict";

class PollingProcessor {
requestor;
featureStore;
stopped = false;
logger;
pollInterval;
timeoutHandle;
constructor(config, requestor, featureStore) {
this.requestor = requestor;
this.featureStore = featureStore;
this.stopped = false;
this.logger = config.logger;

@@ -21,3 +26,2 @@ this.pollInterval = config.pollInterval;

poll(fn) {
var _a;
if (this.stopped) {

@@ -27,19 +31,17 @@ return;

const reportJsonError = (data) => {
var _a, _b;
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.error('Polling received invalid data');
(_b = this.logger) === null || _b === void 0 ? void 0 : _b.debug(`Invalid JSON follows: ${data}`);
fn === null || fn === void 0 ? void 0 : fn(new errors_1.LDPollingError('Malformed JSON data in polling response'));
this.logger?.error('Polling received invalid data');
this.logger?.debug(`Invalid JSON follows: ${data}`);
fn?.(new errors_1.LDPollingError('Malformed JSON data in polling response'));
};
const startTime = Date.now();
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.debug('Polling LaunchDarkly for feature flag updates');
this.logger?.debug('Polling LaunchDarkly for feature flag updates');
this.requestor.requestAllData((err, body) => {
var _a, _b, _c;
const elapsed = Date.now() - startTime;
const sleepFor = Math.max(this.pollInterval * 1000 - elapsed, 0);
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.debug('Elapsed: %d ms, sleeping for %d ms', elapsed, sleepFor);
this.logger?.debug('Elapsed: %d ms, sleeping for %d ms', elapsed, sleepFor);
if (err) {
if (err.status && !(0, errors_1.isHttpRecoverable)(err.status)) {
const message = (0, httpErrorMessage_1.default)(err, 'polling request');
(_b = this.logger) === null || _b === void 0 ? void 0 : _b.error(message);
fn === null || fn === void 0 ? void 0 : fn(new errors_1.LDPollingError(message));
this.logger?.error(message);
fn?.(new errors_1.LDPollingError(message));
// It is not recoverable, return and do not trigger another

@@ -49,3 +51,3 @@ // poll.

}
(_c = this.logger) === null || _c === void 0 ? void 0 : _c.warn((0, httpErrorMessage_1.default)(err, 'polling request', 'will retry'));
this.logger?.warn((0, httpErrorMessage_1.default)(err, 'polling request', 'will retry'));
}

@@ -65,3 +67,3 @@ else if (body) {

this.featureStore.init(initData, () => {
fn === null || fn === void 0 ? void 0 : fn();
fn?.();
// Triggering the next poll after the init has completed.

@@ -68,0 +70,0 @@ this.timeoutHandle = setTimeout(() => {

@@ -9,5 +9,8 @@ "use strict";

class Requestor {
requests;
headers;
uri;
eTagCache = {};
constructor(sdkKey, config, info, requests) {
this.requests = requests;
this.eTagCache = {};
this.headers = (0, defaultHeaders_1.default)(sdkKey, config, info);

@@ -22,5 +25,9 @@ this.uri = `${config.serviceEndpoints.polling}/sdk/latest-all`;

const cacheEntry = this.eTagCache[requestUrl];
const cachedETag = cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.etag;
const cachedETag = cacheEntry?.etag;
const updatedOptions = cachedETag
? Object.assign(Object.assign({}, options), { headers: Object.assign(Object.assign({}, options.headers), { 'if-none-match': cachedETag }) }) : options;
? {
...options,
headers: { ...options.headers, 'if-none-match': cachedETag },
}
: options;
const res = await this.requests.fetch(requestUrl, updatedOptions);

@@ -27,0 +34,0 @@ if (res.status === 304 && cacheEntry) {
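
The Requestor hunk above keeps an ETag per request URL and sends it back as an `if-none-match` header, reusing the cached body when the server answers 304. A minimal sketch of the same conditional-request pattern using the standard fetch API; the cache shape and the `fetchWithETag` helper name are illustrative, not the SDK's internals:

// Hypothetical sketch of conditional polling with an ETag cache.
const eTagCache: Record<string, { etag: string; body: string }> = {};

async function fetchWithETag(url: string): Promise<string> {
  const cached = eTagCache[url];
  const headers: Record<string, string> = cached ? { 'if-none-match': cached.etag } : {};
  const res = await fetch(url, { headers });
  if (res.status === 304 && cached) {
    return cached.body; // Server reports the payload is unchanged; reuse the cached copy.
  }
  const body = await res.text();
  const etag = res.headers.get('etag');
  if (etag) {
    eTagCache[url] = { etag, body };
  }
  return body;
}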

@@ -19,2 +19,11 @@ "use strict";

class StreamingProcessor {
featureStore;
diagnosticsManager;
headers;
eventSource;
logger;
streamUri;
streamInitialReconnectDelay;
requests;
connectionAttemptStartTime;
constructor(sdkKey, config, requests, info, featureStore, diagnosticsManager) {

@@ -41,10 +50,9 @@ this.featureStore = featureStore;

const errorFilter = (err) => {
var _a, _b;
if (err.status && !(0, errors_1.isHttpRecoverable)(err.status)) {
this.logConnectionResult(false);
fn === null || fn === void 0 ? void 0 : fn(new errors_1.LDStreamingError(err.message, err.status));
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.error((0, httpErrorMessage_1.default)(err, 'streaming request'));
fn?.(new errors_1.LDStreamingError(err.message, err.status));
this.logger?.error((0, httpErrorMessage_1.default)(err, 'streaming request'));
return false;
}
(_b = this.logger) === null || _b === void 0 ? void 0 : _b.warn((0, httpErrorMessage_1.default)(err, 'streaming request', 'will retry'));
this.logger?.warn((0, httpErrorMessage_1.default)(err, 'streaming request', 'will retry'));
this.logConnectionResult(false);

@@ -55,6 +63,5 @@ this.logConnectionStarted();

const reportJsonError = (type, data) => {
var _a, _b;
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.error(`Stream received invalid data in "${type}" message`);
(_b = this.logger) === null || _b === void 0 ? void 0 : _b.debug(`Invalid JSON follows: ${data}`);
fn === null || fn === void 0 ? void 0 : fn(new errors_1.LDStreamingError('Malformed JSON data in event stream'));
this.logger?.error(`Stream received invalid data in "${type}" message`);
this.logger?.debug(`Invalid JSON follows: ${data}`);
fn?.(new errors_1.LDStreamingError('Malformed JSON data in event stream'));
};

@@ -71,4 +78,3 @@ // TLS is handled by the platform implementation.

eventSource.onclose = () => {
var _a;
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.info('Closed LaunchDarkly stream connection');
this.logger?.info('Closed LaunchDarkly stream connection');
};

@@ -79,12 +85,9 @@ eventSource.onerror = () => {

eventSource.onopen = () => {
var _a;
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.info('Opened LaunchDarkly stream connection');
this.logger?.info('Opened LaunchDarkly stream connection');
};
eventSource.onretrying = (e) => {
var _a;
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.info(`Will retry stream connection in ${e.delayMillis} milliseconds`);
this.logger?.info(`Will retry stream connection in ${e.delayMillis} milliseconds`);
};
eventSource.addEventListener('put', (event) => {
var _a;
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.debug('Received put event');
this.logger?.debug('Received put event');
if (event && event.data) {

@@ -101,11 +104,10 @@ this.logConnectionResult(true);

};
this.featureStore.init(initData, () => fn === null || fn === void 0 ? void 0 : fn());
this.featureStore.init(initData, () => fn?.());
}
else {
fn === null || fn === void 0 ? void 0 : fn(new errors_1.LDStreamingError('Unexpected payload from event stream'));
fn?.(new errors_1.LDStreamingError('Unexpected payload from event stream'));
}
});
eventSource.addEventListener('patch', (event) => {
var _a, _b;
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.debug('Received patch event');
this.logger?.debug('Received patch event');
if (event && event.data) {

@@ -120,3 +122,3 @@ const parsed = (0, serialization_1.deserializePatch)(event.data);

if (key) {
(_b = this.logger) === null || _b === void 0 ? void 0 : _b.debug(`Updating ${key} in ${parsed.kind.namespace}`);
this.logger?.debug(`Updating ${key} in ${parsed.kind.namespace}`);
this.featureStore.upsert(parsed.kind, parsed.data, () => { });

@@ -127,8 +129,7 @@ }

else {
fn === null || fn === void 0 ? void 0 : fn(new errors_1.LDStreamingError('Unexpected payload from event stream'));
fn?.(new errors_1.LDStreamingError('Unexpected payload from event stream'));
}
});
eventSource.addEventListener('delete', (event) => {
var _a, _b;
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.debug('Received delete event');
this.logger?.debug('Received delete event');
if (event && event.data) {

@@ -143,3 +144,3 @@ const parsed = (0, serialization_1.deserializeDelete)(event.data);

if (key) {
(_b = this.logger) === null || _b === void 0 ? void 0 : _b.debug(`Deleting ${key} in ${parsed.kind.namespace}`);
this.logger?.debug(`Deleting ${key} in ${parsed.kind.namespace}`);
this.featureStore.upsert(parsed.kind, {

@@ -154,3 +155,3 @@ key,

else {
fn === null || fn === void 0 ? void 0 : fn(new errors_1.LDStreamingError('Unexpected payload from event stream'));
fn?.(new errors_1.LDStreamingError('Unexpected payload from event stream'));
}

@@ -160,4 +161,3 @@ });

stop() {
var _a;
(_a = this.eventSource) === null || _a === void 0 ? void 0 : _a.close();
this.eventSource?.close();
this.eventSource = undefined;

@@ -164,0 +164,0 @@ }

@@ -15,2 +15,3 @@ "use strict";

class LDStreamingError extends Error {
code;
constructor(message, code) {

@@ -17,0 +18,0 @@ super(message);

@@ -19,2 +19,3 @@ "use strict";

class Bucketer {
crypto;
constructor(crypto) {

@@ -21,0 +22,0 @@ this.crypto = crypto;

@@ -16,3 +16,3 @@ /**

*/
export declare function allSeriesAsync<T>(collection: T[] | undefined, check: (val: T, index: number) => Promise<boolean>): Promise<boolean>;
export declare function allSeriesAsync<T>(collection: T[] | undefined, check: (val: T, index: number, cb: (res: boolean) => void) => void, cb: (res: boolean) => void): void;
/**

@@ -25,3 +25,3 @@ * Iterate a collection in series awaiting each check operation.

*/
export declare function firstSeriesAsync<T>(collection: T[] | undefined, check: (val: T, index: number) => Promise<boolean>): Promise<boolean>;
export declare function firstSeriesAsync<T>(collection: T[] | undefined, check: (val: T, index: number, cb: (res: boolean) => void) => void, cb: (res: boolean) => void): void;
//# sourceMappingURL=collection.d.ts.map

@@ -14,3 +14,3 @@ "use strict";

let res;
collection === null || collection === void 0 ? void 0 : collection.some((item, index) => {
collection?.some((item, index) => {
res = operator(item, index);

@@ -22,27 +22,36 @@ return !!res;

exports.firstResult = firstResult;
async function seriesAsync(collection, check, all) {
const ITERATION_RECURSION_LIMIT = 50;
function seriesAsync(collection, check, all, index, cb) {
if (!collection) {
return false;
cb(false);
return;
}
for (let index = 0; index < collection.length; index += 1) {
// This warning is to encourage starting many operations at once.
// In this case we only want to evaluate until we encounter something that
// doesn't match. Versus starting all the evaluations and then letting them
// all resolve.
// eslint-disable-next-line no-await-in-loop
const res = await check(collection[index], index);
// If we want all checks to pass, then we return on any failed check.
// If we want only a single result to pass, then we return on a true result.
if (all) {
if (!res) {
return false;
if (index < collection?.length) {
check(collection[index], index, (res) => {
if (all) {
if (!res) {
cb(false);
return;
}
}
}
else if (res) {
return true;
}
else if (res) {
cb(true);
return;
}
if (collection.length > ITERATION_RECURSION_LIMIT) {
// When we hit the recursion limit we defer execution
// by using a resolved promise. This is similar to using setImmediate
// but more portable.
Promise.resolve().then(() => {
seriesAsync(collection, check, all, index + 1, cb);
});
}
else {
seriesAsync(collection, check, all, index + 1, cb);
}
});
}
// In the case of 'all', getting here means all checks passed.
// In the case of 'first', this means no checks passed.
return all;
else {
cb(all);
}
}

@@ -55,4 +64,4 @@ /**

*/
async function allSeriesAsync(collection, check) {
return seriesAsync(collection, check, true);
function allSeriesAsync(collection, check, cb) {
seriesAsync(collection, check, true, 0, cb);
}

@@ -67,6 +76,6 @@ exports.allSeriesAsync = allSeriesAsync;

*/
async function firstSeriesAsync(collection, check) {
return seriesAsync(collection, check, false);
function firstSeriesAsync(collection, check, cb) {
seriesAsync(collection, check, false, 0, cb);
}
exports.firstSeriesAsync = firstSeriesAsync;
//# sourceMappingURL=collection.js.map
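
The series helpers above move from async/await to callbacks, and every ITERATION_RECURSION_LIMIT items the next step is deferred onto the microtask queue via Promise.resolve() so deep collections cannot overflow the stack. A minimal usage sketch, assuming the two helpers defined above are in scope (they are internal module exports, so the import path is omitted on purpose):

// Signatures copied from the collection.d.ts shown above.
declare function allSeriesAsync<T>(
  collection: T[] | undefined,
  check: (val: T, index: number, cb: (res: boolean) => void) => void,
  cb: (res: boolean) => void,
): void;
declare function firstSeriesAsync<T>(
  collection: T[] | undefined,
  check: (val: T, index: number, cb: (res: boolean) => void) => void,
  cb: (res: boolean) => void,
): void;

allSeriesAsync([1, 2, 3], (value, _index, done) => {
  done(value > 0); // every element passes, so the final callback receives true
}, (allPassed) => {
  console.log('all passed:', allPassed);
});

firstSeriesAsync(['a', 'b', 'c'], (value, _index, done) => {
  done(value === 'b'); // iteration stops at the first passing element
}, (found) => {
  console.log('found a match:', found);
});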

@@ -16,3 +16,5 @@ import { AttributeReference } from '@launchdarkly/js-sdk-common';

bucketByAttributeReference?: AttributeReference;
includedSet?: Set<string>;
excludedSet?: Set<string>;
}
//# sourceMappingURL=Segment.d.ts.map
export interface SegmentTarget {
contextKind: string;
values: string[];
valuesSet?: Set<string>;
}
//# sourceMappingURL=SegmentTarget.d.ts.map
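
The new optional includedSet, excludedSet, and valuesSet fields let target matching use Set.has() instead of Array.includes(), turning a linear scan of target keys into a constant-time lookup while the original arrays stay in place. A hedged sketch of how such sets could be precomputed from the existing arrays; the preprocessTarget and targetHasKey helpers here are illustrative, not the SDK's own functions:

// Illustrative preprocessing: build a Set alongside the original values array.
interface SegmentTarget {
  contextKind: string;
  values: string[];
  valuesSet?: Set<string>;
}

function preprocessTarget(target: SegmentTarget): void {
  target.valuesSet = new Set(target.values); // O(1) membership checks afterwards
}

// Matching prefers the Set when it is present, falling back to the array otherwise.
function targetHasKey(target: SegmentTarget, key: string): boolean {
  return target.valuesSet ? target.valuesSet.has(key) : target.values.includes(key);
}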

@@ -11,2 +11,6 @@ "use strict";

class EvalResult {
isError;
detail;
message;
events;
constructor(isError, detail, message) {

@@ -28,3 +32,3 @@ this.isError = isError;

return new EvalResult(true, {
value: def !== null && def !== void 0 ? def : null,
value: def ?? null,
variationIndex: null,

@@ -31,0 +35,0 @@ reason: { kind: 'ERROR', errorKind },

@@ -26,4 +26,3 @@ "use strict";

function evalTargets(flag, context) {
var _a;
if (!((_a = flag.contextTargets) === null || _a === void 0 ? void 0 : _a.length)) {
if (!flag.contextTargets?.length) {
// There are not context targets, so we are going to evaluate the user targets.

@@ -30,0 +29,0 @@ return (0, collection_1.firstResult)(flag.targets, (target) => evalTarget(flag, target, context));

@@ -38,14 +38,20 @@ "use strict";

class EvalState {
events;
bigSegmentsStatus;
bigSegmentsMembership;
}
class Match {
isMatch;
error = false;
result;
constructor(isMatch) {
this.isMatch = isMatch;
this.error = false;
}
}
class MatchError {
result;
error = true;
isMatch = false;
constructor(result) {
this.result = result;
this.error = true;
this.isMatch = false;
}

@@ -57,2 +63,4 @@ }

class Evaluator {
queries;
bucketer;
constructor(platform, queries) {

@@ -63,9 +71,28 @@ this.queries = queries;

async evaluate(flag, context, eventFactory) {
return new Promise((resolve) => {
const state = new EvalState();
this.evaluateInternal(flag, context, state, [], (res) => {
if (state.bigSegmentsStatus) {
res.detail.reason = {
...res.detail.reason,
bigSegmentsStatus: state.bigSegmentsStatus,
};
}
res.events = state.events;
resolve(res);
}, eventFactory);
});
}
evaluateCb(flag, context, cb, eventFactory) {
const state = new EvalState();
const res = await this.evaluateInternal(flag, context, state, [], eventFactory);
if (state.bigSegmentsStatus) {
res.detail.reason = Object.assign(Object.assign({}, res.detail.reason), { bigSegmentsStatus: state.bigSegmentsStatus });
}
res.events = state.events;
return res;
this.evaluateInternal(flag, context, state, [], (res) => {
if (state.bigSegmentsStatus) {
res.detail.reason = {
...res.detail.reason,
bigSegmentsStatus: state.bigSegmentsStatus,
};
}
res.events = state.events;
cb(res);
}, eventFactory);
}
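
In the hunk above, evaluate() now just wraps the new callback-based evaluateCb()/evaluateInternal() path in a Promise, so awaiting callers keep working while the internal evaluation of prerequisites, rules, and segments no longer awaits each step. A sketch of the two calling styles; the EvaluatorLike interface below is narrowed to what the sketch needs and is not the SDK's real type:

interface EvalDetail { value: unknown }
interface EvalResultLike { detail: EvalDetail }
interface EvaluatorLike {
  evaluate(flag: unknown, context: unknown): Promise<EvalResultLike>;
  evaluateCb(flag: unknown, context: unknown, cb: (res: EvalResultLike) => void): void;
}

async function evaluateBothWays(evaluator: EvaluatorLike, flag: unknown, context: unknown): Promise<void> {
  // Promise style: evaluate() wraps the callback path, so existing await-based callers are unaffected.
  const viaPromise = await evaluator.evaluate(flag, context);
  console.log('promise result:', viaPromise.detail.value);

  // Callback style: evaluateCb() runs the same internal path without constructing a Promise.
  evaluator.evaluateCb(flag, context, (res) => {
    console.log('callback result:', res.detail.value);
  });
}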

@@ -82,23 +109,29 @@ /**

*/
async evaluateInternal(flag, context,
evaluateInternal(flag, context,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
state, visitedFlags, eventFactory) {
state, visitedFlags, cb, eventFactory) {
if (!flag.on) {
return (0, variations_1.getOffVariation)(flag, Reasons_1.default.Off);
cb((0, variations_1.getOffVariation)(flag, Reasons_1.default.Off));
return;
}
const prereqResult = await this.checkPrerequisites(flag, context, state, visitedFlags, eventFactory);
// If there is a prereq result, then prereqs have failed, or there was
// an error.
if (prereqResult) {
return prereqResult;
}
const targetRes = (0, evalTargets_1.default)(flag, context);
if (targetRes) {
return targetRes;
}
const ruleRes = await this.evaluateRules(flag, context, state);
if (ruleRes) {
return ruleRes;
}
return this.variationForContext(flag.fallthrough, context, flag, Reasons_1.default.Fallthrough);
this.checkPrerequisites(flag, context, state, visitedFlags, (res) => {
// If there is a prereq result, then prereqs have failed, or there was
// an error.
if (res) {
cb(res);
return;
}
const targetRes = (0, evalTargets_1.default)(flag, context);
if (targetRes) {
cb(targetRes);
return;
}
this.evaluateRules(flag, context, state, (evalRes) => {
if (evalRes) {
cb(evalRes);
return;
}
cb(this.variationForContext(flag.fallthrough, context, flag, Reasons_1.default.Fallthrough));
});
}, eventFactory);
}

@@ -114,44 +147,44 @@ /**

*/
async checkPrerequisites(flag, context, state, visitedFlags, eventFactory) {
checkPrerequisites(flag, context, state, visitedFlags, cb, eventFactory) {
let prereqResult;
if (!flag.prerequisites || !flag.prerequisites.length) {
return undefined;
cb(undefined);
return;
}
// On any error conditions the prereq result will be set, so we do not need
// the result of the series evaluation.
await (0, collection_1.allSeriesAsync)(flag.prerequisites, async (prereq) => {
var _a;
(0, collection_1.allSeriesAsync)(flag.prerequisites, (prereq, _index, prereqCb) => {
if (visitedFlags.indexOf(prereq.key) !== -1) {
prereqResult = EvalResult_1.default.forError(ErrorKinds_1.default.MalformedFlag, `Prerequisite of ${flag.key} causing a circular reference.` +
' This is probably a temporary condition due to an incomplete update.');
return false;
prereqCb(true);
return;
}
const updatedVisitedFlags = [...visitedFlags, prereq.key];
const prereqFlag = await this.queries.getFlag(prereq.key);
if (!prereqFlag) {
prereqResult = (0, variations_1.getOffVariation)(flag, Reasons_1.default.prerequisiteFailed(prereq.key));
return false;
}
const evalResult = await this.evaluateInternal(prereqFlag, context, state, updatedVisitedFlags, eventFactory);
// eslint-disable-next-line no-param-reassign
state.events = (_a = state.events) !== null && _a !== void 0 ? _a : [];
if (eventFactory) {
state.events.push(eventFactory.evalEvent(prereqFlag, context, evalResult.detail, null, flag));
}
if (evalResult.isError) {
prereqResult = evalResult;
return false;
}
if (evalResult.isOff || evalResult.detail.variationIndex !== prereq.variation) {
prereqResult = (0, variations_1.getOffVariation)(flag, Reasons_1.default.prerequisiteFailed(prereq.key));
return false;
}
return true;
this.queries.getFlag(prereq.key, (prereqFlag) => {
if (!prereqFlag) {
prereqResult = (0, variations_1.getOffVariation)(flag, Reasons_1.default.prerequisiteFailed(prereq.key));
prereqCb(false);
return;
}
this.evaluateInternal(prereqFlag, context, state, updatedVisitedFlags, (res) => {
// eslint-disable-next-line no-param-reassign
state.events = state.events ?? [];
if (eventFactory) {
state.events.push(eventFactory.evalEvent(prereqFlag, context, res.detail, null, flag));
}
if (res.isError) {
prereqResult = res;
return prereqCb(false);
}
if (res.isOff || res.detail.variationIndex !== prereq.variation) {
prereqResult = (0, variations_1.getOffVariation)(flag, Reasons_1.default.prerequisiteFailed(prereq.key));
return prereqCb(false);
}
return prereqCb(true);
}, eventFactory);
});
}, () => {
cb(prereqResult);
});
if (prereqResult) {
return prereqResult;
}
// There were no prereqResults for errors or failed prerequisites.
// So they have all passed.
return undefined;
}

@@ -166,41 +199,52 @@ /**

*/
async evaluateRules(flag, context, state) {
evaluateRules(flag, context, state, cb) {
let ruleResult;
await (0, collection_1.firstSeriesAsync)(flag.rules, async (rule, ruleIndex) => {
ruleResult = await this.ruleMatchContext(flag, rule, ruleIndex, context, state, []);
return !!ruleResult;
});
return ruleResult;
(0, collection_1.firstSeriesAsync)(flag.rules, (rule, ruleIndex, ruleCb) => {
this.ruleMatchContext(flag, rule, ruleIndex, context, state, [], (res) => {
ruleResult = res;
ruleCb(!!res);
});
}, () => cb(ruleResult));
}
async clauseMatchContext(clause, context, segmentsVisited, state) {
clauseMatchContext(clause, context, segmentsVisited, state, cb) {
let errorResult;
if (clause.op === 'segmentMatch') {
const match = await (0, collection_1.firstSeriesAsync)(clause.values, async (value) => {
const segment = await this.queries.getSegment(value);
if (segment) {
if (segmentsVisited.includes(segment.key)) {
errorResult = EvalResult_1.default.forError(ErrorKinds_1.default.MalformedFlag, `Segment rule referencing segment ${segment.key} caused a circular reference. ` +
'This is probably a temporary condition due to an incomplete update');
// There was an error, so stop checking further segments.
return true;
(0, collection_1.firstSeriesAsync)(clause.values, (value, _index, innerCb) => {
this.queries.getSegment(value, (segment) => {
if (segment) {
if (segmentsVisited.includes(segment.key)) {
errorResult = EvalResult_1.default.forError(ErrorKinds_1.default.MalformedFlag, `Segment rule referencing segment ${segment.key} caused a circular reference. ` +
'This is probably a temporary condition due to an incomplete update');
// There was an error, so stop checking further segments.
innerCb(true);
return;
}
const newVisited = [...segmentsVisited, segment?.key];
this.segmentMatchContext(segment, context, state, newVisited, (res) => {
if (res.error) {
errorResult = res.result;
}
innerCb(res.error || res.isMatch);
// innerCb(true);
});
}
const newVisited = [...segmentsVisited, segment === null || segment === void 0 ? void 0 : segment.key];
const res = await this.segmentMatchContext(segment, context, state, newVisited);
if (res.error) {
errorResult = res.result;
else {
innerCb(false);
}
return res.error || res.isMatch;
});
}, (match) => {
if (errorResult) {
return cb(new MatchError(errorResult));
}
return false;
return cb(new Match(match));
});
if (errorResult) {
return new MatchError(errorResult);
}
return new Match(match);
// TODO: Should this return here?
return;
}
// This is after segment matching, which does not use the reference.
if (!clause.attributeReference.isValid) {
return new MatchError(EvalResult_1.default.forError(ErrorKinds_1.default.MalformedFlag, 'Invalid attribute reference in clause'));
cb(new MatchError(EvalResult_1.default.forError(ErrorKinds_1.default.MalformedFlag, 'Invalid attribute reference in clause')));
return;
}
return new Match((0, matchClause_1.default)(clause, context));
cb(new Match((0, matchClause_1.default)(clause, context)));
}

@@ -215,19 +259,22 @@ /**

*/
async ruleMatchContext(flag, rule, ruleIndex, context, state, segmentsVisited) {
ruleMatchContext(flag, rule, ruleIndex, context, state, segmentsVisited, cb) {
if (!rule.clauses) {
return undefined;
cb(undefined);
return;
}
let errorResult;
const match = await (0, collection_1.allSeriesAsync)(rule.clauses, async (clause) => {
const res = await this.clauseMatchContext(clause, context, segmentsVisited, state);
errorResult = res.result;
return res.error || res.isMatch;
(0, collection_1.allSeriesAsync)(rule.clauses, (clause, _index, rule_cb) => {
this.clauseMatchContext(clause, context, segmentsVisited, state, (res) => {
errorResult = res.result;
return rule_cb(res.error || res.isMatch);
});
}, (match) => {
if (errorResult) {
return cb(errorResult);
}
if (match) {
return cb(this.variationForContext(rule, context, flag, Reasons_1.default.ruleMatch(rule.id, ruleIndex)));
}
return cb(undefined);
});
if (errorResult) {
return errorResult;
}
if (match) {
return this.variationForContext(rule, context, flag, Reasons_1.default.ruleMatch(rule.id, ruleIndex));
}
return undefined;
}

@@ -253,3 +300,3 @@ variationForContext(varOrRollout, context, flag, reason) {

const [bucket, hadContext] = this.bucketer.bucket(context, flag.key, bucketBy, flag.salt || '', rollout.contextKind, rollout.seed);
const updatedReason = Object.assign({}, reason);
const updatedReason = { ...reason };
let sum = 0;

@@ -282,51 +329,57 @@ for (let i = 0; i < variations.length; i += 1) {

}
async segmentRuleMatchContext(segment, rule, context, state, segmentsVisited) {
segmentRuleMatchContext(segment, rule, context, state, segmentsVisited, cb) {
let errorResult;
const match = await (0, collection_1.allSeriesAsync)(rule.clauses, async (clause) => {
const res = await this.clauseMatchContext(clause, context, segmentsVisited, state);
errorResult = res.result;
return res.error || res.isMatch;
});
if (errorResult) {
return new MatchError(errorResult);
}
if (match) {
if (rule.weight === undefined) {
return new Match(match);
(0, collection_1.allSeriesAsync)(rule.clauses, (clause, _index, innerCb) => {
this.clauseMatchContext(clause, context, segmentsVisited, state, (res) => {
errorResult = res.result;
innerCb(res.error || res.isMatch);
});
}, (match) => {
if (errorResult) {
return cb(new MatchError(errorResult));
}
const bucketBy = (0, variations_1.getBucketBy)(false, rule.bucketByAttributeReference);
if (!bucketBy.isValid) {
return new MatchError(EvalResult_1.default.forError(ErrorKinds_1.default.MalformedFlag, 'Invalid attribute reference in clause'));
if (match) {
if (rule.weight === undefined) {
return cb(new Match(match));
}
const bucketBy = (0, variations_1.getBucketBy)(false, rule.bucketByAttributeReference);
if (!bucketBy.isValid) {
return cb(new MatchError(EvalResult_1.default.forError(ErrorKinds_1.default.MalformedFlag, 'Invalid attribute reference in clause')));
}
const [bucket] = this.bucketer.bucket(context, segment.key, bucketBy, segment.salt || '', rule.rolloutContextKind);
return cb(new Match(bucket < rule.weight / 100000.0));
}
const [bucket] = this.bucketer.bucket(context, segment.key, bucketBy, segment.salt || '', rule.rolloutContextKind);
return new Match(bucket < rule.weight / 100000.0);
}
return new Match(false);
return cb(new Match(false));
});
}
// eslint-disable-next-line class-methods-use-this
async simpleSegmentMatchContext(segment, context, state, segmentsVisited) {
simpleSegmentMatchContext(segment, context, state, segmentsVisited, cb) {
if (!segment.unbounded) {
const includeExclude = (0, matchSegmentTargets_1.default)(segment, context);
if (includeExclude !== undefined) {
return new Match(includeExclude);
cb(new Match(includeExclude));
return;
}
}
let evalResult;
const matched = await (0, collection_1.firstSeriesAsync)(segment.rules, async (rule) => {
const res = await this.segmentRuleMatchContext(segment, rule, context, state, segmentsVisited);
evalResult = res.result;
return res.error || res.isMatch;
(0, collection_1.firstSeriesAsync)(segment.rules, (rule, _index, innerCb) => {
this.segmentRuleMatchContext(segment, rule, context, state, segmentsVisited, (res) => {
evalResult = res.result;
return innerCb(res.error || res.isMatch);
});
}, (matched) => {
if (evalResult) {
return cb(new MatchError(evalResult));
}
return cb(new Match(matched));
});
if (evalResult) {
return new MatchError(evalResult);
}
return new Match(matched);
}
async segmentMatchContext(segment, context,
segmentMatchContext(segment, context,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
state,
// eslint-disable-next-line @typescript-eslint/no-unused-vars
segmentsVisited) {
segmentsVisited, cb) {
if (!segment.unbounded) {
return this.simpleSegmentMatchContext(segment, context, state, segmentsVisited);
this.simpleSegmentMatchContext(segment, context, state, segmentsVisited, cb);
return;
}

@@ -336,3 +389,4 @@ const bigSegmentKind = segment.unboundedContextKind || 'user';

if (!keyForBigSegment) {
return new Match(false);
cb(new Match(false));
return;
}

@@ -346,3 +400,4 @@ if (!segment.generation) {

state.bigSegmentsStatus = computeUpdatedBigSegmentsStatus(state.bigSegmentsStatus, 'NOT_CONFIGURED');
return new Match(false);
cb(new Match(false));
return;
}

@@ -354,31 +409,36 @@ if (state.bigSegmentsMembership && state.bigSegmentsMembership[keyForBigSegment]) {

// *all* of the user's segment memberships.
return this.bigSegmentMatchContext(state.bigSegmentsMembership[keyForBigSegment], segment, context, state);
this.bigSegmentMatchContext(state.bigSegmentsMembership[keyForBigSegment], segment, context, state).then(cb);
return;
}
const result = await this.queries.getBigSegmentsMembership(keyForBigSegment);
// eslint-disable-next-line no-param-reassign
state.bigSegmentsMembership = state.bigSegmentsMembership || {};
if (result) {
const [membership, status] = result;
this.queries.getBigSegmentsMembership(keyForBigSegment).then((result) => {
// eslint-disable-next-line no-param-reassign
state.bigSegmentsMembership[keyForBigSegment] = membership;
// eslint-disable-next-line no-param-reassign
state.bigSegmentsStatus = computeUpdatedBigSegmentsStatus(state.bigSegmentsStatus, status);
}
else {
// eslint-disable-next-line no-param-reassign
state.bigSegmentsStatus = computeUpdatedBigSegmentsStatus(state.bigSegmentsStatus, 'NOT_CONFIGURED');
}
/* eslint-enable no-param-reassign */
return this.bigSegmentMatchContext(state.bigSegmentsMembership[keyForBigSegment], segment, context, state);
state.bigSegmentsMembership = state.bigSegmentsMembership || {};
if (result) {
const [membership, status] = result;
// eslint-disable-next-line no-param-reassign
state.bigSegmentsMembership[keyForBigSegment] = membership;
// eslint-disable-next-line no-param-reassign
state.bigSegmentsStatus = computeUpdatedBigSegmentsStatus(state.bigSegmentsStatus, status);
}
else {
// eslint-disable-next-line no-param-reassign
state.bigSegmentsStatus = computeUpdatedBigSegmentsStatus(state.bigSegmentsStatus, 'NOT_CONFIGURED');
}
/* eslint-enable no-param-reassign */
this.bigSegmentMatchContext(state.bigSegmentsMembership[keyForBigSegment], segment, context, state).then(cb);
});
}
async bigSegmentMatchContext(membership, segment, context, state) {
bigSegmentMatchContext(membership, segment, context, state) {
const segmentRef = (0, makeBigSegmentRef_1.default)(segment);
const included = membership === null || membership === void 0 ? void 0 : membership[segmentRef];
// Typically null is not checked because we filter it from the data
// we get in flag updates. Here it is checked because big segment data
// will be contingent on the store that implements it.
if (included !== undefined && included !== null) {
return new Match(included);
}
return this.simpleSegmentMatchContext(segment, context, state, []);
const included = membership?.[segmentRef];
return new Promise((resolve) => {
// Typically null is not checked because we filter it from the data
// we get in flag updates. Here it is checked because big segment data
// will be contingent on the store that implements it.
if (included !== undefined && included !== null) {
resolve(new Match(included));
return;
}
this.simpleSegmentMatchContext(segment, context, state, [], resolve);
});
}

@@ -385,0 +445,0 @@ }

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
function segmentSearch(context, contextTargets, userTargets) {
function segmentSearch(context, contextTargets, userTargets, userTargetSet) {
if (contextTargets) {

@@ -8,3 +8,19 @@ for (let targetIndex = 0; targetIndex < contextTargets.length; targetIndex += 1) {

const key = context.key(target.contextKind);
if (key && target.values.includes(key)) {
if (key) {
if (target.valuesSet) {
// Only check valuesSet if present.
if (target.valuesSet.has(key)) {
return true;
}
}
else if (target.values.includes(key)) {
return true;
}
}
}
}
if (userTargetSet) {
const userKey = context.key('user');
if (userKey) {
if (userTargetSet.has(userKey)) {
return true;

@@ -14,3 +30,3 @@ }

}
if (userTargets) {
else if (userTargets) {
const userKey = context.key('user');

@@ -26,7 +42,7 @@ if (userKey) {

function matchSegmentTargets(segment, context) {
const included = segmentSearch(context, segment.includedContexts, segment.included);
const included = segmentSearch(context, segment.includedContexts, segment.included, segment.includedSet);
if (included) {
return true;
}
const excluded = segmentSearch(context, segment.excludedContexts, segment.excluded);
const excluded = segmentSearch(context, segment.excludedContexts, segment.excluded, segment.excludedSet);
if (excluded) {

@@ -33,0 +49,0 @@ // The match was an exclusion, so it should be negated.

@@ -69,3 +69,3 @@ "use strict";

}
catch (_a) {
catch {
return false;

@@ -100,4 +100,3 @@ }

static execute(op, a, b) {
var _a, _b;
return (_b = (_a = operators[op]) === null || _a === void 0 ? void 0 : _a.call(operators, a, b)) !== null && _b !== void 0 ? _b : false;
return operators[op]?.(a, b) ?? false;
}

@@ -104,0 +103,0 @@ }

@@ -9,2 +9,4 @@ "use strict";

class Reasons {
static Fallthrough = { kind: 'FALLTHROUGH' };
static Off = { kind: 'OFF' };
static prerequisiteFailed(prerequisiteKey) {

@@ -16,7 +18,5 @@ return { kind: 'PREREQUISITE_FAILED', prerequisiteKey };

}
static TargetMatch = { kind: 'TARGET_MATCH' };
}
Reasons.Fallthrough = { kind: 'FALLTHROUGH' };
Reasons.Off = { kind: 'OFF' };
Reasons.TargetMatch = { kind: 'TARGET_MATCH' };
exports.default = Reasons;
//# sourceMappingURL=Reasons.js.map

@@ -44,6 +44,5 @@ "use strict";

function getBucketBy(isExperiment, bucketByAttributeReference) {
var _a;
return (_a = (isExperiment ? undefined : bucketByAttributeReference)) !== null && _a !== void 0 ? _a : KEY_ATTR_REF;
return (isExperiment ? undefined : bucketByAttributeReference) ?? KEY_ATTR_REF;
}
exports.getBucketBy = getBucketBy;
//# sourceMappingURL=variations.js.map

@@ -5,2 +5,4 @@ "use strict";

class ContextDeduplicator {
flushInterval;
contextKeysCache;
constructor(options) {

@@ -7,0 +9,0 @@ this.contextKeysCache = new LruCache_1.default({ max: options.contextKeysCapacity });

@@ -13,2 +13,9 @@ "use strict";

class DiagnosticsManager {
config;
platform;
featureStore;
startTime;
streamInits = [];
id;
dataSinceDate;
constructor(sdkKey, config, platform, featureStore) {

@@ -18,3 +25,2 @@ this.config = config;

this.featureStore = featureStore;
this.streamInits = [];
this.startTime = Date.now();

@@ -32,3 +38,2 @@ this.dataSinceDate = this.startTime;

createInitEvent() {
var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k;
const sdkData = this.platform.info.sdkData();

@@ -59,8 +64,14 @@ const platformData = this.platform.info.platformData();

contextKeysFlushIntervalMillis: secondsToMillis(this.config.contextKeysFlushInterval),
usingProxy: !!((_b = (_a = this.platform.requests).usingProxy) === null || _b === void 0 ? void 0 : _b.call(_a)),
usingProxyAuthenticator: !!((_d = (_c = this.platform.requests).usingProxyAuth) === null || _d === void 0 ? void 0 : _d.call(_c)),
usingProxy: !!this.platform.requests.usingProxy?.(),
usingProxyAuthenticator: !!this.platform.requests.usingProxyAuth?.(),
diagnosticRecordingIntervalMillis: secondsToMillis(this.config.diagnosticRecordingInterval),
dataStoreType: (_g = (_f = (_e = this.featureStore).getDescription) === null || _f === void 0 ? void 0 : _f.call(_e)) !== null && _g !== void 0 ? _g : 'memory',
dataStoreType: this.featureStore.getDescription?.() ?? 'memory',
},
platform: Object.assign({ name: platformData.name, osArch: (_h = platformData.os) === null || _h === void 0 ? void 0 : _h.arch, osName: (_j = platformData.os) === null || _j === void 0 ? void 0 : _j.name, osVersion: (_k = platformData.os) === null || _k === void 0 ? void 0 : _k.version }, (platformData.additional || {})),
platform: {
name: platformData.name,
osArch: platformData.os?.arch,
osName: platformData.os?.name,
osVersion: platformData.os?.version,
...(platformData.additional || {}),
},
};

@@ -67,0 +78,0 @@ }

@@ -9,2 +9,3 @@ "use strict";

class EventFactory {
withReasons;
constructor(withReasons) {

@@ -14,7 +15,6 @@ this.withReasons = withReasons;

evalEvent(flag, context, detail, defaultVal, prereqOfFlag) {
var _a;
const addExperimentData = (0, isExperiment_1.default)(flag, detail.reason);
return new js_sdk_common_1.internal.InputEvalEvent(this.withReasons, context, flag.key, defaultVal, detail, flag.version,
// Exclude null as a possibility.
(_a = detail.variationIndex) !== null && _a !== void 0 ? _a : undefined, flag.trackEvents || addExperimentData, prereqOfFlag === null || prereqOfFlag === void 0 ? void 0 : prereqOfFlag.key, this.withReasons || addExperimentData ? detail.reason : undefined, flag.debugEventsUntilDate);
detail.variationIndex ?? undefined, flag.trackEvents || addExperimentData, prereqOfFlag?.key, this.withReasons || addExperimentData ? detail.reason : undefined, flag.debugEventsUntilDate);
}

@@ -30,3 +30,3 @@ unknownFlagEvent(key, context, detail) {

customEvent(key, context, data, metricValue) {
return new js_sdk_common_1.internal.InputCustomEvent(context, key, data !== null && data !== void 0 ? data : undefined, metricValue !== null && metricValue !== void 0 ? metricValue : undefined);
return new js_sdk_common_1.internal.InputCustomEvent(context, key, data ?? undefined, metricValue ?? undefined);
}

@@ -33,0 +33,0 @@ }

@@ -8,4 +8,11 @@ "use strict";

class EventSender {
defaultHeaders;
eventsUri;
diagnosticEventsUri;
requests;
crypto;
constructor(config, clientContext) {
this.defaultHeaders = Object.assign({}, (0, defaultHeaders_1.default)(clientContext.basicConfiguration.sdkKey, config, clientContext.platform.info));
this.defaultHeaders = {
...(0, defaultHeaders_1.default)(clientContext.basicConfiguration.sdkKey, config, clientContext.platform.info),
};
this.eventsUri = `${clientContext.basicConfiguration.serviceEndpoints.events}/bulk`;

@@ -20,3 +27,6 @@ this.diagnosticEventsUri = `${clientContext.basicConfiguration.serviceEndpoints.events}/diagnostic`;

};
const headers = Object.assign(Object.assign({}, this.defaultHeaders), { 'content-type': 'application/json' });
const headers = {
...this.defaultHeaders,
'content-type': 'application/json',
};
if (payloadId) {

@@ -23,0 +33,0 @@ headers['x-launchdarkly-payload-id'] = payloadId;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class NullEventSource {
handlers = {};
closed = false;
url;
options;
constructor(url, options) {
this.handlers = {};
this.closed = false;
this.url = url;
this.options = options;
}
onclose;
onerror;
onopen;
onretrying;
addEventListener(type, listener) {

@@ -11,0 +17,0 @@ this.handlers[type] = listener;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class FlagsStateBuilder {
valid;
withReasons;
flagValues = {};
flagMetadata = {};
constructor(valid, withReasons) {
this.valid = valid;
this.withReasons = withReasons;
this.flagValues = {};
this.flagMetadata = {};
}

@@ -43,4 +45,8 @@ addFlag(flag, value, variation, reason, trackEvents, trackReason, detailsOnlyIfTracked) {

getFlagValue: (key) => state.flagValues[key],
getFlagReason: (key) => { var _a; return (_a = (state.flagMetadata[key] ? state.flagMetadata[key].reason : null)) !== null && _a !== void 0 ? _a : null; },
toJSON: () => (Object.assign(Object.assign({}, state.flagValues), { $flagsState: state.flagMetadata, $valid: state.valid })),
getFlagReason: (key) => (state.flagMetadata[key] ? state.flagMetadata[key].reason : null) ?? null,
toJSON: () => ({
...state.flagValues,
$flagsState: state.flagMetadata,
$valid: state.valid,
}),
};

@@ -47,0 +53,0 @@ }

@@ -8,2 +8,3 @@ "use strict";

class FileDataSourceFactory {
options;
constructor(options) {

@@ -10,0 +11,0 @@ this.options = options;

@@ -41,8 +41,6 @@ "use strict";

class TestData {
constructor() {
this.currentFlags = {};
this.currentSegments = {};
this.dataSources = [];
this.flagBuilders = {};
}
currentFlags = {};
currentSegments = {};
dataSources = [];
flagBuilders = {};
/**

@@ -139,3 +137,3 @@ * Get a factory for update processors that will be attached to this TestData instance.

const oldItem = this.currentFlags[flagConfig.key];
const newItem = Object.assign(Object.assign({}, flagConfig), { version: oldItem ? oldItem.version + 1 : flagConfig.version });
const newItem = { ...flagConfig, version: oldItem ? oldItem.version + 1 : flagConfig.version };
(0, serialization_1.processFlag)(newItem);

@@ -164,3 +162,6 @@ this.currentFlags[flagConfig.key] = newItem;

const oldItem = this.currentSegments[segmentConfig.key];
const newItem = Object.assign(Object.assign({}, segmentConfig), { version: oldItem ? oldItem.version + 1 : segmentConfig.version });
const newItem = {
...segmentConfig,
version: oldItem ? oldItem.version + 1 : segmentConfig.version,
};
(0, serialization_1.processSegment)(newItem);

@@ -167,0 +168,0 @@ this.currentSegments[segmentConfig.key] = newItem;

@@ -10,2 +10,7 @@ "use strict";

class TestDataFlagBuilder {
key;
data = {
on: true,
variations: [],
};
/**

@@ -16,6 +21,2 @@ * @internal

this.key = key;
this.data = {
on: true,
variations: [],
};
if (data) {

@@ -39,4 +40,3 @@ // Not the fastest way to deep copy, but this is a testing mechanism.

data.rules.forEach((rule) => {
var _a;
(_a = this.data.rules) === null || _a === void 0 ? void 0 : _a.push(rule.clone());
this.data.rules?.push(rule.clone());
});

@@ -43,0 +43,0 @@ }

@@ -19,2 +19,5 @@ "use strict";

class TestDataRuleBuilder {
flagBuilder;
clauses = [];
variation;
/**

@@ -25,3 +28,2 @@ * @internal

this.flagBuilder = flagBuilder;
this.clauses = [];
if (clauses) {

@@ -28,0 +30,0 @@ this.clauses = [...clauses];

@@ -8,2 +8,6 @@ "use strict";

class TestDataSource {
featureStore;
flags;
segments;
onStop;
constructor(featureStore, flags, segments, onStop) {

@@ -17,6 +21,6 @@ this.featureStore = featureStore;

await this.featureStore.init({
[VersionedDataKinds_1.default.Features.namespace]: Object.assign({}, this.flags),
[VersionedDataKinds_1.default.Segments.namespace]: Object.assign({}, this.segments),
[VersionedDataKinds_1.default.Features.namespace]: { ...this.flags },
[VersionedDataKinds_1.default.Segments.namespace]: { ...this.segments },
});
fn === null || fn === void 0 ? void 0 : fn();
fn?.();
}

@@ -23,0 +27,0 @@ stop() {

@@ -19,2 +19,3 @@ import { LDContext, LDEvaluationDetail, Platform } from '@launchdarkly/js-sdk-common';

private featureStore;
private asyncFeatureStore;
private updateProcessor;

@@ -21,0 +22,0 @@ private eventFactoryDefault;

@@ -26,3 +26,3 @@ "use strict";

const Configuration_1 = require("./options/Configuration");
const AsyncStoreFacade_1 = require("./store/AsyncStoreFacade");
const store_1 = require("./store");
const VersionedDataKinds_1 = require("./store/VersionedDataKinds");

@@ -39,9 +39,33 @@ var InitState;

class LDClientImpl {
sdkKey;
platform;
initState = InitState.Initializing;
featureStore;
asyncFeatureStore;
updateProcessor;
eventFactoryDefault = new EventFactory_1.default(false);
eventFactoryWithReasons = new EventFactory_1.default(true);
eventProcessor;
evaluator;
initResolve;
initReject;
initializedPromise;
logger;
config;
bigSegmentsManager;
onError;
onFailed;
onReady;
diagnosticsManager;
/**
* Intended for use by platform specific client implementations.
*
* It is not included in the main interface because it requires the use of
* a platform event system. For node this would be an EventEmitter, for other
* platforms it would likely be an EventTarget.
*/
bigSegmentStatusProviderInternal;
constructor(sdkKey, platform, options, callbacks) {
var _a, _b, _c, _d, _e;
this.sdkKey = sdkKey;
this.platform = platform;
this.initState = InitState.Initializing;
this.eventFactoryDefault = new EventFactory_1.default(false);
this.eventFactoryWithReasons = new EventFactory_1.default(true);
this.onError = callbacks.onError;

@@ -59,2 +83,3 @@ this.onFailed = callbacks.onFailed;

const featureStore = config.featureStoreFactory(clientContext);
this.asyncFeatureStore = new store_1.AsyncStoreFacade(featureStore);
const dataSourceUpdates = new DataSourceUpdates_1.default(featureStore, hasEventListeners, onUpdate);

@@ -72,3 +97,3 @@ if (config.sendEvents && !config.offline && !config.diagnosticOptOut) {

this.updateProcessor =
(_b = (_a = config.updateProcessorFactory) === null || _a === void 0 ? void 0 : _a.call(config, clientContext, dataSourceUpdates)) !== null && _b !== void 0 ? _b : makeDefaultProcessor();
config.updateProcessorFactory?.(clientContext, dataSourceUpdates) ?? makeDefaultProcessor();
}

@@ -81,15 +106,12 @@ if (!config.sendEvents || config.offline) {

}
const asyncFacade = new AsyncStoreFacade_1.default(featureStore);
this.featureStore = asyncFacade;
const manager = new BigSegmentsManager_1.default((_d = (_c = config.bigSegments) === null || _c === void 0 ? void 0 : _c.store) === null || _d === void 0 ? void 0 : _d.call(_c, clientContext), (_e = config.bigSegments) !== null && _e !== void 0 ? _e : {}, config.logger, this.platform.crypto);
this.featureStore = featureStore;
const manager = new BigSegmentsManager_1.default(config.bigSegments?.store?.(clientContext), config.bigSegments ?? {}, config.logger, this.platform.crypto);
this.bigSegmentsManager = manager;
this.bigSegmentStatusProviderInternal = manager.statusProvider;
const queries = {
async getFlag(key) {
var _a;
return (_a = (await asyncFacade.get(VersionedDataKinds_1.default.Features, key))) !== null && _a !== void 0 ? _a : undefined;
getFlag(key, cb) {
featureStore.get(VersionedDataKinds_1.default.Features, key, (item) => cb(item));
},
async getSegment(key) {
var _a;
return (_a = (await asyncFacade.get(VersionedDataKinds_1.default.Segments, key))) !== null && _a !== void 0 ? _a : undefined;
getSegment(key, cb) {
featureStore.get(VersionedDataKinds_1.default.Segments, key, (item) => cb(item));
},

@@ -102,3 +124,2 @@ getBigSegmentsMembership(userKey) {

this.updateProcessor.start((err) => {
var _a, _b;
if (err) {

@@ -114,3 +135,3 @@ let error;

this.onFailed(error);
(_a = this.initReject) === null || _a === void 0 ? void 0 : _a.call(this, error);
this.initReject?.(error);
this.initState = InitState.Failed;

@@ -120,3 +141,3 @@ }

this.initState = InitState.Initialized;
(_b = this.initResolve) === null || _b === void 0 ? void 0 : _b.call(this, this);
this.initResolve?.(this);
this.onReady();

@@ -141,21 +162,23 @@ }

}
async variation(key, context, defaultValue, callback) {
const res = await this.evaluateIfPossible(key, context, defaultValue, this.eventFactoryDefault);
if (!callback) {
return res.detail.value;
}
callback(null, res.detail.value);
return undefined;
variation(key, context, defaultValue, callback) {
return new Promise((resolve) => {
this.evaluateIfPossible(key, context, defaultValue, this.eventFactoryDefault, (res) => {
resolve(res.detail.value);
callback?.(null, res.detail.value);
});
});
}
async variationDetail(key, context, defaultValue, callback) {
const res = await this.evaluateIfPossible(key, context, defaultValue, this.eventFactoryWithReasons);
callback === null || callback === void 0 ? void 0 : callback(null, res.detail);
return res.detail;
variationDetail(key, context, defaultValue, callback) {
return new Promise((resolve) => {
this.evaluateIfPossible(key, context, defaultValue, this.eventFactoryWithReasons, (res) => {
resolve(res.detail);
callback?.(null, res.detail);
});
});
}
async allFlagsState(context, options, callback) {
var _a, _b, _c, _d, _e;
if (this.config.offline) {
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.info('allFlagsState() called in offline mode. Returning empty state.');
this.logger?.info('allFlagsState() called in offline mode. Returning empty state.');
const allFlagState = new FlagsStateBuilder_1.default(false, false).build();
callback === null || callback === void 0 ? void 0 : callback(null, allFlagState);
callback?.(null, allFlagState);
return allFlagState;

@@ -165,3 +188,3 @@ }

if (!evalContext.valid) {
(_b = this.logger) === null || _b === void 0 ? void 0 : _b.info(`${(_c = evalContext.message) !== null && _c !== void 0 ? _c : 'Invalid context.'}. Returning empty state.`);
this.logger?.info(`${evalContext.message ?? 'Invalid context.'}. Returning empty state.`);
return new FlagsStateBuilder_1.default(false, false).build();

@@ -171,9 +194,9 @@ }

if (!this.initialized()) {
const storeInitialized = await this.featureStore.initialized();
const storeInitialized = await this.asyncFeatureStore.initialized();
if (storeInitialized) {
(_d = this.logger) === null || _d === void 0 ? void 0 : _d.warn('Called allFlagsState before client initialization; using last known' +
this.logger?.warn('Called allFlagsState before client initialization; using last known' +
' values from data store');
}
else {
(_e = this.logger) === null || _e === void 0 ? void 0 : _e.warn('Called allFlagsState before client initialization. Data store not available; ' +
this.logger?.warn('Called allFlagsState before client initialization. Data store not available; ' +
'returning empty state');

@@ -183,23 +206,28 @@ valid = false;

}
const builder = new FlagsStateBuilder_1.default(valid, !!(options === null || options === void 0 ? void 0 : options.withReasons));
const clientOnly = !!(options === null || options === void 0 ? void 0 : options.clientSideOnly);
const detailsOnlyIfTracked = !!(options === null || options === void 0 ? void 0 : options.detailsOnlyForTrackedFlags);
const allFlags = await this.featureStore.all(VersionedDataKinds_1.default.Features);
await (0, collection_1.allSeriesAsync)(Object.values(allFlags), async (storeItem) => {
var _a;
const flag = storeItem;
if (clientOnly && !flag.clientSide) {
return true;
}
const res = await this.evaluator.evaluate(flag, evalContext);
if (res.isError) {
this.onError(new Error(`Error for feature flag "${flag.key}" while evaluating all flags: ${res.message}`));
}
const requireExperimentData = (0, isExperiment_1.default)(flag, res.detail.reason);
builder.addFlag(flag, res.detail.value, (_a = res.detail.variationIndex) !== null && _a !== void 0 ? _a : undefined, res.detail.reason, flag.trackEvents || requireExperimentData, requireExperimentData, detailsOnlyIfTracked);
return true;
const builder = new FlagsStateBuilder_1.default(valid, !!options?.withReasons);
const clientOnly = !!options?.clientSideOnly;
const detailsOnlyIfTracked = !!options?.detailsOnlyForTrackedFlags;
return new Promise((resolve) => {
this.featureStore.all(VersionedDataKinds_1.default.Features, (allFlags) => {
(0, collection_1.allSeriesAsync)(Object.values(allFlags), async (storeItem, _index, innerCB) => {
const flag = storeItem;
if (clientOnly && !flag.clientSide) {
innerCB(true);
return;
}
this.evaluator.evaluateCb(flag, evalContext, (res) => {
if (res.isError) {
this.onError(new Error(`Error for feature flag "${flag.key}" while evaluating all flags: ${res.message}`));
}
const requireExperimentData = (0, isExperiment_1.default)(flag, res.detail.reason);
builder.addFlag(flag, res.detail.value, res.detail.variationIndex ?? undefined, res.detail.reason, flag.trackEvents || requireExperimentData, requireExperimentData, detailsOnlyIfTracked);
innerCB(true);
});
}, () => {
const res = builder.build();
callback?.(null, res);
resolve(res);
});
});
});
const res = builder.build();
callback === null || callback === void 0 ? void 0 : callback(null, res);
return res;
}

@@ -226,6 +254,5 @@ secureModeHash(context) {

track(key, context, data, metricValue) {
var _a;
const checkedContext = js_sdk_common_1.Context.fromLDContext(context);
if (!checkedContext.valid) {
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.warn(ClientMessages_1.default.missingContextKeyNoEvent);
this.logger?.warn(ClientMessages_1.default.missingContextKeyNoEvent);
return;

@@ -236,6 +263,5 @@ }

identify(context) {
var _a;
const checkedContext = js_sdk_common_1.Context.fromLDContext(context);
if (!checkedContext.valid) {
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.warn(ClientMessages_1.default.missingContextKeyNoEvent);
this.logger?.warn(ClientMessages_1.default.missingContextKeyNoEvent);
return;

@@ -250,50 +276,57 @@ }

catch (err) {
callback === null || callback === void 0 ? void 0 : callback(err, false);
callback?.(err, false);
}
callback === null || callback === void 0 ? void 0 : callback(null, true);
callback?.(null, true);
}
async variationInternal(flagKey, context, defaultValue, eventFactory) {
var _a, _b, _c, _d;
variationInternal(flagKey, context, defaultValue, eventFactory, cb) {
if (this.config.offline) {
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.info('Variation called in offline mode. Returning default value.');
return EvalResult_1.default.forError(ErrorKinds_1.default.ClientNotReady, undefined, defaultValue);
this.logger?.info('Variation called in offline mode. Returning default value.');
cb(EvalResult_1.default.forError(ErrorKinds_1.default.ClientNotReady, undefined, defaultValue));
return;
}
const evalContext = js_sdk_common_1.Context.fromLDContext(context);
if (!evalContext.valid) {
this.onError(new errors_1.LDClientError(`${(_b = evalContext.message) !== null && _b !== void 0 ? _b : 'Context not valid;'} returning default value.`));
return EvalResult_1.default.forError(ErrorKinds_1.default.UserNotSpecified, undefined, defaultValue);
this.onError(new errors_1.LDClientError(`${evalContext.message ?? 'Context not valid;'} returning default value.`));
cb(EvalResult_1.default.forError(ErrorKinds_1.default.UserNotSpecified, undefined, defaultValue));
return;
}
const flag = (await this.featureStore.get(VersionedDataKinds_1.default.Features, flagKey));
if (!flag) {
const error = new errors_1.LDClientError(`Unknown feature flag "${flagKey}"; returning default value`);
this.onError(error);
const result = EvalResult_1.default.forError(ErrorKinds_1.default.FlagNotFound, undefined, defaultValue);
this.eventProcessor.sendEvent(this.eventFactoryDefault.unknownFlagEvent(flagKey, evalContext, result.detail));
return result;
}
const evalRes = await this.evaluator.evaluate(flag, evalContext, eventFactory);
if (evalRes.detail.variationIndex === undefined || evalRes.detail.variationIndex === null) {
(_c = this.logger) === null || _c === void 0 ? void 0 : _c.debug('Result value is null in variation');
evalRes.setDefault(defaultValue);
}
(_d = evalRes.events) === null || _d === void 0 ? void 0 : _d.forEach((event) => {
this.eventProcessor.sendEvent(event);
this.featureStore.get(VersionedDataKinds_1.default.Features, flagKey, (item) => {
const flag = item;
if (!flag) {
const error = new errors_1.LDClientError(`Unknown feature flag "${flagKey}"; returning default value`);
this.onError(error);
const result = EvalResult_1.default.forError(ErrorKinds_1.default.FlagNotFound, undefined, defaultValue);
this.eventProcessor.sendEvent(this.eventFactoryDefault.unknownFlagEvent(flagKey, evalContext, result.detail));
cb(result);
return;
}
this.evaluator.evaluateCb(flag, evalContext, (evalRes) => {
if (evalRes.detail.variationIndex === undefined ||
evalRes.detail.variationIndex === null) {
this.logger?.debug('Result value is null in variation');
evalRes.setDefault(defaultValue);
}
evalRes.events?.forEach((event) => {
this.eventProcessor.sendEvent(event);
});
this.eventProcessor.sendEvent(eventFactory.evalEvent(flag, evalContext, evalRes.detail, defaultValue));
cb(evalRes);
}, eventFactory);
});
this.eventProcessor.sendEvent(eventFactory.evalEvent(flag, evalContext, evalRes.detail, defaultValue));
return evalRes;
}
async evaluateIfPossible(flagKey, context, defaultValue, eventFactory) {
var _a, _b;
evaluateIfPossible(flagKey, context, defaultValue, eventFactory, cb) {
if (!this.initialized()) {
const storeInitialized = await this.featureStore.initialized();
if (storeInitialized) {
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.warn('Variation called before LaunchDarkly client initialization completed' +
" (did you wait for the 'ready' event?) - using last known values from feature store");
return this.variationInternal(flagKey, context, defaultValue, eventFactory);
}
(_b = this.logger) === null || _b === void 0 ? void 0 : _b.warn('Variation called before LaunchDarkly client initialization completed (did you wait for the' +
"'ready' event?) - using default value");
return EvalResult_1.default.forError(ErrorKinds_1.default.ClientNotReady, undefined, defaultValue);
this.featureStore.initialized((storeInitialized) => {
if (storeInitialized) {
this.logger?.warn('Variation called before LaunchDarkly client initialization completed' +
" (did you wait for the 'ready' event?) - using last known values from feature store");
this.variationInternal(flagKey, context, defaultValue, eventFactory, cb);
return;
}
this.logger?.warn('Variation called before LaunchDarkly client initialization completed (did you wait for the' +
"'ready' event?) - using default value");
cb(EvalResult_1.default.forError(ErrorKinds_1.default.ClientNotReady, undefined, defaultValue));
});
}
return this.variationInternal(flagKey, context, defaultValue, eventFactory);
this.variationInternal(flagKey, context, defaultValue, eventFactory, cb);
}

@@ -300,0 +333,0 @@ }

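Note on the client refactor above: variation() and variationDetail() now route evaluation through an internal callback (variationInternal / evaluateIfPossible take a cb parameter), but they still resolve a Promise and still invoke the optional node-style callback. A minimal usage sketch, assuming a constructed LDClient instance named `client` and a standard user context — both are placeholders, not taken from this diff:

// Hedged sketch: both call styles remain supported after the callback-based refactor.
async function demo(client) {
  // Promise style - resolves with the evaluated value (or the supplied default).
  const value = await client.variation('my-flag-key', { kind: 'user', key: 'user-123' }, false);

  // Callback style - the callback receives (null, value) once evaluation completes.
  client.variation('my-flag-key', { kind: 'user', key: 'user-123' }, false, (err, sameValue) => {
    console.log(err, sameValue);
  });
  return value;
}
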
@@ -69,5 +69,4 @@ "use strict";

const errors = [];
const validatedOptions = Object.assign({}, exports.defaultValues);
const validatedOptions = { ...exports.defaultValues };
Object.keys(options).forEach((optionName) => {
var _a;
// We need to tell typescript it doesn't actually know what options are.

@@ -99,3 +98,3 @@ // If we don't then it complains we are doing crazy things with it.

else {
(_a = options.logger) === null || _a === void 0 ? void 0 : _a.warn(js_sdk_common_1.OptionMessages.unknownOption(optionName));
options.logger?.warn(js_sdk_common_1.OptionMessages.unknownOption(optionName));
}

@@ -106,3 +105,2 @@ });

function validateEndpoints(options, validatedOptions) {
var _a, _b, _c;
const { baseUri, streamUri, eventsUri } = options;

@@ -118,9 +116,9 @@ const streamingEndpointSpecified = streamUri !== undefined && streamUri !== null;

if (!streamingEndpointSpecified && validatedOptions.stream) {
(_a = validatedOptions.logger) === null || _a === void 0 ? void 0 : _a.warn(js_sdk_common_1.OptionMessages.partialEndpoint('streamUri'));
validatedOptions.logger?.warn(js_sdk_common_1.OptionMessages.partialEndpoint('streamUri'));
}
if (!pollingEndpointSpecified) {
(_b = validatedOptions.logger) === null || _b === void 0 ? void 0 : _b.warn(js_sdk_common_1.OptionMessages.partialEndpoint('baseUri'));
validatedOptions.logger?.warn(js_sdk_common_1.OptionMessages.partialEndpoint('baseUri'));
}
if (!eventEndpointSpecified && validatedOptions.sendEvents) {
(_c = validatedOptions.logger) === null || _c === void 0 ? void 0 : _c.warn(js_sdk_common_1.OptionMessages.partialEndpoint('eventsUri'));
validatedOptions.logger?.warn(js_sdk_common_1.OptionMessages.partialEndpoint('eventsUri'));
}

@@ -134,2 +132,27 @@ }

class Configuration {
serviceEndpoints;
eventsCapacity;
timeout;
logger;
flushInterval;
pollInterval;
proxyOptions;
offline;
stream;
streamInitialReconnectDelay;
useLdd;
sendEvents;
allAttributesPrivate;
privateAttributes;
contextKeysCapacity;
contextKeysFlushInterval;
tlsParams;
diagnosticOptOut;
wrapperName;
wrapperVersion;
tags;
diagnosticRecordingInterval;
featureStoreFactory;
updateProcessorFactory;
bigSegments;
constructor(options = {}) {

@@ -144,4 +167,3 @@ // The default will handle undefined, but not null.

errors.forEach((error) => {
var _a;
(_a = this.logger) === null || _a === void 0 ? void 0 : _a.warn(error);
this.logger?.warn(error);
});

@@ -148,0 +170,0 @@ validateEndpoints(options, validatedOptions);

@@ -11,2 +11,3 @@ "use strict";

class AsyncStoreFacade {
store;
constructor(store) {

@@ -13,0 +14,0 @@ this.store = store;

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class InMemoryFeatureStore {
constructor() {
this.allData = {};
this.initCalled = false;
}
allData = {};
initCalled = false;
addItem(kind, key, item) {

@@ -30,12 +28,11 @@ let items = this.allData[kind.namespace];

if (item && !item.deleted) {
return callback === null || callback === void 0 ? void 0 : callback(item);
return callback?.(item);
}
}
}
return callback === null || callback === void 0 ? void 0 : callback(null);
return callback?.(null);
}
all(kind, callback) {
var _a;
const result = {};
const items = (_a = this.allData[kind.namespace]) !== null && _a !== void 0 ? _a : {};
const items = this.allData[kind.namespace] ?? {};
Object.entries(items).forEach(([key, item]) => {

@@ -46,3 +43,3 @@ if (item && !item.deleted) {

});
callback === null || callback === void 0 ? void 0 : callback(result);
callback?.(result);
}

@@ -52,3 +49,3 @@ init(allData, callback) {

this.allData = allData;
callback === null || callback === void 0 ? void 0 : callback();
callback?.();
}

@@ -58,10 +55,10 @@ delete(kind, key, version, callback) {

this.addItem(kind, key, deletedItem);
callback === null || callback === void 0 ? void 0 : callback();
callback?.();
}
upsert(kind, data, callback) {
this.addItem(kind, data.key, data);
callback === null || callback === void 0 ? void 0 : callback();
callback?.();
}
initialized(callback) {
return callback === null || callback === void 0 ? void 0 : callback(this.initCalled);
return callback?.(this.initCalled);
}

@@ -68,0 +65,0 @@ /* eslint-disable class-methods-use-this */

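The InMemoryFeatureStore diff above moves its state to class-field syntax while keeping the callback-based store contract (get, all, init, upsert, delete, initialized each take a completion callback). A minimal sketch of that contract, assuming InMemoryFeatureStore and VersionedDataKinds are already in scope (import paths omitted intentionally):

// Hedged sketch of the callback-style store contract shown in this diff.
const store = new InMemoryFeatureStore();
store.init({ features: {}, segments: {} }, () => {
  store.upsert(VersionedDataKinds.Features, { key: 'my-flag', version: 1 }, () => {
    store.get(VersionedDataKinds.Features, 'my-flag', (item) => {
      // item is the stored flag, or null when the key is missing or marked deleted.
      console.log(item);
    });
    store.initialized((ready) => console.log('store initialized:', ready)); // true after init()
  });
});
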
@@ -66,9 +66,18 @@ "use strict";

class PersistentDataStoreWrapper {
core;
isInitialized = false;
/**
* Cache for storing individual items.
*/
itemCache;
/**
* Cache for storing all items of a type.
*/
allItemsCache;
/**
* Used to preserve order of operations of async requests.
*/
queue = new UpdateQueue_1.default();
constructor(core, ttl) {
this.core = core;
this.isInitialized = false;
/**
* Used to preserve order of operations of async requests.
*/
this.queue = new UpdateQueue_1.default();
if (ttl) {

@@ -121,6 +130,5 @@ this.itemCache = new TtlCache_1.default({

this.core.get(persistKind, key, (descriptor) => {
var _a;
if (descriptor && descriptor.serializedItem) {
const value = deserialize(persistKind, descriptor);
(_a = this.itemCache) === null || _a === void 0 ? void 0 : _a.set(cacheKey(kind, key), value);
this.itemCache?.set(cacheKey(kind, key), value);
callback(itemIfNotDeleted(value));

@@ -132,7 +140,6 @@ }

initialized(callback) {
var _a;
if (this.isInitialized) {
callback(true);
}
else if ((_a = this.itemCache) === null || _a === void 0 ? void 0 : _a.get(initializationCheckedKey)) {
else if (this.itemCache?.get(initializationCheckedKey)) {
callback(false);

@@ -142,6 +149,5 @@ }

this.core.initialized((storeInitialized) => {
var _a;
this.isInitialized = storeInitialized;
if (!this.isInitialized) {
(_a = this.itemCache) === null || _a === void 0 ? void 0 : _a.set(initializationCheckedKey, true);
this.itemCache?.set(initializationCheckedKey, true);
}

@@ -153,4 +159,3 @@ callback(this.isInitialized);

all(kind, callback) {
var _a;
const items = (_a = this.allItemsCache) === null || _a === void 0 ? void 0 : _a.get(allForKindCacheKey(kind));
const items = this.allItemsCache?.get(allForKindCacheKey(kind));
if (items) {

@@ -162,3 +167,2 @@ callback(items);

this.core.getAll(persistKind, (storeItems) => {
var _a;
if (!storeItems) {

@@ -176,3 +180,3 @@ callback({});

});
(_a = this.allItemsCache) === null || _a === void 0 ? void 0 : _a.set(allForKindCacheKey(kind), filteredItems);
this.allItemsCache?.set(allForKindCacheKey(kind), filteredItems);
callback(filteredItems);

@@ -189,11 +193,10 @@ });

this.core.upsert(persistKind, data.key, persistKind.serialize(data), (err, updatedDescriptor) => {
var _a, _b;
if (!err && updatedDescriptor) {
if (updatedDescriptor.serializedItem) {
const value = deserialize(persistKind, updatedDescriptor);
(_a = this.itemCache) === null || _a === void 0 ? void 0 : _a.set(cacheKey(kind, data.key), value);
this.itemCache?.set(cacheKey(kind, data.key), value);
}
else if (updatedDescriptor.deleted) {
// Deleted and there was not a serialized representation.
(_b = this.itemCache) === null || _b === void 0 ? void 0 : _b.set(data.key, {
this.itemCache?.set(data.key, {
key: data.key,

@@ -213,5 +216,4 @@ version: updatedDescriptor.version,

close() {
var _a, _b;
(_a = this.itemCache) === null || _a === void 0 ? void 0 : _a.close();
(_b = this.allItemsCache) === null || _b === void 0 ? void 0 : _b.close();
this.itemCache?.close();
this.allItemsCache?.close();
this.core.close();

@@ -218,0 +220,0 @@ }

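The PersistentDataStoreWrapper changes above likewise move its state to class fields: an optional itemCache and allItemsCache sit in front of the persistent core, and an UpdateQueue preserves write ordering. A rough sketch of fronting a core store with the wrapper, assuming PersistentDataStoreWrapper is in scope; the fake core below is a made-up stand-in implementing only what this sketch touches, and the second constructor argument is simply forwarded to the TTL cache (the value here is illustrative):

// Hedged sketch - fakeCore is not a real integration, just the minimal callback surface.
const fakeCore = {
  get: (kind, key, cb) => cb(null),        // nothing persisted in this sketch
  getAll: (kind, cb) => cb({}),
  upsert: (kind, key, item, cb) => cb(null, item),
  initialized: (cb) => cb(false),
  close: () => {},
};
const wrapper = new PersistentDataStoreWrapper(fakeCore, 30);
wrapper.initialized((ready) => console.log(ready)); // false - the fake core reports uninitialized
wrapper.get(VersionedDataKinds.Features, 'my-flag', (item) => {
  // item is undefined here because the fake core returns no descriptor; with a real core,
  // the deserialized value would be kept in itemCache for repeat reads.
  console.log(item);
});
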
@@ -40,2 +40,26 @@ "use strict";

}
if (value.includedSet) {
value.included = [...value.includedSet];
delete value.includedSet;
}
if (value.excludedSet) {
value.excluded = [...value.excludedSet];
delete value.excludedSet;
}
if (value.includedContexts) {
value.includedContexts.forEach((target) => {
if (target.valuesSet) {
target.values = [...target.valuesSet];
}
delete target.valuesSet;
});
}
if (value.excludedContexts) {
value.excludedContexts.forEach((target) => {
if (target.valuesSet) {
target.values = [...target.valuesSet];
}
delete target.valuesSet;
});
}
return value;

@@ -53,3 +77,2 @@ }

function processFlag(flag) {
var _a;
if (flag.fallthrough && flag.fallthrough.rollout) {

@@ -59,6 +82,5 @@ const rollout = flag.fallthrough.rollout;

}
(_a = flag === null || flag === void 0 ? void 0 : flag.rules) === null || _a === void 0 ? void 0 : _a.forEach((rule) => {
var _a;
flag?.rules?.forEach((rule) => {
processRollout(rule.rollout);
(_a = rule === null || rule === void 0 ? void 0 : rule.clauses) === null || _a === void 0 ? void 0 : _a.forEach((clause) => {
rule?.clauses?.forEach((clause) => {
if (clause && clause.attribute) {

@@ -80,5 +102,27 @@ // Clauses before U2C would have had literals for attributes.

function processSegment(segment) {
var _a;
(_a = segment === null || segment === void 0 ? void 0 : segment.rules) === null || _a === void 0 ? void 0 : _a.forEach((rule) => {
var _a;
if (segment?.included?.length && segment.included.length > 100) {
segment.includedSet = new Set(segment.included);
delete segment.included;
}
if (segment?.excluded?.length && segment.excluded.length > 100) {
segment.excludedSet = new Set(segment.excluded);
delete segment.excluded;
}
if (segment?.includedContexts?.length && segment.includedContexts.length > 100) {
segment.includedContexts.forEach((target) => {
if (target?.values?.length && target.values.length > 100) {
target.valuesSet = new Set(target.values);
target.values = [];
}
});
}
if (segment?.excludedContexts?.length && segment.excludedContexts.length > 100) {
segment.excludedContexts.forEach((target) => {
if (target?.values?.length && target.values.length > 100) {
target.valuesSet = new Set(target.values);
target.values = [];
}
});
}
segment?.rules?.forEach((rule) => {
if (rule.bucketBy) {

@@ -89,3 +133,3 @@ // Rules before U2C would have had literals for attributes.

}
(_a = rule === null || rule === void 0 ? void 0 : rule.clauses) === null || _a === void 0 ? void 0 : _a.forEach((clause) => {
rule?.clauses?.forEach((clause) => {
if (clause && clause.attribute) {

@@ -107,3 +151,3 @@ // Clauses before U2C would have had literals for attributes.

}
catch (_a) {
catch {
return undefined;

@@ -116,3 +160,2 @@ }

function deserializeAll(data) {
var _a, _b;
// The reviver lacks the context of where a different key exists, being as it

@@ -128,6 +171,6 @@ // starts at the deepest level and works outward. As a result attributes are

}
Object.values(((_a = parsed === null || parsed === void 0 ? void 0 : parsed.data) === null || _a === void 0 ? void 0 : _a.flags) || []).forEach((flag) => {
Object.values(parsed?.data?.flags || []).forEach((flag) => {
processFlag(flag);
});
Object.values(((_b = parsed === null || parsed === void 0 ? void 0 : parsed.data) === null || _b === void 0 ? void 0 : _b.segments) || []).forEach((segment) => {
Object.values(parsed?.data?.segments || []).forEach((segment) => {
processSegment(segment);

@@ -151,6 +194,6 @@ });

}
Object.values((parsed === null || parsed === void 0 ? void 0 : parsed.flags) || []).forEach((flag) => {
Object.values(parsed?.flags || []).forEach((flag) => {
processFlag(flag);
});
Object.values((parsed === null || parsed === void 0 ? void 0 : parsed.segments) || []).forEach((segment) => {
Object.values(parsed?.segments || []).forEach((segment) => {
processSegment(segment);

@@ -157,0 +200,0 @@ });

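The serialization changes above introduce a size threshold: when a segment's included/excluded lists (or a context target's values) exceed 100 entries, processSegment replaces the array with a Set (includedSet/excludedSet/valuesSet) so membership checks become hash lookups instead of linear scans, and the matching block earlier in this file's diff converts the Sets back to plain arrays when the data is written back out. A small illustration, assuming processSegment is in scope; the segment object is made up:

// Hedged illustration of the >100 entry threshold used by processSegment.
const segment = {
  included: Array.from({ length: 150 }, (_, i) => `user-${i}`),
  excluded: ['user-a'], // 1 entry - stays an array
};
processSegment(segment);
console.log(segment.includedSet instanceof Set); // true - large list converted for O(1) lookups
console.log(segment.included);                   // undefined - the array form was removed
console.log(segment.excluded);                   // ['user-a'] - small list left untouched
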
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class UpdateQueue {
constructor() {
this.queue = [];
}
queue = [];
enqueue(updateFn, cb) {

@@ -28,3 +26,3 @@ this.queue.push([updateFn, cb]);

// Call the original callback.
cb === null || cb === void 0 ? void 0 : cb();
cb?.();
};

@@ -31,0 +29,0 @@ fn(newCb);

"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
class VersionedDataKinds {
static Features = {
namespace: 'features',
streamApiPath: '/flags/',
requestPath: '/sdk/latest-flags/',
};
static Segments = {
namespace: 'segments',
streamApiPath: '/segments/',
requestPath: '/sdk/latest-segments/',
};
}
VersionedDataKinds.Features = {
namespace: 'features',
streamApiPath: '/flags/',
requestPath: '/sdk/latest-flags/',
};
VersionedDataKinds.Segments = {
namespace: 'segments',
streamApiPath: '/segments/',
requestPath: '/sdk/latest-segments/',
};
exports.default = VersionedDataKinds;
//# sourceMappingURL=VersionedDataKinds.js.map
{
"name": "@launchdarkly/js-server-sdk-common",
"version": "1.0.8",
"version": "1.0.9-beta.1",
"type": "commonjs",

@@ -5,0 +5,0 @@ "main": "./dist/index.js",
