diff --git a/packages/optimizely-sdk/lib/core/odp/lru_cache/CacheElement.tests.ts b/packages/optimizely-sdk/lib/core/odp/lru_cache/CacheElement.tests.ts deleted file mode 100644 index 728026c7f..000000000 --- a/packages/optimizely-sdk/lib/core/odp/lru_cache/CacheElement.tests.ts +++ /dev/null @@ -1,53 +0,0 @@ -/** - * Copyright 2022, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import { assert } from 'chai' -import { CacheElement } from './CacheElement'; - -const sleep = async (ms: number) => { - return await new Promise(r => setTimeout(r, ms)) -} - -describe('/odp/lru_cache/CacheElement', () => { - let element: CacheElement - - beforeEach(() => { - element = new CacheElement('foo') - }) - - it('should initialize a valid CacheElement', () => { - assert.exists(element) - assert.equal(element.value, 'foo') - assert.isNotNull(element.time) - assert.doesNotThrow(() => element.is_stale(0)) - }) - - it('should return false if not stale based on timeout', () => { - const timeoutLong = 1000 - assert.equal(element.is_stale(timeoutLong), false) - }) - - it('should return false if not stale because timeout is less than or equal to 0', () => { - const timeoutNone = 0 - assert.equal(element.is_stale(timeoutNone), false) - }) - - it('should return true if stale based on timeout', async () => { - await sleep(100) - const timeoutShort = 1 - assert.equal(element.is_stale(timeoutShort), true) - }) -}) \ No newline at end of file diff --git a/packages/optimizely-sdk/lib/core/odp/lru_cache/LRUCache.tests.ts b/packages/optimizely-sdk/lib/core/odp/lru_cache/LRUCache.tests.ts deleted file mode 100644 index 9f888e598..000000000 --- a/packages/optimizely-sdk/lib/core/odp/lru_cache/LRUCache.tests.ts +++ /dev/null @@ -1,307 +0,0 @@ -/** - * Copyright 2022, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
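An illustrative sketch of the staleness contract those CacheElement tests exercise (not part of the diff itself; the import path assumes the file's eventual lib/utils/lru_cache location):

import { CacheElement } from './cache_element';

const element = new CacheElement('foo');

// A non-positive timeout means the element can never become stale.
console.log(element.is_stale(0));    // false
console.log(element.is_stale(-100)); // false

// With a positive timeout (milliseconds), the element is stale once
// Date.now() - element.time >= timeout.
setTimeout(() => {
  console.log(element.is_stale(50));     // true: roughly 100 ms have elapsed
  console.log(element.is_stale(60_000)); // false: still well under a minute old
}, 100);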
- */ - -import { assert } from 'chai' -import { LRUCache, ClientLRUCache, ServerLRUCache } from './LRUCache' - -const sleep = async (ms: number) => { - return await new Promise(r => setTimeout(r, ms)) -} - -describe('/lib/core/odp/lru_cache (Default)', () => { - let cache: LRUCache; - - describe('LRU Cache > Initialization', () => { - it('should successfully create a new cache with maxSize > 0 and timeout > 0', () => { - cache = new LRUCache({ - maxSize: 1000, - timeout: 2000 - }) - - assert.exists(cache) - - assert.equal(cache.maxSize, 1000) - assert.equal(cache.timeout, 2000) - }) - - it('should successfully create a new cache with maxSize == 0 and timeout == 0', () => { - cache = new LRUCache({ - maxSize: 0, - timeout: 0 - }) - - assert.exists(cache) - - assert.equal(cache.maxSize, 0) - assert.equal(cache.timeout, 0) - }) - }) - - describe('LRU Cache > Save & Lookup', () => { - const maxCacheSize = 2 - - beforeEach(() => { - cache = new LRUCache({ - maxSize: maxCacheSize, - timeout: 1000 - }) - }) - - it('should have no values in the cache upon initialization', () => { - assert.isNull(cache.peek(1)) - }) - - it('should save keys and values of any valid type', () => { - cache.save({ key: 'a', value: 1 }) // { a: 1 } - assert.equal(cache.peek('a'), 1) - - cache.save({ key: 2, value: 'b' }) // { a: 1, 2: 'b' } - assert.equal(cache.peek(2), 'b') - - const foo = Symbol('foo') - const bar = {} - cache.save({ key: foo, value: bar }) // { 2: 'b', Symbol('foo'): {} } - assert.deepEqual({}, cache.peek(foo)) - }) - - it('should save values up to its maxSize', () => { - cache.save({ key: 'a', value: 1 }) // { a: 1 } - assert.equal(cache.peek('a'), 1) - - cache.save({ key: 'b', value: 2 }) // { a: 1, b: 2 } - assert.equal(cache.peek('a'), 1) - assert.equal(cache.peek('b'), 2) - - cache.save({ key: 'c', value: 3 }) // { b: 2, c: 3 } - assert.equal(cache.peek('a'), null) - assert.equal(cache.peek('b'), 2) - assert.equal(cache.peek('c'), 3) - }) - - it('should override values of matching keys when saving', () => { - cache.save({ key: 'a', value: 1 }) // { a: 1 } - assert.equal(cache.peek('a'), 1) - - cache.save({ key: 'a', value: 2 }) // { a: 2 } - assert.equal(cache.peek('a'), 2) - - cache.save({ key: 'a', value: 3 }) // { a: 3 } - assert.equal(cache.peek('a'), 3) - }) - - it('should update cache accordingly when using lookup/peek', () => { - assert.isNull(cache.lookup(3)) - - cache.save({ key: 'b', value: 201 }) // { b: 201 } - cache.save({ key: 'a', value: 101 }) // { b: 201, a: 101 } - - assert.equal(cache.lookup('b'), 201) // { a: 101, b: 201 } - - cache.save({ key: 'c', value: 302 }) // { b: 201, c: 302 } - - assert.isNull(cache.peek(1)) - assert.equal(cache.peek('b'), 201) - assert.equal(cache.peek('c'), 302) - assert.equal(cache.lookup('c'), 302) // { b: 201, c: 302 } - - cache.save({ key: 'a', value: 103 }) // { c: 302, a: 103 } - assert.equal(cache.peek('a'), 103) - assert.isNull(cache.peek('b')) - assert.equal(cache.peek('c'), 302) - }) - }) - - describe('LRU Cache > Size', () => { - it('should keep LRU Cache map size capped at cache.capacity', () => { - const maxCacheSize = 2 - - cache = new LRUCache({ - maxSize: maxCacheSize, - timeout: 1000 - }) - - cache.save({ key: 'a', value: 1 }) // { a: 1 } - cache.save({ key: 'b', value: 2 }) // { a: 1, b: 2 } - - assert.equal(cache.map.size, maxCacheSize) - assert.equal(cache.map.size, cache.maxSize) - }) - - it('should not save to cache if maxSize is 0', () => { - cache = new LRUCache({ - maxSize: 0, - timeout: 1000 - }) - - 
assert.isNull(cache.lookup('a')) - cache.save({ key: 'a', value: 100 }) - assert.isNull(cache.lookup('a')) - }) - - it('should not save to cache if maxSize is negative', () => { - cache = new LRUCache({ - maxSize: -500, - timeout: 1000 - }) - - assert.isNull(cache.lookup('a')) - cache.save({ key: 'a', value: 100 }) - assert.isNull(cache.lookup('a')) - }) - }) - - describe('LRU Cache > Timeout', () => { - it('should discard stale entries in the cache on peek/lookup when timeout is greater than 0', async () => { - const maxTimeout = 100 - - cache = new LRUCache({ - maxSize: 1000, - timeout: maxTimeout - }) - - cache.save({ key: 'a', value: 100 }) // { a: 100 } - cache.save({ key: 'b', value: 200 }) // { a: 100, b: 200 } - cache.save({ key: 'c', value: 300 }) // { a: 100, b: 200, c: 300 } - - assert.equal(cache.peek('a'), 100) - assert.equal(cache.peek('b'), 200) - assert.equal(cache.peek('c'), 300) - - await sleep(150) - - assert.isNull(cache.lookup('a')) - assert.isNull(cache.lookup('b')) - assert.isNull(cache.lookup('c')) - - cache.save({ key: 'd', value: 400 }) // { d: 400 } - cache.save({ key: 'a', value: 101 }) // { d: 400, a: 101 } - - assert.equal(cache.lookup('a'), 101) // { d: 400, a: 101 } - assert.equal(cache.lookup('d'), 400) // { a: 101, d: 400 } - }) - - it('should never have stale entries if timeout is 0', async () => { - const maxTimeout = 0 - - cache = new LRUCache({ - maxSize: 1000, - timeout: maxTimeout - }) - - cache.save({ key: 'a', value: 100 }) // { a: 100 } - cache.save({ key: 'b', value: 200 }) // { a: 100, b: 200 } - - await sleep(100) - assert.equal(cache.lookup('a'), 100) - assert.equal(cache.lookup('b'), 200) - }) - - it('should never have stale entries if timeout is less than 0', async () => { - const maxTimeout = -500 - - cache = new LRUCache({ - maxSize: 1000, - timeout: maxTimeout - }) - - cache.save({ key: 'a', value: 100 }) // { a: 100 } - cache.save({ key: 'b', value: 200 }) // { a: 100, b: 200 } - - await sleep(100) - assert.equal(cache.lookup('a'), 100) - assert.equal(cache.lookup('b'), 200) - }) - }) - - describe('LRU Cache > Reset', () => { - it('should be able to reset the cache', async () => { - cache = new LRUCache({ maxSize: 2, timeout: 100 }) - cache.save({ key: 'a', value: 100 }) // { a: 100 } - cache.save({ key: 'b', value: 200 }) // { a: 100, b: 200 } - - await sleep(0) - - assert.equal(cache.map.size, 2) - cache.reset() // { } - - await sleep(150) - - assert.equal(cache.map.size, 0) - - it('should be fully functional after resetting the cache', () => { - cache.save({ key: 'c', value: 300 }) // { c: 300 } - cache.save({ key: 'd', value: 400 }) // { c: 300, d: 400 } - assert.isNull(cache.peek('b')) - assert.equal(cache.peek('c'), 300) - assert.equal(cache.peek('d'), 400) - - cache.save({ key: 'a', value: 500 }) // { d: 400, a: 500 } - cache.save({ key: 'b', value: 600 }) // { a: 500, b: 600 } - assert.isNull(cache.peek('c')) - assert.equal(cache.peek('a'), 500) - assert.equal(cache.peek('b'), 600) - - const _ = cache.lookup('a') // { b: 600, a: 500 } - assert.equal(500, _) - - cache.save({ key: 'c', value: 700 }) // { a: 500, c: 700 } - assert.isNull(cache.peek('b')) - assert.equal(cache.peek('a'), 500) - assert.equal(cache.peek('c'), 700) - }) - }) - }) -}) - -describe('/lib/core/odp/lru_cache (Client)', () => { - let cache: ClientLRUCache; - - it('should create and test the default client LRU Cache', () => { - cache = new ClientLRUCache() - assert.exists(cache) - assert.isNull(cache.lookup('a')) - assert.equal(cache.maxSize, 100) - 
assert.equal(cache.timeout, 600 * 1000) - - cache.save({ key: 'a', value: 100 }) - cache.save({ key: 'b', value: 200 }) - cache.save({ key: 'c', value: 300 }) - assert.equal(cache.map.size, 3) - assert.equal(cache.peek('a'), 100) - assert.equal(cache.lookup('b'), 200) - assert.deepEqual(cache.map.keys().next().value, 'a') - }) -}) - -describe('/lib/core/odp/lru_cache (Server)', () => { - let cache: ServerLRUCache; - - it('should create and test the default server LRU Cache', () => { - cache = new ServerLRUCache() - assert.exists(cache) - assert.isNull(cache.lookup('a')) - assert.equal(cache.maxSize, 10000) - assert.equal(cache.timeout, 600 * 1000) - - cache.save({ key: 'a', value: 100 }) - cache.save({ key: 'b', value: 200 }) - cache.save({ key: 'c', value: 300 }) - assert.equal(cache.map.size, 3) - assert.equal(cache.peek('a'), 100) - assert.equal(cache.lookup('b'), 200) - assert.deepEqual(cache.map.keys().next().value, 'a') - }) -}) diff --git a/packages/optimizely-sdk/lib/core/odp/lru_cache/LRUCache.ts b/packages/optimizely-sdk/lib/core/odp/lru_cache/LRUCache.ts deleted file mode 100644 index 83d474365..000000000 --- a/packages/optimizely-sdk/lib/core/odp/lru_cache/LRUCache.ts +++ /dev/null @@ -1,118 +0,0 @@ -/** - * Copyright 2022, Optimizely - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ - -import CacheElement from "./CacheElement" - -/** - * Least-Recently Used Cache (LRU Cache) Implementation with Generic Key-Value Pairs - * Analogous to a Map that has a specified max size and a timeout per element. - * - Removes the least-recently used element from the cache if max size exceeded. - * - Removes stale elements (entries older than their timeout) from the cache. - */ -export class LRUCache { - private _map: Map> = new Map() - private _maxSize // Defines maximum size of _map - private _timeout // Milliseconds each entry has before it becomes stale - - get map(): Map> { return this._map } - get maxSize(): number { return this._maxSize } - get timeout(): number { return this._timeout } - - constructor({ maxSize, timeout }: { maxSize: number, timeout: number }) { - this._maxSize = maxSize - this._timeout = timeout - } - - /** - * Returns a valid, non-stale value from LRU Cache based on an input key. - * Additionally moves the element to the end of the cache and removes from cache if stale. - */ - public lookup(key: K): V | null { - if (this._maxSize <= 0) { return null } - - const element: CacheElement | undefined = this._map.get(key) - - if (!element) return null - - if (element.is_stale(this._timeout)) { - this._map.delete(key) - return null - } - - this._map.delete(key) - this._map.set(key, element) - - return element.value - } - - /** - * Inserts/moves an input key-value pair to the end of the LRU Cache. - * Removes the least-recently used element if the cache exceeds it's maxSize. 
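A usage sketch of the recency behaviour this class implements (illustrative only; the generic key/value parameters, which the listing elides, are written out explicitly, and the import path points at the module's final lib/utils/lru_cache location):

import { LRUCache } from '../../utils/lru_cache';

const cache = new LRUCache<string, number>({ maxSize: 2, timeout: 1000 });

cache.save({ key: 'a', value: 1 });  // { a: 1 }
cache.save({ key: 'b', value: 2 });  // { a: 1, b: 2 }

// lookup() refreshes recency, so 'a' becomes the most recently used entry.
console.log(cache.lookup('a'));      // 1, order is now { b: 2, a: 1 }

// Saving a third entry evicts the least recently used key, 'b'.
cache.save({ key: 'c', value: 3 });  // { a: 1, c: 3 }
console.log(cache.peek('b'));        // null

// peek() reads without touching recency or staleness bookkeeping.
console.log(cache.peek('a'));        // 1

cache.reset();                       // empties the underlying map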
- */ - public save({ key, value }: { key: K, value: V }): void { - if (this._maxSize <= 0) return - - const element: CacheElement | undefined = this._map.get(key) - if (element) this._map.delete(key) - this._map.set(key, new CacheElement(value)) - - if (this._map.size > this._maxSize) { - const firstMapEntryKey = this._map.keys().next().value - this._map.delete(firstMapEntryKey) - } - } - - /** - * Clears the LRU Cache - */ - public reset(): void { - if (this._maxSize <= 0) return - - this._map.clear() - } - - /** - * Reads value from specified key without moving elements in the LRU Cache. - * @param {K} key - */ - public peek(key: K): V | null { - if (this._maxSize <= 0) return null - - const element: CacheElement | undefined = this._map.get(key) - - return element?.value ?? null - } -} - -export class ClientLRUCache extends LRUCache { - constructor() { - super({ - maxSize: 100, - timeout: 600 * 1000 // 600 secs - }) - } -} - -export class ServerLRUCache extends LRUCache { - constructor() { - super({ - maxSize: 10000, - timeout: 600 * 1000 // 600 secs - }) - } -} - -export default LRUCache \ No newline at end of file diff --git a/packages/optimizely-sdk/lib/plugins/odp/odp_config.ts b/packages/optimizely-sdk/lib/core/odp/odp_config.ts similarity index 100% rename from packages/optimizely-sdk/lib/plugins/odp/odp_config.ts rename to packages/optimizely-sdk/lib/core/odp/odp_config.ts diff --git a/packages/optimizely-sdk/lib/plugins/odp/odp_event.ts b/packages/optimizely-sdk/lib/core/odp/odp_event.ts similarity index 100% rename from packages/optimizely-sdk/lib/plugins/odp/odp_event.ts rename to packages/optimizely-sdk/lib/core/odp/odp_event.ts diff --git a/packages/optimizely-sdk/lib/plugins/odp/rest_api_manager.ts b/packages/optimizely-sdk/lib/core/odp/odp_event_api_manager.ts similarity index 96% rename from packages/optimizely-sdk/lib/plugins/odp/rest_api_manager.ts rename to packages/optimizely-sdk/lib/core/odp/odp_event_api_manager.ts index 8a58de202..10f18ce32 100644 --- a/packages/optimizely-sdk/lib/plugins/odp/rest_api_manager.ts +++ b/packages/optimizely-sdk/lib/core/odp/odp_event_api_manager.ts @@ -23,14 +23,14 @@ const EVENT_SENDING_FAILURE_MESSAGE = 'ODP event send failed'; /** * Manager for communicating with the Optimizely Data Platform REST API */ -export interface IRestApiManager { +export interface IOdpEventApiManager { sendEvents(apiKey: string, apiHost: string, events: OdpEvent[]): Promise; } /** * Concrete implementation for accessing the ODP REST API */ -export class RestApiManager implements IRestApiManager { +export class OdpEventApiManager implements IOdpEventApiManager { private readonly logger: LogHandler; private readonly requestHandler: RequestHandler; diff --git a/packages/optimizely-sdk/lib/plugins/odp/odp_event_manager.ts b/packages/optimizely-sdk/lib/core/odp/odp_event_manager.ts similarity index 92% rename from packages/optimizely-sdk/lib/plugins/odp/odp_event_manager.ts rename to packages/optimizely-sdk/lib/core/odp/odp_event_manager.ts index 766d5fa0e..8ac7bb041 100644 --- a/packages/optimizely-sdk/lib/plugins/odp/odp_event_manager.ts +++ b/packages/optimizely-sdk/lib/core/odp/odp_event_manager.ts @@ -19,7 +19,7 @@ import { OdpEvent } from './odp_event'; import { uuid } from '../../utils/fns'; import { ODP_USER_KEY } from '../../utils/enums'; import { OdpConfig } from './odp_config'; -import { RestApiManager } from './rest_api_manager'; +import { OdpEventApiManager } from './odp_event_api_manager'; const MAX_RETRIES = 3; const DEFAULT_BATCH_SIZE = 10; @@ 
-80,7 +80,7 @@ export class OdpEventManager implements IOdpEventManager { * REST API Manager used to send the events * @private */ - private readonly apiManager: RestApiManager; + private readonly apiManager: OdpEventApiManager; /** * Handler for recording execution logs * @private @@ -113,23 +113,23 @@ export class OdpEventManager implements IOdpEventManager { private readonly clientVersion: string; public constructor({ - odpConfig, - apiManager, - logger, - clientEngine, - clientVersion, - queueSize, - batchSize, - flushInterval, - }: { - odpConfig: OdpConfig, - apiManager: RestApiManager, - logger: LogHandler, - clientEngine: string, - clientVersion: string, - queueSize?: number, - batchSize?: number, - flushInterval?: number + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, + queueSize, + batchSize, + flushInterval, + }: { + odpConfig: OdpConfig; + apiManager: OdpEventApiManager; + logger: LogHandler; + clientEngine: string; + clientVersion: string; + queueSize?: number; + batchSize?: number; + flushInterval?: number; }) { this.odpConfig = odpConfig; this.apiManager = apiManager; @@ -231,7 +231,11 @@ export class OdpEventManager implements IOdpEventManager { } if (this.queue.length >= this.queueSize) { - this.logger.log(LogLevel.WARNING, 'Failed to Process ODP Event. Event Queue full. queueSize = %s.', this.queue.length); + this.logger.log( + LogLevel.WARNING, + 'Failed to Process ODP Event. Event Queue full. queueSize = %s.', + this.queue.length + ); return; } @@ -379,7 +383,7 @@ export class OdpEventManager implements IOdpEventManager { private invalidDataFound(data: Map): boolean { const validTypes: string[] = ['string', 'number', 'boolean']; let foundInvalidValue = false; - data.forEach((value) => { + data.forEach(value => { if (!validTypes.includes(typeof value) && value !== null) { foundInvalidValue = true; } diff --git a/packages/optimizely-sdk/lib/plugins/odp/odp_response_schema.ts b/packages/optimizely-sdk/lib/core/odp/odp_response_schema.ts similarity index 100% rename from packages/optimizely-sdk/lib/plugins/odp/odp_response_schema.ts rename to packages/optimizely-sdk/lib/core/odp/odp_response_schema.ts diff --git a/packages/optimizely-sdk/lib/plugins/odp/graphql_manager.ts b/packages/optimizely-sdk/lib/core/odp/odp_segment_api_manager.ts similarity index 82% rename from packages/optimizely-sdk/lib/plugins/odp/graphql_manager.ts rename to packages/optimizely-sdk/lib/core/odp/odp_segment_api_manager.ts index 12d8fbc8f..aa21b96b5 100644 --- a/packages/optimizely-sdk/lib/plugins/odp/graphql_manager.ts +++ b/packages/optimizely-sdk/lib/core/odp/odp_segment_api_manager.ts @@ -41,14 +41,20 @@ const AUDIENCE_FETCH_FAILURE_MESSAGE = 'Audience segments fetch failed'; /** * Manager for communicating with the Optimizely Data Platform GraphQL endpoint */ -export interface IGraphQLManager { - fetchSegments(apiKey: string, apiHost: string, userKey: string, userValue: string, segmentsToCheck: string[]): Promise; +export interface IOdpSegmentApiManager { + fetchSegments( + apiKey: string, + apiHost: string, + userKey: string, + userValue: string, + segmentsToCheck: string[] + ): Promise; } /** * Concrete implementation for communicating with the ODP GraphQL endpoint */ -export class GraphQLManager implements IGraphQLManager { +export class OdpSegmentApiManager implements IOdpSegmentApiManager { private readonly logger: LogHandler; private readonly requestHandler: RequestHandler; @@ -70,7 +76,13 @@ export class GraphQLManager implements IGraphQLManager { * @param 
userValue Associated value to query for the user key * @param segmentsToCheck Audience segments to check for experiment inclusion */ - public async fetchSegments(apiKey: string, apiHost: string, userKey: ODP_USER_KEY, userValue: string, segmentsToCheck: string[]): Promise { + public async fetchSegments( + apiKey: string, + apiHost: string, + userKey: ODP_USER_KEY, + userValue: string, + segmentsToCheck: string[] + ): Promise { if (!apiKey || !apiHost) { this.logger.log(LogLevel.ERROR, `${AUDIENCE_FETCH_FAILURE_MESSAGE} (Parameters apiKey or apiHost invalid)`); return null; @@ -96,7 +108,7 @@ export class GraphQLManager implements IGraphQLManager { } if (parsedSegments.errors?.length > 0) { - const errors = parsedSegments.errors.map((e) => e.message).join('; '); + const errors = parsedSegments.errors.map(e => e.message).join('; '); this.logger.log(LogLevel.ERROR, `${AUDIENCE_FETCH_FAILURE_MESSAGE} (${errors})`); @@ -116,16 +128,17 @@ export class GraphQLManager implements IGraphQLManager { * Converts the query parameters to a GraphQL JSON payload * @returns GraphQL JSON string */ - private toGraphQLJson = (userKey: string, userValue: string, segmentsToCheck: string[]): string => ([ - '{"query" : "query {customer"', - `(${userKey} : "${userValue}") `, - '{audiences', - '(subset: [', - ...segmentsToCheck?.map((segment, index) => - `\\"${segment}\\"${index < segmentsToCheck.length - 1 ? ',' : ''}`, - ) || '', - '] {edges {node {name state}}}}}"}', - ].join('')); + private toGraphQLJson = (userKey: string, userValue: string, segmentsToCheck: string[]): string => + [ + '{"query" : "query {customer"', + `(${userKey} : "${userValue}") `, + '{audiences', + '(subset: [', + ...(segmentsToCheck?.map( + (segment, index) => `\\"${segment}\\"${index < segmentsToCheck.length - 1 ? ',' : ''}` + ) || ''), + '] {edges {node {name state}}}}}"}', + ].join(''); /** * Handler for querying the ODP GraphQL endpoint @@ -136,7 +149,13 @@ export class GraphQLManager implements IGraphQLManager { * @param query GraphQL formatted query string * @returns JSON response string from ODP or null */ - private async querySegments(apiKey: string, endpoint: string, userKey: string, userValue: string, query: string): Promise { + private async querySegments( + apiKey: string, + endpoint: string, + userKey: string, + userValue: string, + query: string + ): Promise { const method = 'POST'; const url = endpoint; const headers = { diff --git a/packages/optimizely-sdk/lib/core/odp/odp_segment_manager.ts b/packages/optimizely-sdk/lib/core/odp/odp_segment_manager.ts new file mode 100644 index 000000000..b3d86d354 --- /dev/null +++ b/packages/optimizely-sdk/lib/core/odp/odp_segment_manager.ts @@ -0,0 +1,116 @@ +/** + * Copyright 2022, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
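A hedged sketch of calling the renamed segment API manager; its construction is left out because the constructor is not shown in this hunk, and the ODP_USER_KEY member name, key, host, and segment names are placeholders:

import { OdpSegmentApiManager } from './odp_segment_api_manager';
import { ODP_USER_KEY } from '../../utils/enums';

// `manager` is assumed to be built elsewhere with a RequestHandler and LogHandler,
// matching the private fields shown above.
async function printQualifiedSegments(manager: OdpSegmentApiManager): Promise<void> {
  const segments = await manager.fetchSegments(
    'odp-public-api-key',               // placeholder ODP public key
    'https://odp.example.com',          // placeholder GraphQL host
    ODP_USER_KEY.FS_USER_ID,            // member name assumed; docstrings mention "vuid" and "fs_user_id"
    'test-user',
    ['has_email', 'has_email_opted_in'] // placeholder segment names
  );

  // null signals a failed fetch (missing credentials, a bad response, or GraphQL errors);
  // otherwise the promise resolves to the segments the user qualifies for.
  console.log(segments ?? 'audience segments fetch failed');
}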
+ */ + +import { getLogger, LogHandler, LogLevel } from '../../modules/logging'; +import { ERROR_MESSAGES, ODP_USER_KEY } from '../../utils/enums'; +import { LRUCache } from '../../utils/lru_cache'; +import { OdpSegmentApiManager } from './odp_segment_api_manager'; +import { OdpConfig } from './odp_config'; +import { OptimizelySegmentOption } from './optimizely_segment_option'; + +// Schedules connections to ODP for audience segmentation and caches the results. +export class OdpSegmentManager { + odpConfig: OdpConfig; + segmentsCache: LRUCache>; + odpSegmentApiManager: OdpSegmentApiManager; + logger: LogHandler; + + constructor( + odpConfig: OdpConfig, + segmentsCache: LRUCache>, + odpSegmentApiManager: OdpSegmentApiManager, + logger?: LogHandler + ) { + this.odpConfig = odpConfig; + this.segmentsCache = segmentsCache; + this.odpSegmentApiManager = odpSegmentApiManager; + this.logger = logger || getLogger('OdpSegmentManager'); + } + + /** + * Attempts to fetch and return a list of a user's qualified segments from the local segments cache. + * If no cached data exists for the target user, this fetches and caches data from the ODP server instead. + * @param userKey Key used for identifying the id type. + * @param userValue The id value itself. + * @param options An array of OptimizelySegmentOption used to ignore and/or reset the cache. + * @returns Qualified segments for the user from the cache or the ODP server if the cache is empty. + */ + async fetchQualifiedSegments( + userKey: ODP_USER_KEY, + userValue: string, + options: Array + ): Promise | null> { + const { apiHost: odpApiHost, apiKey: odpApiKey } = this.odpConfig; + + if (!odpApiKey || !odpApiHost) { + this.logger.log(LogLevel.WARNING, ERROR_MESSAGES.FETCH_SEGMENTS_FAILED_INVALID_IDENTIFIER); + return null; + } + + const segmentsToCheck = this.odpConfig.segmentsToCheck; + if (!segmentsToCheck || segmentsToCheck.length <= 0) { + this.logger.log(LogLevel.DEBUG, 'No segments are used in the project. Returning an empty list.'); + return []; + } + + const cacheKey = this.makeCacheKey(userKey, userValue); + + const ignoreCache = options.includes(OptimizelySegmentOption.IGNORE_CACHE); + const resetCache = options.includes(OptimizelySegmentOption.RESET_CACHE); + + if (resetCache) this.reset(); + + if (!ignoreCache && !resetCache) { + const cachedSegments = this.segmentsCache.lookup(cacheKey); + if (cachedSegments) { + this.logger.log(LogLevel.DEBUG, 'ODP cache hit. 
Returning segments from cache "%s".', cacheKey); + return cachedSegments; + } + this.logger.log(LogLevel.DEBUG, `ODP cache miss.`); + } + + this.logger.log(LogLevel.DEBUG, `Making a call to ODP server.`); + + const segments = await this.odpSegmentApiManager.fetchSegments( + odpApiKey, + odpApiHost, + userKey, + userValue, + segmentsToCheck + ); + + if (segments && !ignoreCache) this.segmentsCache.save({ key: cacheKey, value: segments }); + + return segments; + } + + /** + * Clears the segments cache + */ + reset(): void { + this.segmentsCache.reset(); + } + + /** + * Creates a key used to identify which user fetchQualifiedSegments should lookup and save to in the segments cache + * @param userKey User type based on ODP_USER_KEY, such as "vuid" or "fs_user_id" + * @param userValue Arbitrary string, such as "test-user" + * @returns Concatenates inputs and returns the string "{userKey}-$-{userValue}" + */ + makeCacheKey(userKey: string, userValue: string): string { + return `${userKey}-$-${userValue}`; + } +} diff --git a/packages/optimizely-sdk/lib/plugins/odp/odp_types.ts b/packages/optimizely-sdk/lib/core/odp/odp_types.ts similarity index 100% rename from packages/optimizely-sdk/lib/plugins/odp/odp_types.ts rename to packages/optimizely-sdk/lib/core/odp/odp_types.ts diff --git a/packages/optimizely-sdk/lib/core/odp/optimizely_segment_option.ts b/packages/optimizely-sdk/lib/core/odp/optimizely_segment_option.ts new file mode 100644 index 000000000..e9a7d0712 --- /dev/null +++ b/packages/optimizely-sdk/lib/core/odp/optimizely_segment_option.ts @@ -0,0 +1,21 @@ +/** + * Copyright 2022, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// Options for defining behavior of OdpSegmentManager's caching mechanism when calling fetchSegments() +export enum OptimizelySegmentOption { + IGNORE_CACHE, + RESET_CACHE, +} diff --git a/packages/optimizely-sdk/lib/utils/enums/index.ts b/packages/optimizely-sdk/lib/utils/enums/index.ts index d00e65b66..d8931acde 100644 --- a/packages/optimizely-sdk/lib/utils/enums/index.ts +++ b/packages/optimizely-sdk/lib/utils/enums/index.ts @@ -30,6 +30,7 @@ export const ERROR_MESSAGES = { DATAFILE_AND_SDK_KEY_MISSING: '%s: You must provide at least one of sdkKey or datafile. Cannot start Optimizely', EXPERIMENT_KEY_NOT_IN_DATAFILE: '%s: Experiment key %s is not in datafile.', FEATURE_NOT_IN_DATAFILE: '%s: Feature key %s is not in datafile.', + FETCH_SEGMENTS_FAILED_INVALID_IDENTIFIER: '%s: Audience segments fetch failed. 
(invalid identifier)', IMPROPERLY_FORMATTED_EXPERIMENT: '%s: Experiment key %s is improperly formatted.', INVALID_ATTRIBUTES: '%s: Provided attributes are in an invalid format.', INVALID_BUCKETING_ID: '%s: Unable to generate hash for bucketing ID %s: %s', @@ -116,10 +117,14 @@ export const LOG_MESSAGES = { USER_DOESNT_MEET_CONDITIONS_FOR_TARGETING_RULE: '%s: User %s does not meet conditions for targeting rule %s.', USER_MEETS_CONDITIONS_FOR_TARGETING_RULE: '%s: User %s meets conditions for targeting rule %s.', USER_HAS_VARIATION: '%s: User %s is in variation %s of experiment %s.', - USER_HAS_FORCED_DECISION_WITH_RULE_SPECIFIED: 'Variation (%s) is mapped to flag (%s), rule (%s) and user (%s) in the forced decision map.', - USER_HAS_FORCED_DECISION_WITH_NO_RULE_SPECIFIED: 'Variation (%s) is mapped to flag (%s) and user (%s) in the forced decision map.', - USER_HAS_FORCED_DECISION_WITH_RULE_SPECIFIED_BUT_INVALID: 'Invalid variation is mapped to flag (%s), rule (%s) and user (%s) in the forced decision map.', - USER_HAS_FORCED_DECISION_WITH_NO_RULE_SPECIFIED_BUT_INVALID: 'Invalid variation is mapped to flag (%s) and user (%s) in the forced decision map.', + USER_HAS_FORCED_DECISION_WITH_RULE_SPECIFIED: + 'Variation (%s) is mapped to flag (%s), rule (%s) and user (%s) in the forced decision map.', + USER_HAS_FORCED_DECISION_WITH_NO_RULE_SPECIFIED: + 'Variation (%s) is mapped to flag (%s) and user (%s) in the forced decision map.', + USER_HAS_FORCED_DECISION_WITH_RULE_SPECIFIED_BUT_INVALID: + 'Invalid variation is mapped to flag (%s), rule (%s) and user (%s) in the forced decision map.', + USER_HAS_FORCED_DECISION_WITH_NO_RULE_SPECIFIED_BUT_INVALID: + 'Invalid variation is mapped to flag (%s) and user (%s) in the forced decision map.', USER_HAS_FORCED_VARIATION: '%s: Variation %s is mapped to experiment %s and user %s in the forced variation map.', USER_HAS_NO_VARIATION: '%s: User %s is in no variation of experiment %s.', USER_HAS_NO_FORCED_VARIATION: '%s: User %s is not in the forced variation map.', @@ -172,7 +177,7 @@ export const CONTROL_ATTRIBUTES = { BUCKETING_ID: '$opt_bucketing_id', STICKY_BUCKETING_KEY: '$opt_experiment_bucket_map', USER_AGENT: '$opt_user_agent', - FORCED_DECISION_NULL_RULE_KEY: '$opt_null_rule_key' + FORCED_DECISION_NULL_RULE_KEY: '$opt_null_rule_key', }; export const JAVASCRIPT_CLIENT_ENGINE = 'javascript-sdk'; @@ -233,57 +238,57 @@ export const DATAFILE_VERSIONS = { */ export const enum VERSION_TYPE { PRE_RELEASE_VERSION_DELIMITER = '-', - BUILD_VERSION_DELIMITER = '+' + BUILD_VERSION_DELIMITER = '+', } export const DECISION_MESSAGES = { SDK_NOT_READY: 'Optimizely SDK not configured properly yet.', FLAG_KEY_INVALID: 'No flag was found for key "%s".', VARIABLE_VALUE_INVALID: 'Variable value for key "%s" is invalid or wrong type.', -} +}; /* -* Notification types for use with NotificationCenter -* Format is EVENT: -* -* SDK consumers can use these to register callbacks with the notification center. -* -* @deprecated since 3.1.0 -* ACTIVATE: An impression event will be sent to Optimizely -* Callbacks will receive an object argument with the following properties: -* - experiment {Object} -* - userId {string} -* - attributes {Object|undefined} -* - variation {Object} -* - logEvent {Object} -* -* DECISION: A decision is made in the system. i.e. 
user activation, -* feature access or feature-variable value retrieval -* Callbacks will receive an object argument with the following properties: -* - type {string} -* - userId {string} -* - attributes {Object|undefined} -* - decisionInfo {Object|undefined} -* -* LOG_EVENT: A batch of events, which could contain impressions and/or conversions, -* will be sent to Optimizely -* Callbacks will receive an object argument with the following properties: -* - url {string} -* - httpVerb {string} -* - params {Object} -* -* OPTIMIZELY_CONFIG_UPDATE: This Optimizely instance has been updated with a new -* config -* -* TRACK: A conversion event will be sent to Optimizely -* Callbacks will receive the an object argument with the following properties: -* - eventKey {string} -* - userId {string} -* - attributes {Object|undefined} -* - eventTags {Object|undefined} -* - logEvent {Object} -* -*/ + * Notification types for use with NotificationCenter + * Format is EVENT: + * + * SDK consumers can use these to register callbacks with the notification center. + * + * @deprecated since 3.1.0 + * ACTIVATE: An impression event will be sent to Optimizely + * Callbacks will receive an object argument with the following properties: + * - experiment {Object} + * - userId {string} + * - attributes {Object|undefined} + * - variation {Object} + * - logEvent {Object} + * + * DECISION: A decision is made in the system. i.e. user activation, + * feature access or feature-variable value retrieval + * Callbacks will receive an object argument with the following properties: + * - type {string} + * - userId {string} + * - attributes {Object|undefined} + * - decisionInfo {Object|undefined} + * + * LOG_EVENT: A batch of events, which could contain impressions and/or conversions, + * will be sent to Optimizely + * Callbacks will receive an object argument with the following properties: + * - url {string} + * - httpVerb {string} + * - params {Object} + * + * OPTIMIZELY_CONFIG_UPDATE: This Optimizely instance has been updated with a new + * config + * + * TRACK: A conversion event will be sent to Optimizely + * Callbacks will receive the an object argument with the following properties: + * - eventKey {string} + * - userId {string} + * - attributes {Object|undefined} + * - eventTags {Object|undefined} + * - logEvent {Object} + * + */ export enum NOTIFICATION_TYPES { ACTIVATE = 'ACTIVATE:experiment, user_id,attributes, variation, event', DECISION = 'DECISION:type, userId, attributes, decisionInfo', diff --git a/packages/optimizely-sdk/lib/core/odp/lru_cache/index.ts b/packages/optimizely-sdk/lib/utils/lru_cache/browser_lru_cache.ts similarity index 74% rename from packages/optimizely-sdk/lib/core/odp/lru_cache/index.ts rename to packages/optimizely-sdk/lib/utils/lru_cache/browser_lru_cache.ts index cb21e5693..23daedf01 100644 --- a/packages/optimizely-sdk/lib/core/odp/lru_cache/index.ts +++ b/packages/optimizely-sdk/lib/utils/lru_cache/browser_lru_cache.ts @@ -14,10 +14,13 @@ * limitations under the License. 
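Tying the new ODP pieces together, a sketch of OdpSegmentManager with the segment options defined above (the OdpConfig and API manager instances are assumed to exist, the cache's generic parameters are reconstructed as string keys and string[] values, and the ODP_USER_KEY member name is an assumption):

import { OdpSegmentManager } from './odp_segment_manager';
import { OdpSegmentApiManager } from './odp_segment_api_manager';
import { OdpConfig } from './odp_config';
import { OptimizelySegmentOption } from './optimizely_segment_option';
import { LRUCache } from '../../utils/lru_cache';
import { ODP_USER_KEY } from '../../utils/enums';

declare const odpConfig: OdpConfig;             // assumed to carry apiKey, apiHost and segmentsToCheck
declare const apiManager: OdpSegmentApiManager; // assumed to be constructed elsewhere

const segmentManager = new OdpSegmentManager(
  odpConfig,
  new LRUCache<string, string[]>({ maxSize: 100, timeout: 600 * 1000 }),
  apiManager
);

async function demo(): Promise<void> {
  // First call misses the cache, queries ODP, and stores the result under
  // the key "fs_user_id-$-test-user" (the makeCacheKey format).
  const fresh = await segmentManager.fetchQualifiedSegments(ODP_USER_KEY.FS_USER_ID, 'test-user', []);

  // A later call with no options is served from the LRU cache.
  const cached = await segmentManager.fetchQualifiedSegments(ODP_USER_KEY.FS_USER_ID, 'test-user', []);

  // RESET_CACHE clears the whole cache before fetching; IGNORE_CACHE skips both the read and the write.
  const forced = await segmentManager.fetchQualifiedSegments(ODP_USER_KEY.FS_USER_ID, 'test-user', [
    OptimizelySegmentOption.RESET_CACHE,
  ]);

  console.log(fresh, cached, forced);
}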
*/ -import { LRUCache, ClientLRUCache, ServerLRUCache } from "./LRUCache"; +import LRUCache from './lru_cache'; -export { - LRUCache, - ClientLRUCache, - ServerLRUCache, -} \ No newline at end of file +export class BrowserLRUCache extends LRUCache { + constructor() { + super({ + maxSize: 100, + timeout: 600 * 1000, // 600 secs + }); + } +} diff --git a/packages/optimizely-sdk/lib/utils/lru_cache/cache_element.tests.ts b/packages/optimizely-sdk/lib/utils/lru_cache/cache_element.tests.ts new file mode 100644 index 000000000..dfba16fa7 --- /dev/null +++ b/packages/optimizely-sdk/lib/utils/lru_cache/cache_element.tests.ts @@ -0,0 +1,53 @@ +/** + * Copyright 2022, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { assert } from 'chai'; +import { CacheElement } from './cache_element'; + +const sleep = async (ms: number) => { + return await new Promise(r => setTimeout(r, ms)); +}; + +describe('/odp/lru_cache/CacheElement', () => { + let element: CacheElement; + + beforeEach(() => { + element = new CacheElement('foo'); + }); + + it('should initialize a valid CacheElement', () => { + assert.exists(element); + assert.equal(element.value, 'foo'); + assert.isNotNull(element.time); + assert.doesNotThrow(() => element.is_stale(0)); + }); + + it('should return false if not stale based on timeout', () => { + const timeoutLong = 1000; + assert.equal(element.is_stale(timeoutLong), false); + }); + + it('should return false if not stale because timeout is less than or equal to 0', () => { + const timeoutNone = 0; + assert.equal(element.is_stale(timeoutNone), false); + }); + + it('should return true if stale based on timeout', async () => { + await sleep(100); + const timeoutShort = 1; + assert.equal(element.is_stale(timeoutShort), true); + }); +}); diff --git a/packages/optimizely-sdk/lib/core/odp/lru_cache/CacheElement.ts b/packages/optimizely-sdk/lib/utils/lru_cache/cache_element.ts similarity index 62% rename from packages/optimizely-sdk/lib/core/odp/lru_cache/CacheElement.ts rename to packages/optimizely-sdk/lib/utils/lru_cache/cache_element.ts index a8734527f..c286aab7a 100644 --- a/packages/optimizely-sdk/lib/core/odp/lru_cache/CacheElement.ts +++ b/packages/optimizely-sdk/lib/utils/lru_cache/cache_element.ts @@ -18,21 +18,25 @@ * CacheElement represents an individual generic item within the LRUCache */ export class CacheElement { - private _value: V | null - private _time: number + private _value: V | null; + private _time: number; - get value(): V | null { return this._value } - get time(): number { return this._time } + get value(): V | null { + return this._value; + } + get time(): number { + return this._time; + } - constructor(value: V | null = null) { - this._value = value - this._time = Date.now() - } + constructor(value: V | null = null) { + this._value = value; + this._time = Date.now(); + } - public is_stale(timeout: number): boolean { - if (timeout <= 0) return false - return Date.now() - this._time >= timeout - } + public is_stale(timeout: number): boolean { 
+ if (timeout <= 0) return false; + return Date.now() - this._time >= timeout; + } } -export default CacheElement \ No newline at end of file +export default CacheElement; diff --git a/packages/optimizely-sdk/lib/utils/lru_cache/index.ts b/packages/optimizely-sdk/lib/utils/lru_cache/index.ts new file mode 100644 index 000000000..185093ead --- /dev/null +++ b/packages/optimizely-sdk/lib/utils/lru_cache/index.ts @@ -0,0 +1,21 @@ +/** + * Copyright 2022, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { LRUCache } from './lru_cache'; +import { BrowserLRUCache } from './browser_lru_cache'; +import { ServerLRUCache } from './server_lru_cache'; + +export { LRUCache, BrowserLRUCache, ServerLRUCache }; diff --git a/packages/optimizely-sdk/lib/utils/lru_cache/lru_cache.tests.ts b/packages/optimizely-sdk/lib/utils/lru_cache/lru_cache.tests.ts new file mode 100644 index 000000000..4c9de8d1a --- /dev/null +++ b/packages/optimizely-sdk/lib/utils/lru_cache/lru_cache.tests.ts @@ -0,0 +1,309 @@ +/** + * Copyright 2022, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
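The barrel above re-exports the generic cache plus its two presets; a small sketch of the defaults they ship with (values match the constructors and tests in this change):

import { BrowserLRUCache, ServerLRUCache } from '../../utils/lru_cache';

// Browser preset: at most 100 entries, each valid for 600 seconds.
const browserCache = new BrowserLRUCache();
console.log(browserCache.maxSize, browserCache.timeout); // 100 600000

// Server preset: at most 10,000 entries with the same 600-second timeout.
const serverCache = new ServerLRUCache();
console.log(serverCache.maxSize, serverCache.timeout); // 10000 600000

// Both behave exactly like the LRUCache they extend.
serverCache.save({ key: 'fs_user_id-$-test-user', value: ['has_email'] });
console.log(serverCache.lookup('fs_user_id-$-test-user')); // ['has_email']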
+ */ + +import { assert } from 'chai'; +import { LRUCache } from './lru_cache'; +import { BrowserLRUCache } from './browser_lru_cache'; +import { ServerLRUCache } from './server_lru_cache'; + +const sleep = async (ms: number) => { + return await new Promise(r => setTimeout(r, ms)); +}; + +describe('/lib/core/odp/lru_cache (Default)', () => { + let cache: LRUCache; + + describe('LRU Cache > Initialization', () => { + it('should successfully create a new cache with maxSize > 0 and timeout > 0', () => { + cache = new LRUCache({ + maxSize: 1000, + timeout: 2000, + }); + + assert.exists(cache); + + assert.equal(cache.maxSize, 1000); + assert.equal(cache.timeout, 2000); + }); + + it('should successfully create a new cache with maxSize == 0 and timeout == 0', () => { + cache = new LRUCache({ + maxSize: 0, + timeout: 0, + }); + + assert.exists(cache); + + assert.equal(cache.maxSize, 0); + assert.equal(cache.timeout, 0); + }); + }); + + describe('LRU Cache > Save & Lookup', () => { + const maxCacheSize = 2; + + beforeEach(() => { + cache = new LRUCache({ + maxSize: maxCacheSize, + timeout: 1000, + }); + }); + + it('should have no values in the cache upon initialization', () => { + assert.isNull(cache.peek(1)); + }); + + it('should save keys and values of any valid type', () => { + cache.save({ key: 'a', value: 1 }); // { a: 1 } + assert.equal(cache.peek('a'), 1); + + cache.save({ key: 2, value: 'b' }); // { a: 1, 2: 'b' } + assert.equal(cache.peek(2), 'b'); + + const foo = Symbol('foo'); + const bar = {}; + cache.save({ key: foo, value: bar }); // { 2: 'b', Symbol('foo'): {} } + assert.deepEqual({}, cache.peek(foo)); + }); + + it('should save values up to its maxSize', () => { + cache.save({ key: 'a', value: 1 }); // { a: 1 } + assert.equal(cache.peek('a'), 1); + + cache.save({ key: 'b', value: 2 }); // { a: 1, b: 2 } + assert.equal(cache.peek('a'), 1); + assert.equal(cache.peek('b'), 2); + + cache.save({ key: 'c', value: 3 }); // { b: 2, c: 3 } + assert.equal(cache.peek('a'), null); + assert.equal(cache.peek('b'), 2); + assert.equal(cache.peek('c'), 3); + }); + + it('should override values of matching keys when saving', () => { + cache.save({ key: 'a', value: 1 }); // { a: 1 } + assert.equal(cache.peek('a'), 1); + + cache.save({ key: 'a', value: 2 }); // { a: 2 } + assert.equal(cache.peek('a'), 2); + + cache.save({ key: 'a', value: 3 }); // { a: 3 } + assert.equal(cache.peek('a'), 3); + }); + + it('should update cache accordingly when using lookup/peek', () => { + assert.isNull(cache.lookup(3)); + + cache.save({ key: 'b', value: 201 }); // { b: 201 } + cache.save({ key: 'a', value: 101 }); // { b: 201, a: 101 } + + assert.equal(cache.lookup('b'), 201); // { a: 101, b: 201 } + + cache.save({ key: 'c', value: 302 }); // { b: 201, c: 302 } + + assert.isNull(cache.peek(1)); + assert.equal(cache.peek('b'), 201); + assert.equal(cache.peek('c'), 302); + assert.equal(cache.lookup('c'), 302); // { b: 201, c: 302 } + + cache.save({ key: 'a', value: 103 }); // { c: 302, a: 103 } + assert.equal(cache.peek('a'), 103); + assert.isNull(cache.peek('b')); + assert.equal(cache.peek('c'), 302); + }); + }); + + describe('LRU Cache > Size', () => { + it('should keep LRU Cache map size capped at cache.capacity', () => { + const maxCacheSize = 2; + + cache = new LRUCache({ + maxSize: maxCacheSize, + timeout: 1000, + }); + + cache.save({ key: 'a', value: 1 }); // { a: 1 } + cache.save({ key: 'b', value: 2 }); // { a: 1, b: 2 } + + assert.equal(cache.map.size, maxCacheSize); + assert.equal(cache.map.size, cache.maxSize); 
+ }); + + it('should not save to cache if maxSize is 0', () => { + cache = new LRUCache({ + maxSize: 0, + timeout: 1000, + }); + + assert.isNull(cache.lookup('a')); + cache.save({ key: 'a', value: 100 }); + assert.isNull(cache.lookup('a')); + }); + + it('should not save to cache if maxSize is negative', () => { + cache = new LRUCache({ + maxSize: -500, + timeout: 1000, + }); + + assert.isNull(cache.lookup('a')); + cache.save({ key: 'a', value: 100 }); + assert.isNull(cache.lookup('a')); + }); + }); + + describe('LRU Cache > Timeout', () => { + it('should discard stale entries in the cache on peek/lookup when timeout is greater than 0', async () => { + const maxTimeout = 100; + + cache = new LRUCache({ + maxSize: 1000, + timeout: maxTimeout, + }); + + cache.save({ key: 'a', value: 100 }); // { a: 100 } + cache.save({ key: 'b', value: 200 }); // { a: 100, b: 200 } + cache.save({ key: 'c', value: 300 }); // { a: 100, b: 200, c: 300 } + + assert.equal(cache.peek('a'), 100); + assert.equal(cache.peek('b'), 200); + assert.equal(cache.peek('c'), 300); + + await sleep(150); + + assert.isNull(cache.lookup('a')); + assert.isNull(cache.lookup('b')); + assert.isNull(cache.lookup('c')); + + cache.save({ key: 'd', value: 400 }); // { d: 400 } + cache.save({ key: 'a', value: 101 }); // { d: 400, a: 101 } + + assert.equal(cache.lookup('a'), 101); // { d: 400, a: 101 } + assert.equal(cache.lookup('d'), 400); // { a: 101, d: 400 } + }); + + it('should never have stale entries if timeout is 0', async () => { + const maxTimeout = 0; + + cache = new LRUCache({ + maxSize: 1000, + timeout: maxTimeout, + }); + + cache.save({ key: 'a', value: 100 }); // { a: 100 } + cache.save({ key: 'b', value: 200 }); // { a: 100, b: 200 } + + await sleep(100); + assert.equal(cache.lookup('a'), 100); + assert.equal(cache.lookup('b'), 200); + }); + + it('should never have stale entries if timeout is less than 0', async () => { + const maxTimeout = -500; + + cache = new LRUCache({ + maxSize: 1000, + timeout: maxTimeout, + }); + + cache.save({ key: 'a', value: 100 }); // { a: 100 } + cache.save({ key: 'b', value: 200 }); // { a: 100, b: 200 } + + await sleep(100); + assert.equal(cache.lookup('a'), 100); + assert.equal(cache.lookup('b'), 200); + }); + }); + + describe('LRU Cache > Reset', () => { + it('should be able to reset the cache', async () => { + cache = new LRUCache({ maxSize: 2, timeout: 100 }); + cache.save({ key: 'a', value: 100 }); // { a: 100 } + cache.save({ key: 'b', value: 200 }); // { a: 100, b: 200 } + + await sleep(0); + + assert.equal(cache.map.size, 2); + cache.reset(); // { } + + await sleep(150); + + assert.equal(cache.map.size, 0); + + it('should be fully functional after resetting the cache', () => { + cache.save({ key: 'c', value: 300 }); // { c: 300 } + cache.save({ key: 'd', value: 400 }); // { c: 300, d: 400 } + assert.isNull(cache.peek('b')); + assert.equal(cache.peek('c'), 300); + assert.equal(cache.peek('d'), 400); + + cache.save({ key: 'a', value: 500 }); // { d: 400, a: 500 } + cache.save({ key: 'b', value: 600 }); // { a: 500, b: 600 } + assert.isNull(cache.peek('c')); + assert.equal(cache.peek('a'), 500); + assert.equal(cache.peek('b'), 600); + + const _ = cache.lookup('a'); // { b: 600, a: 500 } + assert.equal(500, _); + + cache.save({ key: 'c', value: 700 }); // { a: 500, c: 700 } + assert.isNull(cache.peek('b')); + assert.equal(cache.peek('a'), 500); + assert.equal(cache.peek('c'), 700); + }); + }); + }); +}); + +describe('/lib/core/odp/lru_cache (Client)', () => { + let cache: BrowserLRUCache; 
+ + it('should create and test the default client LRU Cache', () => { + cache = new BrowserLRUCache(); + assert.exists(cache); + assert.isNull(cache.lookup('a')); + assert.equal(cache.maxSize, 100); + assert.equal(cache.timeout, 600 * 1000); + + cache.save({ key: 'a', value: 100 }); + cache.save({ key: 'b', value: 200 }); + cache.save({ key: 'c', value: 300 }); + assert.equal(cache.map.size, 3); + assert.equal(cache.peek('a'), 100); + assert.equal(cache.lookup('b'), 200); + assert.deepEqual(cache.map.keys().next().value, 'a'); + }); +}); + +describe('/lib/core/odp/lru_cache (Server)', () => { + let cache: ServerLRUCache; + + it('should create and test the default server LRU Cache', () => { + cache = new ServerLRUCache(); + assert.exists(cache); + assert.isNull(cache.lookup('a')); + assert.equal(cache.maxSize, 10000); + assert.equal(cache.timeout, 600 * 1000); + + cache.save({ key: 'a', value: 100 }); + cache.save({ key: 'b', value: 200 }); + cache.save({ key: 'c', value: 300 }); + assert.equal(cache.map.size, 3); + assert.equal(cache.peek('a'), 100); + assert.equal(cache.lookup('b'), 200); + assert.deepEqual(cache.map.keys().next().value, 'a'); + }); +}); diff --git a/packages/optimizely-sdk/lib/utils/lru_cache/lru_cache.ts b/packages/optimizely-sdk/lib/utils/lru_cache/lru_cache.ts new file mode 100644 index 000000000..b0b2a60f5 --- /dev/null +++ b/packages/optimizely-sdk/lib/utils/lru_cache/lru_cache.ts @@ -0,0 +1,108 @@ +/** + * Copyright 2022, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import CacheElement from './cache_element'; + +/** + * Least-Recently Used Cache (LRU Cache) Implementation with Generic Key-Value Pairs + * Analogous to a Map that has a specified max size and a timeout per element. + * - Removes the least-recently used element from the cache if max size exceeded. + * - Removes stale elements (entries older than their timeout) from the cache. + */ +export class LRUCache { + private _map: Map> = new Map(); + private _maxSize; // Defines maximum size of _map + private _timeout; // Milliseconds each entry has before it becomes stale + + get map(): Map> { + return this._map; + } + get maxSize(): number { + return this._maxSize; + } + get timeout(): number { + return this._timeout; + } + + constructor({ maxSize, timeout }: { maxSize: number; timeout: number }) { + this._maxSize = maxSize; + this._timeout = timeout; + } + + /** + * Returns a valid, non-stale value from LRU Cache based on an input key. + * Additionally moves the element to the end of the cache and removes from cache if stale. + */ + public lookup(key: K): V | null { + if (this._maxSize <= 0) { + return null; + } + + const element: CacheElement | undefined = this._map.get(key); + + if (!element) return null; + + if (element.is_stale(this._timeout)) { + this._map.delete(key); + return null; + } + + this._map.delete(key); + this._map.set(key, element); + + return element.value; + } + + /** + * Inserts/moves an input key-value pair to the end of the LRU Cache. 
+ * Removes the least-recently used element if the cache exceeds it's maxSize. + */ + public save({ key, value }: { key: K; value: V }): void { + if (this._maxSize <= 0) return; + + const element: CacheElement | undefined = this._map.get(key); + if (element) this._map.delete(key); + this._map.set(key, new CacheElement(value)); + + if (this._map.size > this._maxSize) { + const firstMapEntryKey = this._map.keys().next().value; + this._map.delete(firstMapEntryKey); + } + } + + /** + * Clears the LRU Cache + */ + public reset(): void { + if (this._maxSize <= 0) return; + + this._map.clear(); + } + + /** + * Reads value from specified key without moving elements in the LRU Cache. + * @param {K} key + */ + public peek(key: K): V | null { + if (this._maxSize <= 0) return null; + + const element: CacheElement | undefined = this._map.get(key); + + return element?.value ?? null; + } +} + +export default LRUCache; diff --git a/packages/optimizely-sdk/lib/utils/lru_cache/server_lru_cache.ts b/packages/optimizely-sdk/lib/utils/lru_cache/server_lru_cache.ts new file mode 100644 index 000000000..f39e15894 --- /dev/null +++ b/packages/optimizely-sdk/lib/utils/lru_cache/server_lru_cache.ts @@ -0,0 +1,26 @@ +/** + * Copyright 2022, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import LRUCache from './lru_cache'; + +export class ServerLRUCache extends LRUCache { + constructor() { + super({ + maxSize: 10000, + timeout: 600 * 1000, // 600 secs + }); + } +} diff --git a/packages/optimizely-sdk/tests/restApiManager.spec.ts b/packages/optimizely-sdk/tests/odpEventApiManager.spec.ts similarity index 83% rename from packages/optimizely-sdk/tests/restApiManager.spec.ts rename to packages/optimizely-sdk/tests/odpEventApiManager.spec.ts index 132649da7..49492a8c6 100644 --- a/packages/optimizely-sdk/tests/restApiManager.spec.ts +++ b/packages/optimizely-sdk/tests/odpEventApiManager.spec.ts @@ -18,8 +18,8 @@ import { anyString, anything, instance, mock, resetCalls, verify, when } from 'ts-mockito'; import { LogHandler, LogLevel } from '../lib/modules/logging'; -import { RestApiManager } from '../lib/plugins/odp/rest_api_manager'; -import { OdpEvent } from '../lib/plugins/odp/odp_event'; +import { OdpEventApiManager } from '../lib/core/odp/odp_event_api_manager'; +import { OdpEvent } from '../lib/core/odp/odp_event'; import { RequestHandler } from '../lib/utils/http_request_handler/http'; const VALID_ODP_PUBLIC_KEY = 'not-real-api-key'; @@ -32,15 +32,11 @@ data1.set('key14', null); const data2 = new Map(); data2.set('key2', 'value-2'); const ODP_EVENTS = [ - new OdpEvent('t1', 'a1', - new Map([['id-key-1', 'id-value-1']]), - data1), - new OdpEvent('t2', 'a2', - new Map([['id-key-2', 'id-value-2']]), - data2), + new OdpEvent('t1', 'a1', new Map([['id-key-1', 'id-value-1']]), data1), + new OdpEvent('t2', 'a2', new Map([['id-key-2', 'id-value-2']]), data2), ]; -describe('RestApiManager', () => { +describe('OdpEventApiManager', () => { let mockLogger: LogHandler; let mockRequestHandler: RequestHandler; @@ -54,11 +50,10 @@ describe('RestApiManager', () => { resetCalls(mockRequestHandler); }); - const managerInstance = () => new RestApiManager(instance(mockRequestHandler), instance(mockLogger)); + const managerInstance = () => new OdpEventApiManager(instance(mockRequestHandler), instance(mockLogger)); const abortableRequest = (statusCode: number, body: string) => { return { - abort: () => { - }, + abort: () => {}, responsePromise: Promise.resolve({ statusCode, body, @@ -68,7 +63,9 @@ describe('RestApiManager', () => { }; it('should should send events successfully and not suggest retry', async () => { - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(200, '')); + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(200, '') + ); const manager = managerInstance(); const shouldRetry = await manager.sendEvents(VALID_ODP_PUBLIC_KEY, ODP_REST_API_HOST, ODP_EVENTS); @@ -78,7 +75,9 @@ describe('RestApiManager', () => { }); it('should not suggest a retry for 400 HTTP response', async () => { - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(400, '')); + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(400, '') + ); const manager = managerInstance(); const shouldRetry = await manager.sendEvents(VALID_ODP_PUBLIC_KEY, ODP_REST_API_HOST, ODP_EVENTS); @@ -88,7 +87,9 @@ describe('RestApiManager', () => { }); it('should suggest a retry for 500 HTTP response', async () => { - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(500, '')); + 
when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(500, '') + ); const manager = managerInstance(); const shouldRetry = await manager.sendEvents(VALID_ODP_PUBLIC_KEY, ODP_REST_API_HOST, ODP_EVENTS); @@ -99,8 +100,7 @@ describe('RestApiManager', () => { it('should suggest a retry for network timeout', async () => { when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn({ - abort: () => { - }, + abort: () => {}, responsePromise: Promise.reject(new Error('Request timed out')), }); const manager = managerInstance(); diff --git a/packages/optimizely-sdk/tests/odpEventManager.spec.ts b/packages/optimizely-sdk/tests/odpEventManager.spec.ts index 12ee14a84..13f70d1e9 100644 --- a/packages/optimizely-sdk/tests/odpEventManager.spec.ts +++ b/packages/optimizely-sdk/tests/odpEventManager.spec.ts @@ -14,12 +14,12 @@ * limitations under the License. */ -import { OdpConfig } from '../lib/plugins/odp/odp_config'; -import { OdpEventManager, STATE } from '../lib/plugins/odp/odp_event_manager'; +import { OdpConfig } from '../lib/core/odp/odp_config'; +import { OdpEventManager, STATE } from '../lib/core/odp/odp_event_manager'; import { anything, capture, instance, mock, resetCalls, spy, verify, when } from 'ts-mockito'; -import { RestApiManager } from '../lib/plugins/odp/rest_api_manager'; +import { OdpEventApiManager } from '../lib/core/odp/odp_event_api_manager'; import { LogHandler, LogLevel } from '../lib/modules/logging'; -import { OdpEvent } from '../lib/plugins/odp/odp_event'; +import { OdpEvent } from '../lib/core/odp/odp_event'; import { RequestHandler } from '../lib/utils/http_request_handler/http'; const API_KEY = 'test-api-key'; @@ -30,21 +30,25 @@ const EVENTS: OdpEvent[] = [ 't1', 'a1', new Map([['id-key-1', 'id-value-1']]), - new Map(Object.entries({ - 'key-1': 'value1', - 'key-2': null, - 'key-3': 3.3, - 'key-4': true, - })), + new Map( + Object.entries({ + 'key-1': 'value1', + 'key-2': null, + 'key-3': 3.3, + 'key-4': true, + }) + ) ), new OdpEvent( 't2', 'a2', new Map([['id-key-2', 'id-value-2']]), - new Map(Object.entries({ - 'key-2': 'value2', - 'data_source': 'my-source', - })), + new Map( + Object.entries({ + 'key-2': 'value2', + data_source: 'my-source', + }) + ) ), ]; // naming for object destructuring @@ -55,28 +59,32 @@ const PROCESSED_EVENTS: OdpEvent[] = [ 't1', 'a1', new Map([['id-key-1', 'id-value-1']]), - new Map(Object.entries({ - 'idempotence_id': MOCK_IDEMPOTENCE_ID, - 'data_source_type': 'sdk', - 'data_source': clientEngine, - 'data_source_version': clientVersion, - 'key-1': 'value1', - 'key-2': null, - 'key-3': 3.3, - 'key-4': true, - })), + new Map( + Object.entries({ + idempotence_id: MOCK_IDEMPOTENCE_ID, + data_source_type: 'sdk', + data_source: clientEngine, + data_source_version: clientVersion, + 'key-1': 'value1', + 'key-2': null, + 'key-3': 3.3, + 'key-4': true, + }) + ) ), new OdpEvent( 't2', 'a2', new Map([['id-key-2', 'id-value-2']]), - new Map(Object.entries({ - 'idempotence_id': MOCK_IDEMPOTENCE_ID, - 'data_source_type': 'sdk', - 'data_source': clientEngine, - 'data_source_version': clientVersion, - 'key-2': 'value2', - })), + new Map( + Object.entries({ + idempotence_id: MOCK_IDEMPOTENCE_ID, + data_source_type: 'sdk', + data_source: clientEngine, + data_source_version: clientVersion, + 'key-2': 'value2', + }) + ) ), ]; const makeEvent = (id: number) => { @@ -95,8 +103,7 @@ const pause = (timeoutMilliseconds: number): Promise => { }; const abortableRequest = 
(statusCode: number, body: string) => { return { - abort: () => { - }, + abort: () => {}, responsePromise: Promise.resolve({ statusCode, body, @@ -107,15 +114,15 @@ const abortableRequest = (statusCode: number, body: string) => { describe('OdpEventManager', () => { let mockLogger: LogHandler; - let mockApiManager: RestApiManager; + let mockApiManager: OdpEventApiManager; let odpConfig: OdpConfig; let logger: LogHandler; - let apiManager: RestApiManager; + let apiManager: OdpEventApiManager; beforeAll(() => { mockLogger = mock(); - mockApiManager = mock(); + mockApiManager = mock(); odpConfig = new OdpConfig(API_KEY, API_HOST, []); logger = instance(mockLogger); @@ -129,7 +136,11 @@ describe('OdpEventManager', () => { it('should log and discard events when event manager not running', () => { const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, }); // since we've not called start() then... @@ -144,7 +155,11 @@ describe('OdpEventManager', () => { when(mockOdpConfig.isReady()).thenReturn(false); const odpConfig = instance(mockOdpConfig); const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, }); eventManager['state'] = STATE.RUNNING; // simulate running without calling start() @@ -155,17 +170,23 @@ describe('OdpEventManager', () => { it('should discard events with invalid data', () => { const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, }); // make an event with invalid data key-value entry const badEvent = new OdpEvent( 't3', 'a3', new Map([['id-key-3', 'id-value-3']]), - new Map(Object.entries({ - 'key-1': false, - 'key-2': { random: 'object', whichShouldFail: true }, - })), + new Map( + Object.entries({ + 'key-1': false, + 'key-2': { random: 'object', whichShouldFail: true }, + }) + ) ); eventManager.sendEvent(badEvent); @@ -175,7 +196,12 @@ describe('OdpEventManager', () => { it('should log a max queue hit and discard ', () => { // set queue to maximum of 1 const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, queueSize: 1, // With max queue size set to 1... + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, + queueSize: 1, // With max queue size set to 1... }); eventManager['state'] = STATE.RUNNING; eventManager['queue'].push(EVENTS[0]); // simulate 1 event already in the queue then... @@ -183,18 +209,26 @@ describe('OdpEventManager', () => { // ...try adding the second event eventManager.sendEvent(EVENTS[1]); - verify(mockLogger.log(LogLevel.WARNING, 'Failed to Process ODP Event. Event Queue full. queueSize = %s.', 1)).once(); + verify( + mockLogger.log(LogLevel.WARNING, 'Failed to Process ODP Event. Event Queue full. 
queueSize = %s.', 1) + ).once(); }); it('should add additional information to each event', () => { const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, }); const processedEventData = PROCESSED_EVENTS[0].data; const eventData = eventManager['augmentCommonData'](EVENTS[0].data); - expect((eventData.get('idempotence_id') as string).length).toEqual((processedEventData.get('idempotence_id') as string).length); + expect((eventData.get('idempotence_id') as string).length).toEqual( + (processedEventData.get('idempotence_id') as string).length + ); expect(eventData.get('data_source_type')).toEqual(processedEventData.get('data_source_type')); expect(eventData.get('data_source')).toEqual(processedEventData.get('data_source')); expect(eventData.get('data_source_version')).toEqual(processedEventData.get('data_source_version')); @@ -206,7 +240,11 @@ describe('OdpEventManager', () => { it('should attempt to flush an empty queue at flush intervals', async () => { const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, flushInterval: 100, }); const spiedEventManager = spy(eventManager); @@ -222,7 +260,11 @@ describe('OdpEventManager', () => { when(mockApiManager.sendEvents(anything(), anything(), anything())).thenResolve(false); const apiManager = instance(mockApiManager); const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, batchSize: 10, // with batch size of 10... flushInterval: 250, }); @@ -240,7 +282,13 @@ describe('OdpEventManager', () => { it('should dispatch events with correct payload', async () => { const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, batchSize: 10, flushInterval: 100, + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, + batchSize: 10, + flushInterval: 100, }); eventManager.start(); @@ -264,8 +312,12 @@ describe('OdpEventManager', () => { when(mockApiManager.sendEvents(anything(), anything(), anything())).thenResolve(true); const apiManager = instance(mockApiManager); const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, - batchSize: 2, // batch size of 2 + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, + batchSize: 2, // batch size of 2 flushInterval: 100, }); @@ -284,8 +336,12 @@ describe('OdpEventManager', () => { when(mockApiManager.sendEvents(anything(), anything(), anything())).thenResolve(false); const apiManager = instance(mockApiManager); const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, - batchSize: 2, // batches of 2 with... + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, + batchSize: 2, // batches of 2 with... 
flushInterval: 100, }); @@ -303,10 +359,18 @@ describe('OdpEventManager', () => { it('should prepare correct payload for register VUID', async () => { const mockRequestHandler: RequestHandler = mock(); - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(200, '')); - const apiManager = new RestApiManager(instance(mockRequestHandler), logger); + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(200, '') + ); + const apiManager = new OdpEventApiManager(instance(mockRequestHandler), logger); const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, batchSize: 10, flushInterval: 100, + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, + batchSize: 10, + flushInterval: 100, }); const vuid = 'vuid_330e05cad15746d9af8a75b8d10'; @@ -323,7 +387,7 @@ describe('OdpEventManager', () => { const event = events[0]; expect(event.type).toEqual('fullstack'); expect(event.action).toEqual('client_initialized'); - expect(event.identifiers).toEqual({ 'vuid': vuid }); + expect(event.identifiers).toEqual({ vuid: vuid }); expect(event.data.idempotence_id.length).toBe(36); // uuid length expect(event.data.data_source_type).toEqual('sdk'); expect(event.data.data_source).toEqual('javascript-sdk'); @@ -332,10 +396,17 @@ describe('OdpEventManager', () => { it('should prepare correct payload for identify user', async () => { const mockRequestHandler: RequestHandler = mock(); - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(200, '')); - const apiManager = new RestApiManager(instance(mockRequestHandler), logger); + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(200, '') + ); + const apiManager = new OdpEventApiManager(instance(mockRequestHandler), logger); const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, flushInterval: 100, + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, + flushInterval: 100, }); const vuid = 'vuid_330e05cad15746d9af8a75b8d10'; const fsUserId = 'test-fs-user-id'; @@ -353,7 +424,7 @@ describe('OdpEventManager', () => { const event = events[0]; expect(event.type).toEqual('fullstack'); expect(event.action).toEqual('identified'); - expect(event.identifiers).toEqual({ 'vuid': vuid, 'fs_user_id': fsUserId }); + expect(event.identifiers).toEqual({ vuid: vuid, fs_user_id: fsUserId }); expect(event.data.idempotence_id.length).toBe(36); // uuid length expect(event.data.data_source_type).toEqual('sdk'); expect(event.data.data_source).toEqual('javascript-sdk'); @@ -362,7 +433,11 @@ describe('OdpEventManager', () => { it('should apply updated ODP configuration when available', () => { const eventManager = new OdpEventManager({ - odpConfig, apiManager, logger, clientEngine, clientVersion, + odpConfig, + apiManager, + logger, + clientEngine, + clientVersion, }); const apiKey = 'testing-api-key'; const apiHost = 'https://some.other.example.com'; diff --git a/packages/optimizely-sdk/tests/graphQlManager.spec.ts b/packages/optimizely-sdk/tests/odpSegmentApiManager.ts similarity index 83% rename from packages/optimizely-sdk/tests/graphQlManager.spec.ts rename to packages/optimizely-sdk/tests/odpSegmentApiManager.ts index 8f2c228ff..5056145a2 100644 --- a/packages/optimizely-sdk/tests/graphQlManager.spec.ts +++ 
b/packages/optimizely-sdk/tests/odpSegmentApiManager.ts @@ -18,7 +18,7 @@ import { anyString, anything, instance, mock, resetCalls, verify, when } from 'ts-mockito'; import { LogHandler, LogLevel } from '../lib/modules/logging'; -import { GraphQLManager } from '../lib/plugins/odp/graphql_manager'; +import { OdpSegmentApiManager } from '../lib/core/odp/odp_segment_api_manager'; import { RequestHandler } from '../lib/utils/http_request_handler/http'; import { ODP_USER_KEY } from '../lib/utils/enums'; @@ -26,13 +26,9 @@ const API_key = 'not-real-api-key'; const GRAPHQL_ENDPOINT = 'https://some.example.com/graphql/endpoint'; const USER_KEY = ODP_USER_KEY.FS_USER_ID; const USER_VALUE = 'tester-101'; -const SEGMENTS_TO_CHECK = [ - 'has_email', - 'has_email_opted_in', - 'push_on_sale', -]; +const SEGMENTS_TO_CHECK = ['has_email', 'has_email_opted_in', 'push_on_sale']; -describe('GraphQLManager', () => { +describe('OdpSegmentApiManager', () => { let mockLogger: LogHandler; let mockRequestHandler: RequestHandler; @@ -46,12 +42,11 @@ describe('GraphQLManager', () => { resetCalls(mockRequestHandler); }); - const managerInstance = () => new GraphQLManager(instance(mockRequestHandler), instance(mockLogger)); + const managerInstance = () => new OdpSegmentApiManager(instance(mockRequestHandler), instance(mockLogger)); const abortableRequest = (statusCode: number, body: string) => { return { - abort: () => { - }, + abort: () => {}, responsePromise: Promise.resolve({ statusCode, body, @@ -137,17 +132,20 @@ describe('GraphQLManager', () => { const response = manager['toGraphQLJson'](USER_KEY, USER_VALUE, SEGMENTS_TO_CHECK); - expect(response) - .toBe(`{"query" : "query {customer"(${USER_KEY} : "${USER_VALUE}") {audiences(subset: [\\"has_email\\",\\"has_email_opted_in\\",\\"push_on_sale\\"] {edges {node {name state}}}}}"}`, - ); + expect(response).toBe( + `{"query" : "query {customer"(${USER_KEY} : "${USER_VALUE}") {audiences(subset: [\\"has_email\\",\\"has_email_opted_in\\",\\"push_on_sale\\"] {edges {node {name state}}}}}"}` + ); }); it('should fetch valid qualified segments', async () => { - const responseJsonWithQualifiedSegments = '{"data":{"customer":{"audiences":' + + const responseJsonWithQualifiedSegments = + '{"data":{"customer":{"audiences":' + '{"edges":[{"node":{"name":"has_email",' + '"state":"qualified"}},{"node":{"name":' + '"has_email_opted_in","state":"qualified"}}]}}}}'; - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(200, responseJsonWithQualifiedSegments)); + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(200, responseJsonWithQualifiedSegments) + ); const manager = managerInstance(); const segments = await manager.fetchSegments(API_key, GRAPHQL_ENDPOINT, USER_KEY, USER_VALUE, SEGMENTS_TO_CHECK); @@ -168,9 +166,10 @@ describe('GraphQLManager', () => { }); it('should handle empty qualified segments', async () => { - const responseJsonWithNoQualifiedSegments = '{"data":{"customer":{"audiences":' + - '{"edges":[ ]}}}}'; - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(200, responseJsonWithNoQualifiedSegments)); + const responseJsonWithNoQualifiedSegments = '{"data":{"customer":{"audiences":' + '{"edges":[ ]}}}}'; + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(200, responseJsonWithNoQualifiedSegments) + ); const manager = 
managerInstance(); const segments = await manager.fetchSegments(API_key, GRAPHQL_ENDPOINT, USER_KEY, USER_VALUE, SEGMENTS_TO_CHECK); @@ -181,16 +180,25 @@ describe('GraphQLManager', () => { it('should handle error with invalid identifier', async () => { const INVALID_USER_ID = 'invalid-user'; - const errorJsonResponse = '{"errors":[{"message":' + + const errorJsonResponse = + '{"errors":[{"message":' + '"Exception while fetching data (/customer) : ' + `Exception: could not resolve _fs_user_id = ${INVALID_USER_ID}",` + '"locations":[{"line":1,"column":8}],"path":["customer"],' + '"extensions":{"classification":"DataFetchingException"}}],' + '"data":{"customer":null}}'; - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(200, errorJsonResponse)); + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(200, errorJsonResponse) + ); const manager = managerInstance(); - const segments = await manager.fetchSegments(API_key, GRAPHQL_ENDPOINT, USER_KEY, INVALID_USER_ID, SEGMENTS_TO_CHECK); + const segments = await manager.fetchSegments( + API_key, + GRAPHQL_ENDPOINT, + USER_KEY, + INVALID_USER_ID, + SEGMENTS_TO_CHECK + ); expect(segments).toBeNull(); verify(mockLogger.log(anything(), anyString())).once(); @@ -198,7 +206,9 @@ describe('GraphQLManager', () => { it('should handle unrecognized JSON responses', async () => { const unrecognizedJson = '{"unExpectedObject":{ "withSome": "value", "thatIsNotParseable": "true" }}'; - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(200, unrecognizedJson)); + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(200, unrecognizedJson) + ); const manager = managerInstance(); const segments = await manager.fetchSegments(API_key, GRAPHQL_ENDPOINT, USER_KEY, USER_VALUE, SEGMENTS_TO_CHECK); @@ -208,11 +218,14 @@ describe('GraphQLManager', () => { }); it('should handle other exception types', async () => { - const errorJsonResponse = '{"errors":[{"message":"Validation error of type ' + + const errorJsonResponse = + '{"errors":[{"message":"Validation error of type ' + 'UnknownArgument: Unknown field argument not_real_userKey @ ' + '\'customer\'","locations":[{"line":1,"column":17}],' + '"extensions":{"classification":"ValidationError"}}]}'; - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(200, errorJsonResponse)); + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(200, errorJsonResponse) + ); const manager = managerInstance(); const segments = await manager.fetchSegments(API_key, GRAPHQL_ENDPOINT, USER_KEY, USER_VALUE, SEGMENTS_TO_CHECK); @@ -223,7 +236,9 @@ describe('GraphQLManager', () => { it('should handle bad responses', async () => { const badResponse = '{"data":{ }}'; - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(200, badResponse)); + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(200, badResponse) + ); const manager = managerInstance(); const segments = await manager.fetchSegments(API_key, GRAPHQL_ENDPOINT, USER_KEY, USER_VALUE, SEGMENTS_TO_CHECK); @@ -233,7 +248,9 @@ describe('GraphQLManager', () => { }); it('should handle non 200 HTTP status 
code response', async () => { - when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn(abortableRequest(400, '')); + when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn( + abortableRequest(400, '') + ); const manager = managerInstance(); const segments = await manager.fetchSegments(API_key, GRAPHQL_ENDPOINT, USER_KEY, USER_VALUE, SEGMENTS_TO_CHECK); @@ -244,8 +261,7 @@ it('should handle a timeout', async () => { when(mockRequestHandler.makeRequest(anything(), anything(), anything(), anything())).thenReturn({ - abort: () => { - }, + abort: () => {}, responsePromise: Promise.reject(new Error('Request timed out')), }); const manager = managerInstance(); diff --git a/packages/optimizely-sdk/tests/odpSegmentManager.spec.ts b/packages/optimizely-sdk/tests/odpSegmentManager.spec.ts new file mode 100644 index 000000000..91277c5ee --- /dev/null +++ b/packages/optimizely-sdk/tests/odpSegmentManager.spec.ts @@ -0,0 +1,138 @@ +/** + * Copyright 2022, Optimizely + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/// <reference types="jest" /> + +import { mock, resetCalls, instance } from 'ts-mockito'; + +import { LogHandler } from '../lib/modules/logging'; +import { ODP_USER_KEY } from '../lib/utils/enums'; +import { RequestHandler } from '../lib/utils/http_request_handler/http'; + +import { OdpSegmentManager } from '../lib/core/odp/odp_segment_manager'; +import { OdpConfig } from '../lib/core/odp/odp_config'; +import { LRUCache } from '../lib/utils/lru_cache'; +import { OptimizelySegmentOption } from './../lib/core/odp/optimizely_segment_option'; +import { OdpSegmentApiManager } from '../lib/core/odp/odp_segment_api_manager'; + +describe('OdpSegmentManager', () => { + class MockOdpSegmentApiManager extends OdpSegmentApiManager { + public async fetchSegments( + apiKey: string, + apiHost: string, + userKey: ODP_USER_KEY, + userValue: string, + segmentsToCheck: string[] + ): Promise<string[] | null> { + if (apiKey == 'invalid-key') return null; + return segmentsToCheck; + } + } + + const mockLogHandler = mock<LogHandler>(); + const mockRequestHandler = mock<RequestHandler>(); + + let manager: OdpSegmentManager; + let odpConfig: OdpConfig; + const apiManager = new MockOdpSegmentApiManager(instance(mockRequestHandler), instance(mockLogHandler)); + + let options: Array<OptimizelySegmentOption> = []; + + const userKey: ODP_USER_KEY = ODP_USER_KEY.VUID; + const userValue = 'test-user'; + + beforeEach(() => { + resetCalls(mockLogHandler); + resetCalls(mockRequestHandler); + + const API_KEY = 'test-api-key'; + const API_HOST = 'https://odp.example.com'; + odpConfig = new OdpConfig(API_KEY, API_HOST, []); + const segmentsCache = new LRUCache<string, string[]>({ + maxSize: 1000, + timeout: 1000, + }); + + manager = new OdpSegmentManager(odpConfig, segmentsCache, apiManager); + }); + + it('should fetch segments successfully on cache miss.', async () => { + odpConfig.update('host', 'valid', ['new-customer']); + setCache(userKey, '123', ['a']); + + const segments = await
manager.fetchQualifiedSegments(userKey, userValue, options); + expect(segments).toEqual(['new-customer']); + }); + + it('should fetch segments successfully on cache hit.', async () => { + odpConfig.update('host', 'valid', ['new-customer']); + setCache(userKey, userValue, ['a']); + + const segments = await manager.fetchQualifiedSegments(userKey, userValue, options); + expect(segments).toEqual(['a']); + }); + + it('should return null when fetching segments returns an error.', async () => { + odpConfig.update('host', 'invalid-key', ['new-customer']); + + const segments = await manager.fetchQualifiedSegments(userKey, userValue, []); + expect(segments).toBeNull(); + }); + + it('should ignore the cache if the option is included in the options array.', async () => { + odpConfig.update('host', 'valid', ['new-customer']); + setCache(userKey, userValue, ['a']); + options = [OptimizelySegmentOption.IGNORE_CACHE]; + + const segments = await manager.fetchQualifiedSegments(userKey, userValue, options); + expect(segments).toEqual(['new-customer']); + expect(cacheCount()).toBe(1); + }); + + it('should reset the cache if the option is included in the options array.', async () => { + odpConfig.update('host', 'valid', ['new-customer']); + setCache(userKey, userValue, ['a']); + setCache(userKey, '123', ['a']); + setCache(userKey, '456', ['a']); + options = [OptimizelySegmentOption.RESET_CACHE]; + + const segments = await manager.fetchQualifiedSegments(userKey, userValue, options); + expect(segments).toEqual(['new-customer']); + expect(peekCache(userKey, userValue)).toEqual(segments); + expect(cacheCount()).toBe(1); + }); + + it('should make a valid cache key.', () => { + expect('vuid-$-test-user').toBe(manager.makeCacheKey(userKey, userValue)); + }); + + // Utility Functions + + function setCache(userKey: string, userValue: string, value: Array<string>) { + const cacheKey = manager.makeCacheKey(userKey, userValue); + manager.segmentsCache.save({ + key: cacheKey, + value, + }); + } + + function peekCache(userKey: string, userValue: string): Array<string> | null { + const cacheKey = manager.makeCacheKey(userKey, userValue); + return manager.segmentsCache.peek(cacheKey); + } + + const cacheCount = () => manager.segmentsCache.map.size; +});
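
Reviewer note (not part of the diff): a minimal TypeScript sketch of the cache semantics exercised by the new LRUCache and odpSegmentManager.spec.ts above. The keys imitate the "vuid-$-<userValue>" strings produced by makeCacheKey, the import path matches the one used in the spec file, and the maxSize of 2 is chosen purely to demonstrate eviction.

```ts
import { LRUCache } from '../lib/utils/lru_cache';

// Keys mirror OdpSegmentManager.makeCacheKey output; values are qualified-segment lists.
const segmentsCache = new LRUCache<string, string[]>({ maxSize: 2, timeout: 1000 });

segmentsCache.save({ key: 'vuid-$-user-1', value: ['has_email'] });
segmentsCache.save({ key: 'vuid-$-user-2', value: ['push_on_sale'] });

// A third save pushes the map past maxSize, so the oldest entry is evicted.
segmentsCache.save({ key: 'vuid-$-user-3', value: [] });

console.log(segmentsCache.peek('vuid-$-user-1')); // null — evicted
console.log(segmentsCache.peek('vuid-$-user-2')); // ['push_on_sale'] — peek does not reorder entries

// reset() empties the cache; the RESET_CACHE test above relies on this, expecting
// only the freshly fetched entry to remain after the option is passed.
segmentsCache.reset();
console.log(segmentsCache.peek('vuid-$-user-2')); // null
```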