diff --git a/.npmignore b/.npmignore
index 646de33..d16e175 100644
--- a/.npmignore
+++ b/.npmignore
@@ -4,6 +4,6 @@
 report
 tmp/
 .env
-dist
 .cache
-.github
\ No newline at end of file
+.github
+docs
\ No newline at end of file
diff --git a/.prettierrc b/.prettierrc
index 0b0eae1..f4eb619 100644
--- a/.prettierrc
+++ b/.prettierrc
@@ -1,5 +1,5 @@
 {
-  "printWidth": 80,
+  "printWidth": 100,
   "tabWidth": 2,
   "useTabs": false,
   "semi": true,
diff --git a/README.md b/README.md
index 93952f7..09db69f 100644
--- a/README.md
+++ b/README.md
@@ -34,6 +34,38 @@ const cache = createCache({
 });
 ```
 
+#### Engines and options
+
+| Engine name          | Key | Supported options     |
+| -------------------- | --- | --------------------- |
+| Least Recently Used  | LRU | HashTable, size       |
+| Time To Live         | TTL | HashTable, defaultTTL |
+
+### Things to know about the TTL engine
+
+Creating a TTL cache instance. See [how it works and the architecture](./docs/ttl-engine.md).
+
+```javascript
+import { createCache } from 'node-cache-engine';
+
+const cache = createCache({
+  engine: 'TTL',
+  HashTable: YourCustomHashTable, // optional custom hash table; the default is 'src/dataStructure/HashTable.js'
+  defaultTTL: 3600, // in milliseconds; optional, used when no ttl value is passed on add
+});
+
+const ttl = 5 * 60 * 1000; // 5 minutes; ttl should be in milliseconds
+
+cache.add('key', 'value', ttl); // add to the cache (parameters: key, value, ttl)
+cache.get('key'); // get from the cache
+cache.has('key'); // check whether the key exists in the cache
+cache.remove('key'); // remove from the cache
+cache.size(); // get the size of the cache
+cache.runGC(); // manually clean up expired items
+
+```
+
+
 ### Creating Custom HashTable
 
 When and Why you should create custom hash table? The default hash table implemented with `Map`. If you want much more performance than default you can implement your own (like node wrapped c++ hash table). I think 1 to 5 million cache entry default hash table is fine if your use case is more than this go for custom hash table.
@@ -42,6 +74,5 @@ To implement custom hashTable you have to use methods with symbols name provided
 
 #### Next?
 
-* TTL engine.
-* TTL combining with LRU engine
+* TTL engine combining with LRU engine
+* LFU (Least frequently used) engine.
diff --git a/babel.config.js b/babel.config.js
index 2d0288b..f3cab26 100644
--- a/babel.config.js
+++ b/babel.config.js
@@ -9,5 +9,8 @@ module.exports = {
       },
     ],
   ],
-  plugins: [['@babel/plugin-proposal-optional-chaining']],
+  plugins: [
+    ['@babel/plugin-proposal-optional-chaining'],
+    ['@babel/plugin-transform-destructuring'],
+  ],
 };
diff --git a/docs/images/ttl-arct.png b/docs/images/ttl-arct.png
new file mode 100644
index 0000000..e44fa9d
Binary files /dev/null and b/docs/images/ttl-arct.png differ
diff --git a/docs/ttl-engine.md b/docs/ttl-engine.md
new file mode 100644
index 0000000..b7c2c1c
--- /dev/null
+++ b/docs/ttl-engine.md
@@ -0,0 +1,9 @@
+# TTL cache engine Architecture
+
+One of the challenges with TTL cache replacement is cleaning up expired items. To keep cleanup cheap, every item that is added gets partitioned by its ttl/expiry value and placed into the corresponding time bucket.
+Whenever the get method is called, it checks whether the element exists and whether it has expired. If it has expired, the item is removed and the earlier buckets are cleaned.
+There is also a `runGC()` method on the TTL cache. It cleans the buckets between the last cleaned time and now.
+The TTL engine does not run `runGC()` automatically or on an interval.
+Because of the expiry-time partitioning we do not need to iterate over all items to clean up. See the image below for more details on the architecture.
+
+![](./images/ttl-arct.png)
\ No newline at end of file
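To make the bucketing described in `docs/ttl-engine.md` concrete, here is a small illustrative sketch — not the engine's actual code; helper names such as `bucketIndexFor`, `trackExpiry`, and `dropExpiredBuckets` are made up for the example — of how expiry timestamps map to interval-sized buckets and why cleanup can drop whole buckets instead of scanning every item:

```javascript
// Illustrative only: expiry-time partitioning with 5-minute buckets.
const INTERVAL = 5 * 60 * 1000;

// Round an expiry timestamp up to the next interval boundary; every item
// expiring inside the same window lands in the same bucket.
function bucketIndexFor(expiresAt, interval = INTERVAL) {
  return Math.floor(expiresAt / interval) * interval + interval;
}

const buckets = new Map(); // bucket boundary (ms) -> Set of keys

function trackExpiry(key, expiresAt) {
  const index = bucketIndexFor(expiresAt);
  if (!buckets.has(index)) buckets.set(index, new Set());
  buckets.get(index).add(key);
}

// Cleanup never inspects individual items: a bucket whose boundary is already
// in the past can only contain expired keys, so it is dropped wholesale.
function dropExpiredBuckets(now = Date.now()) {
  for (const [index, keys] of buckets) {
    if (index <= now) {
      keys.forEach(key => console.log('expired:', key));
      buckets.delete(index);
    }
  }
}

trackExpiry('apple', Date.now() + 1000);
dropExpiredBuckets(Date.now() + INTERVAL * 2); // logs: expired: apple
```

This is also why `runGC()` only has to walk the bucket boundaries between the last cleaned index and now, rather than every cached entry.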
diff --git a/package-lock.json b/package-lock.json
index df4db63..6ffe9d6 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -916,12 +916,20 @@
       }
     },
     "@babel/plugin-transform-destructuring": {
-      "version": "7.8.8",
-      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.8.8.tgz",
-      "integrity": "sha512-eRJu4Vs2rmttFCdhPUM3bV0Yo/xPSdPw6ML9KHs/bjB4bLA5HXlbvYXPOD5yASodGod+krjYx21xm1QmL8dCJQ==",
+      "version": "7.10.1",
+      "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.10.1.tgz",
+      "integrity": "sha512-V/nUc4yGWG71OhaTH705pU8ZSdM6c1KmmLP8ys59oOYbT7RpMYAR3MsVOt6OHL0WzG7BlTU076va9fjJyYzJMA==",
       "dev": true,
       "requires": {
-        "@babel/helper-plugin-utils": "^7.8.3"
+        "@babel/helper-plugin-utils": "^7.10.1"
+      },
+      "dependencies": {
+        "@babel/helper-plugin-utils": {
+          "version": "7.10.3",
+          "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.3.tgz",
+          "integrity": "sha512-j/+j8NAWUTxOtx4LKHybpSClxHoq6I91DQ/mKgAXn5oNUPIUiGppjPIX3TDtJWPrdfP9Kfl7e4fgVMiQR9VE/g==",
+          "dev": true
+        }
       }
     },
     "@babel/plugin-transform-dotall-regex": {
@@ -8591,6 +8599,12 @@
         "minimist": "^1.2.5"
       }
     },
+    "mockdate": {
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/mockdate/-/mockdate-3.0.2.tgz",
+      "integrity": "sha512-ldfYSUW1ocqSHGTK6rrODUiqAFPGAg0xaHqYJ5tvj1hQyFsjuHpuWgWFTZWwDVlzougN/s2/mhDr8r5nY5xDpA==",
+      "dev": true
+    },
     "module-details-from-path": {
       "version": "1.0.3",
       "resolved": "https://registry.npmjs.org/module-details-from-path/-/module-details-from-path-1.0.3.tgz",
@@ -12073,9 +12087,9 @@
       }
     },
     "yargs-parser": {
-      "version": "18.1.1",
-      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.1.tgz",
-      "integrity": "sha512-KRHEsOM16IX7XuLnMOqImcPNbLVXMNHYAoFc3BKR8Ortl5gzDbtXvvEoGx9imk5E+X1VeNKNlcHr8B8vi+7ipA==",
+      "version": "18.1.3",
+      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz",
+      "integrity": "sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==",
       "dev": true,
       "requires": {
         "camelcase": "^5.0.0",
diff --git a/package.json b/package.json
index 8b78f8c..dc2dc63 100644
--- a/package.json
+++ b/package.json
@@ -48,6 +48,7 @@
     "@babel/core": "^7.9.0",
     "@babel/node": "^7.8.7",
     "@babel/plugin-proposal-optional-chaining": "^7.9.0",
+    "@babel/plugin-transform-destructuring": "^7.10.1",
     "@babel/preset-env": "^7.9.0",
     "babel-eslint": "^10.1.0",
     "babel-jest": "^25.1.0",
@@ -57,6 +58,7 @@
     "jest": "^25.1.0",
     "lint-staged": "^10.0.8",
     "microbundle": "^0.12.0",
+    "mockdate": "^3.0.2",
     "pre-commit": "^1.2.2",
     "prettier": "^1.19.1"
   }
 }
diff --git a/src/cache.js b/src/cache.js
index d11674a..b2d2fef 100644
--- a/src/cache.js
+++ b/src/cache.js
@@ -1,18 +1,20 @@
 import DefaultHashTable from './dataStructure/HashTable';
 import LRU from './engines/LeastRecentlyUsed';
+import TTL from './engines/TimeToLive';
 
 function factory({
   size = Number.MAX_SAFE_INTEGER,
   engine = 'LRU',
   HashTable = DefaultHashTable,
+  defaultTTL,
 } = {}) {
   switch (engine) {
     case 'LRU':
       return new LRU({ size, HashTable });
+    case 'TTL':
+      return new TTL({ HashTable, defaultTTL });
     default:
-      throw Error(
-        `Engine : ${engine} is not implemented. Currently we have only 'LRU' engine.`,
-      );
+      throw Error(`Engine : ${engine} is not implemented. Engine options are 'LRU', 'TTL'`);
   }
 }
 
diff --git a/src/cache.test.js b/src/cache.test.js
index d3ab840..0bef32f 100644
--- a/src/cache.test.js
+++ b/src/cache.test.js
@@ -1,5 +1,6 @@
 import createCache from './cache';
 import LRU from './engines/LeastRecentlyUsed';
+import TTL from './engines/TimeToLive';
 
 describe('cache factory', () => {
   it('should create a default cache instance of LRU if no engine is mentioned', () => {
@@ -12,6 +13,11 @@
     expect(cache1 instanceof LRU).toBe(true);
   });
 
+  it('should create a TTL cache instance', () => {
+    const cache1 = createCache({ engine: 'TTL' });
+    expect(cache1 instanceof TTL).toBe(true);
+  });
+
   it('should throw error if engine type is not implemented', () => {
     expect(() => createCache({ engine: 'NOT_HERE' })).toThrow(
       `Engine : NOT_HERE is not implemented`,
diff --git a/src/engines/TimeToLive.js b/src/engines/TimeToLive.js
new file mode 100644
index 0000000..bf2d8f1
--- /dev/null
+++ b/src/engines/TimeToLive.js
@@ -0,0 +1,149 @@
+import DefaultHashTable from '../dataStructure/HashTable';
+import DoublyLinkedList from '../dataStructure/DoublyLinkedList';
+import * as hashTableProp from '../hashTableSymbol';
+
+function TimeToLive({ HashTable = DefaultHashTable, defaultTTL } = {}) {
+  const store = new HashTable();
+  const timePartition = new HashTable();
+  const timeIndexInterval = 5 * 60 * 1000; // milliseconds.
+
+  let lowestTimePartition = Date.now();
+
+  this.add = (key, value, ttl = defaultTTL) => {
+    if (!ttl || !Number.isInteger(ttl) || ttl <= 0)
+      throw Error(
+        'Expected a ttl value (should be a positive integer). ' +
+          'Pass it to the add method or set defaultTTL in the constructor.',
+      );
+
+    const expireTTL = Date.now() + ttl;
+    const bucket = getTimeBucket(expireTTL);
+    bucket.addFirst(key);
+    const tNode = bucket.getFirstNode();
+    const payload = { value, ttl: expireTTL, tNode };
+    store[hashTableProp.add](key, payload);
+  };
+
+  this.get = key => {
+    const payload = store[hashTableProp.get](key);
+    if (payload) {
+      const { ttl, value } = payload;
+      if (checkIfElementExpire({ ttl, key })) return undefined;
+      else return value;
+    }
+    return undefined;
+  };
+
+  this.has = key => {
+    return (
+      store[hashTableProp.has](key) &&
+      !checkIfElementExpire({ ttl: store[hashTableProp.get](key).ttl, key })
+    );
+  };
+
+  this.remove = key => {
+    if (store[hashTableProp.has](key)) {
+      const { ttl, tNode } = store[hashTableProp.get](key);
+      const timeBucket = getTimeBucket(ttl);
+      timeBucket.remove(tNode);
+      store[hashTableProp.remove](key);
+    }
+  };
+
+  this.size = () => {
+    return store[hashTableProp.size]();
+  };
+
+  this.runGC = () => {
+    const cleanTo = getBackwardTimeIndex({ time: Date.now(), interval: timeIndexInterval });
+    cleanExpiredBuckets(cleanTo);
+
+    const nextCleanBucket = getForwardTimeIndex({ time: Date.now(), interval: timeIndexInterval });
+    cleanNotExpiredBucket(nextCleanBucket);
+  };
+
+  function getTimeBucket(expireTTL) {
+    const timeIndex = getForwardTimeIndex({
+      time: expireTTL,
+      interval: timeIndexInterval,
+    });
+
+    if (timePartition[hashTableProp.has](timeIndex)) {
+      return timePartition[hashTableProp.get](timeIndex);
+    } else {
+      const list = new DoublyLinkedList();
+      timePartition[hashTableProp.add](timeIndex, list);
+      return list;
+    }
+  }
+
+  const checkIfElementExpire = ({ ttl, key }) => {
+    if (ttl < Date.now()) {
+      const timeIndex = getBackwardTimeIndex({
+        time: ttl,
+        interval: timeIndexInterval,
+      });
+      this.remove(key);
+      cleanExpiredBuckets(timeIndex);
+      return true;
+    }
+    return false;
+  };
+
+  function cleanExpiredBucket(timeIndex) {
+    if (timePartition[hashTableProp.has](timeIndex)) {
+      const tNodes = timePartition[hashTableProp.get](timeIndex);
+      for (const tNode of tNodes) {
+        store[hashTableProp.remove](tNode.value);
+      }
+      timePartition[hashTableProp.remove](timeIndex);
+    }
+  }
+
+  function cleanExpiredBuckets(tillTimeIndex) {
+    const cleanFrom = getForwardTimeIndex({
+      time: lowestTimePartition,
+      interval: timeIndexInterval,
+    });
+
+    for (const curTimeIndex of getIndexBetween({
+      from: cleanFrom,
+      to: tillTimeIndex,
+      interval: timeIndexInterval,
+    })) {
+      cleanExpiredBucket(curTimeIndex);
+    }
+    lowestTimePartition = tillTimeIndex;
+  }
+
+  function cleanNotExpiredBucket(timeIndex) {
+    if (timePartition[hashTableProp.has](timeIndex)) {
+      const tNodes = timePartition[hashTableProp.get](timeIndex);
+      for (const { value: key } of tNodes) {
+        const { ttl } = store[hashTableProp.get](key);
+        if (ttl < Date.now()) {
+          store[hashTableProp.remove](key);
+        }
+      }
+    }
+  }
+}
+
+// time : unix timestamp milliseconds
+// interval : milliseconds (ideally divides evenly into 60 minutes)
+function getForwardTimeIndex({ time, interval }) {
+  const timeParts = (time / interval) | 0;
+  return timeParts * interval + interval;
+}
+
+function getBackwardTimeIndex({ time, interval }) {
+  const timeParts = (time / interval) | 0;
+  return timeParts * interval;
+}
+
+function* getIndexBetween({ from, to, interval }) {
+  for (let i = from; i <= to; i += interval) yield i;
+}
+
+export { getForwardTimeIndex, getBackwardTimeIndex, getIndexBetween };
+export default TimeToLive;
diff --git a/src/engines/TimeToLive.test.js b/src/engines/TimeToLive.test.js
new file mode 100644
index 0000000..a82e7d3
--- /dev/null
+++ b/src/engines/TimeToLive.test.js
@@ -0,0 +1,135 @@
+import MockDate from 'mockdate';
+
+import TimeToLive, {
+  getForwardTimeIndex,
+  getBackwardTimeIndex,
+  getIndexBetween,
+} from './TimeToLive';
+
+const toMinute = val => val * 60 * 1000;
+
+describe('TimeToLive (TTL) : getForwardTimeIndex', () => {
+  it('should index time to the next interval boundary', () => {
+    const date1 = new Date('2020-06-14T03:23:34');
+    expect(getForwardTimeIndex({ time: date1.getTime(), interval: toMinute(5) })).toBe(
+      new Date('2020-06-14T03:25:00').getTime(),
+    );
+
+    const date2 = new Date('2020-06-14T03:43:36');
+    expect(getForwardTimeIndex({ time: date2.getTime(), interval: toMinute(10) })).toBe(
+      new Date('2020-06-14T03:50:00').getTime(),
+    );
+
+    const date3 = new Date('2020-06-14T03:43:36');
+    expect(getForwardTimeIndex({ time: date3.getTime(), interval: toMinute(3) })).toBe(
+      new Date('2020-06-14T03:45:00').getTime(),
+    );
+  });
+});
+
+describe('TimeToLive (TTL) : getBackwardTimeIndex', () => {
+  it('should index time to the previous interval boundary', () => {
+    const date1 = new Date('2020-06-14T03:23:34');
+    expect(getBackwardTimeIndex({ time: date1.getTime(), interval: toMinute(5) })).toBe(
+      new Date('2020-06-14T03:20:00').getTime(),
+    );
+
+    const date2 = new Date('2020-06-14T03:43:36');
+    expect(getBackwardTimeIndex({ time: date2.getTime(), interval: toMinute(10) })).toBe(
+      new Date('2020-06-14T03:40:00').getTime(),
+    );
+  });
+});
+
+describe('TimeToLive (TTL) : getIndexBetween', () => {
+  it('should generate index between from and to', () => {
+    const interval = 5 * 60 * 1000;
+    const date1 = new Date('2020-06-14T03:23:34');
+    const date2 = new Date('2020-06-14T03:36:34');
+    const from = getForwardTimeIndex({ time: date1.getTime(), interval });
+    const to = getBackwardTimeIndex({ time: date2.getTime(), interval });
+
+    expect([...getIndexBetween({ from, to, interval })]).toEqual([
+      new Date('2020-06-14T03:25:00').getTime(),
+      new Date('2020-06-14T03:30:00').getTime(),
+      new Date('2020-06-14T03:35:00').getTime(),
+    ]);
+  });
+});
+
+describe('TimeToLive', () => {
+  it('should have basic cache features', () => {
+    const ttlCache = new TimeToLive();
+    ttlCache.add('apple', 5, 1000);
+    ttlCache.add('orange', 2, 2000);
+    expect(ttlCache.get('apple')).toBe(5);
+    expect(ttlCache.has('apple')).toBe(true);
+    expect(ttlCache.size()).toBe(2);
+
+    ttlCache.remove('apple');
+    expect(ttlCache.size()).toBe(1);
+    expect(ttlCache.get('apple')).toBe(undefined);
+    expect(ttlCache.get('orange')).toBe(2);
+  });
+
+  it('should not return expired item', () => {
+    MockDate.set(new Date('2020-06-25T03:25:00'));
+    const ttlCache = new TimeToLive();
+    ttlCache.add('ben', 5, 5000);
+    expect(ttlCache.get('ben')).toBe(5);
+
+    MockDate.set(new Date('2020-06-25T03:26:00'));
+    expect(ttlCache.get('ben')).toBe(undefined);
+    expect(ttlCache.size()).toBe(0);
+
+    ttlCache.add('mango', 10, 180000);
+    ttlCache.add('grape', 33, 660000);
+
+    MockDate.set(new Date('2020-06-25T03:45:00'));
+    expect(ttlCache.get('grape')).toBe(undefined);
+    expect(ttlCache.size()).toBe(0);
+
+    MockDate.reset();
+  });
+
+  it('should clean expired items when runGC is called', () => {
+    MockDate.set(new Date('2020-06-25T03:24:00'));
+    const ttlCache = new TimeToLive();
+
+    ttlCache.add('test1', 5, 180000); // 3 min
+    ttlCache.add('test2', 2, 660000); // 11 min
+    ttlCache.add('test3', 60, 900000); // 15 min
+    ttlCache.add('test4', 43, 1080000); // 18 min
+    expect(ttlCache.size()).toBe(4);
+
+    MockDate.set(new Date('2020-06-25T03:40:00'));
+    ttlCache.runGC();
+    expect(ttlCache.size()).toBe(1);
+  });
+});
+
+describe('TimeToLive error test', () => {
+  it('should throw an error if a ttl value is not given', () => {
+    const ttlCache = new TimeToLive();
+    expect(() => ttlCache.add('key1', 'value')).toThrow(
+      'Expected a ttl value (should be a positive integer). ' +
+        'Pass it to the add method or set defaultTTL in the constructor.',
+    );
+
+    expect(() => ttlCache.add('key1', 'value', 0)).toThrow(
+      'Expected a ttl value (should be a positive integer). ' +
+        'Pass it to the add method or set defaultTTL in the constructor.',
+    );
+
+    expect(() => ttlCache.add('key1', 'value', -1)).toThrow(
+      'Expected a ttl value (should be a positive integer). ' +
+        'Pass it to the add method or set defaultTTL in the constructor.',
+    );
+  });
+
+  it('should not throw an error if a ttl value is not given but defaultTTL is defined', () => {
+    const ttlCache = new TimeToLive({ defaultTTL: 5000 });
+    ttlCache.add('key1', 'value');
+    expect(ttlCache.get('key1')).toBe('value');
+  });
+});
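As a companion to the README's "Creating Custom HashTable" section: judging from how `src/engines/TimeToLive.js` consumes the table in this diff (a no-argument constructor plus symbol-keyed `add`, `get`, `has`, `remove`, and `size` methods from `src/hashTableSymbol`), a custom implementation might look roughly like the sketch below. The symbol list and the import path are inferred from this diff rather than a documented contract, and `MapHashTable` is a made-up name.

```javascript
// Hypothetical drop-in hash table backed by a plain Map.
import * as hashTableProp from './src/hashTableSymbol';
import { createCache } from 'node-cache-engine';

class MapHashTable {
  constructor() {
    this.map = new Map();
  }
  [hashTableProp.add](key, value) {
    this.map.set(key, value);
  }
  [hashTableProp.get](key) {
    return this.map.get(key);
  }
  [hashTableProp.has](key) {
    return this.map.has(key);
  }
  [hashTableProp.remove](key) {
    this.map.delete(key);
  }
  [hashTableProp.size]() {
    return this.map.size;
  }
}

// Plugged into the factory the same way the README examples show:
const cache = createCache({ engine: 'TTL', HashTable: MapHashTable, defaultTTL: 3600 });
cache.add('key', 'value');
```

A real custom table would swap the `Map` for something faster (for example a native C++ addon), but the symbol-keyed interface stays the same.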