An LRU (Least Recently Used) cache is a key-value store with a fixed size limit that evicts the least recently used entry when a new insertion would exceed capacity.
What is an LRU Cache? #
An LRU cache keeps items ordered by how recently they were used; when it is full, the least recently used item is removed to make room. For example, in a two-entry cache holding keys 1 and 2, reading key 1 and then inserting key 3 evicts key 2, because key 1 was touched more recently.
Operations:
- get(key) - Get value, mark as recently used
- put(key, value) - Add/update value, mark as recently used
- Both operations should be O(1)
Requirements:
- Fixed capacity
- Fast access (O(1))
- Track access order
- Evict LRU item when full
Basic Implementation #
The classic approach pairs a hash map (O(1) lookup by key) with a doubly linked list (O(1) reordering and eviction).
class Node {
  constructor(key, value) {
    this.key = key;
    this.value = value;
    this.prev = null;
    this.next = null;
  }
}

class LRUCache {
  constructor(capacity) {
    this.capacity = capacity;
    this.cache = new Map();
    this.head = new Node(0, 0); // Dummy head
    this.tail = new Node(0, 0); // Dummy tail
    this.head.next = this.tail;
    this.tail.prev = this.head;
  }
  get(key) {
    if (!this.cache.has(key)) {
      return -1;
    }
    const node = this.cache.get(key);
    this.moveToHead(node);
    return node.value;
  }
  put(key, value) {
    if (this.cache.has(key)) {
      const node = this.cache.get(key);
      node.value = value;
      this.moveToHead(node);
    } else {
      const newNode = new Node(key, value);
      this.cache.set(key, newNode);
      this.addToHead(newNode);
      if (this.cache.size > this.capacity) {
        const lru = this.removeTail();
        this.cache.delete(lru.key);
      }
    }
  }
  addToHead(node) {
    node.prev = this.head;
    node.next = this.head.next;
    this.head.next.prev = node;
    this.head.next = node;
  }
  removeNode(node) {
    node.prev.next = node.next;
    node.next.prev = node.prev;
  }
  moveToHead(node) {
    this.removeNode(node);
    this.addToHead(node);
  }
  removeTail() {
    const lru = this.tail.prev;
    this.removeNode(lru);
    return lru;
  }
}
// Usage
const cache = new LRUCache(2);
cache.put(1, 1);
cache.put(2, 2);
console.log(cache.get(1)); // 1
cache.put(3, 3); // Evicts key 2
console.log(cache.get(2)); // -1 (not found)
cache.put(4, 4); // Evicts key 1
console.log(cache.get(1)); // -1
console.log(cache.get(3)); // 3
console.log(cache.get(4)); // 4
Using JavaScript Map #
JavaScript's Map preserves insertion order, so the first key it yields is always the oldest. By deleting and re-inserting a key on every access, the Map's own ordering doubles as the recency list, so no linked list is required.
class LRUCache {
  constructor(capacity) {
    this.capacity = capacity;
    this.cache = new Map();
  }
  get(key) {
    if (!this.cache.has(key)) {
      return -1;
    }
    const value = this.cache.get(key);
    // Move to end (most recent)
    this.cache.delete(key);
    this.cache.set(key, value);
    return value;
  }
  put(key, value) {
    if (this.cache.has(key)) {
      this.cache.delete(key);
    }
    this.cache.set(key, value);
    if (this.cache.size > this.capacity) {
      // First key is least recently used
      const firstKey = this.cache.keys().next().value;
      this.cache.delete(firstKey);
    }
  }
}
const cache = new LRUCache(2);
cache.put(1, 1);
cache.put(2, 2);
console.log(cache.get(1)); // 1
cache.put(3, 3);
console.log(cache.get(2)); // -1
LRU Cache with TTL #
Entries additionally expire after a time-to-live (TTL); expiry is checked lazily when a key is read.
class LRUCacheWithTTL extends LRUCache {
  constructor(capacity, ttl) {
    super(capacity);
    this.ttl = ttl;
  }
  get(key) {
    if (!this.cache.has(key)) {
      return -1;
    }
    const node = this.cache.get(key);
    // Evict lazily if the entry has outlived its TTL
    if (Date.now() - node.timestamp > this.ttl) {
      this.removeNode(node);
      this.cache.delete(key);
      return -1;
    }
    this.moveToHead(node);
    return node.value;
  }
  put(key, value) {
    if (this.cache.has(key)) {
      const node = this.cache.get(key);
      node.value = value;
      node.timestamp = Date.now();
      this.moveToHead(node);
    } else {
      const newNode = new Node(key, value);
      newNode.timestamp = Date.now();
      this.cache.set(key, newNode);
      this.addToHead(newNode);
      if (this.cache.size > this.capacity) {
        const lru = this.removeTail();
        this.cache.delete(lru.key);
      }
    }
  }
  // addToHead, removeNode, moveToHead, removeTail are inherited
  // from the doubly-linked-list LRUCache above
}
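Note that expiration here is lazy: an expired entry lingers until a get touches it. If memory pressure matters, a periodic sweep can evict expired entries eagerly. A minimal sketch, assuming it is added as a method on LRUCacheWithTTL (the prune name is our own, not from the code above):

prune() {
  // Expired entries can sit anywhere in the list (recency order is
  // not insertion order), so walk the whole list from the tail.
  let node = this.tail.prev;
  while (node !== this.head) {
    const prev = node.prev;
    if (Date.now() - node.timestamp > this.ttl) {
      this.removeNode(node);
      this.cache.delete(node.key);
    }
    node = prev;
  }
}
// e.g. setInterval(() => cache.prune(), 1000);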
const cache = new LRUCacheWithTTL(3, 5000); // 5 second TTL
cache.put('a', 1);
console.log(cache.get('a')); // 1
setTimeout(() => {
  console.log(cache.get('a')); // -1 (expired)
}, 6000);
LFU Cache #
An LFU (Least Frequently Used) cache evicts the key with the fewest accesses instead of the least recent one. This implementation keeps three maps: key to value, key to frequency, and frequency to an insertion-ordered Set of keys, with minFreq tracking the lowest frequency currently present. Ties at minFreq are broken by evicting whichever key reached that frequency first.
class LFUCache {
  constructor(capacity) {
    this.capacity = capacity;
    this.minFreq = 0;
    this.keyToVal = new Map();
    this.keyToFreq = new Map();
    this.freqToKeys = new Map();
  }
  get(key) {
    if (!this.keyToVal.has(key)) {
      return -1;
    }
    this.increaseFreq(key);
    return this.keyToVal.get(key);
  }
  put(key, value) {
    if (this.capacity === 0) return;
    if (this.keyToVal.has(key)) {
      this.keyToVal.set(key, value);
      this.increaseFreq(key);
      return;
    }
    if (this.keyToVal.size >= this.capacity) {
      this.removeMinFreqKey();
    }
    this.keyToVal.set(key, value);
    this.keyToFreq.set(key, 1);
    if (!this.freqToKeys.has(1)) {
      this.freqToKeys.set(1, new Set());
    }
    this.freqToKeys.get(1).add(key);
    this.minFreq = 1;
  }
  increaseFreq(key) {
    const freq = this.keyToFreq.get(key);
    this.keyToFreq.set(key, freq + 1);
    this.freqToKeys.get(freq).delete(key);
    if (!this.freqToKeys.has(freq + 1)) {
      this.freqToKeys.set(freq + 1, new Set());
    }
    this.freqToKeys.get(freq + 1).add(key);
    if (this.freqToKeys.get(freq).size === 0) {
      this.freqToKeys.delete(freq);
      if (freq === this.minFreq) {
        this.minFreq++;
      }
    }
  }
  removeMinFreqKey() {
    const keys = this.freqToKeys.get(this.minFreq);
    const keyToRemove = keys.values().next().value;
    keys.delete(keyToRemove);
    if (keys.size === 0) {
      this.freqToKeys.delete(this.minFreq);
    }
    this.keyToVal.delete(keyToRemove);
    this.keyToFreq.delete(keyToRemove);
  }
}
const cache = new LFUCache(2);
cache.put(1, 1);
cache.put(2, 2);
console.log(cache.get(1)); // 1
cache.put(3, 3); // Evicts key 2
console.log(cache.get(2)); // -1
console.log(cache.get(3)); // 3
cache.put(4, 4); // Evicts key 1 (1 and 3 are tied at freq 2; 1 reached it first)
console.log(cache.get(1)); // -1
Multi-Level Cache #
A small, fast L1 cache in front of a larger L2. L2 hits are promoted to L1, and entries evicted from L1 are demoted to L2 rather than dropped; this requires reaching into the internals of the linked-list LRUCache from the basic implementation.
class MultiLevelCache {
  constructor(l1Capacity, l2Capacity) {
    this.l1 = new LRUCache(l1Capacity);
    this.l2 = new LRUCache(l2Capacity);
  }
  get(key) {
    let value = this.l1.get(key);
    if (value !== -1) {
      return value; // L1 hit
    }
    value = this.l2.get(key);
    if (value !== -1) {
      // L2 hit - promote to L1
      this.insertIntoL1(key, value);
      return value;
    }
    return -1; // Miss
  }
  put(key, value) {
    this.insertIntoL1(key, value);
  }
  // Insert into L1, demoting L1's least recently used entry to L2
  // instead of silently dropping it (which is what calling l1.put
  // directly would do once L1 is full).
  insertIntoL1(key, value) {
    if (!this.l1.cache.has(key) && this.l1.cache.size >= this.l1.capacity) {
      const evicted = this.l1.removeTail();
      this.l1.cache.delete(evicted.key); // removeTail only unlinks the node
      this.l2.put(evicted.key, evicted.value);
    }
    this.l1.put(key, value);
    // A stale copy of key may linger in L2; that is harmless because
    // reads check L1 first, and demotion overwrites the L2 entry.
  }
}
const cache = new MultiLevelCache(2, 3);
cache.put(1, 1);
cache.put(2, 2);
cache.put(3, 3); // L1 full: 1 is demoted to L2
console.log(cache.get(1)); // 1 - L2 hit, promoted back to L1 (demoting 2)
console.log(cache.get(2)); // 2 - L2 hit, since 2 was demoted by the promotion above
Cache with Statistics #
Track hit rate and other metrics.
class LRUCacheWithStats extends LRUCache {
  constructor(capacity) {
    super(capacity);
    this.hits = 0;
    this.misses = 0;
  }
  get(key) {
    const value = super.get(key);
    if (value === -1) {
      this.misses++;
    } else {
      this.hits++;
    }
    return value;
  }
  getStats() {
    const total = this.hits + this.misses;
    return {
      hits: this.hits,
      misses: this.misses,
      hitRate: total === 0 ? 0 : this.hits / total,
      size: this.cache.size,
      capacity: this.capacity
    };
  }
  resetStats() {
    this.hits = 0;
    this.misses = 0;
  }
}
const cache = new LRUCacheWithStats(3);
cache.put(1, 'a');
cache.put(2, 'b');
cache.put(3, 'c');
cache.get(1); // Hit
cache.get(2); // Hit
cache.get(4); // Miss
console.log(cache.getStats());
// { hits: 2, misses: 1, hitRate: 0.666... (2/3), size: 3, capacity: 3 }
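In practice you would sample these numbers over time; a persistently low hit rate usually means the capacity is too small for the working set. A sketch (the one-minute interval is an arbitrary choice):

// Log the hit rate periodically to decide whether capacity is adequate
setInterval(() => {
  const { hitRate, size, capacity } = cache.getStats();
  console.log(`hit rate ${(hitRate * 100).toFixed(1)}%, ${size}/${capacity} entries`);
}, 60000);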
Async LRU Cache #
Caches the results of async fetches and deduplicates concurrent requests for the same key.
class AsyncLRUCache {
  constructor(capacity, fetchFn) {
    this.cache = new LRUCache(capacity);
    this.fetchFn = fetchFn;
    this.pending = new Map(); // key -> in-flight Promise
  }
  async get(key) {
    // Check cache (-1 is the miss sentinel, so -1 itself can't be cached)
    const cached = this.cache.get(key);
    if (cached !== -1) {
      return cached;
    }
    // Check if already fetching
    if (this.pending.has(key)) {
      return this.pending.get(key);
    }
    // Fetch data
    const promise = this.fetchFn(key)
      .then(value => {
        this.cache.put(key, value);
        this.pending.delete(key);
        return value;
      })
      .catch(err => {
        // Drop the failed promise so the next get retries the fetch
        this.pending.delete(key);
        throw err;
      });
    this.pending.set(key, promise);
    return promise;
  }
  put(key, value) {
    this.cache.put(key, value);
    this.pending.delete(key);
  }
}
// Usage
async function fetchUser(id) {
  const response = await fetch(`/api/users/${id}`);
  return response.json();
}
const userCache = new AsyncLRUCache(100, fetchUser);
const user = await userCache.get(123);
const user2 = await userCache.get(123); // From cache
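The pending map is what prevents duplicate requests: concurrent get calls for a key that is still being fetched share the same in-flight promise. A quick demonstration (user id 7 is arbitrary):

// fetchUser runs once; both calls resolve with the same object
const [a, b] = await Promise.all([userCache.get(7), userCache.get(7)]);
console.log(a === b); // true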
Cache Invalidation #
Strategies for invalidating stale data.
class InvalidatableCache extends LRUCache {
  constructor(capacity) {
    // Extends the doubly-linked-list LRUCache from the basic implementation
    super(capacity);
    this.tags = new Map(); // key -> Set of tags
    this.tagKeys = new Map(); // tag -> Set of keys
  }
  put(key, value, tags = []) {
    // Note: entries evicted by the base LRU logic keep their tag
    // bookkeeping until invalidated; a production version would hook
    // eviction to clean these maps up.
    super.put(key, value);
    // Store tags
    this.tags.set(key, new Set(tags));
    // Index by tag
    for (const tag of tags) {
      if (!this.tagKeys.has(tag)) {
        this.tagKeys.set(tag, new Set());
      }
      this.tagKeys.get(tag).add(key);
    }
  }
  invalidate(key) {
    if (!this.cache.has(key)) return;
    const node = this.cache.get(key);
    this.removeNode(node);
    this.cache.delete(key);
    // Clean up tags
    if (this.tags.has(key)) {
      for (const tag of this.tags.get(key)) {
        this.tagKeys.get(tag).delete(key);
      }
      this.tags.delete(key);
    }
  }
  invalidateByTag(tag) {
    if (!this.tagKeys.has(tag)) return;
    const keys = Array.from(this.tagKeys.get(tag));
    for (const key of keys) {
      this.invalidate(key);
    }
    this.tagKeys.delete(tag);
  }
  clear() {
    this.cache.clear();
    this.tags.clear();
    this.tagKeys.clear();
    this.head.next = this.tail;
    this.tail.prev = this.head;
  }
}
const cache = new InvalidatableCache(10);
cache.put('user:1', { name: 'Alice' }, ['user', 'user:1']);
cache.put('user:2', { name: 'Bob' }, ['user', 'user:2']);
cache.put('post:1', { title: 'Hello' }, ['post', 'user:1']);
cache.invalidateByTag('user'); // Invalidates user:1, user:2
console.log(cache.get('user:1')); // -1
console.log(cache.get('post:1')); // Still cached
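Tag-based invalidation pairs naturally with writes: after updating a record, invalidate its tag so every entry derived from it goes stale at once. Continuing from the entries above:

// post:1 was tagged with 'user:1', so changing that user invalidates it too
cache.invalidateByTag('user:1');
console.log(cache.get('post:1')); // -1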
Write-Through Cache #
Writes go to the data source first and the cache second, so the two never disagree; reads fall through to the data source on a miss.
class WriteThroughCache {
  constructor(capacity, dataSource) {
    // InvalidatableCache (defined above) provides invalidate() for
    // deletes; a plain LRUCache has no way to remove a single key.
    this.cache = new InvalidatableCache(capacity);
    this.dataSource = dataSource;
  }
  async get(key) {
    const cached = this.cache.get(key);
    if (cached !== -1) {
      return cached;
    }
    const value = await this.dataSource.read(key);
    if (value !== null) {
      this.cache.put(key, value);
    }
    return value;
  }
  async put(key, value) {
    // Write to the source of truth first, then update the cache
    await this.dataSource.write(key, value);
    this.cache.put(key, value);
  }
  async delete(key) {
    await this.dataSource.delete(key);
    this.cache.invalidate(key);
  }
}
// Usage
const db = {
  data: new Map(),
  async read(key) {
    return this.data.get(key) || null;
  },
  async write(key, value) {
    this.data.set(key, value);
  },
  async delete(key) {
    this.data.delete(key);
  }
};
const cache = new WriteThroughCache(100, db);
await cache.put('key', 'value'); // Writes to both cache and DB
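Deletes follow the same discipline, removing the entry from both layers so a later read cannot resurrect stale data:

await cache.delete('key'); // removes from the DB and drops the cached copy
console.log(await cache.get('key')); // null (read-through finds nothing)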
Time Complexity #
| Operation | LRU Cache | LFU Cache |
|---|---|---|
| Get | O(1) | O(1) |
| Put | O(1) | O(1) |
| Space | O(n) | O(n) |
Best Practices #
- Use a doubly linked list - O(1) insertion/deletion
- HashMap for lookup - O(1) key access
- Dummy nodes - simplify edge cases at the ends of the list
- Track capacity - evict when full
- Consider TTL - time-based expiration for data that goes stale
- Add statistics - monitor hit rate to validate cache sizing
- Handle async - deduplicate in-flight fetches
- Invalidation strategy - tags, TTL, or manual
- Thread safety - synchronize access if the cache is shared across workers or processes
- Memory limits - bound total size, not just entry count (see the sketch after this list)
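On the last point: capacity counts entries, not bytes, so a few large values can blow the memory budget. A byte-budgeted variant is a small extension of the Map-based cache above. This is a rough sketch; the SizeBoundedCache name and the JSON-based size estimate are our own choices, not a standard API:

class SizeBoundedCache {
  constructor(maxBytes, sizeOf = v => JSON.stringify(v).length) {
    this.maxBytes = maxBytes;
    this.sizeOf = sizeOf; // rough per-entry cost estimate
    this.totalBytes = 0;
    this.cache = new Map(); // key -> { value, bytes }, insertion-ordered
  }
  get(key) {
    if (!this.cache.has(key)) return -1;
    const entry = this.cache.get(key);
    this.cache.delete(key); // re-insert to mark as most recently used
    this.cache.set(key, entry);
    return entry.value;
  }
  put(key, value) {
    if (this.cache.has(key)) {
      this.totalBytes -= this.cache.get(key).bytes;
      this.cache.delete(key);
    }
    const bytes = this.sizeOf(value);
    this.cache.set(key, { value, bytes });
    this.totalBytes += bytes;
    // Evict from the least recent end until under budget
    // (a single oversized entry is kept rather than thrashing)
    while (this.totalBytes > this.maxBytes && this.cache.size > 1) {
      const oldestKey = this.cache.keys().next().value;
      this.totalBytes -= this.cache.get(oldestKey).bytes;
      this.cache.delete(oldestKey);
    }
  }
}

The lru-cache package on npm takes a similar approach with a user-supplied size calculation.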
An LRU cache is one of the highest-leverage tools for improving performance through caching, and understanding its implementation makes it much easier to tune caching in real-world applications.