Building a Simple In-Memory Cache with TTL
TL;DR
Cache = store expensive results in memory. Add TTL to expire old data. Use LRU to limit memory. Check the cache before every expensive operation. Expect a 10x-100x speedup on repeated reads.
Our homepage was slow. Every page load queried the database for user counts, stats, and trending posts. Same data, queried 1000 times per minute. Database CPU at 80%.
I added a 60-second cache. Same queries now hit memory. Database CPU dropped to 12%. Page load time: 800ms → 120ms. One hour of work.
Here's how to build a simple, production-ready cache in 50 lines of code.
What Is a Cache?
A cache stores the results of expensive operations in fast memory:
// Without cache - slow
app.get('/stats', async (req, res) => {
const stats = await db.query('SELECT COUNT(*) FROM users'); // 200ms
res.json(stats);
});
// With cache - fast
const cache = {};
app.get('/stats', async (req, res) => {
if (cache.stats) {
return res.json(cache.stats); // 0.1ms - 2000x faster!
}
const stats = await db.query('SELECT COUNT(*) FROM users'); // 200ms
cache.stats = stats;
res.json(stats);
});
When to cache:
- Database queries (expensive)
- API calls (slow, rate-limited)
- Computed results (CPU-intensive)
- Static data (doesn't change often)
The Simplest Cache
class SimpleCache {
constructor() {
this.cache = {};
}
set(key, value) {
this.cache[key] = value;
}
get(key) {
return this.cache[key];
}
has(key) {
return key in this.cache;
}
delete(key) {
delete this.cache[key];
}
clear() {
this.cache = {};
}
}
// Usage
const cache = new SimpleCache();
async function getUser(id) {
const cacheKey = `user:${id}`;
if (cache.has(cacheKey)) {
return cache.get(cacheKey); // Return cached
}
const user = await db.query('SELECT * FROM users WHERE id = ?', [id]);
cache.set(cacheKey, user);
return user;
}
Two problems: the data never expires, so readers can get stale results, and the cache grows forever.
Adding TTL (Time To Live)
Expire cached data after a time period:
class CacheWithTTL {
constructor() {
this.cache = new Map();
}
set(key, value, ttlMs = 60000) {
const expiresAt = Date.now() + ttlMs;
this.cache.set(key, { value, expiresAt });
}
get(key) {
const item = this.cache.get(key);
if (!item) {
return undefined; // Not found
}
if (Date.now() > item.expiresAt) {
this.cache.delete(key); // Expired
return undefined;
}
return item.value; // Valid
}
has(key) {
return this.get(key) !== undefined;
}
delete(key) {
this.cache.delete(key);
}
clear() {
this.cache.clear();
}
}
// Usage
const cache = new CacheWithTTL();
async function getStats() {
const cacheKey = 'stats';
const cached = cache.get(cacheKey);
if (cached !== undefined) { // explicit check: a falsy cached value is still a hit
return cached;
}
const stats = await db.query('SELECT COUNT(*) FROM users');
cache.set(cacheKey, stats, 60000); // Cache for 60 seconds
return stats;
}
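A quick illustration of the expiry behavior (the values and timings are arbitrary):
const demo = new CacheWithTTL();
demo.set('greeting', 'hello', 100); // expires in 100ms
console.log(demo.get('greeting')); // 'hello'
setTimeout(() => {
  console.log(demo.get('greeting')); // undefined - expired and removed
}, 150);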
Automatic Cleanup
Remove expired items periodically:
class CacheWithTTL {
constructor(cleanupIntervalMs = 60000) {
this.cache = new Map();
// Clean up expired items every minute
this.cleanupInterval = setInterval(() => {
this.cleanup();
}, cleanupIntervalMs);
}
cleanup() {
const now = Date.now();
let removed = 0;
for (const [key, item] of this.cache.entries()) {
if (now > item.expiresAt) {
this.cache.delete(key);
removed++;
}
}
if (removed > 0) {
console.log(`Cache cleanup: removed ${removed} expired items`);
}
}
destroy() {
clearInterval(this.cleanupInterval);
this.cache.clear();
}
// ... rest of methods
}
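One caveat: a live setInterval keeps a Node.js process from exiting. A small sketch of how you might handle that (unref() is Node-specific, and the SIGTERM hook is just one possible shutdown strategy):
const cache = new CacheWithTTL(60000);
// Let the process exit even if the cleanup timer is still scheduled
cache.cleanupInterval.unref?.();
// Stop the timer and release memory on shutdown
process.on('SIGTERM', () => cache.destroy());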
LRU Cache (Least Recently Used)
Limit memory by evicting the least recently used items once the cache is full:
class LRUCache {
constructor(maxSize = 100) {
this.maxSize = maxSize;
this.cache = new Map(); // Map maintains insertion order
}
get(key) {
if (!this.cache.has(key)) {
return undefined;
}
// Move to end (most recently used)
const value = this.cache.get(key);
this.cache.delete(key);
this.cache.set(key, value);
return value;
}
set(key, value) {
// Remove if exists (will re-add at end)
if (this.cache.has(key)) {
this.cache.delete(key);
}
// Add to end
this.cache.set(key, value);
// Remove oldest if over limit
if (this.cache.size > this.maxSize) {
const oldestKey = this.cache.keys().next().value;
this.cache.delete(oldestKey);
}
}
has(key) {
return this.cache.has(key);
}
delete(key) {
this.cache.delete(key);
}
clear() {
this.cache.clear();
}
size() {
return this.cache.size;
}
}
// Usage
const cache = new LRUCache(1000); // Max 1000 items
async function getUser(id) {
const cacheKey = `user:${id}`;
if (cache.has(cacheKey)) {
return cache.get(cacheKey);
}
const user = await db.query('SELECT * FROM users WHERE id = ?', [id]);
cache.set(cacheKey, user);
return user;
}
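To see the eviction order concretely (illustrative values):
const lru = new LRUCache(2);
lru.set('a', 1);
lru.set('b', 2);
lru.get('a');              // touching 'a' makes it most recently used
lru.set('c', 3);           // over capacity: evicts 'b', the least recently used
console.log(lru.has('b')); // false
console.log(lru.has('a')); // true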
LRU Cache with TTL (Best of Both)
Combine LRU eviction with TTL expiration:
class LRUCacheWithTTL {
constructor(maxSize = 100, defaultTTL = 60000) {
this.maxSize = maxSize;
this.defaultTTL = defaultTTL;
this.cache = new Map();
}
get(key) {
if (!this.cache.has(key)) {
return undefined;
}
const item = this.cache.get(key);
// Check expiration
if (Date.now() > item.expiresAt) {
this.cache.delete(key);
return undefined;
}
// Move to end (most recently used)
this.cache.delete(key);
this.cache.set(key, item);
return item.value;
}
set(key, value, ttl = this.defaultTTL) {
const expiresAt = Date.now() + ttl;
// Remove if exists
if (this.cache.has(key)) {
this.cache.delete(key);
}
// Add to end
this.cache.set(key, { value, expiresAt });
// Evict oldest if over limit
if (this.cache.size > this.maxSize) {
const oldestKey = this.cache.keys().next().value;
this.cache.delete(oldestKey);
}
}
has(key) {
return this.get(key) !== undefined;
}
delete(key) {
this.cache.delete(key);
}
clear() {
this.cache.clear();
}
size() {
return this.cache.size;
}
// Get stats
stats() {
return {
size: this.cache.size,
maxSize: this.maxSize,
utilization: (this.cache.size / this.maxSize * 100).toFixed(1) + '%'
};
}
}
// Usage
const cache = new LRUCacheWithTTL(1000, 60000); // 1000 items, 60s TTL
async function getUser(id) {
const cacheKey = `user:${id}`;
const cached = cache.get(cacheKey);
if (cached !== undefined) {
return cached;
}
const user = await db.query('SELECT * FROM users WHERE id = ?', [id]);
cache.set(cacheKey, user, 300000); // Cache for 5 minutes
return user;
}
Cache Helper Function
Wrap cache logic in a helper:
async function cached(key, fetchFn, ttl = 60000) {
const cached = cache.get(key);
if (cached !== undefined) {
return cached;
}
const value = await fetchFn();
cache.set(key, value, ttl);
return value;
}
// Usage
const stats = await cached('stats', async () => {
return await db.query('SELECT COUNT(*) FROM users');
}, 60000);
const user = await cached(`user:${id}`, async () => {
return await db.query('SELECT * FROM users WHERE id = ?', [id]);
}, 300000);
Cache Decorator
function cache(ttl = 60000) {
const cache = new LRUCacheWithTTL(1000, ttl);
return function(target, propertyKey, descriptor) {
const originalMethod = descriptor.value;
descriptor.value = async function(...args) {
const cacheKey = `${propertyKey}:${JSON.stringify(args)}`;
const cached = cache.get(cacheKey);
if (cached !== undefined) {
return cached;
}
const result = await originalMethod.apply(this, args);
cache.set(cacheKey, result, ttl);
return result;
};
return descriptor;
};
}
// Usage
class UserService {
@cache(60000) // Cache for 60 seconds
async getUser(id) {
return await db.query('SELECT * FROM users WHERE id = ?', [id]);
}
@cache(300000) // Cache for 5 minutes
async getStats() {
return await db.query('SELECT COUNT(*) FROM users');
}
}
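A caveat: @decorator syntax isn't plain JavaScript - it needs TypeScript's experimentalDecorators option or a Babel decorators plugin. Without a build step, a plain higher-order function gives the same effect. A minimal sketch (memoizeWithTTL is an illustrative name, not a library API):
function memoizeWithTTL(fn, ttl = 60000, name = fn.name) {
  const cache = new LRUCacheWithTTL(1000, ttl);
  return async function (...args) {
    // Same key scheme as the decorator: method name plus serialized args
    const cacheKey = `${name}:${JSON.stringify(args)}`;
    const cached = cache.get(cacheKey);
    if (cached !== undefined) {
      return cached;
    }
    const result = await fn.apply(this, args);
    cache.set(cacheKey, result, ttl);
    return result;
  };
}
// Usage
const getUser = memoizeWithTTL(
  (id) => db.query('SELECT * FROM users WHERE id = ?', [id]),
  60000,
  'getUser'
);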
Middleware Pattern
function cacheMiddleware(cache, getTTL) {
return (req, res, next) => {
const cacheKey = req.originalUrl;
const cached = cache.get(cacheKey);
if (cached !== undefined) {
return res.json(cached);
}
// Intercept res.json to cache response
const originalJson = res.json.bind(res);
res.json = (data) => {
const ttl = getTTL(req, data);
if (ttl > 0) {
cache.set(cacheKey, data, ttl);
}
return originalJson(data);
};
next();
};
}
// Usage
const cache = new LRUCacheWithTTL(1000);
app.get('/api/stats',
cacheMiddleware(cache, () => 60000), // Cache for 60s
async (req, res) => {
const stats = await db.query('SELECT COUNT(*) FROM users');
res.json(stats);
}
);
app.get('/api/user/:id',
cacheMiddleware(cache, () => 300000), // Cache for 5 minutes
async (req, res) => {
const user = await db.query('SELECT * FROM users WHERE id = ?', [req.params.id]);
res.json(user);
}
);
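Note: this middleware keys on the URL alone, so only use it on routes whose response is the same for every user. For authenticated, per-user responses, include the user ID in the cache key (see "When NOT to Cache" below).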
Cache Invalidation
class CacheWithInvalidation extends LRUCacheWithTTL {
// Invalidate by pattern
invalidatePattern(pattern) {
const regex = new RegExp(pattern);
let removed = 0;
for (const key of this.cache.keys()) {
if (regex.test(key)) {
this.cache.delete(key);
removed++;
}
}
return removed;
}
// Invalidate by tag
setWithTags(key, value, tags = [], ttl) {
super.set(key, value, ttl);
// Store tag associations
if (!this.tags) this.tags = new Map();
for (const tag of tags) {
if (!this.tags.has(tag)) {
this.tags.set(tag, new Set());
}
this.tags.get(tag).add(key);
}
}
invalidateTag(tag) {
if (!this.tags || !this.tags.has(tag)) {
return 0;
}
const keys = this.tags.get(tag);
for (const key of keys) {
this.cache.delete(key);
}
const count = keys.size;
this.tags.delete(tag);
return count;
}
}
// Usage
const cache = new CacheWithInvalidation(1000);
// Cache with tags
cache.setWithTags('user:123', userData, ['user', 'user:123'], 60000);
cache.setWithTags('posts:user:123', posts, ['posts', 'user:123'], 60000);
// Invalidate all user-related cache
cache.invalidateTag('user:123');
// Invalidate by pattern
cache.set('user:1', user1);
cache.set('user:2', user2);
cache.set('post:1', post1);
cache.invalidatePattern('^user:'); // Removes user:1, user:2
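One gap in the sketch above: when a tagged key expires or is explicitly deleted, its entry in the tag map lingers. If tag maps grow large, you could scrub them on delete - a partial fix, since the parent class's LRU eviction calls this.cache.delete directly and bypasses this override:
// Inside CacheWithInvalidation - keeps tag sets in sync with explicit deletions
delete(key) {
  super.delete(key);
  if (this.tags) {
    for (const keys of this.tags.values()) {
      keys.delete(key); // drop the stale tag association
    }
  }
}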
Real-World Example
class DataCache {
constructor() {
this.cache = new LRUCacheWithTTL(5000, 60000); // 5000 items, 60s default
}
async getUser(id) {
return await this.cached(
`user:${id}`,
() => db.query('SELECT * FROM users WHERE id = ?', [id]),
300000 // 5 minutes
);
}
async getUserPosts(userId) {
return await this.cached(
`posts:${userId}`,
() => db.query('SELECT * FROM posts WHERE user_id = ?', [userId]),
60000 // 1 minute
);
}
async getStats() {
return await this.cached(
'stats',
async () => {
const [users, posts, comments] = await Promise.all([
db.query('SELECT COUNT(*) as count FROM users'),
db.query('SELECT COUNT(*) as count FROM posts'),
db.query('SELECT COUNT(*) as count FROM comments')
]);
return {
users: users[0].count,
posts: posts[0].count,
comments: comments[0].count
};
},
300000 // 5 minutes
);
}
invalidateUser(userId) {
this.cache.delete(`user:${userId}`);
this.cache.delete(`posts:${userId}`);
}
// Helper
async cached(key, fetchFn, ttl) {
const cached = this.cache.get(key);
if (cached !== undefined) {
return cached;
}
const value = await fetchFn();
this.cache.set(key, value, ttl);
return value;
}
}
// Usage
const dataCache = new DataCache();
app.get('/api/user/:id', async (req, res) => {
const user = await dataCache.getUser(req.params.id);
res.json(user);
});
app.get('/api/user/:id/posts', async (req, res) => {
const posts = await dataCache.getUserPosts(req.params.id);
res.json(posts);
});
app.get('/api/stats', async (req, res) => {
const stats = await dataCache.getStats();
res.json(stats);
});
app.post('/api/user/:id', async (req, res) => {
await db.query('UPDATE users SET ... WHERE id = ?', [req.params.id]);
dataCache.invalidateUser(req.params.id); // Clear cache
res.json({ success: true });
});
Measuring Cache Effectiveness
class CacheWithMetrics extends LRUCacheWithTTL {
constructor(maxSize, defaultTTL) {
super(maxSize, defaultTTL);
this.hits = 0;
this.misses = 0;
}
get(key) {
const value = super.get(key);
if (value !== undefined) {
this.hits++;
} else {
this.misses++;
}
return value;
}
metrics() {
const total = this.hits + this.misses;
const hitRate = total > 0 ? (this.hits / total * 100).toFixed(1) : 0;
return {
hits: this.hits,
misses: this.misses,
total: total,
hitRate: `${hitRate}%`,
size: this.size(),
maxSize: this.maxSize
};
}
resetMetrics() {
this.hits = 0;
this.misses = 0;
}
}
// Usage
const cache = new CacheWithMetrics(1000, 60000);
// Check metrics periodically
setInterval(() => {
console.log('Cache metrics:', cache.metrics());
// { hits: 8543, misses: 1234, total: 9777, hitRate: '87.4%', size: 342, maxSize: 1000 }
}, 60000);
When NOT to Cache
// DON'T cache frequently changing data
const balance = await getBalance(userId); // Changes every transaction
// DON'T cache user-specific data globally
cache.set('user', userData); // Will be wrong for next user!
// DON'T cache large objects
cache.set('report', hugeReport); // 50 MB - kills memory
// DON'T cache forever
cache.set('data', value, Infinity); // Memory leak
// DO cache expensive, stable data
const stats = await cached('stats', getStats, 60000);
const config = await cached('config', getConfig, 3600000);
Memory Management
class SafeCache extends LRUCacheWithTTL {
constructor(maxSize, defaultTTL, maxMemoryMB = 100) {
super(maxSize, defaultTTL);
this.maxMemoryBytes = maxMemoryMB * 1024 * 1024;
}
set(key, value, ttl) {
// Check memory before adding
const estimatedSize = this.estimateSize(value);
if (estimatedSize > this.maxMemoryBytes / 10) {
console.warn(`Value too large: ${estimatedSize} bytes`);
return; // Don't cache huge objects
}
super.set(key, value, ttl);
}
estimateSize(obj) {
// Rough heuristic: JSON length times ~2 bytes per UTF-16 character
// Note: JSON.stringify throws on circular structures
const str = JSON.stringify(obj);
return str.length * 2;
}
getMemoryUsage() {
let total = 0;
for (const item of this.cache.values()) {
total += this.estimateSize(item.value);
}
return {
bytes: total,
mb: (total / 1024 / 1024).toFixed(2),
items: this.cache.size
};
}
}
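A usage sketch - log estimated memory alongside item count so growth is visible (the interval and limits here are arbitrary):
const safeCache = new SafeCache(5000, 60000, 100); // 5000 items, 60s TTL, ~100 MB cap
setInterval(() => {
  console.log('Cache memory:', safeCache.getMemoryUsage());
}, 60000);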
Common Patterns
Cache-Aside
// Application manages cache
async function getUser(id) {
const cached = cache.get(`user:${id}`);
if (cached !== undefined) return cached;
const user = await db.query('SELECT * FROM users WHERE id = ?', [id]);
cache.set(`user:${id}`, user);
return user;
}
Write-Through
// Write to cache and database
async function updateUser(id, data) {
await db.query('UPDATE users SET ... WHERE id = ?', [id, data]);
cache.set(`user:${id}`, { id, ...data }); // Update cache
}
Write-Behind
// Write to cache first, database later
async function updateUser(id, data) {
cache.set(`user:${id}`, { id, ...data });
// Queue database update
updateQueue.add({ type: 'user', id, data });
}
Using node-cache (Production Ready)
const NodeCache = require('node-cache');
const cache = new NodeCache({
stdTTL: 60, // Default TTL: 60 seconds
checkperiod: 120, // Cleanup every 2 minutes
useClones: false // Don't clone objects (faster)
});
// Set
cache.set('key', value, 300); // 5 minutes TTL
// Get
const value = cache.get('key');
// Multiple keys
cache.mset([
{ key: 'key1', val: 'value1', ttl: 60 },
{ key: 'key2', val: 'value2', ttl: 120 }
]);
// Event listeners
cache.on('set', (key, value) => {
console.log(`Cache set: ${key}`);
});
cache.on('expired', (key, value) => {
console.log(`Cache expired: ${key}`);
});
// Stats
const stats = cache.getStats();
console.log(stats);
// { keys: 342, hits: 8543, misses: 1234, ksize: 5472, vsize: 123456 }
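The cached() helper pattern from earlier works with node-cache too - just remember its TTLs are in seconds, not milliseconds:
async function cachedNC(key, fetchFn, ttlSeconds = 60) {
  const hit = cache.get(key);
  if (hit !== undefined) {
    return hit;
  }
  const value = await fetchFn();
  cache.set(key, value, ttlSeconds);
  return value;
}
// Usage
const stats = await cachedNC('stats', () => db.query('SELECT COUNT(*) FROM users'), 60);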
The Bottom Line
In-memory caching dramatically speeds up applications by storing expensive results in RAM.
- Use LRU + TTL in production: it bounds memory and expires stale data.
- Cache expensive operations: database queries, API calls, computed results.
- Invalidate wisely: clear the cache when data changes, not on every write.
- Monitor your hit rate: 80%+ means the cache is doing its job.
Our homepage hit the database 1000 times per minute. Added a 60-second cache. Database CPU: 80% → 12%. Page load: 800ms → 120ms. One hour of work, massive improvement.
Start with simple caching today. Add TTL to expire data. Use LRU to limit memory. Your database will thank you.