Redis is the go-to solution for caching in web applications. Here are patterns for implementing effective caching strategies.
Basic Operations#
1import Redis from 'ioredis';
2
3const redis = new Redis(process.env.REDIS_URL);
4
5// Basic set/get
6await redis.set('key', 'value');
7const value = await redis.get('key');
8
9// Set with expiration (seconds)
10await redis.setex('key', 3600, 'value'); // 1 hour
11
12// Set with expiration (milliseconds)
13await redis.psetex('key', 60000, 'value'); // 1 minute
14
15// Set if not exists
16const wasSet = await redis.setnx('key', 'value');
17
18// Set with options
19await redis.set('key', 'value', 'EX', 3600, 'NX'); // Expire + not exists
20
21// Get and set
22const oldValue = await redis.getset('key', 'newValue');
23
24// Delete
25await redis.del('key');
26
27// Check existence
28const exists = await redis.exists('key');
29
30// Get remaining TTL
31const ttl = await redis.ttl('key'); // seconds
32const pttl = await redis.pttl('key'); // millisecondsCache-Aside Pattern#
1// Most common caching pattern
2class CacheAside {
3 constructor(
4 private redis: Redis,
5 private ttl = 3600
6 ) {}
7
8 async get<T>(key: string, fetchFn: () => Promise<T>): Promise<T> {
9 // Try cache first
10 const cached = await this.redis.get(key);
11
12 if (cached) {
13 return JSON.parse(cached) as T;
14 }
15
16 // Cache miss - fetch from source
17 const data = await fetchFn();
18
19 // Store in cache
20 await this.redis.setex(key, this.ttl, JSON.stringify(data));
21
22 return data;
23 }
24
25 async invalidate(key: string): Promise<void> {
26 await this.redis.del(key);
27 }
28
29 async invalidatePattern(pattern: string): Promise<void> {
30 const keys = await this.redis.keys(pattern);
31 if (keys.length > 0) {
32 await this.redis.del(...keys);
33 }
34 }
35}
36
// Usage
const cache = new CacheAside(redis, 3600);

// Read through the cache; the fetcher only runs on a miss
const user = await cache.get(`user:${userId}`, () =>
  prisma.user.findUnique({ where: { id: userId } })
);

// After a write, drop the now-stale cache entry
await prisma.user.update({ where: { id: userId }, data });
await cache.invalidate(`user:${userId}`);
Write-Through Cache#
// Write-through cache: every write goes to the database first, then to the
// cache, so the cache never holds data the database has not accepted.
class WriteThroughCache<T> {
  constructor(
    private redis: Redis,
    private repository: Repository<T>,
    private keyPrefix: string,
    private ttl = 3600
  ) {}

  /** Reads from cache, falling back to the repository and back-filling. */
  async get(id: string): Promise<T | null> {
    const key = `${this.keyPrefix}:${id}`;
    const cached = await this.redis.get(key);

    if (cached !== null) {
      return JSON.parse(cached);
    }

    // Cache miss
    const data = await this.repository.findById(id);

    if (data) {
      await this.redis.setex(key, this.ttl, JSON.stringify(data));
    }

    return data;
  }

  /**
   * Persists `data` and updates the cache.
   * The database write happens FIRST: running both in parallel (as the
   * original did with Promise.all) could leave the cache holding a value
   * whose database write failed. We also cache the repository's returned
   * entity, in case the store normalizes it.
   */
  async save(id: string, data: T): Promise<T> {
    const key = `${this.keyPrefix}:${id}`;

    const saved = await this.repository.save(id, data);
    await this.redis.setex(key, this.ttl, JSON.stringify(saved));

    return saved;
  }

  /**
   * Deletes from the database, then the cache. If the cache delete fails
   * after a successful DB delete, the stale entry still expires via its TTL.
   */
  async delete(id: string): Promise<void> {
    const key = `${this.keyPrefix}:${id}`;

    await this.repository.delete(id);
    await this.redis.del(key);
  }
}
Cache with Stampede Prevention#
1// Prevent cache stampede with locking
2class StampedeProtectedCache {
3 constructor(
4 private redis: Redis,
5 private ttl = 3600,
6 private lockTtl = 10
7 ) {}
8
9 async get<T>(key: string, fetchFn: () => Promise<T>): Promise<T> {
10 // Try cache
11 const cached = await this.redis.get(key);
12 if (cached) {
13 return JSON.parse(cached);
14 }
15
16 // Try to acquire lock
17 const lockKey = `lock:${key}`;
18 const acquired = await this.redis.set(
19 lockKey,
20 '1',
21 'EX',
22 this.lockTtl,
23 'NX'
24 );
25
26 if (acquired) {
27 try {
28 // We have the lock - fetch and cache
29 const data = await fetchFn();
30 await this.redis.setex(key, this.ttl, JSON.stringify(data));
31 return data;
32 } finally {
33 await this.redis.del(lockKey);
34 }
35 } else {
36 // Another process is fetching - wait and retry
37 await this.sleep(100);
38 return this.get(key, fetchFn);
39 }
40 }
41
42 private sleep(ms: number): Promise<void> {
43 return new Promise((resolve) => setTimeout(resolve, ms));
44 }
45}
46
// Or use probabilistic early expiration ("XFetch"): each reader may
// voluntarily recompute shortly before the logical expiry, with probability
// rising as expiry approaches, scaled by how long the last recompute took.
class EarlyExpirationCache {
  // BUG FIX: the original referenced `this.redis` without ever declaring or
  // initializing it (no constructor), so every call would have thrown.
  constructor(private redis: Redis) {}

  /**
   * Returns the cached value for `key`, probabilistically recomputing it
   * before the stored logical expiry to avoid synchronized stampedes.
   */
  async get<T>(key: string, fetchFn: () => Promise<T>, ttl: number): Promise<T> {
    const result = await this.redis.hgetall(key);

    if (result.data) {
      const expiry = parseInt(result.expiry, 10);
      const delta = parseInt(result.delta, 10);

      // -log(rand()) is positive and occasionally large, so recompute
      // probability grows smoothly as Date.now() nears `expiry`.
      const shouldRecompute =
        Date.now() - delta * Math.log(Math.random()) > expiry;

      if (!shouldRecompute) {
        return JSON.parse(result.data);
      }
    }

    // Recompute, recording how long it took (delta) for future scheduling.
    const start = Date.now();
    const data = await fetchFn();
    const delta = Date.now() - start;

    await this.redis.hset(key, {
      data: JSON.stringify(data),
      expiry: Date.now() + ttl * 1000,
      delta: delta,
    });

    // Hard Redis TTL with a buffer beyond the logical expiry.
    await this.redis.expire(key, ttl + 60);

    return data;
  }
}
Session Storage#
// Session management with Redis: sessions are JSON blobs under a common
// prefix with a sliding TTL refreshed on every read.
class RedisSessionStore {
  constructor(
    private redis: Redis,
    private prefix = 'session:',
    private ttl = 86400 // 24 hours
  ) {}

  /** Creates a session under a random UUID and returns the session id. */
  async create(data: SessionData): Promise<string> {
    const sessionId = crypto.randomUUID();
    const key = `${this.prefix}${sessionId}`;

    await this.redis.setex(key, this.ttl, JSON.stringify(data));

    return sessionId;
  }

  /** Loads a session; each successful read slides the TTL forward. */
  async get(sessionId: string): Promise<SessionData | null> {
    const key = `${this.prefix}${sessionId}`;
    const data = await this.redis.get(key);

    if (!data) return null;

    // Refresh TTL on access (sliding expiration)
    await this.redis.expire(key, this.ttl);

    return JSON.parse(data);
  }

  /**
   * Shallow-merges `data` into the existing session and resets the TTL.
   * NOTE(review): read-merge-write is not atomic; concurrent updates to the
   * same session can lose fields — confirm whether callers need a lock.
   */
  async update(sessionId: string, data: Partial<SessionData>): Promise<void> {
    const key = `${this.prefix}${sessionId}`;
    const existing = await this.get(sessionId);

    if (existing) {
      await this.redis.setex(
        key,
        this.ttl,
        JSON.stringify({ ...existing, ...data })
      );
    }
  }

  /** Removes a single session. */
  async destroy(sessionId: string): Promise<void> {
    const key = `${this.prefix}${sessionId}`;
    await this.redis.del(key);
  }

  /**
   * Destroys every session belonging to `userId`.
   * Iterates with SCAN instead of KEYS so large keyspaces never block the
   * server. Still O(total sessions); maintaining a per-user index set would
   * make this O(sessions of the user) if it becomes hot.
   */
  async destroyAllForUser(userId: string): Promise<void> {
    const pattern = `${this.prefix}*`;
    let cursor = '0';

    do {
      const [next, keys] = await this.redis.scan(
        cursor,
        'MATCH',
        pattern,
        'COUNT',
        100
      );
      cursor = next;

      for (const key of keys) {
        const data = await this.redis.get(key);
        if (data) {
          const session = JSON.parse(data);
          if (session.userId === userId) {
            await this.redis.del(key);
          }
        }
      }
    } while (cursor !== '0');
  }
}
Rate Limiting#
1// Sliding window rate limiting
2class RateLimiter {
3 constructor(
4 private redis: Redis,
5 private limit: number,
6 private windowMs: number
7 ) {}
8
9 async check(key: string): Promise<{ allowed: boolean; remaining: number }> {
10 const now = Date.now();
11 const windowStart = now - this.windowMs;
12
13 const multi = this.redis.multi();
14
15 // Remove old entries
16 multi.zremrangebyscore(key, 0, windowStart);
17
18 // Count current entries
19 multi.zcard(key);
20
21 // Add current request
22 multi.zadd(key, now, `${now}-${Math.random()}`);
23
24 // Set expiry
25 multi.expire(key, Math.ceil(this.windowMs / 1000));
26
27 const results = await multi.exec();
28 const count = results?.[1]?.[1] as number;
29
30 const allowed = count < this.limit;
31 const remaining = Math.max(0, this.limit - count - 1);
32
33 if (!allowed) {
34 // Remove the request we just added
35 await this.redis.zremrangebyscore(key, now, now);
36 }
37
38 return { allowed, remaining };
39 }
40}
41
// Token bucket limiter: the bucket refills continuously at `refillRate`
// tokens per second up to `maxTokens`; each call atomically takes tokens.
class TokenBucket {
  constructor(
    private redis: Redis,
    private maxTokens: number,
    private refillRate: number // tokens per second
  ) {}

  /**
   * Attempts to take `tokens` from the bucket at `key`; resolves true when
   * granted. Refill and consume run in a single Lua script, so the check is
   * atomic even across many concurrent Node processes.
   */
  async consume(key: string, tokens = 1): Promise<boolean> {
    const luaScript = `
      local key = KEYS[1]
      local max_tokens = tonumber(ARGV[1])
      local refill_rate = tonumber(ARGV[2])
      local now = tonumber(ARGV[3])
      local requested = tonumber(ARGV[4])

      local data = redis.call('HMGET', key, 'tokens', 'last_update')
      local current_tokens = tonumber(data[1]) or max_tokens
      local last_update = tonumber(data[2]) or now

      local elapsed = now - last_update
      local refill = math.floor(elapsed * refill_rate / 1000)
      current_tokens = math.min(max_tokens, current_tokens + refill)

      if current_tokens >= requested then
        current_tokens = current_tokens - requested
        redis.call('HMSET', key, 'tokens', current_tokens, 'last_update', now)
        redis.call('EXPIRE', key, math.ceil(max_tokens / refill_rate) + 1)
        return 1
      end

      return 0
    `;

    const granted = await this.redis.eval(
      luaScript,
      1, // number of KEYS
      key,
      this.maxTokens,
      this.refillRate,
      Date.now(),
      tokens
    );

    return granted === 1;
  }
}
Pub/Sub for Cache Invalidation#
// Distributed cache invalidation: a Redis-backed cache with a per-process
// in-memory layer, kept coherent across instances via pub/sub.
// NOTE(review): the local Map has no TTL or size bound, so entries can
// outlive their Redis TTL and grow without limit — confirm acceptable.
class DistributedCache {
  private subscriber: Redis;
  private localCache: Map<string, any> = new Map();

  constructor(private redis: Redis) {
    // A subscribed connection cannot issue regular commands, so duplicate.
    this.subscriber = redis.duplicate();

    this.subscriber.subscribe('cache:invalidate');
    this.subscriber.on('message', (channel, message) => {
      if (channel === 'cache:invalidate') {
        const { pattern } = JSON.parse(message);
        this.invalidateLocal(pattern);
      }
    });
  }

  /** Reads local cache, then Redis, then the source of truth. */
  async get<T>(key: string, fetchFn: () => Promise<T>): Promise<T> {
    // 1) Process-local cache (no network hop)
    if (this.localCache.has(key)) {
      return this.localCache.get(key);
    }

    // 2) Shared Redis cache (null check so falsy payloads are hits)
    const cached = await this.redis.get(key);
    if (cached !== null) {
      const data = JSON.parse(cached);
      this.localCache.set(key, data);
      return data;
    }

    // 3) Source of truth; back-fill both layers
    const data = await fetchFn();
    await this.redis.setex(key, 3600, JSON.stringify(data));
    this.localCache.set(key, data);

    return data;
  }

  /** Invalidates in Redis and broadcasts to every instance's local cache. */
  async invalidate(pattern: string): Promise<void> {
    // Delete matching keys via SCAN — KEYS blocks the server on large DBs.
    let cursor = '0';
    do {
      const [next, keys] = await this.redis.scan(
        cursor,
        'MATCH',
        pattern,
        'COUNT',
        100
      );
      cursor = next;
      if (keys.length > 0) {
        await this.redis.del(...keys);
      }
    } while (cursor !== '0');

    // Notify all instances (including ourselves) to drop local copies
    await this.redis.publish(
      'cache:invalidate',
      JSON.stringify({ pattern })
    );
  }

  /**
   * Drops local entries matching a Redis glob `pattern`.
   * BUG FIX: the original `pattern.replace('*', '.*')` rewrote only the
   * FIRST '*', left regex metacharacters ('.', '+', …) live, and matched
   * unanchored substrings. Escape each literal segment and anchor ^…$.
   */
  private invalidateLocal(pattern: string): void {
    const escaped = pattern
      .split('*')
      .map((part) => part.replace(/[.+?^${}()|[\]\\]/g, '\\$&'))
      .join('.*');
    const regex = new RegExp(`^${escaped}$`);
    for (const key of this.localCache.keys()) {
      if (regex.test(key)) {
        this.localCache.delete(key);
      }
    }
  }
}
Best Practices#
Keys:
✓ Use consistent naming: type:id:field
✓ Keep keys short but descriptive
✓ Use colons as separators
✓ Avoid special characters
TTL:
✓ Always set TTL on cache entries
✓ Use shorter TTL for volatile data
✓ Consider cache warming for critical data
✓ Handle cache misses gracefully
Performance:
✓ Use pipelines for multiple operations
✓ Avoid KEYS in production (use SCAN)
✓ Monitor memory usage
✓ Use appropriate data structures
Conclusion#
Redis caching significantly improves application performance. Use cache-aside for most cases, implement stampede protection for high-traffic scenarios, and leverage pub/sub for distributed invalidation. Monitor cache hit rates and adjust TTLs based on data volatility.