Caching dramatically improves application performance. This guide covers caching strategies at every layer of your application stack.
Caching Layers#
┌─────────────────────────────────────────────────────┐
│ Browser Cache │
├─────────────────────────────────────────────────────┤
│ CDN Cache │
├─────────────────────────────────────────────────────┤
│ Application Cache (Redis) │
├─────────────────────────────────────────────────────┤
│ Database Query Cache │
└─────────────────────────────────────────────────────┘
Browser Caching#
Cache-Control Headers#
1// Next.js API route
2export async function GET(request: Request) {
3 const data = await fetchData();
4
5 return Response.json(data, {
6 headers: {
7 // Cache for 1 hour, revalidate if stale
8 'Cache-Control': 'public, max-age=3600, stale-while-revalidate=86400',
9
10 // Or for private/user-specific data
11 // 'Cache-Control': 'private, max-age=60',
12
13 // ETags for conditional requests
14 'ETag': generateETag(data),
15 },
16 });
17}
18
19// Static assets
20// next.config.js
21module.exports = {
22 async headers() {
23 return [
24 {
25 source: '/_next/static/:path*',
26 headers: [
27 {
28 key: 'Cache-Control',
29 value: 'public, max-age=31536000, immutable',
30 },
31 ],
32 },
33 ];
34 },
35};Service Worker Caching#
1// sw.js
2const CACHE_NAME = 'app-v1';
3const STATIC_ASSETS = [
4 '/',
5 '/styles.css',
6 '/app.js',
7];
8
9// Cache static assets on install
10self.addEventListener('install', (event) => {
11 event.waitUntil(
12 caches.open(CACHE_NAME).then((cache) => {
13 return cache.addAll(STATIC_ASSETS);
14 })
15 );
16});
17
18// Stale-while-revalidate for API calls
19self.addEventListener('fetch', (event) => {
20 if (event.request.url.includes('/api/')) {
21 event.respondWith(
22 caches.open(CACHE_NAME).then(async (cache) => {
23 const cachedResponse = await cache.match(event.request);
24
25 const fetchPromise = fetch(event.request).then((response) => {
26 cache.put(event.request, response.clone());
27 return response;
28 });
29
30 return cachedResponse || fetchPromise;
31 })
32 );
33 }
34});CDN Caching#
Vercel Edge Config#
// Caching at the edge
export const config = {
  runtime: 'edge',
};

export default async function handler(request: Request) {
  const data = await fetchFromOrigin();

  const cacheHeaders = {
    'Content-Type': 'application/json',
    // Shared caches: 1h TTL, serve stale while revalidating.
    'Cache-Control': 's-maxage=3600, stale-while-revalidate',
    // CDN-specific overrides allow longer edge TTLs.
    'CDN-Cache-Control': 'max-age=7200',
    'Vercel-CDN-Cache-Control': 'max-age=86400',
  };

  return new Response(JSON.stringify(data), { headers: cacheHeaders });
}
Cache Invalidation#
1// Purge specific paths
2async function invalidateCache(paths: string[]) {
3 await fetch('https://api.vercel.com/v1/projects/{projectId}/purge', {
4 method: 'POST',
5 headers: {
6 Authorization: `Bearer ${process.env.VERCEL_TOKEN}`,
7 },
8 body: JSON.stringify({ paths }),
9 });
10}
11
// Invalidate on content update: purge every path that can render
// this product after the database write succeeds.
export async function updateProduct(id: string, data: ProductData) {
  await db.products.update(id, data);

  const stalePaths = [`/products/${id}`, '/products', '/api/products'];
  await invalidateCache(stalePaths);
}
Application-Level Caching#
Redis Caching Layer#
import Redis from 'ioredis';

/**
 * Thin JSON-serializing cache layer over Redis.
 * Values are stored as JSON strings with a TTL in seconds.
 */
class CacheService {
  private redis: Redis;
  private defaultTTL = 3600; // seconds

  constructor() {
    this.redis = new Redis(process.env.REDIS_URL);
  }

  /** Returns the cached value, or null on a miss. */
  async get<T>(key: string): Promise<T | null> {
    const cached = await this.redis.get(key);
    return cached ? JSON.parse(cached) : null;
  }

  async set<T>(key: string, value: T, ttl = this.defaultTTL): Promise<void> {
    await this.redis.setex(key, ttl, JSON.stringify(value));
  }

  /**
   * Read-through helper: returns the cached value, or runs `fetcher`,
   * caches its result, and returns it.
   */
  async getOrSet<T>(
    key: string,
    fetcher: () => Promise<T>,
    ttl = this.defaultTTL
  ): Promise<T> {
    const cached = await this.get<T>(key);
    // Compare against null explicitly: the original truthiness check
    // treated legitimately cached falsy values (0, '', false) as misses
    // and re-ran the fetcher on every single call.
    if (cached !== null) return cached;

    const value = await fetcher();
    await this.set(key, value, ttl);
    return value;
  }

  /**
   * Deletes every key matching `pattern`.
   * WARNING: KEYS is O(N) and blocks the Redis server — fine for small
   * datasets, but prefer SCAN (e.g. ioredis scanStream) in production.
   */
  async invalidate(pattern: string): Promise<void> {
    const keys = await this.redis.keys(pattern);
    if (keys.length) {
      await this.redis.del(...keys);
    }
  }
}

// Usage
const cache = new CacheService();

async function getProduct(id: string) {
  return cache.getOrSet(
    `product:${id}`,
    () => db.products.findById(id),
    3600
  );
}
Cache Patterns#
Cache-Aside#
// Cache-aside read: check the cache, fall back to the database,
// then populate the cache for subsequent reads.
async function getUserProfile(userId: string) {
  // Check cache first. get() returns null on a miss, so compare against
  // null rather than truthiness (a truthy check would re-fetch cached
  // falsy values forever).
  const cached = await cache.get(`user:${userId}`);
  if (cached !== null) return cached;

  // Fetch from database
  const user = await db.users.findById(userId);

  // Only cache found users: storing a null "not found" result is
  // indistinguishable from a miss here and would be refetched anyway.
  if (user != null) {
    await cache.set(`user:${userId}`, user, 3600);
  }

  return user;
}
Write-Through#
// Write-through: persist to the database first, then refresh the cache
// entry so readers immediately observe the new profile.
async function updateUserProfile(userId: string, data: UserData) {
  const user = await db.users.update(userId, data);
  await cache.set(`user:${userId}`, user, 3600);
  return user;
}
Write-Behind (Write-Back)#
// Pending writes keyed by cache key; timestamp records when it was queued.
const writeQueue: Map<string, { data: any; timestamp: number }> = new Map();

async function updateWithWriteBehind(key: string, data: any) {
  // Update cache immediately
  await cache.set(key, data);

  // Queue database write
  writeQueue.set(key, { data, timestamp: Date.now() });
}

// Background worker flushes queued writes to the database.
setInterval(async () => {
  for (const [key, entry] of writeQueue) {
    try {
      await db.update(key, entry.data);
    } catch (err) {
      // Keep the entry queued and retry on the next tick; letting the
      // rejection escape would abort the whole flush pass unhandled.
      console.error(`write-behind flush failed for ${key}`, err);
      continue;
    }
    // Only drop the entry if it was NOT overwritten while the DB write
    // was in flight — the original deleted unconditionally after the
    // await, silently losing any concurrent update to the same key.
    if (writeQueue.get(key) === entry) {
      writeQueue.delete(key);
    }
  }
}, 5000);
Query-Level Caching#
Prisma with Redis#
import { Prisma } from '@prisma/client';

// Transparently cache findMany results for 5 minutes, keyed by
// model name plus the serialized query arguments.
const prismaWithCache = prisma.$extends({
  query: {
    $allModels: {
      async findMany({ model, operation, args, query }) {
        const cacheKey = `prisma:${model}:${JSON.stringify(args)}`;

        const cached = await cache.get(cacheKey);
        if (cached) {
          return cached;
        }

        const fresh = await query(args);
        await cache.set(cacheKey, fresh, 300);
        return fresh;
      },
    },
  },
});
Memoization for Expensive Computations#
1function memoize<T extends (...args: any[]) => any>(
2 fn: T,
3 keyFn?: (...args: Parameters<T>) => string
4): T {
5 const cache = new Map<string, ReturnType<T>>();
6
7 return ((...args: Parameters<T>) => {
8 const key = keyFn ? keyFn(...args) : JSON.stringify(args);
9
10 if (cache.has(key)) {
11 return cache.get(key);
12 }
13
14 const result = fn(...args);
15 cache.set(key, result);
16 return result;
17 }) as T;
18}
19
// Usage: memoize a pure aggregation so repeated calls with the same
// input skip the expensive per-element work.
const expensiveCalculation = memoize((data: number[]) =>
  data.reduce((total, item) => total + complexOperation(item), 0)
);
Cache Invalidation Strategies#
Time-Based (TTL)#
await cache.set('data', value, 3600); // Expires in 1 hour
Event-Based#
// Pub/Sub for cache invalidation. A Redis connection in subscriber mode
// cannot issue regular commands, so publishing needs its own connection.
// (The original used `publisher` below without ever defining it.)
const subscriber = new Redis(process.env.REDIS_URL);
const publisher = new Redis(process.env.REDIS_URL);

subscriber.subscribe('cache-invalidation');

subscriber.on('message', async (channel, message) => {
  // Messages are JSON of the form { pattern: string } — see publisher below.
  const { pattern } = JSON.parse(message);
  await cache.invalidate(pattern);
});

// Publish invalidation events
async function onProductUpdate(productId: string) {
  await publisher.publish('cache-invalidation', JSON.stringify({
    pattern: `product:${productId}*`,
  }));
}
Version-Based#
/**
 * Namespaces cache keys with a version stamp; bumping the version
 * orphans every existing entry at once (old entries age out via TTL).
 */
class VersionedCache {
  // Monotonic counter. The original used Date.now(): two invalidations
  // (or construction + an immediate invalidation) within the same
  // millisecond produced an identical version, so stale entries kept
  // being served.
  private version = 0;

  /** Returns `base` qualified with the current cache generation. */
  getKey(base: string): string {
    return `${base}:v${this.version}`;
  }

  /** Invalidates every key by advancing to a fresh generation. */
  invalidateAll() {
    this.version++;
  }
}
Best Practices#
- Cache at the right level: Start with CDN, then application cache
- Set appropriate TTLs: Balance freshness with performance
- Plan for invalidation: Know how and when to clear caches
- Monitor hit rates: Track cache effectiveness
- Handle cache failures: Application should work without cache
Conclusion#
Effective caching requires understanding your data access patterns. Layer your caches appropriately, implement proper invalidation, and monitor cache performance.