Back to Blog
Node.js · Memory · V8 · Performance

Node.js Memory Management

Understand Node.js memory management. From V8 heap to garbage collection to memory leak detection.

Bootspring Team
Engineering
March 9, 2021
7 min read

Understanding memory management helps build efficient Node.js applications. Here's what you need to know.

Memory Structure#

// V8 Memory Layout
/*
Heap:
├── New Space (Young Generation)
│   ├── Semi-space 1 (From)
│   └── Semi-space 2 (To)
└── Old Space (Old Generation)
    ├── Old Pointer Space
    ├── Old Data Space
    ├── Large Object Space
    ├── Code Space
    └── Map Space
*/

// Check memory usage for the current process
const used = process.memoryUsage();

// Format a byte count as whole megabytes (shared by every field below)
const toMB = (bytes) => `${Math.round(bytes / 1024 / 1024)}MB`;

console.log({
  rss: toMB(used.rss),
  heapTotal: toMB(used.heapTotal),
  heapUsed: toMB(used.heapUsed),
  external: toMB(used.external),
  arrayBuffers: toMB(used.arrayBuffers),
});

// rss: Resident Set Size - total memory allocated
// heapTotal: V8's total heap size
// heapUsed: V8's used heap
// external: Memory used by C++ objects bound to JS
// arrayBuffers: Memory for ArrayBuffers and SharedArrayBuffers

Heap Configuration#

# Increase the old-space heap limit, in MB.
# (On modern Node the default scales with available memory;
# historically it was ~1.5GB on 64-bit builds.)
node --max-old-space-size=4096 app.js

# Set the semi-space (new space) size, in MB
node --max-semi-space-size=64 app.js

# List V8's heap-related options
node --v8-options | grep -i heap

# Commonly used flags
node --expose-gc                # Expose global.gc()
node --trace-gc                 # Log GC events
node --max-old-space-size=8192  # 8GB heap

Garbage Collection#

// GC in V8
/*
Scavenge (Minor GC):
- Cleans New Space
- Fast, frequent
- Promotes surviving objects to Old Space

Mark-Sweep-Compact (Major GC):
- Cleans Old Space
- Slower, less frequent
- Can cause pauses
*/

// Force garbage collection (only defined when run with --expose-gc)
if (global.gc) {
  global.gc();
}

// Monitor GC
import v8 from 'node:v8';

// Get heap statistics (sizes are in bytes)
const heapStats = v8.getHeapStatistics();
console.log({
  totalHeapSize: `${Math.round(heapStats.total_heap_size / 1024 / 1024)}MB`,
  usedHeapSize: `${Math.round(heapStats.used_heap_size / 1024 / 1024)}MB`,
  heapSizeLimit: `${Math.round(heapStats.heap_size_limit / 1024 / 1024)}MB`,
  mallocedMemory: `${Math.round(heapStats.malloced_memory / 1024 / 1024)}MB`,
});

// GC events via perf_hooks
import { PerformanceObserver } from 'node:perf_hooks';

const obs = new PerformanceObserver((list) => {
  const entry = list.getEntries()[0];
  // entry.kind is deprecated since Node 16; the GC type now lives on
  // entry.detail.kind. Fall back for older runtimes.
  const kind = entry.detail?.kind ?? entry.kind;
  console.log(`GC: ${kind} - ${entry.duration.toFixed(2)}ms`);
});

obs.observe({ entryTypes: ['gc'] });

Memory Leaks#

// Common leak: Global variables
global.leakedData = []; // Never cleaned up

function processRequest(data) {
  global.leakedData.push(data); // Grows forever
}

// Common leak: Closures
function createLeak() {
  const largeData = new Array(1000000).fill('x');

  return function () {
    // largeData is retained even if not used
    console.log('Closure');
  };
}

const leaked = createLeak();

// Common leak: Event listeners
import { EventEmitter } from 'node:events';
const emitter = new EventEmitter();

function addListeners() {
  // Adding a listener on every call, never removing any
  emitter.on('event', () => {
    // Handler
  });
}

// Call repeatedly = memory leak.
// Keep the handle so the demo can actually be stopped.
const listenerLeakInterval = setInterval(addListeners, 1000);

// Fix: once() registers a listener that removes itself after one firing —
// the idiomatic form of "add handler, then off() inside it"
function addListenersFixed() {
  emitter.once('event', () => {
    // Handler
  });
}

// Common leak: Timers
function leakyTimer() {
  const data = new Array(1000000).fill('x');

  // The interval callback closes over `data`, so it is retained forever
  setInterval(() => {
    console.log(data.length);
  }, 1000);
}

// Fix: Clear timers
function fixedTimer() {
  const data = new Array(1000000).fill('x');

  const interval = setInterval(() => {
    console.log(data.length);
  }, 1000);

  // Clear when done
  setTimeout(() => clearInterval(interval), 5000);
}

Detecting Leaks#

// Method 1: Monitor heap growth
let lastHeapUsed = 0;

const heapGrowthInterval = setInterval(() => {
  const { heapUsed } = process.memoryUsage();
  const diff = heapUsed - lastHeapUsed;

  if (diff > 0) {
    console.log(`Heap grew by ${Math.round(diff / 1024)}KB`);
  }

  lastHeapUsed = heapUsed;
}, 5000);
// Monitoring alone should never keep the process alive
heapGrowthInterval.unref();

// Method 2: Heap snapshots
import v8 from 'node:v8';

// Writes a .heapsnapshot file and returns the filename written.
function takeHeapSnapshot() {
  const filename = `heap-${Date.now()}.heapsnapshot`;
  // v8.writeHeapSnapshot returns the filename it wrote (not a stream)
  const written = v8.writeHeapSnapshot(filename);
  console.log(`Heap snapshot written to ${written}`);
  return written;
}

// Take snapshots at different times, then diff them in DevTools
takeHeapSnapshot(); // Initial
setTimeout(takeHeapSnapshot, 60000).unref(); // After 1 minute

// Method 3: Using clinic.js
// npm install -g clinic
// clinic doctor -- node app.js
// clinic heapprofile -- node app.js

Memory-Efficient Patterns#

// Use streams for large data
import fs from 'node:fs';

// Bad: Load the entire file into memory at once
function processFileBad(path) {
  const content = fs.readFileSync(path, 'utf8');
  return content.split('\n').length;
}

// Good: Stream the file chunk by chunk.
// Counts newlines per chunk, then adds 1 so the result matches
// processFileBad ('\n' is a single byte, so chunk boundaries are safe).
async function processFileGood(path) {
  const stream = fs.createReadStream(path);
  let newlines = 0;

  for await (const chunk of stream) {
    newlines += chunk.toString().split('\n').length - 1;
  }

  return newlines + 1;
}

// Object pooling: reuse pre-allocated objects instead of churning the GC
class ObjectPool {
  constructor(createFn, size = 10) {
    this.createFn = createFn;
    this.pool = Array.from({ length: size }, createFn);
  }

  // Take an object from the pool; allocate a fresh one if the pool is empty.
  acquire() {
    return this.pool.pop() || this.createFn();
  }

  // Return an object to the pool for reuse.
  release(obj) {
    // Reset object state here in real code before pooling it
    this.pool.push(obj);
  }
}

const bufferPool = new ObjectPool(
  () => Buffer.alloc(1024),
  100
);

// WeakMap for metadata: entries vanish when the key object is collected
const metadata = new WeakMap();

function processObject(obj) {
  metadata.set(obj, { processed: true });
}
// When obj is GC'd, metadata is also cleaned

// Avoid creating objects in loops
// Bad: allocates a new object per item on every call
function processBad(items) {
  return items.map(item => ({
    id: item.id,
    name: item.name,
  }));
}

// Good: Reuse objects when possible (caller may pass a reusable output array)
function processGood(items, output = []) {
  output.length = items.length;

  for (let i = 0; i < items.length; i++) {
    if (!output[i]) output[i] = {};
    output[i].id = items[i].id;
    output[i].name = items[i].name;
  }

  return output;
}

Buffer Management#

// Buffer allocation
// Buffer.alloc - Zero-filled, safe
const safe = Buffer.alloc(1024);

// Buffer.allocUnsafe - May contain old data, faster
const unsafe = Buffer.allocUnsafe(1024);

// Buffer.allocUnsafeSlow - No pooling
const slow = Buffer.allocUnsafeSlow(1024);

// Buffer pooling: a simple bump allocator over one shared buffer
const POOL_SIZE = 8 * 1024; // 8KB
const pool = Buffer.allocUnsafe(POOL_SIZE);
let offset = 0;

// Returns a `size`-byte view into the shared pool.
// Views share memory with `pool` and may be overwritten once the
// allocator wraps around — suitable for short-lived scratch buffers only.
function getBuffer(size) {
  // Requests larger than the pool can't be served from it;
  // fall back to a dedicated allocation instead of silently truncating
  if (size > POOL_SIZE) {
    return Buffer.allocUnsafe(size);
  }

  if (offset + size > POOL_SIZE) {
    offset = 0; // wrap around, recycling the start of the pool
  }

  // subarray() shares memory with pool (Buffer.prototype.slice is deprecated)
  const buffer = pool.subarray(offset, offset + size);
  offset += size;
  return buffer;
}

// Release large buffers
function processLargeBuffer(size) {
  let buffer = Buffer.alloc(size);

  // Process buffer...

  buffer = null; // Allow GC
  global.gc?.(); // Force GC if available
}

Monitoring in Production#

// Memory monitoring class: fires onWarning when heap usage crosses a threshold.
class MemoryMonitor {
  /**
   * @param {object} [options]
   * @param {number} [options.threshold=0.9] - heapUsed/heapTotal ratio that triggers a warning
   * @param {number} [options.interval=30000] - check period in milliseconds
   * @param {(data: object) => void} [options.onWarning] - called with warning details
   */
  constructor(options = {}) {
    // ?? (not ||) so an explicit threshold of 0 is honored
    this.threshold = options.threshold ?? 0.9;
    this.interval = options.interval ?? 30000;
    this.onWarning = options.onWarning ?? console.warn;
    this.intervalId = null;
  }

  // Begin periodic checks; calling start() twice must not leak a timer.
  start() {
    if (this.intervalId) return;
    this.intervalId = setInterval(() => {
      this.check();
    }, this.interval);
    // Monitoring alone should not keep the process alive
    this.intervalId.unref();
  }

  stop() {
    clearInterval(this.intervalId);
    this.intervalId = null;
  }

  // Run one check; fires onWarning when usage exceeds the threshold.
  // Returns the raw numbers so callers can do their own reporting.
  check() {
    const { heapUsed, heapTotal } = process.memoryUsage();
    const usage = heapUsed / heapTotal;

    if (usage > this.threshold) {
      this.onWarning({
        message: 'High memory usage',
        usage: `${(usage * 100).toFixed(1)}%`,
        heapUsed: `${Math.round(heapUsed / 1024 / 1024)}MB`,
        heapTotal: `${Math.round(heapTotal / 1024 / 1024)}MB`,
      });
    }

    return { usage, heapUsed, heapTotal };
  }
}

// Usage
const monitor = new MemoryMonitor({
  threshold: 0.85,
  interval: 60000,
  onWarning: (data) => {
    console.error('Memory warning:', data);
    // Alert monitoring system
  },
});

monitor.start();

// MaxListenersExceededWarning is a common leak symptom: listeners are
// being added repeatedly without ever being removed.
process.on('warning', (warning) => {
  if (warning.name === 'MaxListenersExceededWarning') {
    console.error('Possible memory leak:', warning);
  }
});

Heap Dump Analysis#

// Generate heap dump
const v8 = require('v8');

// Write a snapshot straight to disk (filename is auto-generated)
v8.writeHeapSnapshot();

// Or use the third-party heapdump module
const heapdump = require('heapdump');

heapdump.writeSnapshot((err, filename) => {
  console.log('Heap dump written to', filename);
});

// Trigger a dump on demand from the shell: kill -USR2 <pid>
process.on('SIGUSR2', () => {
  heapdump.writeSnapshot();
});

// Analyze in Chrome DevTools:
// 1. Open DevTools
// 2. Go to Memory tab
// 3. Load snapshot
// 4. Compare snapshots to find leaks

Best Practices#

Prevention:
✓ Clear references when done
✓ Remove event listeners
✓ Clear timers and intervals
✓ Use WeakMap/WeakSet for caches

Monitoring:
✓ Track memory usage over time
✓ Set up alerts for high usage
✓ Take periodic heap snapshots
✓ Monitor GC frequency

Optimization:
✓ Use streams for large data
✓ Pool frequently created objects
✓ Avoid closures capturing large data
✓ Process data in chunks

Conclusion#

Node.js memory management relies on V8's garbage collector. Prevent leaks by cleaning up references, use streams for large data, and monitor memory usage in production. Regular heap snapshots help identify leaks before they cause issues.

Share this article

Help spread the word about Bootspring