The perf_hooks module provides APIs for performance measurement and timing. Here's how to use it.
Basic Performance Measurement#
// NOTE: top-level `await` (used below) is only valid in an ES module;
// with CommonJS `require()` this example is a SyntaxError, so use `import`.
import { performance } from 'node:perf_hooks';

// Measure elapsed wall-clock time with performance.now().
const start = performance.now();

// Do some synchronous work.
for (let i = 0; i < 1000000; i++) {
  Math.sqrt(i);
}

const end = performance.now();
console.log(`Execution time: ${end - start} ms`);

// Using performance.mark() and performance.measure().
// A stand-in async operation so the example is self-contained.
const someAsyncOperation = () =>
  new Promise((resolve) => setTimeout(resolve, 10));

performance.mark('start');

// Do async work.
await someAsyncOperation();

performance.mark('end');
performance.measure('My Operation', 'start', 'end');

// Get measurements
const measures = performance.getEntriesByType('measure');
console.log(measures[0].duration); // Duration in ms

Performance Observer#
// Top-level `await` below requires an ES module, so use `import`
// rather than CommonJS `require()` (which would be a SyntaxError here).
import { performance, PerformanceObserver } from 'node:perf_hooks';

// Create an observer; its callback receives entries as they are recorded.
const obs = new PerformanceObserver((list) => {
  const entries = list.getEntries();
  entries.forEach((entry) => {
    console.log(`${entry.name}: ${entry.duration.toFixed(2)} ms`);
  });
});

// Observe specific entry types.
obs.observe({ entryTypes: ['measure', 'function'] });

// Measure operations.
performance.mark('fetch-start');
await fetch('https://api.example.com/data');
performance.mark('fetch-end');
performance.measure('API Fetch', 'fetch-start', 'fetch-end');

// Disconnect when done.
obs.disconnect();

// Observe with buffered entries (replays entries that were already
// recorded before observe() was called)
obs.observe({ entryTypes: ['measure'], buffered: true });

Function Timing#
import { performance, PerformanceObserver } from 'node:perf_hooks';

// A concrete function to wrap, so the example is runnable
// (timerify() must be given a real function reference).
function originalFunction(n) {
  let total = 0;
  for (let i = 0; i < n; i++) total += i;
  return total;
}

// Wrap function for timing: each call emits a 'function' entry.
const timedFunction = performance.timerify(originalFunction);

// Observer for function entries.
const obs = new PerformanceObserver((list) => {
  list.getEntries().forEach((entry) => {
    console.log(`${entry.name} took ${entry.duration} ms`);
  });
});

obs.observe({ entryTypes: ['function'] });

// Call timed function — same arguments and return value as the original.
const args = 1000;
timedFunction(args);

// Wrap async function.
// NOTE(review): timerify appears to time until the wrapped function
// *returns* (i.e. the promise), not until the promise settles — confirm
// against the Node.js docs for your version.
async function fetchData(url) {
  const response = await fetch(url);
  return response.json();
}

const timedFetch = performance.timerify(fetchData);

// Time class methods (see DataService below)
class DataService {
  constructor() {
    this.getData = performance.timerify(this.getData.bind(this));
  }

  async getData(id) {
    // ... fetch data
  }
}

Custom Metrics#
import { performance } from 'node:perf_hooks';

// Attach contextual metadata to a mark via the `detail` option.
performance.mark('request-start', {
  detail: {
    url: '/api/users',
    method: 'GET',
  },
});

// Access mark details.
const marks = performance.getEntriesByName('request-start');
console.log(marks[0].detail); // { url: '/api/users', method: 'GET' }

// Clear marks — always clean up so the entry buffer doesn't grow unbounded.
performance.clearMarks('request-start');
performance.clearMarks(); // Clear all

// Clear measures.
performance.clearMeasures('My Operation');
performance.clearMeasures(); // Clear all

// Resource timing entries, if any have been recorded
// (populated by fetch() in recent Node.js versions).
const resources = performance.getEntriesByType('resource');
resources.forEach((resource) => {
  console.log(`${resource.name}: ${resource.duration} ms`);
});

HTTP Server Timing#
import http from 'node:http';
import { performance } from 'node:perf_hooks';

// Monotonically increasing id: Date.now() alone can collide when two
// requests arrive in the same millisecond, making getEntriesByName()
// ambiguous and the cleanup below clear the wrong entries.
let nextRequestSeq = 0;

// Wraps an async request handler and logs each request's duration.
function timingMiddleware(handler) {
  return async (req, res) => {
    const requestId = `${req.method}-${req.url}-${++nextRequestSeq}`;

    performance.mark(`${requestId}-start`);

    // Patch res.end so the measurement is taken when the response is sent.
    const originalEnd = res.end;
    res.end = function (...args) {
      performance.mark(`${requestId}-end`);
      performance.measure(requestId, `${requestId}-start`, `${requestId}-end`);

      const measure = performance.getEntriesByName(requestId)[0];
      console.log(`${req.method} ${req.url}: ${measure.duration.toFixed(2)} ms`);

      // Cleanup so the global entry buffer does not grow unbounded.
      performance.clearMarks(`${requestId}-start`);
      performance.clearMarks(`${requestId}-end`);
      performance.clearMeasures(requestId);

      return originalEnd.apply(this, args);
    };

    await handler(req, res);
  };
}

// Usage
const server = http.createServer(
  timingMiddleware(async (req, res) => {
    // Handle request
    res.end('Hello World');
  })
);

Database Query Timing#
import { performance } from 'node:perf_hooks';

/**
 * Records the duration and outcome of async operations (e.g. DB queries).
 */
class QueryTimer {
  constructor() {
    this.queries = [];
  }

  /**
   * Time an async function and record its result.
   * Failures are recorded too, then rethrown so callers still see them.
   */
  async time(name, queryFn) {
    const start = performance.now();

    try {
      const result = await queryFn();
      this.queries.push({
        name,
        duration: performance.now() - start,
        timestamp: new Date(),
        success: true,
      });
      return result;
    } catch (error) {
      this.queries.push({
        name,
        duration: performance.now() - start,
        timestamp: new Date(),
        success: false,
        error: error.message,
      });
      throw error;
    }
  }

  /** Aggregate stats; safe to call before anything was recorded. */
  getStats() {
    const durations = this.queries.map((q) => q.duration);
    const total = durations.reduce((a, b) => a + b, 0);
    return {
      count: durations.length,
      total,
      // Guard the empty case: the unguarded forms produce NaN (average)
      // and ±Infinity (min/max) when no queries were recorded.
      average: durations.length > 0 ? total / durations.length : 0,
      min: durations.length > 0 ? Math.min(...durations) : 0,
      max: durations.length > 0 ? Math.max(...durations) : 0,
    };
  }

  clear() {
    this.queries = [];
  }
}

// Usage — with a stand-in `db` so the example is self-contained.
// (Top-level await requires an ES module.)
const db = { query: async (sql, params) => [{ sql, params }] };
const userId = 1;

const timer = new QueryTimer();

const users = await timer.time('getUsers', () =>
  db.query('SELECT * FROM users')
);

const posts = await timer.time('getPosts', () =>
  db.query('SELECT * FROM posts WHERE user_id = ?', [userId])
);

console.log(timer.getStats());

Histogram#
import { createHistogram, performance } from 'node:perf_hooks';

// Create histogram for a timing distribution.
const histogram = createHistogram();

// A stand-in workload so the example is self-contained.
const someOperation = () => Math.sqrt(Math.random());

// Record values.
for (let i = 0; i < 1000; i++) {
  const start = performance.now();
  // Do work
  someOperation();
  const duration = performance.now() - start;
  // record() requires an integer >= 1: a sub-microsecond duration would
  // floor to 0 and throw a RangeError, so clamp the converted value.
  histogram.record(Math.max(1, Math.floor(duration * 1e6))); // nanoseconds
}

// Get statistics (reported in the recorded unit, here nanoseconds).
console.log('Min:', histogram.min);
console.log('Max:', histogram.max);
console.log('Mean:', histogram.mean);
console.log('Stddev:', histogram.stddev);
console.log('Percentiles:');
console.log(' 50th:', histogram.percentile(50));
console.log(' 90th:', histogram.percentile(90));
console.log(' 99th:', histogram.percentile(99));

// Reset histogram
histogram.reset();

// A histogram can also be created with an explicit trackable range
const recordableHistogram = createHistogram({ lowest: 1, highest: 1e9 });

Event Loop Monitoring#
// Monitor the event loop's delay to spot blocking work on the loop.
const { monitorEventLoopDelay } = require('perf_hooks');

// Create a monitor that samples the loop delay every 20 ms.
const h = monitorEventLoopDelay({ resolution: 20 });

// Start monitoring.
h.enable();

// The histogram reports nanoseconds; convert for display.
const toMs = (nanos) => nanos / 1e6;

// After some time has passed, report the collected statistics.
setTimeout(() => {
  console.log('Event loop delay:');
  console.log(' Min:', toMs(h.min), 'ms');
  console.log(' Max:', toMs(h.max), 'ms');
  console.log(' Mean:', toMs(h.mean), 'ms');
  console.log(' Stddev:', toMs(h.stddev), 'ms');
  console.log(' P50:', toMs(h.percentile(50)), 'ms');
  console.log(' P99:', toMs(h.percentile(99)), 'ms');

  h.disable();
  h.reset();
}, 10000);

// Detect event loop blocking
setInterval(() => {
  const p99 = h.percentile(99) / 1e6;
  if (p99 > 100) {
    console.warn(`Event loop delay P99: ${p99.toFixed(2)} ms`);
  }
}, 5000);

Resource Usage Tracking#
// NOTE: PerformanceObserver is used by the MetricsCollector further
// below in this listing, so it must be imported here as well — the
// original pulled in only `performance`, causing a ReferenceError.
import { performance, PerformanceObserver } from 'node:perf_hooks';

// Snapshot of the current process's memory usage, in megabytes.
function getMemoryMetrics() {
  const usage = process.memoryUsage();
  return {
    heapUsed: usage.heapUsed / 1024 / 1024,
    heapTotal: usage.heapTotal / 1024 / 1024,
    external: usage.external / 1024 / 1024,
    rss: usage.rss / 1024 / 1024,
  };
}
14// Combined metrics collector
15class MetricsCollector {
16 constructor() {
17 this.metrics = [];
18 this.observer = new PerformanceObserver((list) => {
19 list.getEntries().forEach((entry) => {
20 this.metrics.push({
21 name: entry.name,
22 duration: entry.duration,
23 timestamp: Date.now(),
24 memory: getMemoryMetrics(),
25 });
26 });
27 });
28 }
29
30 start() {
31 this.observer.observe({ entryTypes: ['measure'] });
32 }
33
34 stop() {
35 this.observer.disconnect();
36 }
37
38 getReport() {
39 return {
40 metrics: this.metrics,
41 summary: {
42 totalOperations: this.metrics.length,
43 averageDuration:
44 this.metrics.reduce((sum, m) => sum + m.duration, 0) /
45 this.metrics.length,
46 peakMemory: Math.max(...this.metrics.map((m) => m.memory.heapUsed)),
47 },
48 };
49 }
}

Express Middleware#
import { performance } from 'node:perf_hooks';

/**
 * Express-style middleware that records per-request duration and heap delta.
 *
 * @param {object} [options]
 * @param {number} [options.logSlowRequests=1000] Warn when a request takes
 *   longer than this many milliseconds.
 * @param {(metric: object) => void} [options.onMetric] Called with every
 *   collected metric.
 */
function performanceMiddleware(options = {}) {
  const { logSlowRequests = 1000, onMetric } = options;

  return (req, res, next) => {
    const start = performance.now();
    const startMemory = process.memoryUsage().heapUsed;

    // 'finish' fires once the response has been fully handed off.
    res.on('finish', () => {
      const duration = performance.now() - start;
      const memoryDelta = process.memoryUsage().heapUsed - startMemory;

      const metric = {
        method: req.method,
        path: req.path, // Express-specific; plain http req has no `path`
        statusCode: res.statusCode,
        duration,
        memoryDelta,
        timestamp: new Date().toISOString(),
      };

      if (duration > logSlowRequests) {
        console.warn('Slow request:', metric);
      }

      if (onMetric) {
        onMetric(metric);
      }
    });

    next();
  };
}

// Usage
app.use(performanceMiddleware({
  logSlowRequests: 500,
  onMetric: (metric) => metricsService.record(metric),
}));

Best Practices#
Measurement:
✓ Use marks and measures
✓ Clean up entries after use
✓ Use observers for async
✓ Include context in marks
Monitoring:
✓ Track event loop delay
✓ Monitor memory alongside timing
✓ Set up alerting thresholds
✓ Log slow operations
Performance:
✓ Minimize observer overhead
✓ Sample in production
✓ Aggregate metrics
✓ Use histograms for distribution
Avoid:
✗ Too many marks without cleanup
✗ Synchronous getEntries in hot paths
✗ Ignoring memory leaks
✗ Excessive logging overhead
Conclusion#
The perf_hooks module provides powerful APIs for performance measurement in Node.js. Use marks and measures for timing, observers for async operations, and histograms for distribution analysis. Monitor event loop delay to detect blocking operations and combine with memory metrics for comprehensive profiling.