The perf_hooks module provides precise performance timing APIs similar to browser Performance APIs. Here's how to use it.
Basic Timing#
1import { performance } from 'node:perf_hooks';
2
// Get high-resolution timestamp
const start = performance.now();

// Do work: a million square roots as a CPU-bound placeholder
let n = 0;
while (n < 1000000) {
  Math.sqrt(n);
  n += 1;
}

const end = performance.now();
console.log(`Execution time: ${end - start}ms`);

// Time origin
console.log('Time origin:', performance.timeOrigin);
// Unix timestamp (ms since epoch) when the Node.js process started

Performance Marks and Measures#
1import { performance } from 'node:perf_hooks';
2
// Create marks
performance.mark('start');

// Do work
await processData();

performance.mark('process-complete');

// More work
await saveResults();

performance.mark('end');

// Create measures between marks
performance.measure('processing', 'start', 'process-complete');
performance.measure('saving', 'process-complete', 'end');
performance.measure('total', 'start', 'end');

// Report every recorded measure
for (const measure of performance.getEntriesByType('measure')) {
  console.log(`${measure.name}: ${measure.duration.toFixed(2)}ms`);
}

// Clear marks and measures
performance.clearMarks();
performance.clearMeasures();

PerformanceObserver#
1import { PerformanceObserver, performance } from 'node:perf_hooks';
2
// Create an observer that logs each new entry's name and duration
const obs = new PerformanceObserver((list) => {
  for (const entry of list.getEntries()) {
    console.log(`${entry.name}: ${entry.duration.toFixed(2)}ms`);
  }
});

// Observe specific entry types
obs.observe({ entryTypes: ['measure', 'function'] });

// Make measurements
performance.mark('A');
await doWork();
performance.mark('B');
performance.measure('work', 'A', 'B');

// Disconnect when done
obs.disconnect();

Timing Functions#
1import { performance } from 'node:perf_hooks';
2
// Wrap function for timing — each call emits a 'function' performance entry
const timedFn = performance.timerify(originalFunction);

// Observer for function timing
const obs = new PerformanceObserver((list) => {
  for (const entry of list.getEntries()) {
    console.log(`Function ${entry.name} took ${entry.duration}ms`);
  }
});

obs.observe({ entryTypes: ['function'] });

// Call timed function
timedFn(arg1, arg2);
18
19// Async function timing
/**
 * Run fn, log its wall-clock duration (even when it throws), and
 * forward its result to the caller.
 * @param {Function} fn - Zero-argument (possibly async) function to time.
 * @param {string} name - Label used in the log line.
 * @returns {Promise<*>} whatever fn resolves to.
 */
async function timeAsync(fn, name) {
  const startedAt = performance.now();
  try {
    return await fn();
  } finally {
    const elapsed = performance.now() - startedAt;
    console.log(`${name}: ${elapsed.toFixed(2)}ms`);
  }
}
29
await timeAsync(() => fetchData(), 'fetchData');

HTTP Server Timing#
1import http from 'node:http';
2import { performance, PerformanceObserver } from 'node:perf_hooks';
3
4// Track request timing
5const requestTiming = new Map();
6
7const server = http.createServer(async (req, res) => {
8 const requestId = `${Date.now()}-${Math.random()}`;
9
10 performance.mark(`${requestId}-start`);
11
12 // Process request
13 performance.mark(`${requestId}-before-db`);
14 const data = await queryDatabase();
15 performance.mark(`${requestId}-after-db`);
16
17 performance.mark(`${requestId}-before-render`);
18 const html = renderTemplate(data);
19 performance.mark(`${requestId}-after-render`);
20
21 res.end(html);
22
23 performance.mark(`${requestId}-end`);
24
25 // Measure phases
26 performance.measure(
27 `${requestId}-db`,
28 `${requestId}-before-db`,
29 `${requestId}-after-db`
30 );
31 performance.measure(
32 `${requestId}-render`,
33 `${requestId}-before-render`,
34 `${requestId}-after-render`
35 );
36 performance.measure(
37 `${requestId}-total`,
38 `${requestId}-start`,
39 `${requestId}-end`
40 );
});

Histogram for Statistics#
1import { createHistogram, performance } from 'node:perf_hooks';
2
// Create histogram for timing distribution
const histogram = createHistogram();

// Record timings
for (let i = 0; i < 1000; i++) {
  const start = performance.now();
  await doOperation();
  const duration = performance.now() - start;

  // FIX: histogram.record() throws RangeError for values < 1, which the
  // original hit whenever an operation finished in under one microsecond
  // (Math.floor(duration * 1000) === 0). Clamp to the minimum of 1.
  histogram.record(Math.max(1, Math.floor(duration * 1000))); // microseconds
}

// Get statistics (recorded in microseconds, reported in milliseconds)
console.log('Statistics:');
console.log('Min:', histogram.min / 1000, 'ms');
console.log('Max:', histogram.max / 1000, 'ms');
console.log('Mean:', histogram.mean / 1000, 'ms');
console.log('Stddev:', histogram.stddev / 1000, 'ms');
console.log('50th percentile:', histogram.percentile(50) / 1000, 'ms');
console.log('95th percentile:', histogram.percentile(95) / 1000, 'ms');
console.log('99th percentile:', histogram.percentile(99) / 1000, 'ms');

Resource Timing#
1import { PerformanceObserver } from 'node:perf_hooks';
2
import { monitorEventLoopDelay } from 'node:perf_hooks';

// Monitor garbage collection
const gcObserver = new PerformanceObserver((list) => {
  const entries = list.getEntries();
  entries.forEach((entry) => {
    console.log(`GC ${entry.detail.kind}: ${entry.duration.toFixed(2)}ms`);
  });
});

gcObserver.observe({ entryTypes: ['gc'] });

// FIX: the original constructed an event-loop observer but never called
// .observe() on it — and there is no PerformanceObserver entry type for
// event loop delay anyway. monitorEventLoopDelay() is the supported API;
// its histogram values are in nanoseconds.
const loopDelay = monitorEventLoopDelay({ resolution: 20 });
loopDelay.enable();

// Periodically check for stalls; reset so each window is independent.
const loopCheck = setInterval(() => {
  const maxMs = loopDelay.max / 1e6; // nanoseconds -> milliseconds
  if (maxMs > 100) {
    console.warn(`Slow event loop: ${maxMs.toFixed(2)}ms`);
  }
  loopDelay.reset();
}, 1000);
loopCheck.unref(); // don't keep the process alive just for monitoring
22
// Note: there is no PerformanceObserver entry type for event loop delay;
// use monitorEventLoopDelay() from perf_hooks to track it instead

Custom Performance Entries#
1import { performance, PerformanceEntry } from 'node:perf_hooks';
2
3// Custom timing class
class APICallTiming {
  /**
   * Times one API call via a pair of performance marks.
   * @param {string} endpoint - Endpoint path; appears in the measure name.
   */
  constructor(endpoint) {
    this.endpoint = endpoint;
    // Unique base name so repeated calls to the same endpoint don't collide
    this.markName = `api-${endpoint}-${Date.now()}`;
  }

  /** Drop the starting mark. */
  start() {
    performance.mark(`${this.markName}-start`);
  }

  /** Drop the ending mark and record the measure between the two. */
  end() {
    performance.mark(`${this.markName}-end`);
    const startMark = `${this.markName}-start`;
    const endMark = `${this.markName}-end`;
    performance.measure(`API: ${this.endpoint}`, startMark, endMark);
  }
}
23
// Usage: time a real HTTP round trip
const timing = new APICallTiming('/users');
timing.start();
const response = await fetch('http://api.example.com/users');
timing.end();

Performance Timeline#
1import { performance } from 'node:perf_hooks';
2
// Get all entries currently in the performance timeline
const allEntries = performance.getEntries();
console.log('All entries:', allEntries.length);

// Filter by entry type
const marks = performance.getEntriesByType('mark');
const measures = performance.getEntriesByType('measure');

// Look entries up by name
const specificMeasure = performance.getEntriesByName('api-call');

// Remove individual entries by name
performance.clearMarks('myMark');
performance.clearMeasures('myMeasure');

// Remove every remaining mark
performance.clearMarks();
performance.clearMeasures();

Profiling Middleware#
1import { performance } from 'node:perf_hooks';
2
// Express middleware for timing
/**
 * Measures total request handling time and reports it to the client
 * via a Server-Timing response header.
 * @param {object} req - Request; gains a `timingId` property.
 * @param {object} res - Response; its `end` is wrapped.
 * @param {Function} next - Next middleware in the chain.
 */
function timingMiddleware(req, res, next) {
  const requestId = `req-${Date.now()}`;
  req.timingId = requestId;

  performance.mark(`${requestId}-start`);

  // Patch res.end so the measurement closes exactly when the response does.
  const originalEnd = res.end;
  res.end = function (...args) {
    performance.mark(`${requestId}-end`);
    const label = `${req.method} ${req.path}`;
    performance.measure(label, `${requestId}-start`, `${requestId}-end`);

    // Surface the duration in a Server-Timing header.
    const [measure] = performance.getEntriesByName(label);
    if (measure) {
      res.setHeader('Server-Timing', `total;dur=${measure.duration}`);
    }

    return originalEnd.apply(this, args);
  };

  next();
}
33
34// Middleware for specific operations
35function dbTimingMiddleware(req, res, next) {
36 const originalQuery = req.db.query;
37
38 req.db.query = async function (...args) {
39 const queryId = `query-${Date.now()}`;
40 performance.mark(`${queryId}-start`);
41
42 try {
43 return await originalQuery.apply(this, args);
44 } finally {
45 performance.mark(`${queryId}-end`);
46 performance.measure(`DB Query`, `${queryId}-start`, `${queryId}-end`);
47 }
48 };
49
50 next();
}

Benchmark Utility#
1import { performance } from 'node:perf_hooks';
2
/**
 * Benchmark a (sync or async) zero-argument function and log/return
 * timing statistics in milliseconds.
 * @param {string} name - Label used in the report and returned stats.
 * @param {Function} fn - Function to benchmark.
 * @param {number} [iterations=1000] - Number of timed runs; must be >= 1.
 * @returns {Promise<object>} { name, iterations, min, max, mean, median, p95, p99 }
 * @throws {RangeError} if iterations is not a positive integer.
 */
async function benchmark(name, fn, iterations = 1000) {
  // FIX: iterations = 0 previously crashed deep inside (reduce with no
  // initial value on an empty array); fail fast with a clear error.
  if (!Number.isInteger(iterations) || iterations < 1) {
    throw new RangeError('iterations must be a positive integer');
  }

  // Warmup so JIT compilation and caches settle before measuring
  for (let i = 0; i < 10; i++) {
    await fn();
  }

  const times = [];

  for (let i = 0; i < iterations; i++) {
    const start = performance.now();
    await fn();
    times.push(performance.now() - start);
  }

  // Sort ascending so percentiles can be read off by index
  times.sort((a, b) => a - b);

  const stats = {
    name,
    iterations,
    min: times[0],
    max: times[times.length - 1],
    // explicit initial value so reduce can never throw
    mean: times.reduce((a, b) => a + b, 0) / times.length,
    median: times[Math.floor(times.length / 2)],
    p95: times[Math.floor(times.length * 0.95)],
    p99: times[Math.floor(times.length * 0.99)],
  };

  console.log(`\n${name}:`);
  console.log(`  Min: ${stats.min.toFixed(3)}ms`);
  console.log(`  Max: ${stats.max.toFixed(3)}ms`);
  console.log(`  Mean: ${stats.mean.toFixed(3)}ms`);
  console.log(`  Median: ${stats.median.toFixed(3)}ms`);
  console.log(`  P95: ${stats.p95.toFixed(3)}ms`);
  console.log(`  P99: ${stats.p99.toFixed(3)}ms`);

  return stats;
}
40
41// Usage
42await benchmark('JSON parse', () => {
43 JSON.parse('{"key": "value"}');
});

Best Practices#
Measurement:
✓ Use marks and measures
✓ Name entries descriptively
✓ Clear entries after collection
✓ Use PerformanceObserver for async
Accuracy:
✓ Warmup before benchmarking
✓ Multiple iterations
✓ Calculate percentiles
✓ Account for GC pauses
Production:
✓ Sample measurements
✓ Aggregate statistics
✓ Monitor percentiles (p95, p99)
✓ Set alerting thresholds
Avoid:
✗ Too many performance marks
✗ Forgetting to clear entries
✗ Blocking with analysis
✗ Ignoring outliers
Conclusion#
The perf_hooks module provides powerful tools for precise performance measurement in Node.js. Use marks and measures for timing code sections, PerformanceObserver for async monitoring, and histograms for statistical analysis. Implement timing middleware for HTTP servers and build benchmark utilities for comparing implementations. Remember to clear entries to prevent memory growth and sample in production to minimize overhead.