The zlib module provides compression and decompression using Gzip, Deflate, and Brotli algorithms. Here's how to use it.
Basic Compression#
import zlib from 'node:zlib';
import { promisify } from 'node:util';

// Promise-returning wrappers around zlib's callback-style APIs.
const gzip = promisify(zlib.gzip);
const gunzip = promisify(zlib.gunzip);
const deflate = promisify(zlib.deflate);
const inflate = promisify(zlib.inflate);
const brotliCompress = promisify(zlib.brotliCompress);
const brotliDecompress = promisify(zlib.brotliDecompress);

/**
 * Compress a payload with gzip and report the size change.
 * @param {string|Buffer} data - Payload to compress.
 * @returns {Promise<Buffer>} Gzip-compressed bytes.
 */
async function compressGzip(data) {
  const packed = await gzip(data);
  console.log(`Original: ${data.length}, Compressed: ${packed.length}`);
  return packed;
}

/**
 * Inflate a gzip buffer back into a UTF-8 string.
 * @param {Buffer} compressed - Gzip-compressed bytes.
 * @returns {Promise<string>} The original text.
 */
async function decompressGzip(compressed) {
  const raw = await gunzip(compressed);
  return raw.toString();
}

// Usage
const text = 'Hello, World!'.repeat(100);
const compressed = await compressGzip(text);
const decompressed = await decompressGzip(compressed);
Stream-Based Compression#
import zlib from 'node:zlib';
import fs from 'node:fs';
import { pipeline } from 'node:stream/promises';

// Pipe a source file through a transform stream into a destination file.
const transformFile = (source, destination, transform) =>
  pipeline(
    fs.createReadStream(source),
    transform,
    fs.createWriteStream(destination)
  );

/**
 * Gzip-compress `input` into `output` using streams, so the file is
 * never loaded into memory all at once.
 */
async function compressFile(input, output) {
  await transformFile(input, output, zlib.createGzip());
  console.log(`Compressed ${input} to ${output}`);
}

/**
 * Decompress the gzip file `input` into `output` using streams.
 */
async function decompressFile(input, output) {
  await transformFile(input, output, zlib.createGunzip());
  console.log(`Decompressed ${input} to ${output}`);
}

// Usage
await compressFile('data.txt', 'data.txt.gz');
await decompressFile('data.txt.gz', 'data-restored.txt');
HTTP Compression#
import http from 'node:http';
import zlib from 'node:zlib';
import fs from 'node:fs';

/**
 * HTTP server that streams large-file.html, compressed with the best
 * encoding the client advertises in its Accept-Encoding header.
 */
const server = http.createServer((req, res) => {
  const acceptEncoding = req.headers['accept-encoding'] || '';

  const raw = fs.createReadStream('large-file.html');

  // The response body depends on Accept-Encoding, so shared caches must
  // key their entries on it.
  res.setHeader('Vary', 'Accept-Encoding');

  // Prefer brotli (best ratio), then gzip, then deflate, else identity.
  if (acceptEncoding.includes('br')) {
    res.setHeader('Content-Encoding', 'br');
    raw.pipe(zlib.createBrotliCompress()).pipe(res);
  } else if (acceptEncoding.includes('gzip')) {
    res.setHeader('Content-Encoding', 'gzip');
    raw.pipe(zlib.createGzip()).pipe(res);
  } else if (acceptEncoding.includes('deflate')) {
    res.setHeader('Content-Encoding', 'deflate');
    raw.pipe(zlib.createDeflate()).pipe(res);
  } else {
    raw.pipe(res);
  }
});
server.listen(3000);
Compression Options#
import zlib from 'node:zlib';

// Gzip tuning knobs: trade CPU time for ratio with `level`, bound
// internal memory with `memLevel`.
const gzipOptions = {
  level: 9, // 0-9, higher = better compression, slower
  memLevel: 8, // 1-9, memory usage
  strategy: zlib.constants.Z_DEFAULT_STRATEGY,
};

const gzipStream = zlib.createGzip(gzipOptions);

// Brotli takes its settings via a `params` map keyed by BROTLI_PARAM_*
// constants; MODE_TEXT optimizes for UTF-8 text, SIZE_HINT lets the
// encoder plan for the expected input size.
const brotliOptions = {
  params: {
    [zlib.constants.BROTLI_PARAM_MODE]: zlib.constants.BROTLI_MODE_TEXT,
    [zlib.constants.BROTLI_PARAM_QUALITY]: 11, // 0-11
    [zlib.constants.BROTLI_PARAM_SIZE_HINT]: 1000000,
  },
};

const brotliStream = zlib.createBrotliCompress(brotliOptions);

// Deflate accepts the same zlib-style options as gzip.
const deflateOptions = {
  level: zlib.constants.Z_BEST_COMPRESSION,
  windowBits: 15,
};
const deflateStream = zlib.createDeflate(deflateOptions);
Comparing Algorithms#
import zlib from 'node:zlib';
import { promisify } from 'node:util';

const gzip = promisify(zlib.gzip);
const deflate = promisify(zlib.deflate);
const brotli = promisify(zlib.brotliCompress);

/**
 * Compress `data` with gzip, deflate, and brotli, log each result, and
 * return the measured sizes so callers can compare programmatically.
 * (Returning a value is backward compatible: previous callers ignored
 * the undefined result.)
 * @param {string|Buffer} data - Payload to compress.
 * @returns {Promise<{original: number, gzip: number, deflate: number, brotli: number}>}
 *   Byte sizes of the original and each compressed form.
 */
async function compareCompression(data) {
  const buffer = Buffer.from(data);

  console.log(`Original size: ${buffer.length} bytes`);

  // Gzip
  const gzipped = await gzip(buffer);
  console.log(`Gzip: ${gzipped.length} bytes (${ratio(buffer, gzipped)}%)`);

  // Deflate
  const deflated = await deflate(buffer);
  console.log(`Deflate: ${deflated.length} bytes (${ratio(buffer, deflated)}%)`);

  // Brotli
  const brotlied = await brotli(buffer);
  console.log(`Brotli: ${brotlied.length} bytes (${ratio(buffer, brotlied)}%)`);

  return {
    original: buffer.length,
    gzip: gzipped.length,
    deflate: deflated.length,
    brotli: brotlied.length,
  };
}

// Percentage of the original size that remains after compression.
function ratio(original, compressed) {
  return ((compressed.length / original.length) * 100).toFixed(2);
}

// Test with different data types
const text = 'Hello World '.repeat(10000);
const json = JSON.stringify(Array(1000).fill({ name: 'Test', value: 123 }));

await compareCompression(text);
await compareCompression(json);
Incremental Compression#
import zlib from 'node:zlib';

/**
 * Compresses data fed in piece by piece, buffering the compressed
 * output until end() is called.
 */
class IncrementalCompressor {
  /**
   * @param {'gzip'|'deflate'|'brotli'} algorithm - Compression algorithm.
   * @throws {Error} If the algorithm name is not recognized.
   */
  constructor(algorithm = 'gzip') {
    this.chunks = [];

    switch (algorithm) {
      case 'gzip':
        this.compressor = zlib.createGzip();
        break;
      case 'deflate':
        this.compressor = zlib.createDeflate();
        break;
      case 'brotli':
        this.compressor = zlib.createBrotliCompress();
        break;
      default:
        // Fail fast instead of crashing later on an undefined stream.
        throw new Error(`Unknown algorithm: ${algorithm}`);
    }

    this.compressor.on('data', (chunk) => {
      this.chunks.push(chunk);
    });
  }

  /** Feed one chunk of data into the compressor. */
  write(data) {
    this.compressor.write(data);
  }

  /**
   * Flush the compressor and resolve with the full compressed buffer.
   * @returns {Promise<Buffer>} All compressed output concatenated.
   */
  end() {
    return new Promise((resolve, reject) => {
      // Without this handler a stream error would leave the promise
      // pending forever.
      this.compressor.on('error', reject);
      this.compressor.on('end', () => {
        resolve(Buffer.concat(this.chunks));
      });
      this.compressor.end();
    });
  }
}

// Usage
const compressor = new IncrementalCompressor('gzip');
compressor.write('First chunk of data\n');
compressor.write('Second chunk of data\n');
compressor.write('Third chunk of data\n');
const result = await compressor.end();
Memory-Efficient Processing#
import zlib from 'node:zlib';
import { Transform } from 'node:stream';

/**
 * Transform stream that upper-cases the text of each chunk as it
 * passes through, so large inputs never need to be buffered whole.
 */
class ChunkProcessor extends Transform {
  _transform(chunk, encoding, callback) {
    // Hand the transformed chunk straight back to the stream machinery.
    callback(null, chunk.toString().toUpperCase());
  }
}
12
// These were referenced below but never imported in this snippet.
import fs from 'node:fs';
import { pipeline } from 'node:stream/promises';

/**
 * Re-compress a gzip file while transforming its text content, streamed
 * chunk by chunk so the whole file is never held in memory.
 * @param {string} input - Path to a gzip-compressed source file.
 * @param {string} output - Path for the transformed, re-gzipped result.
 */
async function processLargeFile(input, output) {
  await pipeline(
    fs.createReadStream(input),
    zlib.createGunzip(),
    new ChunkProcessor(),
    zlib.createGzip(),
    fs.createWriteStream(output)
  );
}
Compression Utilities#
import zlib from 'node:zlib';
import { promisify } from 'node:util';

// Promisify once at module scope instead of on every method call.
const gzipAsync = promisify(zlib.gzip);
const gunzipAsync = promisify(zlib.gunzip);
const brotliAsync = promisify(zlib.brotliCompress);
const unbrotliAsync = promisify(zlib.brotliDecompress);
const deflateAsync = promisify(zlib.deflate);
const inflateAsync = promisify(zlib.inflate);

/** Static utility helpers for string/buffer compression. */
class Compressor {
  /**
   * Gzip-compress a string or Buffer.
   * @returns {Promise<Buffer>}
   */
  static async gzip(data, options = {}) {
    return gzipAsync(Buffer.from(data), options);
  }

  /** Decompress a gzip Buffer into a UTF-8 string. */
  static async gunzip(data) {
    return (await gunzipAsync(data)).toString();
  }

  /**
   * Brotli-compress a string or Buffer.
   * @returns {Promise<Buffer>}
   */
  static async brotli(data, options = {}) {
    return brotliAsync(Buffer.from(data), options);
  }

  /** Decompress a brotli Buffer into a UTF-8 string. */
  static async unbrotli(data) {
    return (await unbrotliAsync(data)).toString();
  }

  /**
   * Deflate-compress a string or Buffer.
   * @returns {Promise<Buffer>}
   */
  static async deflate(data, options = {}) {
    return deflateAsync(Buffer.from(data), options);
  }

  /** Decompress a deflate Buffer into a UTF-8 string. */
  static async inflate(data) {
    return (await inflateAsync(data)).toString();
  }

  /**
   * Compress with a named algorithm: 'gzip', 'brotli', or 'deflate'.
   * @throws {Error} For unknown algorithm names.
   */
  static async compress(data, algorithm = 'gzip') {
    switch (algorithm) {
      case 'gzip':
        return this.gzip(data);
      case 'brotli':
        return this.brotli(data);
      case 'deflate':
        return this.deflate(data);
      default:
        throw new Error(`Unknown algorithm: ${algorithm}`);
    }
  }

  /**
   * Decompress with a named algorithm: 'gzip', 'brotli', or 'deflate'.
   * @throws {Error} For unknown algorithm names.
   */
  static async decompress(data, algorithm = 'gzip') {
    switch (algorithm) {
      case 'gzip':
        return this.gunzip(data);
      case 'brotli':
        return this.unbrotli(data);
      case 'deflate':
        return this.inflate(data);
      default:
        throw new Error(`Unknown algorithm: ${algorithm}`);
    }
  }
}
JSON Compression#
import zlib from 'node:zlib';
import { promisify } from 'node:util';

const gzip = promisify(zlib.gzip);
const gunzip = promisify(zlib.gunzip);

/**
 * Serialize an object to JSON, gzip it, and base64-encode the result
 * so it fits in text-only channels (headers, JSON fields, key stores).
 * @param {object} obj - Any JSON-serializable value.
 * @returns {Promise<string>} Base64 of the gzipped JSON.
 */
async function compressJson(obj) {
  const packed = await gzip(JSON.stringify(obj));
  return packed.toString('base64');
}

/**
 * Reverse of compressJson: base64-decode, gunzip, and parse the JSON.
 * @param {string} base64String - Output of compressJson.
 * @returns {Promise<object>} The original value.
 */
async function decompressJson(base64String) {
  const raw = await gunzip(Buffer.from(base64String, 'base64'));
  return JSON.parse(raw.toString());
}

// Usage
const data = {
  users: Array(1000).fill({ name: 'John', email: 'john@example.com' }),
};

const compressed = await compressJson(data);
console.log(`Compressed length: ${compressed.length}`);

const restored = await decompressJson(compressed);
console.log(`Restored ${restored.users.length} users`);
Error Handling#
import zlib from 'node:zlib';
import fs from 'node:fs';
import { pipeline } from 'node:stream/promises';

/**
 * Compress `input` into `output`, translating expected failures into a
 * result object instead of throwing.
 * @param {string} input - Source file path.
 * @param {string} output - Destination file path.
 * @returns {Promise<{success: boolean, error?: string}>}
 * @throws Rethrows any error that is not a recognized failure mode.
 */
async function safeCompress(input, output) {
  try {
    await pipeline(
      fs.createReadStream(input),
      zlib.createGzip(),
      fs.createWriteStream(output)
    );
    return { success: true };
  } catch (error) {
    if (error.code === 'ENOENT') {
      return { success: false, error: 'File not found' };
    }
    if (error.code === 'Z_DATA_ERROR') {
      return { success: false, error: 'Compression failed' };
    }
    // Unknown failure: propagate to the caller.
    throw error;
  }
}

/**
 * Check whether `buffer` is a valid gzip stream by attempting a full
 * decompression.
 * @param {Buffer} buffer - Candidate gzip data.
 * @returns {Promise<boolean>} True if the buffer decompresses cleanly.
 */
function isValidGzip(buffer) {
  return new Promise((resolve) => {
    zlib.gunzip(buffer, (err) => {
      resolve(!err);
    });
  });
}
Express Middleware#
import express from 'express';
import zlib from 'node:zlib';

/**
 * Express middleware that compresses string response bodies larger than
 * `threshold` bytes, negotiating brotli or gzip via Accept-Encoding.
 * @param {{threshold?: number}} [options] - `threshold` defaults to 1024
 *   bytes; `??` (not `||`) so an explicit 0 compresses everything.
 * @returns {Function} Express request handler.
 */
function compressionMiddleware(options = {}) {
  const threshold = options.threshold ?? 1024;

  return (req, res, next) => {
    const acceptEncoding = req.headers['accept-encoding'] || '';
    const originalSend = res.send.bind(res);

    res.send = (body) => {
      if (typeof body === 'string' && body.length > threshold) {
        // The representation varies with Accept-Encoding; caches must
        // key on it or they may serve compressed bytes to clients that
        // cannot decode them.
        res.setHeader('Vary', 'Accept-Encoding');

        if (acceptEncoding.includes('br')) {
          zlib.brotliCompress(body, (err, compressed) => {
            if (!err) {
              res.setHeader('Content-Encoding', 'br');
              res.setHeader('Content-Length', compressed.length);
              originalSend(compressed);
            } else {
              // Fall back to the uncompressed body on failure.
              originalSend(body);
            }
          });
          return;
        }

        if (acceptEncoding.includes('gzip')) {
          zlib.gzip(body, (err, compressed) => {
            if (!err) {
              res.setHeader('Content-Encoding', 'gzip');
              res.setHeader('Content-Length', compressed.length);
              originalSend(compressed);
            } else {
              originalSend(body);
            }
          });
          return;
        }
      }

      originalSend(body);
    };

    next();
  };
}

const app = express();
app.use(compressionMiddleware({ threshold: 1024 }));
Best Practices#
Algorithm Choice:
✓ Brotli for static assets (best ratio)
✓ Gzip for dynamic content (fast)
✓ Deflate for legacy support
✓ Match client Accept-Encoding
Performance:
✓ Use streams for large files
✓ Set appropriate compression level
✓ Cache compressed assets
✓ Set minimum size threshold
Implementation:
✓ Handle errors properly
✓ Validate before decompressing
✓ Use pipeline for streams
✓ Promisify callback APIs
Avoid:
✗ Compressing already compressed data
✗ High compression for real-time
✗ Ignoring the client's Accept-Encoding header
✗ Buffering huge payloads fully in memory instead of streaming
Conclusion#
The zlib module provides efficient compression for Node.js applications. Use Brotli for best compression ratios on static assets, Gzip for dynamic content, and streams for large files. Always check Accept-Encoding headers in HTTP responses and handle errors appropriately. Set compression thresholds to avoid overhead on small responses.