The zlib module provides compression and decompression using gzip, deflate, and brotli algorithms. Here's how to use it effectively.
Basic Compression#
import zlib from 'node:zlib';
import { promisify } from 'node:util';

const gzip = promisify(zlib.gzip);
const gunzip = promisify(zlib.gunzip);

/**
 * Gzip-compress a string and log before/after sizes plus the ratio.
 * @param {string} input - Text to compress.
 * @returns {Promise<Buffer>} Gzip-compressed bytes.
 */
async function compressString(input) {
  const buffer = Buffer.from(input);
  const compressed = await gzip(buffer);

  const ratio = ((1 - compressed.length / buffer.length) * 100).toFixed(1);
  console.log(`Original: ${buffer.length} bytes`);
  console.log(`Compressed: ${compressed.length} bytes`);
  console.log(`Ratio: ${ratio}%`);

  return compressed;
}

/**
 * Inverse of compressString: gunzip a buffer back to a string.
 * @param {Buffer} compressed - Gzip-compressed bytes.
 * @returns {Promise<string>} The original text.
 */
async function decompressBuffer(compressed) {
  const decompressed = await gunzip(compressed);
  return decompressed.toString();
}

// Usage: round-trip a highly repetitive string (compresses very well).
const text = 'Hello World! '.repeat(1000);
const compressed = await compressString(text);
const decompressed = await decompressBuffer(compressed);
console.log(text === decompressed); // true

Stream Compression#
import zlib from 'node:zlib';
import fs from 'node:fs';
import { pipeline } from 'node:stream/promises';

/**
 * Gzip a file on disk via a streaming pipeline (constant memory use).
 * @param {string} input - Path of the file to compress.
 * @param {string} output - Path for the .gz result.
 */
async function compressFile(input, output) {
  const source = fs.createReadStream(input);
  const sink = fs.createWriteStream(output);
  await pipeline(source, zlib.createGzip(), sink);
  console.log(`Compressed ${input} to ${output}`);
}

/**
 * Inverse of compressFile: stream-gunzip `input` into `output`.
 * @param {string} input - Path of the .gz file.
 * @param {string} output - Path for the restored file.
 */
async function decompressFile(input, output) {
  const source = fs.createReadStream(input);
  const sink = fs.createWriteStream(output);
  await pipeline(source, zlib.createGunzip(), sink);
  console.log(`Decompressed ${input} to ${output}`);
}

// Usage
await compressFile('large-file.txt', 'large-file.txt.gz');
await decompressFile('large-file.txt.gz', 'large-file-restored.txt');

Compression Options#
import zlib from 'node:zlib';

// Gzip stream with every tuning knob set explicitly.
const gzip = zlib.createGzip({
  level: zlib.constants.Z_BEST_COMPRESSION, // 0-9, default 6
  memLevel: 8, // 1-9, default 8; higher = faster but more memory
  strategy: zlib.constants.Z_DEFAULT_STRATEGY,
  chunkSize: 16 * 1024, // default 16 KiB
});

// Compression levels.
// NOTE: Z_DEFAULT_COMPRESSION is -1 (a sentinel zlib maps to level 6),
// not the literal value 6 as a comment here previously claimed.
const levels = {
  none: zlib.constants.Z_NO_COMPRESSION, // 0
  fast: zlib.constants.Z_BEST_SPEED, // 1
  default: zlib.constants.Z_DEFAULT_COMPRESSION, // -1 (maps to level 6)
  best: zlib.constants.Z_BEST_COMPRESSION, // 9
};

// Strategy options
const strategies = {
  default: zlib.constants.Z_DEFAULT_STRATEGY,
  filtered: zlib.constants.Z_FILTERED,
  huffmanOnly: zlib.constants.Z_HUFFMAN_ONLY,
  rle: zlib.constants.Z_RLE,
  fixed: zlib.constants.Z_FIXED,
};

Deflate/Inflate#
import zlib from 'node:zlib';
import { promisify } from 'node:util';

const deflate = promisify(zlib.deflate);
const inflate = promisify(zlib.inflate);
const deflateRaw = promisify(zlib.deflateRaw);
const inflateRaw = promisify(zlib.inflateRaw);

// Standard deflate: DEFLATE payload wrapped in a zlib header + adler32.
async function deflateData(data) {
  return deflate(Buffer.from(data));
}

async function inflateData(compressed) {
  const buf = await inflate(compressed);
  return buf.toString();
}

// Raw deflate: bare DEFLATE bits, no header or checksum.
async function deflateRawData(data) {
  return deflateRaw(Buffer.from(data));
}

async function inflateRawData(compressed) {
  const buf = await inflateRaw(compressed);
  return buf.toString();
}

// Usage
const original = 'Hello, World!';
const deflated = await deflateData(original);
const inflated = await inflateData(deflated);
console.log(inflated); // 'Hello, World!'

Brotli Compression#
import zlib from 'node:zlib';
import { promisify } from 'node:util';
import fs from 'node:fs';
import { pipeline } from 'node:stream/promises';

const brotliCompress = promisify(zlib.brotliCompress);
const brotliDecompress = promisify(zlib.brotliDecompress);

// Options for one-shot buffer compression.
const brotliOptions = (data) => ({
  params: {
    [zlib.constants.BROTLI_PARAM_QUALITY]: 11, // 0-11, default 11 (smallest, slowest)
    [zlib.constants.BROTLI_PARAM_SIZE_HINT]: data.length, // lets the encoder plan ahead
  },
});

/** Compress a string/buffer with Brotli at maximum quality. */
async function compressBrotli(data) {
  return brotliCompress(Buffer.from(data), brotliOptions(data));
}

/** Decompress a Brotli buffer back to a string. */
async function decompressBrotli(compressed) {
  const buf = await brotliDecompress(compressed);
  return buf.toString();
}

// Stream compression
async function compressFileBrotli(input, output) {
  await pipeline(
    fs.createReadStream(input),
    zlib.createBrotliCompress({
      params: {
        [zlib.constants.BROTLI_PARAM_QUALITY]: 6,
      },
    }),
    fs.createWriteStream(output)
  );
}

HTTP Compression#
import http from 'node:http';
import zlib from 'node:zlib';

/**
 * Content-negotiating HTTP server: compresses the response with brotli,
 * gzip, or deflate depending on the client's Accept-Encoding header.
 *
 * Fix over the original: the compression callbacks ignored `err`, so a
 * compression failure would silently `res.end(undefined)` after a 200
 * header was already sent. We now compress first and only write headers
 * once compression has succeeded; on failure we return a 500.
 */
const server = http.createServer((req, res) => {
  const acceptEncoding = req.headers['accept-encoding'] || '';
  const body = 'Hello World! '.repeat(1000);

  // Build a zlib-style callback that sends `result` tagged with `encoding`,
  // or a 500 if compression failed.
  const send = (encoding) => (err, result) => {
    if (err) {
      res.writeHead(500, { 'Content-Type': 'text/plain' });
      res.end('Internal Server Error');
      return;
    }
    res.writeHead(200, {
      'Content-Encoding': encoding,
      'Content-Type': 'text/plain',
    });
    res.end(result);
  };

  // Check supported encodings, preferring brotli.
  if (/\bbr\b/.test(acceptEncoding)) {
    zlib.brotliCompress(body, send('br'));
  } else if (/\bgzip\b/.test(acceptEncoding)) {
    zlib.gzip(body, send('gzip'));
  } else if (/\bdeflate\b/.test(acceptEncoding)) {
    zlib.deflate(body, send('deflate'));
  } else {
    // Client accepts no compression we support: send identity.
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    res.end(body);
  }
});

server.listen(3000);

Streaming with Transform#
import zlib from 'node:zlib';
import fs from 'node:fs';
import { Transform } from 'node:stream';
import { pipeline } from 'node:stream/promises';

/** Transform stream that upper-cases every text chunk passing through. */
class UppercaseTransform extends Transform {
  _transform(chunk, encoding, callback) {
    // Hand the transformed chunk straight to the callback (same as
    // push + empty callback).
    callback(null, chunk.toString().toUpperCase());
  }
}

/** Pipeline: read -> uppercase -> gzip -> write. */
async function processAndCompress(input, output) {
  const stages = [
    fs.createReadStream(input),
    new UppercaseTransform(),
    zlib.createGzip(),
    fs.createWriteStream(output),
  ];
  await pipeline(...stages);
}

// Pipeline: read -> decompress -> transform -> write
async function decompressAndProcess(input, output) {
  await pipeline(
    fs.createReadStream(input),
    zlib.createGunzip(),
    new UppercaseTransform(),
    fs.createWriteStream(output)
  );
}

Memory-Efficient Processing#
import zlib from 'node:zlib';
import fs from 'node:fs';
import { pipeline } from 'node:stream';

/**
 * Gzip a large file chunk-by-chunk, logging bytes read/written and the
 * compression ratio when done.
 *
 * Fix over the original: `input.pipe(gzip).pipe(output)` does not
 * propagate errors between stages or destroy the other streams on
 * failure, which can leak file descriptors. stream.pipeline handles
 * error propagation and cleanup for all three streams.
 *
 * @param {string} inputPath - File to compress.
 * @param {string} outputPath - Destination .gz path.
 * @returns {Promise<void>} Resolves when the whole file has been written.
 */
function compressLargeFile(inputPath, outputPath) {
  return new Promise((resolve, reject) => {
    const input = fs.createReadStream(inputPath);
    const output = fs.createWriteStream(outputPath);
    const gzip = zlib.createGzip();

    let bytesRead = 0;
    let bytesWritten = 0;

    // Count raw bytes in and compressed bytes out as chunks stream past.
    input.on('data', (chunk) => {
      bytesRead += chunk.length;
    });
    gzip.on('data', (chunk) => {
      bytesWritten += chunk.length;
    });

    pipeline(input, gzip, output, (err) => {
      if (err) {
        reject(err);
        return;
      }
      console.log(`Read: ${bytesRead} bytes`);
      console.log(`Written: ${bytesWritten} bytes`);
      console.log(`Compression ratio: ${((1 - bytesWritten / bytesRead) * 100).toFixed(1)}%`);
      resolve();
    });
  });
}

Flush Control#
import zlib from 'node:zlib';

const gzip = zlib.createGzip();

/**
 * Write a chunk and force the compressor to emit it right away.
 * Z_SYNC_FLUSH makes all compressed-so-far output readable immediately,
 * which streaming protocols (SSE, chat, live logs) rely on.
 */
function sendChunk(data) {
  gzip.write(data);
  gzip.flush(zlib.constants.Z_SYNC_FLUSH);
}

// Flush types
const flushTypes = {
  noFlush: zlib.constants.Z_NO_FLUSH,
  partialFlush: zlib.constants.Z_PARTIAL_FLUSH,
  syncFlush: zlib.constants.Z_SYNC_FLUSH,
  fullFlush: zlib.constants.Z_FULL_FLUSH,
  finish: zlib.constants.Z_FINISH,
  block: zlib.constants.Z_BLOCK,
  trees: zlib.constants.Z_TREES,
};

Compare Algorithms#
import zlib from 'node:zlib';
import { promisify } from 'node:util';

const gzip = promisify(zlib.gzip);
const deflate = promisify(zlib.deflate);
const brotliCompress = promisify(zlib.brotliCompress);

/**
 * Compress the same payload with gzip, deflate, and brotli, recording
 * output size and wall-clock time for each, then print a summary table.
 * @param {string|Buffer} data - Payload to benchmark.
 * @returns {Promise<Array<{algorithm: string, size: number, time: number}>>}
 */
async function compareAlgorithms(data) {
  const buffer = Buffer.from(data);

  // Run one compressor and capture its output size and elapsed time.
  const measure = async (algorithm, compress) => {
    const start = Date.now();
    const output = await compress(buffer);
    return { algorithm, size: output.length, time: Date.now() - start };
  };

  // Keep the runs sequential so timings don't contend with each other.
  const results = [];
  results.push(await measure('gzip', gzip));
  results.push(await measure('deflate', deflate));
  results.push(await measure('brotli', brotliCompress));

  console.log(`Original size: ${buffer.length} bytes`);
  console.table(results);

  return results;
}

// Test
const testData = 'Hello World! '.repeat(10000);
await compareAlgorithms(testData);

Best Practices#
Algorithm Selection:
✓ gzip - widely supported, good balance
✓ brotli - better compression, modern browsers
✓ deflate - fast, less overhead
✓ Match client Accept-Encoding
Performance:
✓ Use streams for large files
✓ Choose appropriate compression level
✓ Consider brotli for static assets
✓ Cache compressed versions
Options:
✓ Level 6 for general use
✓ Level 9 for max compression
✓ Level 1-3 for speed priority
✓ Use flush for streaming
Avoid:
✗ Compressing already compressed data
✗ Sync operations on large data
✗ Ignoring errors
✗ Over-compressing small payloads
Conclusion#
The zlib module provides comprehensive compression support through gzip, deflate, and brotli algorithms. Use streams for large files, choose appropriate compression levels based on your speed/size tradeoffs, and implement HTTP compression for web servers. Brotli offers better compression ratios but is slower; gzip provides a good balance and universal support. Always use the promisified versions or streams for async operations to avoid blocking the event loop.